text stringlengths 2 1.04M | meta dict |
|---|---|
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
~
~ WSO2 Inc. licenses this file to you under the Apache License,
~ Version 2.0 (the "License"); you may not use this file except
~ in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>org.wso2.carbon.identity</groupId>
<artifactId>user-mgt-workflow-feature</artifactId>
<version>4.5.10-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>org.wso2.carbon.user.mgt.workflow.server.feature</artifactId>
<packaging>pom</packaging>
<name>WSO2 Carbon - User Management Workflow Core Feature</name>
<url>http://wso2.org</url>
<description>This feature contains the core bundles required for Back-end User Management Workflow
functionality
</description>
<dependencies>
<dependency>
<groupId>org.wso2.carbon.identity</groupId>
<artifactId>org.wso2.carbon.identity.workflow.mgt</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.wso2.maven</groupId>
<artifactId>carbon-p2-plugin</artifactId>
<version>${carbon.p2.plugin.version}</version>
<executions>
<execution>
<id>p2-feature-generation</id>
<phase>package</phase>
<goals>
<goal>p2-feature-gen</goal>
</goals>
<configuration>
<id>org.wso2.carbon.user.mgt.workflow.server</id>
<propertiesFile>../../etc/feature.properties</propertiesFile>
<adviceFile>
<properties>
<propertyDef>org.wso2.carbon.p2.category.type:server</propertyDef>
<propertyDef>org.eclipse.equinox.p2.type.group:false</propertyDef>
</properties>
</adviceFile>
<bundles>
<bundleDef>org.wso2.carbon.identity:org.wso2.carbon.user.mgt.workflow</bundleDef>
</bundles>
<importFeatures>
<importFeatureDef>org.wso2.carbon.identity.workflow.mgt:${carbon.identity.version}</importFeatureDef>
</importFeatures>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
| {
"content_hash": "945049e20438289e506ede2b0cc9aeda",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 201,
"avg_line_length": 43.65822784810127,
"alnum_prop": 0.5575529138880835,
"repo_name": "dulanjal/carbon-identity",
"id": "b2f188ae561fb1e229a84f84e9b5b3462fc9e9ef",
"size": "3449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "features/user-mgt-workflow/org.wso2.carbon.user.mgt.workflow.server.feature/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "102709"
},
{
"name": "HTML",
"bytes": "115426"
},
{
"name": "Java",
"bytes": "11657237"
},
{
"name": "JavaScript",
"bytes": "407373"
},
{
"name": "Objective-C",
"bytes": "13608"
},
{
"name": "PLSQL",
"bytes": "54585"
},
{
"name": "Thrift",
"bytes": "338"
},
{
"name": "XSLT",
"bytes": "1030"
}
],
"symlink_target": ""
} |
package container
import (
	"fmt"
	"math"
	"testing"

	"github.com/docker/docker/api/types"

	"gotest.tools/v3/assert"
)
// TestCalculateMemUsageUnixNoCache verifies that the cgroup v1
// inactive file cache is subtracted from the raw usage figure.
func TestCalculateMemUsageUnixNoCache(t *testing.T) {
	// Given: 500 bytes reported, of which 400 are inactive file cache.
	memStats := types.MemoryStats{
		Usage: 500,
		Stats: map[string]uint64{"total_inactive_file": 400},
	}

	// When
	usage := calculateMemUsageUnixNoCache(memStats)

	// Then: only the non-cache portion (100) counts as used memory.
	assert.Assert(t, inDelta(100.0, usage, 1e-6))
}
func TestCalculateMemPercentUnixNoCache(t *testing.T) {
// Given
someLimit := float64(100.0)
noLimit := float64(0.0)
used := float64(70.0)
// When and Then
t.Run("Limit is set", func(t *testing.T) {
result := calculateMemPercentUnixNoCache(someLimit, used)
assert.Assert(t, inDelta(70.0, result, 1e-6))
})
t.Run("No limit, no cgroup data", func(t *testing.T) {
result := calculateMemPercentUnixNoCache(noLimit, used)
assert.Assert(t, inDelta(0.0, result, 1e-6))
})
}
// inDelta returns an assertion comparison reporting whether x and y are
// within delta of each other. The returned closure yields (ok, message):
// ok is true when |x-y| <= delta, and message describes the mismatch
// otherwise (empty on success).
func inDelta(x, y, delta float64) func() (bool, string) {
	return func() (bool, string) {
		// math.Abs expresses the symmetric tolerance check directly,
		// replacing the manual diff < -delta || diff > delta comparison.
		if math.Abs(x-y) > delta {
			return false, fmt.Sprintf("%f != %f within %f", x, y, delta)
		}
		return true, ""
	}
}
| {
"content_hash": "d74e3c1aa57e1d3a24bcc83cc111b994",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 93,
"avg_line_length": 23.617021276595743,
"alnum_prop": 0.6720720720720721,
"repo_name": "thaJeztah/cli",
"id": "c6cd0eb9246be091323905d9dc31ae3e303ba15e",
"size": "1110",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cli/command/container/stats_helpers_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "9508"
},
{
"name": "Go",
"bytes": "2499191"
},
{
"name": "HCL",
"bytes": "3179"
},
{
"name": "Makefile",
"bytes": "8025"
},
{
"name": "Shell",
"bytes": "311752"
}
],
"symlink_target": ""
} |
// Copyright (c) 2008-2022, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// <auto-generated>
// This code was generated by a tool.
// Hazelcast Client Protocol Code Generator
// https://github.com/hazelcast/hazelcast-client-protocol
// Change to this file will be lost if the code is regenerated.
// </auto-generated>
#pragma warning disable IDE0051 // Remove unused private members
// ReSharper disable UnusedMember.Local
// ReSharper disable RedundantUsingDirective
// ReSharper disable CheckNamespace
using System;
using System.Threading;
using System.Threading.Tasks;
using System.Collections.Generic;
using Hazelcast.Protocol.BuiltInCodecs;
using Hazelcast.Protocol.CustomCodecs;
using Hazelcast.Core;
using Hazelcast.Messaging;
using Hazelcast.Clustering;
using Hazelcast.Serialization;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Protocol.Codecs
{
    /// <summary>
    /// Returns true if this list contains all of the elements of the specified collection.
    ///</summary>
#if SERVER_CODEC
    internal static class ListContainsAllServerCodec
#else
    internal static class ListContainsAllCodec
#endif
    {
        // Message-type identifiers are fixed by the Hazelcast client protocol;
        // do not change them here (the whole file is regenerated by the codec tool).
        public const int RequestMessageType = 328448; // 0x050300
        public const int ResponseMessageType = 328449; // 0x050301
        // Initial frames carry only fixed-width header fields; sizes are derived
        // from the last header offset plus the width of the payload field.
        private const int RequestInitialFrameSize = Messaging.FrameFields.Offset.PartitionId + BytesExtensions.SizeOfInt;
        private const int ResponseResponseFieldOffset = Messaging.FrameFields.Offset.ResponseBackupAcks + BytesExtensions.SizeOfByte;
        private const int ResponseInitialFrameSize = ResponseResponseFieldOffset + BytesExtensions.SizeOfBool;

#if SERVER_CODEC
        public sealed class RequestParameters
        {
            /// <summary>
            /// Name of the List
            ///</summary>
            public string Name { get; set; }

            /// <summary>
            /// Collection to be checked for containment in this list
            ///</summary>
            public IList<IData> Values { get; set; }
        }
#endif

        // Builds the client -> member request: a header frame followed by the
        // list name and the serialized values (one frame per value).
        public static ClientMessage EncodeRequest(string name, ICollection<IData> values)
        {
            var clientMessage = new ClientMessage
            {
                IsRetryable = true,
                OperationName = "List.ContainsAll"
            };
            var initialFrame = new Frame(new byte[RequestInitialFrameSize], (FrameFlags) ClientMessageFlags.Unfragmented);
            initialFrame.Bytes.WriteIntL(Messaging.FrameFields.Offset.MessageType, RequestMessageType);
            // -1: partition id is resolved later by the invocation machinery.
            initialFrame.Bytes.WriteIntL(Messaging.FrameFields.Offset.PartitionId, -1);
            clientMessage.Append(initialFrame);
            StringCodec.Encode(clientMessage, name);
            ListMultiFrameCodec.Encode(clientMessage, values, DataCodec.Encode);
            return clientMessage;
        }

#if SERVER_CODEC
        // Server-side mirror of EncodeRequest: frames must be consumed in the
        // exact order they were appended.
        public static RequestParameters DecodeRequest(ClientMessage clientMessage)
        {
            using var iterator = clientMessage.GetEnumerator();
            var request = new RequestParameters();
            iterator.Take(); // empty initial frame
            request.Name = StringCodec.Decode(iterator);
            request.Values = ListMultiFrameCodec.Decode(iterator, DataCodec.Decode);
            return request;
        }
#endif

        public sealed class ResponseParameters
        {
            /// <summary>
            /// True if this list contains all of the elements of the
            /// specified collection
            ///</summary>
            public bool Response { get; set; }
        }

#if SERVER_CODEC
        // Builds the member -> client response carrying the boolean result
        // inline in the initial frame (no payload frames needed).
        public static ClientMessage EncodeResponse(bool response)
        {
            var clientMessage = new ClientMessage();
            var initialFrame = new Frame(new byte[ResponseInitialFrameSize], (FrameFlags) ClientMessageFlags.Unfragmented);
            initialFrame.Bytes.WriteIntL(Messaging.FrameFields.Offset.MessageType, ResponseMessageType);
            initialFrame.Bytes.WriteBoolL(ResponseResponseFieldOffset, response);
            clientMessage.Append(initialFrame);
            return clientMessage;
        }
#endif

        // Reads the boolean result straight out of the response's initial frame.
        public static ResponseParameters DecodeResponse(ClientMessage clientMessage)
        {
            using var iterator = clientMessage.GetEnumerator();
            var response = new ResponseParameters();
            var initialFrame = iterator.Take();
            response.Response = initialFrame.Bytes.ReadBoolL(ResponseResponseFieldOffset);
            return response;
        }
    }
}
| {
"content_hash": "78003edeed9190c5c0fdcd0dd0552413",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 133,
"avg_line_length": 38.61363636363637,
"alnum_prop": 0.6831469491857955,
"repo_name": "asimarslan/hazelcast-csharp-client",
"id": "397b1bcb1ff90642cd20b6c3cb085502c5e4203c",
"size": "5099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Hazelcast.Net/Protocol/Codecs/ListContainsAllCodec.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "7109601"
},
{
"name": "PowerShell",
"bytes": "115235"
},
{
"name": "Shell",
"bytes": "484"
}
],
"symlink_target": ""
} |
/* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2017 Marvell International Ltd.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef MVEBU_UART_H
#define MVEBU_UART_H
#include <types_ext.h>
#include <drivers/serial.h>
/*
 * Per-instance state for a Marvell EBU UART.
 */
struct mvebu_uart_data {
	struct io_pa_va base;	/* physical/virtual base of the UART registers */
	struct serial_chip chip;	/* generic serial driver interface */
};

/*
 * mvebu_uart_init() - initialize @pd for use as a serial console.
 * @pd:        driver instance state to fill in
 * @pbase:     physical base address of the UART register block
 * @uart_clk:  UART input clock rate (presumably Hz -- confirm in mvebu_uart.c)
 * @baud_rate: requested baud rate
 */
void mvebu_uart_init(struct mvebu_uart_data *pd, paddr_t pbase,
		     uint32_t uart_clk, uint32_t baud_rate);
#endif /* MVEBU_UART_H */
| {
"content_hash": "39736e6e23f4060cf52c2dbc325f1e6f",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 79,
"avg_line_length": 41.166666666666664,
"alnum_prop": 0.7599768652400232,
"repo_name": "pascal-brand-st-dev/optee_os",
"id": "fed9ac8b6192cfba69a194a460820d29f05c8571",
"size": "1729",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "core/include/drivers/mvebu_uart.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "238926"
},
{
"name": "Awk",
"bytes": "6672"
},
{
"name": "C",
"bytes": "5058496"
},
{
"name": "C++",
"bytes": "46191"
},
{
"name": "HTML",
"bytes": "75678"
},
{
"name": "Makefile",
"bytes": "147021"
},
{
"name": "Python",
"bytes": "9500"
},
{
"name": "Shell",
"bytes": "13758"
}
],
"symlink_target": ""
} |
module Slappy
  # Client wraps the Slack realtime connection and dispatches incoming
  # events to user-registered callbacks (hello/goodnight/hear/respond/
  # monitor), plus optional scheduled jobs.
  class Client
    include Slappy::Debuggable

    # Time at which #start began; recorded in #setup.
    attr_reader :start_time

    def initialize
      # Push the configured token into the global Slack client config.
      Slack.configure { |slack| slack.token = config.token }
      @callbacks = {}
    end

    # Lazily-created Slack realtime client; memoized for the process lifetime.
    def client
      @client ||= Slack.realtime
    end

    # Wires up callbacks and signal handlers, then blocks on the realtime
    # connection. On error: fires :goodnight callbacks, prints the
    # backtrace to STDERR, and exits(1) when config.stop_with_error is set.
    def start
      setup
      Debug.log 'Slappy start'
      begin
        client.start
      rescue StandardError => e
        @callbacks[:goodnight].each(&:call) if @callbacks[:goodnight]
        # slice!(0) removes the top frame from the backtrace so it can be
        # printed fused with the message; the remaining frames follow.
        STDERR.puts e.backtrace.slice!(0) + ': ' + e.message
        STDERR.puts "\tfrom " + e.backtrace.join("\n\tfrom ")
        exit 1 if config.stop_with_error
      end
    end

    # Registers a block to run when the connection says hello.
    def hello(&block)
      register_callback(:hello, :hello, block)
    end

    # Registers a block to run at shutdown (error or TERM/INT signal).
    def goodnight(&block)
      register_callback(:goodnight, :goodnight, block)
    end

    # Runs the block for any message whose text matches pattern.
    def hear(pattern, options = {}, &block)
      register_callback(:hear, :message, Listener::TextListener.new(pattern, options, &block))
    end

    # Like #hear, but only for messages addressed to the bot by name:
    # the pattern is anchored as "^<botname> <pattern>".
    def respond(pattern, options = {}, &block)
      bot_name = options[:bot_name] || config.robot.botname || config.robot.username
      pattern = "^#{bot_name}[[:blank:]]#{pattern}"
      register_callback(:respond, :message, Listener::TextListener.new(pattern, options, &block))
    end

    # Runs the block for every event of the given type (e.g. :presence_change).
    def monitor(type, options = {}, &block)
      register_callback(:monitor, type.to_sym, Listener::TypeListener.new(type, options, &block))
    end

    # Sends text to Slack; options (channel, etc.) are passed to Messenger.
    def say(text, options = {})
      options[:text] = text
      Messenger.new(options).message
    end

    # Registers a scheduled block under a cron-style pattern.
    def schedule(pattern, options = {}, &block)
      @schedule ||= Schedule.new
      @schedule.register pattern, options, &block
      Debug.log "Add schedule event(#{@schedule.list.size}): #{pattern}"
    end

    private

    # Records the start time, subscribes every registered callback list to
    # its realtime event, and installs the signal handlers. Must run before
    # client.start so no events are missed.
    def setup
      @start_time = Time.now
      @callbacks.each do |event_name, listeners|
        register_event event_name, listeners
      end
      set_signal_trap
    end

    # Appends callback to the list for +type+; +name+ is only used for the
    # debug log line.
    def register_callback(name, type, callback)
      @callbacks[type] ||= []
      @callbacks[type].push callback
      Debug.log "Add #{name} event(#{@callbacks[type.to_sym].size}): #{type}"
    end

    # On TERM/INT: run each :goodnight callback in its own (joined) thread,
    # then stop the EventMachine reactor to unblock client.start.
    def set_signal_trap
      [:TERM, :INT].each do |signal|
        Signal.trap(signal) do
          @callbacks[:goodnight].try(:each) do |callback|
            th = Thread.new { callback.call }
            th.join
          end
          EventMachine.stop
        end
      end
    end

    # Subscribes all listeners for one realtime event. :hello listeners
    # take no arguments; every other listener receives the raw payload
    # wrapped in an Event.
    def register_event(event_name, listeners)
      client.on event_name do |data|
        listeners.each do |listener|
          case event_name
          when :hello
            listener.call
          else
            event = Event.new(data)
            listener.call(event)
          end
        end
      end
    end

    # Shortcut to the global Slappy configuration object.
    def config
      Slappy.configuration
    end
  end
end
| {
"content_hash": "610f66eeebbcc0ed21e7b143a915d300",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 97,
"avg_line_length": 24.601769911504423,
"alnum_prop": 0.591726618705036,
"repo_name": "wakaba260/slappy",
"id": "633e1cff3dc94ad570f90884b43ddcb0cba302ab",
"size": "2780",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/slappy/client.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "53536"
}
],
"symlink_target": ""
} |
"""Django settings for the django-banzai test project.

SQLite-backed, DEBUG-enabled configuration intended only for running
the test suite / local development -- not for deployment.
"""
import os

# Absolute, normalized path of the directory holding this settings module;
# used below to anchor the SQLite database file.
PROJECT_ROOT = os.path.join(os.path.dirname(__file__))
PROJECT_ROOT = os.path.normpath(os.path.abspath(PROJECT_ROOT))

# Development-only flags; never deploy with DEBUG enabled.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

# File-based SQLite database living next to this settings file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(PROJECT_ROOT, 'sqlite3.db')
    }
}

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.3/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['localhost']

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): a hard-coded key is acceptable for a test project only;
# never reuse this value in a deployed settings module.
SECRET_KEY = 'xfh7)zld#c5&%h8!p7h6gn&ndz2u(%dhw^)lpg$-9+j3lz$%%k'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'test_project.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# Standard contrib apps plus the app under test ('banzai') and South
# for schema migrations (pre-Django-1.7 era).
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'banzai',
    'south',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| {
"content_hash": "d2d9deadeef3e33fcafc3c58b606cab0",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 88,
"avg_line_length": 32.36666666666667,
"alnum_prop": 0.7056642636457261,
"repo_name": "saippuakauppias/django-banzai",
"id": "3355a7b4b048b70f49783512b33247a8a7007093",
"size": "4899",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test_project/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32545"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_65) on Tue Feb 04 13:29:26 EST 2014 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
Uses of Class backtype.storm.generated.Nimbus.beginFileUpload_result (Storm Core 0.9.1-incubating-SNAPSHOT API)
</TITLE>
<META NAME="date" CONTENT="2014-02-04">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class backtype.storm.generated.Nimbus.beginFileUpload_result (Storm Core 0.9.1-incubating-SNAPSHOT API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?backtype/storm/generated//class-useNimbus.beginFileUpload_result.html" target="_top"><B>FRAMES</B></A>
<A HREF="Nimbus.beginFileUpload_result.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>backtype.storm.generated.Nimbus.beginFileUpload_result</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#backtype.storm.generated"><B>backtype.storm.generated</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="backtype.storm.generated"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A> in <A HREF="../../../../backtype/storm/generated/package-summary.html">backtype.storm.generated</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../backtype/storm/generated/package-summary.html">backtype.storm.generated</A> that return <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A></CODE></FONT></TD>
<TD><CODE><B>Nimbus.beginFileUpload_result.</B><B><A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html#deepCopy()">deepCopy</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../backtype/storm/generated/package-summary.html">backtype.storm.generated</A> with parameters of type <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> int</CODE></FONT></TD>
<TD><CODE><B>Nimbus.beginFileUpload_result.</B><B><A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html#compareTo(backtype.storm.generated.Nimbus.beginFileUpload_result)">compareTo</A></B>(<A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A> other)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> boolean</CODE></FONT></TD>
<TD><CODE><B>Nimbus.beginFileUpload_result.</B><B><A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html#equals(backtype.storm.generated.Nimbus.beginFileUpload_result)">equals</A></B>(<A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A> that)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Constructors in <A HREF="../../../../backtype/storm/generated/package-summary.html">backtype.storm.generated</A> with parameters of type <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html#Nimbus.beginFileUpload_result(backtype.storm.generated.Nimbus.beginFileUpload_result)">Nimbus.beginFileUpload_result</A></B>(<A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated">Nimbus.beginFileUpload_result</A> other)</CODE>
<BR>
Performs a deep copy on <i>other</i>.</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../backtype/storm/generated/Nimbus.beginFileUpload_result.html" title="class in backtype.storm.generated"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?backtype/storm/generated//class-useNimbus.beginFileUpload_result.html" target="_top"><B>FRAMES</B></A>
<A HREF="Nimbus.beginFileUpload_result.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2014 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All Rights Reserved.
</BODY>
</HTML>
| {
"content_hash": "3470385e0441389308a21453de2e053f",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 395,
"avg_line_length": 50.397260273972606,
"alnum_prop": 0.663314306423847,
"repo_name": "techdocscn/storm",
"id": "aa21915f1475ae70e3f4418f53f7c1bd11efb71e",
"size": "11037",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "source/apidocs/backtype/storm/generated/class-use/Nimbus.beginFileUpload_result.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10888"
},
{
"name": "Ruby",
"bytes": "1341"
}
],
"symlink_target": ""
} |
/* ------------------------------------------------------------------------------
*
* # Session timeout
*
* Specific JS code additions for extra_session_timeout.html page
*
* Version: 1.0
* Latest update: Aug 1, 2015
*
* ---------------------------------------------------------------------------- */
$(function() {
    // Idle-timeout configuration for the session-timeout plugin:
    // warn the user after 5s of inactivity, redirect after 15s.
    var idleTimeoutOptions = {
        heading: 'h5',
        title: 'Idle Timeout',
        message: 'Your session is about to expire. Do you want to stay connected?',
        warnAfter: 5000,
        redirAfter: 15000,
        keepAliveUrl: '/',
        redirUrl: 'login_unlock.html',
        logoutUrl: 'login_advanced.html'
    };

    $.sessionTimeout(idleTimeoutOptions);
});
| {
"content_hash": "03e875dab43d4ef30d4d8e457a4de3b0",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 83,
"avg_line_length": 27.115384615384617,
"alnum_prop": 0.425531914893617,
"repo_name": "iBase4J/iBase4J",
"id": "d92c20598446eacda059642d33bb79cee4e22598",
"size": "705",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "iBase4J-UI/iBase4J-UI-DataTables/src/assets/js/pages/extra_idle_timeout.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2040473"
},
{
"name": "HTML",
"bytes": "702651"
},
{
"name": "Java",
"bytes": "285812"
},
{
"name": "JavaScript",
"bytes": "6362897"
},
{
"name": "PHP",
"bytes": "11363"
},
{
"name": "TSQL",
"bytes": "1053172"
}
],
"symlink_target": ""
} |
from keras.layers import Dense, merge


# Makes Dense connections to a series of previous outputs.
# Can be used for making connections to all previous layers,
# e.g. http://arxiv.org/abs/1608.06993 but for Dense networks.
# Avoids the need to concat inputs by projecting each input then summing.
def make_densedense(output_dim, inputs):
    """Project each layer in ``inputs`` through its own Dense(output_dim)
    and element-wise sum the projections.

    Args:
        output_dim: Output dimensionality of every Dense projection.
        inputs: Sequence of Keras layer outputs to connect.

    Returns:
        The single projection when ``inputs`` has one element, otherwise
        the sum-merge of all projections (all share shape (..., output_dim)).
    """
    out_arr = []
    for layer in inputs:
        out_dense = Dense(output_dim)(layer)
        out_arr.append(out_dense)
    if len(out_arr) == 1:
        return out_arr[0]
    else:
        return merge(out_arr, mode='sum') | {
"content_hash": "ce21f519664dd1c4f88db16fff51dcef",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 58,
"avg_line_length": 33.375,
"alnum_prop": 0.6891385767790262,
"repo_name": "kuza55/keras-extras",
"id": "933a925373efd835eff1e5821b2b46d24a0df08b",
"size": "534",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "layers/layer_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7086"
}
],
"symlink_target": ""
} |
# Base image bundles JDK 8u151, Scala 2.12.5 and sbt 1.1.2.
FROM hseeberger/scala-sbt:8u151-2.12.5-1.1.2
WORKDIR /finch
# Copy only what the sbt build needs: build definition plus sources.
COPY project project
COPY src src
COPY build.sbt build.sbt
# Build the fat jar (sbt-assembly); -batch disables the interactive shell.
RUN sbt assembly -batch
EXPOSE 9000
# Run the assembled benchmark jar with server-mode JVM tuning flags.
CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:+AggressiveOpts", "-Dio.netty.recycler.maxCapacityPerThread=0", "-Dcom.twitter.finagle.tracing.enabled=false", "-Dio.netty.leakDetection.level=disabled", "-jar", "target/scala-2.12/finch-benchmark.jar"]
| {
"content_hash": "7038cf421c6e77c7eb6c553f0a2aa769",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 269,
"avg_line_length": 42.7,
"alnum_prop": 0.7447306791569087,
"repo_name": "MTDdk/FrameworkBenchmarks",
"id": "64dbac482660047ea9343c8a84ffe375b2c922e5",
"size": "427",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "frameworks/Scala/finch/finch.dockerfile",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "104"
},
{
"name": "Batchfile",
"bytes": "1125"
},
{
"name": "C",
"bytes": "243674"
},
{
"name": "C#",
"bytes": "485424"
},
{
"name": "C++",
"bytes": "197934"
},
{
"name": "CMake",
"bytes": "6315"
},
{
"name": "CSS",
"bytes": "2035"
},
{
"name": "Clojure",
"bytes": "80972"
},
{
"name": "Common Lisp",
"bytes": "22120"
},
{
"name": "Crystal",
"bytes": "27193"
},
{
"name": "D",
"bytes": "203825"
},
{
"name": "Dart",
"bytes": "52130"
},
{
"name": "Dockerfile",
"bytes": "340268"
},
{
"name": "Dylan",
"bytes": "868"
},
{
"name": "Elixir",
"bytes": "14485"
},
{
"name": "Erlang",
"bytes": "41222"
},
{
"name": "F#",
"bytes": "91061"
},
{
"name": "Go",
"bytes": "161938"
},
{
"name": "Groovy",
"bytes": "21834"
},
{
"name": "HTML",
"bytes": "142024"
},
{
"name": "Hack",
"bytes": "2261"
},
{
"name": "Haskell",
"bytes": "55691"
},
{
"name": "Java",
"bytes": "678499"
},
{
"name": "JavaScript",
"bytes": "175373"
},
{
"name": "Kotlin",
"bytes": "57654"
},
{
"name": "Lua",
"bytes": "14508"
},
{
"name": "Makefile",
"bytes": "4991"
},
{
"name": "Meson",
"bytes": "846"
},
{
"name": "MoonScript",
"bytes": "2396"
},
{
"name": "Nim",
"bytes": "1290"
},
{
"name": "PHP",
"bytes": "532533"
},
{
"name": "PLpgSQL",
"bytes": "3446"
},
{
"name": "Perl",
"bytes": "15376"
},
{
"name": "Python",
"bytes": "359775"
},
{
"name": "QMake",
"bytes": "2301"
},
{
"name": "Racket",
"bytes": "5069"
},
{
"name": "Ruby",
"bytes": "89692"
},
{
"name": "Rust",
"bytes": "89500"
},
{
"name": "Scala",
"bytes": "101770"
},
{
"name": "Shell",
"bytes": "96874"
},
{
"name": "Smarty",
"bytes": "744"
},
{
"name": "Swift",
"bytes": "101361"
},
{
"name": "TypeScript",
"bytes": "15109"
},
{
"name": "UrWeb",
"bytes": "4453"
},
{
"name": "Vala",
"bytes": "1579"
},
{
"name": "Visual Basic .NET",
"bytes": "27087"
},
{
"name": "Volt",
"bytes": "511"
}
],
"symlink_target": ""
} |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Buffers;
using System.Buffers.Text;
using Xunit;
namespace System.Text.Formatting.Tests
{
    public class CustomCultureTests
    {
        // Culture1 maps each digit 0-9 to a single letter 'A'-'J';
        // Culture5 maps each digit to the same letter repeated five times.
        internal static SymbolTable Culture1;
        internal static SymbolTable Culture5;
        internal static ArrayPool<byte> pool = ArrayPool<byte>.Shared;
        // Sets up cultures with digits represented by 1 or 5 'A's (0) through
        // 1 or 5 'J's (9) and the minus sign represented by an underscore
        // followed by a question mark.
        static CustomCultureTests()
        {
            // Slots 0-9 hold digits; the remaining slots hold symbols
            // (decimal separator, group separator, minus sign, ...).
            byte[][] utf16digitsAndSymbols = new byte[17][];
            for (ushort digit = 0; digit < 10; digit++)
            {
                // Digit N becomes the letter ('A' + N), repeated five times.
                char digitChar = (char)(digit + 'A');
                var digitString = new string(digitChar, 5);
                utf16digitsAndSymbols[digit] = GetBytesUtf16(digitString);
            }
            utf16digitsAndSymbols[(ushort)SymbolTable.Symbol.DecimalSeparator] = GetBytesUtf16(".");
            utf16digitsAndSymbols[(ushort)SymbolTable.Symbol.GroupSeparator] = GetBytesUtf16(",");
            utf16digitsAndSymbols[(ushort)SymbolTable.Symbol.MinusSign] = GetBytesUtf16("_?");
            Culture5 = new CustomUtf16SymbolTable(utf16digitsAndSymbols);
            // Rebuild the table with single-character digits for Culture1.
            utf16digitsAndSymbols = new byte[17][];
            for (ushort digit = 0; digit < 10; digit++)
            {
                char digitChar = (char)(digit + 'A');
                var digitString = new string(digitChar, 1);
                utf16digitsAndSymbols[digit] = GetBytesUtf16(digitString);
            }
            utf16digitsAndSymbols[(ushort)SymbolTable.Symbol.DecimalSeparator] = GetBytesUtf16(".");
            utf16digitsAndSymbols[(ushort)SymbolTable.Symbol.GroupSeparator] = GetBytesUtf16(",");
            utf16digitsAndSymbols[(ushort)SymbolTable.Symbol.MinusSign] = GetBytesUtf16("_?");
            Culture1 = new CustomUtf16SymbolTable(utf16digitsAndSymbols);
        }
        // Encodes the given text as little-endian UTF-16 bytes.
        private static byte[] GetBytesUtf16(string text)
        {
            return System.Text.Encoding.Unicode.GetBytes(text);
        }
        [Fact]
        public void CustomCulture()
        {
            // -1234567890 rendered with Culture5: "_?" for the minus sign,
            // then each digit expanded to five letters ('B'=1, 'C'=2, ..., 'A'=0).
            var sb = new StringFormatter();
            sb.SymbolTable = Culture5;
            sb.Append(-1234567890);
            Assert.Equal("_?BBBBBCCCCCDDDDDEEEEEFFFFFGGGGGHHHHHIIIIIJJJJJAAAAA", sb.ToString());
        }
    }
}
| {
"content_hash": "5ec8d0cfcfae6a30b95a4fe96c9b06f3",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 170,
"avg_line_length": 42.983333333333334,
"alnum_prop": 0.6324156649864289,
"repo_name": "joshfree/corefxlab",
"id": "df199da804948478b22b13eb051b2ad948802c2a",
"size": "2581",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tests/System.Text.Formatting.Tests/CustomCulture.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "678"
},
{
"name": "C#",
"bytes": "7301244"
},
{
"name": "Groovy",
"bytes": "2527"
},
{
"name": "PowerShell",
"bytes": "26354"
},
{
"name": "Shell",
"bytes": "25132"
},
{
"name": "Smalltalk",
"bytes": "3"
}
],
"symlink_target": ""
} |
"""
Django settings for doughuware project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'xb+%x5y0h2h#uz6gq20%q@w8emxrou(0k-%%4w5c1=^27f+mq4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'widget_tweaks',
'core',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'doughuware.urls'
WSGI_APPLICATION = 'doughuware.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
| {
"content_hash": "cb93d2bc7f7988380562dc3d8f9c8e5a",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 71,
"avg_line_length": 25.89622641509434,
"alnum_prop": 0.6881602914389799,
"repo_name": "JDougherty/doughuware",
"id": "2924dc819ec4c6dc7d09f18d41260bfbfff116ea",
"size": "2745",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doughuware/doughuware/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1004"
},
{
"name": "HTML",
"bytes": "4161"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "7031"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "e20b2193a3b2108e32f2903cf8544c81",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "789164d385595e943cc6c6a395ea21a285cf827d",
"size": "196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Ranunculales/Ranunculaceae/Delphinium/Delphinium taliense/Delphinium taliense platycentrum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package org.springframework.boot;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
/**
 * Tests for {@link DefaultApplicationArguments}.
 *
 * @author Phillip Webb
 */
public class DefaultApplicationArgumentsTests {

	// Two values for option "foo", a flag "debug", and two non-option args.
	private static final String[] ARGS = new String[] { "--foo=bar", "--foo=baz",
			"--debug", "spring", "boot" };

	@Test
	public void argumentsMustNotBeNull() {
		// The constructor rejects a null argument array outright.
		assertThatIllegalArgumentException()
				.isThrownBy(() -> new DefaultApplicationArguments(null))
				.withMessageContaining("Args must not be null");
	}

	@Test
	public void getArgs() {
		// The raw source arguments are exposed unchanged.
		ApplicationArguments parsed = new DefaultApplicationArguments(ARGS);
		assertThat(parsed.getSourceArgs()).isEqualTo(ARGS);
	}

	@Test
	public void optionNames() {
		// Only "--name[=value]" style arguments count as option names.
		ApplicationArguments parsed = new DefaultApplicationArguments(ARGS);
		Set<String> expectedNames = new HashSet<>(Arrays.asList("foo", "debug"));
		assertThat(parsed.getOptionNames()).isEqualTo(expectedNames);
	}

	@Test
	public void containsOption() {
		ApplicationArguments parsed = new DefaultApplicationArguments(ARGS);
		assertThat(parsed.containsOption("foo")).isTrue();
		assertThat(parsed.containsOption("debug")).isTrue();
		// "spring" is a non-option argument, not an option.
		assertThat(parsed.containsOption("spring")).isFalse();
	}

	@Test
	public void getOptionValues() {
		ApplicationArguments parsed = new DefaultApplicationArguments(ARGS);
		// Repeated options accumulate values; a bare flag yields an empty
		// list; an absent option yields null.
		assertThat(parsed.getOptionValues("foo"))
				.isEqualTo(Arrays.asList("bar", "baz"));
		assertThat(parsed.getOptionValues("debug")).isEmpty();
		assertThat(parsed.getOptionValues("spring")).isNull();
	}

	@Test
	public void getNonOptionArgs() {
		ApplicationArguments parsed = new DefaultApplicationArguments(ARGS);
		assertThat(parsed.getNonOptionArgs()).containsExactly("spring", "boot");
	}

	@Test
	public void getNoNonOptionArgs() {
		ApplicationArguments parsed = new DefaultApplicationArguments("--debug");
		assertThat(parsed.getNonOptionArgs()).isEmpty();
	}

}
| {
"content_hash": "bfdd822ab4a6cc27c0ad5d8283799941",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 81,
"avg_line_length": 29.34246575342466,
"alnum_prop": 0.7553688141923436,
"repo_name": "hello2009chen/spring-boot",
"id": "45029e476ad40b5c30ea66fd005968daf34a0307",
"size": "2762",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spring-boot-project/spring-boot/src/test/java/org/springframework/boot/DefaultApplicationArgumentsTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1948"
},
{
"name": "CSS",
"bytes": "5774"
},
{
"name": "Groovy",
"bytes": "46492"
},
{
"name": "HTML",
"bytes": "70389"
},
{
"name": "Java",
"bytes": "7092425"
},
{
"name": "JavaScript",
"bytes": "37789"
},
{
"name": "Ruby",
"bytes": "1305"
},
{
"name": "SQLPL",
"bytes": "20085"
},
{
"name": "Shell",
"bytes": "8165"
},
{
"name": "Smarty",
"bytes": "3276"
},
{
"name": "XSLT",
"bytes": "33894"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<android.support.v7.internal.view.menu.ListMenuItemView
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="?attr/listPreferredItemHeightSmall">
<!-- Icon will be inserted here. -->
<!-- The title and summary have some gap between them, and this 'group' should be centered vertically. -->
<RelativeLayout
android:layout_width="0dip"
android:layout_weight="1"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_marginLeft="?attr/listPreferredItemPaddingLeft"
android:layout_marginRight="?attr/listPreferredItemPaddingRight"
android:duplicateParentState="true">
<TextView
android:id="@+id/title"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_alignParentLeft="true"
android:textAppearance="?attr/textAppearanceListItemSmall"
android:singleLine="true"
android:duplicateParentState="true"
android:ellipsize="marquee"
android:fadingEdge="horizontal" />
<TextView
android:id="@+id/shortcut"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/title"
android:layout_alignParentLeft="true"
android:textAppearance="?android:attr/textAppearanceSmall"
android:singleLine="true"
android:duplicateParentState="true" />
</RelativeLayout>
<!-- Checkbox, and/or radio button will be inserted here. -->
</android.support.v7.internal.view.menu.ListMenuItemView>
<!-- From: file:/usr/local/google/buildbot/repo_clients/https___googleplex-android.googlesource.com_a_platform_manifest.git/mnc-sdk-release/frameworks/support/v7/appcompat/res/layout/abc_list_menu_item_layout.xml --><!-- From: file:/Users/jonathan/Documents/cucei2016B/sbc/se-ov/android/app/build/intermediates/exploded-aar/com.android.support/appcompat-v7/23.0.1/res/layout/abc_list_menu_item_layout.xml --> | {
"content_hash": "b294de51098602fab9086538c83b5c16",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 408,
"avg_line_length": 47.91803278688525,
"alnum_prop": 0.6811495039343141,
"repo_name": "jonajgs/se-ov",
"id": "451d0896fbe59926840a8e2f8cff2107033fcbcb",
"size": "2923",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "android/app/build/intermediates/res/merged/debug/layout/abc_list_menu_item_layout.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3223"
},
{
"name": "HTML",
"bytes": "903"
},
{
"name": "Java",
"bytes": "1172894"
},
{
"name": "JavaScript",
"bytes": "24745"
},
{
"name": "Objective-C",
"bytes": "4386"
},
{
"name": "Python",
"bytes": "1633"
}
],
"symlink_target": ""
} |
RiboPip [](https://travis-ci.org/stepf/RiboPip) [](https://codeclimate.com/github/stepf/RiboPip) [](https://gemnasium.com/github.com/stepf/RiboPip) [](http://inch-ci.org/github/stepf/RiboPip)
=========
**An alignment and analysis pipeline for Ribo-seq and RNA-seq data**
RiboPip is a Ruby-based pipeline for processing Ribosome Profiling (Ribo-seq) and RNA sequencing (RNA-seq) datasets. `ribopip align` starts from raw sequence files and computes a splice-aware alignment to a reference database along with a read summarization (counting mapped reads per genomic feature) and data quality assessments. Read summarization data can be merged and used for differential expression estimation with `ribopip postproc`.
Getting started
---------------
RiboPip wraps around all pipeline steps and ties them together; to do so, it depends on a variety of external software. It has been designed to run on every POSIX compliant UNIX system, for example, Linux, Mac OS X, and OpenBSD.
**Automatic installation**
Running `scripts/bootstrap` installs all external dependencies and sets up RiboPip correctly. It works out-of-the-box with most Linux flavours, although you might want to modify the bash script according to your needs.
**Docker container**
A minimal Docker container (Debian) with pre-installed dependencies can be obtained from Docker Hub:
```bash
docker image pull stepf/ribopip
```
You can then spin up an interactive session to run / test the pipeline. Beware that this requires a lot of computational power, so your cluster setup will need to support containers:
```bash
docker container run --interactive --tty --rm stepf/ribopip
# run pipeline as described below
```
**Manual installation**
* Install all external software dependencies (see `scripts/bootstrap` for a list)
* Manually build C extension and Ruby gem:
```bash
cd "ext/fastq-bucketize-0.1" && make # && copy bin to any directory in your PATH
cd -
bundle install
rake build && gem install "./pkg/ribopip-$(rake version).gem"
```
* Run tests:
```bash
rake spec
```
Usage
---------------
```bash
> ribopip -h
Commands:
ribopip align
ribopip help [COMMAND]
ribopip postproc
```
**Example of a full pipeline run**
```bash
# Reference files:
ncrna="GRCm38.80.ncrna.fa"
genome="GRCm38.dna.primary_assembly.fa"
annotation="Mus_musculus.GRCm38.80.gtf"
igv="GRCm38.igv.genome"
# Arguments for ribopip align:
aln_args="-n ${ncrna}$ -g ${genome} -a ${annotation} --igv_ref ${igv}"
# Run upstream pipeline for datasets, each comprising of
# - footprint and mrna data : fp, mrna
# - 2 experimental conditions : treated, control
# - 2 replicates : rep1, rep2
ribopip align $aln_args -r fp_treated_rep1.fastq
ribopip align $aln_args -r fp_treated_rep2.fastq
ribopip align $aln_args -r fp_control_rep1.fastq
ribopip align $aln_args -r fp_control_rep2.fastq
ribopip align $aln_args -r mrna_treated_rep1.fastq
ribopip align $aln_args -r mrna_treated_rep2.fastq
ribopip align $aln_args -r mrna_control_rep1.fastq
ribopip align $aln_args -r mrna_control_rep2.fastq
# Run downstream pipeline to analyze feature counts
ribopip postproc -a $annotation -o . \
--fp-1 fp_treated \
fp_treated_rep1.vs_genome.uni.ft.dist.txt \
fp_treated_rep2.vs_genome.uni.ft.dist.txt \
--fp-control fp_control \
fp_control_rep1.vs_genome.uni.ft.dist.txt \
fp_control_rep2.vs_genome.uni.ft.dist.txt \
--mrna-1 mrna_treated \
mrna_treated_rep1.vs_genome.uni.ft.dist.txt \
mrna_treated_rep2.vs_genome.uni.ft.dist.txt \
--mrna-control mrna_control \
mrna_control_rep1.vs_genome.uni.ft.dist.txt \
mrna_control_rep2.vs_genome.uni.ft.dist.txt
```
Pipeline feature overview
---------------
### Pre-Processing: Mapping the sequencing results
The pre-processing pipeline comprises four consecutive steps. The first step filters and prepares the raw sequence files. The second step removes unwanted RNAs. The third step computes a splice-aware alignment to a given reference database. The fourth step extracts data subsets with desired properties for further analysis.
1. **Data preparation**
* **Linker clipping**: Deep-sequencing technologies require specific linker sequences to be ligated to the 3’ fragment ends, which can introduce an analytical bias. Users can provide a linker sequence and choose between a perfect-match and an error-tolerant clipping approach.
* **Nucleotide trimming**: During reverse transcription an untemplated nucleotide is frequently added to the 5’ end of each read, which can be trimmed off.
* **Length selection**: Very short reads can introduce ambiguities and thus can be removed.
2. **Removal of unwanted RNAs**: Nuclease footprinting routinely leaves rRNA intact, which can comprise a large fraction of the reads and introduce a bias. All unwanted RNAs can be removed by mapping them contiguously to a corresponding sequence database (e.g. all ncRNAs) and discarding all successfully mapped reads.
3. **Alignment to the genome reference**: The remaining reads can be aligned to a genome reference using a splice-aware aligner.
4. **Read extraction**: For more specific analysis, subsets of reads can be extracted based on criteria like the number of hits (unique, multiple) or number of mismatches.
#### Metrics
During pre-processing a number of metrics is computed:
* **Read counts**: Counting the total number of alignments contained in a file provides a simple, yet important metric about a pre-processing step, e.g. how many reads were successfully mapping to the ncRNA database and sorted out consequently.
* **Counting reads per genomic feature**: The alignment to a genome reference during pre-processing results in reported coordinates for each aligned read. These coordinates can be further matched to known annotated genomic features. This enables to test for Differential Expression or Translational Efficiency.
* **Quality metrics**: Quality checking reads can help to spot potential issues before the alignment is performed. For example a dropping base quality towards the 3’ ends of all reads can be typical for NGS data.
* **Density tracks**: Much information in a ribosome profiling experiment is contained in the footprint density. Visual inspection using a genome browser program such as IGV enables investigators not only to quality-control the data, but also to inspect potential single nucleotide variants (SNVs) or alternative splicing sites (Sashimi Plots) at all sites of interest.
### Post-Processing: Statistical analyses
* **Expression normalization**: By assigning reads to annotated genomic features, it is possible to estimate the expression of that feature. As read counts arising from a transcript are proportional to the transcript length and sampling depth, it is important to normalize these expression estimates to ensure comparability across different samples and features.
* **Differential expression**: Information across experimental replicates is used to estimate normalization factors and dispersions to then test each genomic feature for differential expression.
* **Translational efficiency**: Information about ribosome occupancy and RNA abundance can be exploited to determine the translational efficiency of every gene.
Copyright
---------------
Copyright (c) 2016 Stefan Dang. See LICENSE for details.
| {
"content_hash": "97edfb43f68af7cda84a5f0fe0fc94c0",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 475,
"avg_line_length": 60.03174603174603,
"alnum_prop": 0.7710206240084612,
"repo_name": "stepf/RiboPip",
"id": "5a0f8a3d25c207f8a21acabe0f5b281da22b47cd",
"size": "7570",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Readme.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6636"
},
{
"name": "Makefile",
"bytes": "62"
},
{
"name": "R",
"bytes": "2645"
},
{
"name": "Ruby",
"bytes": "114630"
},
{
"name": "Shell",
"bytes": "6701"
}
],
"symlink_target": ""
} |
/** @file
FontAwesome provides a range of modern 2D symbols. See
http://fortawesome.github.io/
This file makes it easy to use in JUCE software. It's *great* for instantly
useful icons.
Font Awesome is licensed under the OFL v1.1. Full details are available online.
The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.
*/
#ifndef FONTAWESOME_H_INCLUDED
#define FONTAWESOME_H_INCLUDED
#include "../JuceLibraryCode/JuceHeader.h"
namespace FontAwesomeData {
extern const char* FontAwesome_otf;
const int FontAwesome_otfSize = 106260;
};
/**
See the static method drawIcon for an example of how to use the typeface.
*/
class FontAwesomeIcons {
public:
/** Returns a Typeface::Ptr for FontAwesome.

    The typeface is built from the embedded OTF data on first call and
    cached in a function-local static for the lifetime of the program.
*/
static Typeface::Ptr getTypeface()
{
    static Typeface::Ptr typeface = Typeface::createSystemTypefaceFor(FontAwesomeData::FontAwesome_otf, FontAwesomeData::FontAwesome_otfSize);
    return typeface;
}
/**
 * Draws the given FontAwesome icon code, centred within 'area', using the
 * Graphics context's current colour.
 *
 * Example: FontAwesomeIcons::drawIcon(g, FontAwesomeIcons::faAnchor, someDrawArea);
 */
static void drawIcon(Graphics & g, int iconCode, Rectangle<float> area)
{
    const Font awesomeFont(getTypeface());
    g.setFont(awesomeFont);
    g.drawText(String::charToString(iconCode), area, Justification::centred, false);
}
static const int fa500px = 0xf26e;
static const int faAdjust = 0xf042;
static const int faAdn = 0xf170;
static const int faAlignCenter = 0xf037;
static const int faAlignJustify = 0xf039;
static const int faAlignLeft = 0xf036;
static const int faAlignRight = 0xf038;
static const int faAmazon = 0xf270;
static const int faAmbulance = 0xf0f9;
static const int faAnchor = 0xf13d;
static const int faAndroid = 0xf17b;
static const int faAngellist = 0xf209;
static const int faAngleDoubleDown = 0xf103;
static const int faAngleDoubleLeft = 0xf100;
static const int faAngleDoubleRight = 0xf101;
static const int faAngleDoubleUp = 0xf102;
static const int faAngleDown = 0xf107;
static const int faAngleLeft = 0xf104;
static const int faAngleRight = 0xf105;
static const int faAngleUp = 0xf106;
static const int faApple = 0xf179;
static const int faArchive = 0xf187;
static const int faAreaChart = 0xf1fe;
static const int faArrowCircleDown = 0xf0ab;
static const int faArrowCircleLeft = 0xf0a8;
static const int faArrowCircleODown = 0xf01a;
static const int faArrowCircleOLeft = 0xf190;
static const int faArrowCircleORight = 0xf18e;
static const int faArrowCircleOUp = 0xf01b;
static const int faArrowCircleRight = 0xf0a9;
static const int faArrowCircleUp = 0xf0aa;
static const int faArrowDown = 0xf063;
static const int faArrowLeft = 0xf060;
static const int faArrowRight = 0xf061;
static const int faArrowUp = 0xf062;
static const int faArrows = 0xf047;
static const int faArrowsAlt = 0xf0b2;
static const int faArrowsH = 0xf07e;
static const int faArrowsV = 0xf07d;
static const int faAsterisk = 0xf069;
static const int faAt = 0xf1fa;
static const int faAutomobile = 0xf1b9;
static const int faBackward = 0xf04a;
static const int faBalanceScale = 0xf24e;
static const int faBan = 0xf05e;
static const int faBank = 0xf19c;
static const int faBarChart = 0xf080;
static const int faBarChartO = 0xf080;
static const int faBarcode = 0xf02a;
static const int faBars = 0xf0c9;
static const int faBattery0 = 0xf244;
static const int faBattery1 = 0xf243;
static const int faBattery2 = 0xf242;
static const int faBattery3 = 0xf241;
static const int faBattery4 = 0xf240;
static const int faBatteryEmpty = 0xf244;
static const int faBatteryFull = 0xf240;
static const int faBatteryHalf = 0xf242;
static const int faBatteryQuarter = 0xf243;
static const int faBatteryThreeQuarters = 0xf241;
static const int faBed = 0xf236;
static const int faBeer = 0xf0fc;
static const int faBehance = 0xf1b4;
static const int faBehanceSquare = 0xf1b5;
static const int faBell = 0xf0f3;
static const int faBellO = 0xf0a2;
static const int faBellSlash = 0xf1f6;
static const int faBellSlashO = 0xf1f7;
static const int faBicycle = 0xf206;
static const int faBinoculars = 0xf1e5;
static const int faBirthdayCake = 0xf1fd;
static const int faBitbucket = 0xf171;
static const int faBitbucketSquare = 0xf172;
static const int faBitcoin = 0xf15a;
static const int faBlackTie = 0xf27e;
static const int faBluetooth = 0xf293;
static const int faBluetoothB = 0xf294;
static const int faBold = 0xf032;
static const int faBolt = 0xf0e7;
static const int faBomb = 0xf1e2;
static const int faBook = 0xf02d;
static const int faBookmark = 0xf02e;
static const int faBookmarkO = 0xf097;
static const int faBriefcase = 0xf0b1;
static const int faBtc = 0xf15a;
static const int faBug = 0xf188;
static const int faBuilding = 0xf1ad;
static const int faBuildingO = 0xf0f7;
static const int faBullhorn = 0xf0a1;
static const int faBullseye = 0xf140;
static const int faBus = 0xf207;
static const int faBuysellads = 0xf20d;
static const int faCab = 0xf1ba;
static const int faCalculator = 0xf1ec;
static const int faCalendar = 0xf073;
static const int faCalendarCheckO = 0xf274;
static const int faCalendarMinusO = 0xf272;
static const int faCalendarO = 0xf133;
static const int faCalendarPlusO = 0xf271;
static const int faCalendarTimesO = 0xf273;
static const int faCamera = 0xf030;
static const int faCameraRetro = 0xf083;
static const int faCar = 0xf1b9;
static const int faCaretDown = 0xf0d7;
static const int faCaretLeft = 0xf0d9;
static const int faCaretRight = 0xf0da;
static const int faCaretSquareODown = 0xf150;
static const int faCaretSquareOLeft = 0xf191;
static const int faCaretSquareORight = 0xf152;
static const int faCaretSquareOUp = 0xf151;
static const int faCaretUp = 0xf0d8;
static const int faCartArrowDown = 0xf218;
static const int faCartPlus = 0xf217;
static const int faCc = 0xf20a;
static const int faCcAmex = 0xf1f3;
static const int faCcDinersClub = 0xf24c;
static const int faCcDiscover = 0xf1f2;
static const int faCcJcb = 0xf24b;
static const int faCcMastercard = 0xf1f1;
static const int faCcPaypal = 0xf1f4;
static const int faCcStripe = 0xf1f5;
static const int faCcVisa = 0xf1f0;
static const int faCertificate = 0xf0a3;
static const int faChain = 0xf0c1;
static const int faChainBroken = 0xf127;
static const int faCheck = 0xf00c;
static const int faCheckCircle = 0xf058;
static const int faCheckCircleO = 0xf05d;
static const int faCheckSquare = 0xf14a;
static const int faCheckSquareO = 0xf046;
static const int faChevronCircleDown = 0xf13a;
static const int faChevronCircleLeft = 0xf137;
static const int faChevronCircleRight = 0xf138;
static const int faChevronCircleUp = 0xf139;
static const int faChevronDown = 0xf078;
static const int faChevronLeft = 0xf053;
static const int faChevronRight = 0xf054;
static const int faChevronUp = 0xf077;
static const int faChild = 0xf1ae;
static const int faChrome = 0xf268;
static const int faCircle = 0xf111;
static const int faCircleO = 0xf10c;
static const int faCircleONotch = 0xf1ce;
static const int faCircleThin = 0xf1db;
static const int faClipboard = 0xf0ea;
static const int faClockO = 0xf017;
static const int faClone = 0xf24d;
static const int faClose = 0xf00d;
static const int faCloud = 0xf0c2;
static const int faCloudDownload = 0xf0ed;
static const int faCloudUpload = 0xf0ee;
static const int faCny = 0xf157;
static const int faCode = 0xf121;
static const int faCodeFork = 0xf126;
static const int faCodepen = 0xf1cb;
static const int faCodiepie = 0xf284;
static const int faCoffee = 0xf0f4;
static const int faCog = 0xf013;
static const int faCogs = 0xf085;
static const int faColumns = 0xf0db;
static const int faComment = 0xf075;
static const int faCommentO = 0xf0e5;
static const int faCommenting = 0xf27a;
static const int faCommentingO = 0xf27b;
static const int faComments = 0xf086;
static const int faCommentsO = 0xf0e6;
static const int faCompass = 0xf14e;
static const int faCompress = 0xf066;
static const int faConnectdevelop = 0xf20e;
static const int faContao = 0xf26d;
static const int faCopy = 0xf0c5;
static const int faCopyright = 0xf1f9;
static const int faCreativeCommons = 0xf25e;
static const int faCreditCard = 0xf09d;
static const int faCreditCardAlt = 0xf283;
static const int faCrop = 0xf125;
static const int faCrosshairs = 0xf05b;
static const int faCss3 = 0xf13c;
static const int faCube = 0xf1b2;
static const int faCubes = 0xf1b3;
static const int faCut = 0xf0c4;
static const int faCutlery = 0xf0f5;
static const int faDashboard = 0xf0e4;
static const int faDashcube = 0xf210;
static const int faDatabase = 0xf1c0;
static const int faDedent = 0xf03b;
static const int faDelicious = 0xf1a5;
static const int faDesktop = 0xf108;
static const int faDeviantart = 0xf1bd;
static const int faDiamond = 0xf219;
static const int faDigg = 0xf1a6;
static const int faDollar = 0xf155;
static const int faDotCircleO = 0xf192;
static const int faDownload = 0xf019;
static const int faDribbble = 0xf17d;
static const int faDropbox = 0xf16b;
static const int faDrupal = 0xf1a9;
static const int faEdge = 0xf282;
static const int faEdit = 0xf044;
static const int faEject = 0xf052;
static const int faEllipsisH = 0xf141;
static const int faEllipsisV = 0xf142;
static const int faEmpire = 0xf1d1;
static const int faEnvelope = 0xf0e0;
static const int faEnvelopeO = 0xf003;
static const int faEnvelopeSquare = 0xf199;
static const int faEraser = 0xf12d;
static const int faEur = 0xf153;
static const int faEuro = 0xf153;
static const int faExchange = 0xf0ec;
static const int faExclamation = 0xf12a;
static const int faExclamationCircle = 0xf06a;
static const int faExclamationTriangle = 0xf071;
static const int faExpand = 0xf065;
static const int faExpeditedssl = 0xf23e;
static const int faExternalLink = 0xf08e;
static const int faExternalLinkSquare = 0xf14c;
static const int faEye = 0xf06e;
static const int faEyeSlash = 0xf070;
static const int faEyedropper = 0xf1fb;
static const int faFacebook = 0xf09a;
static const int faFacebookF = 0xf09a;
static const int faFacebookOfficial = 0xf230;
static const int faFacebookSquare = 0xf082;
static const int faFastBackward = 0xf049;
static const int faFastForward = 0xf050;
static const int faFax = 0xf1ac;
static const int faFeed = 0xf09e;
static const int faFemale = 0xf182;
static const int faFighterJet = 0xf0fb;
static const int faFile = 0xf15b;
static const int faFileArchiveO = 0xf1c6;
static const int faFileAudioO = 0xf1c7;
static const int faFileCodeO = 0xf1c9;
static const int faFileExcelO = 0xf1c3;
static const int faFileImageO = 0xf1c5;
static const int faFileMovieO = 0xf1c8;
static const int faFileO = 0xf016;
static const int faFilePdfO = 0xf1c1;
static const int faFilePhotoO = 0xf1c5;
static const int faFilePictureO = 0xf1c5;
static const int faFilePowerpointO = 0xf1c4;
static const int faFileSoundO = 0xf1c7;
static const int faFileText = 0xf15c;
static const int faFileTextO = 0xf0f6;
static const int faFileVideoO = 0xf1c8;
static const int faFileWordO = 0xf1c2;
static const int faFileZipO = 0xf1c6;
static const int faFilesO = 0xf0c5;
static const int faFilm = 0xf008;
static const int faFilter = 0xf0b0;
static const int faFire = 0xf06d;
static const int faFireExtinguisher = 0xf134;
static const int faFirefox = 0xf269;
static const int faFlag = 0xf024;
static const int faFlagCheckered = 0xf11e;
static const int faFlagO = 0xf11d;
static const int faFlash = 0xf0e7;
static const int faFlask = 0xf0c3;
static const int faFlickr = 0xf16e;
static const int faFloppyO = 0xf0c7;
static const int faFolder = 0xf07b;
static const int faFolderO = 0xf114;
static const int faFolderOpen = 0xf07c;
static const int faFolderOpenO = 0xf115;
static const int faFont = 0xf031;
static const int faFonticons = 0xf280;
static const int faFortAwesome = 0xf286;
static const int faForumbee = 0xf211;
static const int faForward = 0xf04e;
static const int faFoursquare = 0xf180;
static const int faFrownO = 0xf119;
static const int faFutbolO = 0xf1e3;
static const int faGamepad = 0xf11b;
static const int faGavel = 0xf0e3;
static const int faGbp = 0xf154;
static const int faGe = 0xf1d1;
static const int faGear = 0xf013;
static const int faGears = 0xf085;
static const int faGenderless = 0xf22d;
static const int faGetPocket = 0xf265;
static const int faGg = 0xf260;
static const int faGgCircle = 0xf261;
static const int faGift = 0xf06b;
static const int faGit = 0xf1d3;
static const int faGitSquare = 0xf1d2;
static const int faGithub = 0xf09b;
static const int faGithubAlt = 0xf113;
static const int faGithubSquare = 0xf092;
static const int faGittip = 0xf184;
static const int faGlass = 0xf000;
static const int faGlobe = 0xf0ac;
static const int faGoogle = 0xf1a0;
static const int faGooglePlus = 0xf0d5;
static const int faGooglePlusSquare = 0xf0d4;
static const int faGoogleWallet = 0xf1ee;
static const int faGraduationCap = 0xf19d;
static const int faGratipay = 0xf184;
static const int faGroup = 0xf0c0;
static const int faHSquare = 0xf0fd;
static const int faHackerNews = 0xf1d4;
static const int faHandGrabO = 0xf255;
static const int faHandLizardO = 0xf258;
static const int faHandODown = 0xf0a7;
static const int faHandOLeft = 0xf0a5;
static const int faHandORight = 0xf0a4;
static const int faHandOUp = 0xf0a6;
static const int faHandPaperO = 0xf256;
static const int faHandPeaceO = 0xf25b;
static const int faHandPointerO = 0xf25a;
static const int faHandRockO = 0xf255;
static const int faHandScissorsO = 0xf257;
static const int faHandSpockO = 0xf259;
static const int faHandStopO = 0xf256;
static const int faHashtag = 0xf292;
static const int faHddO = 0xf0a0;
static const int faHeader = 0xf1dc;
static const int faHeadphones = 0xf025;
static const int faHeart = 0xf004;
static const int faHeartO = 0xf08a;
static const int faHeartbeat = 0xf21e;
static const int faHistory = 0xf1da;
static const int faHome = 0xf015;
static const int faHospitalO = 0xf0f8;
static const int faHotel = 0xf236;
static const int faHourglass = 0xf254;
static const int faHourglass1 = 0xf251;
static const int faHourglass2 = 0xf252;
static const int faHourglass3 = 0xf253;
static const int faHourglassEnd = 0xf253;
static const int faHourglassHalf = 0xf252;
static const int faHourglassO = 0xf250;
static const int faHourglassStart = 0xf251;
static const int faHouzz = 0xf27c;
static const int faHtml5 = 0xf13b;
static const int faICursor = 0xf246;
static const int faIls = 0xf20b;
static const int faImage = 0xf03e;
static const int faInbox = 0xf01c;
static const int faIndent = 0xf03c;
static const int faIndustry = 0xf275;
static const int faInfo = 0xf129;
static const int faInfoCircle = 0xf05a;
static const int faInr = 0xf156;
static const int faInstagram = 0xf16d;
static const int faInstitution = 0xf19c;
static const int faInternetExplorer = 0xf26b;
static const int faIntersex = 0xf224;
static const int faIoxhost = 0xf208;
static const int faItalic = 0xf033;
static const int faJoomla = 0xf1aa;
static const int faJpy = 0xf157;
static const int faJsfiddle = 0xf1cc;
static const int faKey = 0xf084;
static const int faKeyboardO = 0xf11c;
static const int faKrw = 0xf159;
static const int faLanguage = 0xf1ab;
static const int faLaptop = 0xf109;
static const int faLastfm = 0xf202;
static const int faLastfmSquare = 0xf203;
static const int faLeaf = 0xf06c;
static const int faLeanpub = 0xf212;
static const int faLegal = 0xf0e3;
static const int faLemonO = 0xf094;
static const int faLevelDown = 0xf149;
static const int faLevelUp = 0xf148;
static const int faLifeBouy = 0xf1cd;
static const int faLifeBuoy = 0xf1cd;
static const int faLifeRing = 0xf1cd;
static const int faLifeSaver = 0xf1cd;
static const int faLightbulbO = 0xf0eb;
static const int faLineChart = 0xf201;
static const int faLink = 0xf0c1;
static const int faLinkedin = 0xf0e1;
static const int faLinkedinSquare = 0xf08c;
static const int faLinux = 0xf17c;
static const int faList = 0xf03a;
static const int faListAlt = 0xf022;
static const int faListOl = 0xf0cb;
static const int faListUl = 0xf0ca;
static const int faLocationArrow = 0xf124;
static const int faLock = 0xf023;
static const int faLongArrowDown = 0xf175;
static const int faLongArrowLeft = 0xf177;
static const int faLongArrowRight = 0xf178;
static const int faLongArrowUp = 0xf176;
static const int faMagic = 0xf0d0;
static const int faMagnet = 0xf076;
static const int faMailForward = 0xf064;
static const int faMailReply = 0xf112;
static const int faMailReplyAll = 0xf122;
static const int faMale = 0xf183;
static const int faMap = 0xf279;
static const int faMapMarker = 0xf041;
static const int faMapO = 0xf278;
static const int faMapPin = 0xf276;
static const int faMapSigns = 0xf277;
static const int faMars = 0xf222;
static const int faMarsDouble = 0xf227;
static const int faMarsStroke = 0xf229;
static const int faMarsStrokeH = 0xf22b;
static const int faMarsStrokeV = 0xf22a;
static const int faMaxcdn = 0xf136;
static const int faMeanpath = 0xf20c;
static const int faMedium = 0xf23a;
static const int faMedkit = 0xf0fa;
static const int faMehO = 0xf11a;
static const int faMercury = 0xf223;
static const int faMicrophone = 0xf130;
static const int faMicrophoneSlash = 0xf131;
static const int faMinus = 0xf068;
static const int faMinusCircle = 0xf056;
static const int faMinusSquare = 0xf146;
static const int faMinusSquareO = 0xf147;
static const int faMixcloud = 0xf289;
static const int faMobile = 0xf10b;
static const int faMobilePhone = 0xf10b;
static const int faModx = 0xf285;
static const int faMoney = 0xf0d6;
static const int faMoonO = 0xf186;
static const int faMortarBoard = 0xf19d;
static const int faMotorcycle = 0xf21c;
static const int faMousePointer = 0xf245;
static const int faMusic = 0xf001;
static const int faNavicon = 0xf0c9;
static const int faNeuter = 0xf22c;
static const int faNewspaperO = 0xf1ea;
static const int faObjectGroup = 0xf247;
static const int faObjectUngroup = 0xf248;
static const int faOdnoklassniki = 0xf263;
static const int faOdnoklassnikiSquare = 0xf264;
static const int faOpencart = 0xf23d;
static const int faOpenid = 0xf19b;
static const int faOpera = 0xf26a;
static const int faOptinMonster = 0xf23c;
static const int faOutdent = 0xf03b;
static const int faPagelines = 0xf18c;
static const int faPaintBrush = 0xf1fc;
static const int faPaperPlane = 0xf1d8;
static const int faPaperPlaneO = 0xf1d9;
static const int faPaperclip = 0xf0c6;
static const int faParagraph = 0xf1dd;
static const int faPaste = 0xf0ea;
static const int faPause = 0xf04c;
static const int faPauseCircle = 0xf28b;
static const int faPauseCircleO = 0xf28c;
static const int faPaw = 0xf1b0;
static const int faPaypal = 0xf1ed;
static const int faPencil = 0xf040;
static const int faPencilSquare = 0xf14b;
static const int faPencilSquareO = 0xf044;
static const int faPercent = 0xf295;
static const int faPhone = 0xf095;
static const int faPhoneSquare = 0xf098;
static const int faPhoto = 0xf03e;
static const int faPictureO = 0xf03e;
static const int faPieChart = 0xf200;
static const int faPiedPiper = 0xf1a7;
static const int faPiedPiperAlt = 0xf1a8;
static const int faPinterest = 0xf0d2;
static const int faPinterestP = 0xf231;
static const int faPinterestSquare = 0xf0d3;
static const int faPlane = 0xf072;
static const int faPlay = 0xf04b;
static const int faPlayCircle = 0xf144;
static const int faPlayCircleO = 0xf01d;
static const int faPlug = 0xf1e6;
static const int faPlus = 0xf067;
static const int faPlusCircle = 0xf055;
static const int faPlusSquare = 0xf0fe;
static const int faPlusSquareO = 0xf196;
static const int faPowerOff = 0xf011;
static const int faPrint = 0xf02f;
static const int faProductHunt = 0xf288;
static const int faPuzzlePiece = 0xf12e;
static const int faQq = 0xf1d6;
static const int faQrcode = 0xf029;
static const int faQuestion = 0xf128;
static const int faQuestionCircle = 0xf059;
static const int faQuoteLeft = 0xf10d;
static const int faQuoteRight = 0xf10e;
static const int faRa = 0xf1d0;
static const int faRandom = 0xf074;
static const int faRebel = 0xf1d0;
static const int faRecycle = 0xf1b8;
static const int faReddit = 0xf1a1;
static const int faRedditAlien = 0xf281;
static const int faRedditSquare = 0xf1a2;
static const int faRefresh = 0xf021;
static const int faRegistered = 0xf25d;
static const int faRemove = 0xf00d;
static const int faRenren = 0xf18b;
static const int faReorder = 0xf0c9;
static const int faRepeat = 0xf01e;
static const int faReply = 0xf112;
static const int faReplyAll = 0xf122;
static const int faRetweet = 0xf079;
static const int faRmb = 0xf157;
static const int faRoad = 0xf018;
static const int faRocket = 0xf135;
static const int faRotateLeft = 0xf0e2;
static const int faRotateRight = 0xf01e;
static const int faRouble = 0xf158;
static const int faRss = 0xf09e;
static const int faRssSquare = 0xf143;
static const int faRub = 0xf158;
static const int faRuble = 0xf158;
static const int faRupee = 0xf156;
static const int faSafari = 0xf267;
static const int faSave = 0xf0c7;
static const int faScissors = 0xf0c4;
static const int faScribd = 0xf28a;
static const int faSearch = 0xf002;
static const int faSearchMinus = 0xf010;
static const int faSearchPlus = 0xf00e;
static const int faSellsy = 0xf213;
static const int faSend = 0xf1d8;
static const int faSendO = 0xf1d9;
static const int faServer = 0xf233;
static const int faShare = 0xf064;
static const int faShareAlt = 0xf1e0;
static const int faShareAltSquare = 0xf1e1;
static const int faShareSquare = 0xf14d;
static const int faShareSquareO = 0xf045;
static const int faShekel = 0xf20b;
static const int faSheqel = 0xf20b;
static const int faShield = 0xf132;
static const int faShip = 0xf21a;
static const int faShirtsinbulk = 0xf214;
static const int faShoppingBag = 0xf290;
static const int faShoppingBasket = 0xf291;
static const int faShoppingCart = 0xf07a;
static const int faSignIn = 0xf090;
static const int faSignOut = 0xf08b;
static const int faSignal = 0xf012;
static const int faSimplybuilt = 0xf215;
static const int faSitemap = 0xf0e8;
static const int faSkyatlas = 0xf216;
static const int faSkype = 0xf17e;
static const int faSlack = 0xf198;
static const int faSliders = 0xf1de;
static const int faSlideshare = 0xf1e7;
static const int faSmileO = 0xf118;
static const int faSoccerBallO = 0xf1e3;
static const int faSort = 0xf0dc;
static const int faSortAlphaAsc = 0xf15d;
static const int faSortAlphaDesc = 0xf15e;
static const int faSortAmountAsc = 0xf160;
static const int faSortAmountDesc = 0xf161;
static const int faSortAsc = 0xf0de;
static const int faSortDesc = 0xf0dd;
static const int faSortDown = 0xf0dd;
static const int faSortNumericAsc = 0xf162;
static const int faSortNumericDesc = 0xf163;
static const int faSortUp = 0xf0de;
static const int faSoundcloud = 0xf1be;
static const int faSpaceShuttle = 0xf197;
static const int faSpinner = 0xf110;
static const int faSpoon = 0xf1b1;
static const int faSpotify = 0xf1bc;
static const int faSquare = 0xf0c8;
static const int faSquareO = 0xf096;
static const int faStackExchange = 0xf18d;
static const int faStackOverflow = 0xf16c;
static const int faStar = 0xf005;
static const int faStarHalf = 0xf089;
static const int faStarHalfEmpty = 0xf123;
static const int faStarHalfFull = 0xf123;
static const int faStarHalfO = 0xf123;
static const int faStarO = 0xf006;
static const int faSteam = 0xf1b6;
static const int faSteamSquare = 0xf1b7;
static const int faStepBackward = 0xf048;
static const int faStepForward = 0xf051;
static const int faStethoscope = 0xf0f1;
static const int faStickyNote = 0xf249;
static const int faStickyNoteO = 0xf24a;
static const int faStop = 0xf04d;
static const int faStopCircle = 0xf28d;
static const int faStopCircleO = 0xf28e;
static const int faStreetView = 0xf21d;
static const int faStrikethrough = 0xf0cc;
static const int faStumbleupon = 0xf1a4;
static const int faStumbleuponCircle = 0xf1a3;
static const int faSubscript = 0xf12c;
static const int faSubway = 0xf239;
static const int faSuitcase = 0xf0f2;
static const int faSunO = 0xf185;
static const int faSuperscript = 0xf12b;
static const int faSupport = 0xf1cd;
static const int faTable = 0xf0ce;
static const int faTablet = 0xf10a;
static const int faTachometer = 0xf0e4;
static const int faTag = 0xf02b;
static const int faTags = 0xf02c;
static const int faTasks = 0xf0ae;
static const int faTaxi = 0xf1ba;
static const int faTelevision = 0xf26c;
static const int faTencentWeibo = 0xf1d5;
static const int faTerminal = 0xf120;
static const int faTextHeight = 0xf034;
static const int faTextWidth = 0xf035;
static const int faTh = 0xf00a;
static const int faThLarge = 0xf009;
static const int faThList = 0xf00b;
static const int faThumbTack = 0xf08d;
static const int faThumbsDown = 0xf165;
static const int faThumbsODown = 0xf088;
static const int faThumbsOUp = 0xf087;
static const int faThumbsUp = 0xf164;
static const int faTicket = 0xf145;
static const int faTimes = 0xf00d;
static const int faTimesCircle = 0xf057;
static const int faTimesCircleO = 0xf05c;
static const int faTint = 0xf043;
static const int faToggleDown = 0xf150;
static const int faToggleLeft = 0xf191;
static const int faToggleOff = 0xf204;
static const int faToggleOn = 0xf205;
static const int faToggleRight = 0xf152;
static const int faToggleUp = 0xf151;
static const int faTrademark = 0xf25c;
static const int faTrain = 0xf238;
static const int faTransgender = 0xf224;
static const int faTransgenderAlt = 0xf225;
static const int faTrash = 0xf1f8;
static const int faTrashO = 0xf014;
static const int faTree = 0xf1bb;
static const int faTrello = 0xf181;
static const int faTripadvisor = 0xf262;
static const int faTrophy = 0xf091;
static const int faTruck = 0xf0d1;
static const int faTry = 0xf195;
static const int faTty = 0xf1e4;
static const int faTumblr = 0xf173;
static const int faTumblrSquare = 0xf174;
static const int faTurkishLira = 0xf195;
static const int faTv = 0xf26c;
static const int faTwitch = 0xf1e8;
static const int faTwitter = 0xf099;
static const int faTwitterSquare = 0xf081;
static const int faUmbrella = 0xf0e9;
static const int faUnderline = 0xf0cd;
static const int faUndo = 0xf0e2;
static const int faUniversity = 0xf19c;
static const int faUnlink = 0xf127;
static const int faUnlock = 0xf09c;
static const int faUnlockAlt = 0xf13e;
static const int faUnsorted = 0xf0dc;
static const int faUpload = 0xf093;
static const int faUsb = 0xf287;
static const int faUsd = 0xf155;
static const int faUser = 0xf007;
static const int faUserMd = 0xf0f0;
static const int faUserPlus = 0xf234;
static const int faUserSecret = 0xf21b;
static const int faUserTimes = 0xf235;
static const int faUsers = 0xf0c0;
static const int faVenus = 0xf221;
static const int faVenusDouble = 0xf226;
static const int faVenusMars = 0xf228;
static const int faViacoin = 0xf237;
static const int faVideoCamera = 0xf03d;
static const int faVimeo = 0xf27d;
static const int faVimeoSquare = 0xf194;
static const int faVine = 0xf1ca;
static const int faVk = 0xf189;
static const int faVolumeDown = 0xf027;
static const int faVolumeOff = 0xf026;
static const int faVolumeUp = 0xf028;
static const int faWarning = 0xf071;
static const int faWechat = 0xf1d7;
static const int faWeibo = 0xf18a;
static const int faWeixin = 0xf1d7;
static const int faWhatsapp = 0xf232;
static const int faWheelchair = 0xf193;
static const int faWifi = 0xf1eb;
static const int faWikipediaW = 0xf266;
static const int faWindows = 0xf17a;
static const int faWon = 0xf159;
static const int faWordpress = 0xf19a;
static const int faWrench = 0xf0ad;
static const int faXing = 0xf168;
static const int faXingSquare = 0xf169;
static const int faYCombinator = 0xf23b;
static const int faYCombinatorSquare = 0xf1d4;
static const int faYahoo = 0xf19e;
static const int faYc = 0xf23b;
static const int faYcSquare = 0xf1d4;
static const int faYelp = 0xf1e9;
static const int faYen = 0xf157;
static const int faYoutube = 0xf167;
static const int faYoutubePlay = 0xf16a;
static const int faYoutubeSquare = 0xf166;
};
#endif // FONTAWESOME_H_INCLUDED
| {
"content_hash": "47dd31624e1cdef27ccb79ee9e98c250",
"timestamp": "",
"source": "github",
"line_count": 752,
"max_line_length": 140,
"avg_line_length": 38.67952127659574,
"alnum_prop": 0.7691752329219239,
"repo_name": "jcredland/juce-alerts",
"id": "84ab205dbc732ade09e5d1c528834b030365a004",
"size": "29087",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/FontAwesome.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "428548"
},
{
"name": "Python",
"bytes": "1168"
}
],
"symlink_target": ""
} |
-- Migration: convert iterations.parent_id (uuid) into an ltree "path" column
-- so ancestor/descendant queries can use PostgreSQL's ltree operators.
CREATE EXTENSION IF NOT EXISTS "ltree";
-- Rename the parent_id column to path; it will hold the materialized path.
ALTER TABLE iterations RENAME parent_id to path;
-- Need to convert the path column to text first so that characters not
-- allowed in ltree labels can be replaced with an underscore.
ALTER TABLE iterations ALTER path TYPE text USING path::text;
-- ltree labels may only contain [A-Za-z0-9_] plus '.' as the label
-- separator, so replace every other character with an underscore.
UPDATE iterations SET path = regexp_replace(path, '[^a-zA-Z0-9_\.]', '_', 'g');
-- Values in path are now valid ltree syntax and can be cast directly.
-- Convert the path column from type text to ltree.
ALTER TABLE iterations ALTER path TYPE ltree USING path::ltree;
-- GiST index enables efficient ancestor/descendant (@>, <@) operations on path.
CREATE INDEX iteration_path_gist_idx ON iterations USING GIST (path);
| {
"content_hash": "f4d84fc6bfe7186677514894fc0c0959",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 103,
"avg_line_length": 45.473684210526315,
"alnum_prop": 0.7650462962962963,
"repo_name": "ldimaggi/almighty-core",
"id": "a647d3acb9e6708955ed9858eb0b9f26d1c3450c",
"size": "864",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "migration/sql-files/031-iterations-parent-path-ltree.sql",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "20107"
},
{
"name": "Go",
"bytes": "1733421"
},
{
"name": "HTML",
"bytes": "2137"
},
{
"name": "Makefile",
"bytes": "36907"
},
{
"name": "PLSQL",
"bytes": "147"
},
{
"name": "PLpgSQL",
"bytes": "18055"
},
{
"name": "PowerShell",
"bytes": "912"
},
{
"name": "Ruby",
"bytes": "657"
},
{
"name": "SQLPL",
"bytes": "2868"
},
{
"name": "Shell",
"bytes": "4664"
}
],
"symlink_target": ""
} |
package ReadObjects;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import javax.vecmath.Vector3f;
public class ObjReader {

    // Aggregated result of the parse; exposed via getVFData().
    private VertexFaceData vfData;

    /**
     * Reads a Wavefront OBJ file and collects vertices ("v"), texture
     * coordinates ("vt"), normals ("vn"), parameter-space vertices
     * (non-standard "tg" keyword) and triangular faces ("f") into a
     * {@link VertexFaceData}.
     *
     * Only triangles are supported: exactly the first three vertex
     * references of each "f" line are used.
     *
     * @param filename path of the OBJ file to parse
     * @throws IOException if the file cannot be opened or read
     */
    public ObjReader(String filename) throws IOException {
        // OBJ face lines use either "v//vn" or "v/vt/vn"; the separator is
        // detected once from the first face line encountered.
        String delimiter = "//";
        boolean delimiterResolved = false;

        long vertexCount = 0;
        long faceCount = 0;

        ArrayList<Vertex> vertices = new ArrayList<Vertex>();
        ArrayList<float[]> texCoords = new ArrayList<float[]>();
        ArrayList<float[]> normals = new ArrayList<float[]>();
        ArrayList<float[]> parameter_space = new ArrayList<float[]>();
        ArrayList<Face> faces = new ArrayList<Face>();

        BufferedReader reader = new BufferedReader(new FileReader(filename));
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] s = line.split("\\s+");
                if (s[0].equals("v")) {
                    // Geometric vertex: "v x y z"
                    float[] v = new float[3];
                    v[0] = Float.parseFloat(s[1]);
                    v[1] = Float.parseFloat(s[2]);
                    v[2] = Float.parseFloat(s[3]);
                    vertices.add(new Vertex(vertexCount, v));
                    vertexCount++;
                } else if (s[0].equals("vt")) {
                    // Texture coordinate: "vt u v"
                    float[] t = new float[2];
                    t[0] = Float.parseFloat(s[1]);
                    t[1] = Float.parseFloat(s[2]);
                    texCoords.add(t);
                } else if (s[0].equals("vn")) {
                    // Vertex normal: "vn x y z"; normalized on load since OBJ
                    // files do not guarantee unit-length normals.
                    Vector3f tmp = new Vector3f(
                            Float.parseFloat(s[1]),
                            Float.parseFloat(s[2]),
                            Float.parseFloat(s[3]));
                    tmp.normalize();
                    normals.add(new float[] { tmp.x, tmp.y, tmp.z });
                } else if (s[0].equals("tg")) {
                    // Non-standard "tg" keyword: parameter-space vertex.
                    float[] vp = new float[3];
                    vp[0] = Float.parseFloat(s[1]);
                    vp[1] = Float.parseFloat(s[2]);
                    vp[2] = Float.parseFloat(s[3]);
                    parameter_space.add(vp);
                } else if (s[0].equals("f")) {
                    faceCount++;
                    // Determine the index separator from the first face line:
                    // "v//vn" uses "//", "v/vt/vn" uses "/".
                    if (!delimiterResolved) {
                        delimiterResolved = true;
                        if (!s[1].contains("//")) {
                            delimiter = "/";
                        }
                    }
                    String reg = "\\" + delimiter;
                    String[] ref1 = s[1].split(reg);
                    String[] ref2 = s[2].split(reg);
                    String[] ref3 = s[3].split(reg);

                    // OBJ indices are 1-based; convert to 0-based list indices.
                    Vertex v1 = vertices.get(Integer.parseInt(ref1[0]) - 1);
                    Vertex v2 = vertices.get(Integer.parseInt(ref2[0]) - 1);
                    Vertex v3 = vertices.get(Integer.parseInt(ref3[0]) - 1);
                    Face face = new Face(faceCount, v1, v2, v3);

                    // Three components ("v/vt/vn") mean the second field is a
                    // texture-coordinate index for each corner. (The original
                    // guarded this with a "once" flag that never flipped, so
                    // it effectively ran for every face; that behavior — one
                    // set of texcoords per face — is kept.)
                    if (ref1.length == 3) {
                        face.addTriTextCoord(texCoords.get(Integer.parseInt(ref1[1]) - 1));
                        face.addTriTextCoord(texCoords.get(Integer.parseInt(ref2[1]) - 1));
                        face.addTriTextCoord(texCoords.get(Integer.parseInt(ref3[1]) - 1));
                    }
                    faces.add(face);
                }
            }
        } finally {
            // Close the file even if parsing throws (fixes a resource leak:
            // the reader was never closed in the original implementation).
            reader.close();
        }

        // Attach per-vertex normals/texture coordinates by position. This
        // assumes the i-th "vn"/"vt" line belongs to the i-th vertex —
        // TODO confirm this holds for all exporters this project consumes.
        // The list sizes are guarded so files without normals or texture
        // coordinates no longer crash with IndexOutOfBoundsException.
        for (int i = 0; i < vertices.size(); i++) {
            Vertex v = vertices.get(i);
            if (i < normals.size()) {
                v.setNormal(normals.get(i));
            }
            if (i < texCoords.size()) {
                v.setTextureCoordinate(texCoords.get(i));
            }
        }

        this.vfData = new VertexFaceData(vertices, faces, parameter_space, normals);
    }

    /**
     * @return the vertex/face data parsed from the OBJ file
     */
    public VertexFaceData getVFData() {
        return this.vfData;
    }
}
| {
"content_hash": "e2622e575cbf7dbb8c893aa6cabe0e00",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 78,
"avg_line_length": 27.644927536231883,
"alnum_prop": 0.5981651376146789,
"repo_name": "simplay/Bachelor-Thesis",
"id": "fc193d3ef4902c440e9cf75da107f34a8849d1ff",
"size": "3815",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scene/src/ReadObjects/ObjReader.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "76901"
},
{
"name": "Java",
"bytes": "292316"
},
{
"name": "Matlab",
"bytes": "33527014"
},
{
"name": "Mercury",
"bytes": "2429"
},
{
"name": "TeX",
"bytes": "423948"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "d2f08806a6f2104cedc7ee3ec34fd6cf",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "7b511d4603ec0fd6a5d78114f2003cb3fafb27f4",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Aster/Aster batangensis/ Syn. Aster batangensis batangensis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
module Kernel

  if defined?(gem_original_require) then
    # Ruby ships with a custom_require, override its require
    remove_method :require
  else
    ##
    # The Kernel#require from before RubyGems was loaded.

    alias gem_original_require require
    private :gem_original_require
  end

  ##
  # When RubyGems is required, Kernel#require is replaced with our own which
  # is capable of loading gems on demand.
  #
  # When you call <tt>require 'x'</tt>, this is what happens:
  # * If the file can be loaded from the existing Ruby loadpath, it
  #   is.
  # * Otherwise, installed gems are searched for a file that matches.
  #   If it's found in gem 'y', that gem is activated (added to the
  #   loadpath).
  #
  # The normal <tt>require</tt> functionality of returning false if
  # that file has already been loaded is preserved.

  def require path
    # If +path+ belongs to a default gem (one shipped with Ruby), activate
    # that gem first so the right version wins over the bundled copy.
    spec = Gem.find_unresolved_default_spec(path)
    if spec
      Gem.remove_unresolved_default_spec(spec)
      gem(spec.name)
    end

    # If there are no unresolved deps, then we can use just try
    # normal require handle loading a gem from the rescue below.
    if Gem::Specification.unresolved_deps.empty? then
      return gem_original_require(path)
    end

    # If +path+ is for a gem that has already been loaded, don't
    # bother trying to find it in an unresolved gem, just go straight
    # to normal require.
    #--
    # TODO request access to the C implementation of this to speed up RubyGems

    spec = Gem::Specification.find { |s|
      s.activated? and s.contains_requirable_file? path
    }

    return gem_original_require(path) if spec

    # Attempt to find +path+ in any unresolved gems...

    found_specs = Gem::Specification.find_in_unresolved path

    # If there are no directly unresolved gems, then try and find +path+
    # in any gems that are available via the currently unresolved gems.
    # For example, given:
    #
    #   a => b => c => d
    #
    # If a and b are currently active with c being unresolved and d.rb is
    # requested, then find_in_unresolved_tree will find d.rb in d because
    # it's a dependency of c.
    #
    if found_specs.empty? then
      found_specs = Gem::Specification.find_in_unresolved_tree path

      found_specs.each do |found_spec|
        found_spec.activate
      end

    # We found +path+ directly in an unresolved gem. Now we figure out, of
    # the possible found specs, which one we should activate.
    else

      # Check that all the found specs are just different
      # versions of the same gem
      names = found_specs.map(&:name).uniq

      if names.size > 1 then
        raise Gem::LoadError, "#{path} found in multiple gems: #{names.join ', '}"
      end

      # Ok, now find a gem that has no conflicts, starting
      # at the highest version.
      valid = found_specs.select { |s| s.conflicts.empty? }.last

      unless valid then
        le = Gem::LoadError.new "unable to find a version of '#{names.first}' to activate"
        le.name = names.first
        raise le
      end

      valid.activate
    end

    gem_original_require path
  rescue LoadError => load_error
    # A plain require failed: if the message indicates a missing gem file,
    # try activating a gem that provides +path+ and require once more.
    if load_error.message.start_with?("Could not find") or
       (load_error.message.end_with?(path) and Gem.try_activate(path)) then
      return gem_original_require(path)
    end

    raise load_error
  end

  private :require
end
| {
"content_hash": "adcda4e08497ac8c430a64ef89a5ffc4",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 90,
"avg_line_length": 29.991150442477878,
"alnum_prop": 0.6627323694305105,
"repo_name": "booyuan/Waguu",
"id": "e6dfce644fe577fd1c145900fedb04b5c548abbf",
"size": "3526",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "Waguu/packages/Ruby.Compass.0.12.2.2/ruby/lib/ruby/2.0.0/rubygems/core_ext/kernel_require.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "100"
},
{
"name": "C",
"bytes": "202739"
},
{
"name": "C#",
"bytes": "103002"
},
{
"name": "C++",
"bytes": "1549"
},
{
"name": "CSS",
"bytes": "481675"
},
{
"name": "JavaScript",
"bytes": "3234412"
},
{
"name": "Objective-C",
"bytes": "1102"
},
{
"name": "PowerShell",
"bytes": "87046"
},
{
"name": "Puppet",
"bytes": "1877"
},
{
"name": "Ruby",
"bytes": "9252875"
},
{
"name": "Shell",
"bytes": "2289"
},
{
"name": "TypeScript",
"bytes": "412028"
}
],
"symlink_target": ""
} |
<?php
/**
* Core functions used all over the scripts.
* This script is distinct from libraries/common.inc.php because this
* script is called from /test.
*/
declare(strict_types=1);
namespace PhpMyAdmin;
use PhpMyAdmin\Plugins\AuthenticationPlugin;
use Symfony\Component\Config\FileLocator;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\ContainerInterface;
use Symfony\Component\DependencyInjection\Loader\PhpFileLoader;
use const DATE_RFC1123;
use const E_USER_ERROR;
use const E_USER_WARNING;
use const FILTER_VALIDATE_IP;
use function array_keys;
use function array_pop;
use function array_walk_recursive;
use function chr;
use function count;
use function date_default_timezone_get;
use function date_default_timezone_set;
use function defined;
use function explode;
use function extension_loaded;
use function filter_var;
use function function_exists;
use function getenv;
use function gettype;
use function gmdate;
use function hash_equals;
use function hash_hmac;
use function header;
use function htmlspecialchars;
use function http_build_query;
use function implode;
use function in_array;
use function ini_get;
use function ini_set;
use function intval;
use function is_array;
use function is_numeric;
use function is_scalar;
use function is_string;
use function json_encode;
use function mb_internal_encoding;
use function mb_strlen;
use function mb_strpos;
use function mb_strrpos;
use function mb_substr;
use function parse_str;
use function parse_url;
use function preg_match;
use function preg_replace;
use function session_id;
use function session_write_close;
use function sprintf;
use function str_replace;
use function strlen;
use function strpos;
use function strtolower;
use function strtr;
use function substr;
use function trigger_error;
use function unserialize;
use function urldecode;
use function vsprintf;
use function json_decode;
/**
* Core class
*/
class Core
{
/**
* checks given $var and returns it if valid, or $default of not valid
* given $var is also checked for type being 'similar' as $default
* or against any other type if $type is provided
*
* <code>
* // $_REQUEST['db'] not set
* echo Core::ifSetOr($_REQUEST['db'], ''); // ''
* // $_POST['sql_query'] not set
* echo Core::ifSetOr($_POST['sql_query']); // null
* // $cfg['EnableFoo'] not set
* echo Core::ifSetOr($cfg['EnableFoo'], false, 'boolean'); // false
* echo Core::ifSetOr($cfg['EnableFoo']); // null
* // $cfg['EnableFoo'] set to 1
* echo Core::ifSetOr($cfg['EnableFoo'], false, 'boolean'); // false
* echo Core::ifSetOr($cfg['EnableFoo'], false, 'similar'); // 1
* echo Core::ifSetOr($cfg['EnableFoo'], false); // 1
* // $cfg['EnableFoo'] set to true
* echo Core::ifSetOr($cfg['EnableFoo'], false, 'boolean'); // true
* </code>
*
* @see self::isValid()
*
* @param mixed $var param to check
* @param mixed $default default value
* @param mixed $type var type or array of values to check against $var
*
* @return mixed $var or $default
*/
public static function ifSetOr(&$var, $default = null, $type = 'similar')
{
if (! self::isValid($var, $type, $default)) {
return $default;
}
return $var;
}
/**
* checks given $var against $type or $compare
*
* $type can be:
* - false : no type checking
* - 'scalar' : whether type of $var is integer, float, string or boolean
* - 'numeric' : whether type of $var is any number representation
* - 'length' : whether type of $var is scalar with a string length > 0
* - 'similar' : whether type of $var is similar to type of $compare
* - 'equal' : whether type of $var is identical to type of $compare
* - 'identical' : whether $var is identical to $compare, not only the type!
* - or any other valid PHP variable type
*
* <code>
* // $_REQUEST['doit'] = true;
* Core::isValid($_REQUEST['doit'], 'identical', 'true'); // false
* // $_REQUEST['doit'] = 'true';
* Core::isValid($_REQUEST['doit'], 'identical', 'true'); // true
* </code>
*
* NOTE: call-by-reference is used to not get NOTICE on undefined vars,
* but the var is not altered inside this function, also after checking a var
* this var exists nut is not set, example:
* <code>
* // $var is not set
* isset($var); // false
* functionCallByReference($var); // false
* isset($var); // true
* functionCallByReference($var); // true
* </code>
*
* to avoid this we set this var to null if not isset
*
* @see https://www.php.net/gettype
*
* @param mixed $var variable to check
* @param mixed $type var type or array of valid values to check against $var
* @param mixed $compare var to compare with $var
*
* @return bool whether valid or not
*
* @todo add some more var types like hex, bin, ...?
*/
    public static function isValid(&$var, $type = 'length', $compare = null): bool
    {
        if (! isset($var)) {
            // var is not even set
            return false;
        }
        if ($type === false) {
            // no vartype requested
            return true;
        }
        if (is_array($type)) {
            // $type is a list of allowed values rather than a type name
            return in_array($var, $type);
        }
        // allow some aliases of var types
        $type = strtolower($type);
        switch ($type) {
            case 'identic':
                $type = 'identical';
                break;
            case 'len':
                $type = 'length';
                break;
            case 'bool':
                $type = 'boolean';
                break;
            case 'float':
                $type = 'double';
                break;
            case 'int':
                $type = 'integer';
                break;
            case 'null':
                $type = 'NULL';
                break;
        }
        if ($type === 'identical') {
            // strict comparison: both value and type must match $compare
            return $var === $compare;
        }
        // whether we should check against given $compare
        if ($type === 'similar') {
            // 'similar' relaxes the check: string/boolean compare values accept
            // any scalar, integer/double accept anything numeric
            switch (gettype($compare)) {
                case 'string':
                case 'boolean':
                    $type = 'scalar';
                    break;
                case 'integer':
                case 'double':
                    $type = 'numeric';
                    break;
                default:
                    $type = gettype($compare);
            }
        } elseif ($type === 'equal') {
            // 'equal' requires exactly the same type as $compare
            $type = gettype($compare);
        }
        // do the check
        if ($type === 'length' || $type === 'scalar') {
            $is_scalar = is_scalar($var);
            if ($is_scalar && $type === 'length') {
                // 'length' additionally requires a non-empty string form
                return strlen((string) $var) > 0;
            }
            return $is_scalar;
        }
        if ($type === 'numeric') {
            return is_numeric($var);
        }
        return gettype($var) === $type;
    }
/**
* Removes insecure parts in a path; used before include() or
* require() when a part of the path comes from an insecure source
* like a cookie or form.
*
* @param string $path The path to check
*/
    public static function securePath(string $path): string
    {
        // change .. to .
        // Any run of consecutive dots collapses to a single dot, so "../"
        // sequences cannot climb out of the intended directory.
        return (string) preg_replace('@\.\.*@', '.', $path);
    }
/**
* displays the given error message on phpMyAdmin error page in foreign language,
* ends script execution and closes session
*
* loads language file if not loaded already
*
* @param string $error_message the error message or named error message
* @param string|array $message_args arguments applied to $error_message
*/
    public static function fatalError(
        string $error_message,
        $message_args = null
    ): void {
        global $dbi;
        /* Use format string if applicable */
        if (is_string($message_args)) {
            $error_message = sprintf($error_message, $message_args);
        } elseif (is_array($message_args)) {
            $error_message = vsprintf($error_message, $message_args);
        }
        /*
         * Avoid using Response class as config does not have to be loaded yet
         * (this can happen on early fatal error)
         */
        if (isset($dbi, $GLOBALS['PMA_Config']) && $dbi !== null
            && $GLOBALS['PMA_Config']->get('is_setup') === false
            && Response::getInstance()->isAjax()
        ) {
            // Full runtime available: answer the AJAX request via Response
            $response = Response::getInstance();
            $response->setRequestStatus(false);
            $response->addJSON('message', Message::error($error_message));
        } elseif (! empty($_REQUEST['ajax_request'])) {
            // Generate JSON manually
            self::headerJSON();
            echo json_encode(
                [
                    'success' => false,
                    'message' => Message::error($error_message)->getDisplay(),
                ]
            );
        } else {
            // Plain HTML error page; <br> is rewritten so the sanitizer can
            // turn it back into a safe line break
            $error_message = strtr($error_message, ['<br>' => '[br]']);
            $template = new Template();
            echo $template->render('error/generic', [
                'lang' => $GLOBALS['lang'] ?? 'en',
                'dir' => $GLOBALS['text_dir'] ?? 'ltr',
                'error_message' => Sanitize::sanitizeMessage($error_message),
            ]);
        }
        if (! defined('TESTSUITE')) {
            // stop execution; under the test suite we must keep running
            exit;
        }
    }
/**
* Returns a link to the PHP documentation
*
* @param string $target anchor in documentation
*
* @return string the URL
*
* @access public
*/
public static function getPHPDocLink(string $target): string
{
/* List of PHP documentation translations */
$php_doc_languages = [
'pt_BR',
'zh',
'fr',
'de',
'it',
'ja',
'ro',
'ru',
'es',
'tr',
];
$lang = 'en';
if (isset($GLOBALS['lang']) && in_array($GLOBALS['lang'], $php_doc_languages)) {
$lang = $GLOBALS['lang'];
}
return self::linkURL('https://www.php.net/manual/' . $lang . '/' . $target);
}
/**
* Warn or fail on missing extension.
*
* @param string $extension Extension name
* @param bool $fatal Whether the error is fatal.
* @param string $extra Extra string to append to message.
*/
    public static function warnMissingExtension(
        string $extension,
        bool $fatal = false,
        string $extra = ''
    ): void {
        /** @var ErrorHandler $error_handler */
        global $error_handler;
        /* Gettext does not have to be loaded yet here */
        if (function_exists('__')) {
            $message = __(
                'The %s extension is missing. Please check your PHP configuration.'
            );
        } else {
            // untranslated fallback when gettext itself is unavailable
            $message
                = 'The %s extension is missing. Please check your PHP configuration.';
        }
        // link the extension name to its PHP manual chapter
        $doclink = self::getPHPDocLink('book.' . $extension . '.php');
        $message = sprintf(
            $message,
            '[a@' . $doclink . '@Documentation][em]' . $extension . '[/em][/a]'
        );
        if ($extra != '') {
            $message .= ' ' . $extra;
        }
        if ($fatal) {
            // fatal: abort via the common fatal-error page
            self::fatalError($message);
            return;
        }
        // non-fatal: queue a warning through the global error handler
        $error_handler->addError(
            $message,
            E_USER_WARNING,
            '',
            0,
            false
        );
    }
/**
* returns count of tables in given db
*
* @param string $db database to count tables for
*
* @return int count of tables in $db
*/
    public static function getTableCount(string $db): int
    {
        global $dbi;
        // SHOW TABLES rather than information_schema — cheaper for a count
        $tables = $dbi->tryQuery(
            'SHOW TABLES FROM ' . Util::backquote($db) . ';',
            DatabaseInterface::CONNECT_USER,
            DatabaseInterface::QUERY_STORE
        );
        if ($tables) {
            $num_tables = $dbi->numRows($tables);
            $dbi->freeResult($tables);
        } else {
            // query failed (e.g. missing privileges) — report zero tables
            $num_tables = 0;
        }
        return $num_tables;
    }
/**
* Converts numbers like 10M into bytes
* Used with permission from Moodle (https://moodle.org) by Martin Dougiamas
* (renamed with PMA prefix to avoid double definition when embedded
* in Moodle)
*
* @param string|int $size size (Default = 0)
*/
public static function getRealSize($size = 0): int
{
if (! $size) {
return 0;
}
$binaryprefixes = [
'T' => 1099511627776,
't' => 1099511627776,
'G' => 1073741824,
'g' => 1073741824,
'M' => 1048576,
'm' => 1048576,
'K' => 1024,
'k' => 1024,
];
if (preg_match('/^([0-9]+)([KMGT])/i', (string) $size, $matches)) {
return (int) ($matches[1] * $binaryprefixes[$matches[2]]);
}
return (int) $size;
}
/**
* Checks given $page against given $allowList and returns true if valid
* it optionally ignores query parameters in $page (script.php?ignored)
*
* @param string $page page to check
* @param array $allowList allow list to check page against
* @param bool $include whether the page is going to be included
*
* @return bool whether $page is valid or not (in $allowList or not)
*/
public static function checkPageValidity(&$page, array $allowList = [], $include = false): bool
{
if (empty($allowList)) {
$allowList = ['index.php'];
}
if (empty($page)) {
return false;
}
if (in_array($page, $allowList)) {
return true;
}
if ($include) {
return false;
}
$_page = mb_substr(
$page,
0,
(int) mb_strpos($page . '?', '?')
);
if (in_array($_page, $allowList)) {
return true;
}
$_page = urldecode($page);
$_page = mb_substr(
$_page,
0,
(int) mb_strpos($_page . '?', '?')
);
return in_array($_page, $allowList);
}
/**
* tries to find the value for the given environment variable name
*
* searches in $_SERVER, $_ENV then tries getenv() and apache_getenv()
* in this order
*
* @param string $var_name variable name
*
* @return string value of $var or empty string
*/
public static function getenv(string $var_name): string
{
if (isset($_SERVER[$var_name])) {
return (string) $_SERVER[$var_name];
}
if (isset($_ENV[$var_name])) {
return (string) $_ENV[$var_name];
}
if (getenv($var_name)) {
return (string) getenv($var_name);
}
if (function_exists('apache_getenv')
&& apache_getenv($var_name, true)
) {
return (string) apache_getenv($var_name, true);
}
return '';
}
/**
* Send HTTP header, taking IIS limits into account (600 seems ok)
*
* @param string $uri the header to send
* @param bool $use_refresh whether to use Refresh: header when running on IIS
*/
    public static function sendHeaderLocation(string $uri, bool $use_refresh = false): void
    {
        // IIS cannot handle very long Location headers; emit an HTML redirect
        // page instead (600 chars is the empirically safe limit)
        if ($GLOBALS['PMA_Config']->get('PMA_IS_IIS') && mb_strlen($uri) > 600) {
            Response::getInstance()->disable();
            $template = new Template();
            echo $template->render('header_location', ['uri' => $uri]);
            return;
        }
        /*
         * Avoid relative path redirect problems in case user entered URL
         * like /phpmyadmin/index.php/ which some web servers happily accept.
         */
        if ($uri[0] === '.') {
            $uri = $GLOBALS['PMA_Config']->getRootPath() . substr($uri, 2);
        }
        $response = Response::getInstance();
        // release the session lock before redirecting
        session_write_close();
        if ($response->headersSent()) {
            trigger_error(
                'Core::sendHeaderLocation called when headers are already sent!',
                E_USER_ERROR
            );
        }
        // bug #1523784: IE6 does not like 'Refresh: 0', it
        // results in a blank page
        // but we need it when coming from the cookie login panel)
        if ($GLOBALS['PMA_Config']->get('PMA_IS_IIS') && $use_refresh) {
            $response->header('Refresh: 0; ' . $uri);
        } else {
            $response->header('Location: ' . $uri);
        }
    }
/**
* Outputs application/json headers. This includes no caching.
*/
    public static function headerJSON(): void
    {
        if (defined('TESTSUITE')) {
            // emitting headers would break test runs
            return;
        }
        // No caching
        self::noCacheHeader();
        // MIME type
        header('Content-Type: application/json; charset=UTF-8');
        // Disable content sniffing in browser
        // This is needed in case we include HTML in JSON, browser might assume it's
        // html to display
        header('X-Content-Type-Options: nosniff');
    }
/**
* Outputs headers to prevent caching in browser (and on the way).
*/
    public static function noCacheHeader(): void
    {
        if (defined('TESTSUITE')) {
            // emitting headers would break test runs
            return;
        }
        // rfc2616 - Section 14.21
        // Expires set to "now" marks the response as immediately stale
        header('Expires: ' . gmdate(DATE_RFC1123));
        // HTTP/1.1
        header(
            'Cache-Control: no-store, no-cache, must-revalidate,'
            . ' pre-check=0, post-check=0, max-age=0'
        );
        header('Pragma: no-cache'); // HTTP/1.0
        // test case: exporting a database into a .gz file with Safari
        // would produce files not having the current time
        // (added this header for Safari but should not harm other browsers)
        header('Last-Modified: ' . gmdate(DATE_RFC1123));
    }
/**
* Sends header indicating file download.
*
* @param string $filename Filename to include in headers if empty,
* none Content-Disposition header will be sent.
* @param string $mimetype MIME type to include in headers.
* @param int $length Length of content (optional)
* @param bool $no_cache Whether to include no-caching headers.
*/
    public static function downloadHeader(
        string $filename,
        string $mimetype,
        int $length = 0,
        bool $no_cache = true
    ): void {
        if ($no_cache) {
            self::noCacheHeader();
        }
        /* Replace all possibly dangerous chars in filename */
        $filename = Sanitize::sanitizeFilename($filename);
        if (! empty($filename)) {
            header('Content-Description: File Transfer');
            header('Content-Disposition: attachment; filename="' . $filename . '"');
        }
        header('Content-Type: ' . $mimetype);
        // inform the server that compression has been done,
        // to avoid a double compression (for example with Apache + mod_deflate)
        // NOTE(review): PMA_USR_BROWSER_AGENT / PMA_USR_BROWSER_VER are
        // constants defined elsewhere in the project — confirm they are set
        // before any download can be triggered.
        $notChromeOrLessThan43 = PMA_USR_BROWSER_AGENT != 'CHROME' // see bug #4942
            || (PMA_USR_BROWSER_AGENT == 'CHROME' && PMA_USR_BROWSER_VER < 43);
        if (strpos($mimetype, 'gzip') !== false && $notChromeOrLessThan43) {
            header('Content-Encoding: gzip');
        }
        header('Content-Transfer-Encoding: binary');
        if ($length <= 0) {
            // unknown size — let the client stream without Content-Length
            return;
        }
        header('Content-Length: ' . $length);
    }
/**
* Returns value of an element in $array given by $path.
* $path is a string describing position of an element in an associative array,
* eg. Servers/1/host refers to $array[Servers][1][host]
*
* @param string $path path in the array
* @param array $array the array
* @param mixed $default default value
*
* @return array|mixed|null array element or $default
*/
public static function arrayRead(string $path, array $array, $default = null)
{
$keys = explode('/', $path);
$value =& $array;
foreach ($keys as $key) {
if (! isset($value[$key])) {
return $default;
}
$value =& $value[$key];
}
return $value;
}
/**
* Stores value in an array
*
* @param string $path path in the array
* @param array $array the array
* @param mixed $value value to store
*/
public static function arrayWrite(string $path, array &$array, $value): void
{
$keys = explode('/', $path);
$last_key = array_pop($keys);
$a =& $array;
foreach ($keys as $key) {
if (! isset($a[$key])) {
$a[$key] = [];
}
$a =& $a[$key];
}
$a[$last_key] = $value;
}
/**
* Removes value from an array
*
* @param string $path path in the array
* @param array $array the array
*/
    public static function arrayRemove(string $path, array &$array): void
    {
        $keys = explode('/', $path);
        $keys_last = array_pop($keys);
        $path = [];
        $depth = 0;
        // $path[$i] keeps a reference to the sub-array at nesting level $i so
        // that parents can be revisited for empty-array cleanup afterwards
        $path[0] =& $array;
        $found = true;
        // go as deep as required or possible
        foreach ($keys as $key) {
            if (! isset($path[$depth][$key])) {
                $found = false;
                break;
            }
            $depth++;
            $path[$depth] =& $path[$depth - 1][$key];
        }
        // if element found, remove it
        if ($found) {
            unset($path[$depth][$keys_last]);
            $depth--;
        }
        // remove empty nested arrays
        // (walk back up, pruning each container that became empty)
        for (; $depth >= 0; $depth--) {
            if (isset($path[$depth + 1]) && count($path[$depth + 1]) !== 0) {
                break;
            }
            unset($path[$depth][$keys[$depth]]);
        }
    }
/**
* Returns link to (possibly) external site using defined redirector.
*
* @param string $url URL where to go.
*
* @return string URL for a link.
*/
    public static function linkURL(string $url): string
    {
        // only http(s) URLs go through the redirector
        if (! preg_match('#^https?://#', $url)) {
            return $url;
        }
        $params = [];
        $params['url'] = $url;
        $url = Url::getCommon($params);
        //strip off token and such sensitive information. Just keep url.
        $arr = parse_url($url);
        if (! is_array($arr)) {
            $arr = [];
        }
        parse_str($arr['query'] ?? '', $vars);
        $query = http_build_query(['url' => $vars['url']]);
        // the setup script lives one directory deeper, hence the '../'
        if ($GLOBALS['PMA_Config'] !== null && $GLOBALS['PMA_Config']->get('is_setup')) {
            $url = '../url.php?' . $query;
        } else {
            $url = './url.php?' . $query;
        }
        return $url;
    }
/**
* Checks whether domain of URL is an allowed domain or not.
* Use only for URLs of external sites.
*
* @param string $url URL of external site.
*
* @return bool True: if domain of $url is allowed domain,
* False: otherwise.
*/
public static function isAllowedDomain(string $url): bool
{
$arr = parse_url($url);
if (! is_array($arr)) {
$arr = [];
}
// We need host to be set
if (! isset($arr['host']) || strlen($arr['host']) == 0) {
return false;
}
// We do not want these to be present
$blocked = [
'user',
'pass',
'port',
];
foreach ($blocked as $part) {
if (isset($arr[$part]) && strlen((string) $arr[$part]) != 0) {
return false;
}
}
$domain = $arr['host'];
$domainAllowList = [
/* Include current domain */
$_SERVER['SERVER_NAME'],
/* phpMyAdmin domains */
'wiki.phpmyadmin.net',
'www.phpmyadmin.net',
'phpmyadmin.net',
'demo.phpmyadmin.net',
'docs.phpmyadmin.net',
/* mysql.com domains */
'dev.mysql.com',
'bugs.mysql.com',
/* mariadb domains */
'mariadb.org',
'mariadb.com',
/* php.net domains */
'php.net',
'www.php.net',
/* Github domains*/
'github.com',
'www.github.com',
/* Percona domains */
'www.percona.com',
/* Following are doubtful ones. */
'mysqldatabaseadministration.blogspot.com',
];
return in_array($domain, $domainAllowList);
}
/**
* Replace some html-unfriendly stuff
*
* @param string $buffer String to process
*
* @return string Escaped and cleaned up text suitable for html
*/
    public static function mimeDefaultFunction(string $buffer): string
    {
        // escape HTML metacharacters first
        $buffer = htmlspecialchars($buffer);
        // NOTE(review): replacement target/value look identical here but may
        // contain a non-breaking space from the original source — verify the
        // literal bytes against upstream before editing this line.
        $buffer = str_replace(' ', ' ', $buffer);
        // turn CR, LF and CRLF into <br> followed by a real newline
        return (string) preg_replace("@((\015\012)|(\015)|(\012))@", '<br>' . "\n", $buffer);
    }
/**
* Displays SQL query before executing.
*
* @param array|string $query_data Array containing queries or query itself
*/
    public static function previewSQL($query_data): void
    {
        $retval = '<div class="preview_sql">';
        if (empty($query_data)) {
            $retval .= __('No change');
        } elseif (is_array($query_data)) {
            // multiple statements: format each one in turn
            foreach ($query_data as $query) {
                $retval .= Html\Generator::formatSql($query);
            }
        } else {
            $retval .= Html\Generator::formatSql($query_data);
        }
        $retval .= '</div>';
        // ship the rendered preview to the client as JSON
        $response = Response::getInstance();
        $response->addJSON('sql_data', $retval);
    }
/**
* recursively check if variable is empty
*
* @param mixed $value the variable
*
* @return bool true if empty
*/
public static function emptyRecursive($value): bool
{
$empty = true;
if (is_array($value)) {
array_walk_recursive(
$value,
/**
* @param mixed $item
*/
static function ($item) use (&$empty) {
$empty = $empty && empty($item);
}
);
} else {
$empty = empty($value);
}
return $empty;
}
/**
* Creates some globals from $_POST variables matching a pattern
*
* @param array $post_patterns The patterns to search for
*/
    public static function setPostAsGlobal(array $post_patterns): void
    {
        global $containerBuilder;
        // Promote only POST keys matching one of the given regex patterns —
        // this is the guard against arbitrary global-variable injection.
        foreach (array_keys($_POST) as $post_key) {
            foreach ($post_patterns as $one_post_pattern) {
                if (! preg_match($one_post_pattern, $post_key)) {
                    continue;
                }
                $GLOBALS[$post_key] = $_POST[$post_key];
                // mirror the value into the DI container as well
                $containerBuilder->setParameter($post_key, $GLOBALS[$post_key]);
            }
        }
    }
    /**
     * Reads the db/table request parameters (POST preferred, then GET, then
     * any other request source) into the corresponding globals, $url_params
     * and the DI container. Invalid/missing values become empty strings.
     */
    public static function setDatabaseAndTableFromRequest(ContainerInterface $containerBuilder): void
    {
        global $db, $table, $url_params;
        $databaseFromRequest = $_POST['db'] ?? $_GET['db'] ?? $_REQUEST['db'] ?? null;
        $tableFromRequest = $_POST['table'] ?? $_GET['table'] ?? $_REQUEST['table'] ?? null;
        // isValid() with default args requires a non-empty scalar
        $db = self::isValid($databaseFromRequest) ? $databaseFromRequest : '';
        $table = self::isValid($tableFromRequest) ? $tableFromRequest : '';
        $url_params['db'] = $db;
        $url_params['table'] = $table;
        $containerBuilder->setParameter('db', $db);
        $containerBuilder->setParameter('table', $table);
        $containerBuilder->setParameter('url_params', $url_params);
    }
/**
* PATH_INFO could be compromised if set, so remove it from PHP_SELF
* and provide a clean PHP_SELF here
*/
public static function cleanupPathInfo(): void
{
global $PMA_PHP_SELF;
$PMA_PHP_SELF = self::getenv('PHP_SELF');
if (empty($PMA_PHP_SELF)) {
$PMA_PHP_SELF = urldecode(self::getenv('REQUEST_URI'));
}
$_PATH_INFO = self::getenv('PATH_INFO');
if (! empty($_PATH_INFO) && ! empty($PMA_PHP_SELF)) {
$question_pos = mb_strpos($PMA_PHP_SELF, '?');
if ($question_pos != false) {
$PMA_PHP_SELF = mb_substr($PMA_PHP_SELF, 0, $question_pos);
}
$path_info_pos = mb_strrpos($PMA_PHP_SELF, $_PATH_INFO);
if ($path_info_pos !== false) {
$path_info_part = mb_substr($PMA_PHP_SELF, $path_info_pos, mb_strlen($_PATH_INFO));
if ($path_info_part == $_PATH_INFO) {
$PMA_PHP_SELF = mb_substr($PMA_PHP_SELF, 0, $path_info_pos);
}
}
}
$path = [];
foreach (explode('/', $PMA_PHP_SELF) as $part) {
// ignore parts that have no value
if (empty($part) || $part === '.') {
continue;
}
if ($part !== '..') {
// cool, we found a new part
$path[] = $part;
} elseif (count($path) > 0) {
// going back up? sure
array_pop($path);
}
// Here we intentionall ignore case where we go too up
// as there is nothing sane to do
}
$PMA_PHP_SELF = htmlspecialchars('/' . implode('/', $path));
}
/**
* Checks that required PHP extensions are there.
*/
public static function checkExtensions(): void
{
/**
* Warning about mbstring.
*/
if (! function_exists('mb_detect_encoding')) {
self::warnMissingExtension('mbstring');
}
/**
* We really need this one!
*/
if (! function_exists('preg_replace')) {
self::warnMissingExtension('pcre', true);
}
/**
* JSON is required in several places.
*/
if (! function_exists('json_encode')) {
self::warnMissingExtension('json', true);
}
/**
* ctype is required for Twig.
*/
if (! function_exists('ctype_alpha')) {
self::warnMissingExtension('ctype', true);
}
/**
* hash is required for cookie authentication.
*/
if (function_exists('hash_hmac')) {
return;
}
self::warnMissingExtension('hash', true);
}
/**
* Gets the "true" IP address of the current user
*
* @return string|bool the ip of the user
*
* @access private
*/
    public static function getIp()
    {
        /* Get the address of user */
        if (empty($_SERVER['REMOTE_ADDR'])) {
            /* We do not know remote IP */
            return false;
        }
        $direct_ip = $_SERVER['REMOTE_ADDR'];
        /* Do we trust this IP as a proxy? If yes we will use it's header. */
        if (! isset($GLOBALS['cfg']['TrustedProxies'][$direct_ip])) {
            /* Return true IP */
            return $direct_ip;
        }
        /**
         * Parse header in form:
         * X-Forwarded-For: client, proxy1, proxy2
         */
        // Get header content
        // (TrustedProxies maps proxy IP => name of the header to trust)
        $value = self::getenv($GLOBALS['cfg']['TrustedProxies'][$direct_ip]);
        // Grab first element what is client address
        $value = explode(',', $value)[0];
        // checks that the header contains only one IP address,
        $is_ip = filter_var($value, FILTER_VALIDATE_IP);
        if ($is_ip !== false) {
            // True IP behind a proxy
            return $value;
        }
        // We could not parse header
        return false;
    }
/**
* Sanitizes MySQL hostname
*
* * strips p: prefix(es)
*
* @param string $name User given hostname
*/
public static function sanitizeMySQLHost(string $name): string
{
while (strtolower(substr($name, 0, 2)) === 'p:') {
$name = substr($name, 2);
}
return $name;
}
/**
* Sanitizes MySQL username
*
* * strips part behind null byte
*
* @param string $name User given username
*/
public static function sanitizeMySQLUser(string $name): string
{
$position = strpos($name, chr(0));
if ($position !== false) {
return substr($name, 0, $position);
}
return $name;
}
/**
* Safe unserializer wrapper
*
* It does not unserialize data containing objects
*
* @param string $data Data to unserialize
*
* @return mixed|null
*/
    public static function safeUnserialize(string $data)
    {
        if (! is_string($data)) {
            return null;
        }
        /* validate serialized data */
        // Hand-written scanner over PHP's serialization format: accept only
        // scalars, null and (nested) arrays; reject objects and anything else
        // before ever calling unserialize().
        $length = strlen($data);
        $depth = 0;
        for ($i = 0; $i < $length; $i++) {
            $value = $data[$i];
            switch ($value) {
                case '}':
                    /* end of array */
                    if ($depth <= 0) {
                        return null;
                    }
                    $depth--;
                    break;
                case 's':
                    /* string */
                    // parse string length
                    $strlen = intval(substr($data, $i + 2));
                    // string start
                    $i = strpos($data, ':', $i + 2);
                    if ($i === false) {
                        return null;
                    }
                    // skip string, quotes and ;
                    $i += 2 + $strlen + 1;
                    if ($data[$i] !== ';') {
                        return null;
                    }
                    break;
                case 'b':
                case 'i':
                case 'd':
                    /* bool, integer or double */
                    // skip value to separator
                    $i = strpos($data, ';', $i);
                    if ($i === false) {
                        return null;
                    }
                    break;
                case 'a':
                    /* array */
                    // find array start
                    $i = strpos($data, '{', $i);
                    if ($i === false) {
                        return null;
                    }
                    // remember nesting
                    $depth++;
                    break;
                case 'N':
                    /* null */
                    // skip to end
                    $i = strpos($data, ';', $i);
                    if ($i === false) {
                        return null;
                    }
                    break;
                default:
                    /* any other elements are not wanted */
                    // (notably 'O' / 'C' object tokens are refused here)
                    return null;
            }
        }
        // check unterminated arrays
        if ($depth > 0) {
            return null;
        }
        // input passed the scan — safe to deserialize
        return unserialize($data);
    }
/**
* Applies changes to PHP configuration.
*/
    public static function configure(): void
    {
        /**
         * Set utf-8 encoding for PHP
         */
        ini_set('default_charset', 'utf-8');
        mb_internal_encoding('utf-8');
        /**
         * Set precision to sane value, with higher values
         * things behave slightly unexpectedly, for example
         * round(1.2, 2) returns 1.199999999999999956.
         */
        ini_set('precision', '14');
        /**
         * check timezone setting
         * this could produce an E_WARNING - but only once,
         * if not done here it will produce E_WARNING on every date/time function
         */
        // @ suppresses the one-off warning when no timezone is configured
        date_default_timezone_set(@date_default_timezone_get());
    }
/**
* Check whether PHP configuration matches our needs.
*/
    public static function checkConfiguration(): void
    {
        /**
         * As we try to handle charsets by ourself, mbstring overloads just
         * break it, see bug 1063821.
         *
         * We specifically use empty here as we are looking for anything else than
         * empty value or 0.
         */
        if (extension_loaded('mbstring') && ! empty(ini_get('mbstring.func_overload'))) {
            self::fatalError(
                __(
                    'You have enabled mbstring.func_overload in your PHP '
                    . 'configuration. This option is incompatible with phpMyAdmin '
                    . 'and might cause some data to be corrupted!'
                )
            );
        }
        /**
         * The ini_set and ini_get functions can be disabled using
         * disable_functions but we're relying quite a lot of them.
         */
        if (function_exists('ini_get') && function_exists('ini_set')) {
            // both available — configuration is acceptable
            return;
        }
        self::fatalError(
            __(
                'The ini_get and/or ini_set functions are disabled in php.ini. '
                . 'phpMyAdmin requires these functions!'
            )
        );
    }
/**
* Checks request and fails with fatal error if something problematic is found
*/
public static function checkRequest(): void
{
if (isset($_REQUEST['GLOBALS']) || isset($_FILES['GLOBALS'])) {
self::fatalError(__('GLOBALS overwrite attempt'));
}
/**
* protect against possible exploits - there is no need to have so much variables
*/
if (count($_REQUEST) <= 1000) {
return;
}
self::fatalError(__('possible exploit'));
}
/**
* Sign the sql query using hmac using the session token
*
* @param string $sqlQuery The sql query
*
* @return string
*/
public static function signSqlQuery($sqlQuery)
{
global $cfg;
$secret = $_SESSION[' HMAC_secret '] ?? '';
return hash_hmac('sha256', $sqlQuery, $secret . $cfg['blowfish_secret']);
}
/**
* Check that the sql query has a valid hmac signature
*
* @param string $sqlQuery The sql query
* @param string $signature The Signature to check
*
* @return bool
*/
public static function checkSqlQuerySignature($sqlQuery, $signature)
{
global $cfg;
$secret = $_SESSION[' HMAC_secret '] ?? '';
$hmac = hash_hmac('sha256', $sqlQuery, $secret . $cfg['blowfish_secret']);
return hash_equals($hmac, $signature);
}
/**
* Check whether user supplied token is valid, if not remove any possibly
* dangerous stuff from request.
*
* Check for token mismatch only if the Request method is POST.
* GET Requests would never have token and therefore checking
* mis-match does not make sense.
*/
    public static function checkTokenRequestParam(): void
    {
        global $token_mismatch, $token_provided;
        // pessimistic defaults: assume mismatch until proven otherwise
        $token_mismatch = true;
        $token_provided = false;
        if (($_SERVER['REQUEST_METHOD'] ?? 'GET') !== 'POST') {
            // GET requests never carry a token, nothing to verify
            return;
        }
        if (self::isValid($_POST['token'])) {
            $token_provided = true;
            // hash_equals = constant-time compare; @ silences the warning
            // when the session token was never set
            $token_mismatch = ! @hash_equals($_SESSION[' PMA_token '], $_POST['token']);
        }
        if (! $token_mismatch) {
            return;
        }
        // Warn in case the mismatch is result of failed setting of session cookie
        if (isset($_POST['set_session']) && $_POST['set_session'] !== session_id()) {
            trigger_error(
                __(
                    'Failed to set session cookie. Maybe you are using '
                    . 'HTTP instead of HTTPS to access phpMyAdmin.'
                ),
                E_USER_ERROR
            );
        }
        /**
         * We don't allow any POST operation parameters if the token is mismatched
         * or is not provided.
         */
        $allowList = ['ajax_request'];
        Sanitize::removeRequestVars($allowList);
    }
    /**
     * Validates the 'goto' and 'back' request parameters (target pages) and
     * publishes them to globals, $url_params and the DI container; invalid
     * values are wiped from the request and their cookies removed.
     */
    public static function setGotoAndBackGlobals(ContainerInterface $container, Config $config): void
    {
        global $goto, $back, $url_params;
        // Holds page that should be displayed.
        $goto = '';
        $container->setParameter('goto', $goto);
        if (isset($_REQUEST['goto']) && self::checkPageValidity($_REQUEST['goto'])) {
            $goto = $_REQUEST['goto'];
            $url_params['goto'] = $goto;
            $container->setParameter('goto', $goto);
            $container->setParameter('url_params', $url_params);
        } else {
            // invalid target: drop cookie and every request copy of it
            if ($config->issetCookie('goto')) {
                $config->removeCookie('goto');
            }
            unset($_REQUEST['goto'], $_GET['goto'], $_POST['goto']);
        }
        if (isset($_REQUEST['back']) && self::checkPageValidity($_REQUEST['back'])) {
            // Returning page.
            $back = $_REQUEST['back'];
            $container->setParameter('back', $back);
        } else {
            // invalid return page: same cleanup as above
            if ($config->issetCookie('back')) {
                $config->removeCookie('back');
            }
            unset($_REQUEST['back'], $_GET['back'], $_POST['back']);
        }
    }
/**
 * Opens the user and control connections to the database server.
 *
 * The user connection is opened after the control one so it becomes the
 * default link; on user-connection failure the authentication plugin's
 * failure handler is invoked.
 */
public static function connectToDatabaseServer(DatabaseInterface $dbi, AuthenticationPlugin $auth): void
{
    global $cfg;

    /**
     * Try to connect MySQL with the control user profile (will be used to get the privileges list for the current
     * user but the true user link must be open after this one so it would be default one for all the scripts).
     */
    $controlLink = false;
    if ($cfg['Server']['controluser'] !== '') {
        $controlLink = $dbi->connect(DatabaseInterface::CONNECT_CONTROL);
    }

    // Connects to the server (validates user's login)
    $userLink = $dbi->connect(DatabaseInterface::CONNECT_USER);
    if ($userLink === false) {
        $auth->showFailure('mysql-denied');
    }

    // A dedicated control connection already exists; nothing more to do.
    if ($controlLink) {
        return;
    }

    /**
     * Open separate connection for control queries, this is needed to avoid problems with table locking used in
     * main connection and phpMyAdmin issuing queries to configuration storage, which is not locked by that time.
     */
    $dbi->connect(DatabaseInterface::CONNECT_USER, null, DatabaseInterface::CONNECT_CONTROL);
}
/**
 * Builds the dependency-injection container, loading the service
 * definitions from libraries/services_loader.php.
 */
public static function getContainerBuilder(): ContainerBuilder
{
    $builder = new ContainerBuilder();
    $locator = new FileLocator(ROOT_PATH . 'libraries');
    (new PhpFileLoader($builder, $locator))->load('services_loader.php');

    return $builder;
}
/**
 * Decrypts the "eq" (encrypted query) request parameter and merges the
 * contained key/value pairs back into the matching superglobals.
 *
 * The raw "eq" parameter is always removed. POST takes precedence over
 * GET when both are present.
 *
 * @return void
 */
public static function populateRequestWithEncryptedQueryParams()
{
    $usableEq = (isset($_GET['eq']) && is_string($_GET['eq']))
        || (isset($_POST['eq']) && is_string($_POST['eq']));

    if (! $usableEq) {
        unset($_GET['eq'], $_POST['eq'], $_REQUEST['eq']);

        return;
    }

    $fromPost = isset($_POST['eq']);
    $decrypted = Url::decryptQuery($fromPost ? $_POST['eq'] : $_GET['eq']);
    unset($_GET['eq'], $_POST['eq'], $_REQUEST['eq']);

    if ($decrypted === null) {
        return;
    }

    foreach ((array) json_decode($decrypted) as $paramKey => $paramValue) {
        if ($fromPost) {
            $_POST[$paramKey] = $paramValue;
        } else {
            $_GET[$paramKey] = $paramValue;
        }

        $_REQUEST[$paramKey] = $paramValue;
    }
}
}
| {
"content_hash": "8e717fb02d4566acd38e4ab393b86cce",
"timestamp": "",
"source": "github",
"line_count": 1463,
"max_line_length": 118,
"avg_line_length": 30.365003417635,
"alnum_prop": 0.5073608860075635,
"repo_name": "cytopia/devilbox",
"id": "75a4a7c6ada73dac27a9c6dcd3723661e7b21be6",
"size": "44424",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".devilbox/www/htdocs/vendor/phpmyadmin-5.1.3/libraries/classes/Core.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "86"
},
{
"name": "CSS",
"bytes": "3980"
},
{
"name": "JavaScript",
"bytes": "923"
},
{
"name": "Makefile",
"bytes": "4209"
},
{
"name": "PHP",
"bytes": "221096"
},
{
"name": "Shell",
"bytes": "204888"
}
],
"symlink_target": ""
} |
// Time utility declarations (implemented in utiltime.cpp). The names
// suggest Unix-epoch timestamps at second / milli / microsecond
// resolution — NOTE(review): confirm exact semantics in the .cpp.
int64_t GetTime();
int64_t GetTimeMillis();
int64_t GetTimeMicros();
// Microsecond timestamp intended for log output.
int64_t GetLogTimeMicros();
// Overrides the reported time for tests; see implementation for how a
// zero value is treated.
void SetMockTime(int64_t nMockTimeIn);
// Sleeps the calling thread for n milliseconds.
void MilliSleep(int64_t n);
// Formats nTime (seconds) according to the strftime-style pszFormat.
std::string DateTimeStrFormat(const char* pszFormat, int64_t nTime);
#endif // OMNICOIN_UTILTIME_H
| {
"content_hash": "35deb410befd87516b5da1ce8b540be6",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 68,
"avg_line_length": 26.5,
"alnum_prop": 0.7660377358490567,
"repo_name": "MeshCollider/Omnicoin",
"id": "05f6070dc4e7f9005a7106a8a65dcdc03d452bc2",
"size": "600",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/utiltime.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "775020"
},
{
"name": "C++",
"bytes": "4070071"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "18445"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "Makefile",
"bytes": "63419"
},
{
"name": "Objective-C",
"bytes": "2165"
},
{
"name": "Objective-C++",
"bytes": "7242"
},
{
"name": "Protocol Buffer",
"bytes": "2312"
},
{
"name": "Python",
"bytes": "599494"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Shell",
"bytes": "31469"
}
],
"symlink_target": ""
} |
namespace pythonic
{
  namespace types
  {
    // numpy_expr is a lazy elementwise expression template: Op is applied
    // to the stored operands only when an element is actually requested.

    // Default constructor; operands and shape are default-initialized.
    template <class Op, class... Args>
    numpy_expr<Op, Args...>::numpy_expr()
    {
    }

    // Stores the operands and caches the shape of the operand with the
    // greatest dimensionality (utils::max_element over Args::value).
    template <class Op, class... Args>
    numpy_expr<Op, Args...>::numpy_expr(Args const &... args)
        : args(args...),
          _shape(std::get<utils::max_element<Args::value...>::index>(this->args)
                     .shape())
    {
    }

    // Iteration covers indices [0, _shape[0]) of the outer dimension.
    template <class Op, class... Args>
    typename numpy_expr<Op, Args...>::iterator
    numpy_expr<Op, Args...>::begin() const
    {
      return {*this, 0};
    }

    template <class Op, class... Args>
    typename numpy_expr<Op, Args...>::iterator
    numpy_expr<Op, Args...>::end() const
    {
      return {*this, _shape[0]};
    }

    // Applies Op to the i-th element of every operand; the I... pack
    // indexes the operand tuple.
    template <class Op, class... Args>
    template <int... I>
    auto numpy_expr<Op, Args...>::_fast(long i, utils::seq<I...>) const
        -> decltype(Op()(std::get<I>(args).fast(i)...))
    {
      return Op()(std::get<I>(args).fast(i)...); // FIXME: broadcasting can be
                                                 // achieved here through a
                                                 // modulus, but that's terribly
                                                 // costly
    }

    // Unchecked element access; no negative-index normalization here.
    template <class Op, class... Args>
    auto numpy_expr<Op, Args...>::fast(long i) const
        -> decltype(this->_fast(i,
                                typename utils::gens<sizeof...(Args)>::type{}))
    {
      return _fast(i, typename utils::gens<sizeof...(Args)>::type{});
    }

    // operator[] / operator() wrap negative indices (Python semantics)
    // before delegating to fast().
    template <class Op, class... Args>
    auto numpy_expr<Op, Args...>::operator[](long i) const
        -> decltype(this->fast(i))
    {
      if (i < 0)
        i += _shape[0];
      return fast(i);
    }

    template <class Op, class... Args>
    auto numpy_expr<Op, Args...>::operator()(long i) const
        -> decltype(this->fast(i))
    {
      if (i < 0)
        i += _shape[0];
      return fast(i);
    }

    template <class Op, class... Args>
    array<long, numpy_expr<Op, Args...>::value> const &
    numpy_expr<Op, Args...>::shape() const
    {
      return _shape;
    }

#ifdef USE_BOOST_SIMD
    // Vectorized access: applies Op to a SIMD lane loaded from each
    // operand.
    template <class Op, class... Args>
    template <int... I>
    auto numpy_expr<Op, Args...>::_load(long i, utils::seq<I...>) const
        -> decltype(Op()(std::get<I>(args).load(i)...))
    {
      return Op()(std::get<I>(args).load(i)...);
    }

    template <class Op, class... Args>
    template <class I> // template to prevent automatic instantiation when the
                       // type is not vectorizable
    auto numpy_expr<Op, Args...>::load(I i) const
        -> decltype(this->_load(i,
                                typename utils::gens<sizeof...(Args)>::type{}))
    {
      return _load(i, typename utils::gens<sizeof...(Args)>::type{});
    }
#endif

    // Extended (slice-style) indexing: forwards the index pack to every
    // operand and combines the results with Op.
    template <class Op, class... Args>
    template <int... I, class... S>
    auto numpy_expr<Op, Args...>::_get(utils::seq<I...>, S const &... s) const
        -> decltype(Op{}(std::get<I>(args)(s...)...))
    {
      return Op{}(std::get<I>(args)(s...)...);
    }

    // Enabled only when the first index is not a scalar; scalar indexing
    // is handled by operator()(long) above.
    template <class Op, class... Args>
    template <class S0, class... S>
    auto numpy_expr<Op, Args...>::operator()(S0 const &s0, S const &... s) const
        -> typename std::enable_if<
            not std::is_scalar<S0>::value,
            decltype(this->_get(typename utils::gens<sizeof...(Args)>::type{},
                                s0, s...))>::type
    {
      return _get(typename utils::gens<sizeof...(Args)>::type{}, s0, s...);
    }

    // Filtering with a numeric-expression mask yields a numpy_fexpr view
    // over this expression.
    template <class Op, class... Args>
    template <class F>
    typename std::enable_if<is_numexpr_arg<F>::value,
                            numpy_fexpr<numpy_expr<Op, Args...>, F>>::type
    numpy_expr<Op, Args...>::fast(F const &filter) const
    {
      return numpy_fexpr<numpy_expr, F>(*this, filter);
    }

    template <class Op, class... Args>
    template <class F>
    typename std::enable_if<is_numexpr_arg<F>::value,
                            numpy_fexpr<numpy_expr<Op, Args...>, F>>::type
    numpy_expr<Op, Args...>::
    operator[](F const &filter) const
    {
      return fast(filter);
    }

    // Flat element count: the maximum flat_size over all operands.
    template <class Op, class... Args>
    template <int... I>
    long numpy_expr<Op, Args...>::_flat_size(utils::seq<I...>) const
    {
      long const sizes[] = {std::get<I>(args).flat_size()...};
      return *std::max_element(std::begin(sizes), std::end(sizes));
    }

    template <class Op, class... Args>
    long numpy_expr<Op, Args...>::flat_size() const
    {
      return _flat_size(typename utils::gens<sizeof...(Args)>::type{});
    }

    // Length of the outer dimension.
    template <class Op, class... Args>
    long numpy_expr<Op, Args...>::size() const
    {
      return _shape[0];
    }
  }
}
#endif
| {
"content_hash": "eab9282bf4e0ce39e864753e8aa49b81",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 80,
"avg_line_length": 29.88607594936709,
"alnum_prop": 0.5120711562897078,
"repo_name": "hainm/pythran",
"id": "fe5b693b959bf809a52dc34a5fa34b8d18caba83",
"size": "4885",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pythran/pythonic/types/numpy_expr.hpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "1370181"
},
{
"name": "Makefile",
"bytes": "1185"
},
{
"name": "Python",
"bytes": "1195224"
},
{
"name": "Shell",
"bytes": "264"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2005-2011 The Kuali Foundation
Licensed under the Educational Community License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.opensource.org/licenses/ecl2.php
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<data xmlns="ns:workflow" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="ns:workflow resource:WorkflowData">
<styles xmlns="ns:workflow/Style" xsi:schemaLocation="ns:workflow/Style resource:Style">
<style name="an_arbitrary_style">
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:wf="http://xml.apache.org/xalan/java/org.kuali.rice.edl.framework.util.EDLFunctionstionstions">
<xsl:include href="widgets"/>
<xsl:output doctype-public="-//W3C//DTD XHTML 1.0 Transitional//EN" indent="yes" method="html" omit-xml-declaration="yes"/>
<xsl:param name="overrideMain" select="'false'"/>
<xsl:template name="mainBody">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="bord-r-t" width="80%" xmlns="http://www.w3.org/1999/xhtml">
<xsl:for-each select="//fieldDef">
<tr>
<td class="thnormal" width="30%">
<xsl:call-template name="widget_render">
<xsl:with-param name="fieldName" select="@name"/>
<xsl:with-param name="renderCmd" select="'title'"/>
</xsl:call-template>
</td>
<td class="datacell">
<xsl:call-template name="widget_render">
<xsl:with-param name="fieldName" select="@name"/>
<xsl:with-param name="renderCmd" select="'input'"/>
</xsl:call-template>
</td>
</tr>
</xsl:for-each>
</table>
</xsl:template>
<xsl:template name="mainForm">
Set overrideMain=true and what you can call widget templates as you see fit...
</xsl:template>
</xsl:stylesheet>
</style>
</styles>
</data>
| {
"content_hash": "c95dd87ad776b78f0697a10fd7b9a2f1",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 194,
"avg_line_length": 57.21153846153846,
"alnum_prop": 0.5196638655462185,
"repo_name": "ua-eas/ua-rice-2.1.9",
"id": "66699c56f734742d3cd443f12bd51241ac2dd0dc",
"size": "2975",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "it/kew/src/test/resources/org/kuali/rice/kew/xml/export/StyleExportConfig.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "430866"
},
{
"name": "Groovy",
"bytes": "2203909"
},
{
"name": "Java",
"bytes": "25128172"
},
{
"name": "JavaScript",
"bytes": "1613350"
},
{
"name": "PHP",
"bytes": "15766"
},
{
"name": "Shell",
"bytes": "1583"
},
{
"name": "XSLT",
"bytes": "107653"
}
],
"symlink_target": ""
} |
package org.elasticsearch.transport;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
/**
 * This class encapsulates all remote cluster information to be rendered on
 * <tt>_remote/info</tt> requests.
 */
public final class RemoteConnectionInfo implements ToXContentFragment, Writeable {
    // Configured seed addresses for the remote cluster.
    final List<TransportAddress> seedNodes;
    // Published HTTP addresses of the remote cluster's nodes.
    final List<TransportAddress> httpAddresses;
    // Maximum number of connections maintained to the remote cluster.
    final int connectionsPerCluster;
    // Timeout applied when initially connecting.
    final TimeValue initialConnectionTimeout;
    // How many remote nodes are currently connected.
    final int numNodesConnected;
    // Alias under which the remote cluster is registered.
    final String clusterAlias;
    // Whether the cluster may be skipped when unavailable.
    final boolean skipUnavailable;

    RemoteConnectionInfo(String clusterAlias, List<TransportAddress> seedNodes,
                         List<TransportAddress> httpAddresses,
                         int connectionsPerCluster, int numNodesConnected,
                         TimeValue initialConnectionTimeout, boolean skipUnavailable) {
        this.clusterAlias = clusterAlias;
        this.seedNodes = seedNodes;
        this.httpAddresses = httpAddresses;
        this.connectionsPerCluster = connectionsPerCluster;
        this.numNodesConnected = numNodesConnected;
        this.initialConnectionTimeout = initialConnectionTimeout;
        this.skipUnavailable = skipUnavailable;
    }

    /**
     * Deserializes from the wire. Read order must mirror the write order
     * in {@link #writeTo(StreamOutput)} exactly.
     */
    public RemoteConnectionInfo(StreamInput input) throws IOException {
        seedNodes = input.readList(TransportAddress::new);
        httpAddresses = input.readList(TransportAddress::new);
        connectionsPerCluster = input.readVInt();
        initialConnectionTimeout = new TimeValue(input);
        numNodesConnected = input.readVInt();
        clusterAlias = input.readString();
        // skipUnavailable was added on the wire in 6.1.0; older senders
        // never wrote it, so default to false.
        if (input.getVersion().onOrAfter(Version.V_6_1_0)) {
            skipUnavailable = input.readBoolean();
        } else {
            skipUnavailable = false;
        }
    }

    /**
     * Renders this cluster's info as one object keyed by the cluster alias.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(clusterAlias);
        {
            builder.startArray("seeds");
            for (TransportAddress addr : seedNodes) {
                builder.value(addr.toString());
            }
            builder.endArray();
            builder.startArray("http_addresses");
            for (TransportAddress addr : httpAddresses) {
                builder.value(addr.toString());
            }
            builder.endArray();
            builder.field("connected", numNodesConnected > 0);
            builder.field("num_nodes_connected", numNodesConnected);
            builder.field("max_connections_per_cluster", connectionsPerCluster);
            builder.field("initial_connect_timeout", initialConnectionTimeout);
            builder.field("skip_unavailable", skipUnavailable);
        }
        builder.endObject();
        return builder;
    }

    /**
     * Serializes to the wire; keep field order in sync with the
     * {@link #RemoteConnectionInfo(StreamInput)} constructor.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeList(seedNodes);
        out.writeList(httpAddresses);
        out.writeVInt(connectionsPerCluster);
        initialConnectionTimeout.writeTo(out);
        out.writeVInt(numNodesConnected);
        out.writeString(clusterAlias);
        // Only 6.1.0+ receivers understand the extra boolean.
        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
            out.writeBoolean(skipUnavailable);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        RemoteConnectionInfo that = (RemoteConnectionInfo) o;
        return connectionsPerCluster == that.connectionsPerCluster &&
            numNodesConnected == that.numNodesConnected &&
            Objects.equals(seedNodes, that.seedNodes) &&
            Objects.equals(httpAddresses, that.httpAddresses) &&
            Objects.equals(initialConnectionTimeout, that.initialConnectionTimeout) &&
            Objects.equals(clusterAlias, that.clusterAlias) &&
            skipUnavailable == that.skipUnavailable;
    }

    @Override
    public int hashCode() {
        return Objects.hash(seedNodes, httpAddresses, connectionsPerCluster, initialConnectionTimeout,
            numNodesConnected, clusterAlias, skipUnavailable);
    }
}
| {
"content_hash": "4c66dc4e2868ec7028b7c3923d055921",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 102,
"avg_line_length": 40.54867256637168,
"alnum_prop": 0.6811436054124836,
"repo_name": "ThiagoGarciaAlves/elasticsearch",
"id": "f95243921e9bd84e551129d248fee2ec4c5ef518",
"size": "5370",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "core/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "10240"
},
{
"name": "Groovy",
"bytes": "451"
},
{
"name": "HTML",
"bytes": "1338"
},
{
"name": "Java",
"bytes": "28645812"
},
{
"name": "Perl",
"bytes": "13729"
},
{
"name": "Python",
"bytes": "86811"
},
{
"name": "Ruby",
"bytes": "17776"
},
{
"name": "Shell",
"bytes": "93213"
},
{
"name": "XSLT",
"bytes": "885"
}
],
"symlink_target": ""
} |
package gitj.ui.dialogs;
import iconlib.IconUtils;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JTextField;
import javax.swing.LayoutStyle.ComponentPlacement;
import git.Repository;
import gitj.tasks.CreateStashTask;
import gitj.ui.MainFrame;
/**
 * Modal dialog asking for a stash message and whether staged changes
 * should be kept; on confirmation it schedules a CreateStashTask on the
 * main frame.
 */
@SuppressWarnings("serial")
public class DialogCreateStash extends JDialog {

	// Owning frame, used to run the stash task.
	private MainFrame parent;
	// Repository the stash is created in.
	private Repository repo;
	// Free-form stash message entered by the user.
	private JTextField textField;
	private JCheckBox chckbxKeepStagedChanges;

	/**
	 * Builds the dialog UI (WindowBuilder-style GroupLayout) and centers
	 * it on screen. The dialog is modal and always on top.
	 */
	public DialogCreateStash(MainFrame parent, Repository repo) {
		setIconImage(IconUtils.getIcon("stash-add").getImage());
		setTitle("Stash");
		setAlwaysOnTop(true);
		setModal(true);
		this.parent = parent;
		this.repo = repo;
		setBounds(100, 100, 450, 137);

		JButton btnCancel = new JButton("Cancel");
		btnCancel.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent arg0) {
				cancel();
			}
		});

		JButton btnStash = new JButton("Stash");
		btnStash.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				stash();
			}
		});

		JLabel lblMessage = new JLabel("Message");

		textField = new JTextField();
		textField.setColumns(10);

		chckbxKeepStagedChanges = new JCheckBox("Keep staged changes");

		// Generated layout code below; edit with care.
		GroupLayout groupLayout = new GroupLayout(getContentPane());
		groupLayout.setHorizontalGroup(
			groupLayout.createParallelGroup(Alignment.LEADING)
				.addGroup(groupLayout.createSequentialGroup()
					.addContainerGap()
					.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
						.addGroup(groupLayout.createSequentialGroup()
							.addComponent(chckbxKeepStagedChanges)
							.addPreferredGap(ComponentPlacement.RELATED, 155, Short.MAX_VALUE)
							.addComponent(btnStash)
							.addPreferredGap(ComponentPlacement.RELATED)
							.addComponent(btnCancel))
						.addGroup(groupLayout.createSequentialGroup()
							.addComponent(lblMessage)
							.addPreferredGap(ComponentPlacement.RELATED)
							.addComponent(textField, GroupLayout.DEFAULT_SIZE, 368, Short.MAX_VALUE)))
					.addContainerGap())
		);
		groupLayout.setVerticalGroup(
			groupLayout.createParallelGroup(Alignment.TRAILING)
				.addGroup(groupLayout.createSequentialGroup()
					.addContainerGap()
					.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
						.addComponent(textField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
						.addComponent(lblMessage))
					.addPreferredGap(ComponentPlacement.RELATED, 33, Short.MAX_VALUE)
					.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
						.addComponent(chckbxKeepStagedChanges)
						.addComponent(btnCancel)
						.addComponent(btnStash))
					.addContainerGap())
		);
		getContentPane().setLayout(groupLayout);

		setLocationRelativeTo(null);
	}

	/**
	 * Schedules stash creation with the entered message and checkbox state.
	 * NOTE(review): unlike cancel(), this leaves the dialog open — confirm
	 * that is intended.
	 */
	public void stash() {
		parent.runTask(new CreateStashTask(repo, textField.getText(), chckbxKeepStagedChanges.isSelected()));
	}

	/** Closes and disposes the dialog without stashing. */
	public void cancel() {
		setVisible(false);
		dispose();
	}
}
| {
"content_hash": "194df03bbaa4bace34e24309d25f2664",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 112,
"avg_line_length": 31.048076923076923,
"alnum_prop": 0.7550325178073707,
"repo_name": "redpois0n/gitj",
"id": "55188ce8cadee82eb712dce477a22d608f2cfb63",
"size": "3229",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/gitj/ui/dialogs/DialogCreateStash.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "136192"
}
],
"symlink_target": ""
} |
Documentation for the [Inesita](https://github.com/inesita-rb/inesita) framework is located [here](https://inesita.fazibear.me).

## Thank you!
[](https://www.patreon.com/bePatron?u=6912974)
| {
"content_hash": "edc1e7eaf777d5477cc45f4fa46723fa",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 128,
"avg_line_length": 53.142857142857146,
"alnum_prop": 0.7688172043010753,
"repo_name": "inesita-rb/documentation",
"id": "9cc814a8be853d4eaf5d7c33cc8f427c2d4de0b6",
"size": "529",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "28528"
},
{
"name": "HTML",
"bytes": "4922"
},
{
"name": "JavaScript",
"bytes": "404793"
},
{
"name": "Ruby",
"bytes": "4652"
}
],
"symlink_target": ""
} |
package io.upit.services;
import io.upit.UpitServiceException;
import io.upit.dal.DAO;
import io.upit.dal.models.Resource;
import java.io.Serializable;
/**
 * Generic CRUD service that delegates every operation to the DAO for the
 * given resource type.
 *
 * @param <ResourceClass> concrete resource type handled by this service
 * @param <IDType>        type of the resource identifier
 */
public class AbstractResourceService<ResourceClass extends Resource<IDType>, IDType extends Serializable> {

    // NOTE(review): stored but never read in this class — presumably kept
    // for subclasses; confirm before removing.
    private final Class<ResourceClass> resourceClass;
    // DAO that performs the actual persistence operations.
    private final DAO<ResourceClass, IDType> resourceClassDAO;

    public AbstractResourceService(Class<ResourceClass> resourceClass, DAO<ResourceClass, IDType> resourceClassDAO) {
        this.resourceClass = resourceClass;
        this.resourceClassDAO = resourceClassDAO;
    }

    /** Persists a new resource and returns the stored instance. */
    public ResourceClass create(ResourceClass resource) throws UpitServiceException {
        return resourceClassDAO.create(resource);
    }

    /** Updates an existing resource and returns the stored instance. */
    public ResourceClass update(ResourceClass resource) {
        return resourceClassDAO.update(resource);
    }

    /** Deletes the given resource, returning the DAO's result. */
    public ResourceClass delete(ResourceClass resource) {
        return resourceClassDAO.delete(resource);
    }

    /** Deletes the resource with the given id, returning the DAO's result. */
    public ResourceClass deleteById(IDType id) {
        return resourceClassDAO.deleteById(id);
    }

    /** Looks up a resource by id. */
    public ResourceClass getById(IDType id) {
        return resourceClassDAO.getById(id);
    }
}
| {
"content_hash": "44713dd9941eced1022f85a64f93593e",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 117,
"avg_line_length": 30.205128205128204,
"alnum_prop": 0.7487266553480475,
"repo_name": "GeorgeMH/upit",
"id": "9f9a842aa6a4df760a210e317fb75be0f436d771",
"size": "1178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "upit-core-api/src/main/java/io/upit/services/AbstractResourceService.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "24179"
},
{
"name": "CSS",
"bytes": "610"
},
{
"name": "HTML",
"bytes": "22192"
},
{
"name": "Java",
"bytes": "104354"
},
{
"name": "JavaScript",
"bytes": "37338"
}
],
"symlink_target": ""
} |
class TargetPool(GCPResource):
    '''Object to represent a gcp targetPool'''
    resource_type = "compute.v1.targetPool"

    # pylint: disable=too-many-arguments
    def __init__(self,
                 rname,
                 project,
                 zone,
                 desc,
                 region,
                 health_checks=None,
                 instances=None,
                 session_affinity=None,
                ):
        '''constructor for gcp targetPool resource

        rname/project/zone: passed through to GCPResource
        desc: human readable description
        region: region the pool lives in
        health_checks: list of health-check resource names
        instances: list of instance resource names
        session_affinity: affinity mode; rendered as 'NONE' when not given
        '''
        super(TargetPool, self).__init__(rname,
                                         TargetPool.resource_type,
                                         project,
                                         zone)
        self._region = region
        self._desc = desc
        self._session_affinity = session_affinity
        self._instances = instances
        self._health_checks = health_checks
        # $(ref...) selfLink strings, built lazily on first access
        self._instance_refs = None
        self._health_checks_refs = None

    @property
    def description(self):
        '''property for resource description'''
        return self._desc

    @property
    def region(self):
        '''property for resource region'''
        return self._region

    @property
    def session_affinity(self):
        '''property for resource session_affinity'''
        return self._session_affinity

    @property
    def instances(self):
        '''property for resource instances'''
        return self._instances

    @property
    def health_checks(self):
        '''property for resource health_checks'''
        return self._health_checks

    @property
    def instance_refs(self):
        '''property for resource instance references'''
        if self._instance_refs is None:
            self._instance_refs = ['$(ref.%s.selfLink)' % inst for inst in self.instances]
        return self._instance_refs

    @property
    def health_checks_refs(self):
        '''property for resource health-check references'''
        if self._health_checks_refs is None:
            self._health_checks_refs = ['$(ref.%s.selfLink)' % check for check in self.health_checks]
        return self._health_checks_refs

    def to_resource(self):
        """ return the resource representation"""
        return {'name': self.name,
                'type': TargetPool.resource_type,
                'properties': {'description': self.description,
                               'healthChecks': self.health_checks_refs,
                               'instances': self.instance_refs,
                               # bug fix: honor the configured session affinity
                               # instead of always emitting 'NONE'
                               'sessionAffinity': self.session_affinity or 'NONE',
                               'region': self.region,
                              }
               }
"content_hash": "a9441d3080e59f2a9964841e08d4b71b",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 101,
"avg_line_length": 32.5,
"alnum_prop": 0.5223264540337711,
"repo_name": "drewandersonnz/openshift-tools",
"id": "2e10c298561c8f67f8979e4178d1bf6e2e9ccace",
"size": "2733",
"binary": false,
"copies": "13",
"ref": "refs/heads/prod",
"path": "ansible/roles/lib_gcloud/build/lib/target_pool.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24919"
},
{
"name": "Dockerfile",
"bytes": "10248"
},
{
"name": "Go",
"bytes": "127388"
},
{
"name": "Groovy",
"bytes": "6322"
},
{
"name": "HTML",
"bytes": "67678"
},
{
"name": "JavaScript",
"bytes": "9573"
},
{
"name": "Makefile",
"bytes": "1108"
},
{
"name": "PHP",
"bytes": "30017"
},
{
"name": "Python",
"bytes": "19774421"
},
{
"name": "Shell",
"bytes": "553874"
}
],
"symlink_target": ""
} |
package builds
import (
g "github.com/onsi/ginkgo"
o "github.com/onsi/gomega"
exutil "github.com/openshift/origin/test/extended/util"
)
var _ = g.Describe("[sig-builds][Feature:Builds] buildconfig secret injector", func() {
defer g.GinkgoRecover()
var (
itemsPath = exutil.FixturePath("testdata", "builds", "test-buildconfigsecretinjector.yaml")
oc = exutil.NewCLI("buildconfigsecretinjector")
)
g.Context("", func() {
g.BeforeEach(func() {
exutil.PreTestDump()
})
g.JustBeforeEach(func() {
g.By("creating buildconfigs")
err := oc.Run("create").Args("-f", itemsPath).Execute()
o.Expect(err).NotTo(o.HaveOccurred())
})
g.AfterEach(func() {
if g.CurrentGinkgoTestDescription().Failed {
exutil.DumpPodStates(oc)
exutil.DumpPodLogsStartingWith("", oc)
}
})
g.It("should inject secrets to the appropriate buildconfigs [apigroup:build.openshift.io]", func() {
out, err := oc.Run("get").Args("bc/test1", "-o", "template", "--template", "{{.spec.source.sourceSecret.name}}").Output()
o.Expect(err).NotTo(o.HaveOccurred())
o.Expect(out).To(o.Equal("secret1"))
out, err = oc.Run("get").Args("bc/test2", "-o", "template", "--template", "{{.spec.source.sourceSecret.name}}").Output()
o.Expect(err).NotTo(o.HaveOccurred())
o.Expect(out).To(o.Equal("secret2"))
out, err = oc.Run("get").Args("bc/test3", "-o", "template", "--template", "{{.spec.source.sourceSecret.name}}").Output()
o.Expect(err).NotTo(o.HaveOccurred())
o.Expect(out).To(o.Equal("secret3"))
out, err = oc.Run("get").Args("bc/test4", "-o", "template", "--template", "{{.spec.source.sourceSecret.name}}").Output()
o.Expect(err).NotTo(o.HaveOccurred())
o.Expect(out).To(o.Equal("<no value>"))
})
})
})
| {
"content_hash": "038d365f3a1bee765e30bfad5368e026",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 124,
"avg_line_length": 32.574074074074076,
"alnum_prop": 0.6480955088118249,
"repo_name": "csrwng/origin",
"id": "bc7b42cfc9e4221dc3a2b9f943776a2d0396a08e",
"size": "1759",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/extended/builds/buildconfigsecretinjector.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "921"
},
{
"name": "Go",
"bytes": "4315970"
},
{
"name": "HTML",
"bytes": "19660"
},
{
"name": "Makefile",
"bytes": "2855"
},
{
"name": "Shell",
"bytes": "225122"
}
],
"symlink_target": ""
} |
import gi
try:
    # Pin GObject-introspection API versions before the Gtk/Gdk imports
    # below; gi.require_version raises ValueError for unknown versions.
    gi.require_version('Gtk', '3.0')
    gi.require_version('Gdk', '3.0')
    gi.require_version('GdkPixbuf', '2.0')
except ValueError as e:
    # Without the required typelibs the application cannot run.
    print(e)
    exit(1)
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkPixbuf
import os
# Shared cursor instances: the normal arrow pointer and the busy (watch)
# cursor shown during long operations.
DEFAULT_CURSOR = Gdk.Cursor(Gdk.CursorType.ARROW)
WAIT_CURSOR = Gdk.Cursor(Gdk.CursorType.WATCH)
def load_css(css_filename):
    """Read a CSS file and install it as a user-priority style provider
    for the default Gdk screen."""
    with open(css_filename, 'r') as css_file:
        css_code = css_file.read()
    style_provider = Gtk.CssProvider()
    style_provider.load_from_data(css_code.encode())
    Gtk.StyleContext.add_provider_for_screen(
        Gdk.Screen.get_default(),
        style_provider,
        Gtk.STYLE_PROVIDER_PRIORITY_USER)
def load_image(filename, size=24):
    """Load an image scaled to ``size`` x ``size`` pixels.

    Returns a Gtk.Image, or None when ``filename`` does not exist.
    """
    if os.path.exists(filename):
        pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(filename, size, size)
        return Gtk.Image.new_from_pixbuf(pixbuf)
    return None
def redondea(valor):
    '''Truncate ``valor`` to one decimal place (toward zero).'''
    return int(valor * 10.0) / 10.0
def redondea_digits(valor, digits=0):
    '''Round ``valor`` to ``digits`` decimals; with 0 digits return an int.'''
    rounded = round(valor, digits)
    return int(rounded) if digits == 0 else rounded
def s2f(cadena):
    """Convert ``cadena`` to float, falling back to 0.0 on bad input.

    Accepts anything ``float()`` accepts; returns 0.0 for values that
    cannot be converted (e.g. None, '' or non-numeric strings).
    """
    try:
        return float(cadena)
    except (TypeError, ValueError):
        # Bug fix: was "except BaseException", which also swallowed
        # KeyboardInterrupt/SystemExit; float() only raises these two.
        return 0.0
def s2f_print(word):
    """Convert ``word`` to float; on failure print the error and return 0."""
    try:
        return float(word)
    except (TypeError, ValueError) as e:
        # Narrowed from "except Exception": float() only raises these two.
        print('error:', str(e))
        return 0
def cambia(valor, a, SI=True):
    '''Convert a temperature string to unit ``a`` ('C', 'F' or 'K').

    The input is Celsius when ``SI`` is True, otherwise Fahrenheit (which
    is first normalized to Celsius). Returns '' for an empty string.
    '''
    if len(valor) == 0:
        return ''
    grados = float(valor)
    if not SI:
        # Input was Fahrenheit: normalize to Celsius first.
        grados = redondea(5.0 / 9.0 * (grados - 32.0))
    if a == 'F':
        return str(redondea(grados * 9.0 / 5.0 + 32.0))
    if a == 'K':
        return str(redondea(grados + 273.15))
    return str(grados)
def change_temperature(valor, a):
    """Convert a Fahrenheit temperature (string or number) to unit ``a``.

    ``a`` may be 'C' or 'K'; any other value leaves the input in ºF.
    Returns the rounded result as a string.
    """
    fahrenheit = s2f(valor)
    if a == 'C':
        converted = 5.0 / 9.0 * (fahrenheit - 32.0)
    elif a == 'K':
        converted = 5.0 / 9.0 * (fahrenheit - 32.0) + 273.15
    else:
        converted = fahrenheit
    return str(redondea_digits(converted))
def fa2f(temperature):
    '''Convert a temperature from Kelvin to Fahrenheit.'''
    celsius = temperature - 273.15
    return celsius * 9.0 / 5.0 + 32.0
def f2c(temperature):
    '''Convert a Fahrenheit temperature (string or number) to Celsius.'''
    delta = s2f(temperature) - 32.0
    return delta * 5.0 / 9.0
| {
"content_hash": "edb3a2ff2cbf9b329da9cea4989e8c07",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 77,
"avg_line_length": 23.554347826086957,
"alnum_prop": 0.6035994462390402,
"repo_name": "atareao/my-weather-indicator",
"id": "008d6b75d709abd4ea8389c607a309638de2f365",
"size": "3384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16797"
},
{
"name": "HTML",
"bytes": "8469"
},
{
"name": "JavaScript",
"bytes": "80848"
},
{
"name": "Python",
"bytes": "401750"
}
],
"symlink_target": ""
} |
namespace base {
class FilePath;
}
@class NSString;
// Internal-only additions to SnapshotCache exposing the on-disk locations
// of cached snapshots.
@interface SnapshotCache (Internal)
// Returns filepath to the color snapshot of `snapshotID`.
- (base::FilePath)imagePathForSnapshotID:(NSString*)snapshotID;
// Returns filepath to the greyscale snapshot of `snapshotID`.
- (base::FilePath)greyImagePathForSnapshotID:(NSString*)snapshotID;
@end
#endif // IOS_CHROME_BROWSER_SNAPSHOTS_SNAPSHOT_CACHE_INTERNAL_H_
| {
"content_hash": "223b3c57b56bb7bfbe0a9adb8bb3bee9",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 67,
"avg_line_length": 29.785714285714285,
"alnum_prop": 0.7817745803357314,
"repo_name": "chromium/chromium",
"id": "1fc28d85d215b703f4f8c6958eb91c28446a3781",
"size": "756",
"binary": false,
"copies": "7",
"ref": "refs/heads/main",
"path": "ios/chrome/browser/snapshots/snapshot_cache_internal.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import {filter, reduce} from './utils';
// A line/column position within a source file.
export interface Pos {
  line: number;
  col: number;
}
// One coverage block: a source span with its statement and hit counts.
export interface Block {
  statements: number;  // statements contained in the span
  hits: number;        // times the span was executed
  start: Pos;
  end: Pos;
}
// Coverage data for a single file: an ordered list of blocks plus
// statement totals derived from them.
export class FileCoverage {
  public blocks: Block[] = [];

  constructor(readonly filename: string, readonly fileNumber: number) {}

  public addBlock(block: Block) {
    this.blocks.push(block);
  }

  // Total number of statements across all blocks.
  get totalStatements(): number {
    let total = 0;
    for (const block of this.blocks) {
      total += block.statements;
    }
    return total;
  }

  // Statements belonging to blocks that were executed at least once.
  get coveredStatements(): number {
    let covered = 0;
    for (const block of this.blocks) {
      if (block.hits > 0) {
        covered += block.statements;
      }
    }
    return covered;
  }
}
// Aggregated coverage for files sharing a common path prefix. The root
// Coverage has an empty prefix; sub-trees come from getCoverageForPrefix()
// and the children getter.
export class Coverage {
  // Files keyed by their full (unstripped) filename.
  public files = new Map<string, FileCoverage>();

  constructor(readonly mode: string, readonly prefix = '') {}

  public addFile(file: FileCoverage): void {
    this.files.set(file.filename, file);
  }

  public getFile(name: string): FileCoverage|undefined {
    return this.files.get(name);
  }

  // Files whose name starts with this.prefix + prefix.
  public getFilesWithPrefix(prefix: string): Map<string, FileCoverage> {
    return new Map(filter(
        this.files.entries(), ([k]) => k.startsWith(this.prefix + prefix)));
  }

  // Sub-coverage restricted to this.prefix + prefix; FileCoverage
  // instances are shared, not copied.
  public getCoverageForPrefix(prefix: string): Coverage {
    const subCoverage = new Coverage(this.mode, this.prefix + prefix);
    for (const [filename, file] of this.files) {
      if (filename.startsWith(this.prefix + prefix)) {
        subCoverage.addFile(file);
      }
    }
    return subCoverage;
  }

  // Immediate children: one Coverage per first path segment below the
  // current prefix; directory keys end with '/'.
  get children(): Map<string, Coverage> {
    const children = new Map();
    for (const path of this.files.keys()) {
      // tslint:disable-next-line:prefer-const
      let [dir, rest] = path.substr(this.prefix.length).split('/', 2);
      if (!children.has(dir)) {
        if (rest) {
          dir += '/';
        }
        children.set(dir, this.getCoverageForPrefix(dir));
      }
    }
    return children;
  }

  // Last path segment of the prefix; keeps a trailing '/' for directories.
  get basename(): string {
    if (this.prefix.endsWith('/')) {
      return this.prefix.substring(0, this.prefix.length - 1).split('/').pop() +
          '/';
    }
    return this.prefix.split('/').pop()!;
  }

  get totalStatements(): number {
    return reduce(this.files.values(), (acc, f) => acc + f.totalStatements, 0);
  }

  get coveredStatements(): number {
    return reduce(
        this.files.values(), (acc, f) => acc + f.coveredStatements, 0);
  }

  get totalFiles(): number {
    return this.files.size;
  }

  // Number of files with at least one covered statement.
  get coveredFiles(): number {
    return reduce(
        this.files.values(),
        (acc, f) => acc + (f.coveredStatements > 0 ? 1 : 0), 0);
  }
}
/**
 * Parses a whole coverage profile. The first line must be the mode header
 * ("mode: <mode>"); each following non-empty line is one coverage block.
 * Files are numbered in order of first appearance.
 */
export function parseCoverage(content: string): Coverage {
  const lines = content.split('\n');
  const modeLine = lines.shift()!;
  const [modeLabel, mode] = modeLine.split(':').map((x) => x.trim());
  if (modeLabel !== 'mode') {
    throw new Error('Expected to start with mode line.');
  }
  const coverage = new Coverage(mode);
  // Assigns each new file a stable, increasing index.
  let fileCounter = 0;
  for (const line of lines) {
    if (line === '') {
      continue;
    }
    // parseLine yields {filename, ...Block}; strip the filename and keep
    // the rest as the block.
    const {filename, ...block} = parseLine(line);
    let file = coverage.getFile(filename);
    if (!file) {
      file = new FileCoverage(filename, fileCounter++);
      coverage.addFile(file);
    }
    file.addBlock(block);
  }
  return coverage;
}
/**
 * Parses one profile line of the form
 * "path/file.go:START_LINE.START_COL,END_LINE.END_COL STATEMENTS HITS"
 * into a Block plus its filename. Negative hit counts are clamped to 0.
 */
function parseLine(line: string): Block&{filename: string} {
  const [filename, block] = line.split(':');
  const [positions, statements, hits] = block.split(' ');
  const [start, end] = positions.split(',');
  // Bug fix: `.map(parseInt)` passed the element index as parseInt's radix
  // argument, so the second component was parsed with radix 1 and became
  // NaN. The radix must be pinned explicitly.
  const [startLine, startCol] = start.split('.').map((n) => parseInt(n, 10));
  const [endLine, endCol] = end.split('.').map((n) => parseInt(n, 10));
  return {
    end: {
      col: endCol,
      line: endLine,
    },
    filename,
    hits: Math.max(0, Number(hits)),
    start: {
      col: startCol,
      line: startLine,
    },
    statements: Number(statements),
  };
}
| {
"content_hash": "ed62a45bda9140c6feb17eb8976e0b79",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 80,
"avg_line_length": 24.95394736842105,
"alnum_prop": 0.6090166095438967,
"repo_name": "lavalamp/test-infra",
"id": "f0f5c1c05d5a9d3babd26434200c835a401f2bfa",
"size": "4362",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "gopherage/cmd/html/static/parser.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "33021"
},
{
"name": "Dockerfile",
"bytes": "31806"
},
{
"name": "Go",
"bytes": "5061108"
},
{
"name": "HTML",
"bytes": "54898"
},
{
"name": "JavaScript",
"bytes": "74872"
},
{
"name": "Makefile",
"bytes": "34958"
},
{
"name": "Python",
"bytes": "1079244"
},
{
"name": "Shell",
"bytes": "139373"
},
{
"name": "TypeScript",
"bytes": "175375"
}
],
"symlink_target": ""
} |
package org.drools.karaf.itest;
import static org.ops4j.pax.exam.CoreOptions.maven;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.features;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import org.apache.camel.CamelContext;
import org.apache.camel.osgi.CamelContextFactory;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.karaf.options.KarafDistributionOption;
import org.ops4j.pax.exam.options.DefaultCompositeOption;
import org.ops4j.pax.exam.options.MavenArtifactProvisionOption;
import org.ops4j.pax.exam.options.UrlReference;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for Karaf/pax-exam integration tests. Provides helpers to look
 * up installed OSGi bundles, create an OSGi-aware CamelContext, and assemble
 * pax-exam provisioning options for Camel and Drools features.
 */
abstract public class KarafIntegrationTestSupport extends CamelTestSupport {

    protected static final transient Logger LOG = LoggerFactory.getLogger(KarafIntegrationTestSupport.class);

    // Injected by pax-exam once the probe bundle is running.
    @Inject
    protected BundleContext bundleContext;

    /**
     * Returns the installed bundle with the given symbolic name. If no such
     * bundle exists, all known bundle names are logged (to aid diagnosis)
     * and a RuntimeException is thrown.
     */
    protected Bundle getInstalledBundle(String symbolicName) {
        for (Bundle b : bundleContext.getBundles()) {
            if (b.getSymbolicName().equals(symbolicName)) {
                return b;
            }
        }
        for (Bundle b : bundleContext.getBundles()) {
            LOG.warn("Bundle: " + b.getSymbolicName());
        }
        throw new RuntimeException("Bundle " + symbolicName + " does not exist");
    }

    /** Creates a CamelContext bound to this test's BundleContext. */
    @Override
    protected CamelContext createCamelContext() throws Exception {
        LOG.info("Get the bundleContext is " + bundleContext);
        LOG.info("Application installed as bundle id: " + bundleContext.getBundle().getBundleId());
        setThreadContextClassLoader();
        CamelContextFactory factory = new CamelContextFactory();
        factory.setBundleContext(bundleContext);
        factory.setRegistry(createRegistry());
        return factory.createContext();
    }

    protected void setThreadContextClassLoader() {
        // set the thread context classloader current bundle classloader
        Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
    }

    /** Provisioning option for the given Maven artifact (no version pinned here). */
    public static MavenArtifactProvisionOption getFeatureUrl(String groupId, String camelId) {
        return mavenBundle().groupId(groupId).artifactId(camelId);
    }

    /** Camel Karaf feature descriptor, version resolved from the project POM. */
    public static UrlReference getCamelKarafFeatureUrl() {
        return getCamelKarafFeatureUrl(null);
    }

    /**
     * Camel Karaf feature descriptor. A null version means "use the version
     * from the project POM".
     */
    public static UrlReference getCamelKarafFeatureUrl(String version) {
        String type = "xml/features";
        MavenArtifactProvisionOption mavenOption = mavenBundle().groupId("org.apache.camel.karaf").artifactId("apache-camel");
        if (version == null) {
            return mavenOption.versionAsInProject().type(type);
        } else {
            return mavenOption.version(version).type(type);
        }
    }

    /** Installs camel-core, camel-spring, camel-test plus any extra Camel features. */
    public static Option loadCamelFeatures(String... features) {
        List<String> result = new ArrayList<String>();
        result.add("camel-core");
        result.add("camel-spring");
        result.add("camel-test");
        for (String feature : features) {
            result.add(feature);
        }
        return features(getCamelKarafFeatureUrl(), result.toArray(new String[result.size()]));
    }

    /** Installs the drools-module feature plus any extra features. */
    public static Option loadDroolsFeatures(String... features) {
        List<String> result = new ArrayList<String>();
        result.add("drools-module");
        for (String feature : features) {
            result.add(feature);
        }
        // Bug fix: the destination array was previously sized
        // 4 + features.length, which is larger than the list, so toArray()
        // padded the feature-name array with nulls. Size it from the list
        // itself, consistent with loadCamelFeatures().
        return features(getFeatureUrl("org.jboss.integration.fuse", "karaf-features").type("xml").classifier("features").versionAsInProject(), result.toArray(new String[result.size()]));
    }

    /** Registers the Drools Karaf feature repository. */
    public static Option loadDroolsRepo() {
        return features(maven().groupId("org.drools").artifactId("drools-karaf-features").type("xml").classifier("features").versionAsInProject().getURL());
    }

    /** Karaf version under test; overridable via -DkarafVersion. */
    private static String getKarafVersion() {
        String karafVersion = System.getProperty("karafVersion");
        if (karafVersion == null) {
            // setup the default version of it
            karafVersion = "2.3.3";
        }
        return karafVersion;
    }

    /**
     * Builds the Karaf distribution configuration, honouring an alternative
     * local Maven repository given via -Dmaven.repo.local, and configures
     * the remote repositories used to resolve artifacts.
     */
    public static Option getKarafDistributionOption() {
        String karafVersion = getKarafVersion();
        LOG.info("*** The karaf version is " + karafVersion + " ***");
        String localRepo = System.getProperty("maven.repo.local", "");
        if (localRepo.length() > 0) {
            LOG.info("Using alternative local Maven repository in {}.", new File(localRepo).getAbsolutePath());
            localRepo = new File(localRepo).getAbsolutePath().toString()+"@id=local,";
        }
        return new DefaultCompositeOption(KarafDistributionOption.karafDistributionConfiguration()
                .frameworkUrl(maven().groupId("org.apache.karaf").artifactId("apache-karaf").type("tar.gz").versionAsInProject())
                .karafVersion(karafVersion)
                .name("Apache Karaf")
                .useDeployFolder(false).unpackDirectory(new File("target/paxexam/unpack/"))
                ,
                KarafDistributionOption.editConfigurationFilePut("etc/org.ops4j.pax.url.mvn.cfg", "org.ops4j.pax.url.mvn.repositories",
                        localRepo+
                        "http://repo1.maven.org/maven2@id=central," +
                        " http://svn.apache.org/repos/asf/servicemix/m2-repo@id=servicemix," +
                        " http://repository.springsource.com/maven/bundles/release@id=springsource.release," +
                        " http://repository.springsource.com/maven/bundles/external@id=springsource.external," +
                        " https://oss.sonatype.org/content/repositories/releases/@id=sonatype, " +
                        " https://repository.jboss.org/nexus/content/groups/ea@id=ea"
                ));
    }
}
| {
"content_hash": "94aa71c192c29e1ba37d4abd0fdb3478",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 192,
"avg_line_length": 42.70629370629371,
"alnum_prop": 0.6567872932700181,
"repo_name": "mariofusco/fuse-bxms-integ",
"id": "715c06bd031b94d3a71cdf7f5cc61cda06795179",
"size": "6724",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "release/karaf/itests/drools/src/test/java/org/drools/karaf/itest/KarafIntegrationTestSupport.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6607"
},
{
"name": "Java",
"bytes": "1533772"
},
{
"name": "XSLT",
"bytes": "301"
}
],
"symlink_target": ""
} |
import { BlankPage } from './app.po';
// End-to-end smoke test for the application shell.
describe('blank App', () => {
  let page: BlankPage;

  beforeEach(() => {
    // Fresh page object before every spec.
    page = new BlankPage();
  });

  it('should display message saying app works', () => {
    page.navigateTo();
    // NOTE(review): the spec name says "app works" but the asserted text is
    // 'Tour of Heroes' — presumably renamed during the tutorial; confirm.
    expect(page.getParagraphText()).toEqual('Tour of Heroes');
  });
});
| {
"content_hash": "1ef7e4e288103716159f9054e15e9e6b",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 62,
"avg_line_length": 21.5,
"alnum_prop": 0.5880398671096345,
"repo_name": "johnpapa/angular2-tour-of-heroes",
"id": "66c98fce6c41a5944b9b701c9f7fe3a0d131e05a",
"size": "301",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "e2e/app.e2e-spec.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3843"
},
{
"name": "HTML",
"bytes": "4450"
},
{
"name": "JavaScript",
"bytes": "2384"
},
{
"name": "TypeScript",
"bytes": "12776"
}
],
"symlink_target": ""
} |
title: "Optymalizacja wydajności"
description: "Wydajność można traktować jako jedną z cech produktu. Dostarczaj treść użytkownikom możliwie najszybciej. Po wejściu użytkowników do projektowanej aplikacji interakcja ze stroną i renderowanie powinny być jak najbardziej płynne."
updated_on: 2014-04-28
---
{% comment %}
Guide list content will be output by the landing layout passed on the article collection matching page.id
{% endcomment %}
| {
"content_hash": "473bcdb8b3de69083285596e77d0daec",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 243,
"avg_line_length": 44.5,
"alnum_prop": 0.8044943820224719,
"repo_name": "yoichiro/WebFundamentals",
"id": "6999d7440886aa1474bcff902844b320c3456f2b",
"size": "465",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/content/pl/fundamentals/performance/index.markdown",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "59398"
},
{
"name": "HTML",
"bytes": "405678"
},
{
"name": "JavaScript",
"bytes": "33183"
},
{
"name": "Liquid",
"bytes": "61069"
},
{
"name": "Python",
"bytes": "2386"
},
{
"name": "Ruby",
"bytes": "103364"
},
{
"name": "Shell",
"bytes": "4844"
},
{
"name": "Smarty",
"bytes": "1487"
}
],
"symlink_target": ""
} |
package org.caleydo.datadomain.pathway.graph;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.caleydo.datadomain.pathway.graph.item.vertex.PathwayVertexRep;
import org.caleydo.datadomain.pathway.manager.PathwayManager;
import org.jgrapht.graph.DefaultEdge;
/**
*
* A path of {@link PathwayVertexRep}s that consists of one or multiple {@link PathSegment}s.
*
* @author Christian
*
*/
public class PathwayPath extends ArrayList<PathSegment> {

    /** Serialization id (ArrayList is Serializable). */
    private static final long serialVersionUID = -4401074951729509240L;

    /** Creates an empty path. */
    public PathwayPath() {
        // TODO Auto-generated constructor stub
    }

    /** Creates an empty path with the given initial segment capacity. */
    public PathwayPath(int capacity) {
        super(capacity);
    }

    /** Creates a path containing the given segments, in iteration order. */
    public PathwayPath(Collection<? extends PathSegment> segments) {
        super(segments);
    }

    /** Creates a path containing the given segments, in argument order. */
    public PathwayPath(PathSegment... segments) {
        super(segments.length);
        for (PathSegment s : segments) {
            add(s);
        }
    }

    /** Returns all segments of this path belonging to the given pathway (identity comparison). */
    public List<PathSegment> getSegmentsOfPathway(PathwayGraph pathway) {
        List<PathSegment> segments = new ArrayList<>();
        for (PathSegment segment : this) {
            if (segment.getPathway() == pathway) {
                segments.add(segment);
            }
        }
        return segments;
    }

    /** Returns true if at least one segment of this path belongs to the given pathway. */
    public boolean hasPathway(PathwayGraph pathway) {
        for (PathSegment segment : this) {
            if (segment.getPathway() == pathway) {
                return true;
            }
        }
        return false;
    }

    /**
     * Verifies this path's structural invariants without modifying it:
     * no null/empty/internally-inconsistent segments; no single-vertex
     * segment whose boundary vertex is equivalent to its neighbor's; and no
     * two consecutive segments of the same pathway that are directly
     * connected by an edge (those should be one merged segment).
     *
     * @return true if all invariants hold
     */
    public boolean checkIntegrity() {
        PathSegment prevSegment = null;
        for (PathSegment segment : this) {
            if (segment == null || segment.isEmpty() || !segment.checkIntegrity()) {
                return false;
            }
            // A one-vertex segment that is equivalent to the previous
            // segment's last vertex is redundant (the last segment of the
            // path is exempt).
            if (prevSegment != null && (prevSegment.size() == 1 || (segment.size() == 1 && getLast() != segment))
                    && PathwayManager.get().areVerticesEquivalent(segment.getFirst(), prevSegment.getLast())) {
                return false;
            }
            // Consecutive same-pathway segments joined by an edge in either
            // direction should have been merged into one segment.
            if (prevSegment != null && prevSegment.getPathway() == segment.getPathway()) {
                DefaultEdge edge1 = segment.getPathway().getEdge(prevSegment.getLast(), segment.getFirst());
                DefaultEdge edge2 = segment.getPathway().getEdge(segment.getFirst(), prevSegment.getLast());
                if (edge1 != null || edge2 != null) {
                    return false;
                }
            }
            prevSegment = segment;
        }
        return true;
    }

    /**
     * Repairs the invariants tested by {@link #checkIntegrity()} by removing
     * redundant segments and merging connected same-pathway neighbors.
     * After each modification the scan restarts from the beginning, and the
     * whole pass repeats until the path is stable.
     */
    public void ensurePathLevelIntegrity() {
        boolean pathChanged = false;
        do {
            pathChanged = false;
            PathSegment prevSegment = null;
            for (int i = 0; i < size(); i++) {
                PathSegment segment = get(i);
                // Drop null or empty segments.
                if (segment == null || segment.isEmpty()) {
                    remove(i);
                    pathChanged = true;
                    break;
                }
                // Drop a redundant one-vertex predecessor...
                if (prevSegment != null && prevSegment.size() == 1
                        && PathwayManager.get().areVerticesEquivalent(segment.getFirst(), prevSegment.getLast())) {
                    remove(i - 1);
                    pathChanged = true;
                    break;
                }
                // ...or a redundant one-vertex segment itself (the last
                // segment of the path is kept).
                if (prevSegment != null && segment.size() == 1 && getLast() != segment
                        && PathwayManager.get().areVerticesEquivalent(segment.getFirst(), prevSegment.getLast())) {
                    remove(i);
                    pathChanged = true;
                    break;
                }
                // Merge consecutive same-pathway segments that are directly
                // connected by an edge in either direction.
                if (prevSegment != null && prevSegment.getPathway() == segment.getPathway()) {
                    DefaultEdge edge1 = segment.getPathway().getEdge(prevSegment.getLast(), segment.getFirst());
                    DefaultEdge edge2 = segment.getPathway().getEdge(segment.getFirst(), prevSegment.getLast());
                    if (edge1 != null || edge2 != null) {
                        prevSegment.addAll(segment);
                        remove(i);
                        pathChanged = true;
                        break;
                    }
                }
                prevSegment = segment;
            }
        } while (pathChanged);
    }

    /**
     * @param segments
     * @return One list of {@link PathwayVertexRep}s that contains all objects of the list of lists.
     */
    public static List<PathwayVertexRep> flattenSegments(PathwayPath segments) {
        List<PathwayVertexRep> vertexReps = new ArrayList<>();
        for (PathSegment segment : segments) {
            vertexReps.addAll(segment);
        }
        return vertexReps;
    }

    /**
     * Determines, whether the specified target path segments are shown by the source path segments. If the specified
     * pathway is not null, only segments referring to this pathway are considered. The (filtered) target vertices must
     * occur in the source in order; once a match has started, they must be consecutive.
     *
     * @param segments
     * @return
     */
    public static boolean isPathShown(PathwayPath sourcePathSegments, PathwayPath targetPathSegments,
            PathwayGraph pathway) {
        List<PathwayVertexRep> sourceSegments = flattenSegments(sourcePathSegments);
        List<PathwayVertexRep> targetSegments = flattenSegments(targetPathSegments);
        // Index into sourceSegments from which the next match is searched.
        int startIndex = 0;
        boolean equalityStarted = false;
        for (PathwayVertexRep vTarget : targetSegments) {
            // Ignore other pathway paths if this renderer only represents a single pathway
            if (pathway != null && pathway != vTarget.getPathway())
                continue;
            if (startIndex >= sourceSegments.size())
                return false;
            for (int i = startIndex; i < sourceSegments.size(); i++) {
                PathwayVertexRep vSource = sourceSegments.get(i);
                startIndex = i + 1;
                // Ignore other pathway paths if this renderer only represents a single pathway
                if (pathway != null && pathway != vSource.getPathway())
                    continue;
                if (vTarget == vSource) {
                    equalityStarted = true;
                    break;
                } else if (equalityStarted) {
                    // A gap after matching began means the target path is
                    // not shown contiguously.
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Determines whether the specified path segments contain a vertex rep (identity comparison).
     *
     * @param pathSegments
     * @param vertexRep
     * @return
     */
    public static boolean containsVertexRep(PathwayPath pathSegments, PathwayVertexRep vertexRep) {
        List<PathwayVertexRep> segments = flattenSegments(pathSegments);
        for (PathwayVertexRep vSource : segments) {
            if (vSource == vertexRep)
                return true;
        }
        return false;
    }

    /**
     * Determines the number of equal vertices of the specified paths. Each target vertex is counted at most once.
     *
     * @param segments
     * @return
     */
    public static int getNumEqualVertices(PathwayPath sourcePathSegments, PathwayPath targetPathSegments) {
        List<PathwayVertexRep> sourceSegments = flattenSegments(sourcePathSegments);
        List<PathwayVertexRep> targetSegments = flattenSegments(targetPathSegments);
        int numEqualVertices = 0;
        for (PathwayVertexRep vTarget : targetSegments) {
            for (PathwayVertexRep vSource : sourceSegments) {
                if (vSource == vTarget) {
                    numEqualVertices++;
                    break;
                }
            }
        }
        return numEqualVertices;
    }

    /**
     * Determines the set of vertices the specified paths have in common (insertion-ordered).
     *
     * @param sourcePathSegments
     * @param targetPathSegments
     * @return
     */
    public static Set<PathwayVertexRep> getCommonVertices(PathwayPath sourcePathSegments, PathwayPath targetPathSegments) {
        List<PathwayVertexRep> sourceSegments = flattenSegments(sourcePathSegments);
        List<PathwayVertexRep> targetSegments = flattenSegments(targetPathSegments);
        Set<PathwayVertexRep> commonVertices = new LinkedHashSet<>();
        for (PathwayVertexRep vTarget : targetSegments) {
            for (PathwayVertexRep vSource : sourceSegments) {
                if (vSource == vTarget) {
                    commonVertices.add(vSource);
                    break;
                }
            }
        }
        return commonVertices;
    }

    /** Returns the first segment, or null if the path is empty. */
    public PathSegment getFirst() {
        return isEmpty() ? null : get(0);
    }

    /** Returns the last segment, or null if the path is empty. */
    public PathSegment getLast() {
        return isEmpty() ? null : get(size() - 1);
    }
}
| {
"content_hash": "a59358fff107a350d179ba76dde80f0b",
"timestamp": "",
"source": "github",
"line_count": 259,
"max_line_length": 120,
"avg_line_length": 27.501930501930502,
"alnum_prop": 0.6908605924470027,
"repo_name": "Caleydo/caleydo",
"id": "d5d29dc0faeadde5c1b6b510b4b14d1662eddd86",
"size": "7489",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "org.caleydo.datadomain.pathway/src/org/caleydo/datadomain/pathway/graph/PathwayPath.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2493"
},
{
"name": "GLSL",
"bytes": "2781"
},
{
"name": "HTML",
"bytes": "27930"
},
{
"name": "Java",
"bytes": "9111375"
},
{
"name": "JavaScript",
"bytes": "81257"
},
{
"name": "PHP",
"bytes": "546"
},
{
"name": "Python",
"bytes": "33906"
},
{
"name": "R",
"bytes": "5894"
},
{
"name": "Shell",
"bytes": "14669"
}
],
"symlink_target": ""
} |
package com.msiops.footing.functional;
/**
 * A function of one argument whose application may throw a checked
 * exception of type {@code X}.
 *
 * @param <T1> type of the argument
 * @param <R> type of the result
 * @param <X> type of throwable the function is permitted to throw
 */
@FunctionalInterface
public interface FunX1<T1, R, X extends Throwable> {

    /**
     * Applies this function to the given argument.
     *
     * @param v the argument
     * @return the function result
     * @throws X if the computation fails
     */
    R apply(T1 v) throws X;
}
| {
"content_hash": "4fad2e00c414de2b1c886e29b086d7cc",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 52,
"avg_line_length": 16.333333333333332,
"alnum_prop": 0.7346938775510204,
"repo_name": "mediascience/java-functional",
"id": "ce77e8a703e55871f73b2ec5cbe956e937fac146",
"size": "858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/msiops/footing/functional/FunX1.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "68818"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "e9b614d1ae8cb5283776d43c6e7a94ac",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "09073ca767e8a88f5f5cfff6d5392669ac069cf7",
"size": "196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Rhodophyta/Florideophyceae/Corallinales/Sporolithaceae/Sporolithon/Sporolithon stefaninii/ Syn. Archaeolithothamnion stefaninii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "1611fe8a87cad66b8264338cb3f78a57",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "73d707ab1dfabe604e1506e86bb683d371612c51",
"size": "203",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Trichoglottis/Trichoglottis luwuensis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package org.vaadin.vol.client.wrappers.control;
import org.vaadin.vol.client.wrappers.layer.Layer;
/**
 * Wrapper for the OpenLayers OverviewMap control, created through a JSNI
 * factory method.
 */
public class OverviewMap extends Control {

    // Instances are obtained via create(), not by direct construction.
    protected OverviewMap() {};

    /**
     * Creates an OpenLayers OverviewMap control displaying the given layer.
     *
     * @param targetLayer the layer shown in the overview map
     */
    public static native OverviewMap create(Layer targetLayer)
    /*-{
        var options = {
            layers: [targetLayer]
        };
        return new $wnd.OpenLayers.Control.OverviewMap(options);
    }-*/;
}
| {
"content_hash": "70c3f0d6c29cd0394a4e14dbfa156566",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 60,
"avg_line_length": 21.823529411764707,
"alnum_prop": 0.7088948787061995,
"repo_name": "lizardtechblog/ExpressZip",
"id": "a9c1a16d60e81160b34e8b483782e069f96463fb",
"size": "371",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/org/vaadin/vol/client/wrappers/control/OverviewMap.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "37903"
},
{
"name": "Java",
"bytes": "825177"
},
{
"name": "JavaScript",
"bytes": "167965"
}
],
"symlink_target": ""
} |
package de.nm.file;
import java.io.File;
import java.io.FileFilter;
public class ImageFileFilter implements FileFilter {
private String[] supportedFileExt;
public ImageFileFilter(String[] supportedFileExtentions) {
supportedFileExt = supportedFileExtentions;
}
@Override
public boolean accept(File pathname) {
return endsWith(pathname.getName(), supportedFileExt);
}
public boolean endsWith(String needle, String[] heap) {
needle = needle.toLowerCase();
if(heap == null)
return true;
for(String s : heap) {
if(needle.endsWith(s.toLowerCase())) {
return true;
}
}
return false;
}
}
| {
"content_hash": "3391b6d15b8219919c0f027de44812e0",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 59,
"avg_line_length": 20.29032258064516,
"alnum_prop": 0.7154213036565977,
"repo_name": "MilchReis/ImageShrinker",
"id": "eb782d135491a232ed2496f475ec81ea2f42fec9",
"size": "629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/de/nm/file/ImageFileFilter.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "13076"
}
],
"symlink_target": ""
} |
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Index;
using Lucene.Net.Search.Spans;
using Lucene.Net.Util;
using System.Collections.Generic;
using System.IO;
using System.Xml;
namespace Lucene.Net.QueryParsers.Xml.Builders
{
/// <summary>
/// Builder that analyzes the text into a <see cref="SpanOrQuery"/>
/// </summary>
public class SpanOrTermsBuilder : SpanBuilderBase
{
    // Analyzer used to tokenize the element's text content.
    private readonly Analyzer analyzer;

    public SpanOrTermsBuilder(Analyzer analyzer)
    {
        this.analyzer = analyzer;
    }

    /// <summary>
    /// Tokenizes the element's text with the configured analyzer, wraps
    /// every produced term in a <c>SpanTermQuery</c>, and ORs them together
    /// into one <see cref="SpanOrQuery"/> with the element's optional
    /// <c>boost</c> attribute (default 1.0).
    /// </summary>
    public override SpanQuery GetSpanQuery(XmlElement e)
    {
        // "fieldName" may be inherited from an ancestor element; both it
        // and the non-blank text content are mandatory.
        string fieldName = DOMUtils.GetAttributeWithInheritanceOrFail(e, "fieldName");
        string value = DOMUtils.GetNonBlankTextOrFail(e);
        List<SpanQuery> clausesList = new List<SpanQuery>();
        TokenStream ts = null;
        try
        {
            ts = analyzer.GetTokenStream(fieldName, value);
            ITermToBytesRefAttribute termAtt = ts.AddAttribute<ITermToBytesRefAttribute>();
            BytesRef bytes = termAtt.BytesRef;
            ts.Reset();
            while (ts.IncrementToken())
            {
                termAtt.FillBytesRef();
                // DeepCopyOf: the attribute's BytesRef instance is reused
                // across tokens, so each clause needs its own copy.
                SpanTermQuery stq = new SpanTermQuery(new Term(fieldName, BytesRef.DeepCopyOf(bytes)));
                clausesList.Add(stq);
            }
            ts.End();
            SpanOrQuery soq = new SpanOrQuery(clausesList.ToArray(/*new SpanQuery[clausesList.size()]*/));
            soq.Boost = DOMUtils.GetAttribute(e, "boost", 1.0f);
            return soq;
        }
#pragma warning disable 168
        catch (IOException ioe)
#pragma warning restore 168
        {
            throw new ParserException("IOException parsing value:" + value);
        }
        finally
        {
            // Dispose the token stream whether or not parsing succeeded.
            IOUtils.DisposeWhileHandlingException(ts);
        }
    }
}
}
| {
"content_hash": "4871e4d78a114945751835021debe66b",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 110,
"avg_line_length": 32.42857142857143,
"alnum_prop": 0.5795398923152227,
"repo_name": "sisve/lucenenet",
"id": "9b6c09d7e10b7d86713351d13bf26defe7d85d5e",
"size": "2906",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/Lucene.Net.QueryParser/Xml/Builders/SpanOrTermsBuilder.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4805"
},
{
"name": "C#",
"bytes": "41089129"
},
{
"name": "Gnuplot",
"bytes": "2444"
},
{
"name": "HTML",
"bytes": "79746"
},
{
"name": "PowerShell",
"bytes": "73932"
},
{
"name": "XSLT",
"bytes": "21773"
}
],
"symlink_target": ""
} |
import io
import os

import setuptools  # type: ignore

# Directory containing this setup.py; used to resolve files shipped with
# the package.
package_root = os.path.abspath(os.path.dirname(__file__))

name = "google-cloud-containeranalysis"
description = "Google Cloud Devtools Containeranalysis API client library"

# Load the version from gapic_version.py by exec-ing it, which avoids
# importing the package (and therefore its dependencies) at build time.
version = {}
with open(
    os.path.join(
        package_root, "google/cloud/devtools/containeranalysis/gapic_version.py"
    )
) as fp:
    exec(fp.read(), version)
version = version["__version__"]

# 0.x releases are advertised as Beta, everything else as Stable.
if version[0] == "0":
    release_status = "Development Status :: 4 - Beta"
else:
    release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*",
    "proto-plus >= 1.22.0, <2.0.0dev",
    "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
    "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev",
    "grafeas >=1.4.1, <2.0dev",
]
url = "https://github.com/googleapis/python-containeranalysis"

# Fix: package_root was previously recomputed identically here; the
# duplicate assignment has been removed.
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only ship packages under the google.* namespace.
packages = [
    package
    for package in setuptools.PEP420PackageFinder.find()
    if package.startswith("google")
]

# Declare the implicit namespace packages present in this distribution.
namespaces = ["google"]
if "google.cloud" in packages:
    namespaces.append("google.cloud")
if "google.cloud.devtools" in packages:
    namespaces.append("google.cloud.devtools")

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url=url,
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    python_requires=">=3.7",
    namespace_packages=namespaces,
    install_requires=dependencies,
    include_package_data=True,
    zip_safe=False,
)
| {
"content_hash": "f497e9c0eeb427be06128b49842c200b",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 113,
"avg_line_length": 29.89156626506024,
"alnum_prop": 0.6360338573155986,
"repo_name": "googleapis/python-containeranalysis",
"id": "16a187fbc581ec13cf71b6585bbd7ed0f7ac0c32",
"size": "3081",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "255878"
},
{
"name": "Shell",
"bytes": "30693"
}
],
"symlink_target": ""
} |
package com.jimandreas.aarch.popm.db;
import android.arch.persistence.room.TypeConverter;
import android.arch.persistence.room.util.StringUtil;
import java.util.Collections;
import java.util.List;
/**
 * Room {@code TypeConverter}s persisting {@code List<Integer>} columns as
 * delimited strings (format defined by Room's internal StringUtil).
 */
public class GithubTypeConverters {

    /** Converts a stored string back into a list of ints; null yields an empty list. */
    @TypeConverter
    public static List<Integer> stringToIntList(String data) {
        if (data == null) {
            return Collections.emptyList();
        }
        return StringUtil.splitToIntList(data);
    }

    /** Serializes a list of ints into the string form Room stores. */
    @TypeConverter
    public static String intListToString(List<Integer> ints) {
        return StringUtil.joinIntoString(ints);
    }
}
| {
"content_hash": "860d9c8eecd3ad1311088d083f07ae19",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 62,
"avg_line_length": 24.791666666666668,
"alnum_prop": 0.7092436974789916,
"repo_name": "jimandreas/PopM",
"id": "fcfc9cf5f13d295e72e0742998cd3cbfd33b51d3",
"size": "1214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/jimandreas/aarch/popm/db/GithubTypeConverters.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "345330"
},
{
"name": "Shell",
"bytes": "1553"
}
],
"symlink_target": ""
} |
<!doctype html>
<!-- This file is generated by build.py. -->
<title>Reference for img wide.jpg; overflow:hidden; -o-object-fit:none; -o-object-position:center 1em</title>
<link rel="stylesheet" href="../../support/reftests.css">
<style>
.helper { overflow:hidden }
.helper > * { left:-20.0px; top:1em; }
</style>
<div id="ref">
<span class="helper"><img src="../../support/wide.jpg"></span>
</div>
| {
"content_hash": "213fdeb2dee2a3be48ca7f4aa73f2116",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 109,
"avg_line_length": 36.54545454545455,
"alnum_prop": 0.654228855721393,
"repo_name": "operasoftware/presto-testo",
"id": "8dabfb90f568fad03fa1de91084ceafecc7e5878",
"size": "402",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "css/image-fit/reftests/img-jpg-wide/hidden_none_center_1em-ref.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "2312"
},
{
"name": "ActionScript",
"bytes": "23470"
},
{
"name": "AutoHotkey",
"bytes": "8832"
},
{
"name": "Batchfile",
"bytes": "5001"
},
{
"name": "C",
"bytes": "116512"
},
{
"name": "C++",
"bytes": "279128"
},
{
"name": "CSS",
"bytes": "208905"
},
{
"name": "Groff",
"bytes": "674"
},
{
"name": "HTML",
"bytes": "106576719"
},
{
"name": "Haxe",
"bytes": "3874"
},
{
"name": "Java",
"bytes": "185827"
},
{
"name": "JavaScript",
"bytes": "22531460"
},
{
"name": "Makefile",
"bytes": "13409"
},
{
"name": "PHP",
"bytes": "524372"
},
{
"name": "POV-Ray SDL",
"bytes": "6542"
},
{
"name": "Perl",
"bytes": "321672"
},
{
"name": "Python",
"bytes": "954636"
},
{
"name": "Ruby",
"bytes": "1006850"
},
{
"name": "Shell",
"bytes": "12140"
},
{
"name": "Smarty",
"bytes": "1860"
},
{
"name": "XSLT",
"bytes": "2567445"
}
],
"symlink_target": ""
} |
<?php
// Brazilian Portuguese admin language strings for the "Frete por itens"
// (per-item shipping) extension.

// Heading
$_['heading_title'] = 'Frete por itens';

// Text
$_['text_shipping'] = 'Fretes';
$_['text_success'] = 'Frete por itens modificado com sucesso!';
$_['text_edit'] = 'Configurações do Frete por itens';

// Entry
$_['entry_cost'] = 'Valor por item';
$_['entry_tax_class'] = 'Grupo de impostos';
$_['entry_geo_zone'] = 'Região geográfica';
$_['entry_status'] = 'Situação';
$_['entry_sort_order'] = 'Posição';

// Error
$_['error_permission'] = 'Atenção: Você não tem permissão para modificar a extensão de frete Frete por itens!';
"content_hash": "5f59cc15cee80bd4b11785f6145e3395",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 111,
"avg_line_length": 31.833333333333332,
"alnum_prop": 0.6055846422338569,
"repo_name": "ScalaSoft/opencart203",
"id": "3d7983ee67f6f4b9e6143c11baf69ca594b5d2f0",
"size": "587",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "system/upload/temp-137f4b4949a1d5050fc4ec01e45065df/upload/admin/language/portuguese-br/shipping/item.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1258635"
},
{
"name": "HTML",
"bytes": "187936"
},
{
"name": "JavaScript",
"bytes": "676331"
},
{
"name": "Makefile",
"bytes": "285"
},
{
"name": "PHP",
"bytes": "7633090"
},
{
"name": "Shell",
"bytes": "680"
},
{
"name": "Smarty",
"bytes": "5588445"
}
],
"symlink_target": ""
} |
<?php
class Mage_Adminhtml_Model_System_Config_Source_Currency_Service
{
    /**
     * Cached option list, built lazily on first call.
     */
    protected $_options;

    /**
     * Return the currency import services configured under
     * global/currency/import/services as label/value option pairs
     * suitable for an admin select field.
     *
     * @param bool $isMultiselect unused; kept for interface compatibility
     * @return array
     */
    public function toOptionArray($isMultiselect)
    {
        if (!$this->_options) {
            $serviceConfig = Mage::getConfig()->getNode('global/currency/import/services')->asArray();
            $this->_options = array();
            foreach ($serviceConfig as $serviceCode => $serviceData) {
                $this->_options[] = array(
                    'label' => $serviceData['name'],
                    'value' => $serviceCode,
                );
            }
        }
        return $this->_options;
    }
}
| {
"content_hash": "15415b710b34c2f62db7d84006ebdf75",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 97,
"avg_line_length": 24.26923076923077,
"alnum_prop": 0.4865293185419968,
"repo_name": "z-v/iboxGento2",
"id": "c468b95e09a74212cef8037f94877e3aaa619925",
"size": "1572",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "app/code/core/Mage/Adminhtml/Model/System/Config/Source/Currency/Service.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "20018"
},
{
"name": "ApacheConf",
"bytes": "1187"
},
{
"name": "Batchfile",
"bytes": "1036"
},
{
"name": "CSS",
"bytes": "2077843"
},
{
"name": "HTML",
"bytes": "5840333"
},
{
"name": "JavaScript",
"bytes": "1563785"
},
{
"name": "PHP",
"bytes": "49596017"
},
{
"name": "PowerShell",
"bytes": "1028"
},
{
"name": "Ruby",
"bytes": "288"
},
{
"name": "Shell",
"bytes": "3849"
},
{
"name": "XSLT",
"bytes": "2066"
}
],
"symlink_target": ""
} |
//
// Misc zip/gzip utility functions.
//
#define LOG_TAG "ziputil"
#include <androidfw/ZipUtils.h>
#include <androidfw/ZipFileRO.h>
#include <utils/Log.h>
#include <utils/Compat.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <zlib.h>
using namespace android;
/*
 * Decode a 4-byte little-endian value starting at "buf".
 *
 * Each byte is widened to unsigned long before shifting: the original
 * expression evaluated "buf[3] << 24" in int, so a top byte >= 0x80
 * overflowed into the sign bit and was sign-extended when the int result
 * was converted to a 64-bit unsigned long (e.g. a CRC of 0x80000000 came
 * back as 0xFFFFFFFF80000000 on LP64 platforms).
 */
static inline unsigned long get4LE(const unsigned char* buf) {
    return (unsigned long) buf[0]
            | ((unsigned long) buf[1] << 8)
            | ((unsigned long) buf[2] << 16)
            | ((unsigned long) buf[3] << 24);
}
static const unsigned long kReadBufSize = 32768;
/*
* Utility function that expands zip/gzip "deflate" compressed data
* into a buffer.
*
* (This is a clone of the previous function, but it takes a FILE* instead
* of an fd. We could pass fileno(fd) to the above, but we can run into
* trouble when "fp" has a different notion of what fd's file position is.)
*
* "fp" is an open file positioned at the start of the "deflate" data
* "buf" must hold at least "uncompressedLen" bytes.
*/
/*
 * Inflate raw "deflate"-compressed data into "buf".
 *
 * "reader" is any object exposing
 *     long read(unsigned char** nextBuffer, long readSize)
 * that yields the next chunk of compressed input (see FileReader,
 * FdReader and BufferReader below). "buf" must hold at least
 * "uncompressedLen" bytes; "compressedLen" is the total number of
 * compressed bytes to consume from the reader.
 *
 * Returns "true" on success, "false" on any read or zlib failure.
 */
/*static*/ template<typename T> bool inflateToBuffer(T& reader, void* buf,
    long uncompressedLen, long compressedLen)
{
    bool result = false;
    z_stream zstream;
    int zerr;
    unsigned long compRemaining;
    assert(uncompressedLen >= 0);
    assert(compressedLen >= 0);
    compRemaining = compressedLen;
    /*
     * Initialize the zlib stream.
     */
    memset(&zstream, 0, sizeof(zstream));
    zstream.zalloc = Z_NULL;
    zstream.zfree = Z_NULL;
    zstream.opaque = Z_NULL;
    zstream.next_in = NULL;
    zstream.avail_in = 0;
    /* the entire output goes straight into the caller's buffer */
    zstream.next_out = (Bytef*) buf;
    zstream.avail_out = uncompressedLen;
    zstream.data_type = Z_UNKNOWN;
    /*
     * Use the undocumented "negative window bits" feature to tell zlib
     * that there's no zlib header waiting for it.
     */
    zerr = inflateInit2(&zstream, -MAX_WBITS);
    if (zerr != Z_OK) {
        if (zerr == Z_VERSION_ERROR) {
            ALOGE("Installed zlib is not compatible with linked version (%s)\n",
                ZLIB_VERSION);
        } else {
            ALOGE("Call to inflateInit2 failed (zerr=%d)\n", zerr);
        }
        goto bail;
    }
    /*
     * Loop while we have data.
     */
    do {
        unsigned long getSize;
        /* read as much as we can */
        if (zstream.avail_in == 0) {
            /* never ask for more than what's left of the compressed data */
            getSize = (compRemaining > kReadBufSize) ?
                        kReadBufSize : compRemaining;
            ALOGV("+++ reading %ld bytes (%ld left)\n",
                getSize, compRemaining);
            unsigned char* nextBuffer = NULL;
            /* NOTE(review): FdReader::read can return -1 on error, which
             * wraps to a huge value in this unsigned long and then passes
             * the short-read check below; confirm the error path. */
            const unsigned long nextSize = reader.read(&nextBuffer, getSize);
            if (nextSize < getSize || nextBuffer == NULL) {
                ALOGD("inflate read failed (%ld vs %ld)\n", nextSize, getSize);
                goto z_bail;
            }
            compRemaining -= nextSize;
            zstream.next_in = nextBuffer;
            zstream.avail_in = nextSize;
        }
        /* uncompress the data */
        zerr = inflate(&zstream, Z_NO_FLUSH);
        if (zerr != Z_OK && zerr != Z_STREAM_END) {
            ALOGD("zlib inflate call failed (zerr=%d)\n", zerr);
            goto z_bail;
        }
        /* output buffer holds all, so no need to write the output */
    } while (zerr == Z_OK);
    assert(zerr == Z_STREAM_END);       /* other errors should've been caught */
    /* the stream ended; verify we produced exactly the expected size */
    if ((long) zstream.total_out != uncompressedLen) {
        ALOGW("Size mismatch on inflated file (%ld vs %ld)\n",
            zstream.total_out, uncompressedLen);
        goto z_bail;
    }
    // success!
    result = true;
z_bail:
    inflateEnd(&zstream);       /* free up any allocated structures */
bail:
    return result;
}
/*
 * Reader that pulls compressed bytes from a stdio FILE*.
 *
 * Owns a heap-allocated scratch buffer of kReadBufSize bytes; read()
 * hands that buffer back to the caller, so the returned pointer is only
 * valid until the next read() call or until this object is destroyed.
 */
class FileReader {
public:
    FileReader(FILE* fp) :
        mFp(fp), mReadBuf(new unsigned char[kReadBufSize])
    {
    }
    ~FileReader() {
        delete[] mReadBuf;
    }
    /*
     * Read up to "readSize" bytes into the internal buffer. On return,
     * "*nextBuffer" points at the data read; the result is the number of
     * bytes actually read (short reads happen at EOF or on error).
     */
    long read(unsigned char** nextBuffer, long readSize) const {
        *nextBuffer = mReadBuf;
        return fread(mReadBuf, 1, readSize, mFp);
    }
    FILE* mFp;
    unsigned char* mReadBuf;
private:
    // Copying would double-delete mReadBuf; forbid it (declared but not
    // defined, pre-C++11 style consistent with the rest of this file).
    FileReader(const FileReader&);
    FileReader& operator=(const FileReader&);
};
/*
 * Reader that pulls compressed bytes from a raw file descriptor.
 *
 * Owns a heap-allocated scratch buffer of kReadBufSize bytes; read()
 * hands that buffer back to the caller, so the returned pointer is only
 * valid until the next read() call or until this object is destroyed.
 */
class FdReader {
public:
    FdReader(int fd) :
        mFd(fd), mReadBuf(new unsigned char[kReadBufSize])
    {
    }
    ~FdReader() {
        delete[] mReadBuf;
    }
    /*
     * Read up to "readSize" bytes into the internal buffer, retrying on
     * EINTR. Returns the number of bytes read, or -1 on error (as from
     * ::read()).
     */
    long read(unsigned char** nextBuffer, long readSize) const {
        *nextBuffer = mReadBuf;
        return TEMP_FAILURE_RETRY(::read(mFd, mReadBuf, readSize));
    }
    int mFd;
    unsigned char* mReadBuf;
private:
    // Copying would double-delete mReadBuf; forbid it (declared but not
    // defined, pre-C++11 style consistent with the rest of this file).
    FdReader(const FdReader&);
    FdReader& operator=(const FdReader&);
};
/*
 * Reader that serves an entire in-memory buffer in a single read() call.
 *
 * The first read() returns the whole input regardless of "readSize";
 * every subsequent call reports end-of-data (NULL buffer, 0 bytes).
 */
class BufferReader {
public:
    BufferReader(void* input, size_t inputSize) :
        mInput(reinterpret_cast<unsigned char*>(input)),
        mInputSize(inputSize),
        mBufferReturned(false)
    {
    }
    long read(unsigned char** nextBuffer, long /*readSize*/) {
        // After the one-shot handoff there is nothing left to serve.
        if (mBufferReturned) {
            *nextBuffer = NULL;
            return 0;
        }
        mBufferReturned = true;
        *nextBuffer = mInput;
        return mInputSize;
    }
    unsigned char* mInput;
    const size_t mInputSize;
    bool mBufferReturned;
};
/*static*/ bool ZipUtils::inflateToBuffer(FILE* fp, void* buf,
long uncompressedLen, long compressedLen)
{
FileReader reader(fp);
return ::inflateToBuffer<FileReader>(reader, buf,
uncompressedLen, compressedLen);
}
/*static*/ bool ZipUtils::inflateToBuffer(int fd, void* buf,
long uncompressedLen, long compressedLen)
{
FdReader reader(fd);
return ::inflateToBuffer<FdReader>(reader, buf,
uncompressedLen, compressedLen);
}
/*static*/ bool ZipUtils::inflateToBuffer(void* in, void* buf,
long uncompressedLen, long compressedLen)
{
BufferReader reader(in, compressedLen);
return ::inflateToBuffer<BufferReader>(reader, buf,
uncompressedLen, compressedLen);
}
/*
* Look at the contents of a gzip archive. We want to know where the
* data starts, and how long it will be after it is uncompressed.
*
* We expect to find the CRC and length as the last 8 bytes on the file.
* This is a pretty reasonable thing to expect for locally-compressed
* files, but there's a small chance that some extra padding got thrown
* on (the man page talks about compressed data written to tape). We
* don't currently deal with that here. If "gzip -l" whines, we're going
* to fail too.
*
* On exit, "fp" is pointing at the start of the compressed data.
*/
/*static*/ bool ZipUtils::examineGzip(FILE* fp, int* pCompressionMethod,
    long* pUncompressedLen, long* pCompressedLen, unsigned long* pCRC32)
{
    enum {      // gzip header FLG bits
        FTEXT       = 0x01,
        FHCRC       = 0x02,
        FEXTRA      = 0x04,
        FNAME       = 0x08,
        FCOMMENT    = 0x10,
    };
    int ic;
    int method, flags;
    int i;
    /* check the two-byte gzip magic number (0x1f 0x8b) */
    ic = getc(fp);
    if (ic != 0x1f || getc(fp) != 0x8b)
        return false;       // not gzip
    method = getc(fp);
    flags = getc(fp);
    /* quick sanity checks */
    if (method == EOF || flags == EOF)
        return false;
    /* only "deflate" compression is supported */
    if (method != ZipFileRO::kCompressDeflated)
        return false;
    /* skip over 4 bytes of mod time, 1 byte XFL, 1 byte OS */
    for (i = 0; i < 6; i++)
        (void) getc(fp);
    /* consume "extra" field, if present (2-byte little-endian length) */
    if ((flags & FEXTRA) != 0) {
        int len;
        len = getc(fp);
        len |= getc(fp) << 8;
        while (len-- && getc(fp) != EOF)
            ;
    }
    /* consume NUL-terminated filename, if present */
    if ((flags & FNAME) != 0) {
        do {
            ic = getc(fp);
        } while (ic != 0 && ic != EOF);
    }
    /* consume NUL-terminated comment, if present */
    if ((flags & FCOMMENT) != 0) {
        do {
            ic = getc(fp);
        } while (ic != 0 && ic != EOF);
    }
    /* consume 16-bit header CRC, if present */
    if ((flags & FHCRC) != 0) {
        (void) getc(fp);
        (void) getc(fp);
    }
    if (feof(fp) || ferror(fp))
        return false;
    /* seek to the end; CRC and length are in the last 8 bytes */
    long curPosn = ftell(fp);
    unsigned char buf[8];
    fseek(fp, -8, SEEK_END);
    /* compressed payload runs from the header end to the 8-byte trailer */
    *pCompressedLen = ftell(fp) - curPosn;
    if (fread(buf, 1, 8, fp) != 8)
        return false;
    /* seek back to start of compressed data */
    fseek(fp, curPosn, SEEK_SET);
    *pCompressionMethod = method;
    /* trailer holds CRC-32 then uncompressed size, both little-endian */
    *pCRC32 = get4LE(&buf[0]);
    *pUncompressedLen = get4LE(&buf[4]);
    return true;
}
| {
"content_hash": "35a3afe11b6650c41da886cc12b9bf3e",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 80,
"avg_line_length": 26.015822784810126,
"alnum_prop": 0.5881279649677654,
"repo_name": "bunnyblue/ACDDExtension",
"id": "6fa0f14ecb8e79af09107dd0d2f0275bebd864bb",
"size": "8840",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "aapt/aapt_base/libs/androidfw/ZipUtils.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "146466"
},
{
"name": "C++",
"bytes": "6811820"
},
{
"name": "Groovy",
"bytes": "794"
},
{
"name": "Java",
"bytes": "503207"
},
{
"name": "Makefile",
"bytes": "29797"
},
{
"name": "Shell",
"bytes": "14308"
}
],
"symlink_target": ""
} |
// Gridx test page: drag grid columns onto a non-grid DnD target.
require([
    'dojo/_base/lang',
    'dojo/_base/html',
    'dojo/_base/array',
    'dojo/_base/connect',
    'dojo/_base/window',
    'dojo/dnd/Target',
    'dojo/dnd/Source',
    'gridx/Grid',
    'gridx/core/model/cache/Async',
    'gridx/tests/support/data/MusicData',
    'gridx/tests/support/stores/Memory',
    'gridx/tests/support/TestPane',
    'gridx/tests/support/modules',
    'dijit/form/Button',
    'dijit/form/TextBox',
    'dojo/domReady!'
], function(lang, html, array, connect, win, dndTarget, dndSource, Grid, Cache, dataSource, storeFactory, TestPane, mods){
    // Build a Grid inside "container" with "size" rows of test music data,
    // using the column layout at index "layoutIdx"; "args" may override
    // any grid option.
    function create(id, container, size, layoutIdx, args){
        var g = new Grid(lang.mixin({
            id: id,
            cacheClass: Cache,
            store: storeFactory({
                path: './support/stores',
                dataSource: dataSource,
                size: size
            }),
            selectRowTriggerOnCell: true,
            modules: [
                mods.Focus,
                mods.ExtendedSelectColumn,
                mods.MoveColumn,
                mods.DndColumn,
                mods.VirtualVScroller
            ],
            structure: dataSource.layouts[layoutIdx]
        }, args));
        g.placeAt(container);
        g.startup();
        return g;
    }
    // NOTE(review): assigned without "var", so "grid" becomes a global —
    // presumably deliberate so the test page can poke at it.
    grid = create('grid', 'grid1Container', 100, 0, {
        dndColumnCanRearrange: false
    });
    //--------------------------------------------
    // Non-grid drop target: accepts dragged grid columns and lists the
    // dropped column ids in the #draggedColumns element.
    var formTarget = new dndTarget("songForm", {
        accept: ['grid/columns'],
        onDropExternal: function(source, nodes, copy){
            html.byId('draggedColumns').innerHTML = array.map(nodes, function(node){
                return node.getAttribute('columnid');
            }).join(', ');
        }
    });
    // Workaround applied via a private gridx DnD API; named _fixFF, so
    // apparently a Firefox-specific fix — confirm against gridx source.
    grid.dnd._dnd._fixFF(formTarget, 'songForm');
});
| {
"content_hash": "b59203f77fd4765190f2d2be22d31358",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 122,
"avg_line_length": 25.75862068965517,
"alnum_prop": 0.6492637215528781,
"repo_name": "andrescabrera/gwt-dojo-toolkit",
"id": "e9973199ba26f25915f39dec47f5caa8c6741ede",
"size": "1494",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/gwt/dojo/gridx/public/dojo/gridx/tests/test_grid_dndcolumn_nongrid_target.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "19954"
},
{
"name": "Batchfile",
"bytes": "866"
},
{
"name": "CSS",
"bytes": "5141756"
},
{
"name": "HTML",
"bytes": "324905"
},
{
"name": "Java",
"bytes": "974601"
},
{
"name": "JavaScript",
"bytes": "6023255"
},
{
"name": "PHP",
"bytes": "52493"
},
{
"name": "Shell",
"bytes": "1026"
},
{
"name": "XSLT",
"bytes": "47380"
}
],
"symlink_target": ""
} |
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.sebastianmarschall.rxsmartlock">
</manifest>
| {
"content_hash": "8ac75b7e041eeee813751c88c072a0a4",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 68,
"avg_line_length": 34.5,
"alnum_prop": 0.7318840579710145,
"repo_name": "sebastianmarschall/RxAuth",
"id": "18c6510fc3ec06f9af00812fbb1082f6b0fb40b0",
"size": "138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rxauth/src/main/AndroidManifest.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "412"
},
{
"name": "Kotlin",
"bytes": "9530"
}
],
"symlink_target": ""
} |
# Base Django settings for the "days" ("On This Day") project.
import os
import sys  # NOTE(review): unused in this module; confirm before removing.

# NOTE(review): DEBUG is hard-coded on in this base module; confirm that a
# production settings module overrides it before deploying.
DEBUG = True

# Secret key comes from the environment, with an obviously insecure
# fallback for local development.
SECRET_KEY = os.environ.get('DAYS_SECRET_KEY', 'insecure-secret-key')

# Project root: two directories up from this settings file.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Application definition
INSTALLED_APPS = [
    'days.apps.days.apps.DaysConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.staticfiles',
]

MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    # WhiteNoise serves static files straight from the WSGI process.
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'days.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'days.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
# STATICFILES_DIRS = (
#     os.path.join(BASE_DIR, 'static'),
# )

# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# Sites framework
SITE_ID = 1

# Email
# Development backend: prints outgoing mail to the console instead of sending.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

# Logging
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'standard': {
            'format': '%(asctime)s %(levelname)s %(process)d [%(name)s:%(lineno)d] - %(message)s',
        },
    },
    'handlers': {
        # Everything goes to the console; verbosity follows DEBUG.
        'console': {
            'level': 'DEBUG' if DEBUG else 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'standard',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': 'INFO',
        },
        # Root logger: catch-all for application code.
        '': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False
        },
    },
}

# Day of the week on which to run the load_events management command, where Monday is 0 and Sunday is 6.
# Allows Heroku's Scheduler to be used to run the command weekly.
LOAD_DAY = 6

# Number of days for which to retrieve and store events.
DAY_COUNT = 30

# Number of events to include in daily emails to subscribers.
EVENT_COUNT = 1

# SendGrid credentials
SENDGRID_USERNAME = os.environ.get('SENDGRID_USERNAME', 'set-me-please')
SENDGRID_PASSWORD = os.environ.get('SENDGRID_PASSWORD', 'set-me-please')

# Address to send emails from
FROM_EMAIL = 'On This Day <historian@onthisday.com>'
| {
"content_hash": "027656ac07e24b7beb1d4d336a38c45c",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 104,
"avg_line_length": 26.98581560283688,
"alnum_prop": 0.6438896189224704,
"repo_name": "rlucioni/days",
"id": "c18bd3bc7ba5afc43b5fdf0b5ecb46577d3f4ff4",
"size": "3805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "days/settings/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "242"
},
{
"name": "Python",
"bytes": "18244"
}
],
"symlink_target": ""
} |
import requests
class LeClient(object):
    """Thin HTTP client for the dbios API (http://dbios.herokuapp.com/)."""

    def __init__(self):
        # Base URL that every request path is appended to.
        self.endpoint = 'http://dbios.herokuapp.com/'

    def request(self, path=None, params=None):
        """Issue a GET request against ``endpoint + path`` and return the
        response body decoded as JSON.

        path: URL path relative to the endpoint. NOTE(review): the default
            of None raises a TypeError when concatenated to the endpoint,
            so callers are apparently expected to always pass one; confirm
            before tightening the signature.
        params: optional mapping of query-string parameters.
        """
        # Remember the last request's arguments for later inspection.
        self.path = path
        self.params = params
        # NOTE(review): no timeout and no HTTP status check here —
        # requests.get can block indefinitely and .json() raises on
        # non-JSON bodies. Flagged rather than changed to preserve the
        # existing best-effort behavior.
        result = requests.get(self.endpoint + path, params=params)
        return result.json()
| {
"content_hash": "278a54a33084d2ede6326448c06c2b70",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 66,
"avg_line_length": 24.357142857142858,
"alnum_prop": 0.6099706744868035,
"repo_name": "fly/dbcli",
"id": "464ef8230c660bf85a04a4ddbef9d2d06c2bcda8",
"size": "341",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "5710"
}
],
"symlink_target": ""
} |
// Copyright (C) 2002-2011 Nikolaus Gebhardt
// This file is part of the "Irrlicht Engine".
// For conditions of distribution and use, see copyright notice in irrlicht.h
#ifndef __I_SCENE_MANAGER_H_INCLUDED__
#define __I_SCENE_MANAGER_H_INCLUDED__
#include "IReferenceCounted.h"
#include "irrArray.h"
#include "irrString.h"
#include "path.h"
#include "vector3d.h"
#include "dimension2d.h"
#include "SColor.h"
#include "ETerrainElements.h"
#include "ESceneNodeTypes.h"
#include "ESceneNodeAnimatorTypes.h"
#include "EMeshWriterEnums.h"
#include "SceneParameters.h"
#include "IGeometryCreator.h"
#include "ISkinnedMesh.h"
namespace irr
{
struct SKeyMap;
struct SEvent;
namespace io
{
class IReadFile;
class IAttributes;
class IWriteFile;
class IFileSystem;
} // end namespace io
namespace gui
{
class IGUIFont;
class IGUIEnvironment;
} // end namespace gui
namespace video
{
class IVideoDriver;
class SMaterial;
class IImage;
class ITexture;
} // end namespace video
namespace scene
{
//! Enumeration for render passes.
/** A parameter passed to the registerNodeForRendering() method of the ISceneManager,
specifying when the node wants to be drawn in relation to the other nodes. */
enum E_SCENE_NODE_RENDER_PASS
{
//! No pass currently active
ESNRP_NONE =0,
//! Camera pass. The active view is set up here. The very first pass.
ESNRP_CAMERA =1,
//! In this pass, lights are transformed into camera space and added to the driver
ESNRP_LIGHT =2,
//! This is used for sky boxes.
ESNRP_SKY_BOX =4,
//! All normal objects can use this for registering themselves.
/** This value will never be returned by
ISceneManager::getSceneNodeRenderPass(). The scene manager
will determine by itself if an object is transparent or solid
and register the object as SNRT_TRANSPARENT or SNRT_SOLD
automatically if you call registerNodeForRendering with this
value (which is default). Note that it will register the node
only as ONE type. If your scene node has both solid and
transparent material types register it twice (one time as
SNRT_SOLID, the other time as SNRT_TRANSPARENT) and in the
render() method call getSceneNodeRenderPass() to find out the
current render pass and render only the corresponding parts of
the node. */
ESNRP_AUTOMATIC =24,
//! Solid scene nodes or special scene nodes without materials.
ESNRP_SOLID =8,
//! Transparent scene nodes, drawn after solid nodes. They are sorted from back to front and drawn in that order.
ESNRP_TRANSPARENT =16,
//! Transparent effect scene nodes, drawn after Transparent nodes. They are sorted from back to front and drawn in that order.
ESNRP_TRANSPARENT_EFFECT =32,
//! Drawn after the solid nodes, before the transparent nodes, the time for drawing shadow volumes
ESNRP_SHADOW =64
};
class IAnimatedMesh;
class IAnimatedMeshSceneNode;
class IBillboardSceneNode;
class IBillboardTextSceneNode;
class ICameraSceneNode;
class IDummyTransformationSceneNode;
class ILightManager;
class ILightSceneNode;
class IMesh;
class IMeshBuffer;
class IMeshCache;
class IMeshLoader;
class IMeshManipulator;
class IMeshSceneNode;
class IMeshWriter;
class IMetaTriangleSelector;
class IParticleSystemSceneNode;
class ISceneCollisionManager;
class ISceneLoader;
class ISceneNode;
class ISceneNodeAnimator;
class ISceneNodeAnimatorCollisionResponse;
class ISceneNodeAnimatorFactory;
class ISceneNodeFactory;
class ISceneUserDataSerializer;
class ITerrainSceneNode;
class ITextSceneNode;
class ITriangleSelector;
class IVolumeLightSceneNode;
namespace quake3
{
struct IShader;
} // end namespace quake3
//! The Scene Manager manages scene nodes, mesh recources, cameras and all the other stuff.
/** All Scene nodes can be created only here. There is a always growing
list of scene nodes for lots of purposes: Indoor rendering scene nodes
like the Octree (addOctreeSceneNode()) or the terrain renderer
(addTerrainSceneNode()), different Camera scene nodes
(addCameraSceneNode(), addCameraSceneNodeMaya()), scene nodes for Light
(addLightSceneNode()), Billboards (addBillboardSceneNode()) and so on.
A scene node is a node in the hierachical scene graph. Every scene node
may have children, which are other scene nodes. Children move relative
the their parents position. If the parent of a node is not visible, its
children won't be visible, too. In this way, it is for example easily
possible to attach a light to a moving car or to place a walking
character on a moving platform on a moving ship.
The SceneManager is also able to load 3d mesh files of different
formats. Take a look at getMesh() to find out what formats are
supported. If these formats are not enough, use
addExternalMeshLoader() to add new formats to the engine.
*/
class ISceneManager : public virtual IReferenceCounted
{
public:
//! Get pointer to an animateable mesh. Loads the file if not loaded already.
/**
* If you want to remove a loaded mesh from the cache again, use removeMesh().
* Currently there are the following mesh formats supported:
* <TABLE border="1" cellpadding="2" cellspacing="0">
* <TR>
* <TD>Format</TD>
* <TD>Description</TD>
* </TR>
* <TR>
* <TD>3D Studio (.3ds)</TD>
* <TD>Loader for 3D-Studio files which lots of 3D packages
* are able to export. Only static meshes are currently
* supported by this importer.</TD>
* </TR>
* <TR>
* <TD>3D World Studio (.smf)</TD>
* <TD>Loader for Leadwerks SMF mesh files, a simple mesh format
* containing static geometry for games. The proprietary .STF texture format
* is not supported yet. This loader was originally written by Joseph Ellis. </TD>
* </TR>
* <TR>
* <TD>Bliz Basic B3D (.b3d)</TD>
* <TD>Loader for blitz basic files, developed by Mark
* Sibly. This is the ideal animated mesh format for game
* characters as it is both rigidly defined and widely
* supported by modeling and animation software.
* As this format supports skeletal animations, an
* ISkinnedMesh will be returned by this importer.</TD>
* </TR>
* <TR>
* <TD>Cartography shop 4 (.csm)</TD>
* <TD>Cartography Shop is a modeling program for creating
* architecture and calculating lighting. Irrlicht can
* directly import .csm files thanks to the IrrCSM library
* created by Saurav Mohapatra which is now integrated
* directly in Irrlicht. If you are using this loader,
* please note that you'll have to set the path of the
* textures before loading .csm files. You can do this
* using
* SceneManager->getParameters()->setAttribute(scene::CSM_TEXTURE_PATH,
* "path/to/your/textures");</TD>
* </TR>
* <TR>
* <TD>COLLADA (.dae, .xml)</TD>
* <TD>COLLADA is an open Digital Asset Exchange Schema for
* the interactive 3D industry. There are exporters and
* importers for this format available for most of the
* big 3d packagesat http://collada.org. Irrlicht can
* import COLLADA files by using the
* ISceneManager::getMesh() method. COLLADA files need
* not contain only one single mesh but multiple meshes
* and a whole scene setup with lights, cameras and mesh
* instances, this loader can set up a scene as
* described by the COLLADA file instead of loading and
* returning one single mesh. By default, this loader
* behaves like the other loaders and does not create
* instances, but it can be switched into this mode by
* using
* SceneManager->getParameters()->setAttribute(COLLADA_CREATE_SCENE_INSTANCES, true);
* Created scene nodes will be named as the names of the
* nodes in the COLLADA file. The returned mesh is just
* a dummy object in this mode. Meshes included in the
* scene will be added into the scene manager with the
* following naming scheme:
* "path/to/file/file.dea#meshname". The loading of such
* meshes is logged. Currently, this loader is able to
* create meshes (made of only polygons), lights, and
* cameras. Materials and animations are currently not
* supported but this will change with future releases.
* </TD>
* </TR>
* <TR>
* <TD>Delgine DeleD (.dmf)</TD>
* <TD>DeleD (delgine.com) is a 3D editor and level-editor
* combined into one and is specifically designed for 3D
* game-development. With this loader, it is possible to
* directly load all geometry is as well as textures and
* lightmaps from .dmf files. To set texture and
* material paths, see scene::DMF_USE_MATERIALS_DIRS and
* scene::DMF_TEXTURE_PATH. It is also possible to flip
* the alpha texture by setting
* scene::DMF_FLIP_ALPHA_TEXTURES to true and to set the
* material transparent reference value by setting
* scene::DMF_ALPHA_CHANNEL_REF to a float between 0 and
* 1. The loader is based on Salvatore Russo's .dmf
* loader, I just changed some parts of it. Thanks to
* Salvatore for his work and for allowing me to use his
* code in Irrlicht and put it under Irrlicht's license.
* For newer and more enchanced versions of the loader,
* take a look at delgine.com.
* </TD>
* </TR>
* <TR>
* <TD>DirectX (.x)</TD>
* <TD>Platform independent importer (so not D3D-only) for
* .x files. Most 3D packages can export these natively
* and there are several tools for them available, e.g.
* the Maya exporter included in the DX SDK.
* .x files can include skeletal animations and Irrlicht
* is able to play and display them, users can manipulate
* the joints via the ISkinnedMesh interface. Currently,
* Irrlicht only supports uncompressed .x files.</TD>
* </TR>
* <TR>
* <TD>Half-Life model (.mdl)</TD>
* <TD>This loader opens Half-life 1 models, it was contributed
* by Fabio Concas and adapted by Thomas Alten.</TD>
* </TR>
* <TR>
* <TD>Irrlicht Mesh (.irrMesh)</TD>
* <TD>This is a static mesh format written in XML, native
* to Irrlicht and written by the irr mesh writer.
* This format is exported by the CopperCube engine's
* lightmapper.</TD>
* </TR>
* <TR>
* <TD>LightWave (.lwo)</TD>
* <TD>Native to NewTek's LightWave 3D, the LWO format is well
* known and supported by many exporters. This loader will
* import LWO2 models including lightmaps, bumpmaps and
* reflection textures.</TD>
* </TR>
* <TR>
* <TD>Maya (.obj)</TD>
* <TD>Most 3D software can create .obj files which contain
* static geometry without material data. The material
* files .mtl are also supported. This importer for
* Irrlicht can load them directly. </TD>
* </TR>
* <TR>
* <TD>Milkshape (.ms3d)</TD>
* <TD>.MS3D files contain models and sometimes skeletal
* animations from the Milkshape 3D modeling and animation
* software. Like the other skeletal mesh loaders, oints
* are exposed via the ISkinnedMesh animated mesh type.</TD>
* </TR>
* <TR>
* <TD>My3D (.my3d)</TD>
* <TD>.my3D is a flexible 3D file format. The My3DTools
* contains plug-ins to export .my3D files from several
* 3D packages. With this built-in importer, Irrlicht
* can read and display those files directly. This
* loader was written by Zhuck Dimitry who also created
* the whole My3DTools package. If you are using this
* loader, please note that you can set the path of the
* textures before loading .my3d files. You can do this
* using
* SceneManager->getParameters()->setAttribute(scene::MY3D_TEXTURE_PATH,
* "path/to/your/textures");
* </TD>
* </TR>
* <TR>
* <TD>OCT (.oct)</TD>
* <TD>The oct file format contains 3D geometry and
* lightmaps and can be loaded directly by Irrlicht. OCT
* files<br> can be created by FSRad, Paul Nette's
* radiosity processor or exported from Blender using
* OCTTools which can be found in the exporters/OCTTools
* directory of the SDK. Thanks to Murphy McCauley for
* creating all this.</TD>
* </TR>
* <TR>
* <TD>OGRE Meshes (.mesh)</TD>
* <TD>Ogre .mesh files contain 3D data for the OGRE 3D
* engine. Irrlicht can read and display them directly
* with this importer. To define materials for the mesh,
* copy a .material file named like the corresponding
* .mesh file where the .mesh file is. (For example
* ogrehead.material for ogrehead.mesh). Thanks to
* Christian Stehno who wrote and contributed this
* loader.</TD>
* </TR>
* <TR>
* <TD>Pulsar LMTools (.lmts)</TD>
* <TD>LMTools is a set of tools (Windows & Linux) for
* creating lightmaps. Irrlicht can directly read .lmts
* files thanks to<br> the importer created by Jonas
* Petersen. If you are using this loader, please note
* that you can set the path of the textures before
* loading .lmts files. You can do this using
* SceneManager->getParameters()->setAttribute(scene::LMTS_TEXTURE_PATH,
* "path/to/your/textures");
* Notes for<br> this version of the loader:<br>
* - It does not recognise/support user data in the
* *.lmts files.<br>
* - The TGAs generated by LMTools don't work in
* Irrlicht for some reason (the textures are upside
* down). Opening and resaving them in a graphics app
* will solve the problem.</TD>
* </TR>
* <TR>
* <TD>Quake 3 levels (.bsp)</TD>
* <TD>Quake 3 is a popular game by IDSoftware, and .pk3
* files contain .bsp files and textures/lightmaps
* describing huge prelighted levels. Irrlicht can read
* .pk3 and .bsp files directly and thus render Quake 3
* levels directly. Written by Nikolaus Gebhardt
* enhanced by Dean P. Macri with the curved surfaces
* feature. </TD>
* </TR>
* <TR>
* <TD>Quake 2 models (.md2)</TD>
* <TD>Quake 2 models are characters with morph target
* animation. Irrlicht can read, display and animate
* them directly with this importer. </TD>
* </TR>
* <TR>
* <TD>Quake 3 models (.md3)</TD>
* <TD>Quake 3 models are characters with morph target
* animation, they contain mount points for weapons and body
* parts and are typically made of several sections which are
* manually joined together.</TD>
* </TR>
* <TR>
* <TD>Stanford Triangle (.ply)</TD>
* <TD>Invented by Stanford University and known as the native
* format of the infamous "Stanford Bunny" model, this is a
* popular static mesh format used by 3D scanning hardware
* and software. This loader supports extremely large models
* in both ASCII and binary format, but only has rudimentary
* material support in the form of vertex colors and texture
* coordinates.</TD>
* </TR>
* <TR>
* <TD>Stereolithography (.stl)</TD>
* <TD>The STL format is used for rapid prototyping and
* computer-aided manufacturing, thus has no support for
* materials.</TD>
* </TR>
* </TABLE>
*
* To load and display a mesh quickly, just do this:
* \code
* SceneManager->addAnimatedMeshSceneNode(
* SceneManager->getMesh("yourmesh.3ds"));
* \endcode
* If you would like to implement and add your own file format loader to Irrlicht,
* see addExternalMeshLoader().
* \param filename: Filename of the mesh to load.
* \return Null if failed, otherwise pointer to the mesh.
* This pointer should not be dropped. See IReferenceCounted::drop() for more information.
**/
virtual IAnimatedMesh* getMesh(const io::path& filename) = 0;
//! Get pointer to an animateable mesh. Loads the file if not loaded already.
/** Works just as getMesh(const char* filename). If you want to
remove a loaded mesh from the cache again, use removeMesh().
\param file File handle of the mesh to load.
\return NULL if failed and pointer to the mesh if successful.
This pointer should not be dropped. See
IReferenceCounted::drop() for more information. */
virtual IAnimatedMesh* getMesh(io::IReadFile* file) = 0;
//! Get interface to the mesh cache which is shared beween all existing scene managers.
/** With this interface, it is possible to manually add new loaded
meshes (if ISceneManager::getMesh() is not sufficient), to remove them and to iterate
through already loaded meshes. */
virtual IMeshCache* getMeshCache() = 0;
//! Get the video driver.
/** \return Pointer to the video driver.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual video::IVideoDriver* getVideoDriver() = 0;
//! Get the active GUIEnvironment
/** \return Pointer to the GUIEnvironment.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual gui::IGUIEnvironment* getGUIEnvironment() = 0;
//! Get the active FileSystem
/** \return Pointer to the FileSystem.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual io::IFileSystem* getFileSystem() = 0;
//! Adds a volume light scene node.
/** Example Usage:
scene::IVolumeLightSceneNode * n = smgr->addVolumeLightSceneNode(0, -1,
32, 32, //Subdivide U/V
video::SColor(0, 180, 180, 180), //foot color
video::SColor(0, 0, 0, 0) //tail color
);
if (n)
{
n->setScale(core::vector3df(46.0f, 45.0f, 46.0f));
n->getMaterial(0).setTexture(0, smgr->getVideoDriver()->getTexture("lightFalloff.png"));
}
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param subdivU: Number of subdivisions in the U direction.
\param subdivV: Number of subdivisions in the V direction.
\param foot: Color at the foot of the volume light.
\param tail: Color at the tail of the volume light.
\param position: Position of the space relative to its parent.
\param rotation: Initial rotation of the scene node.
\param scale: Initial scale of the scene node.
\return Pointer to the volumeLight if successful, otherwise NULL.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IVolumeLightSceneNode* addVolumeLightSceneNode(ISceneNode* parent=0, s32 id=-1,
const u32 subdivU = 32, const u32 subdivV = 32,
const video::SColor foot = video::SColor(51, 0, 230, 180),
const video::SColor tail = video::SColor(0, 0, 0, 0),
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& rotation = core::vector3df(0,0,0),
const core::vector3df& scale = core::vector3df(1.0f, 1.0f, 1.0f)) = 0;
//! Adds a cube scene node
/** \param size: Size of the cube, uniformly in each dimension.
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: Position of the space relative to its parent
where the scene node will be placed.
\param rotation: Initial rotation of the scene node.
\param scale: Initial scale of the scene node.
\return Pointer to the created test scene node. This
pointer should not be dropped. See IReferenceCounted::drop()
for more information. */
virtual IMeshSceneNode* addCubeSceneNode(f32 size=10.0f, ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& rotation = core::vector3df(0,0,0),
const core::vector3df& scale = core::vector3df(1.0f, 1.0f, 1.0f)) = 0;
//! Adds a sphere scene node of the given radius and detail
/** \param radius: Radius of the sphere.
\param polyCount: The number of vertices in horizontal and
vertical direction. The total polyCount of the sphere is
polyCount*polyCount. This parameter must be less than 256 to
stay within the 16-bit limit of the indices of a meshbuffer.
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: Position of the space relative to its parent
where the scene node will be placed.
\param rotation: Initial rotation of the scene node.
\param scale: Initial scale of the scene node.
\return Pointer to the created test scene node. This
pointer should not be dropped. See IReferenceCounted::drop()
for more information. */
virtual IMeshSceneNode* addSphereSceneNode(f32 radius=5.0f, s32 polyCount=16,
ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& rotation = core::vector3df(0,0,0),
const core::vector3df& scale = core::vector3df(1.0f, 1.0f, 1.0f)) = 0;
//! Adds a scene node for rendering an animated mesh model.
/** \param mesh: Pointer to the loaded animated mesh to be displayed.
\param parent: Parent of the scene node. Can be NULL if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: Position of the space relative to its parent where the
scene node will be placed.
\param rotation: Initial rotation of the scene node.
\param scale: Initial scale of the scene node.
\param alsoAddIfMeshPointerZero: Add the scene node even if a 0 pointer is passed.
\return Pointer to the created scene node.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IAnimatedMeshSceneNode* addAnimatedMeshSceneNode(IAnimatedMesh* mesh,
ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& rotation = core::vector3df(0,0,0),
const core::vector3df& scale = core::vector3df(1.0f, 1.0f, 1.0f),
bool alsoAddIfMeshPointerZero=false) = 0;
//! Adds a scene node for rendering a static mesh.
/** \param mesh: Pointer to the loaded static mesh to be displayed.
\param parent: Parent of the scene node. Can be NULL if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: Position of the space relative to its parent where the
scene node will be placed.
\param rotation: Initial rotation of the scene node.
\param scale: Initial scale of the scene node.
\param alsoAddIfMeshPointerZero: Add the scene node even if a 0 pointer is passed.
\return Pointer to the created scene node.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IMeshSceneNode* addMeshSceneNode(IMesh* mesh, ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& rotation = core::vector3df(0,0,0),
const core::vector3df& scale = core::vector3df(1.0f, 1.0f, 1.0f),
bool alsoAddIfMeshPointerZero=false) = 0;
//! Adds a scene node for rendering an animated water surface mesh.
/** Looks really good when the Material type EMT_TRANSPARENT_REFLECTION
is used.
\param waveHeight: Height of the water waves.
\param waveSpeed: Speed of the water waves.
\param waveLength: Length of a water wave.
\param mesh: Pointer to the loaded static mesh to be displayed with water waves on it.
\param parent: Parent of the scene node. Can be NULL if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: Position of the space relative to its parent where the
scene node will be placed.
\param rotation: Initial rotation of the scene node.
\param scale: Initial scale of the scene node.
\return Pointer to the created scene node.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* addWaterSurfaceSceneNode(IMesh* mesh,
f32 waveHeight=2.0f, f32 waveSpeed=300.0f, f32 waveLength=10.0f,
ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& rotation = core::vector3df(0,0,0),
const core::vector3df& scale = core::vector3df(1.0f, 1.0f, 1.0f)) = 0;
//! Adds a scene node for rendering using an octree to the scene graph.
/** This is a good method for rendering
scenes with lots of geometry. The Octree is built on the fly from the mesh.
\param mesh: The mesh containing all geometry from which the octree will be build.
If this animated mesh has more than one frame in it, the first frame is taken.
\param parent: Parent node of the octree node.
\param id: id of the node. This id can be used to identify the node.
\param minimalPolysPerNode: Specifies the minimal polygons contained in an octree node.
If a node gets less polys than this value it will not be split into
smaller nodes.
\param alsoAddIfMeshPointerZero: Add the scene node even if a 0 pointer is passed.
\return Pointer to the Octree if successful, otherwise 0.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IMeshSceneNode* addOctreeSceneNode(IAnimatedMesh* mesh, ISceneNode* parent=0,
s32 id=-1, s32 minimalPolysPerNode=512, bool alsoAddIfMeshPointerZero=false) = 0;
//! Adds a scene node for rendering using an octree to the scene graph.
/** \deprecated Use addOctreeSceneNode instead. This method may be removed by Irrlicht 1.9. */
_IRR_DEPRECATED_ IMeshSceneNode* addOctTreeSceneNode(IAnimatedMesh* mesh, ISceneNode* parent=0,
s32 id=-1, s32 minimalPolysPerNode=512, bool alsoAddIfMeshPointerZero=false)
{
return addOctreeSceneNode(mesh, parent, id, minimalPolysPerNode, alsoAddIfMeshPointerZero);
}
//! Adds a scene node for rendering using an octree to the scene graph.
/** This is a good method for rendering scenes with lots of
geometry. The Octree is built on the fly from the mesh, much
faster than a bsp tree.
\param mesh: The mesh containing all geometry from which the octree will be build.
\param parent: Parent node of the octree node.
\param id: id of the node. This id can be used to identify the node.
\param minimalPolysPerNode: Specifies the minimal polygons contained in an octree node.
If a node gets less polys than this value it will not be split into
smaller nodes.
\param alsoAddIfMeshPointerZero: Add the scene node even if a 0 pointer is passed.
\return Pointer to the octree if successful, otherwise 0.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IMeshSceneNode* addOctreeSceneNode(IMesh* mesh, ISceneNode* parent=0,
s32 id=-1, s32 minimalPolysPerNode=256, bool alsoAddIfMeshPointerZero=false) = 0;
//! Adds a scene node for rendering using an octree to the scene graph.
/** \deprecated Use addOctreeSceneNode instead. This method may be removed by Irrlicht 1.9. */
_IRR_DEPRECATED_ IMeshSceneNode* addOctTreeSceneNode(IMesh* mesh, ISceneNode* parent=0,
s32 id=-1, s32 minimalPolysPerNode=256, bool alsoAddIfMeshPointerZero=false)
{
return addOctreeSceneNode(mesh, parent, id, minimalPolysPerNode, alsoAddIfMeshPointerZero);
}
//! Adds a camera scene node to the scene graph and sets it as active camera.
/** This camera does not react on user input like for example the one created with
addCameraSceneNodeFPS(). If you want to move or animate it, use animators or the
ISceneNode::setPosition(), ICameraSceneNode::setTarget() etc. methods.
By default, a camera's look at position (set with setTarget()) and its scene node
rotation (set with setRotation()) are independent. If you want to be able to
control the direction that the camera looks by using setRotation() then call
ICameraSceneNode::bindTargetAndRotation(true) on it.
\param position: Position of the space relative to its parent where the camera will be placed.
\param lookat: Position where the camera will look at. Also known as target.
\param parent: Parent scene node of the camera. Can be null. If the parent moves,
the camera will move too.
\param id: id of the camera. This id can be used to identify the camera.
\param makeActive Flag whether this camera should become the active one.
Make sure you always have one active camera.
\return Pointer to interface to camera if successful, otherwise 0.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ICameraSceneNode* addCameraSceneNode(ISceneNode* parent = 0,
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& lookat = core::vector3df(0,0,100),
s32 id=-1, bool makeActive=true) = 0;
//! Adds a maya style user controlled camera scene node to the scene graph.
/** This is a standard camera with an animator that provides mouse control similar
to camera in the 3D Software Maya by Alias Wavefront.
The camera does not react on setPosition anymore after applying this animator. Instead
use setTarget, to fix the target the camera hovers around. And setDistance
to set the current distance from that target, i.e. the radius of the orbit the camera
hovers on.
\param parent: Parent scene node of the camera. Can be null.
\param rotateSpeed: Rotation speed of the camera.
\param zoomSpeed: Zoom speed of the camera.
\param translationSpeed: TranslationSpeed of the camera.
\param id: id of the camera. This id can be used to identify the camera.
\param distance Initial distance of the camera from the object
\param makeActive Flag whether this camera should become the active one.
Make sure you always have one active camera.
\return Returns a pointer to the interface of the camera if successful, otherwise 0.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ICameraSceneNode* addCameraSceneNodeMaya(ISceneNode* parent=0,
f32 rotateSpeed=-1500.f, f32 zoomSpeed=200.f,
f32 translationSpeed=1500.f, s32 id=-1, f32 distance=70.f,
bool makeActive=true) =0;
//! Adds a camera scene node with an animator which provides mouse and keyboard control appropriate for first person shooters (FPS).
/** This FPS camera is intended to provide a demonstration of a
camera that behaves like a typical First Person Shooter. It is
useful for simple demos and prototyping but is not intended to
provide a full solution for a production quality game. It binds
the camera scene node rotation to the look-at target; @see
ICameraSceneNode::bindTargetAndRotation(). With this camera,
you look with the mouse, and move with cursor keys. If you want
to change the key layout, you can specify your own keymap. For
example to make the camera be controlled by the cursor keys AND
the keys W,A,S, and D, do something like this:
\code
SKeyMap keyMap[8];
keyMap[0].Action = EKA_MOVE_FORWARD;
keyMap[0].KeyCode = KEY_UP;
keyMap[1].Action = EKA_MOVE_FORWARD;
keyMap[1].KeyCode = KEY_KEY_W;
keyMap[2].Action = EKA_MOVE_BACKWARD;
keyMap[2].KeyCode = KEY_DOWN;
keyMap[3].Action = EKA_MOVE_BACKWARD;
keyMap[3].KeyCode = KEY_KEY_S;
keyMap[4].Action = EKA_STRAFE_LEFT;
keyMap[4].KeyCode = KEY_LEFT;
keyMap[5].Action = EKA_STRAFE_LEFT;
keyMap[5].KeyCode = KEY_KEY_A;
keyMap[6].Action = EKA_STRAFE_RIGHT;
keyMap[6].KeyCode = KEY_RIGHT;
keyMap[7].Action = EKA_STRAFE_RIGHT;
keyMap[7].KeyCode = KEY_KEY_D;
camera = sceneManager->addCameraSceneNodeFPS(0, 100, 500, -1, keyMap, 8);
\endcode
\param parent: Parent scene node of the camera. Can be null.
\param rotateSpeed: Speed in degrees with which the camera is
rotated. This can be done only with the mouse.
\param moveSpeed: Speed in units per millisecond with which
the camera is moved. Movement is done with the cursor keys.
\param id: id of the camera. This id can be used to identify
the camera.
\param keyMapArray: Optional pointer to an array of a keymap,
specifying what keys should be used to move the camera. If this
is null, the default keymap is used. You can define actions
more than one time in the array, to bind multiple keys to the
same action.
\param keyMapSize: Amount of items in the keymap array.
\param noVerticalMovement: Setting this to true makes the
camera only move within a horizontal plane, and disables
vertical movement as known from most ego shooters. Default is
'false', with which it is possible to fly around in space, if
no gravity is there.
\param jumpSpeed: Speed with which the camera is moved when
jumping.
\param invertMouse: Setting this to true makes the camera look
up when the mouse is moved down and down when the mouse is
moved up, the default is 'false' which means it will follow the
movement of the mouse cursor.
\param makeActive Flag whether this camera should become the active one.
Make sure you always have one active camera.
\return Pointer to the interface of the camera if successful,
otherwise 0. This pointer should not be dropped. See
IReferenceCounted::drop() for more information. */
virtual ICameraSceneNode* addCameraSceneNodeFPS(ISceneNode* parent = 0,
f32 rotateSpeed = 100.0f, f32 moveSpeed = 0.5f, s32 id=-1,
SKeyMap* keyMapArray=0, s32 keyMapSize=0, bool noVerticalMovement=false,
f32 jumpSpeed = 0.f, bool invertMouse=false,
bool makeActive=true) = 0;
//! Adds a dynamic light scene node to the scene graph.
/** The light will cast dynamic light on all
other scene nodes in the scene, which have the material flag video::MTF_LIGHTING
turned on. (This is the default setting in most scene nodes).
\param parent: Parent scene node of the light. Can be null. If the parent moves,
the light will move too.
\param position: Position of the space relative to its parent where the light will be placed.
\param color: Diffuse color of the light. Ambient or Specular colors can be set manually with
the ILightSceneNode::getLightData() method.
\param radius: Radius of the light.
\param id: id of the node. This id can be used to identify the node.
\return Pointer to the interface of the light if successful, otherwise NULL.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ILightSceneNode* addLightSceneNode(ISceneNode* parent = 0,
const core::vector3df& position = core::vector3df(0,0,0),
video::SColorf color = video::SColorf(1.0f, 1.0f, 1.0f),
f32 radius=100.0f, s32 id=-1) = 0;
//! Adds a billboard scene node to the scene graph.
/** A billboard is like a 3d sprite: A 2d element,
which always looks to the camera. It is usually used for things
like explosions, fire, lensflares and things like that.
\param parent Parent scene node of the billboard. Can be null.
If the parent moves, the billboard will move too.
\param size Size of the billboard. This size is 2 dimensional
because a billboard only has width and height.
\param position Position of the space relative to its parent
where the billboard will be placed.
\param id An id of the node. This id can be used to identify
the node.
\param colorTop The color of the vertices at the top of the
billboard (default: white).
\param colorBottom The color of the vertices at the bottom of
the billboard (default: white).
\return Pointer to the billboard if successful, otherwise NULL.
This pointer should not be dropped. See
IReferenceCounted::drop() for more information. */
virtual IBillboardSceneNode* addBillboardSceneNode(ISceneNode* parent = 0,
const core::dimension2d<f32>& size = core::dimension2d<f32>(10.0f, 10.0f),
const core::vector3df& position = core::vector3df(0,0,0), s32 id=-1,
video::SColor colorTop = 0xFFFFFFFF, video::SColor colorBottom = 0xFFFFFFFF) = 0;
//! Adds a skybox scene node to the scene graph.
/** A skybox is a big cube with 6 textures on it and
is drawn around the camera position.
\param top: Texture for the top plane of the box.
\param bottom: Texture for the bottom plane of the box.
\param left: Texture for the left plane of the box.
\param right: Texture for the right plane of the box.
\param front: Texture for the front plane of the box.
\param back: Texture for the back plane of the box.
\param parent: Parent scene node of the skybox. A skybox usually has no parent,
so this should be null. Note: If a parent is set to the skybox, the box will not
change how it is drawn.
\param id: An id of the node. This id can be used to identify the node.
\return Pointer to the sky box if successful, otherwise NULL.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* addSkyBoxSceneNode(video::ITexture* top, video::ITexture* bottom,
video::ITexture* left, video::ITexture* right, video::ITexture* front,
video::ITexture* back, ISceneNode* parent = 0, s32 id=-1) = 0;
//! Adds a skydome scene node to the scene graph.
/** A skydome is a large (half-) sphere with a panoramic texture
on the inside and is drawn around the camera position.
\param texture: Texture for the dome.
\param horiRes: Number of vertices of a horizontal layer of the sphere.
\param vertRes: Number of vertices of a vertical layer of the sphere.
\param texturePercentage: How much of the height of the
texture is used. Should be between 0 and 1.
\param spherePercentage: How much of the sphere is drawn.
Value should be between 0 and 2, where 1 is an exact
half-sphere and 2 is a full sphere.
\param radius The radius of the sphere.
\param parent: Parent scene node of the dome. A dome usually has no parent,
so this should be null. Note: If a parent is set, the dome will not
change how it is drawn.
\param id: An id of the node. This id can be used to identify the node.
\return Pointer to the sky dome if successful, otherwise NULL.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* addSkyDomeSceneNode(video::ITexture* texture,
u32 horiRes=16, u32 vertRes=8,
f32 texturePercentage=0.9, f32 spherePercentage=2.0,f32 radius = 1000.f,
ISceneNode* parent=0, s32 id=-1) = 0;
//! Adds a particle system scene node to the scene graph.
/** \param withDefaultEmitter: Creates a default working point emitter
which emits some particles. Set this to true to see a particle system
in action. If set to false, you'll have to set the emitter you want by
calling IParticleSystemSceneNode::setEmitter().
\param parent: Parent of the scene node. Can be NULL if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: Position of the space relative to its parent where the
scene node will be placed.
\param rotation: Initial rotation of the scene node.
\param scale: Initial scale of the scene node.
\return Pointer to the created scene node.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IParticleSystemSceneNode* addParticleSystemSceneNode(
bool withDefaultEmitter=true, ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0,0,0),
const core::vector3df& rotation = core::vector3df(0,0,0),
const core::vector3df& scale = core::vector3df(1.0f, 1.0f, 1.0f)) = 0;
//! Adds a terrain scene node to the scene graph.
/** This node implements a simple terrain renderer which uses
a technique known as geo mip mapping
for reducing the detail of triangle blocks which are far away.
The code for the TerrainSceneNode is based on the terrain
renderer by Soconne and the GeoMipMapSceneNode developed by
Spintz. They made their code available for Irrlicht and allowed
it to be distributed under this licence. I only modified some
parts. A lot of thanks go to them.
This scene node is capable of loading terrains and updating
the indices at runtime to enable viewing very large terrains
very quickly. It uses a CLOD (Continuous Level of Detail)
algorithm which updates the indices for each patch based on
a LOD (Level of Detail) which is determined based on a patch's
distance from the camera.
The patch size of the terrain must always be a size of 2^N+1,
i.e. 8+1(9), 16+1(17), etc.
The MaxLOD available is directly dependent on the patch size
of the terrain. LOD 0 contains all of the indices to draw all
the triangles at the max detail for a patch. As each LOD goes
up by 1 the step taken, in generating indices increases by
-2^LOD, so for LOD 1, the step taken is 2, for LOD 2, the step
taken is 4, LOD 3 - 8, etc. The step can be no larger than
the size of the patch, so having a LOD of 8, with a patch size
of 17, is asking the algorithm to generate indices every 2^8 (
256 ) vertices, which is not possible with a patch size of 17.
The maximum LOD for a patch size of 17 is 2^4 ( 16 ). So,
with a MaxLOD of 5, you'll have LOD 0 ( full detail ), LOD 1 (
every 2 vertices ), LOD 2 ( every 4 vertices ), LOD 3 ( every
8 vertices ) and LOD 4 ( every 16 vertices ).
\param heightMapFileName: The name of the file on disk, to read vertex data from. This should
be a gray scale bitmap.
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: The absolute position of this node.
\param rotation: The absolute rotation of this node. ( NOT YET IMPLEMENTED )
\param scale: The scale factor for the terrain. If you're
using a heightmap of size 129x129 and would like your terrain
to be 12900x12900 in game units, then use a scale factor of (
core::vector ( 100.0f, 100.0f, 100.0f ). If you use a Y
scaling factor of 0.0f, then your terrain will be flat.
\param vertexColor: The default color of all the vertices. If no texture is associated
with the scene node, then all vertices will be this color. Defaults to white.
\param maxLOD: The maximum LOD (level of detail) for the node. Only change if you
know what you are doing, this might lead to strange behaviour.
\param patchSize: patch size of the terrain. Only change if you
know what you are doing, this might lead to strange behaviour.
\param smoothFactor: The number of times the vertices are smoothed.
\param addAlsoIfHeightmapEmpty: Add terrain node even with empty heightmap.
\return Pointer to the created scene node. Can be null
if the terrain could not be created, for example because the
heightmap could not be loaded. The returned pointer should
not be dropped. See IReferenceCounted::drop() for more
information. */
virtual ITerrainSceneNode* addTerrainSceneNode(
const io::path& heightMapFileName,
ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0.0f,0.0f,0.0f),
const core::vector3df& rotation = core::vector3df(0.0f,0.0f,0.0f),
const core::vector3df& scale = core::vector3df(1.0f,1.0f,1.0f),
video::SColor vertexColor = video::SColor(255,255,255,255),
s32 maxLOD=5, E_TERRAIN_PATCH_SIZE patchSize=ETPS_17, s32 smoothFactor=0,
bool addAlsoIfHeightmapEmpty = false) = 0;
//! Adds a terrain scene node to the scene graph.
/** Just like the other addTerrainSceneNode() method, but takes an IReadFile
pointer as parameter for the heightmap. For more information take a look
at the other function.
\param heightMapFile: The file handle to read vertex data from. This should
be a gray scale bitmap.
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\param position: The absolute position of this node.
\param rotation: The absolute rotation of this node. ( NOT YET IMPLEMENTED )
\param scale: The scale factor for the terrain. If you're
using a heightmap of size 129x129 and would like your terrain
to be 12900x12900 in game units, then use a scale factor of (
core::vector ( 100.0f, 100.0f, 100.0f ). If you use a Y
scaling factor of 0.0f, then your terrain will be flat.
\param vertexColor: The default color of all the vertices. If no texture is associated
with the scene node, then all vertices will be this color. Defaults to white.
\param maxLOD: The maximum LOD (level of detail) for the node. Only change if you
know what you are doing, this might lead to strange behaviour.
\param patchSize: patch size of the terrain. Only change if you
know what you are doing, this might lead to strange behaviour.
\param smoothFactor: The number of times the vertices are smoothed.
\param addAlsoIfHeightmapEmpty: Add terrain node even with empty heightmap.
\return Pointer to the created scene node. Can be null
if the terrain could not be created, for example because the
heightmap could not be loaded. The returned pointer should
not be dropped. See IReferenceCounted::drop() for more
information. */
virtual ITerrainSceneNode* addTerrainSceneNode(
io::IReadFile* heightMapFile,
ISceneNode* parent=0, s32 id=-1,
const core::vector3df& position = core::vector3df(0.0f,0.0f,0.0f),
const core::vector3df& rotation = core::vector3df(0.0f,0.0f,0.0f),
const core::vector3df& scale = core::vector3df(1.0f,1.0f,1.0f),
video::SColor vertexColor = video::SColor(255,255,255,255),
s32 maxLOD=5, E_TERRAIN_PATCH_SIZE patchSize=ETPS_17, s32 smoothFactor=0,
bool addAlsoIfHeightmapEmpty = false) = 0;
//! Adds a quake3 scene node to the scene graph.
/** A Quake3 Scene renders multiple meshes for a specific HighLanguage Shader (Quake3 Style )
\param meshBuffer: Mesh buffer containing the geometry to render.
\param shader: The quake3 shader to apply to the node.
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\return Pointer to the quake3 scene node if successful, otherwise NULL.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IMeshSceneNode* addQuake3SceneNode(const IMeshBuffer* meshBuffer, const quake3::IShader * shader,
ISceneNode* parent=0, s32 id=-1
) = 0;
//! Adds an empty scene node to the scene graph.
/** Can be used for doing advanced transformations
or structuring the scene graph.
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\return Pointer to the created scene node.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* addEmptySceneNode(ISceneNode* parent=0, s32 id=-1) = 0;
//! Adds a dummy transformation scene node to the scene graph.
/** This scene node does not render itself, and does not respond to set/getPosition,
set/getRotation and set/getScale. It's just a simple scene node that takes a
matrix as relative transformation, making it possible to insert any transformation
anywhere into the scene graph.
\param parent: Parent of the scene node. Can be 0 if no parent.
\param id: Id of the node. This id can be used to identify the scene node.
\return Pointer to the created scene node.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IDummyTransformationSceneNode* addDummyTransformationSceneNode(
ISceneNode* parent=0, s32 id=-1) = 0;
//! Adds a text scene node, which is able to display 2d text at a position in three dimensional space
/** \param font: Font to use for the text.
\param text: Text to display.
\param color: Color of the text. Defaults to video::SColor(100,255,255,255).
\param parent: Parent of the scene node. Can be 0 if no parent.
\param position: Position of the space relative to its parent where the
text will be placed.
\param id: Id of the node. This id can be used to identify the scene node.
\return Pointer to the created text scene node. */
virtual ITextSceneNode* addTextSceneNode(gui::IGUIFont* font, const wchar_t* text,
video::SColor color=video::SColor(100,255,255,255),
ISceneNode* parent = 0, const core::vector3df& position = core::vector3df(0,0,0),
s32 id=-1) = 0;
//! Adds a text scene node, which uses billboards. The node, and the text on it, will scale with distance.
/**
\param font The font to use on the billboard. Pass 0 to use the GUI environment's default font.
\param text The text to display on the billboard.
\param parent The billboard's parent. Pass 0 to use the root scene node.
\param size The billboard's width and height.
\param position The billboard's position relative to its parent.
\param id: An id of the node. This id can be used to identify the node.
\param colorTop: The color of the vertices at the top of the billboard (default: white).
\param colorBottom: The color of the vertices at the bottom of the billboard (default: white).
\return Pointer to the billboard if successful, otherwise NULL.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IBillboardTextSceneNode* addBillboardTextSceneNode( gui::IGUIFont* font, const wchar_t* text,
ISceneNode* parent = 0,
const core::dimension2d<f32>& size = core::dimension2d<f32>(10.0f, 10.0f),
const core::vector3df& position = core::vector3df(0,0,0), s32 id=-1,
video::SColor colorTop = 0xFFFFFFFF, video::SColor colorBottom = 0xFFFFFFFF) = 0;
//! Adds a Hill Plane mesh to the mesh pool.
/** The mesh is generated on the fly
and looks like a plane with some hills on it. It is used mostly for quick
tests of the engine only. You can specify how many hills there should be
on the plane and how high they should be. Also you must specify a name for
the mesh, because the mesh is added to the mesh pool, and can be retrieved
again using ISceneManager::getMesh() with the name as parameter.
\param name: The name of this mesh which must be specified in order
to be able to retrieve the mesh later with ISceneManager::getMesh().
\param tileSize: Size of a tile of the mesh. (10.0f, 10.0f) would be a
good value to start, for example.
\param tileCount: Specifies how many tiles there will be. If you specify
for example that a tile has the size (10.0f, 10.0f) and the tileCount is
(10,10), then you get a field of 100 tiles which has the dimension 100.0fx100.0f.
\param material: Material of the hill mesh.
\param hillHeight: Height of the hills. If you specify a negative value
you will get holes instead of hills. If the height is 0, no hills will be
created.
\param countHills: Amount of hills on the plane. There will be countHills.X
hills along the X axis and countHills.Y along the Y axis. So in total there
will be countHills.X * countHills.Y hills.
\param textureRepeatCount: Defines how often the texture will be repeated in
x and y direction.
\return Null if the creation failed. The reason could be that you
specified some invalid parameters or that a mesh with that name already
exists. If successful, a pointer to the mesh is returned.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IAnimatedMesh* addHillPlaneMesh(const io::path& name,
const core::dimension2d<f32>& tileSize, const core::dimension2d<u32>& tileCount,
video::SMaterial* material = 0, f32 hillHeight = 0.0f,
const core::dimension2d<f32>& countHills = core::dimension2d<f32>(0.0f, 0.0f),
const core::dimension2d<f32>& textureRepeatCount = core::dimension2d<f32>(1.0f, 1.0f)) = 0;
//! Adds a static terrain mesh to the mesh pool.
/** The mesh is generated on the fly
from a texture file and a height map file. Both files may be huge
(8000x8000 pixels would be no problem) because the generator splits the
files into smaller textures if necessary.
You must specify a name for the mesh, because the mesh is added to the mesh pool,
and can be retrieved again using ISceneManager::getMesh() with the name as parameter.
\param meshname: The name of this mesh which must be specified in order
to be able to retrieve the mesh later with ISceneManager::getMesh().
\param texture: Texture for the terrain. Please note that this is not a
hardware texture as usual (ITexture), but an IImage software texture.
You can load this texture with IVideoDriver::createImageFromFile().
\param heightmap: A grayscaled heightmap image. Like the texture,
it can be created with IVideoDriver::createImageFromFile(). The amount
of triangles created depends on the size of this texture, so use a small
heightmap to increase rendering speed.
\param stretchSize: Parameter defining how big a pixel on the heightmap is.
\param maxHeight: Defines how high a white pixel on the heightmap is.
\param defaultVertexBlockSize: Defines the initial dimension between vertices.
\return Null if the creation failed. The reason could be that you
specified some invalid parameters, that a mesh with that name already
exists, or that a texture could not be found. If successful, a pointer to the mesh is returned.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IAnimatedMesh* addTerrainMesh(const io::path& meshname,
video::IImage* texture, video::IImage* heightmap,
const core::dimension2d<f32>& stretchSize = core::dimension2d<f32>(10.0f,10.0f),
f32 maxHeight=200.0f,
const core::dimension2d<u32>& defaultVertexBlockSize = core::dimension2d<u32>(64,64)) = 0;
//! add a static arrow mesh to the meshpool
/** \param name Name of the mesh
\param vtxColorCylinder color of the cylinder
\param vtxColorCone color of the cone
\param tesselationCylinder Number of quads the cylinder side consists of
\param tesselationCone Number of triangles the cone's roof consists of
\param height Total height of the arrow
\param cylinderHeight Total height of the cylinder, should be lesser than total height
\param widthCylinder Diameter of the cylinder
\param widthCone Diameter of the cone's base, should be not smaller than the cylinder's diameter
\return Pointer to the arrow mesh if successful, otherwise 0.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IAnimatedMesh* addArrowMesh(const io::path& name,
video::SColor vtxColorCylinder=0xFFFFFFFF,
video::SColor vtxColorCone=0xFFFFFFFF,
u32 tesselationCylinder=4, u32 tesselationCone=8,
f32 height=1.f, f32 cylinderHeight=0.6f,
f32 widthCylinder=0.05f, f32 widthCone=0.3f) = 0;
//! add a static sphere mesh to the meshpool
/** \param name Name of the mesh
\param radius Radius of the sphere
\param polyCountX Number of quads used for the horizontal tiling
\param polyCountY Number of quads used for the vertical tiling
\return Pointer to the sphere mesh if successful, otherwise 0.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IAnimatedMesh* addSphereMesh(const io::path& name,
f32 radius=5.f, u32 polyCountX = 16,
u32 polyCountY = 16) = 0;
//! Add a volume light mesh to the meshpool
/** \param name Name of the mesh
\param SubdivideU Horizontal subdivision count
\param SubdivideV Vertical subdivision count
\param FootColor Color of the bottom of the light
\param TailColor Color of the top of the light
\return Pointer to the volume light mesh if successful, otherwise 0.
This pointer should not be dropped. See IReferenceCounted::drop() for more information.
*/
virtual IAnimatedMesh* addVolumeLightMesh(const io::path& name,
const u32 SubdivideU = 32, const u32 SubdivideV = 32,
const video::SColor FootColor = video::SColor(51, 0, 230, 180),
const video::SColor TailColor = video::SColor(0, 0, 0, 0)) = 0;
//! Gets the root scene node.
/** This is the scene node which is parent
of all scene nodes. The root scene node is a special scene node which
only exists to manage all scene nodes. It will not be rendered and cannot
be removed from the scene.
\return Pointer to the root scene node.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* getRootSceneNode() = 0;
//! Get the first scene node with the specified id.
/** \param id: The id to search for
\param start: Scene node to start from. All children of this scene
node are searched. If null is specified, the root scene node is
taken.
\return Pointer to the first scene node with this id,
and null if no scene node could be found.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* getSceneNodeFromId(s32 id, ISceneNode* start=0) = 0;
//! Get the first scene node with the specified name.
/** \param name: The name to search for
\param start: Scene node to start from. All children of this scene
node are searched. If null is specified, the root scene node is
taken.
\return Pointer to the first scene node with this name,
and null if no scene node could be found.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* getSceneNodeFromName(const c8* name, ISceneNode* start=0) = 0;
//! Get the first scene node with the specified type.
/** \param type: The type to search for
\param start: Scene node to start from. All children of this scene
node are searched. If null is specified, the root scene node is
taken.
\return Pointer to the first scene node with this type,
and null if no scene node could be found.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* getSceneNodeFromType(scene::ESCENE_NODE_TYPE type, ISceneNode* start=0) = 0;
//! Get scene nodes by type.
/** \param type: Type of scene node to find (ESNT_ANY will return all child nodes).
\param outNodes: array to be filled with results.
\param start: Scene node to start from. All children of this scene
node are searched. If null is specified, the root scene node is
taken. */
virtual void getSceneNodesFromType(ESCENE_NODE_TYPE type,
core::array<scene::ISceneNode*>& outNodes,
ISceneNode* start=0) = 0;
//! Get the current active camera.
/** \return The active camera is returned. Note that this can
be NULL, if there was no camera created yet.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ICameraSceneNode* getActiveCamera() const =0;
//! Sets the currently active camera.
/** The previous active camera will be deactivated.
\param camera: The new camera which should be active. */
virtual void setActiveCamera(ICameraSceneNode* camera) = 0;
//! Sets the color of stencil buffers shadows drawn by the scene manager.
virtual void setShadowColor(video::SColor color = video::SColor(150,0,0,0)) = 0;
//! Get the current color of shadows.
virtual video::SColor getShadowColor() const = 0;
//! Registers a node for rendering it at a specific time.
/** This method should only be used by SceneNodes when they get a
ISceneNode::OnRegisterSceneNode() call.
\param node: Node to register for drawing. Usually scene nodes would set 'this'
as parameter here because they want to be drawn.
\param pass: Specifies when the node wants to be drawn in relation to the other nodes.
For example, if the node is a shadow, it usually wants to be drawn after all other nodes
and will use ESNRP_SHADOW for this. See scene::E_SCENE_NODE_RENDER_PASS for details.
\return scene will be rendered ( passed culling ) */
virtual u32 registerNodeForRendering(ISceneNode* node,
E_SCENE_NODE_RENDER_PASS pass = ESNRP_AUTOMATIC) = 0;
//! Draws all the scene nodes.
/** This can only be invoked between
IVideoDriver::beginScene() and IVideoDriver::endScene(). Please note that
the scene is not only drawn when calling this, but also animated
by existing scene node animators, culling of scene nodes is done, etc. */
virtual void drawAll() = 0;
//! Creates a rotation animator, which rotates the attached scene node around itself.
/** \param rotationSpeed Specifies the speed of the animation in degree per 10 milliseconds.
\return The animator. Attach it to a scene node with ISceneNode::addAnimator()
and the animator will animate it.
If you no longer need the animator, you should call ISceneNodeAnimator::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimator* createRotationAnimator(const core::vector3df& rotationSpeed) = 0;
//! Creates a fly circle animator, which lets the attached scene node fly around a center.
/** \param center: Center of the circle.
\param radius: Radius of the circle.
\param speed: The orbital speed, in radians per millisecond.
\param direction: Specifies the upvector used for alignment of the mesh.
\param startPosition: The position on the circle where the animator will
begin. Value is in multiples of a circle, i.e. 0.5 is half way around. (phase)
\param radiusEllipsoid: if radiusEllipsoid != 0, radius and radiusEllipsoid
form an ellipsoid instead of a circle.
\return The animator. Attach it to a scene node with ISceneNode::addAnimator()
and the animator will animate it.
If you no longer need the animator, you should call ISceneNodeAnimator::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimator* createFlyCircleAnimator(
const core::vector3df& center=core::vector3df(0.f,0.f,0.f),
f32 radius=100.f, f32 speed=0.001f,
const core::vector3df& direction=core::vector3df(0.f, 1.f, 0.f),
f32 startPosition = 0.f,
f32 radiusEllipsoid = 0.f) = 0;
//! Creates a fly straight animator, which lets the attached scene node fly or move along a line between two points.
/** \param startPoint: Start point of the line.
\param endPoint: End point of the line.
\param timeForWay: Time in milliseconds how long the node should need to
move from the start point to the end point.
\param loop: If set to false, the node stops when the end point is reached.
If loop is true, the node begins again at the start.
\param pingpong Flag to set whether the animator should fly
back from end to start again.
\return The animator. Attach it to a scene node with ISceneNode::addAnimator()
and the animator will animate it.
If you no longer need the animator, you should call ISceneNodeAnimator::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimator* createFlyStraightAnimator(const core::vector3df& startPoint,
const core::vector3df& endPoint, u32 timeForWay, bool loop=false, bool pingpong = false) = 0;
//! Creates a texture animator, which switches the textures of the target scene node based on a list of textures.
/** \param textures: List of textures to use.
\param timePerFrame: Time in milliseconds, how long any texture in the list
should be visible.
\param loop: If set to false, the last texture remains set, and the animation
stops. If set to true, the animation restarts with the first texture.
\return The animator. Attach it to a scene node with ISceneNode::addAnimator()
and the animator will animate it.
If you no longer need the animator, you should call ISceneNodeAnimator::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimator* createTextureAnimator(const core::array<video::ITexture*>& textures,
s32 timePerFrame, bool loop=true) = 0;
//! Creates a scene node animator, which deletes the scene node after some time automatically.
/** \param timeMs: Time in milliseconds, after when the node will be deleted.
\return The animator. Attach it to a scene node with ISceneNode::addAnimator()
and the animator will animate it.
If you no longer need the animator, you should call ISceneNodeAnimator::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimator* createDeleteAnimator(u32 timeMs) = 0;
//! Creates a special scene node animator for doing automatic collision detection and response.
/** See ISceneNodeAnimatorCollisionResponse for details.
\param world: Triangle selector holding all triangles of the world with which
the scene node may collide. You can create a triangle selector with
ISceneManager::createTriangleSelector();
\param sceneNode: SceneNode which should be manipulated. After you added this animator
to the scene node, the scene node will not be able to move through walls and is
affected by gravity. If you need to teleport the scene node to a new position without
it being affected by the collision geometry, then call sceneNode->setPosition(); then
animator->setTargetNode(sceneNode);
\param ellipsoidRadius: Radius of the ellipsoid with which collision detection and
response is done. If you have got a scene node, and you are unsure about
how big the radius should be, you could use the following code to determine
it:
\code
const core::aabbox3d<f32>& box = yourSceneNode->getBoundingBox();
core::vector3df radius = box.MaxEdge - box.getCenter();
\endcode
\param gravityPerSecond: Sets the gravity of the environment, as an acceleration in
units per second per second. If your units are equivalent to metres, then
core::vector3df(0,-10.0f,0) would give an approximately realistic gravity.
You can disable gravity by setting it to core::vector3df(0,0,0).
\param ellipsoidTranslation: By default, the ellipsoid for collision detection is created around
the center of the scene node, which means that the ellipsoid surrounds
it completely. If this is not what you want, you may specify a translation
for the ellipsoid.
\param slidingValue: DOCUMENTATION NEEDED.
\return The animator. Attach it to a scene node with ISceneNode::addAnimator()
and the animator will cause it to do collision detection and response.
If you no longer need the animator, you should call ISceneNodeAnimator::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimatorCollisionResponse* createCollisionResponseAnimator(
ITriangleSelector* world, ISceneNode* sceneNode,
const core::vector3df& ellipsoidRadius = core::vector3df(30,60,30),
const core::vector3df& gravityPerSecond = core::vector3df(0,-10.0f,0),
const core::vector3df& ellipsoidTranslation = core::vector3df(0,0,0),
f32 slidingValue = 0.0005f) = 0;
//! Creates a follow spline animator.
/** The animator modifies the position of
the attached scene node to make it follow a hermite spline.
It uses a subset of hermite splines: either cardinal splines
(tightness != 0.5) or catmull-rom-splines (tightness == 0.5).
The animator moves from one control point to the next in
1/speed seconds. This code was sent in by Matthias Gall.
If you no longer need the animator, you should call ISceneNodeAnimator::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimator* createFollowSplineAnimator(s32 startTime,
const core::array< core::vector3df >& points,
f32 speed = 1.0f, f32 tightness = 0.5f, bool loop=true, bool pingpong=false) = 0;
//! Creates a simple ITriangleSelector, based on a mesh.
/** Triangle selectors
can be used for doing collision detection. Don't use this selector
for a huge amount of triangles like in Quake3 maps.
Instead, use for example ISceneManager::createOctreeTriangleSelector().
Please note that the created triangle selector is not automatically attached
to the scene node. You will have to call ISceneNode::setTriangleSelector()
for this. To create and attach a triangle selector is done like this:
\code
ITriangleSelector* s = sceneManager->createTriangleSelector(yourMesh,
yourSceneNode);
yourSceneNode->setTriangleSelector(s);
s->drop();
\endcode
\param mesh: Mesh of which the triangles are taken.
\param node: Scene node of which visibility and transformation is used.
\return The selector, or null if not successful.
If you no longer need the selector, you should call ITriangleSelector::drop().
See IReferenceCounted::drop() for more information. */
virtual ITriangleSelector* createTriangleSelector(IMesh* mesh, ISceneNode* node) = 0;
//! Creates a simple ITriangleSelector, based on an animated mesh scene node.
/** Details of the mesh associated with the node will be extracted internally.
Call ITriangleSelector::update() to have the triangle selector updated based
on the current frame of the animated mesh scene node.
\param node The animated mesh scene node from which to build the selector
*/
virtual ITriangleSelector* createTriangleSelector(IAnimatedMeshSceneNode* node) = 0;
//! Creates a simple dynamic ITriangleSelector, based on a axis aligned bounding box.
/** Triangle selectors
can be used for doing collision detection. Every time when triangles are
queried, the triangle selector gets the bounding box of the scene node,
and creates new triangles. In this way, it works well with animated scene nodes.
\param node: Scene node of which the bounding box, visibility and transformation is used.
\return The selector, or null if not successful.
If you no longer need the selector, you should call ITriangleSelector::drop().
See IReferenceCounted::drop() for more information. */
virtual ITriangleSelector* createTriangleSelectorFromBoundingBox(ISceneNode* node) = 0;
//! Creates a Triangle Selector, optimized by an octree.
/** Triangle selectors
can be used for doing collision detection. This triangle selector is
optimized for huge amounts of triangles, it organizes them in an octree.
Please note that the created triangle selector is not automatically attached
to the scene node. You will have to call ISceneNode::setTriangleSelector()
for this. To create and attach a triangle selector is done like this:
\code
ITriangleSelector* s = sceneManager->createOctreeTriangleSelector(yourMesh,
yourSceneNode);
yourSceneNode->setTriangleSelector(s);
s->drop();
\endcode
For more informations and examples on this, take a look at the collision
tutorial in the SDK.
\param mesh: Mesh of which the triangles are taken.
\param node: Scene node of which visibility and transformation is used.
\param minimalPolysPerNode: Specifies the minimal number of polygons contained in an octree node.
If a node gets fewer polys than this value, it will not be split into
smaller nodes.
\return The selector, or null if not successful.
If you no longer need the selector, you should call ITriangleSelector::drop().
See IReferenceCounted::drop() for more information. */
virtual ITriangleSelector* createOctreeTriangleSelector(IMesh* mesh,
ISceneNode* node, s32 minimalPolysPerNode=32) = 0;
//! Creates a Triangle Selector, optimized by an octree.
/** \deprecated Use createOctreeTriangleSelector instead. This method may be removed by Irrlicht 1.9. */
_IRR_DEPRECATED_ ITriangleSelector* createOctTreeTriangleSelector(IMesh* mesh,
ISceneNode* node, s32 minimalPolysPerNode=32)
{
// Backward-compatible alias: forwards to the correctly-spelled factory method.
return createOctreeTriangleSelector(mesh, node, minimalPolysPerNode);
}
//! Creates a meta triangle selector.
/** A meta triangle selector is nothing more than a
collection of one or more triangle selectors providing together
the interface of one triangle selector. In this way,
collision tests can be done with different triangle soups in one pass.
\return The selector, or null if not successful.
If you no longer need the selector, you should call ITriangleSelector::drop().
See IReferenceCounted::drop() for more information. */
virtual IMetaTriangleSelector* createMetaTriangleSelector() = 0;
//! Creates a triangle selector which can select triangles from a terrain scene node.
/** \param node: Pointer to the created terrain scene node
\param LOD: Level of detail, 0 for highest detail.
\return The selector, or null if not successful.
If you no longer need the selector, you should call ITriangleSelector::drop().
See IReferenceCounted::drop() for more information. */
virtual ITriangleSelector* createTerrainTriangleSelector(
ITerrainSceneNode* node, s32 LOD=0) = 0;
//! Adds an external mesh loader for extending the engine with new file formats.
/** If you want the engine to be extended with
file formats it currently is not able to load (e.g. .cob), just implement
the IMeshLoader interface in your loading class and add it with this method.
Using this method it is also possible to override built-in mesh loaders with
newer or updated versions without the need to recompile the engine.
\param externalLoader: Implementation of a new mesh loader. */
virtual void addExternalMeshLoader(IMeshLoader* externalLoader) = 0;
//! Returns the number of mesh loaders supported by Irrlicht at this time
virtual u32 getMeshLoaderCount() const = 0;
//! Retrieve the given mesh loader
/** \param index The index of the loader to retrieve. This parameter is an 0-based
array index.
\return A pointer to the specified loader, 0 if the index is incorrect. */
virtual IMeshLoader* getMeshLoader(u32 index) const = 0;
//! Adds an external scene loader for extending the engine with new file formats.
/** If you want the engine to be extended with
file formats it currently is not able to load (e.g. .vrml), just implement
the ISceneLoader interface in your loading class and add it with this method.
Using this method it is also possible to override the built-in scene loaders
with newer or updated versions without the need to recompile the engine.
\param externalLoader: Implementation of a new mesh loader. */
virtual void addExternalSceneLoader(ISceneLoader* externalLoader) = 0;
//! Returns the number of scene loaders supported by Irrlicht at this time
virtual u32 getSceneLoaderCount() const = 0;
//! Retrieve the given scene loader
/** \param index The index of the loader to retrieve. This parameter is an 0-based
array index.
\return A pointer to the specified loader, 0 if the index is incorrect. */
virtual ISceneLoader* getSceneLoader(u32 index) const = 0;
//! Get pointer to the scene collision manager.
/** \return Pointer to the collision manager
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneCollisionManager* getSceneCollisionManager() = 0;
//! Get pointer to the mesh manipulator.
/** \return Pointer to the mesh manipulator
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual IMeshManipulator* getMeshManipulator() = 0;
//! Adds a scene node to the deletion queue.
/** The scene node is immediately
deleted when it's secure. Which means when the scene node does not
execute animators and things like that. This method is for example
used for deleting scene nodes by their scene node animators. In
most other cases, a ISceneNode::remove() call is enough, using this
deletion queue is not necessary.
See ISceneManager::createDeleteAnimator() for details.
\param node: Node to delete. */
virtual void addToDeletionQueue(ISceneNode* node) = 0;
//! Posts an input event to the environment.
/** Usually you do not have to
use this method, it is used by the internal engine. */
virtual bool postEventFromUser(const SEvent& event) = 0;
//! Clears the whole scene.
/** All scene nodes are removed. */
virtual void clear() = 0;
//! Get interface to the parameters set in this scene.
/** String parameters can be used by plugins and mesh loaders.
For example the CMS and LMTS loader want a parameter named 'CSM_TexturePath'
and 'LMTS_TexturePath' set to the path were attached textures can be found. See
CSM_TEXTURE_PATH, LMTS_TEXTURE_PATH, MY3D_TEXTURE_PATH,
COLLADA_CREATE_SCENE_INSTANCES, DMF_TEXTURE_PATH and DMF_USE_MATERIALS_DIRS*/
virtual io::IAttributes* getParameters() = 0;
//! Get current render pass.
/** All scene nodes are being rendered in a specific order.
First lights, cameras, sky boxes, solid geometry, and then transparent
stuff. During the rendering process, scene nodes may want to know what the scene
manager is rendering currently, because for example they registered for rendering
twice, once for transparent geometry and once for solid. When knowing what rendering
pass currently is active they can render the correct part of their geometry. */
virtual E_SCENE_NODE_RENDER_PASS getSceneNodeRenderPass() const = 0;
//! Get the default scene node factory which can create all built in scene nodes
/** \return Pointer to the default scene node factory
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNodeFactory* getDefaultSceneNodeFactory() = 0;
//! Adds a scene node factory to the scene manager.
/** Use this to extend the scene manager with new scene node types which it should be
able to create automatically, for example when loading data from xml files. */
virtual void registerSceneNodeFactory(ISceneNodeFactory* factoryToAdd) = 0;
//! Get amount of registered scene node factories.
virtual u32 getRegisteredSceneNodeFactoryCount() const = 0;
//! Get a scene node factory by index
/** \return Pointer to the requested scene node factory, or 0 if it does not exist.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNodeFactory* getSceneNodeFactory(u32 index) = 0;
//! Get the default scene node animator factory which can create all built-in scene node animators
/** \return Pointer to the default scene node animator factory
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimatorFactory* getDefaultSceneNodeAnimatorFactory() = 0;
//! Adds a scene node animator factory to the scene manager.
/** Use this to extend the scene manager with new scene node animator types which it should be
able to create automatically, for example when loading data from xml files. */
virtual void registerSceneNodeAnimatorFactory(ISceneNodeAnimatorFactory* factoryToAdd) = 0;
//! Get amount of registered scene node animator factories.
virtual u32 getRegisteredSceneNodeAnimatorFactoryCount() const = 0;
//! Get scene node animator factory by index
/** \return Pointer to the requested scene node animator factory, or 0 if it does not exist.
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNodeAnimatorFactory* getSceneNodeAnimatorFactory(u32 index) = 0;
//! Get typename from a scene node type or null if not found
virtual const c8* getSceneNodeTypeName(ESCENE_NODE_TYPE type) = 0;
//! Returns a typename from a scene node animator type or null if not found
virtual const c8* getAnimatorTypeName(ESCENE_NODE_ANIMATOR_TYPE type) = 0;
//! Adds a scene node to the scene by name
/** \return Pointer to the scene node added by a factory
This pointer should not be dropped. See IReferenceCounted::drop() for more information. */
virtual ISceneNode* addSceneNode(const char* sceneNodeTypeName, ISceneNode* parent=0) = 0;
//! creates a scene node animator based on its type name
/** \param typeName: Type of the scene node animator to add.
\param target: Target scene node of the new animator.
\return Returns pointer to the new scene node animator or null if not successful. You need to
drop this pointer after calling this, see IReferenceCounted::drop() for details. */
virtual ISceneNodeAnimator* createSceneNodeAnimator(const char* typeName, ISceneNode* target=0) = 0;
//! Creates a new scene manager.
/** This can be used to easily draw and/or store two
independent scenes at the same time. The mesh cache will be
shared between all existing scene managers, which means if you
load a mesh in the original scene manager using for example
getMesh(), the mesh will be available in all other scene
managers too, without loading.
The original/main scene manager will still be there and
accessible via IrrlichtDevice::getSceneManager(). If you need
input event in this new scene manager, for example for FPS
cameras, you'll need to forward input to this manually: Just
implement an IEventReceiver and call
yourNewSceneManager->postEventFromUser(), and return true so
that the original scene manager doesn't get the event.
Otherwise, all input will go to the main scene manager
automatically.
If you no longer need the new scene manager, you should call
ISceneManager::drop().
See IReferenceCounted::drop() for more information. */
virtual ISceneManager* createNewSceneManager(bool cloneContent=false) = 0;
//! Saves the current scene into a file.
/** Scene nodes with the option isDebugObject set to true are not being saved.
The scene is usually written to an .irr file, an xml based format. .irr files can
be edited with the Irrlicht Engine Editor, irrEdit (http://irredit.irrlicht3d.org).
To load .irr files again, see ISceneManager::loadScene().
\param filename Name of the file.
\param userDataSerializer If you want to save some user data for every scene node into the
file, implement the ISceneUserDataSerializer interface and provide it as parameter here.
Otherwise, simply specify 0 as this parameter.
\param node Node which is taken as the top node of the scene. This node and all of its
descendants are saved into the scene file. Pass 0 or the scene manager to save the full
scene (which is also the default).
\return True if successful. */
virtual bool saveScene(const io::path& filename, ISceneUserDataSerializer* userDataSerializer=0, ISceneNode* node=0) = 0;
//! Saves the current scene into a file.
/** Scene nodes with the option isDebugObject set to true are not being saved.
The scene is usually written to an .irr file, an xml based format. .irr files can
be edited with the Irrlicht Engine Editor, irrEdit (http://irredit.irrlicht3d.org).
To load .irr files again, see ISceneManager::loadScene().
\param file: File where the scene is saved into.
\param userDataSerializer: If you want to save some user data for every scene node into the
file, implement the ISceneUserDataSerializer interface and provide it as parameter here.
Otherwise, simply specify 0 as this parameter.
\param node Node which is taken as the top node of the scene. This node and all of its
descendants are saved into the scene file. Pass 0 or the scene manager to save the full
scene (which is also the default).
\return True if successful. */
virtual bool saveScene(io::IWriteFile* file, ISceneUserDataSerializer* userDataSerializer=0, ISceneNode* node=0) = 0;
//! Loads a scene. Note that the current scene is not cleared before.
/** The scene is usually loaded from an .irr file, an xml based format, but other scene formats
can be added to the engine via ISceneManager::addExternalSceneLoader. .irr files can
be edited with the Irrlicht Engine Editor, irrEdit (http://irredit.irrlicht3d.org) or
saved directly by the engine using ISceneManager::saveScene().
\param filename: Name of the file.
\param userDataSerializer: If you want to load user data
possibly saved in that file for some scene nodes in the file,
implement the ISceneUserDataSerializer interface and provide it
as parameter here. Otherwise, simply specify 0 as this
parameter.
\param rootNode Node which is taken as the root node of the scene. Pass 0 to add the scene
directly to the scene manager (which is also the default).
\return True if successful. */
virtual bool loadScene(const io::path& filename, ISceneUserDataSerializer* userDataSerializer=0, ISceneNode* rootNode=0) = 0;
//! Loads a scene. Note that the current scene is not cleared before.
/** The scene is usually loaded from an .irr file, an xml based format, but other scene formats
can be added to the engine via ISceneManager::addExternalSceneLoader. .irr files can
Be edited with the Irrlicht Engine Editor, irrEdit (http://irredit.irrlicht3d.org) or
saved directly by the engine using ISceneManager::saveScene().
\param file: File from which the scene is loaded.
\param userDataSerializer: If you want to load user data
possibly saved in that file for some scene nodes in the file,
implement the ISceneUserDataSerializer interface and provide it
as parameter here. Otherwise, simply specify 0 as this
parameter.
\param rootNode Node which is taken as the root node of the scene. Pass 0 to add the scene
directly to the scene manager (which is also the default).
\return True if successful. */
virtual bool loadScene(io::IReadFile* file, ISceneUserDataSerializer* userDataSerializer=0, ISceneNode* rootNode=0) = 0;
//! Get a mesh writer implementation if available
/** \param type Type of the mesh writer to create.
Note: You need to drop() the pointer after use again, see IReferenceCounted::drop()
for details. */
virtual IMeshWriter* createMeshWriter(EMESH_WRITER_TYPE type) = 0;
//! Get a skinned mesh, which is not available as header-only code
/** Note: You need to drop() the pointer after use again, see IReferenceCounted::drop()
for details. */
virtual ISkinnedMesh* createSkinnedMesh() = 0;
//! Sets ambient color of the scene
/** \param ambientColor New ambient color of the scene. */
virtual void setAmbientLight(const video::SColorf &ambientColor) = 0;
//! Get ambient color of the scene
/** \return Current ambient color of the scene. */
virtual const video::SColorf& getAmbientLight() const = 0;
//! Register a custom callbacks manager which gets callbacks during scene rendering.
/** \param[in] lightManager: the new callbacks manager. You may pass 0 to remove the
current callbacks manager and restore the default behaviour. */
virtual void setLightManager(ILightManager* lightManager) = 0;
//! Get an instance of a geometry creator.
/** The geometry creator provides some helper methods to create various types of
basic geometry. This can be useful for custom scene nodes.
\return Pointer to the scene manager's geometry creator. */
virtual const IGeometryCreator* getGeometryCreator(void) const = 0;
//! Check if node is culled in current view frustum
/** Please note that depending on the used culling method this
check can be rather coarse, or slow. A positive result is
correct, though, i.e. if this method returns true the node is
positively not visible. The node might still be invisible even
if this method returns false.
\param node The scene node which is checked for culling.
\return True if node is not visible in the current scene, else
false. */
virtual bool isCulled(const ISceneNode* node) const =0;
};
} // end namespace scene
} // end namespace irr
#endif
| {
"content_hash": "e10aa3e8b5d11ae003baddff80c0c174",
"timestamp": "",
"source": "github",
"line_count": 1631,
"max_line_length": 134,
"avg_line_length": 52.795830778663394,
"alnum_prop": 0.7317733132040414,
"repo_name": "mcodegeeks/OpenKODE-Framework",
"id": "68e7fd408e62e08660238fe6dea2c7bead6e0ec2",
"size": "86110",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "02_Library/Include/XMIrrlicht/ISceneManager.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1292072"
},
{
"name": "C",
"bytes": "36964031"
},
{
"name": "C++",
"bytes": "86498790"
},
{
"name": "Cuda",
"bytes": "1295615"
},
{
"name": "GLSL",
"bytes": "244633"
},
{
"name": "Gnuplot",
"bytes": "1174"
},
{
"name": "HLSL",
"bytes": "153769"
},
{
"name": "HTML",
"bytes": "9848"
},
{
"name": "JavaScript",
"bytes": "1610772"
},
{
"name": "Lex",
"bytes": "83193"
},
{
"name": "Limbo",
"bytes": "2257"
},
{
"name": "Logos",
"bytes": "5463744"
},
{
"name": "Lua",
"bytes": "1094038"
},
{
"name": "Makefile",
"bytes": "69477"
},
{
"name": "Objective-C",
"bytes": "502497"
},
{
"name": "Objective-C++",
"bytes": "211576"
},
{
"name": "Perl",
"bytes": "12636"
},
{
"name": "Python",
"bytes": "170481"
},
{
"name": "Shell",
"bytes": "149"
},
{
"name": "Yacc",
"bytes": "40113"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!-- DO NOT EDIT! This test has been generated by /html/canvas/tools/gentest.py. -->
<title>Canvas test: 2d.pattern.paint.norepeat.coord1</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/html/canvas/resources/canvas-tests.js"></script>
<link rel="stylesheet" href="/html/canvas/resources/canvas-tests.css">
<body class="show_output">
<h1>2d.pattern.paint.norepeat.coord1</h1>
<p class="desc"></p>
<p class="output">Actual output:</p>
<canvas id="c" class="output" width="100" height="50"><p class="fallback">FAIL (fallback content)</p></canvas>
<p class="output expectedtext">Expected output:<p><img src="/images/green-100x50.png" class="output expected" id="expected" alt="">
<ul id="d"></ul>
<script>
// Generated WPT test (see gentest.py notice above) for a 'no-repeat'
// canvas pattern fill under a translated transform.
var t = async_test("");
_addTest(function(canvas, ctx) {
// Paint the left half green and the right half red; the pattern fill
// below is expected to leave the whole canvas green.
ctx.fillStyle = '#0f0';
ctx.fillRect(0, 0, 50, 50);
ctx.fillStyle = '#f00';
ctx.fillRect(50, 0, 50, 50);
var img = document.getElementById('green.png');
var pattern = ctx.createPattern(img, 'no-repeat');
ctx.fillStyle = pattern;
// Shift the pattern origin, then fill a rect spanning the full canvas.
ctx.translate(50, 0);
ctx.fillRect(-50, 0, 100, 50);
// All four sampled corners must be green after the pattern fill.
_assertPixel(canvas, 1,1, 0,255,0,255, "1,1", "0,255,0,255");
_assertPixel(canvas, 98,1, 0,255,0,255, "98,1", "0,255,0,255");
_assertPixel(canvas, 1,48, 0,255,0,255, "1,48", "0,255,0,255");
_assertPixel(canvas, 98,48, 0,255,0,255, "98,48", "0,255,0,255");
});
</script>
<img src="/images/green.png" id="green.png" class="resource">
| {
"content_hash": "594928a1a76a86e7bf111a4bbbf119fe",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 131,
"avg_line_length": 35.19047619047619,
"alnum_prop": 0.6799729364005412,
"repo_name": "ric2b/Vivaldi-browser",
"id": "98a363dbe85f454a34262ae71b76556f678085ef",
"size": "1478",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "chromium/third_party/blink/web_tests/external/wpt/html/canvas/element/fill-and-stroke-styles/2d.pattern.paint.norepeat.coord1.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
<!--
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
-->
<link rel="import" href="../polymer/polymer.html">
<link rel="import" href="../app-storage/app-storage-behavior.html">
<link rel="import" href="app-pouchdb-database-behavior.html">
<link rel="import" href="pouchdb.html">
<!--
`app-pouchdb-document` is an implementation of `Polymer.AppStorageBehavior`
for reading and writing to individual PouchDB documents.
In order to refer to a PouchDB document, provide the name of the database
(both local and remote databases are supported) and the ID of the document.
For example:
```html
<app-pouchdb-document
db-name="cats"
doc-id="parsnip">
</app-pouchdb-document>
```
In the above example, a PouchDB instance will be created to connect to the
local database named "cats". Then it will check to see if a document with the
ID "parsnip" exists. If it does, the `data` property of the document will be
set to the value of the document. If it does not, then any subsequent
assignments to the `data` property will cause a document with ID "parsnip" to
be created.
Here is an example of a simple form that can be used to read and write to a
PouchDB document:
```html
<app-pouchdb-document
db-name="cats"
doc-id="parsnip"
data="{{cat}}">
</app-pouchdb-document>
<input
is="iron-input"
bind-value="{{cat.name}}">
</input>
```
-->
<script>
  (function() {
    'use strict';

    Polymer({
      is: 'app-pouchdb-document',

      behaviors: [
        Polymer.AppStorageBehavior,
        Polymer.AppPouchDBDatabaseBehavior
      ],

      properties: {
        /**
         * The value of the _id (Pouch/Couch unique identifier) of the PouchDB
         * document that this element's data should refer to.
         */
        docId: {
          type: String,
          value: null
        },

        /**
         * The current _rev (revision) of the PouchDB document that this
         * element's data refers to, if the document is not new.
         */
        rev: {
          type: String,
          readOnly: true,
          value: null
        },

        /**
         * A changes event emitter. Notifies of changes to the PouchDB document
         * referred to by `docId`, if a `docId` has been provided.
         */
        changes: {
          type: Object,
          computed: '__computeChanges(db, docId)'
        }
      },

      observers: [
        '__docRevChanged(data._rev)',
        '__docChanged(db, data, docId, _readied)'
      ],

      /** @override */
      get isNew() {
        return this.docId == null;
      },

      /** @override */
      get zeroValue() {
        return {};
      },

      /** @override */
      save: function() {
        if (!this.db) {
          return Promise.reject('No PouchDB instance available!');
        }

        if (this.docId) {
          this.data._id = this.docId;
        }

        // Upsert the document, then reflect the id/rev assigned by PouchDB
        // back into `data` so bindings observe the new revision.
        return this._upsert('data', this.data, this.data)
            .then(function(response) {
              this.syncToMemory(function() {
                this.docId = response.id;
                this.set('data._id', response.id);
                this.set('data._rev', response.rev);
              });
            }.bind(this));
      },

      /** @override */
      reset: function() {
        this.docId = null;
        this.data = this.zeroValue;
      },

      /** @override */
      destroy: function() {
        // Mark the document deleted; the write is flushed through the
        // storage behavior's transaction queue.
        this.set('data._deleted', true);

        // BUG FIX: the callback was previously unbound; under 'use strict'
        // `this` inside it was undefined, so `this.reset()` threw instead of
        // clearing the element once the deletion completed. Bind it like the
        // other async callbacks in this element.
        return this.transactionsComplete.then(function() {
          return this.reset();
        }.bind(this));
      },

      /** @override */
      getStoredValue: function(storagePath) {
        return this.db.get(this.docId).then(function(doc) {
          return this.get(storagePath, {
            data: doc
          });
        }.bind(this)).catch(function(error) {
          // A 404 just means the document does not exist yet; resolve with
          // `undefined` instead of rejecting.
          if (error && error.status === 404) {
            return;
          } else {
            throw error;
          }
        });
      },

      /** @override */
      setStoredValue: function(storagePath, value) {
        if (storagePath === 'data' && value == null) {
          return Promise.reject(
              ['Unsupported attempt to unset PouchDB document by assigning null value.',
               'Perhaps you meant to set `data._deleted = true`?'].join(' '));
        }

        return this._put(storagePath, value, this.data);
      },

      __docChanged: function(db, doc, docId) {
        this._log('Doc / ID changed', doc, docId);

        // Loose (!=) comparisons are deliberate: they treat undefined and
        // null alike for both the document and the id.
        if (db && doc != null && docId != null && doc._id != docId) {
          doc._id = docId;
          this._initializeStoredValue();
        }
      },

      __docRevChanged: function() {
        this._setRev(this.data != null ? this.data._rev : null);
      },

      __computeChanges: function(db, docId) {
        // Tear down the previous live feed (if any) before subscribing anew.
        if (this.changes) {
          this.changes.removeAllListeners();
        }

        if (db == null || docId == null) {
          return null;
        }

        // Live changes feed scoped to this single document; each change
        // replaces `data` wholesale with the latest document revision.
        return db.changes({
          since: 'now',
          live: true,
          doc_ids: [docId],
          include_docs: true,
        }).on('change', function(change) {
          var doc = change.doc;

          this.syncToMemory(function() {
            this.set('data', doc);
          });
        }.bind(this));
      }
    });
  })();
</script>
| {
"content_hash": "18024014d6043b3c08af6ed4558bac9d",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 100,
"avg_line_length": 27.951219512195124,
"alnum_prop": 0.5614310645724259,
"repo_name": "Reviaco/Reviaco-Site",
"id": "458c40696ab3d199181b2536a9327427de208670",
"size": "5730",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "JS/Polymer/app-pouchdb/app-pouchdb-document.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2266315"
},
{
"name": "HTML",
"bytes": "5283583"
},
{
"name": "JavaScript",
"bytes": "12107102"
},
{
"name": "Makefile",
"bytes": "647"
},
{
"name": "PHP",
"bytes": "11127746"
},
{
"name": "Python",
"bytes": "1985"
},
{
"name": "Roff",
"bytes": "2072"
},
{
"name": "Ruby",
"bytes": "3839"
},
{
"name": "Shell",
"bytes": "4209"
},
{
"name": "TypeScript",
"bytes": "115045"
}
],
"symlink_target": ""
} |
//
// UINavigationBar.h
// UIKit
//
// Copyright (c) 2005-2014 Apple Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <FakeUIKit/UIView.h>
#import <FakeUIKit/UIInterface.h>
#import <FakeUIKit/UIFont.h>
#import <FakeUIKit/UIKitDefines.h>
#import <FakeUIKit/UIButton.h>
#import <FakeUIKit/UIBarButtonItem.h>
#import <FakeUIKit/UIBarCommon.h>
@class UINavigationItem, UIBarButtonItem, UIImage, UIColor;
@protocol UINavigationBarDelegate;
NS_CLASS_AVAILABLE_IOS(2_0) @interface UINavigationBar : UIView <NSCoding, UIBarPositioning> {
@private
NSMutableArray *_itemStack;
CGFloat _rightMargin;
unsigned _state;
id __unsafe_unretained _delegate;
UIView *_backgroundView;
UIView *_titleView;
NSArray *_leftViews;
NSArray *_rightViews;
UIView *_prompt;
UIView *_accessoryView;
UIColor *_barTintColor;
id _appearanceStorage;
id _currentAlert;
struct {
unsigned int animate:1;
unsigned int animationDisabledCount:10;
unsigned int transitioningBarStyle:1;
unsigned int newBarStyle:3;
unsigned int transitioningToTranslucent:1;
unsigned int barStyle:3;
unsigned int barTranslucence:3;
unsigned int disableLayout:1;
unsigned int backPressed:1;
unsigned int animatePromptChange:1;
unsigned int pendingHideBackButton:1;
unsigned int titleAutosizesToFit:1;
unsigned int usingNewAPI:1;
unsigned int forceFullHeightInLandscape:1;
unsigned int isLocked:1;
unsigned int shouldUpdatePromptAfterTransition:1;
unsigned int crossfadeItems:1;
unsigned int autoAdjustTitle:1;
unsigned int isContainedInPopover:1;
unsigned int needsDrawRect:1;
unsigned int animationCleanupCancelled:1;
unsigned int layoutInProgress:1;
unsigned int dynamicDuration:1;
unsigned int isInteractive:1;
unsigned int cancelledTransition:1;
unsigned int animationCount:4;
unsigned int backgroundLayoutNeedsUpdate:1;
} _navbarFlags;
}
@property(nonatomic,assign) UIBarStyle barStyle;
@property(nonatomic,assign) id<UINavigationBarDelegate> delegate;
/*
New behavior on iOS 7.
Default is YES.
You may force an opaque background by setting the property to NO.
If the navigation bar has a custom background image, the default is inferred
from the alpha values of the image—YES if it has any pixel with alpha < 1.0
If you send setTranslucent:YES to a bar with an opaque custom background image
it will apply a system opacity less than 1.0 to the image.
If you send setTranslucent:NO to a bar with a translucent custom background image
it will provide an opaque background for the image using the bar's barTintColor if defined, or black
for UIBarStyleBlack or white for UIBarStyleDefault if barTintColor is nil.
*/
@property(nonatomic,assign,getter=isTranslucent) BOOL translucent NS_AVAILABLE_IOS(3_0); // Default is NO on iOS 6 and earlier. Always YES if barStyle is set to UIBarStyleBlackTranslucent
// Pushing a navigation item displays the item's title in the center of the navigation bar.
// The previous top navigation item (if it exists) is displayed as a "back" button on the left.
- (void)pushNavigationItem:(UINavigationItem *)item animated:(BOOL)animated;
- (UINavigationItem *)popNavigationItemAnimated:(BOOL)animated; // Returns the item that was popped.
@property(nonatomic,readonly,retain) UINavigationItem *topItem;
@property(nonatomic,readonly,retain) UINavigationItem *backItem;
@property(nonatomic,copy) NSArray *items;
- (void)setItems:(NSArray *)items animated:(BOOL)animated; // If animated is YES, then simulate a push or pop depending on whether the new top item was previously in the stack.
/*
The behavior of tintColor for bars has changed on iOS 7.0. It no longer affects the bar's background
and behaves as described for the tintColor property added to UIView.
To tint the bar's background, please use -barTintColor.
*/
@property(nonatomic,retain) UIColor *tintColor;
@property(nonatomic,retain) UIColor *barTintColor NS_AVAILABLE_IOS(7_0) UI_APPEARANCE_SELECTOR; // default is nil
/* In general, you should specify a value for the normal state to be used by other states which don't have a custom value set.
Similarly, when a property is dependent on the bar metrics (on the iPhone in landscape orientation, bars have a different height from standard), be sure to specify a value for UIBarMetricsDefault.
*/
- (void)setBackgroundImage:(UIImage *)backgroundImage forBarPosition:(UIBarPosition)barPosition barMetrics:(UIBarMetrics)barMetrics NS_AVAILABLE_IOS(7_0) UI_APPEARANCE_SELECTOR;
- (UIImage *)backgroundImageForBarPosition:(UIBarPosition)barPosition barMetrics:(UIBarMetrics)barMetrics NS_AVAILABLE_IOS(7_0) UI_APPEARANCE_SELECTOR;
/*
Same as using UIBarPositionAny in -setBackgroundImage:forBarPosition:barMetrics. Resizable images will be stretched
vertically if necessary when the navigation bar is in the position UIBarPositionTopAttached.
*/
- (void)setBackgroundImage:(UIImage *)backgroundImage forBarMetrics:(UIBarMetrics)barMetrics NS_AVAILABLE_IOS(5_0) UI_APPEARANCE_SELECTOR;
- (UIImage *)backgroundImageForBarMetrics:(UIBarMetrics)barMetrics NS_AVAILABLE_IOS(5_0) UI_APPEARANCE_SELECTOR;
/* Default is nil. When non-nil, a custom shadow image to show instead of the default shadow image. For a custom shadow to be shown, a custom background image must also be set with -setBackgroundImage:forBarMetrics: (if the default background image is used, the default shadow image will be used).
*/
@property(nonatomic,retain) UIImage *shadowImage NS_AVAILABLE_IOS(6_0) UI_APPEARANCE_SELECTOR;
/* You may specify the font, text color, and shadow properties for the title in the text attributes dictionary, using the keys found in NSAttributedString.h.
*/
@property(nonatomic,copy) NSDictionary *titleTextAttributes NS_AVAILABLE_IOS(5_0) UI_APPEARANCE_SELECTOR;
- (void)setTitleVerticalPositionAdjustment:(CGFloat)adjustment forBarMetrics:(UIBarMetrics)barMetrics NS_AVAILABLE_IOS(5_0) UI_APPEARANCE_SELECTOR;
- (CGFloat)titleVerticalPositionAdjustmentForBarMetrics:(UIBarMetrics)barMetrics NS_AVAILABLE_IOS(5_0) UI_APPEARANCE_SELECTOR;
/*
The back indicator image is shown beside the back button.
The back indicator transition mask image is used as a mask for content during push and pop transitions
Note: These properties must both be set if you want to customize the back indicator image.
*/
@property(nonatomic,retain) UIImage *backIndicatorImage NS_AVAILABLE_IOS(7_0) UI_APPEARANCE_SELECTOR;
@property(nonatomic,retain) UIImage *backIndicatorTransitionMaskImage NS_AVAILABLE_IOS(7_0) UI_APPEARANCE_SELECTOR;
@end
@protocol UINavigationBarDelegate <UIBarPositioningDelegate>
@optional
- (BOOL)navigationBar:(UINavigationBar *)navigationBar shouldPushItem:(UINavigationItem *)item; // called to push. return NO not to.
- (void)navigationBar:(UINavigationBar *)navigationBar didPushItem:(UINavigationItem *)item; // called at end of animation of push or immediately if not animated
- (BOOL)navigationBar:(UINavigationBar *)navigationBar shouldPopItem:(UINavigationItem *)item; // same as push methods
- (void)navigationBar:(UINavigationBar *)navigationBar didPopItem:(UINavigationItem *)item;
@end
NS_CLASS_AVAILABLE_IOS(2_0) @interface UINavigationItem : NSObject <NSCoding> {
@private
NSString *_title;
NSString *_backButtonTitle;
UIBarButtonItem *_backBarButtonItem;
NSString *_prompt;
NSInteger _tag;
id _context;
UINavigationBar *_navigationBar;
UIView *_defaultTitleView;
UIView *_titleView;
UIView *_backButtonView;
NSArray *_leftBarButtonItems;
NSArray *_rightBarButtonItems;
NSArray *_customLeftViews;
NSArray *_customRightViews;
BOOL _hidesBackButton;
BOOL _leftItemsSupplementBackButton;
UIImageView *_frozenTitleView;
}
- (instancetype)initWithTitle:(NSString *)title; // Designated initializer
@property(nonatomic,copy) NSString *title; // Title when topmost on the stack. default is nil
@property(nonatomic,retain) UIBarButtonItem *backBarButtonItem; // Bar button item to use for the back button in the child navigation item.
@property(nonatomic,retain) UIView *titleView; // Custom view to use in lieu of a title. May be sized horizontally. Only used when item is topmost on the stack.
@property(nonatomic,copy) NSString *prompt; // Explanatory text to display above the navigation bar buttons.
@property(nonatomic,assign) BOOL hidesBackButton; // If YES, this navigation item will hide the back button when it's on top of the stack.
- (void)setHidesBackButton:(BOOL)hidesBackButton animated:(BOOL)animated;
/* Use these properties to set multiple items in a navigation bar.
The older single properties (leftBarButtonItem and rightBarButtonItem) now refer to
the first item in the respective array of items.
NOTE: You'll achieve the best results if you use either the singular properties or
the plural properties consistently and don't try to mix them.
leftBarButtonItems are placed in the navigation bar left to right with the first
item in the list at the left outside edge and left aligned.
rightBarButtonItems are placed right to left with the first item in the list at
the right outside edge and right aligned.
*/
@property(nonatomic,copy) NSArray *leftBarButtonItems NS_AVAILABLE_IOS(5_0);
@property(nonatomic,copy) NSArray *rightBarButtonItems NS_AVAILABLE_IOS(5_0);
- (void)setLeftBarButtonItems:(NSArray *)items animated:(BOOL)animated NS_AVAILABLE_IOS(5_0);
- (void)setRightBarButtonItems:(NSArray *)items animated:(BOOL)animated NS_AVAILABLE_IOS(5_0);
/* By default, the leftItemsSupplementBackButton property is NO. In this case,
the back button is not drawn and the left item or items replace it. If you
would like the left items to appear in addition to the back button (as opposed to instead of it)
set leftItemsSupplementBackButton to YES.
*/
@property(nonatomic) BOOL leftItemsSupplementBackButton NS_AVAILABLE_IOS(5_0);
// Some navigation items want to display a custom left or right item when they're on top of the stack.
// A custom left item replaces the regular back button unless you set leftItemsSupplementBackButton to YES
@property(nonatomic,retain) UIBarButtonItem *leftBarButtonItem;
@property(nonatomic,retain) UIBarButtonItem *rightBarButtonItem;
- (void)setLeftBarButtonItem:(UIBarButtonItem *)item animated:(BOOL)animated;
- (void)setRightBarButtonItem:(UIBarButtonItem *)item animated:(BOOL)animated;
@end
| {
"content_hash": "51991b506bf3416aafae23f9464a7964",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 297,
"avg_line_length": 50.929906542056074,
"alnum_prop": 0.7545646389577025,
"repo_name": "PeqNP/ObjC-Stub",
"id": "2f7f289991a8cd1d6cd9f6b1af97e967e3604f6c",
"size": "10901",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xcode/UIKitTest/UIKitTest-Specs/Frameworks/FakeUIKit.framework/Headers/UINavigationBar.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "14601"
},
{
"name": "C++",
"bytes": "91397"
},
{
"name": "Objective-C",
"bytes": "2496597"
},
{
"name": "Objective-C++",
"bytes": "57904"
},
{
"name": "Python",
"bytes": "10827"
},
{
"name": "Ruby",
"bytes": "3897"
},
{
"name": "Shell",
"bytes": "8196"
}
],
"symlink_target": ""
} |
// French (fr) locale bundle for the Story Map viewer UI.
// Only the string values are localized; the key structure must stay in
// sync with the default (root) bundle.
define(
({
viewer: {
common: {
close: "Fermer"
},
loading: {
step1: "Chargement du récit",
step2: "Chargement des données",
step3: "Initialisation",
loadBuilder: "Accéder au générateur",
long: "Le journal cartographique est en cours d\'initialisation",
long2: "Merci de patienter",
failButton: "Recharger le récit"
},
signin: {
title: "L\'authentification est nécessaire",
explainViewer: "Connectez-vous avec un compte sur %PORTAL_LINK% pour accéder au récit.",
explainBuilder: "Connectez-vous avec un compte sur %PORTAL_LINK% pour configurer le récit."
},
errors: {
boxTitle: "Une erreur s\’est produite",
invalidConfig: "Configuration non valide",
invalidConfigNoApp: "Identifiant de l\'application de cartographie Web non spécifié dans index.html.",
unspecifiedConfigOwner: "Le propriétaire autorisé n\'est pas configuré.",
invalidConfigOwner: "Le propriétaire du récit n\'est pas autorisé.",
createMap: "Impossible de créer la carte",
invalidApp: "Le %TPL_NAME% n\'existe pas ou n\'est pas accessible.",
appLoadingFail: "Une erreur s\'est produite et le chargement de %TPL_NAME% ne s\'est pas correctement déroulé.",
notConfiguredDesktop: "Le récit n\'est pas encore configuré.",
notConfiguredMobile: "Le générateur %TPL_NAME% n\'est pas pris en charge avec cette taille d\'affichage.",
notAuthorized: "Vous n\'êtes pas autorisé à accéder à ce récit.",
noBuilderIE: "Le générateur n\'est pas pris en charge dans Internet Explorer avant la version %VERSION%. %UPGRADE%",
noViewerIE: "Ce récit n\'est pas pris en charge dans Internet Explorer avant la version %VERSION%. %UPGRADE%",
upgradeBrowser: "<a href='http://browsehappy.com/' target='_blank'>Mettez votre navigateur à jour</a>.",
mapLoadingFail: "Une erreur s\'est produite et la carte n\'a pas été correctement chargée.",
signOut: "Se déconnecter"
},
mobileView: {
tapForDetails: "Touchez pour obtenir des détails",
clickForDetails: "En savoir plus",
swipeToExplore: "Effectuez un mouvement de balayage pour explorer",
tapForMap: "Touchez pour revenir à la carte",
clickForMap: "RETOUR A LA CARTE"
},
floatLayout: {
scroll: "Défilement"
},
sideLayout: {
scroll: "Faites défiler pour en savoir plus !"
},
mainStage: {
back: "Retour"
},
headerFromCommon: {
storymapsText: "Une Story Map",
builderButton: "Modifier",
facebookTooltip: "Partager sur Facebook",
twitterTooltip: "Partager sur Twitter",
bitlyTooltip: "Obtenir un lien court",
templateTitle: "Définir le titre du modèle",
templateSubtitle: "Définir le sous-titre du modèle",
share: "Partager",
checking: "Vérification du contenu de votre récit",
fix: "Résoudre les problèmes du récit",
noerrors: "Aucun problème détecté"
},
overviewFromCommon: {
title: "Vue générale"
},
legendFromCommon: {
title: "Légende"
},
shareFromCommon: {
copy: "Copier",
copied: "Copié",
open: "Ouvrir",
embed: "Incorporer dans une page Web",
embedExplain: "Utilisez le code HTML suivant pour incorporer le journal dans une page Web.",
size: "Taille (largeur/hauteur) :"
}
}
})
); | {
"content_hash": "7d4e752f68b3e17742434dc7eab06234",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 120,
"avg_line_length": 39.035714285714285,
"alnum_prop": 0.6816102470265325,
"repo_name": "arcgistask/arcgistask.github.io",
"id": "7c7fff885552d0ece2639e5e6899e75b4b18108c",
"size": "3344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resources/tpl/viewer/nls/fr/template.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "17789"
},
{
"name": "HTML",
"bytes": "880811"
},
{
"name": "JavaScript",
"bytes": "824896"
}
],
"symlink_target": ""
} |
package com.easyform.library.editors;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Color;
import android.support.v7.app.AlertDialog;
import android.text.TextUtils;
import android.view.View;
import com.easyform.library.R;
import com.easyform.library.editors.model.PairModel;
import java.util.LinkedList;
import java.util.List;
/**
 * A read-only text field that opens a multi-choice {@link AlertDialog} when
 * tapped and shows the confirmed choices as a comma separated list.
 *
 * The element stores its serialized value (via {@link Editor}) as the comma
 * separated keys of the selected {@code PairModel} entries, while the visible
 * text uses their display values.
 */
public class MultiSelectElement extends android.support.v7.widget.AppCompatTextView implements View.OnClickListener, Editor<String> {

    // Lazily created selection dialog; invalidated whenever the items change.
    private AlertDialog alertDialog;
    // Available key/value choices backing the dialog.
    private PairModel[] items;
    // Checked state for each entry; parallel to `items`.
    private boolean[] results;

    public MultiSelectElement(Context context) {
        super(context);
        setTextSize(16);
        setTextColor(Color.BLACK);
        setBackgroundResource(R.drawable.bottom_line);
        setPadding(10, 5, 0, 0);
        setOnClickListener(this);
    }

    public MultiSelectElement(Context context, PairModel[] items) {
        this(context);
        setItems(items);
    }

    /** Returns the selection dialog, creating it on first use. */
    public AlertDialog getSelectionDialog() {
        if (alertDialog == null)
            createAlertDialog();
        return alertDialog;
    }

    /** Returns the display labels of all available choices. */
    public String[] getItems() {
        String[] newItems = new String[items.length];
        for (int i = 0; i < newItems.length; i++) {
            newItems[i] = items[i].getPairValue();
        }
        return newItems;
    }

    /**
     * Replaces the available choices. Clears all previous selections and
     * discards the cached dialog so it is rebuilt with the new entries.
     */
    public void setItems(PairModel[] items) {
        if (items == null) {
            items = new PairModel[0];
        }
        this.items = items;
        this.results = new boolean[items.length];
        alertDialog = null;
    }

    // Builds the multi-choice dialog; the dialog writes checked state
    // directly into `results`.
    private void createAlertDialog() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this.getContext());
        builder.setTitle("Select");
        builder.setMultiChoiceItems(getItems(), results, new DialogInterface.OnMultiChoiceClickListener() {
            @Override
            public void onClick(DialogInterface dialogInterface, int i, boolean b) {
                // No-op: the dialog updates `results` in place.
            }
        }).setPositiveButton("Ok", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int id) {
                // Reflect the confirmed selections in the text view.
                setText(mergeSelections());
            }
        }).setNegativeButton("Cancel", null);
        alertDialog = builder.create();
    }

    /** Returns the comma separated keys of all checked entries. */
    @Override
    public String getValue() {
        List<String> selections = new LinkedList<>();
        for (int i = 0; i < items.length; i++) {
            if (results[i])
                selections.add(items[i].getPairKey());
        }
        String[] arr = new String[selections.size()];
        selections.toArray(arr);
        return TextUtils.join(",", arr);
    }

    /** Restores the checked state from a comma separated list of keys. */
    @Override
    public void setValue(String value) {
        if (!TextUtils.isEmpty(value)) {
            String[] values = TextUtils.split(value, ",");
            for (int i = 0; i < items.length; i++) {
                results[i] = false;
                for (String item : values) {
                    if (items[i].getPairKey().equals(item)) {
                        results[i] = true;
                        break;
                    }
                }
            }
            setText(mergeSelections());
        }
    }

    /**
     * Builds the human readable label list shown in the text view, or null
     * when nothing is selected.
     *
     * BUG FIX: previously this appended the PairModel object itself, which
     * renders via Object.toString() (e.g. "PairModel@1a2b3c"); it now uses
     * the display value, mirroring getItems().
     */
    private String mergeSelections() {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < items.length; i++) {
            if (results[i]) {
                builder.append(items[i].getPairValue()).append(",");
            }
        }
        String result = builder.toString();
        if (TextUtils.isEmpty(result)) {
            return null;
        }
        // Drop the trailing comma.
        return result.substring(0, result.length() - 1);
    }

    /** Opens the selection dialog when the field is tapped. */
    @Override
    public void onClick(View view) {
        getSelectionDialog().show();
    }
}
| {
"content_hash": "22f51537e10510215e895f4728622dd1",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 133,
"avg_line_length": 27.06382978723404,
"alnum_prop": 0.5762578616352201,
"repo_name": "hasanmumin/EasyForm",
"id": "d3286f936b2de39a1e07daa6aab7b5f893c0f49c",
"size": "3816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "library/src/main/java/com/easyform/library/editors/MultiSelectElement.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "74500"
}
],
"symlink_target": ""
} |
/**
* @file
* @brief
* This file includes platform abstractions for miscellaneous behaviors.
*/
#ifndef OPENTHREAD_PLATFORM_MISC_H_
#define OPENTHREAD_PLATFORM_MISC_H_
#include <stdint.h>
#include <openthread/instance.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
* @addtogroup plat-misc
*
* @brief
* This module includes platform abstractions for miscellaneous behaviors.
*
* @{
*
*/
/**
* This function performs a software reset on the platform, if supported.
*
* @param[in] aInstance The OpenThread instance structure.
*
*/
void otPlatReset(otInstance *aInstance);
/**
 * Enumeration of possible reset reason codes.
 *
 * These are in the same order as the Spinel reset reason codes.
 *
 */
typedef enum
{
OT_PLAT_RESET_REASON_POWER_ON = 0, ///< Power-on reset.
OT_PLAT_RESET_REASON_EXTERNAL = 1, ///< External reset.
OT_PLAT_RESET_REASON_SOFTWARE = 2, ///< Software-requested reset.
OT_PLAT_RESET_REASON_FAULT = 3, ///< Fault.
OT_PLAT_RESET_REASON_CRASH = 4, ///< Crash.
OT_PLAT_RESET_REASON_ASSERT = 5, ///< Assertion failure.
OT_PLAT_RESET_REASON_OTHER = 6, ///< Other reason.
OT_PLAT_RESET_REASON_UNKNOWN = 7, ///< Reason unknown.
OT_PLAT_RESET_REASON_WATCHDOG = 8, ///< Watchdog reset.
OT_PLAT_RESET_REASON_COUNT, ///< Number of defined reset reason codes (not itself a reason).
} otPlatResetReason;
/**
 * This function returns the reason for the last platform reset.
 *
 * @param[in] aInstance The OpenThread instance structure.
 *
 * @returns The reason code for the last platform reset.
 *
 */
otPlatResetReason otPlatGetResetReason(otInstance *aInstance);
/**
* This function provides a platform specific implementation for assert.
*
* @param[in] aFilename The name of the file where the assert occurred.
* @param[in] aLineNumber The line number in the file where the assert occurred.
*
*/
void otPlatAssertFail(const char *aFilename, int aLineNumber);
/**
* This function performs a platform specific operation to wake the host MCU.
* This is used only for NCP configurations.
*
*/
void otPlatWakeHost(void);
/**
* Enumeration of micro-controller's power states.
*
* These values are used for NCP configuration when `OPENTHREAD_CONFIG_NCP_ENABLE_MCU_POWER_STATE_CONTROL` is enabled.
*
* The power state specifies the desired power state of NCP's micro-controller (MCU) when the underlying platform's
* operating system enters idle mode (i.e., all active tasks/events are processed and the MCU can potentially enter a
* energy-saving power state).
*
* The power state primarily determines how the host should interact with the NCP and whether the host needs an
* external trigger (a "poke") to NCP before it can communicate with the NCP or not.
*
* After a reset, the MCU power state MUST be `OT_PLAT_POWER_STATE_ON`.
*
*/
typedef enum
{
/**
* NCP's MCU stays on and active all the time.
*
* When the NCP's desired power state is set to `ON`, host can send messages to NCP without requiring any "poke" or
* external triggers.
*
* @note The `ON` power state only determines the MCU's power mode and is not related to radio's state.
*
*/
OT_PLAT_MCU_POWER_STATE_ON = 0,
/**
* NCP's MCU can enter low-power (energy-saving) state.
*
* When the NCP's desired power state is set to `LOW_POWER`, host is expected to "poke" the NCP (e.g., an external
* trigger like an interrupt) before it can communicate with the NCP (send a message to the NCP). The "poke"
* mechanism is determined by the platform code (based on NCP's interface to the host).
*
* While power state is set to `LOW_POWER`, NCP can still (at any time) send messages to host. Note that receiving
* a message from the NCP does NOT indicate that the NCP's power state has changed, i.e., host is expected to
* continue to "poke" when it wants to talk to the NCP until the power state is explicitly changed (by a successful
* call to `otPlatSetMcuPowerState()` changing the state to `ON`).
*
* @note The `LOW_POWER` power state only determines the MCU's power mode and is not related to radio's state
* (radio is managed by OpenThread core and device role, e.g., device being sleepy or not.
*
*/
OT_PLAT_MCU_POWER_STATE_LOW_POWER = 1,
/**
* NCP is fully off.
*
* An NCP hardware reset (via a RESET pin) is required to bring the NCP back to `SPINEL_MCU_POWER_STATE_ON`.
* RAM is not retained after reset.
*
*/
OT_PLAT_MCU_POWER_STATE_OFF = 2,
} otPlatMcuPowerState;
/**
* This function sets the desired MCU power state.
*
* This is only applicable and used for NCP configuration when `OPENTHREAD_CONFIG_NCP_ENABLE_MCU_POWER_STATE_CONTROL`
* is enabled.
*
* @param[in] aInstance A pointer to OpenThread instance.
* @param[in] aState The new MCU power state.
*
* @retval OT_ERROR_NONE The power state updated successfully.
* @retval OT_ERROR_FAILED The given MCU power state is not supported by the platform.
*
*/
otError otPlatSetMcuPowerState(otInstance *aInstance, otPlatMcuPowerState aState);
/**
* This function gets the current desired MCU power state.
*
* This is only applicable and used for NCP configuration when `OPENTHREAD_CONFIG_NCP_ENABLE_MCU_POWER_STATE_CONTROL`
* is enabled.
*
* After a reset, the power state MUST return `OT_PLAT_POWER_STATE_ON`. During operation, power state SHOULD only
* change through an explicit successful call to `otPlatSetMcuPowerState()`.
*
* @param[in] aInstance A pointer to OpenThread instance.
*
* @returns The current power state.
*
*/
otPlatMcuPowerState otPlatGetMcuPowerState(otInstance *aInstance);
/**
* @}
*
*/
#if defined(OPENTHREAD_CONFIG_PLATFORM_NETIF_ENABLE) && OPENTHREAD_CONFIG_PLATFORM_NETIF_ENABLE
/**
* This function gets the name and index of the platform's network interface (if it exists).
*
* @param[in] aInstance A pointer to OpenThread instance.
* @param[out] outNetIfName A pointer for the returned network interface name.
* @param[out] outNetIfIndex A pointer for the returned network interface index (i.e., if_nametoindex).
*
* @retval OT_ERROR_NONE Successfully returned the network interface and index.
* @retval OT_ERROR_FAILED The network interface is not enabled or is unknown.
*
*/
otError otPlatGetNetif(otInstance *aInstance, const char **outNetIfName, unsigned int *outNetIfIndex);
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif // OPENTHREAD_PLATFORM_MISC_H_
| {
"content_hash": "2d1772d1ce3795c5dd039e6bb7b4d0ad",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 119,
"avg_line_length": 32.350515463917525,
"alnum_prop": 0.7036328871892925,
"repo_name": "lanyuwen/openthread",
"id": "a5d5e44790f543850ab8fd8ac069d30f6d48a079",
"size": "7884",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "include/openthread/platform/misc.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "10128"
},
{
"name": "C",
"bytes": "504489"
},
{
"name": "C#",
"bytes": "18077"
},
{
"name": "C++",
"bytes": "3008688"
},
{
"name": "M4",
"bytes": "42638"
},
{
"name": "Makefile",
"bytes": "77019"
},
{
"name": "Python",
"bytes": "1017946"
},
{
"name": "Ruby",
"bytes": "3397"
},
{
"name": "Shell",
"bytes": "17185"
}
],
"symlink_target": ""
} |
module.exports = function(context, stepInfo) {
context.sharp.gamma(parseFloat(stepInfo.gamma || '2.2'));
};
| {
"content_hash": "512b1f5441a615c7818763a76dcda964",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 59,
"avg_line_length": 37.666666666666664,
"alnum_prop": 0.6902654867256637,
"repo_name": "surfdude75/node-image-steam",
"id": "be9b4ed9f7a461763c077f28b9f7d3a97de9544f",
"size": "113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/processor/steps/gamma.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14"
},
{
"name": "JavaScript",
"bytes": "115457"
}
],
"symlink_target": ""
} |
{% load i18n %}
{% if cl.search_fields %}
<form class="navbar-form navbar-left" action="" method="get">
<div class="input-group search-group">
{% if cl.search_query %}
<span class="input-group-btn"><a class="btn btn-default" href="{{remove_search_url}}">x</a></span>
{% endif %}
<input id="searchbar" class="form-control" type="text" name="{{ search_var }}" value="{{ cl.search_query }}" placeholder="{% trans 'Search' %} {{cl.opts.verbose_name}}">
<span class="input-group-btn"><button class="btn btn-primary" type="submit"><i class="fa fa-search"></i></button></span>
</div>
{{ search_form_params|safe }}
</form>
{% endif %}
| {
"content_hash": "f3ccf08e162eb42efda7346a63bd448e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 173,
"avg_line_length": 50.84615384615385,
"alnum_prop": 0.6172465960665658,
"repo_name": "ly0/xxadmin",
"id": "76ebf42cc1fee492aa8285af12a1ba4f0bc35bc5",
"size": "661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xadmin/templates/xadmin/blocks/model_list.nav_form.search_form.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "24096"
},
{
"name": "HTML",
"bytes": "99885"
},
{
"name": "JavaScript",
"bytes": "84631"
},
{
"name": "Python",
"bytes": "434226"
},
{
"name": "Shell",
"bytes": "713"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>floatz - Layout.Empty</title>
<meta http-equiv="content-type" content="text/html; CHARSET=utf-8" />
<meta http-equiv="content-script-type" content="text/javascript" />
<meta http-equiv="content-style-type" content="text/css" />
<meta http-equiv="content-language" content="en" />
<meta name="author" content="" />
<meta name="robots" content="index,follow" />
<meta name="description" content="" />
<!-- Load project CSS without less -->
<link rel="stylesheet" type="text/css" href="styles/project4.css" />
<!-- Load project CSS with less -->
<!-- <link rel="stylesheet/less" type="text/css" href="styles/project.less" />
<script type="text/javascript" src="styles/floatz-1.2.0/scripts/less-2.2.0.min.js"></script> -->
<!-- -->
<script type="text/javascript" src="styles/floatz-1.2.0/scripts/LAB-2.0.3.min.js"></script>
<script type="text/javascript" src="scripts/project.js"></script>
</head>
<body>
<!-- page -->
<div id="flz_page">
<!-- min width containers -->
<div id="flz_minwidth">
<div id="flz_minwidthcontent">
<!-- header -->
<div class="flz_box header">Header</div>
<!-- menu -->
<div class="flz_box menu">Menu</div>
<!-- submenu -->
<div class="flz_box flz_l20 submenu">Submenu</div>
<!-- content -->
<div class="flz_box flz_r80 content">
<div class="flz_spacer">
<h1>Header 1</h1>
<p>Lorem ipsum dolor sit amet, consetetur sadipscing elitr,
sed diam nonumy eirmod tempor invidunt ut labore et dolore magna
aliquyam erat, sed diam voluptua. At vero eos et accusam et justo
duo dolores et ea rebum. Stet clita kasd gubergren, no sea
takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum
dolor sit amet, consetetur sadipscing elitr, sed diam nonumy
eirmod tempor invidunt ut labore et dolore magna aliquyam erat,
sed diam voluptua. At vero eos et accusam et justo duo dolores et
ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est
Lorem ipsum dolor sit amet.</p>
<h2>Header 2</h2>
<p>Lorem ipsum dolor sit amet, consetetur sadipscing elitr,
sed diam nonumy eirmod tempor invidunt ut labore et dolore magna
aliquyam erat, sed diam voluptua. At vero eos et accusam et justo
duo dolores et ea rebum. Stet clita kasd gubergren, no sea
takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum
dolor sit amet, consetetur sadipscing elitr, sed diam nonumy
eirmod tempor invidunt ut labore et dolore magna aliquyam erat,
sed diam voluptua. At vero eos et accusam et justo duo dolores et
ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est
Lorem ipsum dolor sit amet.</p>
<h2>Header 2</h2>
<p>Lorem ipsum dolor sit amet, consetetur sadipscing elitr,
sed diam nonumy eirmod tempor invidunt ut labore et dolore magna
aliquyam erat, sed diam voluptua. At vero eos et accusam et justo
duo dolores et ea rebum. Stet clita kasd gubergren, no sea
takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum
dolor sit amet, consetetur sadipscing elitr, sed diam nonumy
eirmod tempor invidunt ut labore et dolore magna aliquyam erat,
sed diam voluptua. At vero eos et accusam et justo duo dolores et
ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est
Lorem ipsum dolor sit amet.</p>
</div>
</div>
<!-- footer -->
<div class="flz_box flz_r100 footer">Footer</div>
</div>
</div>
</div>
</body>
</html>
| {
"content_hash": "d26d93cf19d60701d652aaed429237c0",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 121,
"avg_line_length": 48.285714285714285,
"alnum_prop": 0.6772458310919849,
"repo_name": "floatzcss/floatz",
"id": "64b551c76360f19c7b5527cbaa71315439daa402",
"size": "3718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "floatz.dev.kit/samples/Layouting Pages/LayoutingPages_08.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6630"
},
{
"name": "CSS",
"bytes": "228024"
},
{
"name": "HTML",
"bytes": "612097"
},
{
"name": "JavaScript",
"bytes": "67190"
},
{
"name": "Shell",
"bytes": "13930"
}
],
"symlink_target": ""
} |
// Serializer for the ICSSoftSTORMNET.Security LinkOperation model.
import { Serializer as LinkOperationSerializer } from '../mixins/regenerated/serializers/i-c-s-soft-s-t-o-r-m-n-e-t-security-link-operation';
import __ApplicationSerializer from './application';
// Combines the app-wide base serializer with the regenerated LinkOperation
// mixin; no model-specific overrides are needed, so the body stays empty.
export default __ApplicationSerializer.extend(LinkOperationSerializer, {
});
| {
"content_hash": "a20dbd9a5284d6de4726b3e58cdc192c",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 141,
"avg_line_length": 45.666666666666664,
"alnum_prop": 0.781021897810219,
"repo_name": "Flexberry/ember-flexberry-security",
"id": "833cb8c33175d536a955916464e42d098e300189",
"size": "274",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "addon/serializers/i-c-s-soft-s-t-o-r-m-n-e-t-security-link-operation.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1781"
},
{
"name": "Handlebars",
"bytes": "92996"
},
{
"name": "JavaScript",
"bytes": "486449"
},
{
"name": "Less",
"bytes": "1574"
},
{
"name": "Shell",
"bytes": "813"
}
],
"symlink_target": ""
} |
// Only define the Joomla namespace if not defined.
if (typeof(Joomla) === 'undefined') {
    var Joomla = {};
}

/**
 * Text highlighter: wraps occurrences of the given words, found in the text
 * nodes between `startElement` and `endElement`, in `<span class="highlight">`
 * (tag/class configurable). Returns an object exposing highlight/unhighlight.
 */
Joomla.Highlighter = function (_options) {
    var $, words, options = {
        autoUnhighlight: true,
        caseSensitive: false,
        startElement: false,
        endElement: false,
        elements: [],
        className: 'highlight',
        onlyWords: true,
        tag: 'span'
    },
    highlight = function (terms) {
        if (terms.constructor === String) {
            terms = [terms];
        }
        if (options.autoUnhighlight) {
            unhighlight(terms);
        }
        // BUG FIX: the original initializer read `pattern` before it was
        // assigned ('\b' + pattern + '\b'), yielding "undefined" framed by
        // literal backspace characters (unescaped '\b' in a string literal).
        // Build the alternation from the search terms and use escaped '\\b'
        // word boundaries when onlyWords is requested.
        var pattern = '(' + terms.join('|') + ')';
        if (options.onlyWords) {
            pattern = '\\b' + pattern + '\\b';
        }
        var regex = new RegExp(pattern, options.caseSensitive ? '' : 'i');
        options.elements.map(function (el) {
            recurse(el, regex, options.className);
        });
        return this;
    },
    unhighlight = function (terms) {
        // BUG FIX: the original named this parameter `words`, shadowing the
        // closure-level `words` registry, so the lookups below never matched
        // and highlights were never removed. The parameter is now `terms`.
        if (terms.constructor === String) {
            terms = [terms];
        }
        var $elements, tn;
        terms.map(function (term) {
            term = (options.caseSensitive ? term : term.toUpperCase());
            if (words[term]) {
                $elements = $(words[term]);
                $elements.removeClass();
                $elements.each(function (index, el) {
                    tn = document.createTextNode($(el).text());
                    el.parentNode.replaceChild(tn, el);
                });
                // Drop the now-detached nodes so a later unhighlight of the
                // same term does not operate on stale references.
                delete words[term];
            }
        });
        return this;
    },
    recurse = function (node, regex, klass) {
        if (node.nodeType === 3) {
            // Text node: wrap the first match in a highlight element. The
            // caller's loop advances past it, so one match per node suffices.
            var match = node.nodeValue.match(regex), highlight, $highlight, wordNode, wordClone, comparer, i;
            if (match) {
                highlight = document.createElement(options.tag);
                $highlight = $(highlight);
                $highlight.addClass(klass);
                wordNode = node.splitText(match.index);
                wordNode.splitText(match[0].length);
                wordClone = wordNode.cloneNode(true);
                $highlight.append(wordClone);
                $(wordNode).replaceWith(highlight);
                $highlight.attr('rel', $highlight.text());
                comparer = $highlight.text();
                if (!options.caseSensitive) {
                    comparer = comparer.toUpperCase();
                }
                if (!words[comparer]) {
                    words[comparer] = [];
                }
                words[comparer].push(highlight);
                return 1;
            }
        } else if ((node.nodeType === 1 && node.childNodes) && !/(script|style|textarea|iframe)/i.test(node.tagName) && !(node.tagName === options.tag.toUpperCase() && node.className === klass)) {
            // Element node: descend, skipping non-renderable containers and
            // already-highlighted spans. `i` skips the nodes splitText adds.
            for (i = 0; i < node.childNodes.length; i++) {
                i += recurse(node.childNodes[i], regex, klass);
            }
        }
        return 0;
    },
    getElements = function ($start, $end) {
        // Collect the sibling elements strictly between start and end markers.
        var $next = $start.next();
        if ($next.attr('id') !== $end.attr('id')) {
            options.elements.push($next.get(0));
            getElements($next, $end);
        }
    },
    initialize = function (_options) {
        $ = jQuery.noConflict();
        $.extend(options, _options);
        getElements($(options.startElement), $(options.endElement));
        // Registry mapping (normalized) term -> highlight elements created
        // for it; used by unhighlight to undo the wrapping.
        words = {};
    };
    initialize(_options);
    return {
        highlight: highlight,
        unhighlight: unhighlight
    };
};
| {
"content_hash": "93d66672f486801f9f749fb363ebf2e8",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 196,
"avg_line_length": 32.794392523364486,
"alnum_prop": 0.4990025648332858,
"repo_name": "back1992/ticool",
"id": "b233b1d38a932891193df1840d9c085fd34269a3",
"size": "3718",
"binary": false,
"copies": "53",
"ref": "refs/heads/master",
"path": "media/system/js/highlighter-uncompressed.js",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1157815"
},
{
"name": "JavaScript",
"bytes": "335767"
},
{
"name": "PHP",
"bytes": "5865040"
},
{
"name": "Perl",
"bytes": "26515"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_102) on Wed Nov 02 19:53:02 IST 2016 -->
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Uses of Class org.apache.solr.index.SortingMergePolicyFactory (Solr 6.3.0 API)</title>
<meta name="date" content="2016-11-02">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.solr.index.SortingMergePolicyFactory (Solr 6.3.0 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/apache/solr/index/SortingMergePolicyFactory.html" title="class in org.apache.solr.index">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/solr/index/class-use/SortingMergePolicyFactory.html" target="_top">Frames</a></li>
<li><a href="SortingMergePolicyFactory.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.solr.index.SortingMergePolicyFactory" class="title">Uses of Class<br>org.apache.solr.index.SortingMergePolicyFactory</h2>
</div>
<div class="classUseContainer">No usage of org.apache.solr.index.SortingMergePolicyFactory</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/apache/solr/index/SortingMergePolicyFactory.html" title="class in org.apache.solr.index">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/solr/index/class-use/SortingMergePolicyFactory.html" target="_top">Frames</a></li>
<li><a href="SortingMergePolicyFactory.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>
<i>Copyright © 2000-2016 Apache Software Foundation. All Rights Reserved.</i>
<script src='../../../../../prettify.js' type='text/javascript'></script>
<script type='text/javascript'>
(function(){
var oldonload = window.onload;
if (typeof oldonload != 'function') {
window.onload = prettyPrint;
} else {
window.onload = function() {
oldonload();
prettyPrint();
}
}
})();
</script>
</small></p>
</body>
</html>
| {
"content_hash": "3b703f9ddf1a185948a068b478e4b85a",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 157,
"avg_line_length": 36.45,
"alnum_prop": 0.602978640015677,
"repo_name": "johannesbraun/clm_autocomplete",
"id": "3271052e3ca38e855d49abba816b9a99dd23fbfa",
"size": "5103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/solr-core/org/apache/solr/index/class-use/SortingMergePolicyFactory.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "291"
},
{
"name": "Batchfile",
"bytes": "63061"
},
{
"name": "CSS",
"bytes": "238996"
},
{
"name": "HTML",
"bytes": "230318"
},
{
"name": "JavaScript",
"bytes": "1224188"
},
{
"name": "Jupyter Notebook",
"bytes": "638688"
},
{
"name": "Python",
"bytes": "3829"
},
{
"name": "Roff",
"bytes": "34741083"
},
{
"name": "Shell",
"bytes": "96828"
},
{
"name": "XSLT",
"bytes": "124838"
}
],
"symlink_target": ""
} |
from BattleBase import *
from DistributedBattleAI import *
from toontown.toonbase.ToontownBattleGlobals import *
import random
from toontown.suit import DistributedSuitBaseAI
import SuitBattleGlobals
import BattleExperienceAI
from toontown.toon import NPCToons
from toontown.pets import PetTricks, DistributedPetProxyAI
from toontown.hood import ZoneUtil
from direct.showbase.PythonUtil import lerp
import sys
class BattleCalculatorAI:
    """Server-side resolver for toon and suit attacks in a battle.

    NOTE(review): class attributes below are evaluated at import time and read
    the live `simbase` config, so this module must be imported inside the AI
    process.
    """
    # Accuracy bonus for the 2nd/3rd/4th toon attacking with the same track.
    AccuracyBonuses = [0,
     20,
     40,
     60]
    # Damage bonus (percent) applied when multiple toons use the same track.
    DamageBonuses = [0,
     20,
     20,
     20]
    # Accuracy bonus granted per experience level a toon has in a track.
    AttackExpPerTrack = [0,
     10,
     20,
     30,
     40,
     50,
     60]
    # Rounds a lure of each gag level keeps a suit lured (last entry: train track).
    NumRoundsLured = [2,
     2,
     3,
     3,
     4,
     4,
     15]
    # Sentinel stored in self.traps when conflicting traps cancel each other.
    TRAP_CONFLICT = -2
    # Behavior toggles (kept as class constants).
    APPLY_HEALTH_ADJUSTMENTS = 1
    TOONS_TAKE_NO_DAMAGE = 0
    CAP_HEALS = 1
    CLEAR_SUIT_ATTACKERS = 1
    SUITS_UNLURED_IMMEDIATELY = 1
    CLEAR_MULTIPLE_TRAPS = 0
    KBBONUS_LURED_FLAG = 0
    KBBONUS_TGT_LURED = 1
    notify = DirectNotifyGlobal.directNotify.newCategory('BattleCalculatorAI')
    # Debug/cheat switches read from the server config at import time.
    toonsAlwaysHit = simbase.config.GetBool('toons-always-hit', 0)
    toonsAlwaysMiss = simbase.config.GetBool('toons-always-miss', 0)
    toonsAlways5050 = simbase.config.GetBool('toons-always-5050', 0)
    suitsAlwaysHit = simbase.config.GetBool('suits-always-hit', 0)
    suitsAlwaysMiss = simbase.config.GetBool('suits-always-miss', 0)
    immortalSuits = simbase.config.GetBool('immortal-suits', 0)
    propAndOrganicBonusStack = simbase.config.GetBool('prop-and-organic-bonus-stack', 0)
    def __init__(self, battle, tutorialFlag = 0):
        """Create a calculator bound to `battle`.

        tutorialFlag makes all toon attacks hit (see __calcToonAtkHit).
        """
        self.battle = battle
        # Bookkeeping dicts, keyed by suit/toon doIds (populated during rounds).
        self.SuitAttackers = {}
        self.currentlyLuredSuits = {}
        self.successfulLures = {}
        self.toonAtkOrder = []
        self.toonHPAdjusts = {}
        self.toonSkillPtsGained = {}
        # suitId -> [trapLvl, creatorToonId, damage]; TRAP_CONFLICT marks clashes.
        self.traps = {}
        self.npcTraps = {}
        self.suitAtkStats = {}
        # Reset both the hp (heal) and non-hp bonus tables.
        self.__clearBonuses(hp=1)
        self.__clearBonuses(hp=0)
        self.delayedUnlures = []
        # Experience multiplier; starts at the server-wide base rate.
        self.__skillCreditMultiplier = simbase.air.baseXpMultiplier
        self.tutorialFlag = tutorialFlag
        self.trainTrapTriggered = False
    def setSkillCreditMultiplier(self, mult):
        # Scale the server-wide base XP rate by `mult` (e.g. invasion bonuses).
        self.__skillCreditMultiplier = simbase.air.baseXpMultiplier * mult
    def getSkillCreditMultiplier(self):
        # Current effective experience multiplier.
        return self.__skillCreditMultiplier
    def cleanup(self):
        # Break the reference cycle with the battle object on teardown.
        self.battle = None
        return
    def __calcToonAtkHit(self, attackIndex, atkTargets):
        """Decide whether a toon attack hits and with what accuracy.

        attackIndex -- key into self.battle.toonAttacks.
        atkTargets  -- targets from __createToonTargetList (suits, or toon ids
                       for heals).

        Returns a (hit, accuracy) pair where hit is 0/1. Side effect: writes
        the miss flag into attack[TOON_ACCBONUS_COL] (1 == missed).
        """
        # Trivial outcomes: no targets, tutorial, and the debug cheat switches.
        if len(atkTargets) == 0:
            return (0, 0)
        if self.tutorialFlag:
            return (1, 95)
        if self.toonsAlways5050:
            roll = random.randint(0, 99)
            if roll < 50:
                return (1, 95)
            else:
                return (0, 0)
        if self.toonsAlwaysHit:
            return (1, 95)
        elif self.toonsAlwaysMiss:
            return (0, 0)
        debug = self.notify.getDebug()
        attack = self.battle.toonAttacks[attackIndex]
        atkTrack, atkLevel = self.__getActualTrackLevel(attack)
        # Gag-accuracy buff only applies in selected outdoor zones.
        hasAccuracyBuff = False
        toon = simbase.air.doId2do.get(attack[TOON_ID_COL])
        if toon:
            if toon.hasBuff(BGagAccuracy):
                if not ZoneUtil.isDynamicZone(toon.zoneId):
                    if ZoneUtil.getWhereName(toon.zoneId, True) in ('street', 'factoryExterior', 'cogHQExterior'):
                        hasAccuracyBuff = True
        # NPC SOS and fire always hit; traps always land (they only arm).
        if atkTrack == NPCSOS:
            return (1, 95)
        if atkTrack == FIRE:
            return (1, 95)
        if atkTrack == TRAP:
            if debug:
                self.notify.debug('Attack is a trap, so it hits regardless')
            attack[TOON_ACCBONUS_COL] = 0
            return (1, 100)
        elif atkTrack == DROP and attack[TOON_TRACK_COL] == NPCSOS:
            # NPC drop misses when every target is lured (nothing to drop on).
            unluredSuits = 0
            for tgt in atkTargets:
                if not self.__suitIsLured(tgt.getDoId()):
                    unluredSuits = 1
            if unluredSuits == 0:
                attack[TOON_ACCBONUS_COL] = 1
                return (0, 0)
        elif atkTrack == DROP:
            # Regular drop also auto-misses when all targets are lured.
            allLured = True
            for i in xrange(len(atkTargets)):
                if self.__suitIsLured(atkTargets[i].getDoId()):
                    pass
                else:
                    allLured = False
            if allLured:
                attack[TOON_ACCBONUS_COL] = 1
                return (0, 0)
        elif atkTrack == PETSOS:
            return self.__calculatePetTrickSuccess(attack)
        # Use the weakest (most negative) suit defense among the targets, and
        # count how many of them are currently lured.
        tgtDef = 0
        numLured = 0
        if atkTrack != HEAL:
            for currTarget in atkTargets:
                thisSuitDef = self.__targetDefense(currTarget, atkTrack)
                if debug:
                    self.notify.debug('Examining suit def for toon attack: ' + str(thisSuitDef))
                tgtDef = min(thisSuitDef, tgtDef)
                if self.__suitIsLured(currTarget.getDoId()):
                    numLured += 1
        # Experience bonus: take the best track experience among all toons
        # attacking this same target with this same track.
        trackExp = self.__toonTrackExp(attack[TOON_ID_COL], atkTrack)
        for currOtherAtk in self.toonAtkOrder:
            if currOtherAtk != attack[TOON_ID_COL]:
                nextAttack = self.battle.toonAttacks[currOtherAtk]
                nextAtkTrack = self.__getActualTrack(nextAttack)
                if atkTrack == nextAtkTrack and attack[TOON_TGT_COL] == nextAttack[TOON_TGT_COL]:
                    currTrackExp = self.__toonTrackExp(nextAttack[TOON_ID_COL], atkTrack)
                    if debug:
                        self.notify.debug('Examining toon track exp bonus: ' + str(currTrackExp))
                    trackExp = max(currTrackExp, trackExp)
        if debug:
            if atkTrack == HEAL:
                self.notify.debug('Toon attack is a heal, no target def used')
            else:
                self.notify.debug('Suit defense used for toon attack: ' + str(tgtDef))
            self.notify.debug('Toon track exp bonus used for toon attack: ' + str(trackExp))
        # NPC-invoked attacks roll 0 (always within accuracy); toons roll 0-99.
        if attack[TOON_TRACK_COL] == NPCSOS:
            randChoice = 0
        else:
            randChoice = random.randint(0, 99)
        propAcc = AvPropAccuracy[atkTrack][atkLevel]
        if hasAccuracyBuff:
            propAcc *= BGagAccuracyMultiplier
        if atkTrack == LURE:
            # Organic (tree) and interactive-prop bonuses improve lure accuracy;
            # whether they stack is a server config option.
            treebonus = self.__toonCheckGagBonus(attack[TOON_ID_COL], atkTrack, atkLevel)
            propBonus = self.__checkPropBonus(atkTrack)
            if self.propAndOrganicBonusStack:
                propAcc = 0
                if treebonus:
                    self.notify.debug('using organic bonus lure accuracy')
                    propAcc += AvLureBonusAccuracy[atkLevel]
                if propBonus:
                    self.notify.debug('using prop bonus lure accuracy')
                    propAcc += AvLureBonusAccuracy[atkLevel]
            elif treebonus or propBonus:
                self.notify.debug('using oragnic OR prop bonus lure accuracy')
                propAcc = AvLureBonusAccuracy[atkLevel]
        attackAcc = propAcc + trackExp + tgtDef
        # Same-track attacks on the same target share one hit/miss outcome:
        # copy the previous attacker's result instead of rolling again.
        currAtk = self.toonAtkOrder.index(attackIndex)
        if currAtk > 0 and atkTrack != HEAL:
            prevAtkId = self.toonAtkOrder[currAtk - 1]
            prevAttack = self.battle.toonAttacks[prevAtkId]
            prevAtkTrack = self.__getActualTrack(prevAttack)
            lure = atkTrack == LURE and (not attackAffectsGroup(atkTrack, atkLevel, attack[TOON_TRACK_COL]) and attack[TOON_TGT_COL] in self.successfulLures or attackAffectsGroup(atkTrack, atkLevel, attack[TOON_TRACK_COL]))
            if atkTrack == prevAtkTrack and (attack[TOON_TGT_COL] == prevAttack[TOON_TGT_COL] or lure):
                if prevAttack[TOON_ACCBONUS_COL] == 1:
                    if debug:
                        self.notify.debug('DODGE: Toon attack track dodged')
                elif prevAttack[TOON_ACCBONUS_COL] == 0:
                    if debug:
                        self.notify.debug('HIT: Toon attack track hit')
                attack[TOON_ACCBONUS_COL] = prevAttack[TOON_ACCBONUS_COL]
                return (not attack[TOON_ACCBONUS_COL], attackAcc)
        atkAccResult = attackAcc
        if debug:
            self.notify.debug('setting atkAccResult to %d' % atkAccResult)
        acc = attackAcc + self.__calcToonAccBonus(attackIndex)
        if atkTrack != LURE and atkTrack != HEAL:
            if atkTrack != DROP:
                # Lured targets are easier to hit: all lured -> guaranteed hit,
                # otherwise accuracy scales with the lured fraction.
                if numLured == len(atkTargets):
                    if debug:
                        self.notify.debug('all targets are lured, attack hits')
                    attack[TOON_ACCBONUS_COL] = 0
                    return (1, 100)
                else:
                    luredRatio = float(numLured) / float(len(atkTargets))
                    accAdjust = 100 * luredRatio
                    if accAdjust > 0 and debug:
                        self.notify.debug(str(numLured) + ' out of ' + str(len(atkTargets)) + ' targets are lured, so adding ' + str(accAdjust) + ' to attack accuracy')
                    acc += accAdjust
            elif numLured == len(atkTargets):
                # Drop on fully-lured targets always misses.
                if debug:
                    self.notify.debug('all targets are lured, attack misses')
                attack[TOON_ACCBONUS_COL] = 0
                return (0, 0)
        if acc > MaxToonAcc:
            acc = MaxToonAcc
        if randChoice < acc:
            if debug:
                self.notify.debug('HIT: Toon attack rolled' + str(randChoice) + 'to hit with an accuracy of' + str(acc))
            attack[TOON_ACCBONUS_COL] = 0
        else:
            if debug:
                self.notify.debug('MISS: Toon attack rolled' + str(randChoice) + 'to hit with an accuracy of' + str(acc))
            attack[TOON_ACCBONUS_COL] = 1
        return (not attack[TOON_ACCBONUS_COL], atkAccResult)
def __toonTrackExp(self, toonId, track):
toon = self.battle.getToon(toonId)
if toon != None:
toonExpLvl = toon.experience.getExpLevel(track)
exp = self.AttackExpPerTrack[toonExpLvl]
if track == HEAL:
exp = exp * 0.5
self.notify.debug('Toon track exp: ' + str(toonExpLvl) + ' and resulting acc bonus: ' + str(exp))
return exp
else:
return 0
return
def __toonCheckGagBonus(self, toonId, track, level):
toon = self.battle.getToon(toonId)
if toon != None:
return toon.checkGagBonus(track, level)
else:
return False
return
def __checkPropBonus(self, track):
result = False
if self.battle.getInteractivePropTrackBonus() == track:
result = True
return result
def __targetDefense(self, suit, atkTrack):
if atkTrack == HEAL:
return 0
suitDef = SuitBattleGlobals.SuitAttributes[suit.dna.name]['def'][suit.getLevel()]
return -suitDef
    def __createToonTargetList(self, attackIndex):
        """Build the list of participants a toon attack affects.

        Returns suits for offensive tracks, toon ids for heals/pet tricks, or
        an empty list for NPC SOS calls (their effects are resolved elsewhere).
        """
        attack = self.battle.toonAttacks[attackIndex]
        atkTrack, atkLevel = self.__getActualTrackLevel(attack)
        targetList = []
        if atkTrack == NPCSOS:
            return targetList
        if not attackAffectsGroup(atkTrack, atkLevel, attack[TOON_TRACK_COL]):
            # Single-target gag: resolve the stored target id.
            if atkTrack == HEAL:
                target = attack[TOON_TGT_COL]
            else:
                target = self.battle.findSuit(attack[TOON_TGT_COL])
            if target != None:
                targetList.append(target)
        elif atkTrack == HEAL or atkTrack == PETSOS:
            # Group heal: NPC heals and pet tricks affect everyone; a toon's
            # own group heal affects everyone but the healer.
            if attack[TOON_TRACK_COL] == NPCSOS or atkTrack == PETSOS:
                targetList = self.battle.activeToons
            else:
                for currToon in self.battle.activeToons:
                    if attack[TOON_ID_COL] != currToon:
                        targetList.append(currToon)
        else:
            # Group offensive gag: hits every active suit.
            targetList = self.battle.activeSuits
        return targetList
def __prevAtkTrack(self, attackerId, toon = 1):
if toon:
prevAtkIdx = self.toonAtkOrder.index(attackerId) - 1
if prevAtkIdx >= 0:
prevAttackerId = self.toonAtkOrder[prevAtkIdx]
attack = self.battle.toonAttacks[prevAttackerId]
return self.__getActualTrack(attack)
else:
return NO_ATTACK
def getSuitTrapType(self, suitId):
if suitId in self.traps:
if self.traps[suitId][0] == self.TRAP_CONFLICT:
return NO_TRAP
else:
return self.traps[suitId][0]
else:
return NO_TRAP
def __suitTrapDamage(self, suitId):
if suitId in self.traps:
return self.traps[suitId][2]
else:
return 0
def addTrainTrapForJoiningSuit(self, suitId):
self.notify.debug('addTrainTrapForJoiningSuit suit=%d self.traps=%s' % (suitId, self.traps))
trapInfoToUse = None
for trapInfo in self.traps.values():
if trapInfo[0] == UBER_GAG_LEVEL_INDEX:
trapInfoToUse = trapInfo
break
if trapInfoToUse:
self.traps[suitId] = trapInfoToUse
else:
self.notify.warning('huh we did not find a train trap?')
return
    def __addSuitGroupTrap(self, suitId, trapLvl, attackerId, allSuits, npcDamage = 0):
        """Arm a group trap (e.g. train track) on a suit.

        A second trap on an already-trapped suit cancels both: the record is
        marked TRAP_CONFLICT for every suit in `allSuits`. npcDamage != 0 means
        an NPC placed the trap (no experience credit, fixed damage).
        """
        if npcDamage == 0:
            if suitId in self.traps:
                # Suit already trapped: conflict. Mark every suit so the whole
                # group trap is voided.
                if self.traps[suitId][0] == self.TRAP_CONFLICT:
                    pass
                else:
                    self.traps[suitId][0] = self.TRAP_CONFLICT
                for suit in allSuits:
                    id = suit.doId
                    if id in self.traps:
                        self.traps[id][0] = self.TRAP_CONFLICT
                    else:
                        self.traps[id] = [self.TRAP_CONFLICT, 0, 0]
            else:
                # Fresh trap: compute damage with organic/prop bonuses and
                # record the creator only if the toon earns skill credit.
                toon = self.battle.getToon(attackerId)
                organicBonus = toon.checkGagBonus(TRAP, trapLvl)
                propBonus = self.__checkPropBonus(TRAP)
                damage = getAvPropDamage(TRAP, trapLvl, toon.experience.getExp(TRAP), organicBonus, propBonus, self.propAndOrganicBonusStack)
                if self.itemIsCredit(TRAP, trapLvl):
                    self.traps[suitId] = [trapLvl, attackerId, damage]
                else:
                    self.traps[suitId] = [trapLvl, 0, damage]
                self.notify.debug('calling __addLuredSuitsDelayed')
                self.__addLuredSuitsDelayed(attackerId, targetId=-1, ignoreDamageCheck=True)
        elif suitId in self.traps:
            # NPC trap replaces a conflicted record only.
            if self.traps[suitId][0] == self.TRAP_CONFLICT:
                self.traps[suitId] = [trapLvl, 0, npcDamage]
        elif not self.__suitIsLured(suitId):
            self.traps[suitId] = [trapLvl, 0, npcDamage]
def __addSuitTrap(self, suitId, trapLvl, attackerId, npcDamage = 0):
    # Record a single-suit trap.  Entry layout is [level, creatorToonId
    # (0 = no credit), damage]; planting a second trap on the same suit
    # cancels both via the TRAP_CONFLICT marker.
    if npcDamage == 0:
        if suitId in self.traps:
            if self.traps[suitId][0] == self.TRAP_CONFLICT:
                pass
            else:
                self.traps[suitId][0] = self.TRAP_CONFLICT
        else:
            toon = self.battle.getToon(attackerId)
            organicBonus = toon.checkGagBonus(TRAP, trapLvl)
            propBonus = self.__checkPropBonus(TRAP)
            damage = getAvPropDamage(TRAP, trapLvl, toon.experience.getExp(TRAP), organicBonus, propBonus, self.propAndOrganicBonusStack)
            # Only credit the attacker when this gag level earns skill credit.
            if self.itemIsCredit(TRAP, trapLvl):
                self.traps[suitId] = [trapLvl, attackerId, damage]
            else:
                self.traps[suitId] = [trapLvl, 0, damage]
    elif suitId in self.traps:
        # NPC-planted trap may replace a conflicted entry.
        if self.traps[suitId][0] == self.TRAP_CONFLICT:
            self.traps[suitId] = [trapLvl, 0, npcDamage]
    elif not self.__suitIsLured(suitId):
        self.traps[suitId] = [trapLvl, 0, npcDamage]
def __removeSuitTrap(self, suitId):
if suitId in self.traps:
del self.traps[suitId]
def __clearTrapCreator(self, creatorId, suitId = None):
if suitId == None:
for currTrap in self.traps.keys():
if creatorId == self.traps[currTrap][1]:
self.traps[currTrap][1] = 0
elif suitId in self.traps:
self.traps[suitId][1] = 0
return
def __trapCreator(self, suitId):
if suitId in self.traps:
return self.traps[suitId][1]
else:
return 0
def __initTraps(self):
self.trainTrapTriggered = False
keysList = self.traps.keys()
for currTrap in keysList:
if self.traps[currTrap][0] == self.TRAP_CONFLICT:
del self.traps[currTrap]
def __calcToonAtkHp(self, toonId):
    # Resolve one toon's attack: roll to-hit, then fill the attack's
    # per-target damage column (TOON_HP_COL), record lure bookkeeping,
    # plant traps, and award trap/lure experience.  Lure and trap
    # interact heavily: luring onto a trap springs it immediately.
    attack = self.battle.toonAttacks[toonId]
    targetList = self.__createToonTargetList(toonId)
    atkHit, atkAcc = self.__calcToonAtkHit(toonId, targetList)
    atkTrack, atkLevel, atkHp = self.__getActualTrackLevelHp(attack)
    # A missed attack does nothing — except heals, which still land
    # at reduced strength (see the 0.2 multiplier below).
    if not atkHit and atkTrack != HEAL:
        return
    validTargetAvail = 0
    lureDidDamage = 0
    currLureId = -1
    for currTarget in xrange(len(targetList)):
        attackLevel = -1
        attackTrack = None
        attackDamage = 0
        toonTarget = 0
        targetLured = 0
        # Heals/pet tricks target toon ids; everything else suit objects.
        if atkTrack == HEAL or atkTrack == PETSOS:
            targetId = targetList[currTarget]
            toonTarget = 1
        else:
            targetId = targetList[currTarget].getDoId()
        if atkTrack == LURE:
            if self.getSuitTrapType(targetId) == NO_TRAP:
                # Plain lure: no trap waiting under the target.
                if self.notify.getDebug():
                    self.notify.debug('Suit lured, but no trap exists')
                if self.SUITS_UNLURED_IMMEDIATELY:
                    if not self.__suitIsLured(targetId, prevRound=1):
                        if not self.__combatantDead(targetId, toon=toonTarget):
                            validTargetAvail = 1
                        rounds = self.NumRoundsLured[atkLevel]
                        wakeupChance = 100 - atkAcc * 2
                        npcLurer = attack[TOON_TRACK_COL] == NPCSOS
                        currLureId = self.__addLuredSuitInfo(targetId, -1, rounds, wakeupChance, toonId, atkLevel, lureId=currLureId, npc=npcLurer)
                        if self.notify.getDebug():
                            self.notify.debug('Suit lured for ' + str(rounds) + ' rounds max with ' + str(wakeupChance) + '% chance to wake up each round')
                        targetLured = 1
            else:
                # Lure onto a trap: the trap springs and deals its damage.
                attackTrack = TRAP
                if targetId in self.traps:
                    trapInfo = self.traps[targetId]
                    attackLevel = trapInfo[0]
                else:
                    attackLevel = NO_TRAP
                attackDamage = self.__suitTrapDamage(targetId)
                # Trap experience goes to whoever planted the trap.
                trapCreatorId = self.__trapCreator(targetId)
                if trapCreatorId > 0:
                    self.notify.debug('Giving trap EXP to toon ' + str(trapCreatorId))
                    self.__addAttackExp(attack, track=TRAP, level=attackLevel, attackerId=trapCreatorId)
                self.__clearTrapCreator(trapCreatorId, targetId)
                lureDidDamage = 1
                if self.notify.getDebug():
                    self.notify.debug('Suit lured right onto a trap! (' + str(AvProps[attackTrack][attackLevel]) + ',' + str(attackLevel) + ')')
                if not self.__combatantDead(targetId, toon=toonTarget):
                    validTargetAvail = 1
                targetLured = 1
            if not self.SUITS_UNLURED_IMMEDIATELY:
                if not self.__suitIsLured(targetId, prevRound=1):
                    if not self.__combatantDead(targetId, toon=toonTarget):
                        validTargetAvail = 1
                    rounds = self.NumRoundsLured[atkLevel]
                    wakeupChance = 100 - atkAcc * 2
                    npcLurer = attack[TOON_TRACK_COL] == NPCSOS
                    currLureId = self.__addLuredSuitInfo(targetId, -1, rounds, wakeupChance, toonId, atkLevel, lureId=currLureId, npc=npcLurer)
                    if self.notify.getDebug():
                        self.notify.debug('Suit lured for ' + str(rounds) + ' rounds max with ' + str(wakeupChance) + '% chance to wake up each round')
                    targetLured = 1
            if attackLevel != -1:
                self.__addLuredSuitsDelayed(toonId, targetId)
            # Remember the strongest successful lure per target.
            if targetLured and (not targetId in self.successfulLures or targetId in self.successfulLures and self.successfulLures[targetId][1] < atkLevel):
                self.notify.debug('Adding target ' + str(targetId) + ' to successfulLures list')
                self.successfulLures[targetId] = [toonId,
                 atkLevel,
                 atkAcc,
                 -1]
        else:
            if atkTrack == TRAP:
                npcDamage = 0
                if attack[TOON_TRACK_COL] == NPCSOS:
                    npcDamage = atkHp
                if self.CLEAR_MULTIPLE_TRAPS:
                    if self.getSuitTrapType(targetId) != NO_TRAP:
                        self.__clearAttack(toonId)
                        return
                if atkLevel == UBER_GAG_LEVEL_INDEX:
                    # Train trap covers the whole lineup.
                    self.__addSuitGroupTrap(targetId, atkLevel, toonId, targetList, npcDamage)
                    if self.__suitIsLured(targetId):
                        self.notify.debug('Train Trap on lured suit %d, \n indicating with KBBONUS_COL flag' % targetId)
                        tgtPos = self.battle.activeSuits.index(targetList[currTarget])
                        attack[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_LURED_FLAG
                else:
                    self.__addSuitTrap(targetId, atkLevel, toonId, npcDamage)
            elif self.__suitIsLured(targetId) and atkTrack == SOUND:
                self.notify.debug('Sound on lured suit, ' + 'indicating with KBBONUS_COL flag')
                tgtPos = self.battle.activeSuits.index(targetList[currTarget])
                attack[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_LURED_FLAG
            attackLevel = atkLevel
            attackTrack = atkTrack
            toon = self.battle.getToon(toonId)
            if attack[TOON_TRACK_COL] == NPCSOS and lureDidDamage != 1 or attack[TOON_TRACK_COL] == PETSOS:
                attackDamage = atkHp
            elif atkTrack == FIRE:
                # Firing a cog consumes a pink slip and kills it outright.
                suit = self.battle.findSuit(targetId)
                if suit:
                    costToFire = 1
                    abilityToFire = toon.getPinkSlips()
                    numLeft = abilityToFire - costToFire
                    if numLeft < 0:
                        numLeft = 0
                    toon.b_setPinkSlips(numLeft)
                    if costToFire > abilityToFire:
                        simbase.air.writeServerEvent('suspicious', toonId, 'Toon attempting to fire a %s cost cog with %s pinkslips' % (costToFire, abilityToFire))
                        print 'Not enough PinkSlips to fire cog - print a warning here'
                    else:
                        suit.skeleRevives = 0
                        attackDamage = suit.getHP()
                else:
                    attackDamage = 0
                bonus = 0
            else:
                organicBonus = toon.checkGagBonus(attackTrack, attackLevel)
                propBonus = self.__checkPropBonus(attackTrack)
                attackDamage = getAvPropDamage(attackTrack, attackLevel, toon.experience.getExp(attackTrack), organicBonus, propBonus, self.propAndOrganicBonusStack)
            if not self.__combatantDead(targetId, toon=toonTarget):
                if self.__suitIsLured(targetId) and atkTrack == DROP:
                    # Drop always misses lured suits.
                    self.notify.debug('not setting validTargetAvail, since drop on a lured suit')
                else:
                    validTargetAvail = 1
        # Translate the computed damage into the attack's result column.
        if attackLevel == -1 and not atkTrack == FIRE:
            result = LURE_SUCCEEDED
        elif atkTrack != TRAP:
            result = attackDamage
            if atkTrack == HEAL:
                # Missed heals still land at 20% strength.
                if not self.__attackHasHit(attack, suit=0):
                    result = result * 0.2
                if self.notify.getDebug():
                    self.notify.debug('toon does ' + str(result) + ' healing to toon(s)')
            else:
                if self.__suitIsLured(targetId) and atkTrack == DROP:
                    result = 0
                    self.notify.debug('setting damage to 0, since drop on a lured suit')
                if self.notify.getDebug():
                    self.notify.debug('toon does ' + str(result) + ' damage to suit')
        else:
            result = 0
        if result != 0 or atkTrack == PETSOS:
            targets = self.__getToonTargets(attack)
            if targetList[currTarget] not in targets:
                if self.notify.getDebug():
                    self.notify.debug('Target of toon is not accessible!')
                continue
            targetIndex = targets.index(targetList[currTarget])
            if atkTrack == HEAL:
                # Group heals split evenly among targets.
                result = result / len(targetList)
                if self.notify.getDebug():
                    self.notify.debug('Splitting heal among ' + str(len(targetList)) + ' targets')
            if targetId in self.successfulLures and atkTrack == LURE:
                self.notify.debug('Updating lure damage to ' + str(result))
                self.successfulLures[targetId][3] = result
            else:
                attack[TOON_HP_COL][targetIndex] = result
            # Damaging a lured suit pays out the pending lure experience.
            if result > 0 and atkTrack != HEAL and atkTrack != DROP and atkTrack != PETSOS:
                attackTrack = LURE
                lureInfos = self.__getLuredExpInfo(targetId)
                for currInfo in lureInfos:
                    if currInfo[3]:
                        self.notify.debug('Giving lure EXP to toon ' + str(currInfo[0]))
                        self.__addAttackExp(attack, track=attackTrack, level=currInfo[1], attackerId=currInfo[0])
                    self.__clearLurer(currInfo[0], lureId=currInfo[2])
    if lureDidDamage:
        if self.itemIsCredit(atkTrack, atkLevel):
            self.notify.debug('Giving lure EXP to toon ' + str(toonId))
            self.__addAttackExp(attack)
    if not validTargetAvail and self.__prevAtkTrack(toonId) != atkTrack:
        self.__clearAttack(toonId)
    return
def __getToonTargets(self, attack):
    # Heals and pet tricks target the toon side; every other track
    # targets the active suits.
    track = self.__getActualTrack(attack)
    if track in (HEAL, PETSOS):
        return self.battle.activeToons
    return self.battle.activeSuits
def __attackHasHit(self, attack, suit = 0):
    # Did this attack land?  Suit attacks count as hits when any
    # per-toon damage entry is positive; toon attacks miss when the
    # accuracy-bonus flag is set or nothing was attempted.
    if suit == 1:
        hit = 0
        for dmg in attack[SUIT_HP_COL]:
            if dmg > 0:
                hit = 1
                break
        return hit
    track = self.__getActualTrack(attack)
    return not attack[TOON_ACCBONUS_COL] and track != NO_ATTACK
def __attackDamage(self, attack, suit = 0):
    # First positive damage entry recorded for this attack, or 0 when
    # it hit nothing.
    column = SUIT_HP_COL if suit else TOON_HP_COL
    for dmg in attack[column]:
        if dmg > 0:
            return dmg
    return 0
def __attackDamageForTgt(self, attack, tgtPos, suit = 0):
    # Damage recorded against one specific target position.
    column = SUIT_HP_COL if suit else TOON_HP_COL
    return attack[column][tgtPos]
def __calcToonAccBonus(self, attackKey):
    # Accuracy bonus for an attack based on how many earlier attacks
    # this round hit the same target (or hit as/against a group) on a
    # different track.  Returns the AccuracyBonuses table entry.
    numPrevHits = 0
    attackIdx = self.toonAtkOrder.index(attackKey)
    # The current attack does not change across iterations — the
    # original recomputed these lookups inside the loop; hoisted here.
    attack = self.battle.toonAttacks[attackKey]
    atkTrack, atkLevel = self.__getActualTrackLevel(attack)
    for currPrevAtk in xrange(attackIdx - 1, -1, -1):
        prevAttackKey = self.toonAtkOrder[currPrevAtk]
        prevAttack = self.battle.toonAttacks[prevAttackKey]
        prvAtkTrack, prvAtkLevel = self.__getActualTrackLevel(prevAttack)
        if self.__attackHasHit(prevAttack) and (attackAffectsGroup(prvAtkTrack, prvAtkLevel, prevAttack[TOON_TRACK_COL]) or attackAffectsGroup(atkTrack, atkLevel, attack[TOON_TRACK_COL]) or attack[TOON_TGT_COL] == prevAttack[TOON_TGT_COL]) and atkTrack != prvAtkTrack:
            numPrevHits += 1
    if numPrevHits > 0 and self.notify.getDebug():
        self.notify.debug('ACC BONUS: toon attack received accuracy ' + 'bonus of ' + str(self.AccuracyBonuses[numPrevHits]) + ' from previous attack by (' + str(attack[TOON_ID_COL]) + ') which hit')
    return self.AccuracyBonuses[numPrevHits]
def __applyToonAttackDamages(self, toonId, hpbonus = 0, kbbonus = 0):
    # Apply one toon attack's stored damage to its targets and return
    # the total applied.  hpbonus/kbbonus select which column is
    # applied: the same-track damage bonus, the knockback bonus, or
    # (default) the base per-target damage.
    totalDamages = 0
    if not self.APPLY_HEALTH_ADJUSTMENTS:
        return totalDamages
    attack = self.battle.toonAttacks[toonId]
    track = self.__getActualTrack(attack)
    if track != NO_ATTACK and track != SOS and track != TRAP and track != NPCSOS:
        targets = self.__getToonTargets(attack)
        for position in xrange(len(targets)):
            if hpbonus:
                # The flat hp bonus only applies to actual targets.
                if targets[position] in self.__createToonTargetList(toonId):
                    damageDone = attack[TOON_HPBONUS_COL]
                else:
                    damageDone = 0
            elif kbbonus:
                if targets[position] in self.__createToonTargetList(toonId):
                    damageDone = attack[TOON_KBBONUS_COL][position]
                else:
                    damageDone = 0
            else:
                damageDone = attack[TOON_HP_COL][position]
            if damageDone <= 0 or self.immortalSuits:
                continue
            if track == HEAL or track == PETSOS:
                # Heal path: add HP, optionally capped at the toon's max.
                currTarget = targets[position]
                if self.CAP_HEALS:
                    toonHp = self.__getToonHp(currTarget)
                    toonMaxHp = self.__getToonMaxHp(currTarget)
                    if toonHp + damageDone > toonMaxHp:
                        damageDone = toonMaxHp - toonHp
                        attack[TOON_HP_COL][position] = damageDone
                self.toonHPAdjusts[currTarget] += damageDone
                totalDamages = totalDamages + damageDone
                continue
            # Damage path: subtract from the suit and handle death/revive.
            currTarget = targets[position]
            currTarget.setHP(currTarget.getHP() - damageDone)
            targetId = currTarget.getDoId()
            if self.notify.getDebug():
                if hpbonus:
                    self.notify.debug(str(targetId) + ': suit takes ' + str(damageDone) + ' damage from HP-Bonus')
                elif kbbonus:
                    self.notify.debug(str(targetId) + ': suit takes ' + str(damageDone) + ' damage from KB-Bonus')
                else:
                    self.notify.debug(str(targetId) + ': suit takes ' + str(damageDone) + ' damage')
            totalDamages = totalDamages + damageDone
            if currTarget.getHP() <= 0:
                # Skelecog revive consumes a revive instead of dying.
                if currTarget.getSkeleRevives() >= 1:
                    currTarget.useSkeleRevive()
                    attack[SUIT_REVIVE_COL] = attack[SUIT_REVIVE_COL] | 1 << position
                else:
                    self.suitLeftBattle(targetId)
                    attack[SUIT_DIED_COL] = attack[SUIT_DIED_COL] | 1 << position
                    if self.notify.getDebug():
                        self.notify.debug('Suit' + str(targetId) + 'bravely expired in combat')
    return totalDamages
def __combatantDead(self, avId, toon):
    # Return 1 when the combatant has no health left, else 0.
    # toon selects which side avId belongs to (truthy = toon side).
    if toon:
        if self.__getToonHp(avId) <= 0:
            return 1
    else:
        # NOTE(review): findSuit may return None if the suit already
        # left the battle, which would raise here — confirm callers
        # only pass active suit ids.
        suit = self.battle.findSuit(avId)
        if suit.getHP() <= 0:
            return 1
    return 0
def __combatantJustRevived(self, avId):
    # Returns 1 exactly once after a suit consumed a skelecog revive
    # (the suit-side flag is cleared by the check).
    suit = self.battle.findSuit(avId)
    return 1 if suit.reviveCheckAndClear() else 0
def __addAttackExp(self, attack, track = -1, level = -1, attackerId = -1):
    # Award gag skill points for an attack.  When track/level/attackerId
    # are all given they are used directly; otherwise the values come
    # from the attack itself, and only if it hit.  NPC and pet tracks
    # never earn experience.
    trk = -1
    lvl = -1
    id = -1
    if track != -1 and level != -1 and attackerId != -1:
        trk = track
        lvl = level
        id = attackerId
    elif self.__attackHasHit(attack):
        if self.notify.getDebug():
            self.notify.debug('Attack ' + repr(attack) + ' has hit')
        trk = attack[TOON_TRACK_COL]
        lvl = attack[TOON_LVL_COL]
        id = attack[TOON_ID_COL]
    if trk != -1 and trk != NPCSOS and trk != PETSOS and lvl != -1 and id != -1:
        expList = self.toonSkillPtsGained.get(id, None)
        if expList == None:
            # First credit for this toon this battle: one slot per track.
            expList = [0,
             0,
             0,
             0,
             0,
             0,
             0]
            self.toonSkillPtsGained[id] = expList
        # Higher-level gags earn more; total is capped per track.
        expList[trk] = min(ExperienceCap, expList[trk] + (lvl + 1) * self.__skillCreditMultiplier)
    return
def __clearTgtDied(self, tgt, lastAtk, currAtk):
    # When two same-track attacks both registered a kill on the same
    # suit, move the "died" credit from the earlier attack to the later
    # one that actually hit (so the death animation plays once).
    position = self.battle.activeSuits.index(tgt)
    currAtkTrack = self.__getActualTrack(currAtk)
    lastAtkTrack = self.__getActualTrack(lastAtk)
    if currAtkTrack == lastAtkTrack and lastAtk[SUIT_DIED_COL] & 1 << position and self.__attackHasHit(currAtk, suit=0):
        if self.notify.getDebug():
            self.notify.debug('Clearing suit died for ' + str(tgt.getDoId()) + ' at position ' + str(position) + ' from toon attack ' + str(lastAtk[TOON_ID_COL]) + ' and setting it for ' + str(currAtk[TOON_ID_COL]))
        # Clear the died bit on the old attack, set it on the new one.
        lastAtk[SUIT_DIED_COL] = lastAtk[SUIT_DIED_COL] ^ 1 << position
        self.suitLeftBattle(tgt.getDoId())
        currAtk[SUIT_DIED_COL] = currAtk[SUIT_DIED_COL] | 1 << position
def __addDmgToBonuses(self, dmg, attackIndex, hp = 1):
    # Record dmg from one toon attack into the per-target bonus tables:
    # hpBonuses (same-track stacking bonus) or kbBonuses (knockback
    # bonus, only for lured targets).  Entries are [attackIndex, dmg]
    # keyed by track per target position.
    toonId = self.toonAtkOrder[attackIndex]
    attack = self.battle.toonAttacks[toonId]
    atkTrack = self.__getActualTrack(attack)
    if atkTrack == HEAL or atkTrack == PETSOS:
        return
    tgts = self.__createToonTargetList(toonId)
    # The original re-derived attackerId/attack/track inside the loop;
    # they are loop-invariant and identical to the values above.
    for currTgt in tgts:
        tgtPos = self.battle.activeSuits.index(currTgt)
        if hp:
            if atkTrack in self.hpBonuses[tgtPos]:
                self.hpBonuses[tgtPos][atkTrack].append([attackIndex, dmg])
            else:
                self.hpBonuses[tgtPos][atkTrack] = [[attackIndex, dmg]]
        elif self.__suitIsLured(currTgt.getDoId()):
            if atkTrack in self.kbBonuses[tgtPos]:
                self.kbBonuses[tgtPos][atkTrack].append([attackIndex, dmg])
            else:
                self.kbBonuses[tgtPos][atkTrack] = [[attackIndex, dmg]]
def __clearBonuses(self, hp = 1):
if hp:
self.hpBonuses = [{},
{},
{},
{}]
else:
self.kbBonuses = [{},
{},
{},
{}]
def __bonusExists(self, tgtSuit, hp = 1):
tgtPos = self.activeSuits.index(tgtSuit)
if hp:
bonusLen = len(self.hpBonuses[tgtPos])
else:
bonusLen = len(self.kbBonuses[tgtPos])
if bonusLen > 0:
return 1
return 0
def __processBonuses(self, hp = 1):
    # Convert the accumulated per-target bonus entries into actual
    # bonus damage on the last contributing attack, then clear the
    # table.  hp selects the same-track damage bonus; otherwise the
    # knockback (lure) bonus is processed.
    if hp:
        bonusList = self.hpBonuses
        self.notify.debug('Processing hpBonuses: ' + repr(self.hpBonuses))
    else:
        bonusList = self.kbBonuses
        self.notify.debug('Processing kbBonuses: ' + repr(self.kbBonuses))
    tgtPos = 0
    for currTgt in bonusList:
        for currAtkType in currTgt.keys():
            # HP bonuses need at least two same-track hits; kb bonuses
            # apply from a single hit.
            if len(currTgt[currAtkType]) > 1 or not hp and len(currTgt[currAtkType]) > 0:
                totalDmgs = 0
                for currDmg in currTgt[currAtkType]:
                    totalDmgs += currDmg[1]
                numDmgs = len(currTgt[currAtkType])
                # Credit the bonus to the last attack of the combo.
                attackIdx = currTgt[currAtkType][numDmgs - 1][0]
                attackerId = self.toonAtkOrder[attackIdx]
                attack = self.battle.toonAttacks[attackerId]
                if hp:
                    attack[TOON_HPBONUS_COL] = math.ceil(totalDmgs * (self.DamageBonuses[numDmgs - 1] * 0.01))
                    if self.notify.getDebug():
                        self.notify.debug('Applying hp bonus to track ' + str(attack[TOON_TRACK_COL]) + ' of ' + str(attack[TOON_HPBONUS_COL]))
                elif len(attack[TOON_KBBONUS_COL]) > tgtPos:
                    # Knockback bonus is half the accumulated damage.
                    attack[TOON_KBBONUS_COL][tgtPos] = totalDmgs * 0.5
                    if self.notify.getDebug():
                        self.notify.debug('Applying kb bonus to track ' + str(attack[TOON_TRACK_COL]) + ' of ' + str(attack[TOON_KBBONUS_COL][tgtPos]) + ' to target ' + str(tgtPos))
                else:
                    self.notify.warning('invalid tgtPos for knock back bonus: %d' % tgtPos)
        tgtPos += 1
    if hp:
        self.__clearBonuses()
    else:
        self.__clearBonuses(hp=0)
def __handleBonus(self, attackIdx, hp = 1):
    # Queue this attack's damage toward a stacking bonus: hp mode
    # feeds the same-track damage bonus (lures excluded), kb mode the
    # knockback bonus (throw/squirt only).
    attackerId = self.toonAtkOrder[attackIdx]
    attack = self.battle.toonAttacks[attackerId]
    atkDmg = self.__attackDamage(attack, suit=0)
    atkTrack = self.__getActualTrack(attack)
    if atkDmg <= 0:
        return
    if hp:
        if atkTrack != LURE:
            self.notify.debug('Adding dmg of ' + str(atkDmg) + ' to hpBonuses list')
            self.__addDmgToBonuses(atkDmg, attackIdx)
    elif self.__knockBackAtk(attackerId, toon=1):
        self.notify.debug('Adding dmg of ' + str(atkDmg) + ' to kbBonuses list')
        self.__addDmgToBonuses(atkDmg, attackIdx, hp=0)
def __clearAttack(self, attackIdx, toon = 1):
    # Reset a toon's attack entry to an empty attack, re-padding the
    # per-target damage columns to the current battle size.  The suit
    # variant is not implemented.
    if toon:
        if self.notify.getDebug():
            self.notify.debug('clearing out toon attack for toon ' + str(attackIdx) + '...')
        # (Removed an unused local that captured the old attack.)
        self.battle.toonAttacks[attackIdx] = getToonAttack(attackIdx)
        longest = max(len(self.battle.activeToons), len(self.battle.activeSuits))
        taList = self.battle.toonAttacks
        for j in xrange(longest):
            taList[attackIdx][TOON_HP_COL].append(-1)
            taList[attackIdx][TOON_KBBONUS_COL].append(-1)
        if self.notify.getDebug():
            self.notify.debug('toon attack is now ' + repr(self.battle.toonAttacks[attackIdx]))
    else:
        self.notify.warning('__clearAttack not implemented for suits!')
def __rememberToonAttack(self, suitId, toonId, damage):
if not suitId in self.SuitAttackers:
self.SuitAttackers[suitId] = {toonId: damage}
elif not toonId in self.SuitAttackers[suitId]:
self.SuitAttackers[suitId][toonId] = damage
elif self.SuitAttackers[suitId][toonId] <= damage:
self.SuitAttackers[suitId] = [toonId, damage]
def __postProcessToonAttacks(self):
    # Final pass over all toon attacks in order: remember damage for
    # suit retaliation, spring traps, apply lure results, apply base
    # and bonus damage, award experience, and clean up train traps.
    self.notify.debug('__postProcessToonAttacks()')
    lastTrack = -1
    lastAttacks = []
    self.__clearBonuses()
    for currToonAttack in self.toonAtkOrder:
        if currToonAttack != -1:
            attack = self.battle.toonAttacks[currToonAttack]
            atkTrack, atkLevel = self.__getActualTrackLevel(attack)
            if atkTrack != HEAL and atkTrack != SOS and atkTrack != NO_ATTACK and atkTrack != NPCSOS and atkTrack != PETSOS:
                targets = self.__createToonTargetList(currToonAttack)
                allTargetsDead = 1
                for currTgt in targets:
                    damageDone = self.__attackDamage(attack, suit=0)
                    if damageDone > 0:
                        # Feed the suit-retaliation memory.
                        self.__rememberToonAttack(currTgt.getDoId(), attack[TOON_ID_COL], damageDone)
                    if atkTrack == TRAP:
                        if currTgt.doId in self.traps:
                            trapInfo = self.traps[currTgt.doId]
                            currTgt.battleTrap = trapInfo[0]
                    targetDead = 0
                    if currTgt.getHP() > 0:
                        allTargetsDead = 0
                    else:
                        targetDead = 1
                        # Re-attribute died credit between same-track attacks.
                        if atkTrack != LURE:
                            for currLastAtk in lastAttacks:
                                self.__clearTgtDied(currTgt, currLastAtk, attack)
                    tgtId = currTgt.getDoId()
                    if tgtId in self.successfulLures and atkTrack == LURE:
                        # Move the stored lure result onto the lurer's attack.
                        lureInfo = self.successfulLures[tgtId]
                        self.notify.debug('applying lure data: ' + repr(lureInfo))
                        toonId = lureInfo[0]
                        lureAtk = self.battle.toonAttacks[toonId]
                        tgtPos = self.battle.activeSuits.index(currTgt)
                        if currTgt.doId in self.traps:
                            trapInfo = self.traps[currTgt.doId]
                            if trapInfo[0] == UBER_GAG_LEVEL_INDEX:
                                self.notify.debug('train trap triggered for %d' % currTgt.doId)
                                self.trainTrapTriggered = True
                        self.__removeSuitTrap(tgtId)
                        lureAtk[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_TGT_LURED
                        lureAtk[TOON_HP_COL][tgtPos] = lureInfo[3]
                    elif self.__suitIsLured(tgtId) and atkTrack == DROP:
                        self.notify.debug('Drop on lured suit, ' + 'indicating with KBBONUS_COL ' + 'flag')
                        tgtPos = self.battle.activeSuits.index(currTgt)
                        attack[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_LURED_FLAG
                    if targetDead and atkTrack != lastTrack:
                        # Target already died to an earlier track: no damage.
                        tgtPos = self.battle.activeSuits.index(currTgt)
                        attack[TOON_HP_COL][tgtPos] = 0
                        attack[TOON_KBBONUS_COL][tgtPos] = -1
                if allTargetsDead and atkTrack != lastTrack:
                    if self.notify.getDebug():
                        self.notify.debug('all targets of toon attack ' + str(currToonAttack) + ' are dead')
                    self.__clearAttack(currToonAttack, toon=1)
                    attack = self.battle.toonAttacks[currToonAttack]
                    atkTrack, atkLevel = self.__getActualTrackLevel(attack)
            # Apply base damage, then hp bonus, then (where sensible) kb bonus.
            damagesDone = self.__applyToonAttackDamages(currToonAttack)
            self.__applyToonAttackDamages(currToonAttack, hpbonus=1)
            if atkTrack != LURE and atkTrack != DROP and atkTrack != SOUND:
                self.__applyToonAttackDamages(currToonAttack, kbbonus=1)
            if lastTrack != atkTrack:
                lastAttacks = []
                lastTrack = atkTrack
            lastAttacks.append(attack)
            # Experience: traps/lures are credited elsewhere; heals only
            # when they actually healed.
            if self.itemIsCredit(atkTrack, atkLevel):
                if atkTrack == TRAP or atkTrack == LURE:
                    pass
                elif atkTrack == HEAL:
                    if damagesDone != 0:
                        self.__addAttackExp(attack)
                else:
                    self.__addAttackExp(attack)
    if self.trainTrapTriggered:
        # A sprung train trap clears trap state on the whole lineup.
        for suit in self.battle.activeSuits:
            suitId = suit.doId
            self.__removeSuitTrap(suitId)
            suit.battleTrap = NO_TRAP
            self.notify.debug('train trap triggered, removing trap from %d' % suitId)
    if self.notify.getDebug():
        for currToonAttack in self.toonAtkOrder:
            attack = self.battle.toonAttacks[currToonAttack]
            self.notify.debug('Final Toon attack: ' + str(attack))
def __allTargetsDead(self, attackIdx, toon = 1):
    # Return 1 when every target of the given toon attack is dead.
    # Suit-side attacks are not implemented.
    allTargetsDead = 1
    if toon:
        targets = self.__createToonTargetList(attackIdx)
        for currTgt in targets:
            # Fix: suit targets expose getHP() (capitalized), as used
            # everywhere else in this file; the original called
            # getHp(), which would raise AttributeError.
            if currTgt.getHP() > 0:
                allTargetsDead = 0
                break
    else:
        self.notify.warning('__allTargetsDead: suit ver. not implemented!')
    return allTargetsDead
def __clearLuredSuitsByAttack(self, toonId, kbBonusReq = 0, targetId = -1):
    # Wake lured suits hit by this toon's attack.  With an explicit
    # targetId only that suit is considered; otherwise every lured
    # target of the attack (optionally only those with a queued
    # knockback bonus) steps back from the lure spot.
    if self.notify.getDebug():
        self.notify.debug('__clearLuredSuitsByAttack')
    if targetId != -1:
        # Bug fix: the original referenced an undefined loop variable
        # `t` in this branch (NameError); operate on targetId directly.
        if self.__suitIsLured(targetId):
            self.__removeLured(targetId)
    else:
        tgtList = self.__createToonTargetList(toonId)
        for t in tgtList:
            if self.__suitIsLured(t.getDoId()) and (not kbBonusReq or self.__bonusExists(t, hp=0)):
                self.__removeLured(t.getDoId())
                if self.notify.getDebug():
                    self.notify.debug('Suit %d stepping from lured spot' % t.getDoId())
            else:
                self.notify.debug('Suit ' + str(t.getDoId()) + ' not found in currently lured suits')
def __clearLuredSuitsDelayed(self):
    # Wake every suit queued for a delayed unlure, then drain the queue.
    if self.notify.getDebug():
        self.notify.debug('__clearLuredSuitsDelayed')
    for suitId in self.delayedUnlures:
        if not self.__suitIsLured(suitId):
            self.notify.debug('Suit ' + str(suitId) + ' not found in currently lured suits')
            continue
        self.__removeLured(suitId)
        if self.notify.getDebug():
            self.notify.debug('Suit %d stepping back from lured spot' % suitId)
    self.delayedUnlures = []
def __addLuredSuitsDelayed(self, toonId, targetId = -1, ignoreDamageCheck = False):
    # Queue suits to be unlured after this attack resolves.  With an
    # explicit targetId only that suit is queued; otherwise every
    # lured, not-yet-queued target that took damage from toonId's
    # attack (or all of them when ignoreDamageCheck) is queued.
    if self.notify.getDebug():
        self.notify.debug('__addLuredSuitsDelayed')
    if targetId != -1:
        self.delayedUnlures.append(targetId)
    else:
        tgtList = self.__createToonTargetList(toonId)
        for t in tgtList:
            if self.__suitIsLured(t.getDoId()) and t.getDoId() not in self.delayedUnlures and (self.__attackDamageForTgt(self.battle.toonAttacks[toonId], self.battle.activeSuits.index(t), suit=0) > 0 or ignoreDamageCheck):
                self.delayedUnlures.append(t.getDoId())
def __calculateToonAttacks(self):
    # Resolve every toon attack for the round in gag-track order:
    # compute hit/damage per toon, accumulate stacking bonuses, manage
    # delayed unlures between tracks, then post-process everything.
    self.notify.debug('__calculateToonAttacks()')
    self.__clearBonuses(hp=0)
    currTrack = None
    self.notify.debug('Traps: ' + str(self.traps))
    # Skill credit is limited by the highest-level suit present.
    maxSuitLevel = 0
    for cog in self.battle.activeSuits:
        maxSuitLevel = max(maxSuitLevel, cog.getActualLevel())
    self.creditLevel = maxSuitLevel
    for toonId in self.toonAtkOrder:
        if self.__combatantDead(toonId, toon=1):
            if self.notify.getDebug():
                self.notify.debug("Toon %d is dead and can't attack" % toonId)
            continue
        attack = self.battle.toonAttacks[toonId]
        atkTrack = self.__getActualTrack(attack)
        if atkTrack != NO_ATTACK and atkTrack != SOS and atkTrack != NPCSOS:
            if self.notify.getDebug():
                self.notify.debug('Calculating attack for toon: %d' % toonId)
            # Crossing into a new track flushes pending unlures.
            if self.SUITS_UNLURED_IMMEDIATELY:
                if currTrack and atkTrack != currTrack:
                    self.__clearLuredSuitsDelayed()
            currTrack = atkTrack
            self.__calcToonAtkHp(toonId)
            attackIdx = self.toonAtkOrder.index(toonId)
            self.__handleBonus(attackIdx, hp=0)
            self.__handleBonus(attackIdx, hp=1)
            lastAttack = self.toonAtkOrder.index(toonId) >= len(self.toonAtkOrder) - 1
            unlureAttack = self.__attackHasHit(attack, suit=0) and self.__unlureAtk(toonId, toon=1)
            if unlureAttack:
                if lastAttack:
                    self.__clearLuredSuitsByAttack(toonId)
                else:
                    self.__addLuredSuitsDelayed(toonId)
            if lastAttack:
                self.__clearLuredSuitsDelayed()
    self.__processBonuses(hp=0)
    self.__processBonuses(hp=1)
    self.__postProcessToonAttacks()
    return
def __knockBackAtk(self, attackIndex, toon = 1):
    # Throw and squirt gags knock a lured suit back to its spot.
    if toon:
        track = self.battle.toonAttacks[attackIndex][TOON_TRACK_COL]
        if track == THROW or track == SQUIRT:
            if self.notify.getDebug():
                self.notify.debug('attack is a knockback')
            return 1
    return 0
def __unlureAtk(self, attackIndex, toon = 1):
    # Throw, squirt, and sound all knock suits out of the lured state.
    attack = self.battle.toonAttacks[attackIndex]
    track = self.__getActualTrack(attack)
    if toon and track in (THROW, SQUIRT, SOUND):
        if self.notify.getDebug():
            self.notify.debug('attack is an unlure')
        return 1
    return 0
def __calcSuitAtkType(self, attackIndex):
    # Pick one of this suit's attacks, weighted by its level.
    theSuit = self.battle.activeSuits[attackIndex]
    attackChoices = SuitBattleGlobals.SuitAttributes[theSuit.dna.name]['attacks']
    return SuitBattleGlobals.pickSuitAttack(attackChoices, theSuit.getLevel())
def __calcSuitTarget(self, attackIndex):
    # Choose which toon the suit at attackIndex attacks.  75% of the
    # time a suit retaliates against the toons that have damaged it,
    # weighted by remembered damage; otherwise it picks a random live
    # toon.  Returns an index into battle.activeToons, or -1.
    attack = self.battle.suitAttacks[attackIndex]
    suitId = attack[SUIT_ID_COL]
    if suitId in self.SuitAttackers and random.randint(0, 99) < 75:
        totalDamage = 0
        for currToon in self.SuitAttackers[suitId].keys():
            totalDamage += self.SuitAttackers[suitId][currToon]
        dmgs = []
        # NOTE(review): under Python 2 this is integer division, so
        # most weights truncate to 0 and pickFromFreqList often falls
        # through to the random branch — confirm whether intended
        # before changing.
        for currToon in self.SuitAttackers[suitId].keys():
            dmgs.append(self.SuitAttackers[suitId][currToon] / totalDamage * 100)
        dmgIdx = SuitBattleGlobals.pickFromFreqList(dmgs)
        if dmgIdx == None:
            toonId = self.__pickRandomToon(suitId)
        else:
            toonId = self.SuitAttackers[suitId].keys()[dmgIdx]
        if toonId == -1 or toonId not in self.battle.activeToons:
            return -1
        self.notify.debug('Suit attacking back at toon ' + str(toonId))
        return self.battle.activeToons.index(toonId)
    else:
        return self.__pickRandomToon(suitId)
    return
def __pickRandomToon(self, suitId):
    # Choose uniformly among the positions of toons still alive;
    # -1 when nobody is left to attack.
    livePositions = []
    for toonId in self.battle.activeToons:
        if not self.__combatantDead(toonId, toon=1):
            livePositions.append(self.battle.activeToons.index(toonId))
    if not livePositions:
        self.notify.debug('No tgts avail. for suit ' + str(suitId))
        return -1
    chosen = random.choice(livePositions)
    self.notify.debug('Suit randomly attacking toon ' + str(self.battle.activeToons[chosen]))
    return chosen
def __suitAtkHit(self, attackIndex):
    # Roll to-hit for a suit attack; the debug override flags take
    # priority over the dice.
    if self.suitsAlwaysHit:
        return 1
    if self.suitsAlwaysMiss:
        return 0
    theSuit = self.battle.activeSuits[attackIndex]
    atkType = self.battle.suitAttacks[attackIndex][SUIT_ATK_COL]
    atkInfo = SuitBattleGlobals.getSuitAttack(theSuit.dna.name, theSuit.getLevel(), atkType)
    atkAcc = atkInfo['acc']
    suitAcc = SuitBattleGlobals.SuitAttributes[theSuit.dna.name]['acc'][theSuit.getLevel()]
    # Only the per-attack accuracy is actually used for the roll.
    acc = atkAcc
    randChoice = random.randint(0, 99)
    if self.notify.getDebug():
        self.notify.debug('Suit attack rolled ' + str(randChoice) + ' to hit with an accuracy of ' + str(acc) + ' (attackAcc: ' + str(atkAcc) + ' suitAcc: ' + str(suitAcc) + ')')
    return 1 if randChoice < acc else 0
def __suitAtkAffectsGroup(self, attack):
    # True when the chosen suit attack hits every toon, not just one.
    theSuit = self.battle.findSuit(attack[SUIT_ID_COL])
    atkInfo = SuitBattleGlobals.getSuitAttack(theSuit.dna.name, theSuit.getLevel(), attack[SUIT_ATK_COL])
    return atkInfo['group'] != SuitBattleGlobals.ATK_TGT_SINGLE
def __createSuitTargetList(self, attackIndex):
    # Build the list of toon ids this suit attack will touch: empty
    # for no attack, all active toons for group attacks, otherwise the
    # single chosen target.
    attack = self.battle.suitAttacks[attackIndex]
    if attack[SUIT_ATK_COL] == NO_ATTACK:
        self.notify.debug('No attack, no targets')
        return []
    debug = self.notify.getDebug()
    if self.__suitAtkAffectsGroup(attack):
        if debug:
            self.notify.debug('Suit attack is group target')
        targetList = []
        for currToon in self.battle.activeToons:
            if debug:
                self.notify.debug('Suit attack will target toon' + str(currToon))
            targetList.append(currToon)
        return targetList
    if debug:
        self.notify.debug('Suit attack is single target')
    return [self.battle.activeToons[attack[SUIT_TGT_COL]]]
def __calcSuitAtkHp(self, attackIndex):
    # Fill in SUIT_HP_COL for each toon this suit attack targets.
    targetList = self.__createSuitTargetList(attackIndex)
    attack = self.battle.suitAttacks[attackIndex]
    for currTarget in xrange(len(targetList)):
        toonId = targetList[currTarget]
        toon = self.battle.getToon(toonId)
        result = 0
        if toon and toon.immortalMode:
            # Immortal (debug) toons take a token 1 damage.
            result = 1
        elif self.TOONS_TAKE_NO_DAMAGE:
            result = 0
        elif self.__suitAtkHit(attackIndex):
            atkType = attack[SUIT_ATK_COL]
            theSuit = self.battle.findSuit(attack[SUIT_ID_COL])
            atkInfo = SuitBattleGlobals.getSuitAttack(theSuit.dna.name, theSuit.getLevel(), atkType)
            result = atkInfo['hp']
        targetIndex = self.battle.activeToons.index(toonId)
        attack[SUIT_HP_COL][targetIndex] = result
def __getToonHp(self, toonDoId):
handle = self.battle.getToon(toonDoId)
if handle != None and toonDoId in self.toonHPAdjusts:
return handle.hp + self.toonHPAdjusts[toonDoId]
else:
return 0
return
def __getToonMaxHp(self, toonDoId):
handle = self.battle.getToon(toonDoId)
if handle != None:
return handle.maxHp
else:
return 0
return
def __applySuitAttackDamages(self, attackIndex):
    # Apply one suit attack's stored per-toon damage to the pending HP
    # adjustments, flagging any toon it kills.
    attack = self.battle.suitAttacks[attackIndex]
    if self.APPLY_HEALTH_ADJUSTMENTS:
        for t in self.battle.activeToons:
            position = self.battle.activeToons.index(t)
            if attack[SUIT_HP_COL][position] <= 0:
                continue
            toonHp = self.__getToonHp(t)
            if toonHp - attack[SUIT_HP_COL][position] <= 0:
                if self.notify.getDebug():
                    self.notify.debug('Toon %d has died, removing' % t)
                self.toonLeftBattle(t)
                attack[TOON_DIED_COL] = attack[TOON_DIED_COL] | 1 << position
            if self.notify.getDebug():
                self.notify.debug('Toon ' + str(t) + ' takes ' + str(attack[SUIT_HP_COL][position]) + ' damage')
            # Damage accumulates in toonHPAdjusts until round end.
            self.toonHPAdjusts[t] -= attack[SUIT_HP_COL][position]
            self.notify.debug('Toon ' + str(t) + ' now has ' + str(self.__getToonHp(t)) + ' health')
def __suitCanAttack(self, suitId):
    # A suit may attack unless it is dead, lured, or spent the round
    # reviving as a skelecog.
    if self.__combatantDead(suitId, toon=0):
        return 0
    if self.__suitIsLured(suitId):
        return 0
    if self.__combatantJustRevived(suitId):
        return 0
    return 1
def __updateSuitAtkStat(self, toonId):
if toonId in self.suitAtkStats:
self.suitAtkStats[toonId] += 1
else:
self.suitAtkStats[toonId] = 1
def __printSuitAtkStats(self):
    # Debug dump: per active toon, how often the suits attacked it.
    self.notify.debug('Suit Atk Stats:')
    for currTgt in self.suitAtkStats.keys():
        if currTgt in self.battle.activeToons:
            tgtPos = self.battle.activeToons.index(currTgt)
            self.notify.debug(' toon ' + str(currTgt) + ' at position ' + str(tgtPos) + ' was attacked ' + str(self.suitAtkStats[currTgt]) + ' times')
    self.notify.debug('\n')
def __calculateSuitAttacks(self):
    # Resolve every suit attack for the round: pick attack type and
    # target, roll damage, clear attacks with no live targets, and
    # apply the damage for attacks that hit.
    for i in xrange(len(self.battle.suitAttacks)):
        if i < len(self.battle.activeSuits):
            suitId = self.battle.activeSuits[i].doId
            self.battle.suitAttacks[i][SUIT_ID_COL] = suitId
            if not self.__suitCanAttack(suitId):
                if self.notify.getDebug():
                    self.notify.debug("Suit %d can't attack" % suitId)
                continue
            # Suits still joining or pending don't act this round.
            if self.battle.pendingSuits.count(self.battle.activeSuits[i]) > 0 or self.battle.joiningSuits.count(self.battle.activeSuits[i]) > 0:
                continue
            attack = self.battle.suitAttacks[i]
            attack[SUIT_ID_COL] = self.battle.activeSuits[i].doId
            attack[SUIT_ATK_COL] = self.__calcSuitAtkType(i)
            attack[SUIT_TGT_COL] = self.__calcSuitTarget(i)
            if attack[SUIT_TGT_COL] == -1:
                self.battle.suitAttacks[i] = getDefaultSuitAttack()
                attack = self.battle.suitAttacks[i]
                self.notify.debug('clearing suit attack, no avail targets')
            self.__calcSuitAtkHp(i)
            if attack[SUIT_ATK_COL] != NO_ATTACK:
                # Track per-toon targeting statistics.
                if self.__suitAtkAffectsGroup(attack):
                    for currTgt in self.battle.activeToons:
                        self.__updateSuitAtkStat(currTgt)
                else:
                    tgtId = self.battle.activeToons[attack[SUIT_TGT_COL]]
                    self.__updateSuitAtkStat(tgtId)
                targets = self.__createSuitTargetList(i)
                allTargetsDead = 1
                for currTgt in targets:
                    if self.__getToonHp(currTgt) > 0:
                        allTargetsDead = 0
                        break
                if allTargetsDead:
                    self.battle.suitAttacks[i] = getDefaultSuitAttack()
                    if self.notify.getDebug():
                        self.notify.debug('clearing suit attack, targets dead')
                        self.notify.debug('suit attack is now ' + repr(self.battle.suitAttacks[i]))
                        self.notify.debug('all attacks: ' + repr(self.battle.suitAttacks))
            attack = self.battle.suitAttacks[i]
            if self.__attackHasHit(attack, suit=1):
                self.__applySuitAttackDamages(i)
            if self.notify.getDebug():
                self.notify.debug('Suit attack: ' + str(self.battle.suitAttacks[i]))
            attack[SUIT_BEFORE_TOONS_COL] = 0
def __updateLureTimeouts(self):
    """Advance every lured suit's round counter and wake up any suit whose
    lure has expired (max rounds reached) or whose wakeup roll succeeded.
    """
    if self.notify.getDebug():
        self.notify.debug('__updateLureTimeouts()')
        self.notify.debug('Lured suits: ' + str(self.currentlyLuredSuits))
    expired = []
    for suitId in self.currentlyLuredSuits.keys():
        self.__incLuredCurrRound(suitId)
        if self.__luredMaxRoundsReached(suitId) or self.__luredWakeupTime(suitId):
            expired.append(suitId)
    for suitId in expired:
        self.__removeLured(suitId)
    if self.notify.getDebug():
        self.notify.debug('Lured suits: ' + str(self.currentlyLuredSuits))
def __initRound(self):
    """Set up per-round state: build the toon attack order (pet SOS first,
    then Fire, then gag tracks HEAL..DROP with traps specially sorted),
    apply NPC SOS global effects, and reset per-round bookkeeping.

    Returns (toonsHit, cogsMiss) flags so calculateRound can undo the
    NPC-SOS global hit/miss overrides at the end of the round.
    """
    if self.CLEAR_SUIT_ATTACKERS:
        self.SuitAttackers = {}
    self.toonAtkOrder = []
    # Pet SOS attacks go first in the resolution order.
    attacks = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, PETSOS)
    for atk in attacks:
        self.toonAtkOrder.append(atk[TOON_ID_COL])
    # Fire attacks resolve next.
    attacks = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, FIRE)
    for atk in attacks:
        self.toonAtkOrder.append(atk[TOON_ID_COL])
    # Then each gag track in canonical order (HEAL through DROP).
    for track in xrange(HEAL, DROP + 1):
        attacks = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, track)
        if track == TRAP:
            # Regular traps resolve before NPC-SOS traps within the track.
            sortedTraps = []
            for atk in attacks:
                if atk[TOON_TRACK_COL] == TRAP:
                    sortedTraps.append(atk)
            for atk in attacks:
                if atk[TOON_TRACK_COL] == NPCSOS:
                    sortedTraps.append(atk)
            attacks = sortedTraps
        for atk in attacks:
            self.toonAtkOrder.append(atk[TOON_ID_COL])
    # NPC SOS cards can force all toons to hit or all cogs to miss this round.
    specials = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, NPCSOS)
    toonsHit = 0
    cogsMiss = 0
    for special in specials:
        npc_track = NPCToons.getNPCTrack(special[TOON_TGT_COL])
        if npc_track == NPC_TOONS_HIT:
            BattleCalculatorAI.toonsAlwaysHit = 1
            toonsHit = 1
        elif npc_track == NPC_COGS_MISS:
            BattleCalculatorAI.suitsAlwaysMiss = 1
            cogsMiss = 1
    if self.notify.getDebug():
        self.notify.debug('Toon attack order: ' + str(self.toonAtkOrder))
        self.notify.debug('Active toons: ' + str(self.battle.activeToons))
        self.notify.debug('Toon attacks: ' + str(self.battle.toonAttacks))
        self.notify.debug('Active suits: ' + str(self.battle.activeSuits))
        self.notify.debug('Suit attacks: ' + str(self.battle.suitAttacks))
    # Reset per-round toon HP adjustments and helper state.
    self.toonHPAdjusts = {}
    for t in self.battle.activeToons:
        self.toonHPAdjusts[t] = 0
    self.__clearBonuses()
    self.__updateActiveToons()
    self.delayedUnlures = []
    self.__initTraps()
    self.successfulLures = {}
    return (toonsHit, cogsMiss)
def calculateRound(self):
    """Top-level round driver: pad the attack HP/bonus arrays, initialize
    round state, then resolve toon attacks, lure timeouts, and suit attacks.
    Always returns None.
    """
    longest = max(len(self.battle.activeToons), len(self.battle.activeSuits))
    # Pre-fill per-target HP and knockback-bonus slots with -1 (no damage).
    for t in self.battle.activeToons:
        for j in xrange(longest):
            self.battle.toonAttacks[t][TOON_HP_COL].append(-1)
            self.battle.toonAttacks[t][TOON_KBBONUS_COL].append(-1)
    # NOTE(review): suitAttacks appears to have 4 slots (max suits per
    # battle) -- confirm against the battle globals.
    for i in xrange(4):
        for j in xrange(len(self.battle.activeToons)):
            self.battle.suitAttacks[i][SUIT_HP_COL].append(-1)
    toonsHit, cogsMiss = self.__initRound()
    # Sync generated suits' HP before resolving the round.
    for suit in self.battle.activeSuits:
        if suit.isGenerated():
            suit.b_setHP(suit.getHP())
    # Abort if a suit object has been deleted out from under the battle.
    for suit in self.battle.activeSuits:
        if not hasattr(suit, 'dna'):
            self.notify.warning('a removed suit is in this battle!')
            return None
    self.__calculateToonAttacks()
    self.__updateLureTimeouts()
    self.__calculateSuitAttacks()
    # Undo the NPC-SOS global overrides that __initRound may have set.
    if toonsHit == 1:
        BattleCalculatorAI.toonsAlwaysHit = 0
    if cogsMiss == 1:
        BattleCalculatorAI.suitsAlwaysMiss = 0
    if self.notify.getDebug():
        self.notify.debug('Toon skills gained after this round: ' + repr(self.toonSkillPtsGained))
        self.__printSuitAtkStats()
    return None
def __calculateFiredCogs(self):
    """Placeholder for fired-cog calculation (not implemented).

    The original was an unfinished stub that was missing its ``self``
    parameter and dropped straight into the pdb debugger, which would hang
    the AI server if it were ever invoked.  It now just logs and returns.
    """
    self.notify.debug('__calculateFiredCogs() called - not implemented')
def toonLeftBattle(self, toonId):
    """Purge all per-toon bookkeeping when a toon exits the battle."""
    if self.notify.getDebug():
        self.notify.debug('toonLeftBattle()' + str(toonId))
    self.toonSkillPtsGained.pop(toonId, None)
    self.suitAtkStats.pop(toonId, None)
    if not self.CLEAR_SUIT_ATTACKERS:
        # Remove this toon from every suit's attacker record; drop suits
        # whose record becomes empty as a result.
        emptySuits = []
        for suitId in self.SuitAttackers.keys():
            attackers = self.SuitAttackers[suitId]
            if toonId in attackers:
                del attackers[toonId]
                if len(attackers) == 0:
                    emptySuits.append(suitId)
        for suitId in emptySuits:
            del self.SuitAttackers[suitId]
    self.__clearTrapCreator(toonId)
    self.__clearLurer(toonId)

def suitLeftBattle(self, suitId):
    """Drop lure, attacker and trap state for a suit leaving the battle."""
    if self.notify.getDebug():
        self.notify.debug('suitLeftBattle(): ' + str(suitId))
    self.__removeLured(suitId)
    self.SuitAttackers.pop(suitId, None)
    self.__removeSuitTrap(suitId)
def __updateActiveToons(self):
    """Drop bookkeeping that references toons no longer in the battle:
    attacker records and trap-experience credit.
    """
    if self.notify.getDebug():
        self.notify.debug('updateActiveToons()')
    if not self.CLEAR_SUIT_ATTACKERS:
        oldSuitIds = []
        # .keys() snapshots the dicts (Python 2), so deleting inside the
        # loops is safe.
        for s in self.SuitAttackers.keys():
            for t in self.SuitAttackers[s].keys():
                if t not in self.battle.activeToons:
                    del self.SuitAttackers[s][t]
                    if len(self.SuitAttackers[s]) == 0:
                        oldSuitIds.append(s)
        for oldSuitId in oldSuitIds:
            del self.SuitAttackers[oldSuitId]
    # Traps placed by departed toons stop awarding experience (owner set to 0).
    for trap in self.traps.keys():
        if self.traps[trap][1] not in self.battle.activeToons:
            self.notify.debug('Trap for toon ' + str(self.traps[trap][1]) + ' will no longer give exp')
            self.traps[trap][1] = 0
def getSkillGained(self, toonId, track):
    """Return the skill (exp) points toonId earned for the given gag track
    this battle, as tallied in self.toonSkillPtsGained."""
    return BattleExperienceAI.getSkillGained(self.toonSkillPtsGained, toonId, track)
def getLuredSuits(self):
    """Return the list of suit ids that are currently lured."""
    lured = self.currentlyLuredSuits.keys()
    self.notify.debug('Lured suits reported to battle: ' + repr(lured))
    return lured

def __suitIsLured(self, suitId, prevRound = 0):
    """Check whether suitId is lured.  With prevRound set, only count suits
    whose lure was applied on an earlier round (round counter != -1)."""
    lured = suitId in self.currentlyLuredSuits
    if not prevRound:
        return lured
    return lured and self.currentlyLuredSuits[suitId][0] != -1

def __findAvailLureId(self, lurerId):
    """Return the smallest positive lure id not already in use by lurerId
    across all currently lured suits."""
    usedIds = []
    for suitId in self.currentlyLuredSuits.keys():
        lurerInfo = self.currentlyLuredSuits[suitId][3]
        for lurer in lurerInfo.keys():
            lureId = lurerInfo[lurer][1]
            if lurer == lurerId and lureId not in usedIds:
                usedIds.append(lureId)
    usedIds.sort()
    # Walk the sorted ids and stop at the first gap.
    candidate = 1
    for usedId in usedIds:
        if usedId != candidate:
            break
        candidate += 1
    return candidate
def __addLuredSuitInfo(self, suitId, currRounds, maxRounds, wakeChance, lurer, lureLvl, lureId = -1, npc = 0):
    """Record that `lurer` lured `suitId`.

    currentlyLuredSuits maps suitId -> [currRounds, maxRounds, wakeChance,
    {lurerId: [lureLvl, lureId, credit]}].  A second lure on an already
    lured suit extends maxRounds and keeps the lowest wake chance.
    Returns the lure id assigned to this lure.
    """
    if lureId == -1:
        availLureId = self.__findAvailLureId(lurer)
    else:
        availLureId = lureId
    # NPC-card lures never earn the lurer gag credit.
    if npc == 1:
        credit = 0
    else:
        credit = self.itemIsCredit(LURE, lureLvl)
    if suitId in self.currentlyLuredSuits:
        lureInfo = self.currentlyLuredSuits[suitId]
        # Only a lurer not already on record stacks onto the existing lure.
        if not lurer in lureInfo[3]:
            lureInfo[1] += maxRounds
            if wakeChance < lureInfo[2]:
                lureInfo[2] = wakeChance
            lureInfo[3][lurer] = [lureLvl, availLureId, credit]
    else:
        lurerInfo = {lurer: [lureLvl, availLureId, credit]}
        self.currentlyLuredSuits[suitId] = [currRounds,
         maxRounds,
         wakeChance,
         lurerInfo]
    self.notify.debug('__addLuredSuitInfo: currLuredSuits -> %s' % repr(self.currentlyLuredSuits))
    return availLureId
def __getLurers(self, suitId):
    """Return the toon ids that contributed to luring suitId ([] if not lured)."""
    if not self.__suitIsLured(suitId):
        return []
    return self.currentlyLuredSuits[suitId][3].keys()

def __getLuredExpInfo(self, suitId):
    """Return [[lurerId, lureLvl, lureId, credit], ...] for every lurer of
    suitId; empty list when the suit is not lured."""
    lurers = self.__getLurers(suitId)
    if len(lurers) == 0:
        return []
    lurerInfo = self.currentlyLuredSuits[suitId][3]
    return [[lurer,
             lurerInfo[lurer][0],
             lurerInfo[lurer][1],
             lurerInfo[lurer][2]] for lurer in lurers]
def __clearLurer(self, lurerId, lureId = -1):
    """Forget lurerId's contribution to every lured suit; when lureId is
    given, only entries made with that lure id are removed."""
    for suitId in self.currentlyLuredSuits.keys():
        lurerInfo = self.currentlyLuredSuits[suitId][3]
        for lurer in lurerInfo.keys():
            if lurer != lurerId:
                continue
            if lureId == -1 or lureId == lurerInfo[lurer][1]:
                del lurerInfo[lurer]
def __setLuredMaxRounds(self, suitId, rounds):
    """Override how many rounds suitId stays lured (no-op if not lured)."""
    if not self.__suitIsLured(suitId):
        return
    self.currentlyLuredSuits[suitId][1] = rounds

def __setLuredWakeChance(self, suitId, chance):
    """Override suitId's per-round wakeup chance (no-op if not lured)."""
    if not self.__suitIsLured(suitId):
        return
    self.currentlyLuredSuits[suitId][2] = chance

def __incLuredCurrRound(self, suitId):
    """Advance suitId's lured-round counter by one (no-op if not lured)."""
    if not self.__suitIsLured(suitId):
        return
    self.currentlyLuredSuits[suitId][0] += 1

def __removeLured(self, suitId):
    """Drop suitId from the lured table if present."""
    self.currentlyLuredSuits.pop(suitId, None)

def __luredMaxRoundsReached(self, suitId):
    """True when a lured suit's round counter has hit its allowed maximum."""
    if not self.__suitIsLured(suitId):
        return False
    info = self.currentlyLuredSuits[suitId]
    return info[0] >= info[1]

def __luredWakeupTime(self, suitId):
    """Roll the wakeup chance for a lured suit.  Never fires on the round
    the lure landed (while the round counter is still 0)."""
    if not self.__suitIsLured(suitId):
        return False
    info = self.currentlyLuredSuits[suitId]
    return info[0] > 0 and random.randint(0, 99) < info[2]
def itemIsCredit(self, track, level):
    """Gag-credit check: pet tricks never earn credit; other gags earn
    credit only while their level is below the battle's credit level."""
    return 0 if track == PETSOS else level < self.creditLevel
def __getActualTrack(self, toonAttack):
    """Resolve NPCSOS attacks to the summoned NPC's track; every other
    attack keeps its own track.  Falls back to NPCSOS if the NPC is unknown."""
    if toonAttack[TOON_TRACK_COL] != NPCSOS:
        return toonAttack[TOON_TRACK_COL]
    track = NPCToons.getNPCTrack(toonAttack[TOON_TGT_COL])
    if track != None:
        return track
    self.notify.warning('No NPC with id: %d' % toonAttack[TOON_TGT_COL])
    return toonAttack[TOON_TRACK_COL]

def __getActualTrackLevel(self, toonAttack):
    """Like __getActualTrack but returns (track, level), resolving both from
    the NPC card for NPCSOS attacks."""
    if toonAttack[TOON_TRACK_COL] != NPCSOS:
        return (toonAttack[TOON_TRACK_COL], toonAttack[TOON_LVL_COL])
    track, level, hp = NPCToons.getNPCTrackLevelHp(toonAttack[TOON_TGT_COL])
    if track != None:
        return (track, level)
    self.notify.warning('No NPC with id: %d' % toonAttack[TOON_TGT_COL])
    return (toonAttack[TOON_TRACK_COL], toonAttack[TOON_LVL_COL])
def __getActualTrackLevelHp(self, toonAttack):
    """Return (track, level, hp) for an attack.

    NPCSOS attacks resolve to the NPC card's track/level/hp.  PETSOS attacks
    compute the heal amount by interpolating the trick's heal range with the
    pet's aptitude for that trick.  All other attacks report hp 0.

    Fix: removed the unused local ``trick`` that merely duplicated
    ``trickId``.
    """
    if toonAttack[TOON_TRACK_COL] == NPCSOS:
        track, level, hp = NPCToons.getNPCTrackLevelHp(toonAttack[TOON_TGT_COL])
        if track != None:
            return (track, level, hp)
        else:
            self.notify.warning('No NPC with id: %d' % toonAttack[TOON_TGT_COL])
    elif toonAttack[TOON_TRACK_COL] == PETSOS:
        petProxyId = toonAttack[TOON_TGT_COL]
        trickId = toonAttack[TOON_LVL_COL]
        healRange = PetTricks.TrickHeals[trickId]
        hp = 0
        if petProxyId in simbase.air.doId2do:
            petProxy = simbase.air.doId2do[petProxyId]
            if trickId < len(petProxy.trickAptitudes):
                # Interpolate the heal between the trick's min/max by aptitude.
                aptitude = petProxy.trickAptitudes[trickId]
                hp = int(lerp(healRange[0], healRange[1], aptitude))
        else:
            self.notify.warning('pet proxy: %d not in doId2do!' % petProxyId)
        return (toonAttack[TOON_TRACK_COL], toonAttack[TOON_LVL_COL], hp)
    return (toonAttack[TOON_TRACK_COL], toonAttack[TOON_LVL_COL], 0)
def __calculatePetTrickSuccess(self, toonAttack):
    """Attempt the pet trick for a PETSOS attack.

    Stores the trick's failure flag in TOON_ACCBONUS_COL (1 = failed) and
    returns (hit, accuracy): (0, 0) on failure or missing pet proxy,
    (1, 100) on success.
    """
    petProxyId = toonAttack[TOON_TGT_COL]
    if not petProxyId in simbase.air.doId2do:
        self.notify.warning('pet proxy %d not in doId2do!' % petProxyId)
        # Treat a missing proxy as an automatic miss.
        toonAttack[TOON_ACCBONUS_COL] = 1
        return (0, 0)
    petProxy = simbase.air.doId2do[petProxyId]
    trickId = toonAttack[TOON_LVL_COL]
    toonAttack[TOON_ACCBONUS_COL] = petProxy.attemptBattleTrick(trickId)
    if toonAttack[TOON_ACCBONUS_COL] == 1:
        return (0, 0)
    else:
        return (1, 100)
| {
"content_hash": "21ab5790f06c3d6fcabeec2cfe6f48b6",
"timestamp": "",
"source": "github",
"line_count": 1619,
"max_line_length": 272,
"avg_line_length": 45.18282890673255,
"alnum_prop": 0.552063539801233,
"repo_name": "Spiderlover/Toontown",
"id": "d3cce0e70b8fad5c1f28c304abb0c62cc382e76d",
"size": "73151",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toontown/battle/BattleCalculatorAI.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7774"
},
{
"name": "Python",
"bytes": "17241353"
},
{
"name": "Shell",
"bytes": "7699"
}
],
"symlink_target": ""
} |
'''
MFEM example 8p
See c++ version in the MFEM library for more detail
How to run:
mpirun -np 2 python <arguments>
Example of arguments:
ex8p.py -m square-disc.mesh
ex8p.py -m star.mesh
ex8p.py -m escher.mesh
ex8p.py -m fichera.mesh
ex8p.py -m square-disc-p3.mesh
ex8p.py -m star-surf.mesh -o 2
'''
import sys
from os.path import expanduser, join, dirname
import numpy as np
from numpy import sin, cos, exp, sqrt
from mfem.common.arg_parser import ArgParser
import mfem.par as mfem
from mpi4py import MPI
# MPI bookkeeping: every rank runs the whole script; rank 0 handles printing.
num_procs = MPI.COMM_WORLD.size
myid = MPI.COMM_WORLD.rank
# 2. Parse command-line options.
parser = ArgParser(description='Ex8p')
parser.add_argument('-m', '--mesh',
                    default='star.mesh',
                    action='store', type=str,
                    help='Mesh file to use.')
parser.add_argument('-o', '--order',
                    action='store', default=1, type=int,
                    help="Finite element order (polynomial degree)")
# NOTE(review): store_true combined with default=True means this flag can
# never disable visualization from the command line -- confirm intent
# (the C++ ex8p uses a separate -no-vis switch).
parser.add_argument('-vis', '--visualization',
                    action='store_true', default=True,
                    help='Enable GLVis visualization')
args = parser.parse_args()
device = mfem.Device('cpu')
if myid == 0:
    device.Print()
order = args.order
visualization = args.visualization
if myid == 0:
    parser.print_options(args)
# 3. Read the (serial) mesh from the given mesh file on all processors. We
#    can handle triangular, quadrilateral, tetrahedral, hexahedral, surface
#    and volume meshes with the same code.
#    Fix: honor the -m/--mesh option; the path previously hard-coded
#    'star.mesh' and silently ignored the parsed argument.
meshfile = expanduser(join(dirname(__file__), '..', 'data', args.mesh))
mesh = mfem.Mesh(meshfile, 1, 1)
dim = mesh.Dimension()
# 4. Refine the serial mesh on all processors to increase the resolution. In
#    this example we do 'ref_levels' of uniform refinement. We choose
#    'ref_levels' to be the largest number that gives a final mesh with no
#    more than 10,000 elements.
ref_levels = int(np.floor(np.log(10000./mesh.GetNE())/np.log(2.)/dim))
for x in range(ref_levels):
    mesh.UniformRefinement()
# 5. Define a parallel mesh by a partitioning of the serial mesh. Refine
#    this mesh further in parallel to increase the resolution. Once the
#    parallel mesh is defined, the serial mesh can be deleted.
pmesh = mfem.ParMesh(MPI.COMM_WORLD, mesh)
del mesh
par_ref_levels = 1
for l in range(par_ref_levels):
    pmesh.UniformRefinement()
pmesh.ReorientTetMesh()
# 6. Define the trial, interfacial (trace) and test DPG spaces:
#    - The trial space, x0_space, contains the non-interfacial unknowns and
#      has the essential BC.
#    - The interfacial space, xhat_space, contains the interfacial unknowns
#      and does not have essential BC.
#    - The test space, test_space, is an enriched space where the enrichment
#      degree may depend on the spatial dimension of the domain, the type of
#      the mesh and the trial space order.
trial_order = order
trace_order = order - 1
test_order = order  # reduced order, full order is (order + dim - 1)
if (dim == 2 and (order % 2 == 0 or (pmesh.MeshGenerator() & 2 and order > 1))):
    test_order = test_order + 1
if (test_order < trial_order):
    if myid == 0:
        print("Warning, test space not enriched enough to handle primal trial space")
x0_fec = mfem.H1_FECollection(trial_order, dim)
xhat_fec = mfem.RT_Trace_FECollection(trace_order, dim)
test_fec = mfem.L2_FECollection(test_order, dim)
x0_space = mfem.ParFiniteElementSpace(pmesh, x0_fec)
xhat_space = mfem.ParFiniteElementSpace(pmesh, xhat_fec)
test_space = mfem.ParFiniteElementSpace(pmesh, test_fec)
glob_true_s0 = x0_space.GlobalTrueVSize()
glob_true_s1 = xhat_space.GlobalTrueVSize()
glob_true_s_test = test_space.GlobalTrueVSize()
if myid == 0:
    # Fix: the banner previously read "nNumber of Unknowns" -- a mangled
    # "\nNumber of Unknowns" (the C++ ex8p prints the same heading).
    print('\n'.join(["\nNumber of Unknowns",
                     " Trial space,     X0   : " + str(glob_true_s0) +
                     " (order " + str(trial_order) + ")",
                     " Interface space, Xhat : " + str(glob_true_s1) +
                     " (order " + str(trace_order) + ")",
                     " Test space,      Y    : " + str(glob_true_s_test) +
                     " (order " + str(test_order) + ")"]))
# 7. Set up the linear form F(.) which corresponds to the right-hand side of
#    the FEM linear system, which in this case is (f,phi_i) where f=1.0 and
#    phi_i are the basis functions in the test finite element fespace.
one = mfem.ConstantCoefficient(1.0)
F = mfem.ParLinearForm(test_space)
F.AddDomainIntegrator(mfem.DomainLFIntegrator(one))
F.Assemble()
# Primal solution, initialized to zero (holds the essential BC values).
x0 = mfem.ParGridFunction(x0_space)
x0.Assign(0.0)
# 8. Set up the mixed bilinear form for the primal trial unknowns, B0,
#    the mixed bilinear form for the interfacial unknowns, Bhat,
#    the inverse stiffness matrix on the discontinuous test space, Sinv,
#    and the stiffness matrix on the continuous trial space, S0.
ess_bdr = mfem.intArray(pmesh.bdr_attributes.Max())
ess_bdr.Assign(1)
ess_dof = mfem.intArray()
x0_space.GetEssentialVDofs(ess_bdr, ess_dof)
B0 = mfem.ParMixedBilinearForm(x0_space, test_space)
B0.AddDomainIntegrator(mfem.DiffusionIntegrator(one))
B0.Assemble()
B0.EliminateEssentialBCFromTrialDofs(ess_dof, x0, F)
B0.Finalize()
Bhat = mfem.ParMixedBilinearForm(xhat_space, test_space)
Bhat.AddTraceFaceIntegrator(mfem.TraceJumpIntegrator())
Bhat.Assemble()
Bhat.Finalize()
# Sinv: inverse of (diffusion + mass) on the discontinuous test space.
Sinv = mfem.ParBilinearForm(test_space)
Sum = mfem.SumIntegrator()
Sum.AddIntegrator(mfem.DiffusionIntegrator(one))
Sum.AddIntegrator(mfem.MassIntegrator(one))
Sinv.AddDomainIntegrator(mfem.InverseIntegrator(Sum))
Sinv.Assemble()
Sinv.Finalize()
S0 = mfem.ParBilinearForm(x0_space)
S0.AddDomainIntegrator(mfem.DiffusionIntegrator(one))
S0.Assemble()
S0.EliminateEssentialBC(ess_bdr)
S0.Finalize()
# Assemble the parallel (Hypre) matrices; the form objects are no longer needed.
matB0 = B0.ParallelAssemble()
del B0
matBhat = Bhat.ParallelAssemble()
del Bhat
matSinv = Sinv.ParallelAssemble()
del Sinv
matS0 = S0.ParallelAssemble()
del S0
# 9. Define the block structure of the problem, by creating the offset
#    variables. Also allocate two BlockVector objects to store the solution
#    and rhs.
x0_var = 0
xhat_var = 1
NVAR = 2  # enum in C
true_s0 = x0_space.TrueVSize()
true_s1 = xhat_space.TrueVSize()
true_s_test = test_space.TrueVSize()
true_offsets = mfem.intArray([0, true_s0, true_s0+true_s1])
true_offsets_test = mfem.intArray([0, true_s_test])
x = mfem.BlockVector(true_offsets)
b = mfem.BlockVector(true_offsets)
x.Assign(0.0)
b.Assign(0.0)
# 10. Set up the 1x2 block Least Squares DPG operator, B = [B0 Bhat],
#     the normal equation operator, A = B^t Sinv B, and
#     the normal equation right-hand-size, b = B^t Sinv F.
B = mfem.BlockOperator(true_offsets_test, true_offsets)
B.SetBlock(0, 0, matB0)
B.SetBlock(0, 1, matBhat)
A = mfem.RAPOperator(B, matSinv, B)
trueF = F.ParallelAssemble()
SinvF = mfem.HypreParVector(test_space)
matSinv.Mult(trueF, SinvF)
B.MultTranspose(SinvF, b)
# 11. Set up a block-diagonal preconditioner for the 2x2 normal equation
#
#        [ S0^{-1}     0     ]
#        [   0     Shat^{-1} ]      Shat = (Bhat^T Sinv Bhat)
#
#     corresponding to the primal (x0) and interfacial (xhat) unknowns.
#     Since the Shat operator is equivalent to an H(div) matrix reduced to
#     the interfacial skeleton, we approximate its inverse with one V-cycle
#     of the ADS preconditioner from the hypre library (in 2D we use AMS for
#     the rotated H(curl) problem).
S0inv = mfem.HypreBoomerAMG(matS0)
S0inv.SetPrintLevel(0)
Shat = mfem.RAP(matSinv, matBhat)
if (dim == 2):
    Shatinv = mfem.HypreAMS(Shat, xhat_space)
else:
    Shatinv = mfem.HypreADS(Shat, xhat_space)
P = mfem.BlockDiagonalPreconditioner(true_offsets)
P.SetDiagonalBlock(0, S0inv)
P.SetDiagonalBlock(1, Shatinv)
# 12. Solve the normal equation system using the PCG iterative solver.
#     Check the weighted norm of residual for the DPG least square problem.
#     Wrap the primal variable in a GridFunction for visualization purposes.
pcg = mfem.CGSolver(MPI.COMM_WORLD)
pcg.SetOperator(A)
pcg.SetPreconditioner(P)
pcg.SetRelTol(1e-6)
pcg.SetMaxIter(200)
pcg.SetPrintLevel(1)
pcg.Mult(b, x)
# Residual check in the S^{-1}-weighted norm: res = sqrt((Bx-F)^T Sinv (Bx-F)).
LSres = mfem.HypreParVector(test_space)
tmp = mfem.HypreParVector(test_space)
B.Mult(x, LSres)
LSres -= trueF
matSinv.Mult(LSres, tmp)
res = sqrt(mfem.InnerProduct(LSres, tmp))
if (myid == 0):
    print("\n|| B0*x0 + Bhat*xhat - F ||_{S^-1} = " + str(res))
# Pull the primal block of the true-dof solution back into the grid function.
x0.Distribute(x.GetBlock(x0_var))
# 13. Save the refined mesh and the solution in parallel. This output can
#     be viewed later using GLVis: "glvis -np <np> -m mesh -g sol".
#     Each rank writes its own piece, suffixed with its zero-padded rank.
smyid = '{:0>6d}'.format(myid)
mesh_name = "mesh."+smyid
sol_name = "sol."+smyid
pmesh.Print(mesh_name, 8)
x0.Save(sol_name, 8)
# 14. Send the solution by socket to a GLVis server.
if visualization:
    sol_sock = mfem.socketstream("localhost", 19916)
    sol_sock.send_text("parallel " + str(num_procs) + " " + str(myid))
    sol_sock.precision(8)
    sol_sock.send_solution(pmesh, x0)
| {
"content_hash": "9b07ed2616a04329a9eb79499c095faf",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 85,
"avg_line_length": 32.87822878228782,
"alnum_prop": 0.6894500561167228,
"repo_name": "mfem/PyMFEM",
"id": "dc432ae9d223493c61254024528e983af956925d",
"size": "8910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/ex8p.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "179682"
},
{
"name": "Grammatical Framework",
"bytes": "18800"
},
{
"name": "Makefile",
"bytes": "1055"
},
{
"name": "Python",
"bytes": "265160"
},
{
"name": "SWIG",
"bytes": "371435"
},
{
"name": "Shell",
"bytes": "1650"
}
],
"symlink_target": ""
} |
package org.apache.drill.exec.impersonation;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
import org.apache.drill.categories.SecurityTest;
import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.rpc.user.InboundImpersonationManager;
import org.apache.drill.exec.server.options.OptionDefinition;
import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.server.options.SystemOptionManager;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.IOException;
import static junit.framework.Assert.assertEquals;
/**
 * Tests for {@code InboundImpersonationManager.hasImpersonationPrivileges}:
 * verifies which proxy-user -&gt; target-user impersonations are permitted
 * under the policy file bundled as a test resource.
 */
@Category(SecurityTest.class)
public class TestInboundImpersonationPrivileges extends BaseTestImpersonation {
  private static final org.slf4j.Logger logger =
      org.slf4j.LoggerFactory.getLogger(TestInboundImpersonationPrivileges.class);

  // policies on which the tests are based
  private static final String IMPERSONATION_POLICIES;

  static {
    try {
      IMPERSONATION_POLICIES = Files.toString(DrillFileUtils.getResourceAsFile("/inbound_impersonation_policies.json"),
          Charsets.UTF_8);
    } catch (final IOException e) {
      // Fail class initialization loudly; every test depends on the policies.
      throw new RuntimeException("Cannot load impersonation policies.", e);
    }
  }

  /**
   * Validates the policy string through the system option validator, then
   * returns whether proxyName may impersonate targetName under it.
   * Any exception during the check is logged and treated as "not allowed".
   */
  private static boolean checkPrivileges(final String proxyName, final String targetName) {
    OptionDefinition optionDefinition = SystemOptionManager.createDefaultOptionDefinitions().get(ExecConstants.IMPERSONATION_POLICIES_KEY);
    ExecConstants.IMPERSONATION_POLICY_VALIDATOR.validate(
        OptionValue.create(optionDefinition.getMetaData().getAccessibleScopes(),
            ExecConstants.IMPERSONATION_POLICIES_KEY,
            IMPERSONATION_POLICIES,OptionValue.OptionScope.SYSTEM), optionDefinition.getMetaData(),null);
    try {
      return InboundImpersonationManager.hasImpersonationPrivileges(proxyName, targetName, IMPERSONATION_POLICIES);
    } catch (final Exception e) {
      logger.error("Failed to check impersonation privileges.", e);
      return false;
    }
  }

  // Asserts that checkPrivileges(proxyName, targetName) matches the expectation.
  private static void run(final String proxyName, final String targetName, final boolean expected) {
    assertEquals("proxyName: " + proxyName + " targetName: " + targetName,
        expected, checkPrivileges(proxyName, targetName));
  }

  // user0_1 is allowed to impersonate everyone in both orgs.
  @Test
  public void allTargetUsers() {
    for (final String user : org1Users) {
      run("user0_1", user, true);
    }
    for (final String user : org2Users) {
      run("user0_1", user, true);
    }
  }

  // user1_1 may not impersonate anyone.
  @Test
  public void noTargetUsers() {
    for (final String user : org1Users) {
      run("user1_1", user, false);
    }
    for (final String user : org2Users) {
      run("user1_1", user, false);
    }
  }

  // user2_1 may impersonate only user3_1; everyone else is denied.
  @Test
  public void someTargetUsersAndGroups() {
    run("user2_1", "user3_1", true);
    // NOTE(review): duplicate of the previous assertion -- possibly a
    // different target was intended here; confirm against the policy file.
    run("user2_1", "user3_1", true);
    run("user2_1", "user1_1", false);
    run("user2_1", "user4_1", false);
    for (final String user : org1Users) {
      if (!user.equals("user3_1") && !user.equals("user2_1")) {
        run("user2_1", user, false);
      }
    }
    for (final String user : org2Users) {
      run("user2_1", user, false);
    }
  }

  // user4_1 may impersonate exactly user1_1 and user3_1.
  @Test
  public void someTargetUsers() {
    run("user4_1", "user1_1", true);
    run("user4_1", "user3_1", true);
    for (final String user : org1Users) {
      if (!user.equals("user1_1") && !user.equals("user3_1")) {
        run("user4_1", user, false);
      }
    }
    for (final String user : org2Users) {
      run("user4_1", user, false);
    }
  }

  // user5_1's policy grants a single target group (spot checks).
  @Test
  public void oneTargetGroup() {
    run("user5_1", "user4_2", true);
    run("user5_1", "user5_2", true);
    run("user5_1", "user4_1", false);
    run("user5_1", "user3_2", false);
  }

  // user5_2 may impersonate two explicitly listed users.
  @Test
  public void twoTargetUsers() {
    run("user5_2", "user0_2", true);
    run("user5_2", "user1_2", true);
    run("user5_2", "user2_2", false);
    run("user5_2", "user0_1", false);
    run("user5_2", "user1_1", false);
  }

  // user3_2's policy grants two target groups covering several org2 users.
  @Test
  public void twoTargetGroups() {
    run("user3_2", "user4_2", true);
    run("user3_2", "user1_2", true);
    run("user3_2", "user2_2", true);
    run("user3_2", "user0_2", false);
    run("user3_2", "user5_2", false);
    for (final String user : org1Users) {
      run("user3_2", user, false);
    }
  }
}
| {
"content_hash": "7ae2396654498c54fdcae587e118f4c3",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 139,
"avg_line_length": 32.1865671641791,
"alnum_prop": 0.6700672385810341,
"repo_name": "nagix/drill",
"id": "c83adc936235a4dd5785a6a3f16f396a62b05e10",
"size": "5114",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestInboundImpersonationPrivileges.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7200"
},
{
"name": "C",
"bytes": "31425"
},
{
"name": "C++",
"bytes": "581383"
},
{
"name": "CMake",
"bytes": "24811"
},
{
"name": "CSS",
"bytes": "14536"
},
{
"name": "FreeMarker",
"bytes": "136069"
},
{
"name": "GAP",
"bytes": "16502"
},
{
"name": "Java",
"bytes": "22312938"
},
{
"name": "JavaScript",
"bytes": "74920"
},
{
"name": "PLSQL",
"bytes": "6665"
},
{
"name": "Python",
"bytes": "5388"
},
{
"name": "Shell",
"bytes": "99566"
}
],
"symlink_target": ""
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>Function template format_date_time</title>
<link rel="stylesheet" href="../../../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../index.html" title="Chapter 1. Boost.Log v2">
<link rel="up" href="../../../expressions.html#header.boost.log.expressions.formatters.date_time_hpp" title="Header <boost/log/expressions/formatters/date_time.hpp>">
<link rel="prev" href="format_date_ti_idp52461760.html" title="Function template format_date_time">
<link rel="next" href="format_date_ti_idp52473904.html" title="Function template format_date_time">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr><td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../../../boost.png"></td></tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="format_date_ti_idp52461760.html"><img src="../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../../expressions.html#header.boost.log.expressions.formatters.date_time_hpp"><img src="../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="format_date_ti_idp52473904.html"><img src="../../../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="refentry">
<a name="boost.log.expressions.format_date_ti_idp52467824"></a><div class="titlepage"></div>
<div class="refnamediv">
<h2><span class="refentrytitle">Function template format_date_time</span></h2>
<p>boost::log::expressions::format_date_time</p>
</div>
<h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2>
<div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: <<a class="link" href="../../../expressions.html#header.boost.log.expressions.formatters.date_time_hpp" title="Header <boost/log/expressions/formatters/date_time.hpp>">boost/log/expressions/formatters/date_time.hpp</a>>
</span>
<span class="keyword">template</span><span class="special"><</span><span class="keyword">typename</span> AttributeValueT<span class="special">,</span> <span class="keyword">typename</span> CharT<span class="special">></span>
<a class="link" href="format_date_time_actor.html" title="Class template format_date_time_actor">format_date_time_actor</a><span class="special"><</span> <span class="identifier">AttributeValueT</span><span class="special">,</span> <span class="identifier">fallback_to_none</span><span class="special">,</span> <span class="identifier">CharT</span> <span class="special">></span>
<span class="identifier">format_date_time</span><span class="special">(</span><span class="identifier">attribute_name</span> <span class="keyword">const</span> <span class="special">&</span> name<span class="special">,</span>
<span class="identifier">std</span><span class="special">::</span><span class="identifier">basic_string</span><span class="special"><</span> <span class="identifier">CharT</span> <span class="special">></span> <span class="keyword">const</span> <span class="special">&</span> format<span class="special">)</span><span class="special">;</span></pre></div>
<div class="refsect1">
<a name="idp126430160"></a><h2>Description</h2>
<p>The function generates a manipulator node in a template expression. The manipulator must participate in a formatting expression (stream output or <code class="computeroutput">format</code> placeholder filler).</p>
<p>
</p>
<div class="variablelist"><table border="0" class="variablelist compact">
<colgroup>
<col align="left" valign="top">
<col>
</colgroup>
<tbody><tr>
<td><p><span class="term">Parameters:</span></p></td>
<td><div class="variablelist"><table border="0" class="variablelist compact">
<colgroup>
<col align="left" valign="top">
<col>
</colgroup>
<tbody>
<tr>
<td><p><span class="term"><code class="computeroutput">format</code></span></p></td>
<td><p>Format string </p></td>
</tr>
<tr>
<td><p><span class="term"><code class="computeroutput">name</code></span></p></td>
<td><p>Attribute name </p></td>
</tr>
</tbody>
</table></div></td>
</tr></tbody>
</table></div>
</div>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2007-2015 Andrey
Semashev<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>).
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="format_date_ti_idp52461760.html"><img src="../../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../../expressions.html#header.boost.log.expressions.formatters.date_time_hpp"><img src="../../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="format_date_ti_idp52473904.html"><img src="../../../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| {
"content_hash": "d573f556c3b019f016baa5ba3ceb6319",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 540,
"avg_line_length": 72.5,
"alnum_prop": 0.6693191865605659,
"repo_name": "Franky666/programmiersprachen-raytracer",
"id": "afd7d7a02fb645b8d17f925583eb812edeee266e",
"size": "5655",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "external/boost_1_59_0/libs/log/doc/html/boost/log/expressions/format_date_ti_idp52467824.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "905071"
},
{
"name": "C++",
"bytes": "46207"
},
{
"name": "CMake",
"bytes": "4419"
}
],
"symlink_target": ""
} |
package filter
import (
"context"
"fmt"
"os"
"testing"
"golang.org/x/oauth2/google"
"google.golang.org/api/compute/v1"
)
var (
	// projectID selects the GCP project used by the live-API test; when it is
	// unset the test skips itself.
	projectID = os.Getenv("PROJECT_ID")
	// test requires e2e cluster to be present in the project specified in order to match these filters
	routeFilters   = map[string]interface{}{"name": "e2e-test-.*", "description": "k8s-node-route", "priority": 1000}
	networkFilters = map[string]interface{}{"autoCreateSubnetworks": false}
)
// TestFilterResources exercises the filter builder against the live GCP
// Compute API by listing routes and networks in $PROJECT_ID. It is skipped
// when PROJECT_ID is not set.
//
// Fixes: "rist" typos in the error messages, the networks failure message
// wrongly saying "routes", and Errorf -> Fatalf on client-setup failures
// (continuing with a nil client would panic below).
func TestFilterResources(t *testing.T) {
	t.Parallel()
	if projectID == "" {
		t.Skip("Missing projectID... skipping test")
	}
	// See https://cloud.google.com/docs/authentication/.
	// Use GOOGLE_APPLICATION_CREDENTIALS environment variable to specify
	// a service account key file to authenticate to the API.
	hc, err := google.DefaultClient(context.Background(), compute.ComputeScope)
	if err != nil {
		t.Fatalf("Could not get authenticated client: %v", err)
	}
	svc, err := compute.New(hc)
	if err != nil {
		t.Fatalf("Could not initialize compute client: %v", err)
	}
	if err := listRoutes(svc, projectID, t); err != nil {
		t.Errorf("Failed to list routes - %v", err)
	}
	if err := listNetworks(svc, projectID, t); err != nil {
		t.Errorf("Failed to list networks - %v", err)
	}
}
// constructFilter builds a filter expression string from params, combining
// one clause per entry with AND. String values become regexp matches; bool
// and int values become equality matches; values of any other type are
// silently ignored. Returns "" when no clause was produced.
func constructFilter(params map[string]interface{}) string {
	var fl *F
	for key, val := range params {
		// Bind the asserted value in the type switch rather than repeating
		// type assertions (val.(string), val.(bool), ...) inside each case.
		switch v := val.(type) {
		case string:
			if fl == nil {
				fl = Regexp(key, v)
			} else {
				fl.AndRegexp(key, v)
			}
		case bool:
			if fl == nil {
				fl = EqualBool(key, v)
			} else {
				fl.AndEqualBool(key, v)
			}
		case int:
			if fl == nil {
				fl = EqualInt(key, v)
			} else {
				fl.AndEqualInt(key, v)
			}
		}
	}
	if fl == nil {
		return ""
	}
	return fl.String()
}
// listRoutes lists the routes in projectID that match routeFilters and logs
// each matching route's name through t.
func listRoutes(svc *compute.Service, projectID string, t *testing.T) error {
	filter := constructFilter(routeFilters)
	resp, err := svc.Routes.List(projectID).Filter(filter).Do()
	if err != nil {
		return fmt.Errorf("failed to list routes: %v", err)
	}
	t.Logf("Got %d matching routes matching filter '%s':", len(resp.Items), filter)
	for i, route := range resp.Items {
		t.Logf("%d. %s", i, route.Name)
	}
	return nil
}
// listNetworks lists the networks in projectID that match networkFilters and
// logs each matching network's name through t.
func listNetworks(svc *compute.Service, projectID string, t *testing.T) error {
	filter := constructFilter(networkFilters)
	resp, err := svc.Networks.List(projectID).Filter(filter).Do()
	if err != nil {
		return fmt.Errorf("failed to list networks: %v", err)
	}
	t.Logf("Got %d matching networks matching filter '%s':", len(resp.Items), filter)
	for i, network := range resp.Items {
		t.Logf("%d. %s", i, network.Name)
	}
	return nil
}
| {
"content_hash": "3284dc840a9176d8df7ec70143e627b8",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 114,
"avg_line_length": 25.563106796116504,
"alnum_prop": 0.6543866312191416,
"repo_name": "ironcladlou/origin",
"id": "e5c3f1d2b64958b80aa182348c3a0bb245a6aec1",
"size": "3186",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "vendor/github.com/GoogleCloudPlatform/k8s-cloud-provider/pkg/cloud/filter/filter_resource_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "921"
},
{
"name": "Dockerfile",
"bytes": "2240"
},
{
"name": "Go",
"bytes": "2321943"
},
{
"name": "Makefile",
"bytes": "6395"
},
{
"name": "Python",
"bytes": "14593"
},
{
"name": "Shell",
"bytes": "310343"
}
],
"symlink_target": ""
} |
// Polish ("pl") localization strings for the CKEditor "removeformat" plugin.
CKEDITOR.plugins.setLang( 'removeformat', 'pl', {
	toolbar: 'Usuń formatowanie'
} );
| {
"content_hash": "024e734fa719d514f908698cd310962f",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 49,
"avg_line_length": 21.75,
"alnum_prop": 0.6781609195402298,
"repo_name": "Rudhie/simlab",
"id": "90a9810b80f0001e685fa735fae45d3b04e8e16b",
"size": "232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "assets/ckeditor/plugins/removeformat/lang/pl.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "364"
},
{
"name": "CSS",
"bytes": "186843"
},
{
"name": "HTML",
"bytes": "427207"
},
{
"name": "JavaScript",
"bytes": "5743945"
},
{
"name": "PHP",
"bytes": "1987763"
}
],
"symlink_target": ""
} |
// Convenience additions for building and populating NSAlert instances.
@implementation NSAlert (additions)

// Returns an alert configured with the given style, optional message and
// informative texts (nil collapses to the empty string), and one button
// per entry of `titles`, added in order.
+ (instancetype)alertWithStyle:(NSAlertStyle)style
                   messageText:(nullable NSString *)messageText
               informativeText:(nullable NSString *)informativeText
                  buttonTitles:(NSArray <NSString *> *)titles
{
	NSAlert * result = [[NSAlert alloc] init];
	result.alertStyle = style;
	result.messageText = (messageText != nil) ? messageText : @"";
	result.informativeText = (informativeText != nil) ? informativeText : @"";
	[titles enumerateObjectsUsingBlock:^(NSString * title, NSUInteger index, BOOL * stop) {
		[result addButtonWithTitle:title];
	}];
	return result;
}

// Adds one button per title and returns the created buttons, in order.
- (NSArray <NSButton *> *)addButtonsWithTitles:(NSArray <NSString *> *)titles
{
	NSMutableArray * result = [NSMutableArray arrayWithCapacity:titles.count];
	for (NSString * title in titles)
		[result addObject:[self addButtonWithTitle:title]];
	return result;
}

@end
| {
"content_hash": "9dcaf1722922215bb1f6696da6683a3b",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 77,
"avg_line_length": 28.77777777777778,
"alnum_prop": 0.7271557271557272,
"repo_name": "Lisapple/TeaBox",
"id": "12ba5d5c38ada8ed389fcb212ae738f8b02156db",
"size": "891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tea Box/NSAlert+additions.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2278"
},
{
"name": "HTML",
"bytes": "6787"
},
{
"name": "Objective-C",
"bytes": "265068"
},
{
"name": "Ruby",
"bytes": "372"
}
],
"symlink_target": ""
} |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package UI;
import javax.swing.SwingUtilities;
/**
*
* @author Aluno
*/
/**
 * Application entry point: shows the initial form on the Swing Event
 * Dispatch Thread.
 */
public class Projeto {

    public static void main(String... args)
    {
        // All Swing UI work must be scheduled onto the EDT.
        SwingUtilities.invokeLater(Projeto::showInitialForm);
    }

    /** Creates the initial form and makes it visible. */
    private static void showInitialForm()
    {
        FormInicial inicial = new FormInicial();
        inicial.setVisible(true);
    }
}
| {
"content_hash": "56ee864573bf645cb750acb0ccccbe63",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 79,
"avg_line_length": 22.347826086956523,
"alnum_prop": 0.6089494163424124,
"repo_name": "cefet-inf-2015/portal-educacao",
"id": "bfa16cc0e737b51989c66b27ef841a5ef3dd16c3",
"size": "514",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Banco de Questões/java/UI/Projeto.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2291"
},
{
"name": "CSS",
"bytes": "783474"
},
{
"name": "HTML",
"bytes": "7343216"
},
{
"name": "Java",
"bytes": "1812512"
},
{
"name": "JavaScript",
"bytes": "1015535"
},
{
"name": "PHP",
"bytes": "1020927"
},
{
"name": "Shell",
"bytes": "2101"
}
],
"symlink_target": ""
} |
---
layout: single
title: "Pipeline Expressions Guide"
sidebar:
nav: guides
redirect_from: /docs/pipeline-expressions-guide
---
{% include toc %}
## What are Pipeline Expressions?
Pipeline expressions allow you to put a placeholder value that gets filled in when you run your pipeline.
A pipeline expression in Spinnaker looks like this:
`${ execution.stages[0].name }`
For example, let's say I have a field called `command` that I want to be able to change every time I manually execute a pipeline. When I configure the pipeline, instead of a normal string, I can enter the expression `${ parameters.command }`,

I can now set up my pipeline to take a `command` parameter, when I manually execute my pipeline, it will ask me to enter the value for `command`.

When the pipeline runs, it resolves the expression to what I’ve entered for `command`.
## Where can I use Pipeline Expressions in Spinnaker?
> **Warning**
>
> Sometimes adding an expression stops your ability to use the UI. For example, if you use a parameter for the account field in a cluster deployment stage, you will see a spinning loader from the UI when you try to edit this cluster.
You can usually type in a value in the text fields of the Pipeline stages in Spinnaker.
[comment]: <> 
<img width="600" src="3.png">
In the following example, we're using pipeline expressions as parameters being passed into a Jenkins stage:
> **Expressions and Pipeline Configuration**
>
> Because expressions are evaluated per stage before each stage starts, you cannot use expressions in the configuration at the pipeline level. For example, an expression in the job name of a trigger for the pipeline does not work because no expressions have been evaluated yet.
### Turning a stage on or off
You can use expressions to turn an individual stage on or off.
To do so, select the 'Conditional on Expression' checkbox.
Enter an expression:

When this pipeline runs, it will evaluate the expression to decide if the stage can be skipped. This is useful for adding optional stages to your pipeline.
### Preconditions Stage
To use expressions to gate multiple stages, you can use the 'Check Preconditions' stage. When this stage is configured, it will gate the execution of subsequent stages. In other words, the stages that follow this stage will only run if the precondition evaluates to true.
Add a new stage and set it to be of type *Check Preconditions*.

You can add one or more preconditions by clicking on *Add Precondition*.
You can add expressions to be checked here by selecting *Expression* from the Check dropdown and entering an expression.

When the pipeline runs, if this expression evaluates to true, the pipeline will continue as is. If not, the stages following this stage will not run.
### Sometimes you can't use an expression
There are often cases where the value you want to replace doesn't allow typing, such as specifying a list of security groups or wanting to dynamically control the behavior of a checkbox. For these cases, you should edit the pipeline JSON directly.
You can edit the pipeline JSON by selecting Pipeline Actions -> Edit as Json

In the popup screen, you can now edit the fields that match the fields you want to substitute.
[comment]: <> 
<img width="700" src="5.png">
## What kind of data can I put in Pipeline Expressions?
In this section, we will describe the numerous sources of data for a pipeline expression in Spinnaker and how to access them. Expressions are evaluated against the currently running pipeline. To see all the available attributes in a pipeline, you can view the source JSON. The easiest way to view this JSON is as follows,
* Go to an execution of your pipeline.
* Click on `Details` to expand the pipeline.
* Click on the `source` link under the pipeline execution:

This will take you to a JSON file that contains the details of your pipeline execution.
### Pipeline Execution
The current running pipeline is available within Pipeline Expressions as `execution`.
From there, you can navigate to different parts of the pipeline. For example, to reference the name of the first stage you can use `${ execution.stages[0]['name'] }`.
### The current stage
Values for the current stage context are available by their variable name. In the execution JSON, they will be defined under the context object.
For example, if I look at the JSON for a bake stage execution, I will see something like this:
```
{
"id": "e980c921-b1f3-4b04-adda-b5c50ea1797a",
"type": "bake",
"name": "Bake",
"startTime": 1464024188808,
"endTime": null,
"status": "RUNNING",
"context": {
"cloudProviderType": "aws",
"regions": ["us-west-1"],
"user": "clin@netflix.com",
"vmType": "hvm",
"storeType": "docker",
"baseOs": "trusty",
"baseLabel": "release",
"package": "mypackage",
"amiSuffix": "20160523172307" ...
```
This means that if I reference a field in my expression like `${ package }`, it will resolve to the package for that stage.
### Other stages
You can also access the value of other stages by doing a lookup by name using the [#stage( stage name )](#stagestring) helper method. To access stage attributes, you can call `${ #stage('stage name')['id']}`. To access the values passed into the stage, use `${ #stage('stage name')['context']['baseAmiName']}`.
### Pipeline Parameters
The configuration screen of your pipeline lets you specify parameters. Parameters to the pipeline can also come from upstream pipelines that have configured a Pipeline stage.
You can access values of parameters within expressions by referencing the `parameters` array. This is a shortcut for `execution.parameters`. For example, `${ parameters['run canary'] }` will evaluate to the value of the parameter called *run canary*.
### Trigger Values
You can reference values in the pipeline trigger via the `trigger` property. This value is available in the execution JSON under the trigger object.
The `trigger.buildInfo` field in jenkins triggers will contain details about the jenkins execution. To access the version hash of the git commit in Jenkins, for example, we can ask for `${ trigger.buildInfo.scm[0]['sha1'] }`.
### Property files
A feature of the Jenkins trigger is the ability to specify a properties file. Spinnaker will read the contents of this artifact ( it can be a Java .properties file, yml or json ) and add it into the pipeline context.
To specify a property file, make sure you're archiving it in your jenkins job, then provide the name of the file, i.e `mypropertyfile.properties` ( not the absolute path i.e, `workspace/build/mypropertyfile.properties` ).

The contents of this file will now be available as a map under the trigger and accessible via `trigger.properties`.
For example, if my property file looked like this:
```
BUILD_STACK=bojack
BUILD_ANIMAL=horseman
```
I can then access the value from the property file as `${ trigger.properties['BUILD_ANIMAL']}` to get `horseman`.
If you're adding property files from a Jenkins or Script *STAGE* ( not Trigger ), then you can omit the `trigger.properties` field as the stage. Any stage that runs after will be able to access these parameters as normal context fields. If we created a new stage and ran the same Jenkins job as above, then `${ BUILD_ANIMAL }` will resolve to `horseman`.
> **Using the same field names**
>
> If you have multiple Jenkins jobs, they can clobber the same properties. A way around this is to reference the particular stage. `${ #stage( 'first jenkins job' )['context']['BUILD_ANIMAL'] }`
### Manual Judgment Choices
The manual judgment stage allows you to specify options when you approve a stage. Under Judgement Inputs, you can enter a choice.

When your stage executes, you will see a dropdown with the provided choice.

In subsequent stages, you can reference the choice provided via the [#judgment( stage Name ) ](#judgmentstring) helper method `${ #judgment( 'manual judgment stage name' )}`.
### External sources
You can also read from external sources via the [#fromUrl( url )](#fromurlstring) and [#jsonFromUrl( url )](#jsonfromurlstring) helper methods.
## Pipeline Expression Language Reference
The Pipeline Expression syntax is implemented with the Spring Expression Language ( SpEL ). You can learn more about SPeL by following this [link](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/expressions.html).
### Writing Expressions
A pipeline expression is started by `${` and terminated by `}` :
`${ expression here }`
You can interpolate expressions and strings, so
`${expressionA}-randomString-${expressionB}`
will evaluate to `Kimmy-randomString-Schmidt` when expressionA is `Kimmy` and expressionB is `Schmidt`.
In Spinnaker, if an expression cannot be evaluated, it will return the expression entered.
### Strings
You can enter String in expressions: `${ 'this is a String' }` will evaluate to `this is a String`.
### Lists
You can reference entries in lists: `${ stages[0] }` will return the value of the first stage.
### Maps
And you can reference values in maps based on their keys: `${ trigger['properties']['value1']}`
**Using a dot (.) vs square brackets [ 'value' ]**
Spinnaker supports both a dot notation for accessing values and a square bracket notation. However, there are a few places where the dot notation doesn't work as expected --- for example after a filter operation or getting nested JSON values from an URL. Generally, it's recommended to use the square bracket notation for referencing map values as this is most predictable. I.e, prefer `trigger['properties']['value']` instead of `trigger.properties.value`.
### Math
Arithmetic operations such as `${ trigger.buildInfo.number * 2 }` are supported.
### Comparisons
The expression language also allows you to specify relational operators, such as `${ instance.size > 400 }` or `${ params.runCanary == true }`.
### Functions
There are a few [helper functions](#helper-functions-in-spinnaker) available within Spinnaker. These can be accessed via `#functionName( params )`. For example, `#fromUrl( 'http://www.netflix.com' )`.
### Code
You can execute java code within the expression language. This can be useful for string manipulation or to create more exotic data fields.
You can use methods available to existing types like String - `${ 'this is a long string'.substring(0,5)}`
You can also declare new classes. Note that package names need to be fully qualified. In the following expression, we're getting the current date as MM-dd-yyyy format: `${ new java.text.SimpleDateFormat('MM-dd-yyyy').format(new java.util.Date()) }`
### Some Useful Things to Know
#### Default Values
You can set a default value within expressions by using `?:`. For example, `${ parameters.region ?: 'us-east-1'}`.
#### Filtering Maps
The expression language allows you to filter maps by using the `.?` expression. For example, to filter a list of stages by type, you can use the expression `${ execution.stages.?[ type == 'bake' ] }`
#### Lists as Parameters
Sometimes you want to enter a list of values as a parameter. For example, a list of regions or security groups. A useful tip here is to ask the user to enter them as a list of comma separated values `us-east-1,us-west-1,eu-west-1` and then use the `split()` method to make these into a map. Your expression would look like this `parameters.regions.split(',')`.
### #root
Sometimes you want to reference another variable in an expression. You can do this by prepending `#root` in front of the expression name. In the following expression, I can set a value depending on the baseLabel field: `${ #root.baseLabel ?: 'noBaseLabelValue' }`
## Helper properties in Spinnaker
There are a few attribute shortcuts that you can use in Spinnaker:
* `execution` : refers to the current pipeline execution.
* `parameters` : pipeline parameters
* `trigger` : pipeline trigger
* `scmInfo` : this is a shortcut to the git details of either the trigger or the last executed jenkins stage. `scmInfo.sha1` will give you the hash of the last build. `scmInfo.branch` will give you the branch name.
* `deployedServerGroups` : this is a reference to the server group that was created by the last deploy stage. Here is an example of the contents of a deployedServerGroups : ```[{account=prod,capacity={desired=1, max=1, min=1}, parentStage=23452655-c6de-4aac-b529-55e1357dfee7, region=us-west-1, ami=ami-575eb013, amiSuffix=201505190020, baseLabel=release, baseOs=trusty, package=rush, storeType=ebs, vmType=pv, serverGroup=rush-prestaging-v049}]```
# Helper functions in Spinnaker
The following section outlines the currently available helper functions in Spinnaker.
### #alphanumerical( String )
Returns the String value of a string passed in with non A-Z or digits stripped out. This is useful if you want to have a valid stack or details field in Spinnaker, as some providers don't allow special characters in them.
### #readJson(String)
Converts a JSON String into a Map that can then be processed further. This is useful if a jenkins property file has a complex value that you want to pass into Spinnaker. This is also used by the #jsonFromUrl( ) function under the hood.
```
"instanceType": "${#readJson('{\"prod\": \"r3.8xlarge\"}')[#root.parameters['environment']]}",
```
### #fromUrl(String)
Returns the contents of the specified URL as a String.
### #jsonFromUrl(String)
Retrieves the contents of the given URL and converts it into either a map or a list. This is useful to fetch information from the Spinnaker API or other sources.
### #judgment(String)
Gets the selected judgment value from the stage with the name asked for. I.e., `${#judgment('my manual judgment stage')}` will return the selected judgmentInput from the stage called `my manual judgment stage`.
### #propertiesFromUrl(String)
Retrieves the contents of the properties file at the given URL and converts it into a map. This is useful to fetch information from jenkins properties files or other similar endpoints.
### #stage(String)
A shortcut to get the stage by name. I.e, `${#stage( 'bake' )}`. Be careful that the values for the stage are still under the context, so accessing a property would be via `${#stage('bake')['context']['valuePassedIn']}`.
### #toBoolean(String)
Converts a value to a boolean.
### #toFloat(String)
Converts a value to a floating point number.
### #toInt( String )
Converts a value to an integer.
### #toJson(Object)
Converts a Map into a JSON String. This is useful when you want to pass values from Spinnaker to another system. For example, I can use ${#toJson( deployedServerGroups )} to pass the details of the recently deployed server groups to a jenkins job.
## Sample Use Cases
### Create a stack based on the committed git branch
In this use case, we're going to create a different cluster for each branch that gets triggered in Jenkins.
1. Set up a jenkins trigger for the pipeline.
2. Add a deploy stage and use an expression for my cluster name that uses the scmInfo value in the branch. `${ #alphanumerical( scmInfo.branch ) }`:

The deploy stage should look like this:

When the pipeline runs, it will replace the expression with the name of the branch, creating a new cluster:

### Conditionally execute pipeline behavior based on manual input
For this use case, we're going to set up a pipeline with 3 conditions, and gate these conditions with a manual judgment stage.
1. First, add a manual judgment stage and add three options.

2. Now that I have these inputs set up, I create a new precondition stage. I add an expression to check that the input was 'clean up'

My precondition stage should look like this.

> **Fail pipeline**
>
> For these type of workflows, you want to set *Fail Pipeline* as false when you're setting up your expression.
3. Add stages that you want to execute based on this precondition.
4. I do the same for the 'deploy another' input.
5. My pipeline now looks like this:

6. Go to the executions screen and run my pipeline. I should see the options show up in the Manual Judgment stage:

If I select 'do nothing', which matches no preconditions, my pipeline should just stop:

If I choose 'deploy another', I can see that the stages gated by deploy another are executed.

Similarly, you can see that when I chose 'cleaning up', that pipeline branch is the one that runs.

## Expression Autocomplete
Spinnaker has autocompleted functionality within the context of a pipeline configuration.
**TLDR;**
Here are the character triggers that will invoke the autocomplete
**$** = Adds the **${ }** to the text field and starts the expression building
**#** = Displays a list of all the **helper functions**
**?** = Displays a list of **helper parameters** and **stages** for the pipeline
In any text field, you can start autocompleting by typing a **dollar sign($)**.
Doing so will display this:

Hit enter and the opening and closing braces of the expression will be added to the text field.

### Helper Functions and Stages
Within the **curly braces ${ }**, adding a **question mark (?)** will display a list of all the helper properties that are relevant to the stage config that you are in as well as a list of all the stages in the pipeline.

Once the list is displayed you can start typing to narrow down the list and hitting enter will add the helper property or stage name to the expression.
If a helper property is added to the expression you can then hit any of the meta-keys (Shift, Command, Alt, Control) to pop up a list of all the pipeline config context relevant to that selected helper property.
Here is an example of what context of the 'deployedServerGroups' helper property will show.

The list will show the available context attributes on the left and the previous values highlighted on the right. The list can also be narrowed by typing a few characters, and selecting on by hitting **ENTER**.
### Helper Functions
Also within the **curly braces (${ })**, adding a **pound sign (#)** will display a list of all the helper functions that are available.
Here is an example of selecting the '#stage' helper function.

Selecting the function will add it to the expression and place the cursor between the single quotes

From here we can enter a **question mark (?)** to pull up the list of stages.

Once the stage name is selected it will be added to the expression.

From here you can move the cursor to the end of the closing paren of the function, and a list of context values for that stage will be presented for your choosing.

# Source Code
The expression language support can be found in the orca codebase and pretty much encapsulated in the [ContextParameterProcessor](https://github.com/spinnaker/orca/blob/master/orca-core/src/main/groovy/com/netflix/spinnaker/orca/pipeline/util/ContextParameterProcessor.groovy) class.
| {
"content_hash": "747286352c28f5f7a2c4c25f757759de",
"timestamp": "",
"source": "github",
"line_count": 448,
"max_line_length": 457,
"avg_line_length": 43.09375,
"alnum_prop": 0.7470734486688076,
"repo_name": "yabro/yabro.github.io",
"id": "44ba002571e20eb3b9b02879dbded5c8edaeddc1",
"size": "19312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "guides/user/pipeline-expressions/index.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "97843"
},
{
"name": "HTML",
"bytes": "1578309"
},
{
"name": "JavaScript",
"bytes": "53565"
},
{
"name": "Ruby",
"bytes": "10302"
},
{
"name": "Shell",
"bytes": "15321"
}
],
"symlink_target": ""
} |
'use strict';
angular.module('mgcrea.ngStrap.popover', ['mgcrea.ngStrap.tooltip'])
.provider('$popover', function() {
var defaults = this.defaults = {
animation: 'am-fade',
placement: 'right',
template: 'popover/popover.tpl.html',
contentTemplate: false,
trigger: 'click',
keyboard: true,
html: false,
title: '',
content: '',
delay: 0,
container: false
};
this.$get = function($tooltip) {
function PopoverFactory(element, config) {
// Common vars
var options = angular.extend({}, defaults, config);
var $popover = $tooltip(element, options);
// Support scope as string options [/*title, */content]
if(options.content) {
$popover.$scope.content = options.content;
}
return $popover;
}
return PopoverFactory;
};
})
// bsPopover attaches a popover (via the $popover service) to the host element.
// Options can come from individual data-attributes, from observed
// `title`/`content` attributes, or from a scope expression assigned to the
// `bs-popover` attribute itself.
.directive('bsPopover', function($window, $location, $sce, $popover) {

	// Fallback for environments without requestAnimationFrame support.
	var requestAnimationFrame = $window.requestAnimationFrame || $window.setTimeout;

	return {
		restrict: 'EAC',
		scope: true,
		link: function postLink(scope, element, attr) {

			// Directive options
			var options = {scope: scope};
			angular.forEach(['template', 'contentTemplate', 'placement', 'container', 'delay', 'trigger', 'keyboard', 'html', 'animation'], function(key) {
				if(angular.isDefined(attr[key])) options[key] = attr[key];
			});

			// Support scope as data-attrs
			// NOTE: `popover` is assigned further down; these callbacks only
			// fire after linking completes, and they guard with `popover &&`.
			angular.forEach(['title', 'content'], function(key) {
				attr[key] && attr.$observe(key, function(newValue, oldValue) {
					scope[key] = $sce.trustAsHtml(newValue);
					// Re-apply placement on the next frame after content changes.
					angular.isDefined(oldValue) && requestAnimationFrame(function() {
						popover && popover.$applyPlacement();
					});
				});
			});

			// Support scope as an object
			attr.bsPopover && scope.$watch(attr.bsPopover, function(newValue, oldValue) {
				// An object merges its keys into scope; any other value is the content.
				if(angular.isObject(newValue)) {
					angular.extend(scope, newValue);
				} else {
					scope.content = newValue;
				}
				angular.isDefined(oldValue) && requestAnimationFrame(function() {
					popover && popover.$applyPlacement();
				});
			}, true);

			// Initialize popover
			var popover = $popover(element, options);

			// Garbage collection
			scope.$on('$destroy', function() {
				popover.destroy();
				options = null;
				popover = null;
			});

		}
	};

});
| {
"content_hash": "ed0656068c769a05cab63838c7773575",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 151,
"avg_line_length": 28.13684210526316,
"alnum_prop": 0.5402169846614291,
"repo_name": "tdelev/web-proceedings",
"id": "546bf4cfabc4d6444bc3fa5e08430132e22bcada",
"size": "2673",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/main/webapp/resources/bower_components/angular-strap/src/popover/popover.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27198"
},
{
"name": "Java",
"bytes": "95551"
},
{
"name": "JavaScript",
"bytes": "19821"
}
],
"symlink_target": ""
} |
package mesosphere.marathon
package api.v2
import mesosphere.UnitTest
import mesosphere.marathon.raml._
class AppNormalizationTest extends UnitTest {
import Normalization._
"AppNormalization" should {
"normalize readiness checks" when {
  // A missing httpStatusCodesForReady must be defaulted to {200}.
  "readiness check does not specify status codes for ready" in {
    val check = ReadinessCheck()
    val normalized = AppNormalization.normalizeReadinessCheck(check)
    normalized should be(check.copy(httpStatusCodesForReady = Option(Set(200))))
  }
  // Explicitly provided status codes are left untouched.
  "readiness check does specify status codes for ready" in {
    val check = ReadinessCheck(httpStatusCodesForReady = Option(Set(203, 204, 205, 206)))
    val normalized = AppNormalization.normalizeReadinessCheck(check)
    normalized should be(check)
  }
}
"normalize health checks" when {
  // Shared behavior for every network-based protocol: a check without an
  // explicit port or portIndex is normalized to portIndex = 0, while checks
  // that already carry a port or a portIndex are left unchanged.
  def networkBasedHealthCheck(check: AppHealthCheck): Unit = {
    s"${check.protocol} health check does not contain port or port index" in {
      check.port should be('empty)
      check.portIndex should be('empty)
      val normalized = AppNormalization.normalizeHealthChecks.normalized(Set(check))
      normalized should be(Set(check.copy(portIndex = Option(0))))
    }
    s"${check.protocol} health check w/ port spec isn't normalized" in {
      val checkWithPort = check.copy(port = Option(88))
      checkWithPort.portIndex should be('empty)
      val normalized = AppNormalization.normalizeHealthChecks.normalized(Set(checkWithPort))
      normalized should be(Set(checkWithPort))
    }
    s"${check.protocol} health check w/ port index spec isn't normalized" in {
      val checkWithPort = check.copy(portIndex = Option(5))
      checkWithPort.port should be('empty)
      val normalized = AppNormalization.normalizeHealthChecks.normalized(Set(checkWithPort))
      normalized should be(Set(checkWithPort))
    }
  }
  behave like networkBasedHealthCheck(AppHealthCheck(protocol = AppHealthCheckProtocol.Http))
  behave like networkBasedHealthCheck(AppHealthCheck(protocol = AppHealthCheckProtocol.Https))
  behave like networkBasedHealthCheck(AppHealthCheck(protocol = AppHealthCheckProtocol.Tcp))
  behave like networkBasedHealthCheck(AppHealthCheck(protocol = AppHealthCheckProtocol.MesosHttp))
  behave like networkBasedHealthCheck(AppHealthCheck(protocol = AppHealthCheckProtocol.MesosHttps))
  behave like networkBasedHealthCheck(AppHealthCheck(protocol = AppHealthCheckProtocol.MesosTcp))
  // COMMAND checks have no network port, so normalization must not touch them.
  "COMMAND health check isn't changed" in {
    val check = AppHealthCheck(protocol = AppHealthCheckProtocol.Command)
    val normalized = AppNormalization.normalizeHealthChecks.normalized(Set(check))
    normalized should be(Set(check))
  }
}
"normalize fetch and uris fields" when {
  // `uris` is the legacy field: each uri is migrated into an Artifact with
  // extract = false. Existing `fetch` entries pass through unchanged, and
  // providing both non-empty fields is rejected.
  "uris are present and fetch is not" in {
    val urisNoFetch = AppNormalization.Artifacts(Option(Seq("a")), None).normalize.fetch
    val expected = Option(Seq(Artifact("a", extract = false)))
    urisNoFetch should be(expected)
  }
  "uris are present and fetch is an empty list" in {
    val urisEmptyFetch = AppNormalization.Artifacts(Option(Seq("a")), Option(Nil)).normalize.fetch
    val expected = Option(Seq(Artifact("a", extract = false)))
    urisEmptyFetch should be(expected)
  }
  "fetch is present and uris are not" in {
    val fetchNoUris = AppNormalization.Artifacts(None, Option(Seq(Artifact("a")))).normalize.fetch
    val expected = Option(Seq(Artifact("a")))
    fetchNoUris should be(expected)
  }
  "fetch is present and uris are an empty list" in {
    val fetchEmptyUris = AppNormalization.Artifacts(Option(Nil), Option(Seq(Artifact("a")))).normalize.fetch
    val expected = Option(Seq(Artifact("a")))
    fetchEmptyUris should be(expected)
  }
  "fetch and uris are both empty lists" in {
    val fetchEmptyUris = AppNormalization.Artifacts(Option(Nil), Option(Nil)).normalize.fetch
    val expected = Option(Nil)
    fetchEmptyUris should be(expected)
  }
  // Both fields non-empty is ambiguous and must raise a NormalizationException.
  "fetch and uris are both non-empty" in {
    a[NormalizationException] should be thrownBy {
      AppNormalization.Artifacts(Option(Seq("u")), Option(Seq(Artifact("a")))).normalize
    }
  }
}
// Builds the full App normalization used by these tests: deprecated-field
// migration first, then canonical normalization, both driven by the given
// default network name and mesos bridge name.
def normalizer(defaultNetworkName: Option[String] = None, mesosBridgeName: String = raml.Networks.DefaultMesosBridgeName) = {
  val config = AppNormalization.Configuration(defaultNetworkName, mesosBridgeName)
  Normalization[App] { app =>
    val migrated = AppNormalization.forDeprecated(config).normalized(app)
    AppNormalization(config).normalized(migrated)
  }
}
// Like normalizer, but for AppUpdate (partial update) payloads.
def updateNormalizer(defaultNetworkName: Option[String], mesosBridgeName: String = raml.Networks.DefaultMesosBridgeName) = {
  val config = AppNormalization.Configuration(defaultNetworkName, mesosBridgeName)
  Normalization[AppUpdate] { update =>
    val migrated = AppNormalization.forDeprecatedUpdates(config).normalized(update)
    AppNormalization.forUpdates(config).normalized(migrated)
  }
}
"migrate legacy port definitions and mappings to canonical form" when {
  // No default network configured: exercises plain port migration.
  implicit val appNormalizer = normalizer()
  // Verifies that an app declaring two port definitions but only one docker
  // port mapping has its mappings padded with `extraPort` after normalization.
  def normalizeMismatchedPortDefinitionsAndMappings(subcase: String, legacyf: Fixture => App, canonicalf: Fixture => App, extraPort: ContainerPortMapping) = {
    s"mismatched port defintions and port mappings are specified for a docker app ($subcase)" in new Fixture {
      val legacy: App = legacyf(this)
      // the whole point is to test migration when # of mappings != # of port definitions
      require(legacy.container.exists(_.docker.exists(_.portMappings.exists(_.size == 1))))
      val raw = legacy.copy(portDefinitions = Option(PortDefinitions(0, 0)))
      val result = raw.normalize
      val canonical: App = canonicalf(this)
      // Expect the canonical app, plus one extra (padded) port mapping.
      result should be(canonical.copy(container = canonical.container.map { ct =>
        ct.copy(portMappings = ct.portMappings.map { pm =>
          pm ++ Seq(extraPort)
        })
      }))
    }
  }
  behave like normalizeMismatchedPortDefinitionsAndMappings(
    "container-mode networking", _.legacyDockerApp, _.normalizedDockerApp, ContainerPortMapping())
  behave like normalizeMismatchedPortDefinitionsAndMappings(
    "bridge-mode networking",
    f => f.legacyDockerApp.copy(ipAddress = None, container = f.legacyDockerApp.container.map { ct =>
      ct.copy(docker = ct.docker.map { docker =>
        docker.copy(network = Some(DockerNetwork.Bridge))
      })
    }),
    _.normalizedDockerApp.copy(networks = Seq(Network(mode = NetworkMode.ContainerBridge))),
    ContainerPortMapping(0, hostPort = Option(0))
  )
}
"normalize a canonical app with a default network specified" when {
implicit val appNormalizer = normalizer(Some("default-network0"))
"normalization doesn't overwrite an existing network name" in new Fixture {
normalizedMesosApp.normalize should be(normalizedMesosApp)
}
}
"migrate ipAddress discovery to container port mappings with a default network specified" when {
val defaultNetworkName = Some("default-network0")
implicit val appNormalizer = normalizer(defaultNetworkName)
"using legacy docker networking API, without a named network" in new Fixture {
val normalized = legacyDockerApp.copy(ipAddress = Option(IpAddress())).normalize
normalized should be(normalizedDockerApp.copy(networks = Seq(Network(name = defaultNetworkName))))
}
"using legacy IP/CT networking API without a named network" in new Fixture {
legacyMesosApp.copy(ipAddress = legacyMesosApp.ipAddress.map(_.copy(
networkName = None))).normalize should be(normalizedMesosApp.copy(networks = Seq(Network(name = defaultNetworkName))))
}
"fails when ipAddress discovery ports and container port mappings are both specified" in new Fixture {
a[NormalizationException] should be thrownBy {
legacyMesosApp.copy(container = legacyMesosApp.container.map(_.copy(portMappings = Some(Nil)))).normalize
}
}
}
"migrate legacy network modes to canonical API" when {
implicit val appNormalizer = normalizer()
"legacy docker bridge app specifies the configured mesos CNI bridge" in {
val legacyDockerApp = App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(image = "image0", portMappings = Some(Nil)))
)),
ipAddress = Some(IpAddress(networkName = Some("mesos-bridge")))
)
val normalDockerApp = App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(image = "image0")),
portMappings = Some(Nil)
)),
networks = Seq(Network(mode = NetworkMode.ContainerBridge)),
unreachableStrategy = Some(UnreachableEnabled.Default)
)
legacyDockerApp.normalize should be(normalDockerApp)
}
"preserves networkNames field" in {
val legacyDockerApp = App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(
image = "image0",
portMappings = Some(Seq(
ContainerPortMapping(
containerPort = 80,
networkNames = List("1"))))))
)),
networks = Seq(Network(mode = NetworkMode.Container, name = Some("1")))
)
val Some(Seq(portMapping)) = legacyDockerApp.normalize.container.flatMap(_.portMappings)
portMapping shouldBe ContainerPortMapping(
containerPort = 80,
networkNames = List("1"))
}
"legacy docker app specifies ipAddress and HOST networking" in {
val legacyDockerApp = App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(network = Some(DockerNetwork.Host), image = "image0", portMappings = Some(Nil)))
)),
ipAddress = Some(IpAddress(networkName = Some("mesos-bridge")))
)
val normalDockerApp = App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(image = "image0"))
)),
networks = Seq(Network(mode = NetworkMode.Host)),
unreachableStrategy = Some(UnreachableEnabled.Default),
portDefinitions = Some(Apps.DefaultPortDefinitions)
)
legacyDockerApp.normalize should be(normalDockerApp)
}
"legacy docker app specifies ipAddress and BRIDGE networking" in {
val legacyDockerApp = App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(network = Some(DockerNetwork.Bridge), image = "image0", portMappings = Some(Nil)))
)),
ipAddress = Some(IpAddress(networkName = Some("my-bridge")))
)
val normalDockerApp = App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(image = "image0")),
portMappings = Some(Nil)
)),
networks = Seq(Network(mode = NetworkMode.ContainerBridge)),
unreachableStrategy = Some(UnreachableEnabled.Default)
)
legacyDockerApp.normalize should be(normalDockerApp)
}
"legacy docker app specifies NONE networking, with or without ipAddress" in {
a[NormalizationException] should be thrownBy {
App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(network = Some(DockerNetwork.None), image = "image0", portMappings = Some(Nil)))
)),
ipAddress = Some(IpAddress())
).normalize
}
a[NormalizationException] should be thrownBy {
App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(network = Some(DockerNetwork.None), image = "image0", portMappings = Some(Nil)))
))
).normalize
}
}
"legacy docker app specifies both legacy and canonical networking modes" in {
a[NormalizationException] should be thrownBy {
App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(network = Some(DockerNetwork.Host), image = "image0", portMappings = Some(Nil)))
)),
networks = Seq(Network(mode = NetworkMode.Host))
).normalize
}
a[NormalizationException] should be thrownBy {
App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(network = Some(DockerNetwork.Bridge), image = "image0", portMappings = Some(Nil)))
)),
networks = Seq(Network(mode = NetworkMode.ContainerBridge))
).normalize
}
a[NormalizationException] should be thrownBy {
App(
"/foo",
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(network = Some(DockerNetwork.User), image = "image0", portMappings = Some(Nil)))
)),
networks = Seq(Network(mode = NetworkMode.Container))
).normalize
}
}
}
"migrate ipAddress discovery to container port mappings without a default network specified" when {
implicit val appNormalizer = normalizer(None)
"using legacy docker networking API" in new Fixture {
val normalized = legacyDockerApp.normalize
normalized should be(normalizedDockerApp)
}
"using legacy docker networking API, without a named network" in new Fixture {
val ex = intercept[NormalizationException] {
legacyDockerApp.copy(ipAddress = Option(IpAddress())).normalize
}
ex.msg shouldBe NetworkNormalizationMessages.ContainerNetworkNameUnresolved
}
"using legacy docker networking API w/ extraneous ipAddress discovery ports" in new Fixture {
val ex = intercept[NormalizationException] {
legacyDockerApp.copy(ipAddress = legacyDockerApp.ipAddress.map(_.copy(discovery =
Option(IpDiscovery(
ports = Seq(IpDiscoveryPort(34, "port1"))
))
))).normalize
}
ex.getMessage should include("discovery.ports")
}
"using legacy IP/CT networking API" in new Fixture {
legacyMesosApp.normalize should be(normalizedMesosApp)
}
"using legacy IP/CT networking API without a named network" in new Fixture {
val ex = intercept[NormalizationException] {
legacyMesosApp.copy(ipAddress = legacyMesosApp.ipAddress.map(_.copy(
networkName = None))).normalize
}
ex.msg shouldBe NetworkNormalizationMessages.ContainerNetworkNameUnresolved
}
}
"not assign defaults for app update normalization" when {
implicit val appUpdateNormalizer = updateNormalizer(None)
"for an empty app update" in {
val raw = AppUpdate()
raw.normalize should be(raw)
}
"for an empty docker app update" in {
val raw = AppUpdate(
container = Option(Container(
`type` = EngineType.Docker,
docker = Option(DockerContainer(
image = "image0"
))
)),
networks = Option(Seq(Network(name = Some("whatever"))))
)
raw.normalize should be(raw)
}
}
"normalize requirePorts depending on network type" when {
implicit val appNormalizer = normalizer(None)
"app w/ non-host networking discards requirePorts" in new Fixture {
val raw = legacyMesosApp.copy(requirePorts = true)
raw.normalize should be(normalizedMesosApp)
}
"app w/ host networking preserves requirePorts" in new Fixture {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
networks = Apps.DefaultNetworks,
unreachableStrategy = Option(UnreachableEnabled.Default),
portDefinitions = Option(PortDefinitions(0)),
requirePorts = true
)
raw.normalize should be(raw)
}
}
"preserve user intent w/ respect to opting into and out of default ports" when {
implicit val appNormalizer = normalizer(None)
"inject default ports for an app w/ container networking but w/o a container" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
networks = Seq(Network(mode = NetworkMode.ContainerBridge)),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw.copy(container = Some(Container(
`type` = EngineType.Mesos,
portMappings = Option(Seq(
ContainerPortMapping(0, name = Some("default"), hostPort = Option(0))
))
))))
}
"allow a legacy docker bridge mode app to declare empty port mappings" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(
image = "image0",
network = Some(DockerNetwork.Bridge),
portMappings = Some(Nil)))
)),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw.copy(
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(image = "image0")),
portMappings = Some(Nil)
)),
networks = Seq(Network(mode = NetworkMode.ContainerBridge))
))
}
"allow a legacy docker bridge mode app to declare empty port mappings at both levels" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
container = Some(Container(
`type` = EngineType.Docker,
portMappings = Some(Nil),
docker = Some(DockerContainer(
image = "image0",
network = Some(DockerNetwork.Bridge),
portMappings = Some(Nil)))
)),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw.copy(
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(image = "image0")),
portMappings = Some(Nil)
)),
networks = Seq(Network(mode = NetworkMode.ContainerBridge))
))
}
"allow a legacy docker bridge mode app to declare port mappings at container level if legacy mappings are empty" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
container = Some(Container(
`type` = EngineType.Docker,
portMappings = Some(Seq(ContainerPortMapping())),
docker = Some(DockerContainer(
image = "image0",
network = Some(DockerNetwork.Bridge),
portMappings = Some(Nil)))
)),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw.copy(
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(image = "image0")),
portMappings = Some(Seq(ContainerPortMapping(hostPort = Some(0))))
)),
networks = Seq(Network(mode = NetworkMode.ContainerBridge))
))
}
"prevent a legacy docker bridge mode app from mixing empty and non-empty port mappings" in {
a[NormalizationException] should be thrownBy {
App(
id = "/foo",
cmd = Option("sleep"),
container = Some(Container(
`type` = EngineType.Docker,
portMappings = Some(Nil),
docker = Some(DockerContainer(
image = "image0",
network = Some(DockerNetwork.Bridge),
portMappings = Some(Seq(ContainerPortMapping()))))
)),
unreachableStrategy = Option(UnreachableEnabled.Default)
).normalize
}
}
"allow a mesos app to declare empty port mappings" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
container = Some(Container(
`type` = EngineType.Mesos,
portMappings = Option(Seq.empty
))),
networks = Seq(Network(mode = NetworkMode.ContainerBridge)),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw)
}
"provide default port mappings when left unspecified for an app container w/ bridge networking" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
container = Some(Container(
`type` = EngineType.Mesos
)),
networks = Seq(Network(mode = NetworkMode.ContainerBridge)),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw.copy(container = raw.container.map(_.copy(
portMappings = Option(Seq(ContainerPortMapping(hostPort = Option(0), name = Option("default"))))))))
}
"provide default port mappings when left unspecified for an app container w/ container networking" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
container = Some(Container(
`type` = EngineType.Mesos
)),
networks = Seq(Network(name = Option("network1"), mode = NetworkMode.Container)),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw.copy(container = raw.container.map(_.copy(
portMappings = Option(Seq(ContainerPortMapping(name = Option("default"))))))))
}
"allow an app to declare empty port definitions" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
portDefinitions = Option(Seq.empty),
networks = Apps.DefaultNetworks,
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw)
}
"provide a default port definition when no port definitions are specified" in {
val raw = App(
id = "/foo",
cmd = Option("sleep"),
networks = Apps.DefaultNetworks,
unreachableStrategy = Option(UnreachableEnabled.Default)
)
raw.normalize should be(raw.copy(portDefinitions = Option(Apps.DefaultPortDefinitions)))
}
}
}
// Shared test data: each legacy app (deprecated ipAddress / docker.network API)
// is paired with its expected canonical form after normalization.
private class Fixture {
// Docker app using the deprecated user-network + ipAddress fields.
val legacyDockerApp = App(
id = "/foo",
container = Option(Container(
`type` = EngineType.Docker,
docker = Option(DockerContainer(
network = Option(DockerNetwork.User),
image = "image0",
portMappings = Option(Seq(ContainerPortMapping(
containerPort = 1, hostPort = Option(2), servicePort = 3, name = Option("port0"), protocol = NetworkProtocol.Udp
)))
))
)),
ipAddress = Option(IpAddress(
networkName = Option("someUserNetwork")
))
)
// Canonical equivalent: mappings hoisted to the container level, network name
// moved into the top-level networks field, default unreachable strategy applied.
val normalizedDockerApp = App(
id = "/foo",
container = Option(Container(
`type` = EngineType.Docker,
docker = Option(DockerContainer(
image = "image0"
)),
portMappings = Option(Seq(ContainerPortMapping(
containerPort = 1, hostPort = Option(2), servicePort = 3, name = Option("port0"), protocol = NetworkProtocol.Udp
)))
)),
networks = Seq(Network(name = Option("someUserNetwork"))),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
// Mesos-containerizer app using the deprecated IP/CT API with discovery ports.
val legacyMesosApp = App(
id = "/foo",
container = Option(Container(
`type` = EngineType.Mesos,
docker = Option(DockerContainer(image = "image0"))
)),
ipAddress = Option(IpAddress(
networkName = Option("someUserNetwork"),
discovery = Option(IpDiscovery(
ports = Seq(IpDiscoveryPort(34, "port1", NetworkProtocol.Udp, labels = Map("VIP_0" -> "/namedvip:34")))
))
))
)
// Canonical equivalent: discovery ports become container port mappings
// (labels, protocol and name preserved).
val normalizedMesosApp = App(
id = "/foo",
container = Option(Container(
`type` = EngineType.Mesos,
docker = Option(DockerContainer(image = "image0")),
portMappings = Option(Seq(ContainerPortMapping(
containerPort = 34,
name = Option("port1"),
protocol = NetworkProtocol.Udp,
labels = Map("VIP_0" -> "/namedvip:34")
)))
)),
networks = Seq(Network(name = Option("someUserNetwork"))),
unreachableStrategy = Option(UnreachableEnabled.Default)
)
}
}
| {
"content_hash": "d7672b04e7d9eaaf436ef956d5c75793",
"timestamp": "",
"source": "github",
"line_count": 638,
"max_line_length": 162,
"avg_line_length": 40.009404388714735,
"alnum_prop": 0.6152942098252762,
"repo_name": "guenter/marathon",
"id": "1b100d630cfd9905cd82ecd08b100b956d339ac5",
"size": "25526",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/test/scala/mesosphere/marathon/api/v2/AppNormalizationTest.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "59270"
},
{
"name": "Groovy",
"bytes": "14255"
},
{
"name": "HTML",
"bytes": "502"
},
{
"name": "Java",
"bytes": "778"
},
{
"name": "Makefile",
"bytes": "4005"
},
{
"name": "Python",
"bytes": "169779"
},
{
"name": "Ruby",
"bytes": "772"
},
{
"name": "Scala",
"bytes": "4280011"
},
{
"name": "Shell",
"bytes": "41549"
}
],
"symlink_target": ""
} |
package org.apache.spark.sql
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.SchemaRequiredDataSource
import org.apache.spark.sql.connector.catalog.InMemoryPartitionTableCatalog
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.SimpleInsertSource
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
import org.apache.spark.sql.types._
// The base trait for char/varchar tests that need to be run with different table implementations.
trait CharVarcharTestSuite extends QueryTest with SQLTestUtils {
// Table provider under test (e.g. a file source or a catalog), supplied by concrete suites.
def format: String
/**
  * Verifies that a char/varchar column reads back as STRING while the original
  * char/varchar type is still recoverable from the field's metadata.
  */
def checkColType(f: StructField, dt: DataType): Unit = {
val expectedReadType = CharVarcharUtils.replaceCharVarcharWithString(dt)
assert(f.dataType == expectedReadType)
assert(CharVarcharUtils.getRawType(f.metadata) == Some(dt))
}
def checkPlainResult(df: DataFrame, dt: String, insertVal: String): Unit = {
val dataType = CatalystSqlParser.parseDataType(dt)
checkColType(df.schema(1), dataType)
dataType match {
case CharType(len) =>
// char value will be padded if (<= len) or trimmed if (> len)
val fixLenStr = if (insertVal != null) {
insertVal.take(len).padTo(len, " ").mkString
} else null
checkAnswer(df, Row("1", fixLenStr))
case VarcharType(len) =>
// varchar value will be remained if (<= len) or trimmed if (> len)
val varLenStrWithUpperBound = if (insertVal != null) {
insertVal.take(len)
} else null
checkAnswer(df, Row("1", varLenStrWithUpperBound))
}
}
test("apply char padding/trimming and varchar trimming: top-level columns") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format")
(0 to 5).map(n => "a" + " " * n).foreach { v =>
sql(s"INSERT OVERWRITE t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), typ, v)
}
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkPlainResult(spark.table("t"), typ, null)
}
}
}
test("char type values should be padded or trimmed: partitioned columns") {
// via dynamic partitioned columns
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c CHAR(5)) USING $format PARTITIONED BY (c)")
(0 to 5).map(n => "a" + " " * n).foreach { v =>
sql(s"INSERT OVERWRITE t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), "CHAR(5)", v)
}
}
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c CHAR(5)) USING $format PARTITIONED BY (c)")
(0 to 5).map(n => "a" + " " * n).foreach { v =>
// via dynamic partitioned columns with drop partition command
sql(s"INSERT INTO t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), "CHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='a')")
checkAnswer(spark.table("t"), Nil)
// via static partitioned columns with drop partition command
sql(s"INSERT INTO t PARTITION (c ='$v') VALUES ('1')")
checkPlainResult(spark.table("t"), "CHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='a')")
checkAnswer(spark.table("t"), Nil)
}
}
}
test("char type values should not be padded when charVarcharAsString is true") {
withSQLConf(SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key -> "true") {
withTable("t") {
sql(s"CREATE TABLE t(a STRING, b CHAR(5), c CHAR(5)) USING $format partitioned by (c)")
sql("INSERT INTO t VALUES ('abc', 'abc', 'abc')")
checkAnswer(sql("SELECT b FROM t WHERE b='abc'"), Row("abc"))
checkAnswer(sql("SELECT b FROM t WHERE b in ('abc')"), Row("abc"))
checkAnswer(sql("SELECT c FROM t WHERE c='abc'"), Row("abc"))
checkAnswer(sql("SELECT c FROM t WHERE c in ('abc')"), Row("abc"))
}
}
}
test("varchar type values length check and trim: partitioned columns") {
(0 to 5).foreach { n =>
// SPARK-34192: we need to create a a new table for each round of test because of
// trailing spaces in partition column will be treated differently.
// This is because Mysql and Derby(used in tests) considers 'a' = 'a '
// whereas others like (Postgres, Oracle) doesn't exhibit this problem.
// see more at:
// https://issues.apache.org/jira/browse/HIVE-13618
// https://issues.apache.org/jira/browse/SPARK-34192
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c VARCHAR(5)) USING $format PARTITIONED BY (c)")
val v = "a" + " " * n
// via dynamic partitioned columns
sql(s"INSERT INTO t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), "VARCHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='$v')")
checkAnswer(spark.table("t"), Nil)
// via static partitioned columns
sql(s"INSERT INTO t PARTITION (c='$v') VALUES ('1')")
checkPlainResult(spark.table("t"), "VARCHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='$v')")
checkAnswer(spark.table("t"), Nil)
}
}
}
test("oversize char/varchar values for alter table partition operations") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format PARTITIONED BY (c)")
Seq("ADD", "DROP").foreach { op =>
val e = intercept[RuntimeException](sql(s"ALTER TABLE t $op PARTITION(c='abcdef')"))
assert(e.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
val e1 = intercept[RuntimeException] {
sql(s"ALTER TABLE t PARTITION (c='abcdef') RENAME TO PARTITION (c='2')")
}
assert(e1.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[RuntimeException] {
sql(s"ALTER TABLE t PARTITION (c='1') RENAME TO PARTITION (c='abcdef')")
}
assert(e2.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
}
}
test("SPARK-34233: char/varchar with null value for partitioned columns") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format PARTITIONED BY (c)")
sql("INSERT INTO t VALUES ('1', null)")
checkPlainResult(spark.table("t"), typ, null)
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkPlainResult(spark.table("t"), typ, null)
sql("INSERT OVERWRITE t PARTITION (c=null) VALUES ('1')")
checkPlainResult(spark.table("t"), typ, null)
sql("ALTER TABLE t DROP PARTITION(c=null)")
checkAnswer(spark.table("t"), Nil)
}
}
}
test("char/varchar type values length check: partitioned columns of other types") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format PARTITIONED BY (c)")
Seq(1, 10, 100, 1000, 10000).foreach { v =>
sql(s"INSERT OVERWRITE t VALUES ('1', $v)")
checkPlainResult(spark.table("t"), typ, v.toString)
sql(s"ALTER TABLE t DROP PARTITION(c=$v)")
checkAnswer(spark.table("t"), Nil)
}
val e1 = intercept[SparkException](sql(s"INSERT OVERWRITE t VALUES ('1', 100000)"))
assert(e1.getCause.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[RuntimeException](sql("ALTER TABLE t DROP PARTITION(c=100000)"))
assert(e2.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
}
}
test("char type values should be padded: nested in struct") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c STRUCT<c: CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES ('1', struct('a'))")
checkAnswer(spark.table("t"), Row("1", Row("a" + " " * 4)))
checkColType(spark.table("t").schema(1), new StructType().add("c", CharType(5)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', struct(null))")
checkAnswer(spark.table("t"), Row("1", Row(null)))
}
}
test("char type values should be padded: nested in array") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c ARRAY<CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES ('1', array('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Seq("a" + " " * 4, "ab" + " " * 3)))
checkColType(spark.table("t").schema(1), ArrayType(CharType(5)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', array(null))")
checkAnswer(spark.table("t"), Row("1", Seq(null)))
}
}
test("char type values should be padded: nested in map key") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c MAP<CHAR(5), STRING>) USING $format")
sql("INSERT INTO t VALUES ('1', map('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Map(("a" + " " * 4, "ab"))))
checkColType(spark.table("t").schema(1), MapType(CharType(5), StringType))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
}
}
test("char type values should be padded: nested in map value") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c MAP<STRING, CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES ('1', map('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Map(("a", "ab" + " " * 3))))
checkColType(spark.table("t").schema(1), MapType(StringType, CharType(5)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', map('a', null))")
checkAnswer(spark.table("t"), Row("1", Map("a" -> null)))
}
}
test("char type values should be padded: nested in both map key and value") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c MAP<CHAR(5), CHAR(10)>) USING $format")
sql("INSERT INTO t VALUES ('1', map('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Map(("a" + " " * 4, "ab" + " " * 8))))
checkColType(spark.table("t").schema(1), MapType(CharType(5), CharType(10)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
}
}
test("char type values should be padded: nested in struct of array") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c STRUCT<c: ARRAY<CHAR(5)>>) USING $format")
sql("INSERT INTO t VALUES ('1', struct(array('a', 'ab')))")
checkAnswer(spark.table("t"), Row("1", Row(Seq("a" + " " * 4, "ab" + " " * 3))))
checkColType(spark.table("t").schema(1),
new StructType().add("c", ArrayType(CharType(5))))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', struct(null))")
checkAnswer(spark.table("t"), Row("1", Row(null)))
sql("INSERT OVERWRITE t VALUES ('1', struct(array(null)))")
checkAnswer(spark.table("t"), Row("1", Row(Seq(null))))
}
}
test("char type values should be padded: nested in array of struct") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c ARRAY<STRUCT<c: CHAR(5)>>) USING $format")
sql("INSERT INTO t VALUES ('1', array(struct('a'), struct('ab')))")
checkAnswer(spark.table("t"), Row("1", Seq(Row("a" + " " * 4), Row("ab" + " " * 3))))
checkColType(spark.table("t").schema(1),
ArrayType(new StructType().add("c", CharType(5))))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', array(null))")
checkAnswer(spark.table("t"), Row("1", Seq(null)))
sql("INSERT OVERWRITE t VALUES ('1', array(struct(null)))")
checkAnswer(spark.table("t"), Row("1", Seq(Row(null))))
}
}
test("char type values should be padded: nested in array of array") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c ARRAY<ARRAY<CHAR(5)>>) USING $format")
sql("INSERT INTO t VALUES ('1', array(array('a', 'ab')))")
checkAnswer(spark.table("t"), Row("1", Seq(Seq("a" + " " * 4, "ab" + " " * 3))))
checkColType(spark.table("t").schema(1), ArrayType(ArrayType(CharType(5))))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', array(null))")
checkAnswer(spark.table("t"), Row("1", Seq(null)))
sql("INSERT OVERWRITE t VALUES ('1', array(array(null)))")
checkAnswer(spark.table("t"), Row("1", Seq(Seq(null))))
}
}
// Runs the given write-test body once per char-like type name, each in a fresh table "t".
private def testTableWrite(f: String => Unit): Unit = {
Seq("char", "varchar").foreach { typeName =>
withTable("t") { f(typeName) }
}
}
test("length check for input string values: top-level columns") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c $typeName(5)) USING $format")
sql("INSERT INTO t VALUES (null)")
checkAnswer(spark.table("t"), Row(null))
val e = intercept[SparkException](sql("INSERT INTO t VALUES ('123456')"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: partitioned columns") {
// DS V2 doesn't support partitioned table.
if (!conf.contains(SQLConf.DEFAULT_CATALOG.key)) {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(i INT, c $typeName(5)) USING $format PARTITIONED BY (c)")
sql("INSERT INTO t VALUES (1, null)")
checkAnswer(spark.table("t"), Row(1, null))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (1, '123456')"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
}
test("length check for input string values: nested in struct") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c STRUCT<c: $typeName(5)>) USING $format")
sql("INSERT INTO t SELECT struct(null)")
checkAnswer(spark.table("t"), Row(Row(null)))
val e = intercept[RuntimeException](sql("INSERT INTO t SELECT struct('123456')"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in array") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c ARRAY<$typeName(5)>) USING $format")
sql("INSERT INTO t VALUES (array(null))")
checkAnswer(spark.table("t"), Row(Seq(null)))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (array('a', '123456'))"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in map key") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c MAP<$typeName(5), STRING>) USING $format")
val e = intercept[SparkException](sql("INSERT INTO t VALUES (map('123456', 'a'))"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in map value") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c MAP<STRING, $typeName(5)>) USING $format")
sql("INSERT INTO t VALUES (map('a', null))")
checkAnswer(spark.table("t"), Row(Map("a" -> null)))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (map('a', '123456'))"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in both map key and value") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c MAP<$typeName(5), $typeName(5)>) USING $format")
val e1 = intercept[SparkException](sql("INSERT INTO t VALUES (map('123456', 'a'))"))
assert(e1.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[SparkException](sql("INSERT INTO t VALUES (map('a', '123456'))"))
assert(e2.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in struct of array") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c STRUCT<c: ARRAY<$typeName(5)>>) USING $format")
sql("INSERT INTO t SELECT struct(array(null))")
checkAnswer(spark.table("t"), Row(Row(Seq(null))))
val e = intercept[SparkException](sql("INSERT INTO t SELECT struct(array('123456'))"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in array of struct") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c ARRAY<STRUCT<c: $typeName(5)>>) USING $format")
sql("INSERT INTO t VALUES (array(struct(null)))")
checkAnswer(spark.table("t"), Row(Seq(Row(null))))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (array(struct('123456')))"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in array of array") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c ARRAY<ARRAY<$typeName(5)>>) USING $format")
sql("INSERT INTO t VALUES (array(array(null)))")
checkAnswer(spark.table("t"), Row(Seq(Seq(null))))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (array(array('123456')))"))
assert(e.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: with trailing spaces") {
withTable("t") {
sql(s"CREATE TABLE t(c1 CHAR(5), c2 VARCHAR(5)) USING $format")
sql("INSERT INTO t VALUES ('12 ', '12 ')")
sql("INSERT INTO t VALUES ('1234 ', '1234 ')")
checkAnswer(spark.table("t"), Seq(
Row("12" + " " * 3, "12 "),
Row("1234 ", "1234 ")))
}
}
test("length check for input string values: with implicit cast") {
withTable("t") {
sql(s"CREATE TABLE t(c1 CHAR(5), c2 VARCHAR(5)) USING $format")
sql("INSERT INTO t VALUES (1234, 1234)")
checkAnswer(spark.table("t"), Row("1234 ", "1234"))
val e1 = intercept[SparkException](sql("INSERT INTO t VALUES (123456, 1)"))
assert(e1.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[SparkException](sql("INSERT INTO t VALUES (1, 123456)"))
assert(e2.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
}
// Evaluates each (expression, expected boolean) pair against `df` in a single
// projection and checks the whole result row at once.
private def testConditions(df: DataFrame, conditions: Seq[(String, Boolean)]): Unit = {
  val (exprs, expected) = conditions.unzip
  checkAnswer(df.selectExpr(exprs: _*), Row.fromSeq(expected))
}
// CHAR comparison semantics: string literals are padded to the column width,
// so 'a' and 'a    ' compare equal to a CHAR(2) value, and two CHAR columns
// of different declared widths compare equal after padding.
test("char type comparison: top-level columns") {
withTable("t") {
sql(s"CREATE TABLE t(c1 CHAR(2), c2 CHAR(5)) USING $format")
sql("INSERT INTO t VALUES ('a', 'a')")
testConditions(spark.table("t"), Seq(
("c1 = 'a'", true),
("'a' = c1", true),
("c1 = 'a '", true),
("c1 > 'a'", false),
("c1 IN ('a', 'b')", true),
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true),
// null-safe equality against null is false for a non-null value
("c1 <=> null", false)))
}
}
// The same padded-comparison semantics must hold when the CHAR columns are
// partition columns (values flow through the partition-handling code path).
test("char type comparison: partitioned columns") {
withTable("t") {
sql(s"CREATE TABLE t(i INT, c1 CHAR(2), c2 CHAR(5)) USING $format PARTITIONED BY (c1, c2)")
sql("INSERT INTO t VALUES (1, 'a', 'a')")
testConditions(spark.table("t"), Seq(
("c1 = 'a'", true),
("'a' = c1", true),
("c1 = 'a '", true),
("c1 > 'a'", false),
("c1 IN ('a', 'b')", true),
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true),
("c1 <=> null", false)))
}
}
// Checks that every given expression evaluates to SQL NULL against `df`.
private def testNullConditions(df: DataFrame, conditions: Seq[String]): Unit = {
  for (cond <- conditions) {
    checkAnswer(df.selectExpr(cond), Row(null))
  }
}
// SPARK-34233: comparing a CHAR column with NULL must yield NULL (not false),
// both for regular columns and for partition columns.
test("SPARK-34233: char type comparison with null values") {
val conditions = Seq("c = null", "c IN ('e', null)", "c IN (null)")
withTable("t") {
sql(s"CREATE TABLE t(c CHAR(2)) USING $format")
sql("INSERT INTO t VALUES ('a')")
testNullConditions(spark.table("t"), conditions)
}
// Same checks with the CHAR column as a partition column.
withTable("t") {
sql(s"CREATE TABLE t(i INT, c CHAR(2)) USING $format PARTITIONED BY (c)")
sql("INSERT INTO t VALUES (1, 'a')")
testNullConditions(spark.table("t"), conditions)
}
}
// Partition pruning must use the same padded CHAR / untouched VARCHAR
// comparison semantics as regular filters: each predicate is compared against
// filtering by its expected constant boolean result.
// NOTE(review): trailing spaces inside the predicate literals are significant.
test("char type comparison: partition pruning") {
withTable("t") {
sql(s"CREATE TABLE t(i INT, c1 CHAR(2), c2 VARCHAR(5)) USING $format PARTITIONED BY (c1, c2)")
sql("INSERT INTO t VALUES (1, 'a', 'a')")
Seq(("c1 = 'a'", true),
("'a' = c1", true),
("c1 = 'a '", true),
("c1 > 'a'", false),
("c1 IN ('a', 'b')", true),
// VARCHAR does not pad: 'a ' does not match the stored 'a'
("c2 = 'a '", false),
("c2 = 'a'", true),
("c2 IN ('a', 'b')", true)).foreach { case (con, res) =>
val df = spark.table("t")
withClue(con) {
// where(res.toString) is WHERE true / WHERE false — the expected row set.
checkAnswer(df.where(con), df.where(res.toString))
}
}
}
}
test("char type comparison: join") {
  withTable("t1", "t2") {
    // Two CHAR columns of different widths must join on padded equality.
    Seq("t1" -> "CHAR(2)", "t2" -> "CHAR(5)").foreach { case (table, typ) =>
      sql(s"CREATE TABLE $table(c $typ) USING $format")
      sql(s"INSERT INTO $table VALUES ('a')")
    }
    // The projected t1.c reads back padded to its declared width of 2.
    checkAnswer(sql("SELECT t1.c FROM t1 JOIN t2 ON t1.c = t2.c"), Row("a "))
  }
}
// Padded CHAR comparison must also apply to CHAR fields nested in a struct.
test("char type comparison: nested in struct") {
withTable("t") {
sql(s"CREATE TABLE t(c1 STRUCT<c: CHAR(2)>, c2 STRUCT<c: CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES (struct('a'), struct('a'))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
// Padded CHAR comparison must also apply to CHAR elements nested in an array.
test("char type comparison: nested in array") {
withTable("t") {
sql(s"CREATE TABLE t(c1 ARRAY<CHAR(2)>, c2 ARRAY<CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES (array('a', 'b'), array('a', 'b'))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
// Padded CHAR comparison must also apply when nested as struct-of-array.
test("char type comparison: nested in struct of array") {
withTable("t") {
sql("CREATE TABLE t(c1 STRUCT<a: ARRAY<CHAR(2)>>, c2 STRUCT<a: ARRAY<CHAR(5)>>) " +
s"USING $format")
sql("INSERT INTO t VALUES (struct(array('a', 'b')), struct(array('a', 'b')))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
// Padded CHAR comparison must also apply when nested as array-of-struct.
test("char type comparison: nested in array of struct") {
withTable("t") {
sql("CREATE TABLE t(c1 ARRAY<STRUCT<c: CHAR(2)>>, c2 ARRAY<STRUCT<c: CHAR(5)>>) " +
s"USING $format")
sql("INSERT INTO t VALUES (array(struct('a')), array(struct('a')))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
// Padded CHAR comparison must also apply when nested two arrays deep.
test("char type comparison: nested in array of array") {
withTable("t") {
sql("CREATE TABLE t(c1 ARRAY<ARRAY<CHAR(2)>>, c2 ARRAY<ARRAY<CHAR(5)>>) " +
s"USING $format")
sql("INSERT INTO t VALUES (array(array('a')), array(array('a')))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
test("SPARK-33892: DESCRIBE TABLE w/ char/varchar") {
  withTable("t") {
    sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
    // DESCRIBE must report the declared char/varchar types, not plain string.
    val charTypeRows = sql("desc t").selectExpr("data_type").where("data_type like '%char%'")
    checkAnswer(charTypeRows, Seq(Row("char(5)"), Row("varchar(3)")))
  }
}
test("SPARK-34003: fix char/varchar fails w/ both group by and order by ") {
  withTable("t") {
    sql(s"CREATE TABLE t(v VARCHAR(3), i INT) USING $format")
    sql("INSERT INTO t VALUES ('c', 1)")
    // Grouping and ordering on the same varchar column must not fail analysis.
    val grouped = sql("SELECT v, sum(i) FROM t GROUP BY v ORDER BY v")
    checkAnswer(grouped, Row("c", 1))
  }
}
test("SPARK-34003: fix char/varchar fails w/ order by functions") {
  withTable("t") {
    sql(s"CREATE TABLE t(v VARCHAR(3), i INT) USING $format")
    sql("INSERT INTO t VALUES ('c', 1)")
    // Ordering by a function of the grouped varchar column must not fail,
    // whether or not the column itself appears in the projection.
    Seq(
      "SELECT substr(v, 1, 2), sum(i) FROM t GROUP BY v ORDER BY substr(v, 1, 2)" -> Row("c", 1),
      "SELECT sum(i) FROM t GROUP BY v ORDER BY substr(v, 1, 2)" -> Row(1)
    ).foreach { case (query, expected) =>
      checkAnswer(sql(query), expected)
    }
  }
}
// SPARK-34114: at write time, varchar values keep only the trailing spaces that
// fit within the declared length; the rest are stripped rather than rejected.
// NOTE(review): the exact trailing-space counts in the literals below are
// significant — do not reformat these strings.
test("SPARK-34114: varchar type will strip tailing spaces to certain length at write time") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3)) USING $format")
sql("INSERT INTO t VALUES ('c ')")
checkAnswer(spark.table("t"), Row("c "))
}
}
// SPARK-34114: at read time, varchar values are returned as stored — trailing
// spaces within the declared length are preserved, with no extra trimming.
// NOTE(review): the exact trailing-space counts in the literals below are
// significant — do not reformat these strings.
test("SPARK-34114: varchar type will remain the value length with spaces at read time") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3)) USING $format")
sql("INSERT INTO t VALUES ('c ')")
checkAnswer(spark.table("t"), Row("c "))
}
}
// SPARK-34833: CHAR right-padding must be applied to join keys inside
// correlated subqueries, so columns/literals of different widths still match.
// NOTE(review): trailing spaces inside the predicate literals are significant.
test("SPARK-34833: right-padding applied correctly for correlated subqueries - join keys") {
withTable("t1", "t2") {
sql(s"CREATE TABLE t1(v VARCHAR(3), c CHAR(5)) USING $format")
sql(s"CREATE TABLE t2(v VARCHAR(5), c CHAR(8)) USING $format")
sql("INSERT INTO t1 VALUES ('c', 'b')")
sql("INSERT INTO t2 VALUES ('a', 'b')")
// Every padded variant of the CHAR predicate must select the same rows.
Seq("t1.c = t2.c", "t2.c = t1.c",
"t1.c = 'b'", "'b' = t1.c", "t1.c = 'b '", "'b ' = t1.c",
"t1.c = 'b '", "'b ' = t1.c").foreach { predicate =>
checkAnswer(sql(
s"""
|SELECT v FROM t1
|WHERE 'a' IN (SELECT v FROM t2 WHERE $predicate)
""".stripMargin),
Row("c"))
}
}
}
// SPARK-34833: CHAR right-padding must also be applied to non-join predicates
// inside correlated subqueries (equality, IN, column-to-column comparisons).
// NOTE(review): trailing spaces inside the predicate literals are significant.
test("SPARK-34833: right-padding applied correctly for correlated subqueries - other preds") {
withTable("t") {
sql(s"CREATE TABLE t(c0 INT, c1 CHAR(5), c2 CHAR(7)) USING $format")
sql("INSERT INTO t VALUES (1, 'abc', 'abc')")
Seq("c1 = 'abc'", "'abc' = c1", "c1 = 'abc '", "'abc ' = c1",
"c1 = 'abc '", "'abc ' = c1", "c1 = c2", "c2 = c1",
"c1 IN ('xxx', 'abc', 'xxxxx')", "c1 IN ('xxx', 'abc ', 'xxxxx')",
"c1 IN ('xxx', 'abc ', 'xxxxx')",
"c1 IN (c2)", "c2 IN (c1)").foreach { predicate =>
// The correlated count subquery must find the row under each predicate form.
checkAnswer(sql(
s"""
|SELECT c0 FROM t t1
|WHERE (
| SELECT count(*) AS c
| FROM t
| WHERE c0 = t1.c0 AND $predicate
|) > 0
""".stripMargin),
Row(1))
}
}
}
test("SPARK-35359: create table and insert data over length values") {
  // With the legacy flag on, char/varchar degrade to plain string, so
  // over-length values are stored verbatim instead of being rejected.
  for (typ <- Seq("char", "varchar")) {
    withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
      withTable("t") {
        sql(s"CREATE TABLE t (col $typ(2)) using $format")
        sql("INSERT INTO t SELECT 'aaa'")
        checkAnswer(sql("select * from t"), Row("aaa"))
      }
    }
  }
}
}
// Some basic char/varchar tests that don't rely on a table implementation.
class BasicCharVarcharTestSuite extends QueryTest with SharedSparkSession {
import testImplicits._
// CAST to char/varchar is treated as a cast to string, with a warning logged once.
test("user-specified schema in cast") {
// The cast result must be a plain StringType, never CharType.
def assertNoCharType(df: DataFrame): Unit = {
checkAnswer(df, Row("0"))
assert(df.schema.map(_.dataType) == Seq(StringType))
}
val logAppender = new LogAppender("The Spark cast operator does not support char/varchar" +
" type and simply treats them as string type. Please use string type directly to avoid" +
" confusion.")
withLogAppender(logAppender) {
assertNoCharType(spark.range(1).select($"id".cast("char(5)")))
assertNoCharType(spark.range(1).select($"id".cast(CharType(5))))
assertNoCharType(spark.range(1).selectExpr("CAST(id AS CHAR(5))"))
assertNoCharType(sql("SELECT CAST(id AS CHAR(5)) FROM range(1)"))
}
}
// Helper: `fn` must be rejected with the "only in table schema" analysis error.
def failWithInvalidCharUsage[T](fn: => T): Unit = {
val e = intercept[AnalysisException](fn)
assert(e.getMessage contains "char/varchar type can only be used in the table schema")
}
// char/varchar in a function's schema string fails unless the legacy flag is on,
// in which case it is silently read as string.
test("invalidate char/varchar in functions") {
failWithInvalidCharUsage(sql("""SELECT from_json('{"a": "str"}', 'a CHAR(5)')"""))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
val df = sql("""SELECT from_json('{"a": "str"}', 'a CHAR(5)')""")
checkAnswer(df, Row(Row("str")))
val schema = df.schema.head.dataType.asInstanceOf[StructType]
assert(schema.map(_.dataType) == Seq(StringType))
}
}
// char/varchar in a createDataFrame schema fails for all three overloads unless
// the legacy flag is on.
test("invalidate char/varchar in SparkSession createDataframe") {
val df = spark.range(10).map(_.toString).toDF()
val schema = new StructType().add("id", CharType(5))
failWithInvalidCharUsage(spark.createDataFrame(df.collectAsList(), schema))
failWithInvalidCharUsage(spark.createDataFrame(df.rdd, schema))
failWithInvalidCharUsage(spark.createDataFrame(df.toJavaRDD, schema))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
val df1 = spark.createDataFrame(df.collectAsList(), schema)
checkAnswer(df1, df)
assert(df1.schema.head.dataType === StringType)
}
}
// char/varchar in a user-specified read schema fails unless the legacy flag is
// on; with the flag it is replaced by string for both DSV1 and DSV2 sources.
test("invalidate char/varchar in spark.read.schema") {
failWithInvalidCharUsage(spark.read.schema(new StructType().add("id", CharType(5))))
failWithInvalidCharUsage(spark.read.schema("id char(5)"))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
val ds = spark.range(10).map(_.toString)
val df1 = spark.read.schema(new StructType().add("id", CharType(5))).csv(ds)
assert(df1.schema.map(_.dataType) == Seq(StringType))
val df2 = spark.read.schema("id char(5)").csv(ds)
assert(df2.schema.map(_.dataType) == Seq(StringType))
// The relation produced by the source must also see string, not char.
def checkSchema(df: DataFrame): Unit = {
val schemas = df.queryExecution.analyzed.collect {
case l: LogicalRelation => l.relation.schema
case d: DataSourceV2Relation => d.table.schema()
}
assert(schemas.length == 1)
assert(schemas.head.map(_.dataType) == Seq(StringType))
}
// user-specified schema in DataFrameReader: DSV1
checkSchema(spark.read.schema(new StructType().add("id", CharType(5)))
.format(classOf[SimpleInsertSource].getName).load())
checkSchema(spark.read.schema("id char(5)")
.format(classOf[SimpleInsertSource].getName).load())
// user-specified schema in DataFrameReader: DSV2
checkSchema(spark.read.schema(new StructType().add("id", CharType(5)))
.format(classOf[SchemaRequiredDataSource].getName).load())
checkSchema(spark.read.schema("id char(5)")
.format(classOf[SchemaRequiredDataSource].getName).load())
}
}
// char/varchar as a UDF result type fails unless the legacy flag is on, in
// which case the UDF is registered with a string result type.
test("invalidate char/varchar in udf's result type") {
failWithInvalidCharUsage(spark.udf.register("testchar", () => "B", VarcharType(1)))
failWithInvalidCharUsage(spark.udf.register("testchar2", (x: String) => x, VarcharType(1)))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
spark.udf.register("testchar", () => "B", VarcharType(1))
spark.udf.register("testchar2", (x: String) => x, VarcharType(1))
val df1 = spark.sql("select testchar()")
checkAnswer(df1, Row("B"))
assert(df1.schema.head.dataType === StringType)
val df2 = spark.sql("select testchar2('abc')")
checkAnswer(df2, Row("abc"))
assert(df2.schema.head.dataType === StringType)
}
}
// char/varchar in a streaming read schema fails unless the legacy flag is on.
test("invalidate char/varchar in spark.readStream.schema") {
failWithInvalidCharUsage(spark.readStream.schema(new StructType().add("id", CharType(5))))
failWithInvalidCharUsage(spark.readStream.schema("id char(5)"))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
withTempPath { dir =>
spark.range(2).write.save(dir.toString)
val df1 = spark.readStream.schema(new StructType().add("id", CharType(5)))
.load(dir.toString)
assert(df1.schema.map(_.dataType) == Seq(StringType))
val df2 = spark.readStream.schema("id char(5)").load(dir.toString)
assert(df2.schema.map(_.dataType) == Seq(StringType))
}
}
}
}
// Runs the shared char/varchar suite against the V1 parquet file source, plus
// file-source-specific cases: data written externally (via LOCATION or
// SET LOCATION) bypasses the write-side length check entirely.
class FileSourceCharVarcharTestSuite extends CharVarcharTestSuite with SharedSparkSession {
override def format: String = "parquet"
override protected def sparkConf: SparkConf = {
super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
}
// In-limit external data: CHAR is read back padded, VARCHAR as stored.
test("create table w/ location and fit length values") {
withTempPath { dir =>
withTable("t") {
sql("SELECT '12' as col1, '12' as col2").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col1 char(3), col2 varchar(3)) using $format LOCATION '$dir'")
checkAnswer(sql("select * from t"), Row("12 ", "12"))
}
}
}
// Over-length external data is NOT rejected at read time — the raw value is
// returned as-is, since the length check only runs on writes through Spark.
test("create table w/ location and over length values") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '123456' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format LOCATION '$dir'")
checkAnswer(sql("select * from t"), Row("123456"))
}
}
}
}
// Same semantics when the location is attached after table creation.
test("alter table set location w/ fit length values") {
withTempPath { dir =>
withTable("t") {
sql("SELECT '12' as col1, '12' as col2").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col1 char(3), col2 varchar(3)) using $format")
sql(s"ALTER TABLE t SET LOCATION '$dir'")
checkAnswer(spark.table("t"), Row("12 ", "12"))
}
}
}
test("alter table set location w/ over length values") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '123456' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format")
sql(s"ALTER TABLE t SET LOCATION '$dir'")
checkAnswer(spark.table("t"), Row("123456"))
}
}
}
}
// SPARK-34114: the read path must not trim trailing spaces before the
// read-side length check / char padding — the stored value is returned as-is.
// NOTE(review): trailing-space counts in the literals are significant.
test("SPARK-34114: should not trim right for read-side length check and char padding") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '12 ' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format LOCATION '$dir'")
checkAnswer(spark.table("t"), Row("12 "))
}
}
}
}
}
// Runs the shared char/varchar suite against an in-memory DataSource V2
// catalog, registered as `testcat` and made the default catalog.
class DSV2CharVarcharTestSuite extends CharVarcharTestSuite
  with SharedSparkSession {
  override def format: String = "foo"

  // Consistency fix: declare the result type explicitly and use the same
  // `override protected` modifier order as FileSourceCharVarcharTestSuite.
  override protected def sparkConf: SparkConf = {
    super.sparkConf
      .set("spark.sql.catalog.testcat", classOf[InMemoryPartitionTableCatalog].getName)
      .set(SQLConf.DEFAULT_CATALOG.key, "testcat")
  }
}
| {
"content_hash": "0453f245fca99b3d70685472aa56c7c2",
"timestamp": "",
"source": "github",
"line_count": 860,
"max_line_length": 100,
"avg_line_length": 41.00581395348837,
"alnum_prop": 0.6010208421948107,
"repo_name": "shaneknapp/spark",
"id": "27a630c169be066c7a3ad7381a2766778db0d1bf",
"size": "36065",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sql/core/src/test/scala/org/apache/spark/sql/CharVarcharTestSuite.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "60021"
},
{
"name": "Batchfile",
"bytes": "27482"
},
{
"name": "C",
"bytes": "1493"
},
{
"name": "CSS",
"bytes": "26338"
},
{
"name": "Dockerfile",
"bytes": "16252"
},
{
"name": "HTML",
"bytes": "42080"
},
{
"name": "HiveQL",
"bytes": "1859465"
},
{
"name": "Java",
"bytes": "4736955"
},
{
"name": "JavaScript",
"bytes": "223014"
},
{
"name": "Jupyter Notebook",
"bytes": "4310512"
},
{
"name": "Makefile",
"bytes": "2379"
},
{
"name": "PLpgSQL",
"bytes": "352609"
},
{
"name": "PowerShell",
"bytes": "4221"
},
{
"name": "Python",
"bytes": "8368428"
},
{
"name": "R",
"bytes": "1287401"
},
{
"name": "ReScript",
"bytes": "240"
},
{
"name": "Roff",
"bytes": "32632"
},
{
"name": "Scala",
"bytes": "44294294"
},
{
"name": "Shell",
"bytes": "245444"
},
{
"name": "Thrift",
"bytes": "2016"
},
{
"name": "q",
"bytes": "111129"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.lexmodelbuilding.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.lexmodelbuilding.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * DeleteBotVersionResult JSON Unmarshaller
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteBotVersionResultJsonUnmarshaller implements Unmarshaller<DeleteBotVersionResult, JsonUnmarshallerContext> {

    /**
     * DeleteBotVersionResult carries no payload fields, so unmarshalling simply
     * returns a fresh, empty result without consuming the JSON context.
     */
    public DeleteBotVersionResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        return new DeleteBotVersionResult();
    }

    private static DeleteBotVersionResultJsonUnmarshaller instance;

    /** Returns the shared unmarshaller, creating it lazily on first use. */
    public static DeleteBotVersionResultJsonUnmarshaller getInstance() {
        if (instance != null) {
            return instance;
        }
        instance = new DeleteBotVersionResultJsonUnmarshaller();
        return instance;
    }
}
| {
"content_hash": "b7514ad795726261050311b408603c00",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 126,
"avg_line_length": 32.484848484848484,
"alnum_prop": 0.7910447761194029,
"repo_name": "jentfoo/aws-sdk-java",
"id": "5ee6092838013eae980d2a5b1e0d762c4238c6bb",
"size": "1652",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-lexmodelbuilding/src/main/java/com/amazonaws/services/lexmodelbuilding/model/transform/DeleteBotVersionResultJsonUnmarshaller.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "270"
},
{
"name": "FreeMarker",
"bytes": "173637"
},
{
"name": "Gherkin",
"bytes": "25063"
},
{
"name": "Java",
"bytes": "356214839"
},
{
"name": "Scilab",
"bytes": "3924"
},
{
"name": "Shell",
"bytes": "295"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<title>CepheiPrefs — Deprecated Reference</title>
<link rel="stylesheet" type="text/css" href="css/jazzy.css" />
<link rel="stylesheet" type="text/css" href="css/highlight.css" />
<meta charset='utf-8'>
<script src="js/jquery.min.js" defer></script>
<script src="js/jazzy.js" defer></script>
<style>
body, .section-name { font-family: -apple-system, BlinkMacSystemFont, sans-serif; }
pre, code { font-family: "SF Mono", ui-monospace, Menlo, monospace; }
h1, h2, h3, .section-name { font-weight: 600; }
.nav-group-name { font-weight: 300; }
.highlight { line-height: 1.3em; }
.graybox { line-height: 1.8em; }
.graybox tr td:first-of-type { font-weight: bold; }
</style>
<script src="js/lunr.min.js" defer></script>
<script src="js/typeahead.jquery.js" defer></script>
<script src="js/jazzy.search.js" defer></script>
</head>
<body>
<a name="//apple_ref/objc/Section/CepheiPrefs — Deprecated" class="dashAnchor"></a>
<a title="CepheiPrefs — Deprecated Reference"></a>
<header>
<div class="content-wrapper">
<p><a href="index.html">Cephei 1.17 Docs</a></p>
<p class="header-right"><a href="https://github.com/hbang/libcephei"><img src="img/gh.png"/>View on GitHub</a></p>
<p class="header-right"><a href="dash-feed://https%3A%2F%2Fhbang.github.io%2Flibcephei%2Fdocsets%2FCephei.xml"><img src="img/dash.png"/>Install in Dash</a></p>
<p class="header-right">
<form role="search" action="search.json">
<input type="text" placeholder="Search documentation" data-typeahead>
</form>
</p>
</div>
</header>
<div class="content-wrapper">
<p id="breadcrumbs">
<a href="index.html">Cephei Reference</a>
<img id="carat" src="img/carat.png" />
CepheiPrefs — Deprecated Reference
</p>
</div>
<div class="content-wrapper">
<nav class="sidebar">
<ul class="nav-groups">
<li class="nav-group-name">
<a href="Cephei%20%E2%80%94%20General.html">Cephei — General</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="defaults.html">defaults</a>
</li>
<li class="nav-group-task">
<a href="Cephei%20%E2%80%94%20General.html#/c:@F@HBOutputForShellCommand">HBOutputForShellCommand</a>
</li>
<li class="nav-group-task">
<a href="Cephei%20%E2%80%94%20General.html#/c:@F@HBOutputForShellCommandWithReturnCode">HBOutputForShellCommandWithReturnCode</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBPreferences.html">HBPreferences</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBRespringController.html">HBRespringController</a>
</li>
<li class="nav-group-task">
<a href="Categories/NSDictionary%28HBAdditions%29.html">NSDictionary(HBAdditions)</a>
</li>
<li class="nav-group-task">
<a href="Categories/NSString%28HBAdditions%29.html">NSString(HBAdditions)</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="CepheiUI%20%E2%80%94%20General.html">CepheiUI — General</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="Categories/NSLayoutConstraint%28CompactConstraint%29.html">NSLayoutConstraint(CompactConstraint)</a>
</li>
<li class="nav-group-task">
<a href="Categories/UIColor%28HBAdditions%29.html">UIColor(HBAdditions)</a>
</li>
<li class="nav-group-task">
<a href="Categories/UIView%28CompactConstraint%29.html">UIView(CompactConstraint)</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="Cephei%20%E2%80%94%20Types%20and%20Constants.html">Cephei — Types and Constants</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="Cephei%20%E2%80%94%20Types%20and%20Constants.html#/c:@HBPreferencesDidChangeNotification">HBPreferencesDidChangeNotification</a>
</li>
<li class="nav-group-task">
<a href="Cephei%20%E2%80%94%20Types%20and%20Constants.html#/c:@HBPreferencesNotMobileException">HBPreferencesNotMobileException</a>
</li>
<li class="nav-group-task">
<a href="Cephei%20%E2%80%94%20Types%20and%20Constants.html#/c:HBPreferences.h@T@HBPreferencesChangeCallback">HBPreferencesChangeCallback</a>
</li>
<li class="nav-group-task">
<a href="Cephei%20%E2%80%94%20Types%20and%20Constants.html#/c:HBPreferences.h@T@HBPreferencesValueChangeCallback">HBPreferencesValueChangeCallback</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="CepheiPrefs%20%E2%80%94%20General.html">CepheiPrefs — General</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="cepheiprefs-annoying-warning.html">CepheiPrefs Annoying Warning</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBAppearanceSettings.html">HBAppearanceSettings</a>
</li>
<li class="nav-group-task">
<a href="Enums/HBAppearanceSettingsLargeTitleStyle.html">HBAppearanceSettingsLargeTitleStyle</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBSupportController.html">HBSupportController</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="CepheiPrefs%20%E2%80%94%20List%20Controllers.html">CepheiPrefs — List Controllers</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="Classes/HBAboutListController.html">HBAboutListController</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBListController.html">HBListController</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBRootListController.html">HBRootListController</a>
</li>
<li class="nav-group-task">
<a href="Categories/PSListController%28HBTintAdditions%29.html">PSListController(HBTintAdditions)</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="CepheiPrefs%20%E2%80%94%20Cells.html">CepheiPrefs — Cells</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="Classes/HBDiscreteSliderTableCell.html">HBDiscreteSliderTableCell</a>
</li>
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Cells.html#/c:objc(cs)HBImageTableCell">HBImageTableCell</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBLinkTableCell.html">HBLinkTableCell</a>
</li>
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Cells.html#/c:objc(cs)HBPackageTableCell">HBPackageTableCell</a>
</li>
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Cells.html#/c:objc(cs)HBPackageNameHeaderCell">HBPackageNameHeaderCell</a>
</li>
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Cells.html#/c:objc(cs)HBSpinnerTableCell">HBSpinnerTableCell</a>
</li>
<li class="nav-group-task">
<a href="Classes/HBStepperTableCell.html">HBStepperTableCell</a>
</li>
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Cells.html#/c:objc(cs)HBTintedTableCell">HBTintedTableCell</a>
</li>
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Cells.html#/c:objc(cs)HBTwitterCell">HBTwitterCell</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="CepheiPrefs%20%E2%80%94%20Deprecated.html">CepheiPrefs — Deprecated</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Deprecated.html#/c:objc(cs)HBListItemsController">HBListItemsController</a>
</li>
<li class="nav-group-task">
<a href="CepheiPrefs%20%E2%80%94%20Deprecated.html#/c:objc(cs)HBInitialsLinkTableCell">HBInitialsLinkTableCell</a>
</li>
</ul>
</li>
</ul>
</nav>
<article class="main-content">
<section>
<section class="section">
<h1>CepheiPrefs — Deprecated</h1>
</section>
<section class="section task-group-section">
<div class="task-group">
<ul>
<li class="item">
<div>
<code>
<a name="/c:objc(cs)HBListItemsController"></a>
<a name="//apple_ref/objc/Class/HBListItemsController" class="dashAnchor"></a>
<a class="token" href="#/c:objc(cs)HBListItemsController">HBListItemsController</a>
</code>
</div>
<div class="height-container">
<div class="pointer-container"></div>
<section class="section">
<div class="pointer"></div>
<div class="abstract">
<p>The HBListItemsController class in CepheiPrefs was used with previous versions to ensure
that the tint color from the previous view controller is retained. As of Cephei 1.4, this is no
longer needed, and this class is kept for backwards compatibility purposes.</p>
</div>
<div class="declaration">
<h4>Declaration</h4>
<div class="language">
<p class="aside-title">Objective-C</p>
<pre class="highlight objective_c"><code><span class="k">@interface</span> <span class="nc">HBListItemsController</span> <span class="p">:</span> <span class="nc">PSListItemsController</span></code></pre>
</div>
<div class="language">
<p class="aside-title">Swift</p>
<pre class="highlight swift"><code><span class="kd">class</span> <span class="kt">HBListItemsController</span> <span class="p">:</span> <span class="kt">PSListItemsController</span></code></pre>
</div>
</div>
</section>
</div>
</li>
<li class="item">
<div>
<code>
<a name="/c:objc(cs)HBInitialsLinkTableCell"></a>
<a name="//apple_ref/objc/Class/HBInitialsLinkTableCell" class="dashAnchor"></a>
<a class="token" href="#/c:objc(cs)HBInitialsLinkTableCell">HBInitialsLinkTableCell</a>
</code>
</div>
<div class="height-container">
<div class="pointer-container"></div>
<section class="section">
<div class="pointer"></div>
<div class="abstract">
<p>The HBInitialsLinkTableCell class in CepheiPrefs is a shim kept for compatibility reasons.
The class is now called <code><a href="Classes/HBLinkTableCell.html">HBLinkTableCell</a></code>.</p>
</div>
<div class="declaration">
<h4>Declaration</h4>
<div class="language">
<p class="aside-title">Objective-C</p>
<pre class="highlight objective_c"><code><span class="k">@interface</span> <span class="nc">HBInitialsLinkTableCell</span> <span class="p">:</span> <span class="nc"><a href="Classes/HBLinkTableCell.html">HBLinkTableCell</a></span></code></pre>
</div>
<div class="language">
<p class="aside-title">Swift</p>
<pre class="highlight swift"><code><span class="kd">class</span> <span class="kt">HBInitialsLinkTableCell</span> <span class="p">:</span> <span class="kt"><a href="Classes/HBLinkTableCell.html">HBLinkTableCell</a></span></code></pre>
</div>
</div>
</section>
</div>
</li>
</ul>
</div>
</section>
</section>
<section id="footer">
<p>© 2021 <a class="link" href="https://hbang.github.io/" target="_blank" rel="external">HASHBANG Productions</a>. All rights reserved. (Last updated: 2021-09-29)</p>
<p>Generated by <a class="link" href="https://github.com/realm/jazzy" target="_blank" rel="external">jazzy ♪♫ v0.13.6</a>, a <a class="link" href="https://realm.io" target="_blank" rel="external">Realm</a> project.</p>
</section>
</article>
</div>
</body>
</html>
| {
"content_hash": "26ef15c18653e914fd3def50089cb316",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 269,
"avg_line_length": 50.42857142857143,
"alnum_prop": 0.5407132999200988,
"repo_name": "hbang/libcephei",
"id": "35ebc84f3c4958d013cb2e42b5d74174818c635f",
"size": "13795",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "docs/CepheiPrefs — Deprecated.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Logos",
"bytes": "29658"
},
{
"name": "Makefile",
"bytes": "7426"
},
{
"name": "Objective-C",
"bytes": "185118"
},
{
"name": "Shell",
"bytes": "1027"
}
],
"symlink_target": ""
} |
#include <xtests/xtests.h>
#include <unistd.h>
#include <stdlib.h>
#include <windows.h>
/* Exercises the UNIXem emulation of unistd APIs: getpid() must agree with the
 * Win32 process id, and gethostname() must succeed on a sufficiently large
 * buffer. */
int main(int argc, char** argv)
{
    int retCode = EXIT_SUCCESS;
    int verbosity = 2;

    XTESTS_COMMANDLINE_PARSEVERBOSITY(argc, argv, &verbosity);

    if(XTESTS_START_RUNNER("test.component.unistd.gethostname", verbosity))
    {
        /* Test-1 */
        if(XTESTS_CASE_BEGIN("Test-1", "testing gethostname"))
        {
            char hostName[100];

            XTESTS_TEST_INTEGER_EQUAL((pid_t)GetCurrentProcessId(), getpid());

            /* Fix: the gethostname() return value was previously discarded, so
             * this case never verified the function it is named for. POSIX
             * specifies a return of 0 on success. */
            XTESTS_TEST_INTEGER_EQUAL(0, gethostname(&hostName[0], STLSOFT_NUM_ELEMENTS(hostName)));

            XTESTS_CASE_END("Test-1");
        }

        XTESTS_PRINT_RESULTS();

        XTESTS_END_RUNNER_UPDATE_EXITCODE(&retCode);
    }

    return retCode;
}
/* ////////////////////////////////////////////////////////////////////// */
| {
"content_hash": "aba59f3c147146423ab7df210d5c6986",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 78,
"avg_line_length": 21.897435897435898,
"alnum_prop": 0.5468384074941453,
"repo_name": "synesissoftware/UNIXem",
"id": "b0b8fec1f2b3c79056d8662561ede00cd23731c9",
"size": "854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/unit/test.unit.unistd.gethostname/test.unit.unistd.gethostname.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "283103"
},
{
"name": "Makefile",
"bytes": "207697"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
namespace ControlCenter
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml.
    /// </summary>
    public partial class MainWindow : Window
    {
        public MainWindow()
        {
            InitializeComponent();
            InitSerialPort();
        }

        /// <summary>Closes the main window, exiting the application.</summary>
        private void exitMenuItem_Click(object sender, RoutedEventArgs e)
        {
            Close();
        }

        /// <summary>Shows the About dialog modally.</summary>
        private void aboutMenuItem_Click(object sender, RoutedEventArgs e)
        {
            var about = new About();
            about.ShowDialog();
        }
    }
}
| {
"content_hash": "9f70f451c7fdc596e6f62593565df72b",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 74,
"avg_line_length": 23.65,
"alnum_prop": 0.653276955602537,
"repo_name": "amaztony/ControlCencter",
"id": "cb6b3111b3e4492ad5ce219a02dbfa110c9e81a6",
"size": "958",
"binary": false,
"copies": "1",
"ref": "refs/heads/mysql",
"path": "ControlCenter/MainWindow.xaml.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "123241"
}
],
"symlink_target": ""
} |
package com.facebook.buck.cxx;
import static com.facebook.buck.cxx.toolchain.CxxFlavorSanitizer.sanitize;
import static java.io.File.pathSeparator;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.oneOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.android.AssumeAndroidPlatform;
import com.facebook.buck.apple.AppleNativeIntegrationTestUtils;
import com.facebook.buck.apple.toolchain.ApplePlatform;
import com.facebook.buck.core.build.engine.BuildRuleStatus;
import com.facebook.buck.core.build.engine.BuildRuleSuccessType;
import com.facebook.buck.core.config.FakeBuckConfig;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.UnconfiguredTargetConfiguration;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.cxx.config.CxxBuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.CxxPlatformUtils;
import com.facebook.buck.cxx.toolchain.HeaderVisibility;
import com.facebook.buck.cxx.toolchain.LinkerMapMode;
import com.facebook.buck.cxx.toolchain.PicType;
import com.facebook.buck.cxx.toolchain.StripStyle;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.io.file.MostFiles;
import com.facebook.buck.io.filesystem.BuckPaths;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.BuckBuildLog;
import com.facebook.buck.testutil.integration.InferHelper;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.environment.EnvVariablesProvider;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Predicates;
import com.google.common.base.Splitter;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
public class CxxBinaryIntegrationTest {
@Rule public TemporaryPaths tmp = new TemporaryPaths();
@Before
public void setUp() {
assumeTrue(Platform.detect() != Platform.WINDOWS);
}
@Test
public void testInferCxxBinaryDepsCaching() throws IOException {
ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
workspace.enableDirCache();
CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
BuildTarget inputBuildTarget = BuildTargetFactory.newInstance("//foo:binary_with_deps");
String inputBuildTargetName =
inputBuildTarget
.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor())
.getFullyQualifiedName();
/*
* Build the given target and check that it succeeds.
*/
workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
/*
* Check that building after clean will use the cache
*/
workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
for (BuildTarget buildTarget : buildLog.getAllTargets()) {
buildLog.assertTargetWasFetchedFromCache(buildTarget);
}
/*
* Check that if the file in the binary target changes, then all the deps will be fetched
* from the cache
*/
String sourceName = "src_with_deps.c";
workspace.replaceFileContents("foo/" + sourceName, "10", "30");
workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
buildLog = workspace.getBuildLog();
CxxSourceRuleFactory cxxSourceRuleFactory =
CxxSourceRuleFactoryHelper.of(
workspace.getDestPath(), inputBuildTarget, cxxPlatform, cxxBuckConfig);
BuildTarget captureBuildTarget = cxxSourceRuleFactory.createInferCaptureBuildTarget(sourceName);
// this is flavored, and denotes the analysis step (generates a local report)
BuildTarget inferAnalysisTarget =
inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
// this is the flavored version of the top level target (the one give in input to buck)
BuildTarget inferReportTarget =
inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
BuildTarget aggregatedDepsTarget =
cxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
String bt;
for (BuildTarget buildTarget : buildLog.getAllTargets()) {
bt = buildTarget.toString();
if (buildTarget
.getFlavors()
.contains(CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR)
|| buildTarget.getFlavors().contains(CxxDescriptionEnhancer.HEADER_SYMLINK_TREE_FLAVOR)
|| bt.equals(inferAnalysisTarget.toString())
|| bt.equals(captureBuildTarget.toString())
|| bt.equals(inferReportTarget.toString())
|| bt.equals(aggregatedDepsTarget.toString())) {
buildLog.assertTargetBuiltLocally(bt);
} else {
buildLog.assertTargetWasFetchedFromCache(buildTarget);
}
}
}
  /**
   * Verifies that changing the version string reported by the fake infer binary invalidates the
   * cached results of every infer-related rule (aggregated preprocess deps, capture, analysis,
   * report) for {@code //foo:binary_with_deps} and both of its library deps, while all other
   * rules (except the always-local header symlink trees) are still fetched from the dir cache.
   */
  @Test
  public void testInferCxxBinaryDepsInvalidateCacheWhenVersionChanges() throws IOException {
    ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
    workspace.enableDirCache();
    CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
    CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
    BuildTarget inputBuildTarget = BuildTargetFactory.newInstance("//foo:binary_with_deps");
    String inputBuildTargetName =
        inputBuildTarget
            .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor())
            .getFullyQualifiedName();
    /*
     * Build the given target and check that it succeeds.
     */
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    /*
     * Check that building after clean will use the cache
     */
    workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    BuckBuildLog buildLog = workspace.getBuildLog();
    for (BuildTarget buildTarget : buildLog.getAllTargets()) {
      buildLog.assertTargetWasFetchedFromCache(buildTarget);
    }
    /*
     * Check that if the version of infer changes, then all the infer-related targets are
     * recomputed
     */
    workspace.resetBuildLogFile();
    // Editing the version string inside the fake infer script simulates an infer upgrade.
    workspace.replaceFileContents("fake-infer/fake-bin/infer", "0.12345", "9.9999");
    workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    buildLog = workspace.getBuildLog();
    String sourceName = "src_with_deps.c";
    CxxSourceRuleFactory cxxSourceRuleFactory =
        CxxSourceRuleFactoryHelper.of(
            workspace.getDestPath(), inputBuildTarget, cxxPlatform, cxxBuckConfig);
    // Reconstruct the infer-related build targets for the top-level binary and both deps so we
    // can distinguish "must rebuild locally" from "may come from cache".
    BuildTarget topCaptureBuildTarget =
        cxxSourceRuleFactory.createInferCaptureBuildTarget(sourceName);
    BuildTarget topInferAnalysisTarget =
        inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
    BuildTarget topInferReportTarget =
        inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
    BuildTarget depOneBuildTarget = BuildTargetFactory.newInstance("//foo:dep_one");
    String depOneSourceName = "dep_one.c";
    CxxSourceRuleFactory depOneSourceRuleFactory =
        CxxSourceRuleFactoryHelper.of(
            workspace.getDestPath(), depOneBuildTarget, cxxPlatform, cxxBuckConfig);
    BuildTarget depOneCaptureBuildTarget =
        depOneSourceRuleFactory.createInferCaptureBuildTarget(depOneSourceName);
    BuildTarget depOneInferAnalysisTarget =
        depOneCaptureBuildTarget.withFlavors(
            cxxPlatform.getFlavor(), CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
    BuildTarget depTwoBuildTarget = BuildTargetFactory.newInstance("//foo:dep_two");
    CxxSourceRuleFactory depTwoSourceRuleFactory =
        CxxSourceRuleFactoryHelper.of(
            workspace.getDestPath(), depTwoBuildTarget, cxxPlatform, cxxBuckConfig);
    BuildTarget depTwoCaptureBuildTarget =
        depTwoSourceRuleFactory.createInferCaptureBuildTarget("dep_two.c");
    BuildTarget depTwoInferAnalysisTarget =
        depTwoCaptureBuildTarget.withFlavors(
            cxxPlatform.getFlavor(), CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
    ImmutableSet<String> locallyBuiltTargets =
        ImmutableSet.of(
            cxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget().toString(),
            topCaptureBuildTarget.toString(),
            topInferAnalysisTarget.toString(),
            topInferReportTarget.toString(),
            depOneSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget().toString(),
            depOneCaptureBuildTarget.toString(),
            depOneInferAnalysisTarget.toString(),
            depTwoSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget().toString(),
            depTwoCaptureBuildTarget.toString(),
            depTwoInferAnalysisTarget.toString());
    // check that infer-related targets are getting rebuilt
    for (String t : locallyBuiltTargets) {
      buildLog.assertTargetBuiltLocally(t);
    }
    Set<String> builtFromCacheTargets =
        FluentIterable.from(buildLog.getAllTargets())
            // Filter out header symlink tree rules, as they are always built locally.
            .filter(
                target ->
                    (!target
                            .getFlavors()
                            .contains(CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR)
                        && !target
                            .getFlavors()
                            .contains(CxxDescriptionEnhancer.HEADER_SYMLINK_TREE_FLAVOR)))
            .transform(Object::toString)
            // Filter out any rules that are explicitly built locally.
            .filter(Predicates.not(locallyBuiltTargets::contains))
            .toSet();
    // check that all the other targets are fetched from the cache
    for (String t : builtFromCacheTargets) {
      buildLog.assertTargetWasFetchedFromCache(t);
    }
  }
  /**
   * Builds the infer flavor of the dependency-free target {@code //foo:simple} and checks the
   * exact set of generated rules (aggregated deps, header symlink tree, capture, analysis,
   * report). Then verifies incremental behavior: a no-op rebuild touches only the report target
   * via matching rule keys, and a source edit re-runs capture and analysis locally.
   */
  @Test
  public void testInferCxxBinaryWithoutDeps() throws IOException {
    ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
    CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
    CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
    BuildTarget inputBuildTarget = BuildTargetFactory.newInstance("//foo:simple");
    String inputBuildTargetName =
        inputBuildTarget
            .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor())
            .getFullyQualifiedName();
    /*
     * Build the given target and check that it succeeds.
     */
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    /*
     * Check that all the required build targets have been generated.
     */
    String sourceName = "simple.cpp";
    String sourceFull = "foo/" + sourceName;
    CxxSourceRuleFactory cxxSourceRuleFactory =
        CxxSourceRuleFactoryHelper.of(
            workspace.getDestPath(), inputBuildTarget, cxxPlatform, cxxBuckConfig);
    // this is unflavored, but bounded to the InferCapture build rule
    BuildTarget captureBuildTarget = cxxSourceRuleFactory.createInferCaptureBuildTarget(sourceName);
    // this is unflavored, but necessary to run the compiler successfully
    BuildTarget headerSymlinkTreeTarget =
        CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
            inputBuildTarget, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
    // this is flavored, and denotes the analysis step (generates a local report)
    BuildTarget inferAnalysisTarget =
        inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
    // this is flavored and corresponds to the top level target (the one given in input to buck)
    BuildTarget inferReportTarget =
        inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
    BuildTarget aggregatedDepsTarget =
        cxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
    ImmutableSortedSet.Builder<BuildTarget> targetsBuilder =
        ImmutableSortedSet.<BuildTarget>naturalOrder()
            .add(
                aggregatedDepsTarget,
                headerSymlinkTreeTarget,
                captureBuildTarget,
                inferAnalysisTarget,
                inferReportTarget);
    BuckBuildLog buildLog = workspace.getBuildLog();
    // The build log must contain exactly these five rules, all built locally.
    assertThat(buildLog.getAllTargets(), containsInAnyOrder(targetsBuilder.build().toArray()));
    buildLog.assertTargetBuiltLocally(aggregatedDepsTarget);
    buildLog.assertTargetBuiltLocally(headerSymlinkTreeTarget);
    buildLog.assertTargetBuiltLocally(captureBuildTarget);
    buildLog.assertTargetBuiltLocally(inferAnalysisTarget);
    buildLog.assertTargetBuiltLocally(inferReportTarget);
    /*
     * Check that running a build again results in no builds since nothing has changed.
     */
    workspace.resetBuildLogFile(); // clear for new build
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    buildLog = workspace.getBuildLog();
    assertEquals(ImmutableSet.of(inferReportTarget), buildLog.getAllTargets());
    buildLog.assertTargetHadMatchingRuleKey(inferReportTarget);
    /*
     * Check that changing the source file results in running the capture/analysis rules again.
     */
    workspace.resetBuildLogFile();
    workspace.replaceFileContents(sourceFull, "*s = 42;", "");
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    buildLog = workspace.getBuildLog();
    targetsBuilder =
        ImmutableSortedSet.<BuildTarget>naturalOrder()
            .add(
                aggregatedDepsTarget,
                captureBuildTarget,
                inferAnalysisTarget,
                inferReportTarget,
                headerSymlinkTreeTarget);
    assertEquals(buildLog.getAllTargets(), targetsBuilder.build());
    buildLog.assertTargetBuiltLocally(captureBuildTarget);
    buildLog.assertTargetBuiltLocally(inferAnalysisTarget);
    // The aggregated-deps rule key is unchanged by a pure source edit.
    buildLog.assertTargetHadMatchingRuleKey(aggregatedDepsTarget);
  }
  /**
   * Builds the infer flavor of {@code //foo:binary_with_deps} (which depends on
   * {@code //foo:dep_one} and {@code //foo:dep_two}) and checks the complete set of generated
   * rules across all three targets. Then verifies incremental behavior: a no-op rebuild only
   * touches the report rule, and editing dep_one's source re-runs capture/analysis for dep_one
   * and the top-level analysis/report — but not dep_two's analysis or the top-level capture.
   */
  @Test
  public void testInferCxxBinaryWithDeps() throws IOException {
    ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
    CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
    CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
    BuildTarget inputBuildTarget = BuildTargetFactory.newInstance("//foo:binary_with_deps");
    String inputBuildTargetName =
        inputBuildTarget
            .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor())
            .getFullyQualifiedName();
    /*
     * Build the given target and check that it succeeds.
     */
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    /*
     * Check that all the required build targets have been generated.
     */
    String sourceName = "src_with_deps.c";
    CxxSourceRuleFactory cxxSourceRuleFactory =
        CxxSourceRuleFactoryHelper.of(
            workspace.getDestPath(), inputBuildTarget, cxxPlatform, cxxBuckConfig);
    // 1. create the targets of binary_with_deps
    // this is unflavored, but bounded to the InferCapture build rule
    BuildTarget topCaptureBuildTarget =
        cxxSourceRuleFactory.createInferCaptureBuildTarget(sourceName);
    // this is unflavored, but necessary to run the compiler successfully
    BuildTarget topHeaderSymlinkTreeTarget =
        CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
            inputBuildTarget, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
    // this is flavored, and denotes the analysis step (generates a local report)
    BuildTarget topInferAnalysisTarget =
        inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
    // this is flavored and corresponds to the top level target (the one given in input to buck)
    BuildTarget topInferReportTarget =
        inputBuildTarget.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
    BuildTarget topAggregatedDepsTarget =
        cxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
    // 2. create the targets of dep_one
    BuildTarget depOneBuildTarget = BuildTargetFactory.newInstance("//foo:dep_one");
    String depOneSourceName = "dep_one.c";
    String depOneSourceFull = "foo/" + depOneSourceName;
    CxxSourceRuleFactory depOneSourceRuleFactory =
        CxxSourceRuleFactoryHelper.of(
            workspace.getDestPath(), depOneBuildTarget, cxxPlatform, cxxBuckConfig);
    BuildTarget depOneCaptureBuildTarget =
        depOneSourceRuleFactory.createInferCaptureBuildTarget(depOneSourceName);
    BuildTarget depOneHeaderSymlinkTreeTarget =
        CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
            depOneBuildTarget, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
    BuildTarget depOneExportedHeaderSymlinkTreeTarget =
        CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
            depOneBuildTarget,
            HeaderVisibility.PUBLIC,
            CxxPlatformUtils.getHeaderModeForDefaultPlatform(tmp.getRoot()).getFlavor());
    BuildTarget depOneInferAnalysisTarget =
        depOneCaptureBuildTarget.withFlavors(
            cxxPlatform.getFlavor(), CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
    BuildTarget depOneAggregatedDepsTarget =
        depOneSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
    // 3. create the targets of dep_two
    BuildTarget depTwoBuildTarget = BuildTargetFactory.newInstance("//foo:dep_two");
    CxxSourceRuleFactory depTwoSourceRuleFactory =
        CxxSourceRuleFactoryHelper.of(
            workspace.getDestPath(), depTwoBuildTarget, cxxPlatform, cxxBuckConfig);
    BuildTarget depTwoCaptureBuildTarget =
        depTwoSourceRuleFactory.createInferCaptureBuildTarget("dep_two.c");
    BuildTarget depTwoHeaderSymlinkTreeTarget =
        CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
            depTwoBuildTarget, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
    BuildTarget depTwoExportedHeaderSymlinkTreeTarget =
        CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
            depTwoBuildTarget,
            HeaderVisibility.PUBLIC,
            CxxPlatformUtils.getHeaderModeForDefaultPlatform(tmp.getRoot()).getFlavor());
    BuildTarget depTwoInferAnalysisTarget =
        depTwoCaptureBuildTarget.withFlavors(
            cxxPlatform.getFlavor(), CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor());
    BuildTarget depTwoAggregatedDepsTarget =
        depTwoSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
    ImmutableSet.Builder<BuildTarget> buildTargets =
        ImmutableSortedSet.<BuildTarget>naturalOrder()
            .add(
                topAggregatedDepsTarget,
                topCaptureBuildTarget,
                topHeaderSymlinkTreeTarget,
                topInferAnalysisTarget,
                topInferReportTarget,
                depOneAggregatedDepsTarget,
                depOneCaptureBuildTarget,
                depOneHeaderSymlinkTreeTarget,
                depOneExportedHeaderSymlinkTreeTarget,
                depOneInferAnalysisTarget,
                depTwoAggregatedDepsTarget,
                depTwoCaptureBuildTarget,
                depTwoHeaderSymlinkTreeTarget,
                depTwoExportedHeaderSymlinkTreeTarget,
                depTwoInferAnalysisTarget);
    // Check all the targets are in the buildLog
    assertEquals(
        buildTargets.build(), ImmutableSet.copyOf(workspace.getBuildLog().getAllTargets()));
    /*
     * Check that running a build again results in no builds since nothing has changed.
     */
    workspace.resetBuildLogFile(); // clear for new build
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    BuckBuildLog buildLog = workspace.getBuildLog();
    assertEquals(ImmutableSet.of(topInferReportTarget), buildLog.getAllTargets());
    buildLog.assertTargetHadMatchingRuleKey(topInferReportTarget);
    /*
     * Check that if a library source file changes then the capture/analysis rules run again on
     * the main target and on dep_one only.
     */
    workspace.resetBuildLogFile();
    workspace.replaceFileContents(depOneSourceFull, "flag > 0", "flag < 0");
    workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
    buildLog = workspace.getBuildLog();
    buildTargets =
        ImmutableSortedSet.<BuildTarget>naturalOrder()
            .add(
                topInferAnalysisTarget, // analysis runs again
                topInferReportTarget, // report runs again
                topCaptureBuildTarget, // cached
                depTwoInferAnalysisTarget, // cached
                depOneAggregatedDepsTarget,
                depOneHeaderSymlinkTreeTarget,
                depOneExportedHeaderSymlinkTreeTarget,
                depOneCaptureBuildTarget, // capture of the changed file runs again
                depOneInferAnalysisTarget // analysis of the library runs again
                );
    assertEquals(buildTargets.build(), buildLog.getAllTargets());
    buildLog.assertTargetBuiltLocally(topInferAnalysisTarget);
    buildLog.assertTargetBuiltLocally(topInferReportTarget);
    buildLog.assertTargetHadMatchingRuleKey(topCaptureBuildTarget);
    buildLog.assertTargetHadMatchingRuleKey(depTwoInferAnalysisTarget);
    buildLog.assertTargetBuiltLocally(depOneCaptureBuildTarget);
    buildLog.assertTargetBuiltLocally(depOneInferAnalysisTarget);
    buildLog.assertTargetHadMatchingRuleKey(depOneAggregatedDepsTarget);
  }
  /**
   * Builds the infer flavor of {@code //foo:binary_with_chain_deps} and checks that the emitted
   * {@code infer-deps.txt} lists every capture and analysis rule of the whole dependency chain,
   * one tab-separated line per rule: fully-qualified target, flavor list, and the absolute path
   * to its results directory.
   */
  @Test
  public void testInferCxxBinaryWithDepsEmitsAllTheDependenciesResultsDirs() throws IOException {
    ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
    ProjectFilesystem filesystem = workspace.getProjectFileSystem();
    BuildTarget inputBuildTarget =
        BuildTargetFactory.newInstance("//foo:binary_with_chain_deps")
            .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
    // Build the given target and check that it succeeds.
    workspace.runBuckCommand("build", inputBuildTarget.getFullyQualifiedName()).assertSuccess();
    assertTrue(
        Files.exists(
            workspace.getPath(
                BuildTargetPaths.getGenPath(
                    filesystem, inputBuildTarget, "infer-%s/infer-deps.txt"))));
    Set<String> loggedDeps =
        getUniqueLines(
            workspace.getFileContents(
                BuildTargetPaths.getGenPath(
                    filesystem, inputBuildTarget, "infer-%s/infer-deps.txt")));
    // Object-file names are sanitized the same way the build does, so the flavor names match.
    String sanitizedChainDepOne = sanitize("chain_dep_one.c.o");
    String sanitizedTopChain = sanitize("top_chain.c.o");
    String sanitizedChainDepTwo = sanitize("chain_dep_two.c.o");
    BuildTarget analyzeTopChainTarget =
        BuildTargetFactory.newInstance("//foo:binary_with_chain_deps#infer-analyze");
    BuildTarget captureTopChainTarget =
        BuildTargetFactory.newInstance(
            "//foo:binary_with_chain_deps#default,infer-capture-" + sanitizedTopChain);
    BuildTarget analyzeChainDepOneTarget =
        BuildTargetFactory.newInstance("//foo:chain_dep_one#default,infer-analyze");
    BuildTarget captureChainDepOneTarget =
        BuildTargetFactory.newInstance(
            "//foo:chain_dep_one#default,infer-capture-" + sanitizedChainDepOne);
    BuildTarget analyzeChainDepTwoTarget =
        BuildTargetFactory.newInstance("//foo:chain_dep_two#default,infer-analyze");
    BuildTarget captureChainDepTwoTarget =
        BuildTargetFactory.newInstance(
            "//foo:chain_dep_two#default,infer-capture-" + sanitizedChainDepTwo);
    AbsPath basePath = filesystem.getRootPath().toRealPath();
    // Each expected line is target \t [flavors] \t absolute results dir.
    Set<String> expectedOutput =
        ImmutableSet.of(
            analyzeTopChainTarget.getFullyQualifiedName()
                + "\t"
                + "[infer-analyze]\t"
                + basePath.resolve(
                    BuildTargetPaths.getGenPath(
                        filesystem, analyzeTopChainTarget, "infer-analysis-%s")),
            captureTopChainTarget.getFullyQualifiedName()
                + "\t"
                + "[default, infer-capture-"
                + sanitizedTopChain
                + "]\t"
                + basePath.resolve(
                    BuildTargetPaths.getGenPath(filesystem, captureTopChainTarget, "infer-out-%s")),
            analyzeChainDepOneTarget.getFullyQualifiedName()
                + "\t"
                + "[default, infer-analyze]\t"
                + basePath.resolve(
                    BuildTargetPaths.getGenPath(
                        filesystem, analyzeChainDepOneTarget, "infer-analysis-%s")),
            captureChainDepOneTarget.getFullyQualifiedName()
                + "\t"
                + "[default, infer-capture-"
                + sanitizedChainDepOne
                + "]\t"
                + basePath.resolve(
                    BuildTargetPaths.getGenPath(
                        filesystem, captureChainDepOneTarget, "infer-out-%s")),
            analyzeChainDepTwoTarget.getFullyQualifiedName()
                + "\t"
                + "[default, infer-analyze]\t"
                + basePath.resolve(
                    BuildTargetPaths.getGenPath(
                        filesystem, analyzeChainDepTwoTarget, "infer-analysis-%s")),
            captureChainDepTwoTarget.getFullyQualifiedName()
                + "\t"
                + "[default, infer-capture-"
                + sanitizedChainDepTwo
                + "]\t"
                + basePath.resolve(
                    BuildTargetPaths.getGenPath(
                        filesystem, captureChainDepTwoTarget, "infer-out-%s")));
    assertEquals(expectedOutput, loggedDeps);
  }
private static void registerCell(
ProjectWorkspace cellToModifyConfigOf,
String cellName,
ProjectWorkspace cellToRegisterAsCellName)
throws IOException {
TestDataHelper.overrideBuckconfig(
cellToModifyConfigOf,
ImmutableMap.of(
"repositories",
ImmutableMap.of(
cellName, cellToRegisterAsCellName.getPath(".").normalize().toString())));
}
@Test
public void inferShouldBeAbleToUseMultipleXCell() throws IOException {
Path rootWorkspacePath = tmp.getRoot();
// create infertest workspace
InferHelper.setupWorkspace(this, rootWorkspacePath, "infertest");
// create infertest/inter-cell/multi-cell/primary sub-workspace as infer-configured one
Path primaryRootPath = tmp.newFolder().toRealPath().normalize();
ProjectWorkspace primary =
InferHelper.setupCxxInferWorkspace(
this,
primaryRootPath,
Optional.empty(),
"infertest/inter-cell/multi-cell/primary",
Optional.of(rootWorkspacePath.resolve("fake-infer")));
// create infertest/inter-cell/multi-cell/secondary sub-workspace
Path secondaryRootPath = tmp.newFolder().toRealPath().normalize();
ProjectWorkspace secondary =
InferHelper.setupWorkspace(
this, secondaryRootPath, "infertest/inter-cell/multi-cell/secondary");
// register cells
registerCell(primary, "secondary", secondary);
BuildTarget inputBuildTarget =
BuildTargetFactory.newInstance("//:cxxbinary")
.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
// run from primary workspace
ProcessResult result =
primary.runBuckBuild(
InferHelper.getCxxCLIConfigurationArgs(
rootWorkspacePath.resolve("fake-infer"), Optional.empty(), inputBuildTarget));
result.assertSuccess();
ProjectFilesystem filesystem = primary.getProjectFileSystem();
String reportPath =
BuildTargetPaths.getGenPath(filesystem, inputBuildTarget, "infer-%s/report.json")
.toString();
List<Object> bugs = InferHelper.loadInferReport(primary, reportPath);
Assert.assertThat(
"2 bugs expected in " + reportPath + " not found", bugs.size(), Matchers.equalTo(2));
}
  /**
   * Builds the infer flavor of {@code //foo:binary_with_diamond_deps}, then cleans (keeping the
   * cache) and rebuilds. Verifies that only the single top-level rule is fetched from the cache
   * and that {@code infer-deps.txt} still lists every capture and analysis rule across the whole
   * diamond (binary, both intermediate deps, and the shared lib), formatted via
   * {@link InferLogLine}.
   */
  @Test
  public void testInferCxxBinaryWithDiamondDepsEmitsAllBuildRulesInvolvedWhenCacheHit()
      throws IOException {
    ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
    workspace.enableDirCache();
    ProjectFilesystem filesystem = workspace.getProjectFileSystem();
    BuildTarget inputBuildTarget =
        BuildTargetFactory.newInstance("//foo:binary_with_diamond_deps")
            .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
    String buildTargetName = inputBuildTarget.getFullyQualifiedName();
    /*
     * Build the given target and check that it succeeds.
     */
    workspace.runBuckCommand("build", buildTargetName).assertSuccess();
    /*
     * Check that building after clean will use the cache
     */
    workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
    workspace.runBuckCommand("build", buildTargetName).assertSuccess();
    BuckBuildLog buildLog = workspace.getBuildLog();
    ImmutableSet<BuildTarget> allInvolvedTargets = buildLog.getAllTargets();
    assertEquals(1, allInvolvedTargets.size()); // Only main target should be fetched from cache
    for (BuildTarget bt : allInvolvedTargets) {
      buildLog.assertTargetWasFetchedFromCache(bt);
    }
    assertTrue(
        Files.exists(
            workspace.getPath(
                BuildTargetPaths.getGenPath(
                    filesystem, inputBuildTarget, "infer-%s/infer-deps.txt"))));
    Set<String> loggedDeps =
        getUniqueLines(
            workspace.getFileContents(
                BuildTargetPaths.getGenPath(
                    filesystem, inputBuildTarget, "infer-%s/infer-deps.txt")));
    BuildTarget analyzeMainTarget =
        BuildTargetFactory.newInstance("//foo:binary_with_diamond_deps#infer-analyze");
    BuildTarget analyzeDepOneTarget =
        BuildTargetFactory.newInstance("//foo:diamond_dep_one#default,infer-analyze");
    BuildTarget analyzeDepTwoTarget =
        BuildTargetFactory.newInstance("//foo:diamond_dep_two#default,infer-analyze");
    BuildTarget analyzeSimpleLibTarget =
        BuildTargetFactory.newInstance("//foo:simple_lib#default,infer-analyze");
    // Object-file names are sanitized the same way the build does, so the flavor names match.
    String sanitizedSimpleCpp = sanitize("simple.cpp.o");
    String sanitizedDepOne = sanitize("dep_one.c.o");
    String sanitizedDepTwo = sanitize("dep_two.c.o");
    String sanitizedSrcWithDeps = sanitize("src_with_deps.c.o");
    BuildTarget simpleCppTarget =
        BuildTargetFactory.newInstance(
            "//foo:simple_lib#default,infer-capture-" + sanitizedSimpleCpp);
    BuildTarget depOneTarget =
        BuildTargetFactory.newInstance(
            "//foo:diamond_dep_one#default,infer-capture-" + sanitizedDepOne);
    BuildTarget depTwoTarget =
        BuildTargetFactory.newInstance(
            "//foo:diamond_dep_two#default,infer-capture-" + sanitizedDepTwo);
    BuildTarget srcWithDepsTarget =
        BuildTargetFactory.newInstance(
            "//foo:binary_with_diamond_deps#default,infer-capture-" + sanitizedSrcWithDeps);
    AbsPath basePath = filesystem.getRootPath().toRealPath();
    Set<String> expectedOutput =
        ImmutableSet.of(
            InferLogLine.fromBuildTarget(
                    analyzeMainTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, analyzeMainTarget, "infer-analysis-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    srcWithDepsTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, srcWithDepsTarget, "infer-out-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    analyzeDepOneTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, analyzeDepOneTarget, "infer-analysis-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    depOneTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(filesystem, depOneTarget, "infer-out-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    analyzeDepTwoTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, analyzeDepTwoTarget, "infer-analysis-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    depTwoTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(filesystem, depTwoTarget, "infer-out-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    analyzeSimpleLibTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, analyzeSimpleLibTarget, "infer-analysis-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    simpleCppTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, simpleCppTarget, "infer-out-%s"))
                        .getPath())
                .toString());
    assertEquals(expectedOutput, loggedDeps);
  }
  /**
   * Same as the plain-infer diamond test, but for the {@code #infer-capture-all} flavor: after a
   * clean-and-rebuild served from the dir cache, {@code infer-deps.txt} must list the capture
   * rules (only — no analysis rules) for all four targets in the diamond.
   */
  @Test
  public void testInferCaptureAllCxxBinaryWithDiamondDepsEmitsAllBuildRulesInvolvedWhenCacheHit()
      throws IOException {
    ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
    workspace.enableDirCache();
    ProjectFilesystem filesystem = workspace.getProjectFileSystem();
    BuildTarget inputBuildTarget =
        BuildTargetFactory.newInstance("//foo:binary_with_diamond_deps")
            .withFlavors(CxxInferEnhancer.InferFlavors.INFER_CAPTURE_ALL.getFlavor());
    String buildTargetName = inputBuildTarget.getFullyQualifiedName();
    /*
     * Build the given target and check that it succeeds.
     */
    workspace.runBuckCommand("build", buildTargetName).assertSuccess();
    /*
     * Check that building after clean will use the cache
     */
    workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
    workspace.runBuckCommand("build", buildTargetName).assertSuccess();
    BuckBuildLog buildLog = workspace.getBuildLog();
    ImmutableSet<BuildTarget> allInvolvedTargets = buildLog.getAllTargets();
    for (BuildTarget bt : allInvolvedTargets) {
      buildLog.assertTargetWasFetchedFromCache(bt);
    }
    assertTrue(
        Files.exists(
            workspace.getPath(
                BuildTargetPaths.getGenPath(
                    filesystem, inputBuildTarget, "infer-%s/infer-deps.txt"))));
    Set<String> loggedDeps =
        getUniqueLines(
            workspace.getFileContents(
                BuildTargetPaths.getGenPath(
                    filesystem, inputBuildTarget, "infer-%s/infer-deps.txt")));
    // Object-file names are sanitized the same way the build does, so the flavor names match.
    String sanitizedSimpleCpp = sanitize("simple.cpp.o");
    String sanitizedDepOne = sanitize("dep_one.c.o");
    String sanitizedDepTwo = sanitize("dep_two.c.o");
    String sanitizedSrcWithDeps = sanitize("src_with_deps.c.o");
    BuildTarget simpleCppTarget =
        BuildTargetFactory.newInstance(
            "//foo:simple_lib#default,infer-capture-" + sanitizedSimpleCpp);
    BuildTarget depOneTarget =
        BuildTargetFactory.newInstance(
            "//foo:diamond_dep_one#default,infer-capture-" + sanitizedDepOne);
    BuildTarget depTwoTarget =
        BuildTargetFactory.newInstance(
            "//foo:diamond_dep_two#default,infer-capture-" + sanitizedDepTwo);
    BuildTarget srcWithDepsTarget =
        BuildTargetFactory.newInstance(
            "//foo:binary_with_diamond_deps#default,infer-capture-" + sanitizedSrcWithDeps);
    AbsPath basePath = filesystem.getRootPath().toRealPath();
    Set<String> expectedOutput =
        ImmutableSet.of(
            InferLogLine.fromBuildTarget(
                    srcWithDepsTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, srcWithDepsTarget, "infer-out-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    depOneTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(filesystem, depOneTarget, "infer-out-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    depTwoTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(filesystem, depTwoTarget, "infer-out-%s"))
                        .getPath())
                .toString(),
            InferLogLine.fromBuildTarget(
                    simpleCppTarget,
                    basePath
                        .resolve(
                            BuildTargetPaths.getGenPath(
                                filesystem, simpleCppTarget, "infer-out-%s"))
                        .getPath())
                .toString());
    assertEquals(expectedOutput, loggedDeps);
  }
/**
 * Verifies that when a diamond-dep infer build is fully satisfied from the dir cache, the
 * per-source capture outputs (declared as runtime deps) are also materialized from the cache.
 */
@Test
public void testInferCxxBinaryWithDiamondDepsHasRuntimeDepsOfAllCaptureRulesWhenCacheHits()
    throws IOException {
  ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
  workspace.enableDirCache();
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  BuildTarget inputBuildTarget = BuildTargetFactory.newInstance("//foo:binary_with_diamond_deps");
  String inputBuildTargetName =
      inputBuildTarget
          .withFlavors(CxxInferEnhancer.InferFlavors.INFER_CAPTURE_ALL.getFlavor())
          .getFullyQualifiedName();
  /*
   * Build the given target and check that it succeeds.
   */
  workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
  /*
   * Check that building after clean will use the cache
   */
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
  BuckBuildLog buildLog = workspace.getBuildLog();
  for (BuildTarget buildTarget : buildLog.getAllTargets()) {
    buildLog.assertTargetWasFetchedFromCache(buildTarget);
  }
  /*
   * Check that runtime deps have been fetched from cache as well. The four identical
   * path-building stanzas from the original are factored into a single helper.
   */
  assertRuntimeDepCfgExists(
      workspace,
      filesystem,
      "//foo:simple_lib#default,infer-capture-" + sanitize("simple.cpp.o"),
      "captured/simple.cpp_captured/simple.cpp.cfg");
  assertRuntimeDepCfgExists(
      workspace,
      filesystem,
      "//foo:diamond_dep_one#default,infer-capture-" + sanitize("dep_one.c.o"),
      "captured/dep_one.c_captured/dep_one.c.cfg");
  assertRuntimeDepCfgExists(
      workspace,
      filesystem,
      "//foo:diamond_dep_two#default,infer-capture-" + sanitize("dep_two.c.o"),
      "captured/dep_two.c_captured/dep_two.c.cfg");
  assertRuntimeDepCfgExists(
      workspace,
      filesystem,
      "//foo:binary_with_diamond_deps#default,infer-capture-" + sanitize("src_with_deps.c.o"),
      "captured/src_with_deps.c_captured/src_with_deps.c.cfg");
}

/**
 * Asserts that the given infer-capture rule's captured .cfg file exists under the rule's
 * "infer-out-%s" gen directory.
 *
 * @param workspace the test workspace the build ran in
 * @param filesystem filesystem used to compute gen paths
 * @param fullyQualifiedTarget fully qualified build target string of the capture rule
 * @param capturedRelPath path of the expected cfg file, relative to the infer-out dir
 */
private static void assertRuntimeDepCfgExists(
    ProjectWorkspace workspace,
    ProjectFilesystem filesystem,
    String fullyQualifiedTarget,
    String capturedRelPath) {
  assertTrue(
      "This file was expected to exist because it's declared as runtime dep",
      Files.exists(
          workspace.getPath(
              BuildTargetPaths.getGenPath(
                      filesystem,
                      BuildTargetFactory.newInstance(fullyQualifiedTarget),
                      "infer-out-%s")
                  .resolve(capturedRelPath))));
}
/**
 * Verifies that the infer capture rule is not rebuilt when a header nothing includes changes,
 * but is rebuilt locally when a header that IS included changes.
 */
@Test
public void testInferCxxBinaryWithUnusedDepsDoesNotRebuildWhenUnusedHeaderChanges()
    throws IOException {
  ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
  workspace.enableDirCache();

  BuildTarget inputBuildTarget =
      BuildTargetFactory.newInstance("//foo:binary_with_unused_header");
  String inputBuildTargetName =
      inputBuildTarget
          .withFlavors(CxxInferEnhancer.InferFlavors.INFER_CAPTURE_ALL.getFlavor())
          .getFullyQualifiedName();

  CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
  CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
  CxxSourceRuleFactory ruleFactory =
      CxxSourceRuleFactoryHelper.of(
          workspace.getDestPath(), inputBuildTarget, cxxPlatform, cxxBuckConfig);
  BuildTarget captureTarget = ruleFactory.createInferCaptureBuildTarget("simple_one.cpp");

  // Initial build must succeed before we start mutating headers.
  workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();

  // Touching a header no source includes must not trigger a recompile: the capture rule
  // should come back from the cache via its manifest-based (dep-file) key.
  workspace.resetBuildLogFile();
  workspace.replaceFileContents("foo/unused_header.h", "int* input", "int* input, int* input2");
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
  BuckBuildLog.BuildLogEntry captureEntry = workspace.getBuildLog().getLogEntry(captureTarget);
  assertThat(
      captureEntry.getSuccessType(),
      Matchers.equalTo(Optional.of(BuildRuleSuccessType.FETCHED_FROM_CACHE_MANIFEST_BASED)));

  // Touching a header that IS included must force a local rebuild of the capture rule.
  workspace.resetBuildLogFile();
  workspace.replaceFileContents("foo/used_header.h", "int* input", "int* input, int* input2");
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
  workspace.getBuildLog().assertTargetBuiltLocally(captureTarget);
}
/**
 * Verifies that the infer-deps.txt emitted for a diamond-shaped dep graph lists each transitive
 * capture rule exactly once, with the exact expected tab-separated line format:
 * {@code <fully-qualified-target>\t<flavors>\t<absolute infer-out path>}.
 */
@Test
public void testInferCxxBinaryWithDiamondDepsEmitsAllTransitiveCaptureRulesOnce()
throws IOException {
ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
ProjectFilesystem filesystem = workspace.getProjectFileSystem();
BuildTarget inputBuildTarget =
BuildTargetFactory.newInstance("//foo:binary_with_diamond_deps")
.withFlavors(CxxInferEnhancer.InferFlavors.INFER_CAPTURE_ALL.getFlavor());
// Build the given target and check that it succeeds.
workspace.runBuckCommand("build", inputBuildTarget.getFullyQualifiedName()).assertSuccess();
// The aggregation step must have written the capture-rule listing to infer-deps.txt.
assertTrue(
Files.exists(
workspace.getPath(
BuildTargetPaths.getGenPath(
filesystem, inputBuildTarget, "infer-%s/infer-deps.txt"))));
// Collect the logged capture-rule lines (as a set, for order-insensitive comparison).
Set<String> loggedDeps =
getUniqueLines(
workspace.getFileContents(
BuildTargetPaths.getGenPath(
filesystem, inputBuildTarget, "infer-%s/infer-deps.txt")));
// Object-file names are sanitized the same way the capture flavors embed them.
String sanitizedSimpleCpp = sanitize("simple.cpp.o");
String sanitizedDepOne = sanitize("dep_one.c.o");
String sanitizedDepTwo = sanitize("dep_two.c.o");
String sanitizedSrcWithDeps = sanitize("src_with_deps.c.o");
// One capture target per source file in the diamond: the two middle deps, the shared leaf
// library, and the top-level binary's own source.
BuildTarget simpleCppTarget =
BuildTargetFactory.newInstance(
"//foo:simple_lib#default,infer-capture-" + sanitizedSimpleCpp);
BuildTarget depOneTarget =
BuildTargetFactory.newInstance(
"//foo:diamond_dep_one#default,infer-capture-" + sanitizedDepOne);
BuildTarget depTwoTarget =
BuildTargetFactory.newInstance(
"//foo:diamond_dep_two#default,infer-capture-" + sanitizedDepTwo);
BuildTarget srcWithDepsTarget =
BuildTargetFactory.newInstance(
"//foo:binary_with_diamond_deps#default,infer-capture-" + sanitizedSrcWithDeps);
// toRealPath() resolves symlinks so expected paths match what the build wrote to the log.
AbsPath basePath = filesystem.getRootPath().toRealPath();
// Build the expected lines by hand-concatenating the exact field layout; any deviation in
// separators or flavor formatting is intentionally caught by the equality check below.
Set<String> expectedOutput =
ImmutableSet.of(
srcWithDepsTarget.getFullyQualifiedName()
+ "\t"
+ "[default, infer-capture-"
+ sanitizedSrcWithDeps
+ "]\t"
+ basePath.resolve(
BuildTargetPaths.getGenPath(filesystem, srcWithDepsTarget, "infer-out-%s")),
depOneTarget.getFullyQualifiedName()
+ "\t"
+ "[default, infer-capture-"
+ sanitizedDepOne
+ "]\t"
+ basePath.resolve(
BuildTargetPaths.getGenPath(filesystem, depOneTarget, "infer-out-%s")),
depTwoTarget.getFullyQualifiedName()
+ "\t"
+ "[default, infer-capture-"
+ sanitizedDepTwo
+ "]\t"
+ basePath.resolve(
BuildTargetPaths.getGenPath(filesystem, depTwoTarget, "infer-out-%s")),
simpleCppTarget.getFullyQualifiedName()
+ "\t"
+ "[default, infer-capture-"
+ sanitizedSimpleCpp
+ "]\t"
+ basePath.resolve(
BuildTargetPaths.getGenPath(filesystem, simpleCppTarget, "infer-out-%s")));
assertEquals(expectedOutput, loggedDeps);
}
/**
 * Verifies that a source matching the blacklist regex is excluded from infer capture while
 * the remaining sources are still captured.
 */
@Test
public void testInferCxxBinarySkipsBlacklistedFiles() throws IOException {
  ProjectWorkspace workspace =
      InferHelper.setupCxxInferWorkspace(this, tmp, Optional.of(".*one\\.c"));
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  BuildTarget inputBuildTarget = BuildTargetFactory.newInstance("//foo:binary_with_chain_deps");
  String inputBuildTargetName =
      inputBuildTarget
          .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor())
          .getFullyQualifiedName();

  // Build the given target and check that it succeeds.
  workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();

  // chain_dep_one.c matches the ".*one\.c" blacklist, so its cfg must be absent.
  Path depOneCfg =
      workspace.getPath(
          BuildTargetPaths.getGenPath(
                  filesystem,
                  BuildTargetFactory.newInstance(
                      "//foo:chain_dep_one#default,infer-capture-"
                          + sanitize("chain_dep_one.c.o")),
                  "infer-out-%s")
              .resolve("captured/chain_dep_one.c_captured/chain_dep_one.c.cfg"));
  assertFalse("Cfg file for chain_dep_one.c should not exist", Files.exists(depOneCfg));

  // The non-blacklisted sources must still have been captured.
  Path depTwoCfg =
      workspace.getPath(
          BuildTargetPaths.getGenPath(
                  filesystem,
                  BuildTargetFactory.newInstance(
                      "//foo:chain_dep_two#default,infer-capture-"
                          + sanitize("chain_dep_two.c.o")),
                  "infer-out-%s")
              .resolve("captured/chain_dep_two.c_captured/chain_dep_two.c.cfg"));
  assertTrue("Expected cfg for chain_dep_two.c not found", Files.exists(depTwoCfg));

  Path topChainCfg =
      workspace.getPath(
          BuildTargetPaths.getGenPath(
                  filesystem,
                  BuildTargetFactory.newInstance("//foo:binary_with_chain_deps#infer-analyze"),
                  "infer-analysis-%s")
              .resolve("captured/top_chain.c_captured/top_chain.c.cfg"));
  assertTrue("Expected cfg for top_chain.c not found", Files.exists(topChainCfg));
}
/**
 * Counterpart to {@code testInferCxxBinarySkipsBlacklistedFiles}: with no blacklist regex,
 * every source in the chain must produce a captured .cfg file.
 */
@Test
public void testInferCxxBinaryRunsOnAllFilesWhenBlacklistIsNotSpecified() throws IOException {
ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
ProjectFilesystem filesystem = workspace.getProjectFileSystem();
BuildTarget inputBuildTarget = BuildTargetFactory.newInstance("//foo:binary_with_chain_deps");
String inputBuildTargetName =
inputBuildTarget
.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor())
.getFullyQualifiedName();
// Build the given target and check that it succeeds.
workspace.runBuckCommand("build", inputBuildTargetName).assertSuccess();
// Check that all cfgs have been created: one per source in the chain
// (chain_dep_one.c, chain_dep_two.c, and the binary's own top_chain.c).
assertTrue(
"Expected cfg for chain_dep_one.c not found",
Files.exists(
workspace.getPath(
BuildTargetPaths.getGenPath(
filesystem,
BuildTargetFactory.newInstance(
"//foo:chain_dep_one#default,infer-capture-"
+ sanitize("chain_dep_one.c.o")),
"infer-out-%s")
.resolve("captured/chain_dep_one.c_captured/chain_dep_one.c.cfg"))));
assertTrue(
"Expected cfg for chain_dep_two.c not found",
Files.exists(
workspace.getPath(
BuildTargetPaths.getGenPath(
filesystem,
BuildTargetFactory.newInstance(
"//foo:chain_dep_two#default,infer-capture-"
+ sanitize("chain_dep_two.c.o")),
"infer-out-%s")
.resolve("captured/chain_dep_two.c_captured/chain_dep_two.c.cfg"))));
// Note the top-level source's cfg lives under the infer-analyze flavor's
// "infer-analysis-%s" dir, not a capture rule's "infer-out-%s" dir.
assertTrue(
"Expected cfg for top_chain.c not found",
Files.exists(
workspace.getPath(
BuildTargetPaths.getGenPath(
filesystem,
BuildTargetFactory.newInstance(
"//foo:binary_with_chain_deps#infer-analyze"),
"infer-analysis-%s")
.resolve("captured/top_chain.c_captured/top_chain.c.cfg"))));
}
/**
 * Verifies that after an incremental change to the top-level source, transitive analysis
 * artifacts (even those at distance 2 from the binary) are restored from the dir cache.
 */
@Test
public void testInferCxxBinaryWithCachedDepsGetsAllItsTransitiveDeps() throws IOException {
ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
workspace.enableDirCache();
ProjectFilesystem filesystem = workspace.getProjectFileSystem();
BuildTarget inputBuildTarget =
BuildTargetFactory.newInstance("//foo:binary_with_chain_deps")
.withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());
/*
* Build the given target and check that it succeeds.
*/
workspace.runBuckCommand("build", inputBuildTarget.getFullyQualifiedName()).assertSuccess();
/*
* Check that building after clean will use the cache
*/
workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
workspace.runBuckCommand("build", inputBuildTarget.getFullyQualifiedName()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
for (BuildTarget buildTarget : buildLog.getAllTargets()) {
buildLog.assertTargetWasFetchedFromCache(buildTarget);
}
/*
* Check that if the file in the top target changes, then all the transitive deps will be
* fetched from the cache (even those that are not direct dependencies).
* Make sure there's the specs file of the dependency that has distance 2 from
* the binary target.
*/
String sourceName = "top_chain.c";
// A semantically trivial edit ("*p += 1" -> "*p += 10") invalidates only the top rule's key.
workspace.replaceFileContents("foo/" + sourceName, "*p += 1", "*p += 10");
workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
workspace.runBuckCommand("build", inputBuildTarget.getFullyQualifiedName()).assertSuccess();
// Check all the buildrules were fetched from the cache (and there's the specs file)
assertTrue(
"Expected specs file for func_ret_null() in chain_dep_two.c not found",
Files.exists(
workspace.getPath(
BuildTargetPaths.getGenPath(
filesystem,
BuildTargetFactory.newInstance("//foo:chain_dep_two#default,infer-analyze"),
"infer-analysis-%s/specs/mockedSpec.specs"))));
}
/**
 * Verifies that the infer merge step combines the bug reports of all targets in the chain
 * (chain_dep_two, chain_dep_one, binary_with_chain_deps) into a single report.json.
 */
@Test
public void testInferCxxBinaryMergesAllReportsOfDependencies() throws IOException {
  ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  BuildTarget inputBuildTarget =
      BuildTargetFactory.newInstance("//foo:binary_with_chain_deps")
          .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());

  // Build the given target and check that it succeeds.
  workspace.runBuckCommand("build", inputBuildTarget.getFullyQualifiedName()).assertSuccess();

  String reportPath =
      BuildTargetPaths.getGenPath(filesystem, inputBuildTarget, "infer-%s/report.json")
          .toString();
  List<Object> bugs = InferHelper.loadInferReport(workspace, reportPath);

  // Check that the merge step has merged a total of 3 bugs, one for each target.
  // Plain assertEquals replaces the deprecated Assert.assertThat(String, T, Matcher)
  // (deprecated since JUnit 4.13) and matches the assertion style used elsewhere in the file.
  assertEquals("3 bugs expected in " + reportPath + " not found", 3, bugs.size());
}
/**
 * Verifies that the infer-analyze rule writes specs_path_list.txt containing one absolute,
 * existing path per transitive dependency's specs directory.
 */
@Test
public void testInferCxxBinaryWritesSpecsListFilesOfTransitiveDependencies() throws IOException {
  ProjectWorkspace workspace = InferHelper.setupCxxInferWorkspace(this, tmp, Optional.empty());
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  BuildTarget inputBuildTarget =
      BuildTargetFactory.newInstance("//foo:binary_with_chain_deps")
          .withFlavors(CxxInferEnhancer.InferFlavors.INFER.getFlavor());

  // Build the given target and check that it succeeds.
  workspace.runBuckCommand("build", inputBuildTarget.getFullyQualifiedName()).assertSuccess();

  String specsPathList =
      BuildTargetPaths.getGenPath(
              filesystem,
              inputBuildTarget.withFlavors(
                  CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor()),
              "infer-analysis-%s/specs_path_list.txt")
          .toString();
  String out = workspace.getFileContents(specsPathList);

  // One path per line; String.split drops a trailing empty segment, so a final newline is fine.
  ImmutableList<Path> paths =
      FluentIterable.from(out.split("\n")).transform(input -> new File(input).toPath()).toList();

  // BUG FIX: the original used assertSame(msg, paths.size(), 2), which both swapped the
  // expected/actual arguments and compared autoboxed Integers by reference identity (it only
  // passed thanks to the small-Integer cache). assertEquals does the intended value check.
  assertEquals("There must be 2 paths in total", 2, paths.size());

  for (Path path : paths) {
    assertTrue("Path must be absolute", path.isAbsolute());
    assertTrue("Path must exist", Files.exists(path));
  }
}
/**
 * Verifies that resolving a different compiler binary from PATH (here, by flipping which faux
 * wrapper script is executable) changes the rule key and forces the link target to rebuild.
 */
@Test
public void testChangingCompilerPathForcesRebuild() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
workspace.setUp();
workspace.enableDirCache();
ProjectFilesystem filesystem = workspace.getProjectFileSystem();
BuildTarget target = BuildTargetFactory.newInstance("//foo:simple");
BuildTarget linkTarget = CxxDescriptionEnhancer.createCxxLinkTarget(target, Optional.empty());
// Get the real location of the compiler executable.
String executable = Platform.detect() == Platform.MACOS ? "clang++" : "g++";
Path executableLocation =
new ExecutableFinder()
.getOptionalExecutable(Paths.get(executable), EnvVariablesProvider.getSystemEnv())
.orElse(Paths.get("/usr/bin", executable));
// Write script as faux clang++/g++ binary. Note: deliberately NOT made executable yet, so
// the first build below resolves the second wrapper instead.
Path firstCompilerPath = tmp.newFolder("path1");
Path firstCompiler = firstCompilerPath.resolve(executable);
filesystem.writeContentsToPath(
"#!/bin/sh\n" + "exec " + executableLocation + " \"$@\"\n", firstCompiler);
// Write script as slightly different faux clang++/g++ binary (the trailing comment makes the
// file contents, and hence its hash, differ from the first wrapper).
Path secondCompilerPath = tmp.newFolder("path2");
Path secondCompiler = secondCompilerPath.resolve(executable);
filesystem.writeContentsToPath(
"#!/bin/sh\n"
+ "exec "
+ executableLocation
+ " \"$@\"\n"
+ "# Comment to make hash different.\n",
secondCompiler);
// Make the second faux clang++/g++ binary executable
MostFiles.makeExecutable(secondCompiler);
// Run two builds, each with different compiler "binaries". In the first
// instance, both binaries are in the PATH but the first binary is not
// marked executable so is not picked up.
workspace
.runBuckCommandWithEnvironmentOverridesAndContext(
workspace.getDestPath(),
Optional.empty(),
ImmutableMap.of(
"PATH",
firstCompilerPath
+ pathSeparator
+ secondCompilerPath
+ pathSeparator
+ EnvVariablesProvider.getSystemEnv().get("PATH")),
"build",
target.getFullyQualifiedName())
.assertSuccess();
workspace.resetBuildLogFile();
// Now, make the first faux clang++/g++ binary executable. In this second
// instance, both binaries are still in the PATH but the first binary is
// now marked executable and so is picked up; causing a rebuild.
MostFiles.makeExecutable(firstCompiler);
workspace
.runBuckCommandWithEnvironmentOverridesAndContext(
workspace.getDestPath(),
Optional.empty(),
ImmutableMap.of(
"PATH",
firstCompilerPath
+ pathSeparator
+ secondCompilerPath
+ pathSeparator
+ EnvVariablesProvider.getSystemEnv().get("PATH")),
"build",
target.getFullyQualifiedName())
.assertSuccess();
// Make sure the binary change caused a rebuild.
workspace.getBuildLog().assertTargetBuiltLocally(linkTarget);
}
/**
 * Verifies that when binary caching is not enabled, the link map is not served from the dir
 * cache: after a clean, the binary is rebuilt locally and the link map is regenerated.
 */
@Test
public void testLinkMapIsNotCached() throws Exception {
// Currently we only support Apple platforms for generating link maps.
assumeTrue(Platform.detect() == Platform.MACOS);
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
workspace.setUp();
workspace.enableDirCache();
ProjectFilesystem filesystem = workspace.getProjectFileSystem();
BuildTarget target = BuildTargetFactory.newInstance("//foo:simple");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
Path outputPath = workspace.getPath(BuildTargetPaths.getGenPath(filesystem, target, "%s"));
/*
* Rebuild after a clean. Since the binary is not cached in this configuration, the target
* must be rebuilt locally (not fetched from the cache) and the link map regenerated.
*/
workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
buildLog.assertTargetBuiltLocally(target);
assertThat(Files.exists(Paths.get(outputPath + "-LinkMap.txt")), is(true));
}
/**
 * Verifies that with cxx.cache_binaries=true the binary comes back from the dir cache after a
 * clean, and the cached artifact includes the generated link map.
 */
@Test
public void testLinkMapIsCached() throws Exception {
  // Currently we only support Apple platforms for generating link maps.
  assumeTrue(Platform.detect() == Platform.MACOS);

  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
  workspace.setUp();
  workspace.enableDirCache();
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  BuildTarget target = BuildTargetFactory.newInstance("//foo:simple");

  workspace
      .runBuckCommand("build", "-c", "cxx.cache_binaries=true", target.getFullyQualifiedName())
      .assertSuccess();
  Path linkOutput = workspace.getPath(BuildTargetPaths.getGenPath(filesystem, target, "%s"));

  // After a clean, the binary must be served from the cache...
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  workspace
      .runBuckCommand("build", "-c", "cxx.cache_binaries=true", target.toString())
      .assertSuccess();
  workspace.getBuildLog().assertTargetWasFetchedFromCache(target);

  // ...and the link map must have been cached alongside it.
  assertThat(Files.exists(Paths.get(linkOutput + "-LinkMap.txt")), is(true));
}
/**
 * End-to-end incrementality check for a simple binary: clean build builds everything locally,
 * a no-op rebuild matches all rule keys, a source edit recompiles/relinks without re-running
 * the header rules, and a compile error fails both the compile and the top-level targets.
 */
@Test
public void testSimpleCxxBinaryBuilds() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
workspace.setUp();
CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
BuildTarget target = BuildTargetFactory.newInstance("//foo:simple");
CxxSourceRuleFactory cxxSourceRuleFactory =
CxxSourceRuleFactoryHelper.of(workspace.getDestPath(), target, cxxPlatform, cxxBuckConfig);
BuildTarget binaryTarget = CxxDescriptionEnhancer.createCxxLinkTarget(target, Optional.empty());
String sourceName = "simple.cpp";
String sourceFull = "foo/" + sourceName;
BuildTarget compileTarget = cxxSourceRuleFactory.createCompileBuildTarget(sourceName);
BuildTarget headerSymlinkTreeTarget =
CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
target, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
BuildTarget aggregatedDepsTarget =
cxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
// Do a clean build, verify that it succeeds, and check that all expected targets built
// successfully.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
assertEquals(
ImmutableSet.<BuildTarget>builder()
.add(aggregatedDepsTarget, headerSymlinkTreeTarget, compileTarget, binaryTarget, target)
.build(),
buildLog.getAllTargets());
buildLog.assertTargetBuiltLocally(aggregatedDepsTarget);
buildLog.assertTargetBuiltLocally(headerSymlinkTreeTarget);
buildLog.assertTargetBuiltLocally(compileTarget);
buildLog.assertTargetBuiltLocally(binaryTarget);
buildLog.assertTargetBuiltLocally(target);
// Clear for new build.
workspace.resetBuildLogFile();
// Check that running a build again results in no builds since everything is up to
// date.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(ImmutableSet.of(target, binaryTarget), buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(binaryTarget);
buildLog.assertTargetHadMatchingRuleKey(target);
// Clear for new build.
workspace.resetBuildLogFile();
// Update the source file.
workspace.replaceFileContents(sourceFull, "{}", "{ return 0; }");
// Check that running a build again makes the source get recompiled and the binary
// re-linked, but does not cause the header rules to re-run.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(
ImmutableSet.<BuildTarget>builder()
.add(aggregatedDepsTarget, compileTarget, binaryTarget, headerSymlinkTreeTarget, target)
.build(),
buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(aggregatedDepsTarget);
buildLog.assertTargetBuiltLocally(compileTarget);
// The binary may be BUILT_LOCALLY or matched by dep-file/input key — just not by rule key.
assertThat(
buildLog.getLogEntry(binaryTarget).getSuccessType().get(),
not(Matchers.equalTo(BuildRuleSuccessType.MATCHING_RULE_KEY)));
// Clear for new build.
workspace.resetBuildLogFile();
// Update the source file.
workspace.replaceFileContents(sourceFull, "{ return 0; }", "won't compile");
// Check that an edit that breaks compilation fails the build, with the compile and
// top-level targets both recorded as FAIL (header rules still need not re-run).
workspace.runBuckCommand("build", target.toString()).assertFailure();
buildLog = workspace.getBuildLog();
assertEquals(
ImmutableSet.<BuildTarget>builder()
.add(aggregatedDepsTarget, compileTarget, binaryTarget, headerSymlinkTreeTarget, target)
.build(),
buildLog.getAllTargets());
buildLog.assertTargetHadMatchingRuleKey(aggregatedDepsTarget);
assertThat(
buildLog.getLogEntry(binaryTarget).getStatus(), Matchers.equalTo(BuildRuleStatus.FAIL));
assertThat(buildLog.getLogEntry(target).getStatus(), Matchers.equalTo(BuildRuleStatus.FAIL));
}
/** A source that includes a header that is not declared anywhere must fail to build. */
@Test
public void testSimpleCxxBinaryWithoutHeader() throws IOException {
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
  workspace.setUp();
  String target = "//foo:simple_without_header";
  workspace.runBuckCommand("build", target).assertFailure();
}
/**
 * Verifies that editing a private header recompiles the source and relinks the binary, while
 * the header symlink tree matches on its input key (its contents layout is unchanged).
 */
@Test
public void testSimpleCxxBinaryWithHeader() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
workspace.setUp();
CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
BuildTarget target = BuildTargetFactory.newInstance("//foo:simple_with_header");
CxxSourceRuleFactory cxxSourceRuleFactory =
CxxSourceRuleFactoryHelper.of(workspace.getDestPath(), target, cxxPlatform, cxxBuckConfig);
BuildTarget binaryTarget = CxxDescriptionEnhancer.createCxxLinkTarget(target, Optional.empty());
String sourceName = "simple_with_header.cpp";
String headerName = "simple_with_header.h";
String headerFull = "foo/" + headerName;
BuildTarget compileTarget = cxxSourceRuleFactory.createCompileBuildTarget(sourceName);
BuildTarget headerSymlinkTreeTarget =
CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
target, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
BuildTarget aggregatedDepsTarget =
cxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
// Do a clean build, verify that it succeeds, and check that all expected targets built
// successfully.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
assertEquals(
ImmutableSet.of(
aggregatedDepsTarget, headerSymlinkTreeTarget, compileTarget, binaryTarget, target),
buildLog.getAllTargets());
buildLog.assertTargetBuiltLocally(aggregatedDepsTarget);
buildLog.assertTargetBuiltLocally(headerSymlinkTreeTarget);
buildLog.assertTargetBuiltLocally(compileTarget);
buildLog.assertTargetBuiltLocally(binaryTarget);
buildLog.assertTargetBuiltLocally(target);
// Clear for new build.
workspace.resetBuildLogFile();
// Update the header file.
workspace.replaceFileContents(headerFull, "blah = 5", "blah = 6");
// Check that running a build again makes the source get recompiled and the binary
// re-linked, but does not cause the header rules to re-run.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
assertEquals(
ImmutableSet.of(
headerSymlinkTreeTarget, aggregatedDepsTarget, compileTarget, binaryTarget, target),
buildLog.getAllTargets());
// The symlink tree's layout didn't change, so it matches on its input-based key even
// though the header contents did.
buildLog.assertTargetHadMatchingInputRuleKey(headerSymlinkTreeTarget);
buildLog.assertTargetBuiltLocally(aggregatedDepsTarget);
buildLog.assertTargetBuiltLocally(compileTarget);
assertThat(
buildLog.getLogEntry(binaryTarget).getSuccessType().get(),
not(Matchers.equalTo(BuildRuleSuccessType.MATCHING_RULE_KEY)));
}
/** Including a library's header without depending on that library must fail the build. */
@Test
public void testSimpleCxxBinaryMissingDependencyOnCxxLibraryWithHeader() throws IOException {
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
  workspace.setUp();
  String target = "//foo:binary_without_dep";
  workspace.runBuckCommand("build", target).assertFailure();
}
/**
 * Incrementality check across a binary -> library edge: a change to the library's exported
 * header rebuilds both the dep's and the binary's compilations; a change to the library's
 * source rebuilds the dep and relinks the binary but leaves the binary's compilation cached.
 */
@Test
public void testSimpleCxxBinaryWithDependencyOnCxxLibraryWithHeader() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
workspace.setUp();
// Setup variables pointing to the sources and targets of the top-level binary rule.
CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(workspace.asCell().getBuckConfig());
CxxPlatform cxxPlatform = CxxPlatformUtils.build(cxxBuckConfig);
BuildTarget target = BuildTargetFactory.newInstance("//foo:binary_with_dep");
CxxSourceRuleFactory cxxSourceRuleFactory =
CxxSourceRuleFactoryHelper.of(workspace.getDestPath(), target, cxxPlatform, cxxBuckConfig);
BuildTarget binaryTarget = CxxDescriptionEnhancer.createCxxLinkTarget(target, Optional.empty());
String sourceName = "foo.cpp";
BuildTarget compileTarget = cxxSourceRuleFactory.createCompileBuildTarget(sourceName);
BuildTarget headerSymlinkTreeTarget =
CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
target, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
BuildTarget aggregatedDepsTarget =
cxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
// Setup variables pointing to the sources and targets of the library dep.
BuildTarget depTarget = BuildTargetFactory.newInstance("//foo:library_with_header");
CxxSourceRuleFactory depCxxSourceRuleFactory =
CxxSourceRuleFactoryHelper.of(
workspace.getDestPath(), depTarget, cxxPlatform, cxxBuckConfig);
String depSourceName = "bar.cpp";
String depSourceFull = "foo/" + depSourceName;
String depHeaderName = "bar.h";
String depHeaderFull = "foo/" + depHeaderName;
BuildTarget depCompileTarget = depCxxSourceRuleFactory.createCompileBuildTarget(depSourceName);
BuildTarget depHeaderSymlinkTreeTarget =
CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
depTarget, HeaderVisibility.PRIVATE, cxxPlatform.getFlavor());
BuildTarget depHeaderExportedSymlinkTreeTarget =
CxxDescriptionEnhancer.createHeaderSymlinkTreeTarget(
depTarget,
HeaderVisibility.PUBLIC,
CxxPlatformUtils.getHeaderModeForDefaultPlatform(tmp.getRoot()).getFlavor());
BuildTarget depArchiveTarget =
CxxDescriptionEnhancer.createStaticLibraryBuildTarget(
depTarget, cxxPlatform.getFlavor(), PicType.PDC);
BuildTarget depAggregatedDepsTarget =
depCxxSourceRuleFactory.createAggregatedPreprocessDepsBuildTarget();
ImmutableList.Builder<BuildTarget> builder = ImmutableList.builder();
builder.add(
depAggregatedDepsTarget,
depHeaderSymlinkTreeTarget,
depHeaderExportedSymlinkTreeTarget,
depCompileTarget,
depArchiveTarget,
depTarget,
aggregatedDepsTarget,
headerSymlinkTreeTarget,
compileTarget,
binaryTarget,
target);
// Do a clean build, verify that it succeeds, and check that all expected targets built
// successfully.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
BuckBuildLog buildLog = workspace.getBuildLog();
assertThat(
buildLog.getAllTargets(),
containsInAnyOrder(builder.build().toArray(new BuildTarget[] {})));
buildLog.assertTargetBuiltLocally(depHeaderSymlinkTreeTarget);
buildLog.assertTargetBuiltLocally(depCompileTarget);
buildLog.assertTargetBuiltLocally(depArchiveTarget);
buildLog.assertTargetBuiltLocally(depTarget);
buildLog.assertTargetBuiltLocally(headerSymlinkTreeTarget);
buildLog.assertTargetBuiltLocally(compileTarget);
buildLog.assertTargetBuiltLocally(binaryTarget);
buildLog.assertTargetBuiltLocally(target);
// Clear for new build.
workspace.resetBuildLogFile();
// Update the library's exported header: both the dep's and the binary's compilations see
// the change, so both must rebuild.
workspace.replaceFileContents(depHeaderFull, "int x", "int y");
// Check that running a build again makes the source get recompiled and the binary
// re-linked, but does not cause the header rules to re-run.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
builder = ImmutableList.builder();
builder.add(
depAggregatedDepsTarget,
depCompileTarget,
depArchiveTarget,
depTarget,
depHeaderSymlinkTreeTarget,
depHeaderExportedSymlinkTreeTarget,
headerSymlinkTreeTarget,
aggregatedDepsTarget,
compileTarget,
binaryTarget,
target);
assertThat(
buildLog.getAllTargets(),
containsInAnyOrder(builder.build().toArray(new BuildTarget[] {})));
buildLog.assertTargetBuiltLocally(depAggregatedDepsTarget);
buildLog.assertTargetBuiltLocally(depCompileTarget);
buildLog.assertTargetHadMatchingInputRuleKey(depArchiveTarget);
buildLog.assertTargetHadMatchingRuleKey(depHeaderSymlinkTreeTarget);
buildLog.assertTargetHadMatchingInputRuleKey(depHeaderExportedSymlinkTreeTarget);
buildLog.assertTargetHadMatchingRuleKey(headerSymlinkTreeTarget);
buildLog.assertTargetHadMatchingRuleKey(depTarget);
buildLog.assertTargetBuiltLocally(aggregatedDepsTarget);
buildLog.assertTargetBuiltLocally(compileTarget);
assertThat(
buildLog.getLogEntry(binaryTarget).getSuccessType().get(),
not(Matchers.equalTo(BuildRuleSuccessType.MATCHING_RULE_KEY)));
// Clear for new build.
workspace.resetBuildLogFile();
// Update the library's source only: the dep recompiles and re-archives and the binary
// relinks, but the binary's own compilation stays cached.
workspace.replaceFileContents(depSourceFull, "x + 5", "x + 6");
// Check that running a build again makes the source get recompiled and the binary
// re-linked, but does not cause the header rules to re-run.
workspace.runBuckCommand("build", target.toString()).assertSuccess();
buildLog = workspace.getBuildLog();
builder = ImmutableList.builder();
builder.add(
depAggregatedDepsTarget,
depCompileTarget,
depArchiveTarget,
depTarget,
depHeaderExportedSymlinkTreeTarget,
depHeaderSymlinkTreeTarget,
compileTarget,
binaryTarget,
target);
assertThat(
buildLog.getAllTargets(),
containsInAnyOrder(builder.build().toArray(new BuildTarget[] {})));
buildLog.assertTargetHadMatchingRuleKey(depAggregatedDepsTarget);
buildLog.assertTargetBuiltLocally(depCompileTarget);
buildLog.assertTargetBuiltLocally(depArchiveTarget);
buildLog.assertTargetHadMatchingRuleKey(depTarget);
buildLog.assertTargetHadMatchingRuleKey(compileTarget);
buildLog.assertTargetBuiltLocally(binaryTarget);
}
@Test
public void testIncrementalThinLtoBinaryWithDependency() throws IOException {
  assumeThat(Platform.detect(), oneOf(Platform.LINUX, Platform.MACOS));
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "incremental_thinlto", tmp);
  workspace.setUp();
  workspace.runBuckBuild("//:bin#incremental-thinlto");

  // The thin-index step should have emitted per-object index and import files.
  Path indexResult =
      workspace
          .getGenPath(
              BuildTargetFactory.newInstance("//:bin#incremental-thinlto,thinindex"), "%s")
          .resolve("thinlto.indices");
  assertTrue(Files.exists(indexResult.resolve("main.cpp.o.thinlto.bc")));
  assertTrue(Files.exists(indexResult.resolve("main.cpp.o.imports")));

  String indexContents =
      new String(
          Files.readAllBytes(indexResult.resolve("main.cpp.o.thinlto.bc")),
          StandardCharsets.UTF_8);

  // Platform-specific linker flags used to drive index generation.
  if (Platform.detect() == Platform.MACOS) {
    assertThat(indexContents, containsString("-Wl,-thinlto_emit_indexes"));
    assertThat(indexContents, containsString("-Wl,-thinlto_emit_imports"));
    assertThat(
        indexContents,
        containsString(
            "-Xlinker -thinlto_new_prefix -Xlinker "
                + BuildTargetPaths.getGenPath(
                        workspace.getProjectFileSystem(),
                        BuildTargetFactory.newInstance("//:bin#incremental-thinlto,thinindex"),
                        "%s")
                    .resolve("thinlto.indices")));
  } else if (Platform.detect() == Platform.LINUX) {
    assertThat(
        indexContents, containsString("-Wl,-plugin-opt,thinlto-index-only=thinlto.objects"));
    assertThat(indexContents, containsString("-Wl,-plugin-opt,thinlto-emit-imports-files"));
    assertThat(
        indexContents,
        containsString(
            "-Xlinker -plugin-opt -Xlinker 'thinlto-prefix-replace=;"
                + BuildTargetPaths.getGenPath(
                        workspace.getProjectFileSystem(),
                        BuildTargetFactory.newInstance("//:bin#incremental-thinlto,thinindex"),
                        "%s")
                    .resolve("thinlto.indices")));
  }

  // Since we don't have the full thinLTO toolchain, we're just going to verify that the
  // -fthinlto-index
  // parameter is populated correctly.
  Path optResult =
      workspace.getScratchPath(
          BuildTargetFactory.newInstance(
              "//:bin#default,incremental-thinlto,optimize-main.cpp.o.o55eba575"),
          "%s");
  // FIX: assert existence BEFORE reading; previously readAllBytes ran first and
  // would throw on a missing file, making the exists assertion unreachable.
  assertThat(Files.exists(optResult.resolve("ppandcompile.argsfile")), Matchers.equalTo(true));
  String optContents =
      new String(
          Files.readAllBytes(optResult.resolve("ppandcompile.argsfile")), StandardCharsets.UTF_8);
  assertThat(
      optContents,
      containsString(
          "-fthinlto-index="
              + BuildTargetPaths.getGenPath(
                  workspace.getProjectFileSystem(),
                  BuildTargetFactory.newInstance("//:bin#incremental-thinlto,thinindex"),
                  "%s")
              + "/thinlto.indices/"
              + BuildTargetPaths.getGenPath(
                  workspace.getProjectFileSystem(),
                  BuildTargetFactory.newInstance(
                      "//:bin#compile-main.cpp.oa5b6a1ba,default,incremental-thinlto"),
                  "%s")
              + "/main.cpp.o.thinlto.bc"));
}
@Test
public void testCxxBinaryDepfileBuildWithChangedHeader() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(
          this, "cxx_binary_depfile_build_with_changed_header", tmp);
  ws.setUp();
  String compileTarget = "//:bin#compile-" + sanitize("bin.c.o") + ",default";

  // First build: everything compiles and links locally.
  ws.runBuckCommand("build", "//:bin").assertSuccess();
  BuckBuildLog log = ws.getBuildLog();
  log.assertTargetBuiltLocally("//:bin#binary");
  log.assertTargetBuiltLocally(compileTarget);
  log.assertTargetBuiltLocally("//:lib1#default,static");

  // Edit a header; the compile rule should be skipped via its depfile rule key.
  ws.resetBuildLogFile();
  ws.replaceFileContents("lib2.h", "hello", "world");
  ws.runBuckCommand("build", "//:bin").assertSuccess();
  log = ws.getBuildLog();
  log.assertTargetBuiltLocally("//:bin#binary");
  log.assertTargetHadMatchingDepfileRuleKey(compileTarget);
  log.assertTargetBuiltLocally("//:lib1#default,static");
}
@Test
public void testCxxBinaryDepfileBuildWithAddedHeader() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(
          this, "cxx_binary_depfile_build_with_added_header", tmp);
  ws.setUp();
  String compileTarget = "//:bin#compile-" + sanitize("bin.c.o") + ",default";

  // First build: everything compiles and links locally.
  ws.runBuckCommand("build", "//:bin").assertSuccess();
  BuckBuildLog log = ws.getBuildLog();
  log.assertTargetBuiltLocally("//:bin#binary");
  log.assertTargetBuiltLocally(compileTarget);
  log.assertTargetBuiltLocally("//:lib1#default,static");

  // Declare an extra header in the BUCK file; nothing used by the compile
  // changed, so rules should hit their input/depfile rule keys.
  ws.resetBuildLogFile();
  ws.replaceFileContents("BUCK", "[\"lib1.h\"]", "[\"lib1.h\", \"lib2.h\"]");
  ws.runBuckCommand("build", "//:bin").assertSuccess();
  log = ws.getBuildLog();
  log.assertTargetHadMatchingInputRuleKey("//:bin#binary");
  log.assertTargetHadMatchingDepfileRuleKey(compileTarget);
  log.assertTargetHadMatchingInputRuleKey("//:lib1#default,static");
}
@Test
public void testCxxBinaryWithGeneratedSourceAndHeader() throws IOException {
  // The "simple" scenario's binary_without_dep target is expected not to build.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
  ws.setUp();
  ws.runBuckCommand("build", "//foo:binary_without_dep").assertFailure();
}
@Test
public void testHeaderNamespace() throws IOException {
  // Smoke test: the header_namespace scenario builds cleanly.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "header_namespace", tmp);
  ws.setUp();
  ws.runBuckCommand("build", "//:test").assertSuccess();
}
@Test
public void resolveHeadersBehindSymlinkTreesInError() throws IOException {
  // Breaks lib2.h so compilation fails, then checks that the compiler error
  // refers to the original source paths (lib1.h, bin.h, bin.cpp, lib2.h)
  // rather than the buck-out symlink-tree copies of those headers.
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "resolved", tmp);
  workspace.setUp();
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  // Invalid pragma guarantees a preprocessing error in lib2.h line 1.
  workspace.writeContentsToPath("#invalid_pragma", "lib2.h");
  BuildTarget target = BuildTargetFactory.newInstance("//:bin");
  ProcessResult result = workspace.runBuckCommand("build", target.toString());
  result.assertFailure();
  // Verify that the preprocessed source contains no references to the symlink tree used to
  // setup the headers.
  String error = result.getStderr();
  BuckPaths buckPaths = filesystem.getBuckPaths();
  // Neither the scratch dir nor the gen dir may leak into the diagnostics.
  assertThat(error, not(containsString(buckPaths.getScratchDir().toAbsolutePath().toString())));
  assertThat(error, not(containsString(buckPaths.getGenDir().toString())));
  // The include chain must be reported in terms of the real files.
  assertThat(error, containsString("In file included from lib1.h:1"));
  assertThat(error, containsString("from bin.h:1"));
  assertThat(error, containsString("from bin.cpp:1:"));
  assertThat(error, containsString("lib2.h:1:2: error: invalid preprocessing"));
}
@Test
public void ndkCxxPlatforms() throws IOException {
  // Builds the "simple" scenario once per configured Android NDK ABI flavor.
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
  workspace.setUp();
  AssumeAndroidPlatform.get(workspace).assumeNdkIsAvailable();
  // Only include the legacy "arm" ABI when the installed NDK still supports it
  // (the name suggests NDKs prior to r17 — TODO confirm that mapping).
  boolean isPriorNdk17 = AssumeAndroidPlatform.get(workspace).isArmAvailable();
  String armAbiString = isPriorNdk17 ? "arm, " : "";
  workspace.writeContentsToPath(
      "[ndk]\n"
          + " gcc_version = 4.9\n"
          + (" cpu_abis = " + armAbiString + "armv7, arm64, x86\n")
          + " app_platform = android-21\n",
      ".buckconfig");
  if (isPriorNdk17) {
    workspace.runBuckCommand("build", "//foo:simple#android-arm").assertSuccess();
  }
  workspace.runBuckCommand("build", "//foo:simple#android-armv7").assertSuccess();
  workspace.runBuckCommand("build", "//foo:simple#android-arm64").assertSuccess();
  workspace.runBuckCommand("build", "//foo:simple#android-x86").assertSuccess();
}
@Test
public void linkerFlags() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "linker_flags", tmp);
  ws.setUp();

  // Targets carrying "--bad-flag" (directly, via exported flags, or via a
  // prebuilt library) must fail to link; the plain library dep must succeed.
  ws.runBuckBuild("//:binary_with_linker_flag").assertFailure("--bad-flag");
  ws.runBuckBuild("//:binary_with_library_dep").assertSuccess();
  ws.runBuckBuild("//:binary_with_exported_flags_library_dep").assertFailure("--bad-flag");
  ws.runBuckBuild("//:binary_with_prebuilt_library_dep").assertFailure("--bad-flag");

  // Build binary that has unresolved symbols. Normally this would fail, but should work
  // with the proper linker flag.
  Platform platform = Platform.detect();
  if (platform == Platform.MACOS) {
    ws.runBuckBuild("//:binary_with_unresolved_symbols_macos").assertSuccess();
  } else if (platform == Platform.LINUX) {
    ws.runBuckBuild("//:binary_with_unresolved_symbols_linux").assertSuccess();
  }
}
/** Runs the shared platform-linker-flag assertions for the given platform suffix. */
private void platformLinkerFlags(ProjectWorkspace ws, String suffix) {
  ws.runBuckBuild("//:binary_matches_default_exactly_" + suffix).assertSuccess();
  ws.runBuckBuild("//:binary_matches_default_" + suffix).assertSuccess();
  // The no-match variant must fail with an unresolved-reference style error.
  ProcessResult noMatch = ws.runBuckBuild("//:binary_no_match_" + suffix);
  noMatch.assertFailure();
  assertThat(noMatch.getStderr(), containsString("reference"));
  ws.runBuckBuild("//:binary_with_library_matches_default_" + suffix).assertSuccess();
  ws.runBuckBuild("//:binary_with_prebuilt_library_matches_default_" + suffix).assertSuccess();
}
@Test
public void platformLinkerFlags() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "platform_linker_flags", tmp);
  ws.setUp();
  // Build binary that has unresolved symbols. Normally this would fail, but should work
  // with the proper linker flag.
  Platform platform = Platform.detect();
  if (platform == Platform.MACOS) {
    platformLinkerFlags(ws, "macos");
  } else if (platform == Platform.LINUX) {
    platformLinkerFlags(ws, "linux");
  }
}
@Test
public void perFileFlagsUsedForPreprocessing() throws IOException {
  // Per-file preprocessor flags should be honored; the scenario only builds if they are.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "preprocessing_per_file_flags", tmp);
  ws.setUp();
  ws.runBuckBuild("//:bin").assertSuccess();
}
@Test
public void correctPerFileFlagsUsedForCompilation() throws IOException {
  // The target configured with correct per-file compile flags builds fine.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "compiling_per_file_flags", tmp);
  ws.setUp();
  ws.runBuckBuild("//:working-bin").assertSuccess();
}
@Test
public void incorrectPerFileFlagsUsedForCompilation() throws IOException {
  // The sibling target with broken per-file compile flags must fail to build.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "compiling_per_file_flags", tmp);
  ws.setUp();
  ws.runBuckBuild("//:broken-bin").assertFailure();
}
@Test
public void platformPreprocessorFlags() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "platform_preprocessor_flags", tmp);
  ws.setUp();
  ws.runBuckBuild("//:binary_matches_default_exactly").assertSuccess();
  ws.runBuckBuild("//:binary_matches_default").assertSuccess();
  // The non-matching platform hits an #error directive in the source.
  ProcessResult noMatch = ws.runBuckBuild("//:binary_no_match");
  noMatch.assertFailure();
  assertThat(noMatch.getStderr(), containsString("#error"));
  ws.runBuckBuild("//:binary_with_library_matches_default").assertSuccess();
}
@Test
public void platformCompilerFlags() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "platform_compiler_flags", tmp);
  ws.setUp();
  // -Werror promotes the scenario's warning into a hard failure for no-match.
  ws.writeContentsToPath("[cxx]\n cxxflags = -Wall -Werror", ".buckconfig");
  ws.runBuckBuild("//:binary_matches_default_exactly").assertSuccess();
  ws.runBuckBuild("//:binary_matches_default").assertSuccess();
  ProcessResult noMatch = ws.runBuckBuild("//:binary_no_match");
  noMatch.assertFailure();
  assertThat(
      noMatch.getStderr(), Matchers.allOf(containsString("non-void"), containsString("function")));
  ws.runBuckBuild("//:binary_with_library_matches_default").assertSuccess();
}
@Test
public void platformHeaders() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "platform_headers", tmp);
  ws.setUp();
  ws.writeContentsToPath("[cxx]\n cxxflags = -Wall -Werror", ".buckconfig");
  ws.runBuckBuild("//:binary_matches_default_exactly").assertSuccess();
  ws.runBuckBuild("//:binary_matches_default").assertSuccess();
  // Without the platform-specific header, compilation fails mentioning it.
  ProcessResult noMatch = ws.runBuckBuild("//:binary_no_match");
  noMatch.assertFailure();
  assertThat(noMatch.getStderr(), containsString("header.hpp"));
  ws.runBuckBuild("//:binary_with_library_matches_default").assertSuccess();
}
@Test
public void platformSources() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "platform_sources", tmp);
  ws.setUp();
  ws.writeContentsToPath("[cxx]\n cxxflags = -Wall -Werror", ".buckconfig");
  ws.runBuckBuild("//:binary_matches_default_exactly").assertSuccess();
  ws.runBuckBuild("//:binary_matches_default").assertSuccess();
  // Without the platform-specific source, linking fails mentioning answer().
  ProcessResult noMatch = ws.runBuckBuild("//:binary_no_match");
  noMatch.assertFailure();
  assertThat(noMatch.getStderr(), containsString("answer()"));
  ws.runBuckBuild("//:binary_with_library_matches_default").assertSuccess();
}
@Test
public void buildABinaryIfACxxLibraryDepOnlyDeclaresHeaders() throws IOException {
  // A header-only cxx_library dependency must not break binary linking.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_binary_headers_only", tmp);
  ws.setUp();
  ws.runBuckBuild("//:binary").assertSuccess();
}
@Test
public void buildABinaryIfACxxBinaryTransitivelyDepOnlyDeclaresHeaders() throws IOException {
  // A header-only library reached transitively must not break binary linking.
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_binary_headers_only", tmp);
  workspace.setUp();
  ProcessResult result = workspace.runBuckBuild("//:transitive");
  // Removed leftover debug prints of stdout/stderr; assertSuccess already
  // includes the command output in its failure message.
  result.assertSuccess();
}
@Test
public void buildBinaryWithSharedDependencies() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "shared_library", tmp);
  ws.setUp();
  // Two shared deps producing the same output filename is a build error.
  ProcessResult res = ws.runBuckBuild("//:clowny_binary");
  res.assertFailure();
  assertThat(
      res.getStderr(), containsString("in the dependencies have the same output filename"));
}
@Test
public void buildBinaryWithPerFileFlags() throws IOException {
  // Scenario relies on macOS-specific per-file flags.
  assumeThat(Platform.detect(), is(Platform.MACOS));
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "per_file_flags", tmp);
  ws.setUp();
  ws.runBuckBuild("//:binary").assertSuccess();
}
@Test
public void runBinaryUsingSharedLinkStyle() throws IOException {
  assumeThat(Platform.detect(), oneOf(Platform.LINUX, Platform.MACOS));
  // "run" must locate the shared libraries of a shared-link-style binary.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "shared_link_style", tmp);
  ws.setUp();
  ws.runBuckCommand("run", "//:bar").assertSuccess();
}
@Test
public void genruleUsingBinaryUsingSharedLinkStyle() throws IOException {
  assumeThat(Platform.detect(), oneOf(Platform.LINUX, Platform.MACOS));
  // A genrule that executes a shared-link-style binary must also work.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "shared_link_style", tmp);
  ws.setUp();
  ws.runBuckBuild("//:gen").assertSuccess();
}
@Test
public void shBinaryAsLinker() throws IOException {
  assumeThat(Platform.detect(), oneOf(Platform.LINUX, Platform.MACOS));
  // cxx.ld may point at an sh_binary wrapper instead of a real linker.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "step_test", tmp);
  ws.setUp();
  ws.runBuckBuild("-c", "cxx.ld=//:cxx", "//:binary_with_unused_header").assertSuccess();
}
@Test
public void buildBinaryUsingStaticPicLinkStyle() throws IOException {
  assumeThat(Platform.detect(), oneOf(Platform.LINUX, Platform.MACOS));
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "static_pic_link_style", tmp);
  ws.setUp();
  // Linking with -shared should only work (on some architectures) if PIC was
  // used to build all included object files.
  ws.runBuckCommand("build", "--config", "cxx.cxxldflags=-shared", "//:bar").assertSuccess();
}
@Test
public void testStrippedBinaryProducesBothUnstrippedAndStrippedOutputs()
    throws IOException, InterruptedException {
  // macOS-only: uses dsymutil to inspect symbol tables. Building the stripped
  // flavor must also materialize the unstripped binary it strips from.
  assumeTrue(Platform.detect() == Platform.MACOS);
  BuildTarget unstrippedTarget = BuildTargetFactory.newInstance("//:test");
  BuildTarget strippedTarget =
      unstrippedTarget.withAppendedFlavors(StripStyle.DEBUGGING_SYMBOLS.getFlavor());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "header_namespace", tmp);
  workspace.setUp();
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  // -g so there are debug symbols to strip.
  workspace
      .runBuckCommand(
          "build", "--config", "cxx.cxxflags=-g", strippedTarget.getFullyQualifiedName())
      .assertSuccess();
  Path strippedPath =
      workspace.getPath(
          BuildTargetPaths.getGenPath(
              filesystem, strippedTarget.withAppendedFlavors(CxxStrip.RULE_FLAVOR), "%s"));
  Path unstrippedPath =
      workspace.getPath(BuildTargetPaths.getGenPath(filesystem, unstrippedTarget, "%s"));
  // Dump symbol tables of both outputs via dsymutil -s.
  String strippedOut =
      workspace.runCommand("dsymutil", "-s", strippedPath.toString()).getStdout().orElse("");
  String unstrippedOut =
      workspace.runCommand("dsymutil", "-s", unstrippedPath.toString()).getStdout().orElse("");
  // Both binaries keep the dynamic-linker stub symbol...
  assertThat(strippedOut, Matchers.containsStringIgnoringCase("dyld_stub_binder"));
  assertThat(unstrippedOut, Matchers.containsStringIgnoringCase("dyld_stub_binder"));
  // ...but only the unstripped one retains the debug reference to test.cpp.
  assertThat(strippedOut, not(Matchers.containsStringIgnoringCase("test.cpp")));
  assertThat(unstrippedOut, Matchers.containsStringIgnoringCase("test.cpp"));
}
@Test
public void testStrippedBinaryCanBeFetchedFromCacheAlone() throws Exception {
  // After a clean, fetching the stripped flavor from the dir cache must not
  // require re-materializing the unstripped binary.
  assumeThat(Platform.detect(), oneOf(Platform.LINUX, Platform.MACOS));
  BuildTarget strippedTarget =
      BuildTargetFactory.newInstance("//:test")
          .withFlavors(StripStyle.DEBUGGING_SYMBOLS.getFlavor());
  BuildTarget unstrippedTarget =
      strippedTarget.withoutFlavors(StripStyle.FLAVOR_DOMAIN.getFlavors());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "header_namespace", tmp);
  workspace.setUp();
  workspace.enableDirCache();
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  // First build populates the cache.
  workspace
      .runBuckCommand(
          "build", "--config", "cxx.cxxflags=-g", strippedTarget.getFullyQualifiedName())
      .assertSuccess();
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  // Second build should be served from the cache.
  workspace
      .runBuckCommand(
          "build", "--config", "cxx.cxxflags=-g", strippedTarget.getFullyQualifiedName())
      .assertSuccess();
  Path strippedPath =
      workspace.getPath(
          BuildTargetPaths.getGenPath(
              filesystem, strippedTarget.withAppendedFlavors(CxxStrip.RULE_FLAVOR), "%s"));
  Path unstrippedPath =
      workspace.getPath(BuildTargetPaths.getGenPath(filesystem, unstrippedTarget, "%s"));
  // Only the stripped artifact should exist on disk after the cache fetch.
  assertThat(Files.exists(strippedPath), Matchers.equalTo(true));
  assertThat(Files.exists(unstrippedPath), Matchers.equalTo(false));
}
@Test
public void stripRuleCanBeMadeUncachable() throws Exception {
  // With cxx.cache_strips=false the strip rule is not cached, so rebuilding
  // after a clean must re-run it and therefore materialize the unstripped input.
  assumeThat(Platform.detect(), oneOf(Platform.LINUX, Platform.MACOS));
  BuildTarget strippedTarget =
      BuildTargetFactory.newInstance("//:test")
          .withFlavors(StripStyle.DEBUGGING_SYMBOLS.getFlavor());
  BuildTarget unstrippedTarget =
      strippedTarget.withoutFlavors(StripStyle.FLAVOR_DOMAIN.getFlavors());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "header_namespace", tmp);
  workspace.setUp();
  workspace.enableDirCache();
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  // First build with strip caching disabled.
  workspace
      .runBuckCommand(
          "build",
          "--config",
          "cxx.cxxflags=-g",
          "--config",
          "cxx.cache_strips=false",
          strippedTarget.getFullyQualifiedName())
      .assertSuccess();
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  // Rebuild: the strip rule cannot come from cache and must run locally.
  workspace
      .runBuckCommand(
          "build",
          "--config",
          "cxx.cxxflags=-g",
          "--config",
          "cxx.cache_strips=false",
          strippedTarget.getFullyQualifiedName())
      .assertSuccess();
  Path strippedPath =
      workspace.getPath(
          BuildTargetPaths.getGenPath(
              filesystem, strippedTarget.withAppendedFlavors(CxxStrip.RULE_FLAVOR), "%s"));
  Path unstrippedPath =
      workspace.getPath(BuildTargetPaths.getGenPath(filesystem, unstrippedTarget, "%s"));
  // The unstripped path should be materialized because the strip rule is set to not cache.
  assertTrue(Files.exists(strippedPath));
  assertTrue(Files.exists(unstrippedPath));
}
@Test
public void testStrippedBinaryOutputDiffersFromUnstripped() throws IOException {
  assumeTrue(Platform.detect() == Platform.MACOS);
  BuildTarget unstripped = BuildTargetFactory.newInstance("//:test");
  BuildTarget stripped = unstripped.withFlavors(StripStyle.DEBUGGING_SYMBOLS.getFlavor());
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "header_namespace", tmp);
  ws.setUp();
  ProcessResult strippedRes =
      ws.runBuckCommand("targets", "--show-output", stripped.getFullyQualifiedName());
  strippedRes.assertSuccess();
  ProcessResult unstrippedRes =
      ws.runBuckCommand("targets", "--show-output", unstripped.getFullyQualifiedName());
  unstrippedRes.assertSuccess();
  // "targets --show-output" prints "<target> <output-path>"; the two flavors
  // must map to distinct output paths.
  String strippedOutput = strippedRes.getStdout().split(" ")[1];
  String unstrippedOutput = unstrippedRes.getStdout().split(" ")[1];
  assertThat(strippedOutput, not(Matchers.equalTo(unstrippedOutput)));
}
@Test
public void testBuildingWithAndWithoutLinkerMap() throws Exception {
  // macOS-only: the default build should emit a "%s-LinkMap.txt" next to the
  // binary, and the NO_LINKER_MAP flavor should suppress it.
  assumeTrue(Platform.detect() == Platform.MACOS);
  BuildTarget target = BuildTargetFactory.newInstance("//:test");
  BuildTarget withoutLinkerMapTarget =
      target.withAppendedFlavors(LinkerMapMode.NO_LINKER_MAP.getFlavor());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "header_namespace", tmp);
  workspace.setUp();
  ProjectFilesystem filesystem = workspace.getProjectFileSystem();
  // Default flavor: binary and link map both exist.
  workspace
      .runBuckCommand("build", "--config", "cxx.cxxflags=-g", target.getFullyQualifiedName())
      .assertSuccess();
  BuildTarget binaryWithLinkerMap = target;
  Path binaryWithLinkerMapPath =
      workspace.getPath(BuildTargetPaths.getGenPath(filesystem, binaryWithLinkerMap, "%s"));
  Path linkerMapPath =
      workspace.getPath(
          BuildTargetPaths.getGenPath(filesystem, binaryWithLinkerMap, "%s-LinkMap.txt"));
  assertThat(Files.exists(binaryWithLinkerMapPath), Matchers.equalTo(true));
  assertThat(Files.exists(linkerMapPath), Matchers.equalTo(true));
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  // NO_LINKER_MAP flavor: binary exists, link map does not.
  workspace
      .runBuckCommand(
          "build", "--config", "cxx.cxxflags=-g", withoutLinkerMapTarget.getFullyQualifiedName())
      .assertSuccess();
  BuildTarget binaryWithoutLinkerMap = withoutLinkerMapTarget;
  Path binaryWithoutLinkerMapPath =
      workspace.getPath(BuildTargetPaths.getGenPath(filesystem, binaryWithoutLinkerMap, "%s"));
  linkerMapPath =
      workspace.getPath(
          BuildTargetPaths.getGenPath(filesystem, binaryWithoutLinkerMap, "%s-LinkMap.txt"));
  assertThat(Files.exists(binaryWithoutLinkerMapPath), Matchers.equalTo(true));
  assertThat(Files.exists(linkerMapPath), Matchers.equalTo(false));
}
@Test
public void testDisablingLinkCaching() throws IOException {
  // With cxx.cache_links=false the link rule must not be cached: after a
  // clean, the second build has to re-link locally.
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
  workspace.setUp();
  workspace.enableDirCache();
  workspace.runBuckBuild("-c", "cxx.cache_links=false", "//foo:simple").assertSuccess();
  // FIX: assert the clean succeeds (result was previously ignored), matching
  // sibling tests such as testStrippedBinaryCanBeFetchedFromCacheAlone.
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  workspace.runBuckBuild("-c", "cxx.cache_links=false", "//foo:simple").assertSuccess();
  workspace
      .getBuildLog()
      .assertTargetBuiltLocally(
          CxxDescriptionEnhancer.createCxxLinkTarget(
              BuildTargetFactory.newInstance("//foo:simple"), Optional.empty()));
}
@Test
public void testThinArchives() throws IOException {
  // Thin archives reference member .o files instead of embedding them, so a
  // rebuild after clean must re-materialize the exact same set of object files
  // for the final (uncached) link to succeed.
  CxxPlatform cxxPlatform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  assumeTrue(
      cxxPlatform
          .getAr()
          .resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE)
          .supportsThinArchives());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "simple", tmp);
  workspace.setUp();
  workspace.enableDirCache();
  workspace
      .runBuckBuild(
          "-c",
          "cxx.cache_links=false",
          "-c",
          "cxx.archive_contents=thin",
          "//foo:binary_with_dep")
      .assertSuccess();
  ImmutableSortedSet<Path> initialObjects =
      findFiles(tmp.getRoot(), tmp.getRoot().getFileSystem().getPathMatcher("glob:**/*.o"));
  // FIX: assert the clean succeeds (result was previously ignored).
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  workspace
      .runBuckBuild(
          "-c",
          "cxx.cache_links=false",
          "-c",
          "cxx.archive_contents=thin",
          "//foo:binary_with_dep")
      .assertSuccess();
  workspace
      .getBuildLog()
      .assertTargetBuiltLocally(
          CxxDescriptionEnhancer.createCxxLinkTarget(
              BuildTargetFactory.newInstance("//foo:binary_with_dep"), Optional.empty()));
  ImmutableSortedSet<Path> subsequentObjects =
      findFiles(tmp.getRoot(), tmp.getRoot().getFileSystem().getPathMatcher("glob:**/*.o"));
  assertThat(initialObjects, Matchers.equalTo(subsequentObjects));
}
/**
 * Tests that, if a file has to be rebuilt, but its header dependencies do not, that the header
 * tree is still generated into the correct location.
 */
@Test
public void headersShouldBeSetUpCorrectlyOnRebuild() throws IOException {
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(
          this, "cxx_binary_dep_header_tree_materialize", tmp);
  workspace.setUp();
  workspace.enableDirCache();
  workspace.runBuckBuild("//:bin").assertSuccess();
  // FIX: assert the clean succeeds (result was previously ignored).
  workspace.runBuckCommand("clean", "--keep-cache").assertSuccess();
  // Change only the source; header rules can be fetched while the compile reruns.
  workspace.copyFile("bin.c.new", "bin.c");
  workspace.runBuckBuild("//:bin").assertSuccess();
  BuckBuildLog log = workspace.getBuildLog();
  log.assertTargetBuiltLocally("//:bin#binary");
}
/** Tests --config cxx.declared_platforms */
@Test
public void testDeclaredPlatforms() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "declared_platforms", tmp);
  ws.setUp();
  // Declaring an otherwise-unknown platform makes the query resolve.
  ws.runBuckCommand("query", "-c", "cxx.declared_platforms=my-favorite-platform", "//:simple")
      .assertSuccess();
}
@Test
public void testDeclaredPlatformsWithDefaultPlatform() throws IOException {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "declared_platforms", tmp);
  ws.setUp();
  ws.runBuckCommand("query", "-c", "cxx.declared_platforms=my-favorite-platform", "//:defaults")
      .assertSuccess();
  // Currently failing
  ws.runBuckCommand(
          "query", "-c", "cxx.declared_platforms=my-favorite-platform", "//:default_platform")
      .assertFailure();
}
@Test
public void targetsInPlatformSpecificFlagsDoNotBecomeDependencies() throws Exception {
  // Target labels appearing inside platform-specific flag strings must not be
  // treated as real dependencies; the build only succeeds if they are not.
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(
          this, "targets_in_platform_specific_flags_do_not_become_dependencies", tmp);
  ws.setUp();
  ws.runBuckBuild(":bin").assertSuccess();
}
@Test
public void conflictingHeadersBuildFails() throws Exception {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "headers_conflicts", tmp);
  ws.setUp();
  // Two deps exporting a header under the same include path is an error.
  ProcessResult res = ws.runBuckBuild(":main").assertFailure();
  assertTrue(
      res.getStderr()
          .contains("has dependencies using headers that can be included using the same path"));
}
@Test
public void conflictingHeadersWithWhitelistSucceeds() throws Exception {
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "headers_conflicts", tmp);
  ws.setUp();
  // Whitelisting the conflicting basename downgrades the conflict to allowed.
  ws.runBuckBuild("-c", "cxx.conflicting_header_basename_whitelist=public.h", ":main")
      .assertSuccess();
}
@Test
public void testLinkMapCreated() throws IOException {
  // The #linkmap flavor is only supported where the Apple toolchain is present.
  assumeThat(Platform.detect(), is(Platform.MACOS));
  assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
  ProjectWorkspace ws =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_binary_linkmap", tmp);
  ws.setUp();
  ws.runBuckBuild(":binary#linkmap").assertSuccess();
}
@Test
public void testLinkMapNotCreated() throws IOException {
  assumeThat(Platform.detect(), is(Platform.LINUX));
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_binary_linkmap", tmp);
  workspace.setUp();
  try {
    workspace.runBuckBuild(":binary#linkmap");
    // FIX: previously the test passed vacuously when no exception was thrown.
    // NOTE(review): if the build reports this error via the process result
    // instead of throwing, this test was never exercising anything — confirm.
    throw new AssertionError("expected HumanReadableException for unsupported linker maps");
  } catch (HumanReadableException e) {
    assertEquals(
        "Linker for target //:binary#linkmap does not support linker maps.",
        e.getHumanReadableErrorMessage());
  }
}
@Test
public void testRunFlavors() throws IOException {
  // Build and run each flavor spelling of the bin targets.
  assumeThat(Platform.detect(), not(Platform.WINDOWS));
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_flavors", tmp);
  workspace.setUp();
  // FIX: the first two assertion messages said "//bin:bin1" (copy-paste) while
  // the commands operate on "//bin:bin".
  workspace.runBuckCommand("build", "//bin:bin").assertSuccess("build //bin:bin");
  workspace.runBuckCommand("run", "//bin:bin").assertSuccess("run //bin:bin");
  workspace.runBuckCommand("build", "//bin:bin#default").assertSuccess("build //bin:bin#default");
  workspace.runBuckCommand("run", "//bin:bin#default").assertSuccess("run //bin:bin#default");
  workspace.runBuckCommand("build", "//bin:bin1").assertSuccess("build //bin:bin1");
  workspace.runBuckCommand("run", "//bin:bin1").assertSuccess("run //bin:bin1");
}
/** Walks {@code root} and returns, sorted, every file whose path satisfies {@code matcher}. */
private ImmutableSortedSet<Path> findFiles(Path root, PathMatcher matcher) throws IOException {
  ImmutableSortedSet.Builder<Path> matched = ImmutableSortedSet.naturalOrder();
  SimpleFileVisitor<Path> collector =
      new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
          if (matcher.matches(file)) {
            matched.add(file);
          }
          return FileVisitResult.CONTINUE;
        }
      };
  Files.walkFileTree(root, collector);
  return matched.build();
}
/** Splits {@code str} on '\n', drops empty lines, and returns the distinct lines. */
private static ImmutableSet<String> getUniqueLines(String str) {
  Splitter lineSplitter = Splitter.on('\n').omitEmptyStrings();
  return ImmutableSet.copyOf(lineSplitter.split(str));
}
}
| {
"content_hash": "500a7c1e6d4ab4a32ccca73e337ba17c",
"timestamp": "",
"source": "github",
"line_count": 2601,
"max_line_length": 100,
"avg_line_length": 42.96193771626297,
"alnum_prop": 0.6848510882016037,
"repo_name": "Addepar/buck",
"id": "c48cc647db34581b8b25bffdf4d30f2b63aaca61",
"size": "112360",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "test/com/facebook/buck/cxx/CxxBinaryIntegrationTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1585"
},
{
"name": "Batchfile",
"bytes": "3875"
},
{
"name": "C",
"bytes": "281239"
},
{
"name": "C#",
"bytes": "237"
},
{
"name": "C++",
"bytes": "18966"
},
{
"name": "CSS",
"bytes": "56106"
},
{
"name": "D",
"bytes": "1017"
},
{
"name": "Dockerfile",
"bytes": "2081"
},
{
"name": "Go",
"bytes": "10020"
},
{
"name": "Groovy",
"bytes": "3362"
},
{
"name": "HTML",
"bytes": "11252"
},
{
"name": "Haskell",
"bytes": "1008"
},
{
"name": "IDL",
"bytes": "480"
},
{
"name": "Java",
"bytes": "29247474"
},
{
"name": "JavaScript",
"bytes": "938678"
},
{
"name": "Kotlin",
"bytes": "25755"
},
{
"name": "Lex",
"bytes": "12772"
},
{
"name": "MATLAB",
"bytes": "47"
},
{
"name": "Makefile",
"bytes": "1916"
},
{
"name": "OCaml",
"bytes": "4935"
},
{
"name": "Objective-C",
"bytes": "176958"
},
{
"name": "Objective-C++",
"bytes": "34"
},
{
"name": "PowerShell",
"bytes": "2244"
},
{
"name": "Prolog",
"bytes": "1486"
},
{
"name": "Python",
"bytes": "2076151"
},
{
"name": "Roff",
"bytes": "1207"
},
{
"name": "Rust",
"bytes": "5716"
},
{
"name": "Scala",
"bytes": "5082"
},
{
"name": "Shell",
"bytes": "77999"
},
{
"name": "Smalltalk",
"bytes": "194"
},
{
"name": "Swift",
"bytes": "11393"
},
{
"name": "Thrift",
"bytes": "48595"
},
{
"name": "Yacc",
"bytes": "323"
}
],
"symlink_target": ""
} |
module VagrantPlugins
  module VSphere
    module Action
      # Middleware step that answers "is this machine running?" by comparing
      # the machine's reported state id to :running, storing the boolean in
      # env[:result] before passing control down the chain.
      class IsRunning
        def initialize(app, _env)
          @app = app
        end

        def call(env)
          running = env[:machine].state.id == :running
          env[:result] = running
          @app.call(env)
        end
      end
    end
  end
end
| {
"content_hash": "4febc00037a473949431e7c88ce38df0",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 59,
"avg_line_length": 18.3125,
"alnum_prop": 0.5290102389078498,
"repo_name": "edmcman/vagrant-vsphere",
"id": "a2524ae515c90085f7fa9aecf646f731963ea697",
"size": "293",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/vSphere/action/is_running.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "74998"
}
],
"symlink_target": ""
} |
package hudson.tasks.junit;
import hudson.FilePath;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import hudson.slaves.DumbSlave;
import hudson.tasks.test.TestObject;
import java.util.concurrent.TimeUnit;
import org.jvnet.hudson.test.HudsonTestCase;
import org.jvnet.hudson.test.TouchBuilder;
import org.jvnet.hudson.test.recipes.LocalData;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
/**
 * Functional tests for {@code JUnitResultArchiver}: archiving JUnit XML
 * results, running the archiver on a slave node, persistence of results
 * across a Jenkins reload, and editing result descriptions through the
 * web UI. Each {@code @LocalData} test relies on fixture data bundled
 * with the test resources.
 */
public class JUnitResultArchiverTest extends HudsonTestCase {
    // Freestyle project named "junit" created fresh for every test.
    private FreeStyleProject project;
    // Archiver under test, configured to collect all *.xml files.
    private JUnitResultArchiver archiver;
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        project = createFreeStyleProject("junit");
        archiver = new JUnitResultArchiver("*.xml");
        project.getPublishersList().add(archiver);
        // TouchBuilder makes the build do no real work beyond touching files.
        project.getBuildersList().add(new TouchBuilder());
    }
    /**
     * Builds once, checks the archived counts, and smoke-tests that every
     * level of the test report UI (project, build, report, package, class,
     * method) renders without error.
     */
    @LocalData
    public void testBasic() throws Exception {
        FreeStyleBuild build = project.scheduleBuild2(0).get(10, TimeUnit.SECONDS);
        assertTestResults(build);
        WebClient wc =new WebClient();
        wc.getPage(project); // project page
        wc.getPage(build); // build page
        wc.getPage(build, "testReport"); // test report
        wc.getPage(build, "testReport/hudson.security"); // package
        wc.getPage(build, "testReport/hudson.security/HudsonPrivateSecurityRealmTest/"); // class
        wc.getPage(build, "testReport/hudson.security/HudsonPrivateSecurityRealmTest/testDataCompatibilityWith1_282/"); // method
    }
    /**
     * Same as {@link #testBasic()} but with the build tied to a slave node;
     * the fixture workspace is first copied over to the slave.
     */
    @LocalData
    public void testSlave() throws Exception {
        DumbSlave s = createOnlineSlave();
        project.setAssignedLabel(s.getSelfLabel());
        FilePath src = new FilePath(hudson.getRootPath(), "jobs/junit/workspace/");
        assertNotNull(src);
        FilePath dest = s.getWorkspaceFor(project);
        assertNotNull(dest);
        src.copyRecursiveTo("*.xml", dest);
        testBasic();
    }
    // Asserts the expected counts from the fixture data: 1 failure out of
    // 132 tests, visible both on the action and on the parsed result.
    private void assertTestResults(FreeStyleBuild build) {
        TestResultAction testResultAction = build.getAction(TestResultAction.class);
        assertNotNull("no TestResultAction", testResultAction);
        TestResult result = testResultAction.getResult();
        assertNotNull("no TestResult", result);
        assertEquals("should have 1 failing test", 1, testResultAction.getFailCount());
        assertEquals("should have 1 failing test", 1, result.getFailCount());
        assertEquals("should have 132 total tests", 132, testResultAction.getTotalCount());
        assertEquals("should have 132 total tests", 132, result.getTotalCount());
    }
    /**
     * Verifies the archived results survive a full Jenkins configuration
     * reload (i.e. they are persisted to disk, not only held in memory).
     */
    @LocalData
    public void testPersistence() throws Exception {
        project.scheduleBuild2(0).get(60, TimeUnit.SECONDS);
        reloadJenkins();
        FreeStyleBuild build = project.getBuildByNumber(1);
        assertTestResults(build);
    }
    // Reloads Jenkins from disk and re-fetches the project reference,
    // since the old object is stale after a reload.
    private void reloadJenkins() throws Exception {
        hudson.reload();
        project = (FreeStyleProject) hudson.getItem("junit");
    }
    /**
     * Exercises setting a description on a case, class, and package result
     * through the web form and verifies the round trip.
     */
    @LocalData
    public void testSetDescription() throws Exception {
        FreeStyleBuild build = project.scheduleBuild2(0).get(10, TimeUnit.SECONDS);
        CaseResult caseResult = build.getAction(TestResultAction.class).getFailedTests().get(0);
        String url = build.getUrl() + "/testReport/" + caseResult.getRelativePathFrom(caseResult.getTestResult());
        testSetDescription(url, caseResult);
        ClassResult classResult = caseResult.getParent();
        url = build.getUrl() + "/testReport/" + classResult.getParent().getSafeName() + "/" + classResult.getSafeName();
        testSetDescription(url, classResult);
        PackageResult packageResult = classResult.getParent();
        url = build.getUrl() + "/testReport/" + classResult.getParent().getSafeName();
        testSetDescription(url, packageResult);
    }
    // Sets the description via the API, then edits it again through the
    // HTML form to confirm the UI path works as well.
    private void testSetDescription(String url, TestObject object) throws Exception {
        object.doSubmitDescription("description");
        // test the roundtrip
        HtmlPage page = new WebClient().goTo(url);
        page.getAnchorByHref("editDescription").click();
        HtmlForm form = findForm(page, "submitDescription");
        submit(form);
        assertEquals("description", object.getDescription());
    }
    // Finds the form on the page whose action attribute matches; fails the
    // test if no such form exists.
    private HtmlForm findForm(HtmlPage page, String action) {
        for (HtmlForm form: page.getForms()) {
            if (action.equals(form.getActionAttribute())) {
                return form;
            }
        }
        fail("no form found");
        return null;
    }
}
| {
"content_hash": "d411538ea5f39f8ece64b06e70fee8d9",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 123,
"avg_line_length": 32.074074074074076,
"alnum_prop": 0.7344110854503464,
"repo_name": "lvotypko/jenkins2",
"id": "82849d6915e12b0644334d574059e8fc556c4bce",
"size": "5473",
"binary": false,
"copies": "2",
"ref": "refs/heads/url-without-ampersand",
"path": "test/src/test/java/hudson/tasks/junit/JUnitResultArchiverTest.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2091"
},
{
"name": "Elixir",
"bytes": "4077"
},
{
"name": "Groovy",
"bytes": "83905"
},
{
"name": "Java",
"bytes": "6413327"
},
{
"name": "JavaScript",
"bytes": "132398"
},
{
"name": "Perl",
"bytes": "13335"
},
{
"name": "Python",
"bytes": "2169"
},
{
"name": "Ruby",
"bytes": "21442"
},
{
"name": "Shell",
"bytes": "20645"
}
],
"symlink_target": ""
} |
package today.comeet.android.comeet.modules;
import android.os.AsyncTask;
import com.google.android.gms.maps.model.LatLng;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import today.comeet.android.comeet.listener.DirectionFinderListener;
/**
* Created by Annick on 05/11/2016.
*/
/**
 * Fetches driving directions between two free-text locations from the
 * Google Maps Directions web service, parses the JSON response, and
 * reports the resulting routes to a {@link DirectionFinderListener}.
 *
 * Created by Annick on 05/11/2016.
 */
public class DirectionFinder {
    private static final String DIRECTION_URL_API = "https://maps.googleapis.com/maps/api/directions/json?";
    // SECURITY NOTE(review): an API key should not be hard-coded in source
    // control; move it to build configuration or a secure store and rotate
    // this one.
    private static final String GOOGLE_API_KEY = "AIzaSyDnwLF2-WfK8cVZt9OoDYJ9Y8kspXhEHfI";
    /** Callback notified when the request starts and when routes are parsed. */
    private DirectionFinderListener listener;
    /** Origin of the route, as free text (will be URL-encoded). */
    private String origin;
    /** Destination of the route, as free text (will be URL-encoded). */
    private String destination;
    public DirectionFinder(DirectionFinderListener listener, String origin, String destination) {
        this.listener = listener;
        this.origin = origin;
        this.destination = destination;
    }
    /**
     * Notifies the listener and starts the asynchronous download.
     *
     * @throws UnsupportedEncodingException never in practice; UTF-8 is
     *         always supported, but the signature is kept for compatibility
     */
    public void execute() throws UnsupportedEncodingException {
        listener.onDirectionFinderStart();
        new DownloadRawData().execute(createUrl());
    }
    /** Builds the request URL with URL-encoded origin and destination. */
    private String createUrl() throws UnsupportedEncodingException {
        String urlOrigin = URLEncoder.encode(origin, "utf-8");
        String urlDestination = URLEncoder.encode(destination, "utf-8");
        return DIRECTION_URL_API + "origin=" + urlOrigin + "&destination=" + urlDestination + "&key=" + GOOGLE_API_KEY;
    }
    /** Downloads the raw JSON response off the UI thread. */
    private class DownloadRawData extends AsyncTask<String, Void, String> {
        @Override
        protected String doInBackground(String... params) {
            String link = params[0];
            BufferedReader reader = null;
            try {
                URL url = new URL(link);
                InputStream is = url.openConnection().getInputStream();
                reader = new BufferedReader(new InputStreamReader(is));
                // StringBuilder: no synchronization needed on a local buffer.
                StringBuilder buffer = new StringBuilder();
                String line;
                while ((line = reader.readLine()) != null) {
                    buffer.append(line).append('\n');
                }
                return buffer.toString();
            } catch (IOException e) {
                // Also covers MalformedURLException, a subclass of IOException.
                e.printStackTrace();
            } finally {
                // Fix: the reader (and the underlying connection stream) was
                // previously never closed, leaking the connection.
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException ignored) {
                        // Best-effort close; nothing useful to do here.
                    }
                }
            }
            return null;
        }
        @Override
        protected void onPostExecute(String res) {
            try {
                parseJSon(res);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
    }
    /**
     * Parses the Directions API JSON payload into {@code Route} objects and
     * notifies the listener. Silently returns on a null payload (download
     * failure), so the listener gets no callback in that case.
     *
     * @param data raw JSON response body, or null if the download failed
     * @throws JSONException if the payload does not match the expected schema
     */
    private void parseJSon(String data) throws JSONException {
        if (data == null)
            return;
        List<Route> routes = new ArrayList<Route>();
        JSONObject jsonData = new JSONObject(data);
        JSONArray jsonRoutes = jsonData.getJSONArray("routes");
        for (int i = 0; i < jsonRoutes.length(); i++) {
            JSONObject jsonRoute = jsonRoutes.getJSONObject(i);
            Route route = new Route();
            JSONObject overview_polylineJson = jsonRoute.getJSONObject("overview_polyline");
            // Only the first leg of each route is used.
            JSONArray jsonLegs = jsonRoute.getJSONArray("legs");
            JSONObject jsonLeg = jsonLegs.getJSONObject(0);
            JSONObject jsonDistance = jsonLeg.getJSONObject("distance");
            JSONObject jsonDuration = jsonLeg.getJSONObject("duration");
            JSONObject jsonEndLocation = jsonLeg.getJSONObject("end_location");
            JSONObject jsonStartLocation = jsonLeg.getJSONObject("start_location");
            route.distance = new Distance(jsonDistance.getString("text"), jsonDistance.getInt("value"));
            route.duration = new Duration(jsonDuration.getString("text"), jsonDuration.getInt("value"));
            route.endAddress = jsonLeg.getString("end_address");
            route.startAddress = jsonLeg.getString("start_address");
            route.startLocation = new LatLng(jsonStartLocation.getDouble("lat"), jsonStartLocation.getDouble("lng"));
            route.endLocation = new LatLng(jsonEndLocation.getDouble("lat"), jsonEndLocation.getDouble("lng"));
            route.points = decodePolyLine(overview_polylineJson.getString("points"));
            routes.add(route);
        }
        listener.onDirectionFinderSuccess(routes);
    }
    /**
     * Decodes a Google encoded polyline string into a list of coordinates.
     * Each coordinate is delta-encoded in base-64-ish 5-bit chunks offset
     * by 63; deltas accumulate into lat/lng scaled by 1e5.
     *
     * @param poly encoded polyline ("points" field of overview_polyline)
     * @return decoded coordinates in order
     */
    private List<LatLng> decodePolyLine(final String poly) {
        int len = poly.length();
        int index = 0;
        List<LatLng> decoded = new ArrayList<LatLng>();
        int lat = 0;
        int lng = 0;
        while (index < len) {
            int b;
            int shift = 0;
            int result = 0;
            do {
                b = poly.charAt(index++) - 63;
                result |= (b & 0x1f) << shift;
                shift += 5;
            } while (b >= 0x20);
            // Low bit flags a negative delta (stored as one's complement).
            int dlat = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
            lat += dlat;
            shift = 0;
            result = 0;
            do {
                b = poly.charAt(index++) - 63;
                result |= (b & 0x1f) << shift;
                shift += 5;
            } while (b >= 0x20);
            int dlng = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
            lng += dlng;
            decoded.add(new LatLng(
                    lat / 100000d, lng / 100000d
            ));
        }
        return decoded;
    }
}
| {
"content_hash": "c1d39664442b50fb1b87911df54b2bec",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 119,
"avg_line_length": 35.29113924050633,
"alnum_prop": 0.5961262553802008,
"repo_name": "AlexisSoto/comeet-android",
"id": "198d767a58c49794d2588cf767624d423279146d",
"size": "5576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/today/comeet/android/comeet/modules/DirectionFinder.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "93174"
}
],
"symlink_target": ""
} |
package org.elasticsearch.node.internal;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class InternalSettingsPreparerTests extends ESTestCase {
Settings baseEnvSettings;
@Before
public void createBaseEnvSettings() {
baseEnvSettings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
}
@After
public void clearBaseEnvSettings() {
baseEnvSettings = null;
}
public void testEmptySettings() {
Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY);
assertNotNull(settings.get("node.name")); // a name was set
assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
int size = settings.names().size();
Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null);
settings = env.settings();
assertNotNull(settings.get("node.name")); // a name was set
assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size());
String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings);
String configDir = env.configFile().toString();
assertTrue(configDir, configDir.startsWith(home));
}
public void testClusterNameDefault() {
Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY);
assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()));
settings = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null).settings();
assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()));
}
public void testReplacePromptPlaceholders() {
MockTerminal terminal = new MockTerminal();
terminal.addTextInput("text");
terminal.addSecretInput("replaced");
Settings.Builder builder = Settings.builder()
.put(baseEnvSettings)
.put("password.replace", InternalSettingsPreparer.SECRET_PROMPT_VALUE)
.put("dont.replace", "prompt:secret")
.put("dont.replace2", "_prompt:secret_")
.put("dont.replace3", "_prompt:text__")
.put("dont.replace4", "__prompt:text_")
.put("dont.replace5", "prompt:secret__")
.put("replace_me", InternalSettingsPreparer.TEXT_PROMPT_VALUE);
Settings settings = InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal).settings();
assertThat(settings.get("password.replace"), equalTo("replaced"));
assertThat(settings.get("replace_me"), equalTo("text"));
// verify other values unchanged
assertThat(settings.get("dont.replace"), equalTo("prompt:secret"));
assertThat(settings.get("dont.replace2"), equalTo("_prompt:secret_"));
assertThat(settings.get("dont.replace3"), equalTo("_prompt:text__"));
assertThat(settings.get("dont.replace4"), equalTo("__prompt:text_"));
assertThat(settings.get("dont.replace5"), equalTo("prompt:secret__"));
}
public void testReplaceSecretPromptPlaceholderWithNullTerminal() {
Settings.Builder builder = Settings.builder()
.put(baseEnvSettings)
.put("replace_me1", InternalSettingsPreparer.SECRET_PROMPT_VALUE);
try {
InternalSettingsPreparer.prepareEnvironment(builder.build(), null);
fail("an exception should have been thrown since no terminal was provided!");
} catch (UnsupportedOperationException e) {
assertThat(e.getMessage(), containsString("with value [" + InternalSettingsPreparer.SECRET_PROMPT_VALUE + "]"));
}
}
public void testReplaceTextPromptPlaceholderWithNullTerminal() {
Settings.Builder builder = Settings.builder()
.put(baseEnvSettings)
.put("replace_me1", InternalSettingsPreparer.TEXT_PROMPT_VALUE);
try {
InternalSettingsPreparer.prepareEnvironment(builder.build(), null);
fail("an exception should have been thrown since no terminal was provided!");
} catch (UnsupportedOperationException e) {
assertThat(e.getMessage(), containsString("with value [" + InternalSettingsPreparer.TEXT_PROMPT_VALUE + "]"));
}
}
public void testGarbageIsNotSwallowed() throws IOException {
try {
InputStream garbage = getClass().getResourceAsStream("/config/garbage/garbage.yml");
Path home = createTempDir();
Path config = home.resolve("config");
Files.createDirectory(config);
Files.copy(garbage, config.resolve("elasticsearch.yml"));
InternalSettingsPreparer.prepareEnvironment(Settings.builder()
.put(baseEnvSettings)
.build(), null);
} catch (SettingsException e) {
assertEquals("Failed to load settings from [elasticsearch.yml]", e.getMessage());
}
}
public void testMultipleSettingsFileNotAllowed() throws IOException {
InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.yaml");
InputStream properties = getClass().getResourceAsStream("/config/elasticsearch.properties");
Path home = createTempDir();
Path config = home.resolve("config");
Files.createDirectory(config);
Files.copy(yaml, config.resolve("elasticsearch.yaml"));
Files.copy(properties, config.resolve("elasticsearch.properties"));
try {
InternalSettingsPreparer.prepareEnvironment(Settings.builder()
.put(baseEnvSettings)
.build(), null);
} catch (SettingsException e) {
assertTrue(e.getMessage(), e.getMessage().contains("multiple settings files found with suffixes"));
assertTrue(e.getMessage(), e.getMessage().contains(".yaml"));
assertTrue(e.getMessage(), e.getMessage().contains(".properties"));
}
}
}
| {
"content_hash": "d9917bebffa55873e48c237503a27c08",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 124,
"avg_line_length": 46.12328767123287,
"alnum_prop": 0.6703296703296703,
"repo_name": "camilojd/elasticsearch",
"id": "87abc20a0de98ee592b57aba8064c935d4e37897",
"size": "7522",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "8172"
},
{
"name": "Batchfile",
"bytes": "11820"
},
{
"name": "Emacs Lisp",
"bytes": "3341"
},
{
"name": "FreeMarker",
"bytes": "45"
},
{
"name": "Groovy",
"bytes": "221450"
},
{
"name": "HTML",
"bytes": "5595"
},
{
"name": "Java",
"bytes": "33735203"
},
{
"name": "Perl",
"bytes": "7111"
},
{
"name": "Python",
"bytes": "75936"
},
{
"name": "Ruby",
"bytes": "1917"
},
{
"name": "Shell",
"bytes": "90919"
}
],
"symlink_target": ""
} |
package gov.sandia.cognition.learning.algorithm.svm;
import gov.sandia.cognition.annotation.PublicationReference;
import gov.sandia.cognition.annotation.PublicationType;
import gov.sandia.cognition.learning.algorithm.AbstractAnytimeSupervisedBatchLearner;
import gov.sandia.cognition.learning.data.InputOutputPair;
import gov.sandia.cognition.learning.function.categorization.KernelBinaryCategorizer;
import gov.sandia.cognition.learning.function.kernel.Kernel;
import gov.sandia.cognition.learning.function.kernel.KernelContainer;
import gov.sandia.cognition.util.DefaultWeightedValue;
import gov.sandia.cognition.util.Randomized;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Random;
/**
* This is a simplified version of the Sequential Minimization Algorithm (SMO)
* that was used as a stepping-stone in the full SMO implementation.
*
* @author Justin Basilico
* @since 3.1
* @see SequentialMinimalOptimization
*/
@PublicationReference(
    title="The Simplified SMO Algorithm",
    author="Andrew Ng",
    year=2009,
    type=PublicationType.WebPage,
    url="http://www.stanford.edu/class/cs229/materials/smo.pdf")
public class SimplifiedSequentialMinimalOptimization<InputType>
    extends AbstractAnytimeSupervisedBatchLearner<InputType, Boolean, KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>>>
    implements KernelContainer<InputType>, Randomized
{
    /** The default maximum number of iterations is {@value}. */
    public static final int DEFAULT_MAX_ITERATIONS = 1000;
    /** The default number of consecutive iterations without a change
     * before stopping is {@value}. */
    public static final int DEFAULT_MAX_STEPS_WITHOUT_CHANGE = 10;
    /** The default maximum penalty is infinite, which means that it is
     * hard-assignment. */
    public static final double DEFAULT_MAX_PENALTY = Double.POSITIVE_INFINITY;
    /** The default error tolerance is 0.001, which is what was recommended in
     * the original Sequential Minimal Optimization paper. */
    public static final double DEFAULT_ERROR_TOLERANCE = 0.001;
    /** The default effective value for zero is {@value}. */
    public static final double DEFAULT_EFFECTIVE_ZERO = 1.0e-10;
    /** The kernel to use. */
    private Kernel<? super InputType> kernel;
    /** Maximum penalty parameter (C); upper bound for each alpha. */
    private double maxPenalty;
    /** Tolerance used when testing the KKT conditions for a point. */
    private double errorTolerance;
    /** Iterations without any alpha change before the algorithm stops. */
    private int maxStepsWithoutChange;
    /** Magnitude below which a value is treated as exactly zero. */
    private double effectiveZero;
    /** Randomness source used to pick the second index of each step. */
    private Random random;
    /** The result categorizer. */
    private transient KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>> result;
    /** Valid (non-null input and output) training examples, by index. */
    private transient ArrayList<InputOutputPair<? extends InputType, Boolean>> dataList;
    /** Number of valid examples in dataList. */
    private transient int dataSize;
    /** The number of items changed on the most recent iteration. */
    private transient int changeCount;
    /** Consecutive iterations in which no alpha value changed. */
    private transient int stepsWithoutChange;
    /** The mapping of weight objects to non-zero weighted examples
     * (support vectors). */
    private transient LinkedHashMap<Integer, DefaultWeightedValue<InputType>> supportsMap;
    /**
     * Creates a new instance with all default parameters and no kernel;
     * the kernel must be set before learning.
     */
    public SimplifiedSequentialMinimalOptimization()
    {
        this(null, DEFAULT_MAX_PENALTY, DEFAULT_ERROR_TOLERANCE,
            DEFAULT_MAX_STEPS_WITHOUT_CHANGE, DEFAULT_EFFECTIVE_ZERO,
            DEFAULT_MAX_ITERATIONS, new Random());
    }
    /**
     * Creates a new instance with the given parameters.
     *
     * @param kernel the kernel function to use
     * @param maxPenalty maximum penalty parameter (C); must be positive
     * @param errorTolerance KKT tolerance; must be non-negative
     * @param maxStepsWithoutChange stop after this many changeless
     *        iterations; must be positive
     * @param effectiveZero numerical zero threshold; must be non-negative
     * @param maxIterations maximum number of iterations
     * @param random randomness source for picking the second index
     */
    public SimplifiedSequentialMinimalOptimization(
        Kernel<? super InputType> kernel,
        final double maxPenalty,
        double errorTolerance,
        int maxStepsWithoutChange,
        double effectiveZero,
        final int maxIterations,
        Random random)
    {
        super(maxIterations);
        this.setKernel(kernel);
        this.setMaxPenalty(maxPenalty);
        this.setErrorTolerance(errorTolerance);
        this.setMaxStepsWithoutChange(maxStepsWithoutChange);
        this.setEffectiveZero(effectiveZero);
        this.setRandom(random);
    }
    /**
     * Validates the training data, builds the list of usable examples,
     * and initializes the categorizer with all-zero alphas and zero bias.
     *
     * @return true if learning can proceed; false when there is no data
     * @throws IllegalArgumentException if all examples share one category
     */
    @Override
    protected boolean initializeAlgorithm()
    {
        this.result = null;
        if (this.getData() == null)
        {
            // Error: No data to learn on.
            return false;
        }
        this.dataList = new ArrayList<InputOutputPair<? extends InputType, Boolean>>(
            this.getData().size());
        int positives = 0;
        for (InputOutputPair<? extends InputType, Boolean> example : this.getData())
        {
            // Keep only examples with both an input and an output.
            if (example != null && example.getInput() != null && example.getOutput() != null)
            {
                this.dataList.add(example);
                if (example.getOutput())
                {
                    positives++;
                }
            }
        }
        this.dataSize = this.dataList.size();
        if (this.dataSize <= 0)
        {
            // Error: No valid data to learn from.
            this.dataList = null;
            return false;
        }
        else if (positives <= 0 || positives >= this.dataSize)
        {
            throw new IllegalArgumentException("Data is all one category");
        }
        this.changeCount = this.getData().size();
        this.stepsWithoutChange = 0;
        this.supportsMap = new LinkedHashMap<Integer, DefaultWeightedValue<InputType>>();
        // initialize alpha array to all zero
        // initialize threshold to zero
        this.result = new KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>>(
            this.kernel, this.supportsMap.values(), 0.0);
        return true;
    }
    /**
     * One pass over the data: for each point that violates the KKT
     * conditions (within errorTolerance), attempts a joint optimization
     * step with one randomly chosen partner index.
     *
     * @return true to keep iterating; false once maxStepsWithoutChange
     *         consecutive changeless passes have occurred
     */
    @Override
    protected boolean step()
    {
        // NOTE(review): debug output left in; consider removing or routing
        // through a logger for production use.
        System.out.println("Iteration: " + this.getIteration());
        final double tol = this.errorTolerance;
        final double C = this.maxPenalty;
        this.changeCount = 0;
        for (int i = 0; i < this.dataSize; i++)
        {
            //System.out.println();
            //System.out.println("    i: " + i);
            final double yI = this.getTarget(i);
            final double eI = this.getSVMOutput(i) - yI;
            double alphaI = this.getAlpha(i);
            //System.out.println("    yi: " + yi);
            //System.out.println("    Ei: " + Ei);
            //System.out.println("    alphai: " + alphai);
            final double yITimesEI = yI * eI;
            // KKT violation test: point i is a candidate for optimization.
            if (    ((yITimesEI < -tol) && (alphaI < C))
                 || ((yITimesEI > +tol) && (alphaI > 0)))
            {
                // Select a random j != i
                int j = this.random.nextInt(this.dataSize - 1);
                if (j >= i)
                {
                    j += 1;
                }
                //for (int j = 0; j < dataSize; j++)
                //{
                //    if (i == j) continue;
                if (this.takeStep(i, j))
                {
                    changeCount++;
                }
            }
            //}
        }
        /*
        System.out.println("Change count: " + changeCount);
        System.out.println("Result " + result);
        for (WeightedValue<?> support : result.getExamples())
        {
        System.out.println("    " + support.getWeight() + " " + support.getValue());
        }
        System.out.println("Bias: " + result.getBias());
        */
        if (this.changeCount <= 0)
        {
            this.stepsWithoutChange++;
        }
        else
        {
            this.stepsWithoutChange = 0;
        }
        return this.stepsWithoutChange < this.maxStepsWithoutChange;
    }
    /**
     * Attempts the analytic joint optimization of alphas i and j: clips
     * the new alpha j to its feasible interval, derives alpha i from the
     * linear constraint, and updates the bias.
     *
     * @param i index of the first example
     * @param j index of the second example (must differ from i)
     * @return true if the alphas (and bias) were actually changed
     */
    private boolean takeStep(
        final int i,
        final int j)
    {
        if (i == j)
        {
            // This is a sanity check. It cannot take a step if the two
            // examples are exactly the same.
            return false;
        }
        final double C = this.maxPenalty;
        final double epsilon = this.effectiveZero;
        final double CMinusEpsilon = C - epsilon;
        final double yI = this.getTarget(i);
        final double eI = this.getSVMOutput(i) - yI;
        final double oldAlphaI = this.getAlpha(i);
        // double alphaI = this.getAlpha(i);
        final double yJ = this.getTarget(j);
        final double eJ = this.getSVMOutput(j) - yJ;
        final double oldAlphaJ = this.getAlpha(j);
        // double alphaJ = this.getAlpha(j);
        //System.out.println("    i: " + i);
        //System.out.println("    yi: " + yi);
        //System.out.println("    Ei: " + Ei);
        //System.out.println("    alphai: " + alphai);
        //System.out.println("    j: " + j);
        //System.out.println("    Ej: " + Ej);
        //System.out.println("    yj: " + yj);
        //System.out.println("    alphaj: " + alphaj);
        // Compute the lower and upper bounds to solve for new values of
        // alphaI and alphaJ.
        final double lowerBound;
        final double upperBound;
        if (yI != yJ)
        {
            final double alphaJMinusAlphaI = oldAlphaJ - oldAlphaI;
            lowerBound = Math.max(0, alphaJMinusAlphaI);
            upperBound = Math.min(C, alphaJMinusAlphaI + C);
        }
        else
        {
            final double alphaIPlusAlphaJ = oldAlphaI + oldAlphaJ;
            lowerBound = Math.max(0, alphaIPlusAlphaJ - C);
            upperBound = Math.min(C, alphaIPlusAlphaJ);
        }
        //System.out.println("    L: " + L);
        //System.out.println("    H: " + H);
        if (lowerBound >= upperBound)
        {
            return false;
        }
        // Evaluate the kernels between the values, using the property that by
        // kernel symmetry: k(i,j) == k(j,i)
        final double kII = this.evaluateKernel(i, i);
        final double kIJ = this.evaluateKernel(i, j);
        final double kJI = kIJ;
        final double kJJ = this.evaluateKernel(j, j);
        final double eta = kIJ + kJI - kII - kJJ;
        //System.out.println("    eta: " + eta);
        if (eta >= 0.0)
        {
            return false;
        }
        double newAlphaJ = oldAlphaJ - (yJ * (eI - eJ)) / eta;
        if (newAlphaJ <= lowerBound)
        {
            newAlphaJ = lowerBound;
        }
        else if (newAlphaJ >= upperBound)
        {
            newAlphaJ = upperBound;
        }
        // If the new alpha is close enough to 0.0 or the maximum alpha, just
        // set it to that value.
        if (newAlphaJ < epsilon)
        {
            newAlphaJ = 0.0;
        }
        else if (newAlphaJ > CMinusEpsilon)
        {
            newAlphaJ = C;
        }
        //System.out.println("    alphajnew: " + alphaj);
        if (Math.abs(newAlphaJ - oldAlphaJ) < epsilon)
        {
            return false;
        }
        double newAlphaI = oldAlphaI + yI * yJ * (oldAlphaJ - newAlphaJ);
        // If the new alpha is close enough to 0.0 or the maximum alpha, just
        // set it to that value.
        if (newAlphaI < epsilon)
        {
            newAlphaI = 0.0;
        }
        else if (newAlphaI > CMinusEpsilon)
        {
            newAlphaI = C;
        }
        final double oldBias = this.getBias();
        final double b1 = oldBias - eI
            - yI * (newAlphaI - oldAlphaI) * kII
            - yJ * (newAlphaJ - oldAlphaJ) * kIJ;
        final double b2 = oldBias - eJ
            - yI * (newAlphaI - oldAlphaI) * kJI
            - yJ * (newAlphaJ - oldAlphaJ) * kJJ;
        final double newBias;
        // Prefer the bias derived from an unbound alpha; otherwise average.
        if (newAlphaI > epsilon && newAlphaI < CMinusEpsilon)
        {
            newBias = b1;
        }
        else if (newAlphaJ > epsilon && newAlphaJ < CMinusEpsilon)
        {
            newBias = b2;
        }
        else
        {
            newBias = (b1 + b2) / 2.0;
        }
        //System.out.println("    alphai: " + alphai);
        //System.out.println("    alphaj: " + alphaj);
        //System.out.println("    b: " + b);
        this.setAlpha(i, newAlphaI);
        this.setAlpha(j, newAlphaJ);
        this.setBias(newBias);
        return true;
    }
    /** Releases the transient learning state. */
    @Override
    protected void cleanupAlgorithm()
    {
        this.dataList = null;
        this.supportsMap = null;
    }
    // Evaluates the kernel between the inputs of examples i and j.
    private double evaluateKernel(
        final int i,
        final int j)
    {
        return this.kernel.evaluate(this.getPoint(i), this.getPoint(j));
    }
    // Raw (pre-threshold) SVM output for an arbitrary input.
    private double getSVMOutput(
        final InputType input)
    {
        return this.result.evaluateAsDouble(input);
    }
    // Raw SVM output for the i-th training example.
    private double getSVMOutput(
        final int i)
    {
        return this.getSVMOutput(this.getPoint(i));
    }
    // Input of the i-th training example.
    private InputType getPoint(
        final int i)
    {
        return this.dataList.get(i).getInput();
    }
    // Target label of the i-th example mapped to +1.0 / -1.0.
    private double getTarget(
        final int i)
    {
        return this.dataList.get(i).getOutput() ? +1.0 : -1.0;
    }
    // Current alpha for example i; zero when it is not a support vector.
    private double getAlpha(
        final int i)
    {
        final DefaultWeightedValue<InputType> support = this.supportsMap.get(i);
        if (support == null)
        {
            return 0.0;
        }
        else
        {
            // The weight is the label (+1 or -1) times alpha. Alpha is always
            // greater than zero, so we just take the absolute value of the
            // weight to get it.
            return Math.abs(support.getWeight());
        }
    }
    // Sets alpha for example i; removes the support vector when it is zero.
    private void setAlpha(
        int i,
        double alpha)
    {
        if (alpha == 0.0)
        {
            this.supportsMap.remove(i);
        }
        else
        {
            // The weight is the label times alpha.
            final double weight = this.getTarget(i) * alpha;
            DefaultWeightedValue<InputType> support = this.supportsMap.get(i);
            if (support == null)
            {
                support = new DefaultWeightedValue<InputType>(
                    this.getPoint(i), weight);
                supportsMap.put(i, support);
            }
            else
            {
                support.setWeight(weight);
            }
        }
    }
    // Current bias (threshold) of the categorizer being learned.
    private double getBias()
    {
        return this.result.getBias();
    }
    // Updates the bias of the categorizer being learned.
    private void setBias(
        final double b)
    {
        this.result.setBias(b);
    }
    /** Returns the categorizer being learned (also valid mid-learning). */
    public KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>> getResult()
    {
        return this.result;
    }
    /** Returns the kernel in use. */
    public Kernel<? super InputType> getKernel()
    {
        return kernel;
    }
    /** Sets the kernel to use. */
    public void setKernel(
        final Kernel<? super InputType> kernel)
    {
        this.kernel = kernel;
    }
    /** Returns the maximum penalty parameter (C). */
    public double getMaxPenalty()
    {
        return maxPenalty;
    }
    /**
     * Sets the maximum penalty parameter (C).
     *
     * @throws IllegalArgumentException if maxPenalty is not positive
     */
    public void setMaxPenalty(
        final double maxPenalty)
    {
        if (maxPenalty <= 0.0)
        {
            throw new IllegalArgumentException("maxPenalty must be positive.");
        }
        this.maxPenalty = maxPenalty;
    }
    /** Returns the KKT error tolerance. */
    public double getErrorTolerance()
    {
        return errorTolerance;
    }
    /**
     * Sets the KKT error tolerance.
     *
     * @throws IllegalArgumentException if errorTolerance is negative
     */
    public void setErrorTolerance(
        final double errorTolerance)
    {
        if (errorTolerance < 0.0)
        {
            throw new IllegalArgumentException(
                "errorTolerance cannot be negative.");
        }
        this.errorTolerance = errorTolerance;
    }
    /** Returns the changeless-iteration limit used for stopping. */
    public int getMaxStepsWithoutChange()
    {
        return maxStepsWithoutChange;
    }
    /**
     * Sets the changeless-iteration limit used for stopping.
     *
     * @throws IllegalArgumentException if maxStepsWithoutChange is not positive
     */
    public void setMaxStepsWithoutChange(
        final int maxStepsWithoutChange)
    {
        if (maxStepsWithoutChange <= 0)
        {
            throw new IllegalArgumentException(
                "maxStepsWithoutChange must be positive");
        }
        this.maxStepsWithoutChange = maxStepsWithoutChange;
    }
    /** Returns the numerical zero threshold. */
    public double getEffectiveZero()
    {
        return this.effectiveZero;
    }
    /**
     * Sets the numerical zero threshold.
     *
     * @throws IllegalArgumentException if effectiveZero is negative
     */
    public void setEffectiveZero(
        final double effectiveZero)
    {
        if (effectiveZero < 0.0)
        {
            throw new IllegalArgumentException(
                "effectiveZero cannot be negative.");
        }
        this.effectiveZero = effectiveZero;
    }
    /** Returns the randomness source. */
    public Random getRandom()
    {
        return this.random;
    }
    /** Sets the randomness source. */
    public void setRandom(
        final Random random)
    {
        this.random = random;
    }
}
| {
"content_hash": "4cc97def5902148d9d7e6c99ad09ef13",
"timestamp": "",
"source": "github",
"line_count": 549,
"max_line_length": 138,
"avg_line_length": 28.29143897996357,
"alnum_prop": 0.5780968323461241,
"repo_name": "codeaudit/Foundry",
"id": "d624d9ca0970d3740e216268bdedf849af997bf9",
"size": "16037",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Components/LearningCore/Test/gov/sandia/cognition/learning/algorithm/svm/SimplifiedSequentialMinimalOptimization.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "685"
},
{
"name": "Java",
"bytes": "10924869"
}
],
"symlink_target": ""
} |
/*
 * Combo (drop-down) widget styles.
 * Structural rules (layout, sizing) come first; the theme rules further
 * down intentionally repeat selectors (.combo, .combo-arrow, .combo-panel)
 * to layer colors and backgrounds on top of the structure, so the rule
 * order must be preserved.
 */
.combo {
  display: inline-block;
  white-space: nowrap;
  margin: 0;
  padding: 0;
  border-width: 1px;
  border-style: solid;
  overflow: hidden;
  vertical-align: middle;
}
/* Inner text input; the outer .combo container draws the border. */
.combo .combo-text {
  font-size: 12px;
  border: 0px;
  margin: 0;
  padding: 0px 2px;
  vertical-align: baseline;
}
/* Drop-down arrow button, semi-transparent until hovered.
   The filter lines are the legacy IE opacity equivalents. */
.combo-arrow {
  width: 18px;
  height: 20px;
  overflow: hidden;
  display: inline-block;
  vertical-align: top;
  cursor: pointer;
  opacity: 0.6;
  filter: alpha(opacity=60);
}
.combo-arrow-hover {
  opacity: 1.0;
  filter: alpha(opacity=100);
}
/* Drop-down list panel. */
.combo-panel {
  overflow: auto;
}
/* Theme section: colors and backgrounds only from here down. */
.combo-arrow {
  background: url('images/combo_arrow.png') no-repeat center center;
}
.combo-panel {
  background-color: #fff;
}
.combo {
  border-color: #ddd;
  background-color: #fff;
}
.combo-arrow {
  background-color: #ffffff;
}
.combo-arrow-hover {
  background-color: #E6E6E6;
}
.combo-arrow:hover {
  background-color: #E6E6E6;
}
/* Disabled icon keeps the default cursor even on hover. */
.combo .textbox-icon-disabled:hover {
  cursor: default;
}
/* Validation failure highlight for the text box. */
.textbox-invalid {
  border-color: #ffa8a8;
  background-color: #fff3f3;
}
| {
"content_hash": "f543d1c345b483ea9575b164ba790fe6",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 70,
"avg_line_length": 15.694444444444445,
"alnum_prop": 0.6221238938053097,
"repo_name": "george510257/gls-demo",
"id": "f59e7bca65a553c44ee0ece83840a4e82753d047",
"size": "1130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/resources/static/plugins/jquery-easyui-1.4.4/themes/metro/combo.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "506069"
},
{
"name": "HTML",
"bytes": "488910"
},
{
"name": "Java",
"bytes": "149639"
},
{
"name": "JavaScript",
"bytes": "930442"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Media;
namespace MahApps.Metro.IconPacks
{
    /// <summary>
    /// PixelartIcons are licensed under the [MIT license](<see><cref>https://github.com/halfmage/pixelarticons/blob/master/LICENSE</cref></see>).
    /// Contributions, corrections and requests can be made on GitHub <see><cref>https://github.com/halfmage/pixelarticons</cref></see>.
    /// </summary>
    [MetaData("Pixelarticons", "https://pixelarticons.com/", "https://github.com/halfmage/pixelarticons/blob/master/LICENSE")]
    public class PathIconPixelartIcons : PathIconControlBase
    {
        /// <summary>
        /// Identifies the <see cref="Kind"/> dependency property.
        /// </summary>
        public static readonly DependencyProperty KindProperty
            = DependencyProperty.Register(nameof(Kind), typeof(PackIconPixelartIconsKind), typeof(PathIconPixelartIcons), new PropertyMetadata(default(PackIconPixelartIconsKind), KindPropertyChangedCallback));
        // Refreshes the rendered geometry whenever Kind receives a new value.
        private static void KindPropertyChangedCallback(DependencyObject dependencyObject, DependencyPropertyChangedEventArgs e)
        {
            if (e.NewValue != e.OldValue)
            {
                ((PathIconPixelartIcons)dependencyObject).UpdateData();
            }
        }
        /// <summary>
        /// Gets or sets the icon to display.
        /// </summary>
        public PackIconPixelartIconsKind Kind
        {
            get { return (PackIconPixelartIconsKind)GetValue(KindProperty); }
            set { SetValue(KindProperty, value); }
        }
        public PathIconPixelartIcons()
        {
        }
        // Looks up the path data string for the current Kind and applies it:
        // clears Data when no entry exists, otherwise binds the string to the
        // PathIcon.Data property with a one-time binding.
        protected override void UpdateData()
        {
            string data = null;
            PackIconPixelartIconsDataFactory.DataIndex.Value?.TryGetValue(Kind, out data);
            if (string.IsNullOrEmpty(data))
            {
                this.Data = default(Geometry);
            }
            else
            {
                BindingOperations.SetBinding(this, PathIcon.DataProperty, new Binding() {Source = data, Mode = BindingMode.OneTime});
            }
        }
    }
} | {
"content_hash": "890afdce37df0b3235ca173ac7c50556",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 209,
"avg_line_length": 38.527272727272724,
"alnum_prop": 0.6460594620103822,
"repo_name": "MahApps/MahApps.Metro.IconPacks",
"id": "a2f9c54dd3caf1fafc3460ece01217dbc8ec06d9",
"size": "2121",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/MahApps.Metro.IconPacks/Icons/PixelartIcons/PathIconPixelartIcons.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "24975987"
},
{
"name": "PowerShell",
"bytes": "648"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">
<bean id="sakaiFCKTextEvolver" class="org.sakaiproject.lessonbuildertool.tool.evolvers.SakaiFCKTextEvolver">
<property name="contentHostingService" ref="org.sakaiproject.content.api.ContentHostingService" />
<property name="context" ref="sakai-Context" />
<!-- sets the default editor size for this context -->
<property name="width" value="400" />
<property name="height" value="400" />
</bean>
<!-- rich text evolver bean def -->
<bean id="richTextEvolver" class="uk.org.ponder.springutil.BeanReferenceFactoryBean">
<property name="targetBeanName" value="sakaiFCKTextEvolver" />
</bean>
<!-- <alias name="sakaiFCKTextEvolver" alias="richTextEvolver"/> -->
<bean id="fieldDateTransit" parent="beanExploder">
<property name="factory">
<bean
class="uk.org.ponder.dateutil.StandardFieldDateTransit"
init-method="init">
<property name="locale" ref="requestLocale" />
<property name="timeZone" ref="requestTimeZone" />
</bean>
</property>
</bean>
<bean id="httpServletRequest" factory-bean="httpServletFactory"
factory-method="getHttpServletRequest" />
<bean id="httpServletResponse" factory-bean="httpServletFactory"
factory-method="getHttpServletResponse" />
<!-- the producer beans -->
<bean name="showPageProducer" class="org.sakaiproject.lessonbuildertool.tool.producers.ShowPageProducer">
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="authzGroupService" ref="org.sakaiproject.authz.api.AuthzGroupService" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="dateEvolver" ref="fieldDateInputEvolver" />
<property name="forumEntity" ref="org.sakaiproject.lessonbuildertool.service.ForumEntity"/>
<property name="quizEntity" ref="org.sakaiproject.lessonbuildertool.service.SamigoEntity"/>
<property name="bltiEntity" ref="org.sakaiproject.lessonbuildertool.service.BltiEntity"/>
<property name="assignmentEntity" ref="org.sakaiproject.lessonbuildertool.service.AssignmentEntity"/>
<property name="timeService" ref="org.sakaiproject.time.api.TimeService" />
<property name="localeGetter" ref="requestLocaleProxy" />
<property name="httpServletResponse" ref="httpServletResponse" />
<property name="httpServletRequest" ref="httpServletRequest" />
<property name="toolManager" ref="org.sakaiproject.tool.api.ActiveToolManager" />
<property name="lessonBuilderAccessService" ref="org.sakaiproject.lessonbuildertool.service.LessonBuilderAccessService" />
<property name="richTextEvolver" ref="richTextEvolver" />
<property name="imageToMimeMap" ref="org.sakaiproject.lessonbuildertool.util.ImageToMimeMap"/>
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.ReorderProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.PreviewProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="toolManager" ref="org.sakaiproject.tool.api.ActiveToolManager" />
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.AssignmentPickerProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="assignmentEntity" ref="org.sakaiproject.lessonbuildertool.service.AssignmentEntity"/>
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.ForumPickerProducer" >
    <property name="messageLocator" ref="messageLocator"/>
    <property name="simplePageBean" ref="simplePageBean" />
    <property name="simplePageToolDao" ref="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
    <property name="forumEntity" ref="org.sakaiproject.lessonbuildertool.service.ForumEntity"/>
    <property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.BltiPickerProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="bltiEntity" ref="org.sakaiproject.lessonbuildertool.service.BltiEntity"/>
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean name="ltiFileItemProducer"
class="org.sakaiproject.lessonbuildertool.tool.producers.LtiFileItemProducer" init-method="init">
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="localeGetter" ref="requestLocaleProxy" />
<property name="ltiService" ref="org.sakaiproject.lti.api.LTIService" />
<property name="toolManager" ref="org.sakaiproject.tool.api.ToolManager" />
</bean>
<bean name="org.sakaiproject.lessonbuildertool.tool.producers.PagePickerProducer"
class="org.sakaiproject.lessonbuildertool.tool.producers.PagePickerProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao" ref="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
<property name="toolManager" ref="org.sakaiproject.tool.api.ActiveToolManager" />
<property name="localeGetter" ref="requestLocaleProxy" />
<property name="lessonsAccess" ref="org.sakaiproject.lessonbuildertool.service.LessonsAccess" />
<property name="imageToMimeMap" ref="org.sakaiproject.lessonbuildertool.util.ImageToMimeMap"/>
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.LinkTrackerProducer" >
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="messageLocator" ref="messageLocator"/>
<property name="localeGetter" ref="requestLocaleProxy" />
<property name="lessonBuilderAccessService" ref="org.sakaiproject.lessonbuildertool.service.LessonBuilderAccessService" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.RemovePageProducer" >
<property name="simplePageBean" ref="simplePageBean" />
<property name="messageLocator" ref="messageLocator"/>
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.QuizPickerProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="quizEntity" ref="org.sakaiproject.lessonbuildertool.service.SamigoEntity"/>
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.ResourcePickerProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="sessionManager" ref="org.sakaiproject.tool.api.SessionManager" />
<property name="contentHostingService" ref="org.sakaiproject.content.api.ContentHostingService" />
<property name="toolManager" ref="org.sakaiproject.tool.api.ActiveToolManager" />
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.EditPageProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="richTextEvolver" ref="richTextEvolver" />
<property name="showPageProducer" ref="showPageProducer" />
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.PermissionsHelperProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="sessionManager" ref="org.sakaiproject.tool.api.SessionManager" />
<property name="simplePageBean" ref="simplePageBean" />
<property name="siteService" ref="org.sakaiproject.site.api.SiteService" />
<property name="toolManager" ref="org.sakaiproject.tool.api.ActiveToolManager" />
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.ReloadPageProducer" >
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.CommentGradingPaneProducer" >
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="messageLocator" ref="messageLocator"/>
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.QuestionGradingPaneProducer" >
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="messageLocator" ref="messageLocator"/>
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.ShowItemProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="httpServletRequest" ref="httpServletRequest" />
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.IFrameWindowProducer" >
<property name="simplePageBean" ref="simplePageBean" />
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.CommentsProducer" >
<property name="messageLocator" ref="messageLocator"/>
<property name="localeGetter" ref="requestLocaleProxy" />
<property name="simplePageBean" ref="simplePageBean" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
</bean>
<bean class="org.sakaiproject.lessonbuildertool.tool.producers.PeerEvalStatsProducer" >
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="messageLocator" ref="messageLocator"/>
<property name="simplePageBean" ref="simplePageBean" />
<property name="showPageProducer" ref="showPageProducer" />
<property name="localeGetter" ref="requestLocaleProxy" />
</bean>
<!-- backing beans -->
<bean name="simplePageBean" class="org.sakaiproject.lessonbuildertool.tool.beans.SimplePageBean" init-method="init">
<property name="messageLocator" ref="messageLocator"/>
<property name="toolManager" ref="org.sakaiproject.tool.api.ActiveToolManager" />
<property name="securityService" ref="org.sakaiproject.authz.api.SecurityService" />
<property name="sessionManager" ref="org.sakaiproject.tool.api.SessionManager" />
<property name="siteService" ref="org.sakaiproject.site.api.SiteService" />
<property name="contentHostingService" ref="org.sakaiproject.content.api.ContentHostingService" />
<property name="authzGroupService" ref="org.sakaiproject.authz.api.AuthzGroupService" />
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="httpServletResponse" ref="httpServletResponse" />
<property name="multipartMap" ref="multipartMap" />
<property name="forumEntity" ref="org.sakaiproject.lessonbuildertool.service.ForumEntity"/>
<property name="quizEntity" ref="org.sakaiproject.lessonbuildertool.service.SamigoEntity"/>
<property name="assignmentEntity" ref="org.sakaiproject.lessonbuildertool.service.AssignmentEntity"/>
<property name="bltiEntity" ref="org.sakaiproject.lessonbuildertool.service.BltiEntity"/>
<property name="gradebookIfc" ref="org.sakaiproject.lessonbuildertool.service.GradebookIfc"/>
<property name="lessonBuilderEntityProducer" ref="org.sakaiproject.lessonbuildertool.service.LessonBuilderEntityProducer"/>
<property name="lessonsAccess" ref="org.sakaiproject.lessonbuildertool.service.LessonsAccess"/>
<property name="lessonBuilderAccessService" ref="org.sakaiproject.lessonbuildertool.service.LessonBuilderAccessService" />
<property name="ltiService" ref="org.sakaiproject.lti.api.LTIService" />
</bean>
<!-- Over-riding the CommonsMultipartResolver to have multiple files incoming under the same form input name -->
<bean id="commonsMultipartResolver" class="org.sakaiproject.lessonbuildertool.resolver.MultiCommonsMultipartResolver">
</bean>
<bean name="gradingBean" class="org.sakaiproject.lessonbuildertool.tool.beans.GradingBean">
<property name="simplePageToolDao">
<ref bean="org.sakaiproject.lessonbuildertool.model.SimplePageToolDao" />
</property>
<property name="gradebookIfc" ref="org.sakaiproject.lessonbuildertool.service.GradebookIfc"/>
<property name="simplePageBean" ref="simplePageBean" />
</bean>
<!-- Bean override for rootHandlerHook see RSF-123 and CTL-1214 -->
<bean id="reportHandlerHook"
class="org.sakaiproject.lessonbuildertool.tool.beans.ReportHandlerHook">
<property name="viewparams" ref="viewParameters" />
<property name="response" ref="httpServletResponse" />
</bean>
<bean id="rootHandlerBean" class="org.sakaiproject.lessonbuildertool.util.RootHandlerBeanOverride" init-method="handle">
<property name="rootHandlerBeanBase" ref="overridedRootHandlerBean"/>
<property name="reportHandlerHook" ref="reportHandlerHook"/>
</bean>
<bean id="overridedRootHandlerBean" parent="rootHandlerBeanBase" init-method="doNothing" class="org.sakaiproject.lessonbuildertool.util.OverridedServletRootHandlerBean">
<property name="httpServletRequest" ref="httpServletRequest" />
<property name="httpServletResponse" ref="httpServletResponse" />
</bean>
</beans>
| {
"content_hash": "3e1a67d16c8af1c245198dca17901ea0",
"timestamp": "",
"source": "github",
"line_count": 295,
"max_line_length": 177,
"avg_line_length": 53.42033898305085,
"alnum_prop": 0.7533472936100006,
"repo_name": "bzhouduke123/sakai",
"id": "c8bb9cdad0b0711284771add1b70d4a1f7211151",
"size": "15759",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "lessonbuilder/tool/src/webapp/WEB-INF/requestContext.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "59098"
},
{
"name": "Batchfile",
"bytes": "5172"
},
{
"name": "CSS",
"bytes": "2010598"
},
{
"name": "ColdFusion",
"bytes": "146057"
},
{
"name": "HTML",
"bytes": "5536789"
},
{
"name": "Java",
"bytes": "46982784"
},
{
"name": "JavaScript",
"bytes": "9704443"
},
{
"name": "Lasso",
"bytes": "26436"
},
{
"name": "PHP",
"bytes": "962699"
},
{
"name": "PLSQL",
"bytes": "2268793"
},
{
"name": "Perl",
"bytes": "61738"
},
{
"name": "Python",
"bytes": "44698"
},
{
"name": "Ruby",
"bytes": "2344"
},
{
"name": "Shell",
"bytes": "19247"
},
{
"name": "SourcePawn",
"bytes": "2242"
},
{
"name": "XSLT",
"bytes": "280424"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!--
Licensed to ObjectStyle LLC under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ObjectStyle LLC licenses
this file to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<data-map xmlns="http://cayenne.apache.org/schema/7/modelMap"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://cayenne.apache.org/schema/7/modelMap http://cayenne.apache.org/schema/7/modelMap.xsd"
project-version="7">
<db-entity name="db_entity">
<db-attribute name="id" type="BIGINT" isPrimaryKey="true" isMandatory="true"/>
</db-entity>
</data-map>
| {
"content_hash": "360b8480ee377841e7308b07e0848c2a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 114,
"avg_line_length": 43.5,
"alnum_prop": 0.7487684729064039,
"repo_name": "nhl/bootique-cayenne",
"id": "5f29e93f1d1c0028dfff182f6e4f14613f837605",
"size": "1218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bootique-cayenne-default-it/src/test/resources/datamap.map.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "24929"
}
],
"symlink_target": ""
} |
namespace chromeos {
namespace sensors {
namespace {
// Process-wide singleton instance; created by Initialize() and destroyed by
// Shutdown() below.
SensorHalDispatcher* g_sensor_hal_dispatcher = nullptr;
}
// static
void SensorHalDispatcher::Initialize() {
  // Idempotent: a second call keeps the existing instance and only warns.
  if (!g_sensor_hal_dispatcher) {
    g_sensor_hal_dispatcher = new SensorHalDispatcher();
    return;
  }
  LOG(WARNING) << "SensorHalDispatcher was already initialized";
}
// static
void SensorHalDispatcher::Shutdown() {
if (!g_sensor_hal_dispatcher) {
LOG(WARNING)
<< "SensorHalDispatcher::Shutdown() called with null dispatcher";
return;
}
delete g_sensor_hal_dispatcher;
g_sensor_hal_dispatcher = nullptr;
}
// static
// Returns the singleton, or nullptr if Initialize() has not been called (or
// Shutdown() already ran).
SensorHalDispatcher* SensorHalDispatcher::GetInstance() {
  return g_sensor_hal_dispatcher;
}
// Binds the (single) sensor HAL server and connects every already-registered
// client to it.
void SensorHalDispatcher::RegisterServer(
    mojo::PendingRemote<mojom::SensorHalServer> remote) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  sensor_hal_server_.Bind(std::move(remote));
  // Reset the remote when the server goes away so a new one can re-register.
  sensor_hal_server_.set_disconnect_handler(
      base::BindOnce(&SensorHalDispatcher::OnSensorHalServerDisconnect,
                     base::Unretained(this)));
  // Set up the Mojo channels for clients which registered before the server
  // registers.
  for (auto& client : sensor_hal_clients_)
    EstablishMojoChannel(client);
}
// Registers a sensor HAL client. If the server is already available the
// client's Mojo channel is established immediately; otherwise it will be set
// up when the server registers (see RegisterServer()).
void SensorHalDispatcher::RegisterClient(
    mojo::PendingRemote<mojom::SensorHalClient> remote) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  auto client = mojo::Remote<mojom::SensorHalClient>(std::move(remote));
  if (sensor_hal_server_)
    EstablishMojoChannel(client);
  // The RemoteSet keeps the client alive and fires the disconnect handler
  // installed in the constructor when it drops.
  sensor_hal_clients_.Add(std::move(client));
}
SensorHalDispatcher::SensorHalDispatcher() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  // Log every client disconnect; the RemoteSet removes the dropped remote
  // automatically.
  sensor_hal_clients_.set_disconnect_handler(
      base::BindRepeating(&SensorHalDispatcher::OnSensorHalClientDisconnect,
                          base::Unretained(this)));
}
// Mints a fresh unguessable token and records it so AuthenticateClient() can
// later verify that a connecting client presented a token issued here.
base::UnguessableToken SensorHalDispatcher::GetTokenForTrustedClient() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  auto token = base::UnguessableToken::Create();
  client_token_set_.insert(token);
  return token;
}
// Returns true iff |token| was previously issued by
// GetTokenForTrustedClient().
bool SensorHalDispatcher::AuthenticateClient(
    const base::UnguessableToken& token) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  return client_token_set_.count(token) != 0;
}
SensorHalDispatcher::~SensorHalDispatcher() {
  // Destruction must happen on the same sequence as all other calls.
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
// Asks the server to create a SensorService pipe and hands the remote end to
// |client|. Requires that a server is currently registered.
void SensorHalDispatcher::EstablishMojoChannel(
    const mojo::Remote<mojom::SensorHalClient>& client) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  DCHECK(sensor_hal_server_);
  mojo::PendingRemote<mojom::SensorService> service_remote;
  sensor_hal_server_->CreateChannel(
      service_remote.InitWithNewPipeAndPassReceiver());
  client->SetUpChannel(std::move(service_remote));
}
// Disconnect handler installed in RegisterServer(): drops the dead remote so
// a replacement server can bind later.
void SensorHalDispatcher::OnSensorHalServerDisconnect() {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  LOG(ERROR) << "Sensor HAL Server connection lost";
  sensor_hal_server_.reset();
}
// Disconnect handler for the client RemoteSet; removal of the remote itself
// is handled by the set, this only logs which element dropped.
void SensorHalDispatcher::OnSensorHalClientDisconnect(
    mojo::RemoteSetElementId id) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  LOG(ERROR) << "Sensor HAL Client connection lost: " << id;
}
} // namespace sensors
} // namespace chromeos
| {
"content_hash": "cde0f11d19d89235f333220584c8d910",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 76,
"avg_line_length": 29.926605504587155,
"alnum_prop": 0.7354383813611282,
"repo_name": "ric2b/Vivaldi-browser",
"id": "30919feb4e39b031b81aa9ec67d79229abcbf27b",
"size": "3572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chromium/chromeos/components/sensors/ash/sensor_hal_dispatcher.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="RectangleSelectionCommand.cs" company="Helix Toolkit">
// Copyright (c) 2014 Helix Toolkit contributors
// </copyright>
// <summary>
// Provides a command that shows a rectangle when the mouse is dragged and raises an event returning the models contained in the rectangle
// when the mouse button is released.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace HelixToolkit.Wpf
{
using System;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
/// <summary>
/// Provides a command that shows a rectangle when the mouse is dragged and raises an event returning the models contained in the rectangle
/// when the mouse button is released.
/// </summary>
public class RectangleSelectionCommand : SelectionCommand
{
    /// <summary>
    /// The selection rectangle.
    /// </summary>
    private Rect selectionRect;

    /// <summary>
    /// The rectangle adorner.
    /// </summary>
    private RectangleAdorner rectangleAdorner;

    /// <summary>
    /// Initializes a new instance of the <see cref="RectangleSelectionCommand" /> class.
    /// </summary>
    /// <param name="viewport">The viewport.</param>
    /// <param name="modelsSelectedEventHandler">The selection event handler.</param>
    public RectangleSelectionCommand(Viewport3D viewport, EventHandler<ModelsSelectedEventArgs> modelsSelectedEventHandler)
        : base(viewport, modelsSelectedEventHandler, null)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="RectangleSelectionCommand" /> class.
    /// </summary>
    /// <param name="viewport">The viewport.</param>
    /// <param name="visualsSelectedEventHandler">The selection event handler.</param>
    public RectangleSelectionCommand(Viewport3D viewport, EventHandler<VisualsSelectedEventArgs> visualsSelectedEventHandler)
        : base(viewport, null, visualsSelectedEventHandler)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="RectangleSelectionCommand" /> class.
    /// </summary>
    /// <param name="viewport">The viewport.</param>
    /// <param name="modelsSelectedEventHandler">The selection event handler.</param>
    /// <param name="visualsSelectedEventHandler">The selection event handler.</param>
    public RectangleSelectionCommand(Viewport3D viewport, EventHandler<ModelsSelectedEventArgs> modelsSelectedEventHandler, EventHandler<VisualsSelectedEventArgs> visualsSelectedEventHandler)
        : base(viewport, modelsSelectedEventHandler, visualsSelectedEventHandler)
    {
    }

    /// <summary>
    /// Occurs when the manipulation is started.
    /// </summary>
    /// <param name="e">The <see cref="ManipulationEventArgs"/> instance containing the event data.</param>
    protected override void Started(ManipulationEventArgs e)
    {
        base.Started(e);
        // Start with a degenerate (zero-size) rectangle anchored at the
        // mouse-down point; Delta() grows it as the mouse moves.
        this.selectionRect = new Rect(this.MouseDownPoint, this.MouseDownPoint);
        this.ShowRectangle();
    }

    /// <summary>
    /// Occurs when the position is changed during a manipulation.
    /// </summary>
    /// <param name="e">The <see cref="ManipulationEventArgs"/> instance containing the event data.</param>
    protected override void Delta(ManipulationEventArgs e)
    {
        base.Delta(e);
        // Rect normalizes the two corner points, so dragging in any
        // direction yields a valid rectangle.
        this.selectionRect = new Rect(this.MouseDownPoint, e.CurrentPosition);
        this.UpdateRectangle();
    }

    /// <summary>
    /// The customized complete operation when the manipulation is completed.
    /// </summary>
    /// <param name="e">
    /// The <see cref="ManipulationEventArgs"/> instance containing the event data.
    /// </param>
    protected override void Completed(ManipulationEventArgs e)
    {
        // Remove the adorner before hit testing so the finished drag leaves
        // no visual residue regardless of the selection outcome.
        this.HideRectangle();
        var res = this.Viewport.FindHits(this.selectionRect, this.SelectionHitMode);
        var selectedModels = res.Select(hit => hit.Model).ToList();

        // We do not handle the point selection, unless no models are selected. If no models are selected, we clear the
        // existing selection.
        if (this.selectionRect.Size.Equals(default(Size)) && selectedModels.Any())
        {
            // Zero-size rectangle == a plain click with models under it;
            // leave that case to the point-selection handling and raise no
            // events here.
            return;
        }

        this.OnModelsSelected(new ModelsSelectedByRectangleEventArgs(selectedModels, this.selectionRect));
        var selectedVisuals = res.Select(hit => hit.Visual).ToList();
        this.OnVisualsSelected(new VisualsSelectedByRectangleEventArgs(selectedVisuals, this.selectionRect));
    }

    /// <summary>
    /// Gets the cursor for the gesture.
    /// </summary>
    /// <returns>
    /// A cursor.
    /// </returns>
    protected override Cursor GetCursor()
    {
        return Cursors.Arrow;
    }

    /// <summary>
    /// Hides the selection rectangle.
    /// </summary>
    private void HideRectangle()
    {
        // The adorner layer can be null (e.g. viewport not in a visual tree
        // with an AdornerDecorator); nothing to remove in that case.
        var myAdornerLayer = AdornerLayer.GetAdornerLayer(this.Viewport);
        if (myAdornerLayer == null) { return; }
        if (this.rectangleAdorner != null)
        {
            myAdornerLayer.Remove(this.rectangleAdorner);
        }

        this.rectangleAdorner = null;
        this.Viewport.InvalidateVisual();
    }

    /// <summary>
    /// Updates the selection rectangle.
    /// </summary>
    private void UpdateRectangle()
    {
        // No-op unless ShowRectangle() managed to create the adorner.
        if (this.rectangleAdorner == null)
        {
            return;
        }

        this.rectangleAdorner.Rectangle = this.selectionRect;
        this.rectangleAdorner.InvalidateVisual();
    }

    /// <summary>
    /// Shows the selection rectangle.
    /// </summary>
    private void ShowRectangle()
    {
        // Idempotent: keep the existing adorner if one is already showing.
        if (this.rectangleAdorner != null)
        {
            return;
        }

        var adornerLayer = AdornerLayer.GetAdornerLayer(this.Viewport);
        if (adornerLayer == null) { return; }
        this.rectangleAdorner = new RectangleAdorner(this.Viewport, this.selectionRect, Colors.LightGray, Colors.Black, 1, 1, 0, DashStyles.Dash);
        adornerLayer.Add(this.rectangleAdorner);
    }
}
} | {
"content_hash": "4d048844ab198f50236ca11196719c51",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 195,
"avg_line_length": 39.21264367816092,
"alnum_prop": 0.591235526894328,
"repo_name": "holance/helix-toolkit",
"id": "82890f4e6a4ee033ff85271df9219d5536bc51c6",
"size": "6825",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "Source/HelixToolkit.Wpf.Shared/SelectionCommands/RectangleSelectionCommand.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1092"
},
{
"name": "C#",
"bytes": "9223803"
},
{
"name": "HLSL",
"bytes": "291590"
}
],
"symlink_target": ""
} |
// Opt-in trait: specialize this for an enum type E with enable=true to make
// the bitwise operators defined below participate in overload resolution for
// that enum.
template<typename E>
struct enable_bitmask_operators{
    static const bool enable=false;
};
// Bitwise OR for enums that opted in via enable_bitmask_operators.
template<typename E>
typename std::enable_if<enable_bitmask_operators<E>::enable,E>::type
operator|(E lhs,E rhs){
    using underlying = typename std::underlying_type<E>::type;
    return static_cast<E>(static_cast<underlying>(lhs) | static_cast<underlying>(rhs));
}
// Bitwise AND for enums that opted in via enable_bitmask_operators.
template<typename E>
typename std::enable_if<enable_bitmask_operators<E>::enable,E>::type
operator&(E lhs,E rhs){
    using underlying = typename std::underlying_type<E>::type;
    return static_cast<E>(static_cast<underlying>(lhs) & static_cast<underlying>(rhs));
}
// Bitwise XOR for enums that opted in via enable_bitmask_operators.
template<typename E>
typename std::enable_if<enable_bitmask_operators<E>::enable,E>::type
operator^(E lhs,E rhs){
    using underlying = typename std::underlying_type<E>::type;
    return static_cast<E>(static_cast<underlying>(lhs) ^ static_cast<underlying>(rhs));
}
// Bitwise complement for enums that opted in via enable_bitmask_operators.
template<typename E>
typename std::enable_if<enable_bitmask_operators<E>::enable,E>::type
operator~(E lhs){
    using underlying = typename std::underlying_type<E>::type;
    return static_cast<E>(~static_cast<underlying>(lhs));
}
// In-place bitwise OR; returns lhs by reference to allow chaining.
template<typename E>
typename std::enable_if<enable_bitmask_operators<E>::enable,E&>::type
operator|=(E& lhs,E rhs){
    using underlying = typename std::underlying_type<E>::type;
    lhs = static_cast<E>(static_cast<underlying>(lhs) | static_cast<underlying>(rhs));
    return lhs;
}
// In-place bitwise AND; returns lhs by reference to allow chaining.
template<typename E>
typename std::enable_if<enable_bitmask_operators<E>::enable,E&>::type
operator&=(E& lhs,E rhs){
    using underlying = typename std::underlying_type<E>::type;
    lhs = static_cast<E>(static_cast<underlying>(lhs) & static_cast<underlying>(rhs));
    return lhs;
}
// In-place bitwise XOR; returns lhs by reference to allow chaining.
template<typename E>
typename std::enable_if<enable_bitmask_operators<E>::enable,E&>::type
operator^=(E& lhs,E rhs){
    using underlying = typename std::underlying_type<E>::type;
    lhs = static_cast<E>(static_cast<underlying>(lhs) ^ static_cast<underlying>(rhs));
    return lhs;
}
#endif // JSS_BITMASK_HPP
| {
"content_hash": "1cc4cdc6f7be842092eee316fc8f4280",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 69,
"avg_line_length": 31.861538461538462,
"alnum_prop": 0.7011105746016417,
"repo_name": "gscorpss/sockpp",
"id": "d8a8b0787df022c5279981e8383fe39bf3b802a8",
"size": "3678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/libs/sockpp/BitmaskOperations.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "26136"
},
{
"name": "CMake",
"bytes": "1104"
},
{
"name": "Makefile",
"bytes": "132"
},
{
"name": "Shell",
"bytes": "86"
}
],
"symlink_target": ""
} |
// Logs a successful result (the parsed JSON response) to the console.
function logResult(result) {
  console.log(result);
}
// Logs a request/parse failure to the console with a short prefix.
function logError(error) {
  console.log('Looks like there was a problem: \n', error);
}
// Passes a successful fetch Response through unchanged; throws an Error
// carrying the HTTP status text for non-2xx responses so the promise chain
// rejects.
function validateResponse(response) {
  if (response.ok) {
    return response;
  }
  throw Error(response.statusText);
}
// Returns the promise that resolves with the response body parsed as JSON.
function readResponseAsJSON(response) {
  return response.json();
}
/**
 * Performs an HTTP request against a REST resource and logs the parsed JSON
 * result (or the error) to the console.
 *
 * @param {string} pathToResource URL of the resource.
 * @param {string} accion HTTP method ('GET', 'POST', 'PUT', 'DELETE', ...).
 * @param {Object} [datos] Optional payload; when given it is serialized as
 *     JSON and sent with JSON Content-Type/Accept headers.
 * @returns {Promise} The fetch promise chain, so callers can sequence
 *     requests instead of relying on console ordering. Existing callers that
 *     ignore the return value are unaffected.
 */
function fetchJSON(pathToResource, accion, datos) {
  var init = { method: accion };
  if (datos) {
    init.body = JSON.stringify(datos);
    var myHeaders = new Headers();
    myHeaders.append("Content-Type", "application/json");
    myHeaders.append("Accept", "application/json");
    init.headers = myHeaders;
  }
  return fetch(pathToResource, init)
    .then(validateResponse)
    .then(readResponseAsJSON)
    .then(logResult)
    .catch(logError);
}
// Demo sequence exercising the productos REST API: list, read single
// items, delete, create, and update — each response is logged via
// fetchJSON's promise chain. NOTE(review): these fire concurrently
// (no await/chaining), so the logged order is not guaranteed.
fetchJSON('api/productos','GET');
fetchJSON('api/productos/galletas','GET');
fetchJSON('api/productos/patatas','GET');
fetchJSON('api/productos/galletas','DELETE');
fetchJSON('api/productos/','GET');
fetchJSON('api/productos/','POST',{nombre:"patatas", unidades:"1000"});
fetchJSON('api/productos/','GET');
fetchJSON('api/productos/','PUT',{nombre:"patatas", unidades:"10"});
fetchJSON('api/productos/','GET');
| {
"content_hash": "5ac1fbc7ec470539ae8adaf345dffc3f",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 71,
"avg_line_length": 26.355555555555554,
"alnum_prop": 0.7032040472175379,
"repo_name": "jaspock/dai",
"id": "664e214c53dacab9bad6cfa56d90d95d08cee7af",
"size": "1186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oldies/rest-carrito-jetty/src/main/webapp/rest.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "9422"
},
{
"name": "Java",
"bytes": "54421"
},
{
"name": "JavaScript",
"bytes": "2421"
}
],
"symlink_target": ""
} |
/* global adminCommentsL10n, thousandsSeparator, list_args, QTags, ajaxurl, wpAjax */
// Globals exported by this file for other admin scripts.
var setCommentsList, theList, theExtraList, commentReply;
(function($) {
// Shared helpers/state for comment-count bookkeeping. adminTitle caches
// the document title at load; isDashboard is truthy on the dashboard
// screen (where the "Right Now" widget exists) and falsy elsewhere.
var getCount, updateCount, updateCountText, updatePending, updateApproved,
updateHtmlTitle, updateDashboardText, adminTitle = document.title,
isDashboard = $('#dashboard_right_now').length,
titleDiv, titleRegEx;
getCount = function(el) {
var n = parseInt( el.html().replace(/[^0-9]+/g, ''), 10 );
if ( isNaN(n) ) {
return 0;
}
return n;
};
updateCount = function(el, n) {
var n1 = '';
if ( isNaN(n) ) {
return;
}
n = n < 1 ? '0' : n.toString();
if ( n.length > 3 ) {
while ( n.length > 3 ) {
n1 = thousandsSeparator + n.substr(n.length - 3) + n1;
n = n.substr(0, n.length - 3);
}
n = n + n1;
}
el.html(n);
};
// Adjust the approved-comment counters by `diff`: the screen-level
// "approved" count and, when commentPostId is given, the per-post
// comment bubbles — toggling the no-comments styling when a bubble
// reaches zero.
updateApproved = function( diff, commentPostId ) {
var postSelector = '.post-com-count-' + commentPostId,
noClass = 'comment-count-no-comments',
approvedClass = 'comment-count-approved',
approved,
noComments;
updateCountText( 'span.approved-count', diff );
if ( ! commentPostId ) {
return;
}
// cache selectors to not get dupes
approved = $( 'span.' + approvedClass, postSelector );
noComments = $( 'span.' + noClass, postSelector );
// Bubbles currently showing an approved count: apply the diff, clamping
// at zero, and swap to the no-comments style when the count hits zero.
approved.each(function() {
var a = $(this), n = getCount(a) + diff;
if ( n < 1 )
n = 0;
if ( 0 === n ) {
a.removeClass( approvedClass ).addClass( noClass );
} else {
a.addClass( approvedClass ).removeClass( noClass );
}
updateCount( a, n );
});
// Bubbles currently in the no-comments state: a positive diff flips
// them into the approved style.
noComments.each(function() {
var a = $(this);
if ( diff > 0 ) {
a.removeClass( noClass ).addClass( approvedClass );
} else {
a.addClass( noClass ).removeClass( approvedClass );
}
updateCount( a, diff );
});
};
// Adjust every counter element matched by `selector` by `diff`,
// clamping the displayed value at zero.
updateCountText = function( selector, diff ) {
	$( selector ).each(function() {
		var el = $(this);
		var next = getCount(el) + diff;
		updateCount( el, next < 1 ? 0 : next );
	});
};
// Refresh the dashboard "Right Now" comment strings from an ajax
// response's localized supplemental data; no-op off the dashboard or
// when the response lacks the i18n strings.
updateDashboardText = function ( response ) {
if ( ! isDashboard || ! response || ! response.i18n_comments_text ) {
return;
}
var rightNow = $( '#dashboard_right_now' );
$( '.comment-count a', rightNow ).text( response.i18n_comments_text );
// Hide the moderation link entirely when nothing awaits moderation.
$( '.comment-mod-count a', rightNow ).text( response.i18n_moderation_text )
.parent()
[ response.in_moderation > 0 ? 'removeClass' : 'addClass' ]( 'hidden' );
};
// Keep the browser-tab title's "(N)" pending-comment count in sync.
// Lazily builds a regex from the localized title template and a scratch
// div so getCount/updateCount (which expect jQuery elements) can be
// reused on the title fragment.
updateHtmlTitle = function ( diff ) {
var newTitle, regExMatch, titleCount, commentFrag;
titleRegEx = titleRegEx || new RegExp( adminCommentsL10n.docTitleCommentsCount.replace( '%s', '\\([0-9' + thousandsSeparator + ']+\\)' ) + '?' );
// count funcs operate on a $'d element
titleDiv = titleDiv || $( '<div />' );
newTitle = adminTitle;
// Pull the current "(N)" fragment out of the live title, if present.
commentFrag = titleRegEx.exec( document.title );
if ( commentFrag ) {
commentFrag = commentFrag[0];
titleDiv.html( commentFrag );
titleCount = getCount( titleDiv ) + diff;
} else {
titleDiv.html( 0 );
titleCount = diff;
}
if ( titleCount >= 1 ) {
// Still comments pending: substitute the updated count in place.
updateCount( titleDiv, titleCount );
regExMatch = titleRegEx.exec( document.title );
if ( regExMatch ) {
newTitle = document.title.replace( regExMatch[0], adminCommentsL10n.docTitleCommentsCount.replace( '%s', titleDiv.text() ) + ' ' );
}
} else {
// Count dropped to zero: fall back to the count-less title string.
regExMatch = titleRegEx.exec( newTitle );
if ( regExMatch ) {
newTitle = newTitle.replace( regExMatch[0], adminCommentsL10n.docTitleComments );
}
}
document.title = newTitle;
};
// Adjust the pending-moderation counters by `diff`: the HTML title
// (except on the dashboard), the admin-menu "awaiting moderation"
// bubble, and — when commentPostId is given — the per-post pending
// bubbles, toggling the no-pending styling at zero.
updatePending = function( diff, commentPostId ) {
var postSelector = '.post-com-count-' + commentPostId,
noClass = 'comment-count-no-pending',
noParentClass = 'post-com-count-no-pending',
pendingClass = 'comment-count-pending',
pending,
noPending;
if ( ! isDashboard ) {
updateHtmlTitle( diff );
}
$( 'span.pending-count' ).each(function() {
var a = $(this), n = getCount(a) + diff;
if ( n < 1 )
n = 0;
a.closest('.awaiting-mod')[ 0 === n ? 'addClass' : 'removeClass' ]('count-0');
updateCount( a, n );
});
if ( ! commentPostId ) {
return;
}
// cache selectors to not get dupes
pending = $( 'span.' + pendingClass, postSelector );
noPending = $( 'span.' + noClass, postSelector );
// Bubbles currently showing a pending count: apply the diff, clamping
// at zero, and flip parent + bubble styles at the zero boundary.
pending.each(function() {
var a = $(this), n = getCount(a) + diff;
if ( n < 1 )
n = 0;
if ( 0 === n ) {
a.parent().addClass( noParentClass );
a.removeClass( pendingClass ).addClass( noClass );
} else {
a.parent().removeClass( noParentClass );
a.addClass( pendingClass ).removeClass( noClass );
}
updateCount( a, n );
});
// Bubbles currently in the no-pending state: a positive diff flips
// them into the pending style.
noPending.each(function() {
var a = $(this);
if ( diff > 0 ) {
a.parent().removeClass( noParentClass );
a.removeClass( noClass ).addClass( pendingClass );
} else {
a.parent().addClass( noParentClass );
a.addClass( noClass ).removeClass( pendingClass );
}
updateCount( a, diff );
});
};
// Wire the comment list tables to wpList, installing the before/after
// hooks that keep every visible counter (all/pending/approved/spam/
// trash, pagination, dashboard text, tab title) in sync as comments are
// approved, spammed, trashed, restored, or deleted via ajax.
setCommentsList = function() {
var totalInput, perPageInput, pageInput, dimAfter, delBefore, updateTotalCount, delAfter, refillTheExtraList, diff,
lastConfidentTime = 0;
totalInput = $('input[name="_total"]', '#comments-form');
perPageInput = $('input[name="_per_page"]', '#comments-form');
pageInput = $('input[name="_page"]', '#comments-form');
// Updates the current total (stored in the _total input)
// Stale responses (older than the last "confident" update) are ignored.
updateTotalCount = function( total, time, setConfidentTime ) {
if ( time < lastConfidentTime )
return;
if ( setConfidentTime )
lastConfidentTime = time;
totalInput.val( total.toString() );
};
// this fires when viewing "All"
// dimAfter runs after an approve/unapprove toggle: it flips the reply
// button label, updates the hidden comment_status marker, and shifts
// counts between pending and approved.
dimAfter = function( r, settings ) {
var editRow, replyID, replyButton, response,
c = $( '#' + settings.element );
if ( true !== settings.parsed ) {
response = settings.parsed.responses[0];
}
editRow = $('#replyrow');
replyID = $('#comment_ID', editRow).val();
replyButton = $('#replybtn', editRow);
if ( c.is('.unapproved') ) {
if ( settings.data.id == replyID )
replyButton.text(adminCommentsL10n.replyApprove);
c.find('div.comment_status').html('0');
} else {
if ( settings.data.id == replyID )
replyButton.text(adminCommentsL10n.reply);
c.find('div.comment_status').html('1');
}
// +1 pending / -1 approved when the row was just unapproved; inverse
// when it was just approved.
diff = $('#' + settings.element).is('.' + settings.dimClass) ? 1 : -1;
if ( response ) {
updateDashboardText( response.supplemental );
updatePending( diff, response.supplemental.postId );
updateApproved( -1 * diff, response.supplemental.postId );
} else {
updatePending( diff );
updateApproved( -1 * diff );
}
};
// Send current total, page, per_page and url
// delBefore also builds the inline "undo" row for trash/spam actions
// before the ajax delete fires.
delBefore = function( settings, list ) {
var note, id, el, n, h, a, author,
action = false,
wpListsData = $( settings.target ).attr( 'data-wp-lists' );
settings.data._total = totalInput.val() || 0;
settings.data._per_page = perPageInput.val() || 0;
settings.data._page = pageInput.val() || 0;
settings.data._url = document.location.href;
settings.data.comment_status = $('input[name="comment_status"]', '#comments-form').val();
if ( wpListsData.indexOf(':trash=1') != -1 )
action = 'trash';
else if ( wpListsData.indexOf(':spam=1') != -1 )
action = 'spam';
if ( action ) {
id = wpListsData.replace(/.*?comment-([0-9]+).*/, '$1');
el = $('#comment-' + id);
note = $('#' + action + '-undo-holder').html();
el.find('.check-column :checkbox').prop('checked', false); // Uncheck the row so as not to be affected by Bulk Edits.
if ( el.siblings('#replyrow').length && commentReply.cid == id )
commentReply.close();
if ( el.is('tr') ) {
n = el.children(':visible').length;
author = $('.author strong', el).text();
h = $('<tr id="undo-' + id + '" class="undo un' + action + '" style="display:none;"><td colspan="' + n + '">' + note + '</td></tr>');
} else {
author = $('.comment-author', el).text();
h = $('<div id="undo-' + id + '" style="display:none;" class="undo un' + action + '">' + note + '</div>');
}
el.before(h);
$('strong', '#undo-' + id).text(author);
a = $('.undo a', '#undo-' + id);
a.attr('href', 'comment.php?action=un' + action + 'comment&c=' + id + '&_wpnonce=' + settings.data._ajax_nonce);
a.attr('data-wp-lists', 'delete:the-comment-list:comment-' + id + '::un' + action + '=1');
a.attr('class', 'vim-z vim-destructive');
$('.avatar', el).first().clone().prependTo('#undo-' + id + ' .' + action + '-undo-inside');
a.click(function( e ){
e.preventDefault();
list.wpList.del(this);
$('#undo-' + id).css( {backgroundColor:'#ceb'} ).fadeOut(350, function(){
$(this).remove();
$('#comment-' + id).css('backgroundColor', '').fadeIn(300, function(){ $(this).show(); });
});
});
}
return settings;
};
// In admin-ajax.php, we send back the unix time stamp instead of 1 on success
// delAfter works out which counters to move (spam/trash/pending/
// approved diffs) from the clicked action and the row's prior state,
// then applies them and tops the visible list up from the extra list.
delAfter = function( r, settings ) {
var total_items_i18n, total, animated, animatedCallback,
response = true === settings.parsed ? {} : settings.parsed.responses[0],
commentStatus = true === settings.parsed ? '' : response.supplemental.status,
commentPostId = true === settings.parsed ? '' : response.supplemental.postId,
newTotal = true === settings.parsed ? '' : response.supplemental,
targetParent = $( settings.target ).parent(),
commentRow = $('#' + settings.element),
spamDiff, trashDiff, pendingDiff, approvedDiff,
approved = commentRow.hasClass( 'approved' ),
unapproved = commentRow.hasClass( 'unapproved' ),
spammed = commentRow.hasClass( 'spam' ),
trashed = commentRow.hasClass( 'trash' );
updateDashboardText( newTotal );
// the order of these checks is important
// .unspam can also have .approve or .unapprove
// .untrash can also have .approve or .unapprove
if ( targetParent.is( 'span.undo' ) ) {
// the comment was spammed
if ( targetParent.hasClass( 'unspam' ) ) {
spamDiff = -1;
if ( 'trash' === commentStatus ) {
trashDiff = 1;
} else if ( '1' === commentStatus ) {
approvedDiff = 1;
} else if ( '0' === commentStatus ) {
pendingDiff = 1;
}
// the comment was trashed
} else if ( targetParent.hasClass( 'untrash' ) ) {
trashDiff = -1;
if ( 'spam' === commentStatus ) {
spamDiff = 1;
} else if ( '1' === commentStatus ) {
approvedDiff = 1;
} else if ( '0' === commentStatus ) {
pendingDiff = 1;
}
}
// user clicked "Spam"
} else if ( targetParent.is( 'span.spam' ) ) {
// the comment is currently approved
if ( approved ) {
approvedDiff = -1;
// the comment is currently pending
} else if ( unapproved ) {
pendingDiff = -1;
// the comment was in the trash
} else if ( trashed ) {
trashDiff = -1;
}
// you can't spam an item on the spam screen
spamDiff = 1;
// user clicked "Unspam"
} else if ( targetParent.is( 'span.unspam' ) ) {
if ( approved ) {
pendingDiff = 1;
} else if ( unapproved ) {
approvedDiff = 1;
} else if ( trashed ) {
// the comment was previously approved
if ( targetParent.hasClass( 'approve' ) ) {
approvedDiff = 1;
// the comment was previously pending
} else if ( targetParent.hasClass( 'unapprove' ) ) {
pendingDiff = 1;
}
} else if ( spammed ) {
if ( targetParent.hasClass( 'approve' ) ) {
approvedDiff = 1;
} else if ( targetParent.hasClass( 'unapprove' ) ) {
pendingDiff = 1;
}
}
// you can Unspam an item on the spam screen
spamDiff = -1;
// user clicked "Trash"
} else if ( targetParent.is( 'span.trash' ) ) {
if ( approved ) {
approvedDiff = -1;
} else if ( unapproved ) {
pendingDiff = -1;
// the comment was in the spam queue
} else if ( spammed ) {
spamDiff = -1;
}
// you can't trash an item on the trash screen
trashDiff = 1;
// user clicked "Restore"
} else if ( targetParent.is( 'span.untrash' ) ) {
if ( approved ) {
pendingDiff = 1;
} else if ( unapproved ) {
approvedDiff = 1;
} else if ( trashed ) {
if ( targetParent.hasClass( 'approve' ) ) {
approvedDiff = 1;
} else if ( targetParent.hasClass( 'unapprove' ) ) {
pendingDiff = 1;
}
}
// you can't go from trash to spam
// you can untrash on the trash screen
trashDiff = -1;
// User clicked "Approve"
} else if ( targetParent.is( 'span.approve:not(.unspam):not(.untrash)' ) ) {
approvedDiff = 1;
pendingDiff = -1;
// User clicked "Unapprove"
} else if ( targetParent.is( 'span.unapprove:not(.unspam):not(.untrash)' ) ) {
approvedDiff = -1;
pendingDiff = 1;
// User clicked "Delete Permanently"
} else if ( targetParent.is( 'span.delete' ) ) {
if ( spammed ) {
spamDiff = -1;
} else if ( trashed ) {
trashDiff = -1;
}
}
// Apply whichever diffs were decided above to the tab counters.
if ( pendingDiff ) {
updatePending( pendingDiff, commentPostId );
updateCountText( 'span.all-count', pendingDiff );
}
if ( approvedDiff ) {
updateApproved( approvedDiff, commentPostId );
updateCountText( 'span.all-count', approvedDiff );
}
if ( spamDiff ) {
updateCountText( 'span.spam-count', spamDiff );
}
if ( trashDiff ) {
updateCountText( 'span.trash-count', trashDiff );
}
if ( ! isDashboard ) {
total = totalInput.val() ? parseInt( totalInput.val(), 10 ) : 0;
if ( $(settings.target).parent().is('span.undo') )
total++;
else
total--;
if ( total < 0 )
total = 0;
if ( 'object' === typeof r ) {
if ( response.supplemental.total_items_i18n && lastConfidentTime < response.supplemental.time ) {
total_items_i18n = response.supplemental.total_items_i18n || '';
if ( total_items_i18n ) {
$('.displaying-num').text( total_items_i18n );
$('.total-pages').text( response.supplemental.total_pages_i18n );
$('.tablenav-pages').find('.next-page, .last-page').toggleClass('disabled', response.supplemental.total_pages == $('.current-page').val());
}
updateTotalCount( total, response.supplemental.time, true );
} else if ( response.supplemental.time ) {
updateTotalCount( total, response.supplemental.time, false );
}
} else {
updateTotalCount( total, r, false );
}
}
// Top the visible list up from the hidden extra list, showing the
// "no items" row once everything is gone.
if ( ! theExtraList || theExtraList.length === 0 || theExtraList.children().length === 0 ) {
return;
}
theList.get(0).wpList.add( theExtraList.children( ':eq(0):not(.no-items)' ).remove().clone() );
refillTheExtraList();
animated = $( ':animated', '#the-comment-list' );
animatedCallback = function () {
if ( ! $( '#the-comment-list tr:visible' ).length ) {
theList.get(0).wpList.add( theExtraList.find( '.no-items' ).clone() );
}
};
if ( animated.length ) {
animated.promise().done( animatedCallback );
} else {
animatedCallback();
}
};
// Fetch replacement rows for the hidden extra list via admin-ajax so
// the visible table can be refilled without a page reload.
refillTheExtraList = function(ev) {
var args = $.query.get(), total_pages = $('.total-pages').text(), per_page = $('input[name="_per_page"]', '#comments-form').val();
if (! args.paged)
args.paged = 1;
if (args.paged > total_pages) {
return;
}
if (ev) {
theExtraList.empty();
args.number = Math.min(8, per_page); // see WP_Comments_List_Table::prepare_items() @ class-wp-comments-list-table.php
} else {
args.number = 1;
args.offset = Math.min(8, per_page) - 1; // fetch only the next item on the extra list
}
args.no_placeholder = true;
args.paged ++;
// $.query.get() needs some correction to be sent into an ajax request
if ( true === args.comment_type )
args.comment_type = '';
args = $.extend(args, {
'action': 'fetch-list',
'list_args': list_args,
'_ajax_fetch_list_nonce': $('#_ajax_fetch_list_nonce').val()
});
$.ajax({
url: ajaxurl,
global: false,
dataType: 'json',
data: args,
success: function(response) {
theExtraList.get(0).wpList.add( response.rows );
}
});
};
// Bind wpList to both tables and show the undo row after a
// trash/spam delete completes.
theExtraList = $('#the-extra-comment-list').wpList( { alt: '', delColor: 'none', addColor: 'none' } );
theList = $('#the-comment-list').wpList( { alt: '', delBefore: delBefore, dimAfter: dimAfter, delAfter: delAfter, addColor: 'none' } )
.bind('wpListDelEnd', function(e, s){
var wpListsData = $(s.target).attr('data-wp-lists'), id = s.element.replace(/[^0-9]+/g, '');
if ( wpListsData.indexOf(':trash=1') != -1 || wpListsData.indexOf(':spam=1') != -1 )
$('#undo-' + id).fadeIn(300, function(){ $(this).show(); });
});
};
// Controller for the inline reply / quick-edit row (#replyrow): opens
// it next to a comment, submits via admin-ajax, and splices the
// returned row back into the table.
commentReply = {
cid : '',
act : '',
// Bind the reply row's cancel/save/enter-key handlers and the
// dblclick-to-quick-edit shortcut; remember the current list filter.
init : function() {
var row = $('#replyrow');
$('a.cancel', row).click(function() { return commentReply.revert(); });
$('a.save', row).click(function() { return commentReply.send(); });
$( 'input#author-name, input#author-email, input#author-url', row ).keypress( function( e ) {
if ( e.which == 13 ) {
commentReply.send();
e.preventDefault();
return false;
}
});
// add events
$('#the-comment-list .column-comment > p').dblclick(function(){
commentReply.toggle($(this).parent());
});
$('#doaction, #doaction2, #post-query-submit').click(function(){
if ( $('#the-comment-list #replyrow').length > 0 )
commentReply.close();
});
this.comments_listing = $('#comments-form > input[name="comment_status"]').val() || '';
/* $(listTable).bind('beforeChangePage', function(){
commentReply.close();
}); */
},
// Re-attach the dblclick quick-edit shortcut to freshly added rows.
addEvents : function(r) {
r.each(function() {
$(this).find('.column-comment > p').dblclick(function(){
commentReply.toggle($(this).parent());
});
});
},
// Open quick edit for a row, confirming first if another edit is open.
toggle : function(el) {
if ( 'none' !== $( el ).css( 'display' ) && ( $( '#replyrow' ).parent().is('#com-reply') || window.confirm( adminCommentsL10n.warnQuickEdit ) ) ) {
$( el ).find( 'a.vim-q' ).click();
}
},
// Fade the reply row out, then fully close it.
revert : function() {
if ( $('#the-comment-list #replyrow').length < 1 )
return false;
$('#replyrow').fadeOut('fast', function(){
commentReply.close();
});
return false;
},
// Reset the reply row and park it back in its #com-reply holder.
close : function() {
var c, replyrow = $('#replyrow');
// replyrow is not showing?
if ( replyrow.parent().is('#com-reply') )
return;
if ( this.cid && this.act == 'edit-comment' ) {
c = $('#comment-' + this.cid);
c.fadeIn(300, function(){ c.show(); }).css('backgroundColor', '');
}
// reset the Quicktags buttons
if ( typeof QTags != 'undefined' )
QTags.closeAllTags('replycontent');
$('#add-new-comment').css('display', '');
replyrow.hide();
$('#com-reply').append( replyrow );
$('#replycontent').css('height', '').val('');
$('#edithead input').val('');
$('.error', replyrow).empty().hide();
$( '.spinner', replyrow ).removeClass( 'is-active' );
this.cid = '';
},
// Show the reply row next to a comment in one of three modes:
// 'replyto' (default), 'edit', or 'add'. Always returns false so it
// can be used directly as a link handler.
open : function(comment_id, post_id, action) {
var editRow, rowData, act, replyButton, editHeight,
t = this,
c = $('#comment-' + comment_id),
h = c.height(),
colspanVal = 0;
t.close();
t.cid = comment_id;
editRow = $('#replyrow');
rowData = $('#inline-'+comment_id);
action = action || 'replyto';
act = 'edit' == action ? 'edit' : 'replyto';
act = t.act = act + '-comment';
colspanVal = $( '> th:visible, > td:visible', c ).length;
// Make sure it's actually a table and there's a `colspan` value to apply.
if ( editRow.hasClass( 'inline-edit-row' ) && 0 !== colspanVal ) {
$( 'td', editRow ).attr( 'colspan', colspanVal );
}
$('#action', editRow).val(act);
$('#comment_post_ID', editRow).val(post_id);
$('#comment_ID', editRow).val(comment_id);
if ( action == 'edit' ) {
// Quick Edit: pre-fill the form from the hidden #inline- row data.
$( '#author-name', editRow ).val( $( 'div.author', rowData ).text() );
$('#author-email', editRow).val( $('div.author-email', rowData).text() );
$('#author-url', editRow).val( $('div.author-url', rowData).text() );
$('#status', editRow).val( $('div.comment_status', rowData).text() );
$('#replycontent', editRow).val( $('textarea.comment', rowData).val() );
$( '#edithead, #editlegend, #savebtn', editRow ).show();
$('#replyhead, #replybtn, #addhead, #addbtn', editRow).hide();
if ( h > 120 ) {
// Limit the maximum height when editing very long comments to make it more manageable.
// The textarea is resizable in most browsers, so the user can adjust it if needed.
editHeight = h > 500 ? 500 : h;
$('#replycontent', editRow).css('height', editHeight + 'px');
}
c.after( editRow ).fadeOut('fast', function(){
$('#replyrow').fadeIn(300, function(){ $(this).show(); });
});
} else if ( action == 'add' ) {
$('#addhead, #addbtn', editRow).show();
$( '#replyhead, #replybtn, #edithead, #editlegend, #savebtn', editRow ) .hide();
$('#the-comment-list').prepend(editRow);
$('#replyrow').fadeIn(300);
} else {
// Reply mode; approving happens implicitly if the parent is pending.
replyButton = $('#replybtn', editRow);
$( '#edithead, #editlegend, #savebtn, #addhead, #addbtn', editRow ).hide();
$('#replyhead, #replybtn', editRow).show();
c.after(editRow);
if ( c.hasClass('unapproved') ) {
replyButton.text(adminCommentsL10n.replyApprove);
} else {
replyButton.text(adminCommentsL10n.reply);
}
$('#replyrow').fadeIn(300, function(){ $(this).show(); });
}
// After the row settles, scroll it into view and wire Escape-to-close.
setTimeout(function() {
var rtop, rbottom, scrollTop, vp, scrollBottom;
rtop = $('#replyrow').offset().top;
rbottom = rtop + $('#replyrow').height();
scrollTop = window.pageYOffset || document.documentElement.scrollTop;
vp = document.documentElement.clientHeight || window.innerHeight || 0;
scrollBottom = scrollTop + vp;
if ( scrollBottom - 20 < rbottom )
window.scroll(0, rbottom - vp + 35);
else if ( rtop - 20 < scrollTop )
window.scroll(0, rtop - 35);
$('#replycontent').focus().keyup(function(e){
if ( e.which == 27 )
commentReply.revert(); // close on Escape
});
}, 600);
return false;
},
// Collect the reply-row fields and POST them to admin-ajax.
send : function() {
var post = {};
$('#replysubmit .error').hide();
$( '#replysubmit .spinner' ).addClass( 'is-active' );
$('#replyrow input').not(':button').each(function() {
var t = $(this);
post[ t.attr('name') ] = t.val();
});
post.content = $('#replycontent').val();
post.id = post.comment_post_ID;
post.comments_listing = this.comments_listing;
post.p = $('[name="p"]').val();
if ( $('#comment-' + $('#comment_ID').val()).hasClass('unapproved') )
post.approve_parent = 1;
$.ajax({
type : 'POST',
url : ajaxurl,
data : post,
success : function(x) { commentReply.show(x); },
error : function(r) { commentReply.error(r); }
});
return false;
},
// Handle a successful ajax response: insert/replace the comment row,
// animate it, and update counters for an implicitly approved parent.
show : function(xml) {
var t = this, r, c, id, bg, pid;
if ( typeof(xml) == 'string' ) {
t.error({'responseText': xml});
return false;
}
r = wpAjax.parseAjaxResponse(xml);
if ( r.errors ) {
t.error({'responseText': wpAjax.broken});
return false;
}
t.revert();
r = r.responses[0];
id = '#comment-' + r.id;
if ( 'edit-comment' == t.act )
$(id).remove();
if ( r.supplemental.parent_approved ) {
pid = $('#comment-' + r.supplemental.parent_approved);
updatePending( -1, r.supplemental.parent_post_id );
if ( this.comments_listing == 'moderated' ) {
pid.animate( { 'backgroundColor':'#CCEEBB' }, 400, function(){
pid.fadeOut();
});
return;
}
}
if ( r.supplemental.i18n_comments_text ) {
if ( isDashboard ) {
updateDashboardText( r.supplemental );
} else {
updateApproved( 1, r.supplemental.parent_post_id );
updateCountText( 'span.all-count', 1 );
}
}
c = $.trim(r.data); // Trim leading whitespaces
$(c).hide();
$('#replyrow').after(c);
id = $(id);
t.addEvents(id);
bg = id.hasClass('unapproved') ? '#FFFFE0' : id.closest('.widefat, .postbox').css('backgroundColor');
id.animate( { 'backgroundColor':'#CCEEBB' }, 300 )
.animate( { 'backgroundColor': bg }, 300, function() {
if ( pid && pid.length ) {
pid.animate( { 'backgroundColor':'#CCEEBB' }, 300 )
.animate( { 'backgroundColor': bg }, 300 )
.removeClass('unapproved').addClass('approved')
.find('div.comment_status').html('1');
}
});
},
// Show an ajax failure in the reply row, stripping any HTML tags.
error : function(r) {
var er = r.statusText;
$( '#replysubmit .spinner' ).removeClass( 'is-active' );
if ( r.responseText )
er = r.responseText.replace( /<.[^<>]*?>/g, '' );
if ( er )
$('#replysubmit .error').html(er).show();
},
// Open the reply row in 'add' mode for a brand-new comment on a post.
addcomment: function(post_id) {
var t = this;
$('#add-new-comment').fadeOut(200, function(){
t.open(0, post_id, 'add');
$('table.comments-box').css('display', '');
$('#no-comments').remove();
});
}
};
// Page bootstrap: initialize the list + reply controller, neuter the
// plain delete links (handled via wpList), register table hotkeys when
// the plugin is present, and route inline quick-edit/reply clicks.
$(document).ready(function(){
var make_hotkeys_redirect, edit_comment, toggle_all, make_bulk;
setCommentsList();
commentReply.init();
$(document).on( 'click', 'span.delete a.delete', function( e ) {
e.preventDefault();
});
if ( typeof $.table_hotkeys != 'undefined' ) {
// Build a callback that jumps to the prev/next page, flagging which
// end of the list to highlight after the reload.
make_hotkeys_redirect = function(which) {
return function() {
var first_last, l;
first_last = 'next' == which? 'first' : 'last';
l = $('.tablenav-pages .'+which+'-page:not(.disabled)');
if (l.length)
window.location = l[0].href.replace(/\&hotkeys_highlight_(first|last)=1/g, '')+'&hotkeys_highlight_'+first_last+'=1';
};
};
edit_comment = function(event, current_row) {
window.location = $('span.edit a', current_row).attr('href');
};
toggle_all = function() {
$('#cb-select-all-1').data( 'wp-toggle', 1 ).trigger( 'click' ).removeData( 'wp-toggle' );
};
// Build a callback that selects a bulk action and submits it.
make_bulk = function(value) {
return function() {
var scope = $('select[name="action"]');
$('option[value="' + value + '"]', scope).prop('selected', true);
$('#doaction').click();
};
};
$.table_hotkeys(
$('table.widefat'),
[
'a', 'u', 's', 'd', 'r', 'q', 'z',
['e', edit_comment],
['shift+x', toggle_all],
['shift+a', make_bulk('approve')],
['shift+s', make_bulk('spam')],
['shift+d', make_bulk('delete')],
['shift+t', make_bulk('trash')],
['shift+z', make_bulk('untrash')],
['shift+u', make_bulk('unapprove')]
],
{
highlight_first: adminCommentsL10n.hotkeys_highlight_first,
highlight_last: adminCommentsL10n.hotkeys_highlight_last,
prev_page_link_cb: make_hotkeys_redirect('prev'),
next_page_link_cb: make_hotkeys_redirect('next'),
hotkeys_opts: {
disableInInput: true,
type: 'keypress',
noDisable: '.check-column input[type="checkbox"]'
},
cycle_expr: '#the-comment-list tr',
start_row_index: 0
}
);
}
// Quick Edit and Reply have an inline comment editor.
$( '#the-comment-list' ).on( 'click', '.comment-inline', function (e) {
e.preventDefault();
var $el = $( this ),
action = 'replyto';
if ( 'undefined' !== typeof $el.data( 'action' ) ) {
action = $el.data( 'action' );
}
commentReply.open( $el.data( 'commentId' ), $el.data( 'postId' ), action );
} );
});
})(jQuery);
| {
"content_hash": "7f2a834dd70d21f0eaa3580cb1b51a88",
"timestamp": "",
"source": "github",
"line_count": 933,
"max_line_length": 149,
"avg_line_length": 28.617363344051448,
"alnum_prop": 0.5970786516853933,
"repo_name": "oconn270/oconn270.github.io",
"id": "dd3025f194adf70abe4708706d6599c700ae558f",
"size": "26700",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "blog/wp-admin/js/edit-comments.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "616"
},
{
"name": "CSS",
"bytes": "2105455"
},
{
"name": "HTML",
"bytes": "41119"
},
{
"name": "JavaScript",
"bytes": "2014413"
},
{
"name": "PHP",
"bytes": "9926088"
}
],
"symlink_target": ""
} |
class BuiltIn(object):
    """Container for the built-in portion of the runtime.

    Holds generated code fragments, built-in methods/messages, default
    bindings, and the fixed tag-name groupings (constants, opaque
    values, pointer-carrying types).
    """

    def __init__(self):
        self.code = []
        self.methods = []
        self.messages = []
        self.defaults = {}
        # Tag names with a fixed constant representation.
        self.constant_names = ['False', 'True', 'Less', 'Equal', 'Greater', 'Empty', 'BuiltIn']
        # Tag names whose values carry no pointer payload.
        self.opaque_names = ['Constant', 'Small-Integer']
        # Tag names whose values are heap pointers.
        self.pointer_names = ['String', 'Array', 'Large-Integer']
class BuiltInMethod(object):
    """A single built-in method: its identity, the messages it sends,
    and the code used to generate it for a target backend."""

    def __init__(self, tag_name, symbol, arg_names, sent_messages, code):
        self.tag_name = tag_name
        self.symbol = symbol
        self.arg_names = arg_names
        self.sent_messages = sent_messages
        self.code = code

    def generate_target_code(self, label, target):
        """Delegate emission to the target backend for this method."""
        return target.generate_builtin_method(label, self.arg_names, self.code)

    def __repr__(self):
        fields = (self.tag_name, self.symbol, self.arg_names, self.sent_messages, self.code)
        return 'BuiltInMethod(%r, %r, %r, %r, %r)' % fields
class TraceBackInfo(object):
    """Plain record describing one traceback frame for error reporting:
    where the method lives in its source stream and how to underline
    the offending span."""

    def __init__(self, index, method_name, stream_name, source_line, line_number, column, underline):
        self.index = index
        self.method_name = method_name
        self.stream_name = stream_name
        self.source_line = source_line
        self.line_number = line_number
        self.column = column
        self.underline = underline
class CompileOptions(object):
    """Mutable compiler flags with their default values.

    verbose: emit progress output.
    traceback: include traceback support in compiled output.
    source_traceback: include source lines in tracebacks.
    """

    def __init__(self):
        self.verbose = False
        self.traceback = True
        self.source_traceback = True
| {
"content_hash": "0233935df577cf9e8303c0f867c132e3",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 128,
"avg_line_length": 37.30769230769231,
"alnum_prop": 0.6096219931271478,
"repo_name": "shaurz/ome",
"id": "9d71ef383788915fba20232178abe3c3347b8b3a",
"size": "1552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ome/ome_types.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "59102"
},
{
"name": "Python",
"bytes": "136748"
}
],
"symlink_target": ""
} |
import axios from 'axios';
import jwtDecode from 'jwt-decode';
import Auth from './auth';
const apiBaseURI = 'http://localhost:8080';
// Unwrap an axios response: hand back its payload for 2xx statuses,
// otherwise raise the HTTP status text as an Error.
function checkStatusCode(resp) {
  const success = resp.status >= 200 && resp.status < 300;
  if (!success) {
    throw new Error(resp.statusText);
  }
  return resp.data;
}
// Public (unauthenticated) list of all channels.
function getChannels() {
  const url = `${apiBaseURI}/api/channels`;
  return axios.get(url).then(checkStatusCode);
}
// Register a new user account (unauthenticated).
function createUser(body) {
  const url = `${apiBaseURI}/api/users`;
  return axios.post(url, body).then(checkStatusCode);
}
// Admin applications still awaiting approval (requires auth token).
function getPendingUsers() {
  const config = { headers: { Authorization: Auth.getToken() } };
  return axios
    .get(`${apiBaseURI}/api/admins?filter=pending`, config)
    .then(checkStatusCode);
}
// Promote a user to admin with the permissions described in `body`
// (requires auth token).
function applyPermissions(userId, body) {
  const config = { headers: { Authorization: Auth.getToken() } };
  const url = `${apiBaseURI}/api/users/${userId}/convertInAdmin`;
  return axios.post(url, body, config).then(checkStatusCode);
}
// Full list of admins (requires auth token).
function getAllAdmins() {
  const config = { headers: { Authorization: Auth.getToken() } };
  return axios.get(`${apiBaseURI}/api/admins`, config).then(checkStatusCode);
}
// Events belonging to the authenticated admin; the admin id is taken
// from the JWT's `sub` claim.
function getEvents() {
  const token = Auth.getToken();
  const adminId = jwtDecode(token).sub;
  const config = { headers: { Authorization: token } };
  return axios
    .get(`${apiBaseURI}/api/admins/${adminId}/events`, config)
    .then(checkStatusCode);
}
// Create a new event (requires auth token).
function createEvent(body) {
  console.log('creating event', body);
  const config = { headers: { Authorization: Auth.getToken() } };
  return axios
    .post(`${apiBaseURI}/api/events`, body, config)
    .then(checkStatusCode);
}
// Update an existing event (requires auth token).
// NOTE: `body.eventId` is consumed to build the URL and deleted from
// the object before sending — the caller's object is mutated.
function updateEvent(body) {
  console.log('updating event', body);
  const config = { headers: { Authorization: Auth.getToken() } };
  const eventId = body.eventId;
  delete body.eventId;
  return axios
    .put(`${apiBaseURI}/api/events/${eventId}`, body, config)
    .then(checkStatusCode);
}
// Resolve the channel matching a municipality/activity pair and attach
// its id to `body` (mutated in place) as `body.channel`.
// Resolves with the augmented body, or rejects when the API returns no
// matching channel.
function findChannel(body) {
  console.log('finding channel', body);
  const params = `municipality=${body.municipality}&activity=${body.activity}`;
  return axios
    .get(`${apiBaseURI}/api/channels?${params}`)
    .then(checkStatusCode)
    .then(channels => {
      // Guard against a missing payload AND an empty result list — the
      // previous `if (channels)` check crashed on `channels.data[0]._id`
      // when the query matched nothing.
      if (channels && channels.data && channels.data.length > 0) {
        body.channel = channels.data[0]._id;
        console.log('channel found', body);
        return Promise.resolve(body);
      }
      return Promise.reject('No valid params to get the channel');
    });
}
// Municipalities visible to the authenticated admin (id from JWT).
const getMunicipalities = () => {
  const token = Auth.getToken();
  const config = { headers: { Authorization: token } };
  return axios
    .get(`${apiBaseURI}/api/admins/${jwtDecode(token).sub}/municipalities`, config)
    .then(checkStatusCode);
};
// Activities visible to the authenticated admin (id from JWT).
const getActivities = () => {
  const token = Auth.getToken();
  const config = { headers: { Authorization: token } };
  return axios
    .get(`${apiBaseURI}/api/admins/${jwtDecode(token).sub}/activities`, config)
    .then(checkStatusCode);
};
// Events belonging to the authenticated (non-admin) user (id from JWT).
const getUserEvents = () => {
  const token = Auth.getToken();
  const config = { headers: { Authorization: token } };
  return axios
    .get(`${apiBaseURI}/api/users/${jwtDecode(token).sub}/events`, config)
    .then(checkStatusCode);
};
// Public API surface of this requests module.
export default {
  getChannels,
  createUser,
  getPendingUsers,
  getAllAdmins,
  applyPermissions,
  findChannel,
  getEvents,
  createEvent,
  getMunicipalities,
  getActivities,
  getUserEvents,
  updateEvent
};
| {
"content_hash": "ff15991d7a0535370352a98544cd0593",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 79,
"avg_line_length": 21.893491124260354,
"alnum_prop": 0.6448648648648648,
"repo_name": "leiverandres/turismo-risaralda",
"id": "af51b8f2102918e2c25cdb4dc81696d1595efc68",
"size": "3700",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "turismo_client/src/utils/requests.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "549"
},
{
"name": "HTML",
"bytes": "1307"
},
{
"name": "JavaScript",
"bytes": "110367"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.