text
stringlengths 3
1.05M
|
|---|
#-*- coding:utf-8 -*-
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtNetwork import *
from uiEvents.AWindowBase import *
from uiDefines.Ui_MainWindow import *
from uiEvents.eventSplashWindow import *
import os
import sys
import pathlib
import datetime
'''
这是MainWindow窗体的实现类
'''
#class FMainWindow(IWindowImpl):
class FMainWindow(IWindowImplM):
    '''
    Initialize all UI state and data (abstract method of the base window class).
    '''
    def initUIAndData(self):
        self.initEvents()
        # Background worker that queues cross-thread UI update requests.
        self.msgWorker = QTInvokeQueueWorker(self)
        self.msgWorker.start()
    '''
    Wire up widget event handlers.
    '''
    def initEvents(self):
        self.uiObj.btnTestA.clicked.connect(self.btnTestAClicked)
        self.uiObj.btnTestB.clicked.connect(self.btnTestBClicked)
        self.uiObj.btnTestC.clicked.connect(self.btnTestCClicked)
        self.uiObj.btnTestD.clicked.connect(self.btnTestDClicked)
    '''
    Return an instance of the UI definition class
    (e.g. an instance of uiDefines/Ui_MainWindow.py; abstract method).
    '''
    def getUIDefineObject(self):
        return Ui_MainWindow()
    '''
    InvokeUI implementation (used to manipulate UI content from other threads).
    '''
    def runUIImpl(self, uiArgs):
        self.uiObj.txtContent.setText(uiArgs.content)
    '''
    Button A handler.
    '''
    def btnTestAClicked(self, e):
        # Show the SplashWindow form; SplashProcess (below) drives its progress.
        FSplashWindow.showWindow('aaaaa', SplashProcess())
    '''
    Button B handler.
    '''
    def btnTestBClicked(self, e):
        # Ask a yes/no question; on Yes, push a text update through invokeUI.
        if QMessageBox.question(self,"消息框标题","这是一条问答。",QMessageBox.Yes | QMessageBox.No) == QMessageBox.Yes:
            self.invokeUI(QTObjectInvokeArgs("bbbbbbbbbbbbbbb"))
    '''
    Button C handler.
    '''
    def btnTestCClicked(self, e):
        # Queue the current timestamp as a UI update via the worker thread.
        self.msgWorker.addMsg(QTObjectInvokeArgs(datetime.datetime.now().__str__()))
    '''
    Button D handler.
    '''
    def btnTestDClicked(self, e):
        # NOTE(review): `iotool` is not imported explicitly in this file;
        # presumably it arrives via one of the star imports above — verify.
        iotool.shellExecute('file:///home/flyss/Downloads')
class SplashProcess(ISplashDoWork):
    """Display-control worker for the SplashWindow: posts a fixed series of
    progress updates, then closes the window."""

    def process(self):
        # (percent, message) steps, each followed by a one-second pause.
        steps = (
            (10, '111111111111111111111111'),
            (30, '222222222222222222222222'),
            (60, '333333333333333333333333'),
            (80, '444444444444444444444444'),
            (100, '555555555555555555555555'),
        )
        for percent, message in steps:
            self.eventObj.msgWorker.addMsg(SplashInvokeArgs(percent, message))
            time.sleep(1)
        # All steps done: close the splash window.
        self.windowObj.close()
|
"""
Module: 'ssl' on esp32 1.11.0
"""
# MCU: (sysname='esp32', nodename='esp32', release='1.11.0', version='v1.11-132-gc24d81119 on 2019-07-08', machine='ESP32 module with ESP32')
# Stubber: 1.2.0
def wrap_socket():
    """Stub for MicroPython's esp32 `ssl.wrap_socket`; does nothing."""
    return None
|
import React, { Component } from 'react'
import { Link } from 'gatsby'
import Helmet from 'react-helmet'
import Layout from '../components/layout'
import sandDunesElk from '../assets/images/main/DunesMain.jpg'
class Homepage extends Component {
render() {
const siteTitle = 'DataBit Designs'
return (
<Layout>
<Helmet title={siteTitle} />
<section id="one" className="main style1">
<div className="grid-wrapper">
<div className="col-6">
<header className="major">
<h2>Who We Are</h2>
</header>
<p>
We are a small team based in the San Luis Valley who specialize
in providing affordable, custom designed web-sites for small
businesses and individuals. For only $50 a month, you can get a
custom designed web-site to help drive sales and increase
traffic to your business. Cancel your subscription at anytime
and only pay for the months you use our service.
</p>
</div>
<div className="col-6">
<span className="image fit">
<img src={sandDunesElk} alt="San Luis Valley Sand Dunes" />
</span>
</div>
</div>
</section>
<section id="two" className="main style2">
<div className="grid-wrapper">
<div className="col-6">
<ul className="major-icons">
<li>
<span className="icon style1 major fa-star-o" />
</li>
<li>
<span className="icon style2 major fa-code" />
</li>
<li>
<span className="icon style3 major fa-star-half-o" />
</li>
<li>
<span className="icon style4 major fa-laptop" />
</li>
<li>
<span className="icon style5 major fa-star" />
</li>
<li>
<span className="icon style6 major fa-diamond" />
</li>
</ul>
</div>
<div className="col-6">
<header className="major">
<h2>Custom Web-Design on a Budget</h2>
</header>
<p>
After using our contact form to get in touch with us, we will
touch base with you concerning design preferences and business
needs. We then get to work immediately on building your custom
designed single page web-site.
</p>
<p>
$50 a month includes a custom designed web-site, web hosting,
and the ability to make <b>1</b> edit to your web-site each
month. Additional edits and web-site redesigns cost extra. You
can cancel your subscription at anytime and your web-site will
continue to be hosted for the duration of the month(s) you paid
for.
</p>
<p>
We're also able to do bigger or more custom projects if a single
page web-site doesn't suite your needs. Contact us about special
pricing options.
</p>
</div>
</div>
</section>
<section id="three" className="main style1 special">
<div className="grid-wrapper">
<div className="col-12">
<header className="major">
<h2>Web-Design Made Easy</h2>
</header>
<p>Simplifying web-sites for small businesses and indivduals.</p>
</div>
<div className="col-4">
<span className="image fit">
<span className="icon style5 major fa-code" />
</span>
<h3>Custom Designed</h3>
<p>
All web-sites are designed from the ground up using the latest
coding practices and frameworks. All sites are created using
React.js
</p>
</div>
<div className="col-4">
<span className="image fit">
<span className="icon style5 major fa-users" />
</span>
<h3>New Audiences</h3>
<p>
Increase traffic by giving customers an easy place to find
information about your place of business.
</p>
</div>
<div className="col-4">
<span className="image fit">
<span className="icon style5 major fa-desktop" />
</span>
<h3>Multiple Designs</h3>
<p>
All web-sites include a desktop version and mobile version of
the site to make sure you're reaching as many customers as
possible.
</p>
</div>
</div>
</section>
<section id="four" className="main style2 special">
<div className="container">
<header className="major">
<h2>Questions?</h2>
</header>
<p>
Use the link below if you're interested in getting a site or just
want to learn more.
</p>
<ul className="actions uniform">
<li>
<Link to="/contact/" className="button special">
Contact Us
</Link>
</li>
</ul>
</div>
</section>
</Layout>
)
}
}
export default Homepage
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ProviderRegistrationFault(vim, *args, **kwargs):
    '''This fault is thrown if failed to register provider to storage management
    service.

    Positional arguments fill the required property names in order; keyword
    arguments may supply any required property by name.

    :param vim: service connection whose factory creates the fault object.
    :raises IndexError: if fewer than 4 property values are supplied in total.
    :raises InvalidArgumentError: if a keyword is not a known property name.
    '''
    obj = vim.client.factory.create('{urn:sms}ProviderRegistrationFault')

    # do some validation checking...
    # BUG FIX: the message claimed "at least 5" while the check requires 4
    # (the number of required properties), and it reported only len(args)
    # even though kwargs also count toward the total.
    if (len(args) + len(kwargs)) < 4:
        raise IndexError('Expected at least 4 arguments got: %d' % (len(args) + len(kwargs)))

    required = [ 'dynamicProperty', 'dynamicType', 'faultCause', 'faultMessage' ]
    optional = [ ]

    # Positional values map onto the property names in declaration order.
    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    # Keyword values must match a known property name exactly.
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
|
"use strict";
const k8s = require("@pulumi/kubernetes");
const appLabels = { app: "nginx" };
const deployment = new k8s.apps.v1.Deployment("nginx", {
spec: {
selector: { matchLabels: appLabels },
replicas: 1,
template: {
metadata: { labels: appLabels },
spec: { containers: [{ name: "nginx", image: "nginx" }] }
}
}
});
exports.name = deployment.metadata.name;
|
const remote = require("electron").remote;
const Swal = require('sweetalert2');
const ace = require('ace-builds/src/ace');
var xml_beautify = require('xml-formatter');
const low = require('lowdb');
const FileSync = require('lowdb/adapters/FileSync');
const { ipcRenderer, ipcMain } = require('electron');
const path = require("path");
const app = remote.app;
const db_path = path.join(app.getPath('home'), "/.guvercin.json");
const adapter = new FileSync(db_path);
const db = low(adapter);
db.defaults({ history: [], collection: [] }).write();
const fetch = require("node-fetch");
var body_editor;
var response_editor;
|
from django.core.management.base import BaseCommand
from query.models import *
from faceDB.open_face_helper import OpenFaceHelper
import random
import json
import tensorflow as tf
import cv2
import os
def load_imgs(img_directory, exts=('.jpg', '.jpeg')):
    """Recursively collect image file paths under *img_directory*.

    Generalized: the accepted extensions are now a parameter (defaulting to
    the original hard-coded JPEG extensions). The comparison is
    case-insensitive, so '.JPG' matches '.jpg'.

    :param img_directory: root directory to walk.
    :param exts: lowercase extensions (with leading dot) to accept.
    :returns: list of full paths to matching files.
    """
    imgs = []
    for root, _subdirs, files in os.walk(img_directory):
        # `name` instead of `file` to avoid shadowing the builtin.
        for name in files:
            if os.path.splitext(name)[1].lower() in exts:
                imgs.append(os.path.join(root, name))
    return imgs
# FIXME: Exit gracefully if same images being sent to clustering algorithm a
# second time...
class Command(BaseCommand):
    """Management command: compute OpenFace feature embeddings for faces
    detected in the videos listed in the input file, saving each embedding
    back onto its FaceInstance row."""
    help = 'Cluster faces in videos'

    def add_arguments(self, parser):
        # Path to a text file containing one video path per line.
        parser.add_argument('path')

    def handle(self, *args, **options):
        with open(options['path']) as f:
            paths = [s.strip() for s in f.readlines()]
        model_path = '/app/deps/openface/models'
        of = OpenFaceHelper(model_path)
        print(model_path)
        #load facenet models and start tensorflow
        # NOTE(review): out_size and batch_size are never used below —
        # presumably leftovers from a batched implementation; verify.
        out_size = 160
        batch_size = 1000
        for path in paths:
            # A blank line terminates processing entirely (returns, not skips).
            if path == '':
                return
            print(path)
            video = Video.objects.get(path=path)
            faces = FaceInstance.objects.filter(frame__video=video).all()
            # Keep faces whose bbox is at least 0.05 wide (presumably
            # normalized coordinates — TODO confirm against the bbox schema).
            faces = [f for f in faces if f.bbox.x2 - f.bbox.x1 >= .05]
            print((len(faces)))
            #index in the face array NOT the face id
            for face_idx in range(len(faces)):
                curr_face = faces[face_idx]
                curr_img_path = './assets/thumbnails/face_{}.jpg'.format(curr_face.id)
                try:
                    rep = of.get_rep(curr_img_path)
                except Exception:
                    # Thumbnail missing or unreadable: skip this face.
                    continue
                # Persist the embedding as a JSON-encoded list.
                curr_face.features = json.dumps(rep.tolist())
                curr_face.save()
                # Progress marker every 100 faces.
                if (face_idx + 1) % 100 == 0:
                    print(face_idx)
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import feedparser
import requests
import ssl
import json
from datetime import datetime
from datetime import timedelta
from translate import translate
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse, parse_qsl
# Disable HTTPS certificate verification globally.
# NOTE(review): this weakens TLS for every request the process makes —
# presumably needed for endpoints with bad certificates; verify.
if hasattr(ssl, '_create_unverified_context'):
    ssl._create_default_https_context = ssl._create_unverified_context

# Browser-like user agent used when fetching feeds.
agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
# Forum endpoint that creates new topics.
publish_api = "https://bbs.zuqiuxunlian.com/api/v1/topics"
# Timestamp format shared by the feeds and conf.json watermarks.
time_format = "%Y-%m-%dT%H:%M:%SZ"
# for test
# accesstoken = "eb8b35cc-fb1a-4e0d-822b-4b729617fff8"
# pwd = "./"
# online
pwd = "/home/ubuntu/publish/"
def origin_url(url):
    """Extract the target URL from a redirect link's ``url=`` query parameter.

    BUG FIX: ``parse_qsl`` was only imported by the Python 2 fallback branch
    at module top (``from urllib.parse import urlparse`` omitted it), so this
    function raised NameError under Python 3. Import both names locally so it
    works on either interpreter.

    :param url: redirect URL such as ``https://.../r?url=<encoded target>``.
    :returns: the percent-decoded target URL.
    :raises KeyError: if the query string has no ``url`` parameter.
    """
    try:
        from urllib.parse import urlparse, parse_qsl
    except ImportError:  # Python 2
        from urlparse import urlparse, parse_qsl
    parsed_url = urlparse(url)
    qs = dict(parse_qsl(parsed_url.query))
    return qs['url']
def read_entry(entry):
    """Normalize a feedparser entry into a plain topic dict.

    Strips ``<b>`` markers and common HTML entities from the title/summary
    and resolves the redirect link to its original URL.

    NOTE(review): the entity strings in the original were garbled into
    invalid syntax (bare ``"`` / ``&`` / space literals); restored here to
    the evident intent of unescaping ``&quot;``, ``&amp;`` and ``&nbsp;`` —
    confirm against the upstream history.

    :param entry: a feedparser entry mapping.
    :returns: dict with id, author, published (datetime), title, summary, link.
    """
    title = entry['title'].replace("<b>", "").replace("</b>", "")
    title = title.replace("&quot;", "\"")
    title = title.replace("&amp;", "&")
    summary = entry['summary'].replace("<b>", "").replace("</b>", "")
    summary = summary.replace("&nbsp;", "")
    link = origin_url(entry['link'])
    topic = dict(
        id=entry['id'],
        author=entry.get('author', ''),
        published=datetime.strptime(entry['published'], time_format),
        title=title,
        summary=summary,
        link=link)
    return topic
def publish(topic, user):
    """POST a topic to the forum publish API on behalf of *user*.

    For English-language feeds the title and summary are machine-translated
    and the translation is prepended to the original text.

    :param topic: dict produced by ``read_entry`` (title/summary/link...).
    :param user: feed config entry providing 'lang', 'tab' and 'accesstoken'.
    """
    link = topic['link']
    title = topic['title']
    summary = topic['summary']
    # Translate title and content for English sources.
    if user['lang'] == 'en':
        title = translate(title) + " - " + title
        summary = translate(summary) + "\r\n\r\n" + summary
    # Body: summary followed by a markdown link back to the original article.
    content = summary + "\r\n\r\n" + "[" + link + "](" + link + ")"
    print(title.encode('utf-8'))
    payload = {
        "title": title,
        "tab": user['tab'],
        "content": content
    }
    querystring = {"accesstoken": user['accesstoken']}
    headers = {
        'Content-Type': "application/json",
        'cache-control': "no-cache",
    }
    # NOTE(review): the response status is never checked — failures are
    # silently ignored; verify that best-effort posting is intended.
    response = requests.request("POST", publish_api, json=payload, headers=headers, params=querystring)
def read_conf():
    """Load and return the publish configuration list from conf.json."""
    with open(pwd + 'conf.json', 'r') as conf_file:
        return json.load(conf_file)
def write_conf(data):
    """Persist *data* to conf.json, pretty-printed, preserving non-ASCII text."""
    with open(pwd + 'conf.json', 'w') as conf_file:
        json.dump(data, conf_file, indent=4, ensure_ascii=False)
# main: for each configured feed, publish every entry newer than the stored
# watermark, then persist the advanced watermarks back to conf.json.
publishes = read_conf()
for p in publishes:
    feed = feedparser.parse(p['rss_url'], agent=agent)
    updated = datetime.strptime(p['updated'], time_format)
    # updated = datetime.strptime("Mon, 18 Mar 2019 00:34:42 GMT", time_format)
    print(p['title'])
    flag = False
    # Iterate oldest-first so `updated` ends at the newest published entry.
    for entry in reversed(feed['entries']):
        print(entry['published'])
        published = datetime.strptime(entry['published'], time_format)
        if (published >= updated):  # several articles may share the same timestamp
            updated = published
            content = read_entry(entry)
            publish(content, p)
            # print(entry['title'].encode('utf-8'))
            flag = True
        # else:
        #     print(entry['title'].encode('utf-8'))
    if (flag):
        # Advance one minute past the newest entry so the >= comparison above
        # does not re-publish it on the next run.
        updated = updated + timedelta(minutes=1)
        p['updated'] = updated.strftime(time_format)
write_conf(publishes)
# url = "https://cdn.werss.weapp.design/api/v1/feeds/7061a4e1-3d34-472a-942a-e370c7ea2ec4.xml"
# feed = feedparser.parse(url)
# # print(feed['feed'])
# print(feed['entries'][0])
|
/*
* Copyright (c) 2014 DeNA Co., Ltd.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#include "h2o.h"
#include "h2o/http2.h"
#include "h2o/http2_internal.h"
/* Initial peer settings assumed before any SETTINGS frame arrives
 * (the defaults mandated by RFC 7540, section 6.5.2). */
const h2o_http2_settings_t H2O_HTTP2_SETTINGS_DEFAULT = {
    /* header_table_size */ 4096,
    /* enable_push */ 1,
    /* max_concurrent_streams */ UINT32_MAX,
    /* initial_window_size */ 65535,
    /* max_frame_size */ 16384};
/* Applies the entries of a peer SETTINGS frame payload to `settings`.
 * Each entry is 6 bytes: a 16-bit identifier followed by a 32-bit value.
 * Returns 0 on success; when a value is outside its legal range, sets
 * *err_desc and returns the entry's error code. Unknown identifiers are
 * ignored per RFC 7540 section 5.5; a trailing partial entry is a
 * FRAME_SIZE error. */
int h2o_http2_update_peer_settings(h2o_http2_settings_t *settings, const uint8_t *src, size_t len, const char **err_desc)
{
    for (; len >= 6; len -= 6, src += 6) {
        uint16_t identifier = h2o_http2_decode16u(src);
        uint32_t value = h2o_http2_decode32u(src + 2);
        switch (identifier) {
/* Range-check `value` against [min, max] and store it into settings->member,
 * failing with err_code (and an err_desc) on violation. */
#define SET(label, member, min, max, err_code)                                 \
    case H2O_HTTP2_SETTINGS_##label:                                           \
        if (!(min <= value && value <= max)) {                                 \
            *err_desc = "invalid SETTINGS frame";                              \
            return err_code;                                                   \
        }                                                                      \
        settings->member = value;                                              \
        break
            SET(HEADER_TABLE_SIZE, header_table_size, 0, UINT32_MAX, 0);
            SET(ENABLE_PUSH, enable_push, 0, 1, H2O_HTTP2_ERROR_PROTOCOL);
            SET(MAX_CONCURRENT_STREAMS, max_concurrent_streams, 0, UINT32_MAX, 0);
            SET(INITIAL_WINDOW_SIZE, initial_window_size, 0, 0x7fffffff, H2O_HTTP2_ERROR_FLOW_CONTROL);
            SET(MAX_FRAME_SIZE, max_frame_size, 16384, 16777215, H2O_HTTP2_ERROR_PROTOCOL);
#undef SET
        default:
            /* ignore unknown (5.5) */
            break;
        }
    }
    /* Leftover bytes mean the payload was not a multiple of 6. */
    if (len != 0)
        return H2O_HTTP2_ERROR_FRAME_SIZE;
    return 0;
}
/* Writes the 9-byte HTTP/2 frame header (24-bit payload length, type byte,
 * flags byte, 32-bit stream id) at `dst` and returns the position just past
 * it. Aborts the process if `length` exceeds the 24-bit maximum. */
uint8_t *h2o_http2_encode_frame_header(uint8_t *dst, size_t length, uint8_t type, uint8_t flags, int32_t stream_id)
{
    if (length > 0xffffff)
        h2o_fatal("invalid length");

    dst = h2o_http2_encode24u(dst, (uint32_t)length);
    *dst++ = type;
    *dst++ = flags;
    dst = h2o_http2_encode32u(dst, stream_id);

    return dst;
}
/* Reserves header+payload space for one frame in `buf`, bumps the buffer's
 * size up front, writes the frame header, and returns the position where the
 * caller should write the `length` payload bytes.
 * NOTE(review): the h2o_buffer_reserve result is not checked for allocation
 * failure here — presumably guaranteed by the buffer implementation; verify. */
static uint8_t *allocate_frame(h2o_buffer_t **buf, size_t length, uint8_t type, uint8_t flags, int32_t stream_id)
{
    h2o_iovec_t alloced = h2o_buffer_reserve(buf, H2O_HTTP2_FRAME_HEADER_SIZE + length);
    (*buf)->size += H2O_HTTP2_FRAME_HEADER_SIZE + length;
    return h2o_http2_encode_frame_header((uint8_t *)alloced.base, length, type, flags, stream_id);
}
/* Appends a RST_STREAM frame for `stream_id` carrying the given error code
 * to `buf`. */
void h2o_http2_encode_rst_stream_frame(h2o_buffer_t **buf, uint32_t stream_id, int errnum)
{
    uint8_t *dst = allocate_frame(buf, 4, H2O_HTTP2_FRAME_TYPE_RST_STREAM, 0, stream_id);
    dst = h2o_http2_encode32u(dst, errnum);
}
/* Appends a PING frame (setting the ACK flag when `is_ack` is nonzero)
 * echoing the caller's 8-byte opaque `data` block. */
void h2o_http2_encode_ping_frame(h2o_buffer_t **buf, int is_ack, const uint8_t *data)
{
    uint8_t *dst = allocate_frame(buf, 8, H2O_HTTP2_FRAME_TYPE_PING, is_ack ? H2O_HTTP2_FRAME_FLAG_ACK : 0, 0);
    memcpy(dst, data, 8);
    dst += 8; /* advance past the payload; value unused afterwards */
}
/* Appends a GOAWAY frame: last processed stream id, the error code
 * (`errnum` negated into the wire value), plus optional debug data. */
void h2o_http2_encode_goaway_frame(h2o_buffer_t **buf, uint32_t last_stream_id, int errnum, h2o_iovec_t additional_data)
{
    uint8_t *dst = allocate_frame(buf, 8 + additional_data.len, H2O_HTTP2_FRAME_TYPE_GOAWAY, 0, 0);
    dst = h2o_http2_encode32u(dst, last_stream_id);
    dst = h2o_http2_encode32u(dst, (uint32_t)-errnum);
    memcpy(dst, additional_data.base, additional_data.len);
}
/* Appends a WINDOW_UPDATE frame for `stream_id` (0 = connection-level)
 * carrying the given flow-control increment. */
void h2o_http2_encode_window_update_frame(h2o_buffer_t **buf, uint32_t stream_id, int32_t window_size_increment)
{
    uint8_t *dst = allocate_frame(buf, 4, H2O_HTTP2_FRAME_TYPE_WINDOW_UPDATE, 0, stream_id);
    dst = h2o_http2_encode32u(dst, window_size_increment);
}
/* Parses one frame (header fields plus a pointer to the payload) from `src`.
 * Returns the number of bytes consumed, H2O_HTTP2_ERROR_INCOMPLETE when more
 * input is required, or H2O_HTTP2_ERROR_FRAME_SIZE when the declared length
 * exceeds the locally advertised SETTINGS_MAX_FRAME_SIZE.
 * The payload pointer aliases `src`; it is not copied. */
ssize_t h2o_http2_decode_frame(h2o_http2_frame_t *frame, const uint8_t *src, size_t len, const h2o_http2_settings_t *host_settings,
                               const char **err_desc)
{
    if (len < H2O_HTTP2_FRAME_HEADER_SIZE)
        return H2O_HTTP2_ERROR_INCOMPLETE;

    frame->length = h2o_http2_decode24u(src);
    frame->type = src[3];
    frame->flags = src[4];
    frame->stream_id = h2o_http2_decode32u(src + 5);

    if (frame->length > host_settings->max_frame_size)
        return H2O_HTTP2_ERROR_FRAME_SIZE;

    if (len < H2O_HTTP2_FRAME_HEADER_SIZE + frame->length)
        return H2O_HTTP2_ERROR_INCOMPLETE;

    frame->payload = src + H2O_HTTP2_FRAME_HEADER_SIZE;

    return H2O_HTTP2_FRAME_HEADER_SIZE + frame->length;
}
/* Extracts the data region of a DATA frame into `payload`, stripping the
 * optional PADDED prologue (one pad-length byte plus that many trailing
 * padding bytes). Returns 0 on success, or H2O_HTTP2_ERROR_PROTOCOL (with
 * *err_desc set) for stream id 0 or padding longer than the frame allows. */
int h2o_http2_decode_data_payload(h2o_http2_data_payload_t *payload, const h2o_http2_frame_t *frame, const char **err_desc)
{
    if (frame->stream_id == 0) {
        *err_desc = "invalid stream id in DATA frame";
        return H2O_HTTP2_ERROR_PROTOCOL;
    }

    if ((frame->flags & H2O_HTTP2_FRAME_FLAG_PADDED) != 0) {
        uint8_t padding_length;
        /* need at least the pad-length byte itself */
        if (frame->length < 1) {
            *err_desc = "invalid DATA frame";
            return H2O_HTTP2_ERROR_PROTOCOL;
        }
        padding_length = frame->payload[0];
        if (frame->length < 1 + padding_length) {
            *err_desc = "invalid DATA frame";
            return H2O_HTTP2_ERROR_PROTOCOL;
        }
        /* data sits between the pad-length byte and the trailing padding */
        payload->data = frame->payload + 1;
        payload->length = frame->length - (1 + padding_length);
    } else {
        payload->data = frame->payload;
        payload->length = frame->length;
    }
    return 0;
}
/* Decodes the 5-byte priority block: exclusive flag (top bit), 31-bit
 * stream dependency, and weight (stored on the wire as weight-1).
 * Returns the position just past the block. */
static const uint8_t *decode_priority(h2o_http2_priority_t *priority, const uint8_t *src)
{
    uint32_t u4 = h2o_http2_decode32u(src);
    src += 4;
    priority->exclusive = u4 >> 31;
    priority->dependency = u4 & 0x7fffffff;
    priority->weight = (uint16_t)*src++ + 1;
    return src;
}
/* Parses a HEADERS frame payload: optional padding (PADDED flag), optional
 * 5-byte priority block (PRIORITY flag), then the header block fragment,
 * which is exposed via `payload->headers`/`headers_len` without copying.
 * Returns 0 on success or a negative error on malformed input. */
int h2o_http2_decode_headers_payload(h2o_http2_headers_payload_t *payload, const h2o_http2_frame_t *frame, const char **err_desc)
{
    const uint8_t *src = frame->payload, *src_end = frame->payload + frame->length;

    if (frame->stream_id == 0) {
        *err_desc = "invalid stream id in HEADERS frame";
        return H2O_HTTP2_ERROR_PROTOCOL;
    }

    if ((frame->flags & H2O_HTTP2_FRAME_FLAG_PADDED) != 0) {
        uint32_t padlen;
        if (src == src_end) {
            *err_desc = "invalid HEADERS frame";
            return H2O_HTTP2_ERROR_PROTOCOL;
        }
        padlen = *src++;
        if (src_end - src < padlen) {
            *err_desc = "invalid HEADERS frame";
            return H2O_HTTP2_ERROR_PROTOCOL;
        }
        /* trim the trailing padding from the region we expose */
        src_end -= padlen;
    }

    if ((frame->flags & H2O_HTTP2_FRAME_FLAG_PRIORITY) != 0) {
        /* NOTE(review): this path returns -1 without setting *err_desc,
         * unlike the branches above — presumably -1 maps to a protocol
         * error in the error-code enum; verify. */
        if (src_end - src < 5)
            return -1;
        src = decode_priority(&payload->priority, src);
    } else {
        payload->priority = h2o_http2_default_priority;
    }

    payload->headers = src;
    payload->headers_len = src_end - src;

    return 0;
}
/* Parses a PRIORITY frame payload. PRIORITY is stream-level only (nonzero
 * stream id) and must be exactly 5 bytes (dependency word + weight byte).
 * Returns 0 on success, or a negative H2O_HTTP2_ERROR_* with *err_desc set. */
int h2o_http2_decode_priority_payload(h2o_http2_priority_t *payload, const h2o_http2_frame_t *frame, const char **err_desc)
{
    if (frame->stream_id == 0) {
        *err_desc = "invalid stream id in PRIORITY frame";
        return H2O_HTTP2_ERROR_PROTOCOL;
    }
    if (frame->length != 5) {
        /* fixed typo in the error message: was "invaild" */
        *err_desc = "invalid PRIORITY frame";
        return H2O_HTTP2_ERROR_FRAME_SIZE;
    }
    decode_priority(payload, frame->payload);
    return 0;
}
/* Parses a RST_STREAM frame payload: stream-level only and exactly one
 * 32-bit error code. Returns 0 on success. */
int h2o_http2_decode_rst_stream_payload(h2o_http2_rst_stream_payload_t *payload, const h2o_http2_frame_t *frame,
                                        const char **err_desc)
{
    if (frame->stream_id == 0) {
        *err_desc = "invalid stream id in RST_STREAM frame";
        return H2O_HTTP2_ERROR_PROTOCOL;
    }
    if (frame->length != sizeof(payload->error_code)) {
        *err_desc = "invalid RST_STREAM frame";
        return H2O_HTTP2_ERROR_FRAME_SIZE;
    }
    payload->error_code = h2o_http2_decode32u(frame->payload);
    return 0;
}
/* Parses a PING frame payload: connection-level only (stream id 0) with an
 * opaque data block of exactly sizeof(payload->data) bytes, which is copied
 * out. Returns 0 on success. */
int h2o_http2_decode_ping_payload(h2o_http2_ping_payload_t *payload, const h2o_http2_frame_t *frame, const char **err_desc)
{
    if (frame->stream_id != 0) {
        *err_desc = "invalid PING frame";
        return H2O_HTTP2_ERROR_PROTOCOL;
    }
    if (frame->length != sizeof(payload->data)) {
        *err_desc = "invalid PING frame";
        return H2O_HTTP2_ERROR_FRAME_SIZE;
    }
    memcpy(payload->data, frame->payload, sizeof(payload->data));
    return 0;
}
/* Parses a GOAWAY frame payload: 31-bit last processed stream id, 32-bit
 * error code, and optional trailing debug data (base is NULL when absent;
 * it aliases the frame payload, not a copy). Connection-level only and at
 * least 8 bytes long. Returns 0 on success. */
int h2o_http2_decode_goaway_payload(h2o_http2_goaway_payload_t *payload, const h2o_http2_frame_t *frame, const char **err_desc)
{
    if (frame->stream_id != 0) {
        *err_desc = "invalid stream id in GOAWAY frame";
        return H2O_HTTP2_ERROR_PROTOCOL;
    }
    if (frame->length < 8) {
        *err_desc = "invalid GOAWAY frame";
        return H2O_HTTP2_ERROR_FRAME_SIZE;
    }
    payload->last_stream_id = h2o_http2_decode32u(frame->payload) & 0x7fffffff;
    payload->error_code = h2o_http2_decode32u(frame->payload + 4);
    if ((payload->debug_data.len = frame->length - 8) != 0)
        payload->debug_data.base = (char *)frame->payload + 8;
    else
        payload->debug_data.base = NULL;
    return 0;
}
/* Parses a WINDOW_UPDATE frame payload. On error, *err_is_stream_level tells
 * the caller whether only the stream should be reset (zero increment on a
 * nonzero stream id) or the whole connection. Returns 0 on success,
 * FRAME_SIZE when the payload is not exactly 4 bytes, or PROTOCOL when the
 * increment is zero. */
int h2o_http2_decode_window_update_payload(h2o_http2_window_update_payload_t *payload, const h2o_http2_frame_t *frame,
                                           const char **err_desc, int *err_is_stream_level)
{
    if (frame->length != 4) {
        /* NOTE(review): *err_desc is left unset on this path — presumably
         * callers tolerate that for FRAME_SIZE errors; verify. */
        *err_is_stream_level = 0;
        return H2O_HTTP2_ERROR_FRAME_SIZE;
    }

    payload->window_size_increment = h2o_http2_decode32u(frame->payload) & 0x7fffffff;
    if (payload->window_size_increment == 0) {
        *err_is_stream_level = frame->stream_id != 0;
        /* fixed typo in the error message: was "invaild" */
        *err_desc = "invalid WINDOW_UPDATE frame";
        return H2O_HTTP2_ERROR_PROTOCOL;
    }

    return 0;
}
|
// CocoaPods-generated umbrella-header prologue: import UIKit for
// Objective-C translation units; otherwise define FOUNDATION_EXPORT with
// the correct linkage when Foundation's own definition is unavailable.
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif

// Framework version metadata symbols (defined in the generated .m file).
FOUNDATION_EXPORT double Pods_InfinitePageViewControllerVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_InfinitePageViewControllerVersionString[];
|
/* globals GBX, VST, THREE, THR, PIN, divDragMoveContent, VSTdetMenu, VSTselStoreys, VSTdivReportsLog, VSTdivSurfaceType */
// jshint esversion: 6
// jshint loopfunc: true
// Module namespace for the "view storeys" feature of the gbXML viewer,
// carrying its script metadata.
const VST = {

	script: {

		copyright: "Copyright 2019 Ladybug Tools authors",
		date: "2019-07-29",
		description: "View the surfaces in a gbXML file by selecting one or more storeys from the list of all storeys",
		helpFile: "js-view-gbxml/vst-view-storeys.md",
		license: "MIT License",
		sourceCode: "js-view-gbxml/vst-view-storeys.js",
		version: "0.17.01-1vst"

	}

};
// Returns the HTML for the "VST Storeys" <details> menu panel.
// BUG FIX: the closing </select tag was missing its '>', which made the
// browser swallow the following </div> while recovering from the broken tag.
// NOTE(review): MNU, VGC and POP are presumably provided globally by sibling
// modules — they are not in this file's globals comment; verify.
VST.getMenuViewStoreys = function() {

	const source = `<a href=${ MNU.urlSourceCode + VST.script.sourceCode } target=_blank >${ MNU.urlSourceCodeIcon } source code</a>`;
	const help = VGC.getHelpButton("VSTbutSum",VST.script.helpFile,POP.footer,source);
	const selectOptions = [ "id", "Level", "Name" ].map( option => `<option ${ option === "Name" ? "selected" : "" }>${ option }</option>`);

	const htm =
	`
		<details id=VSTdetMenu ontoggle=VST.setViewStoreysOptions(); >
			<summary>VST Storeys</summary>
			${ help }
			<p>
				Display of surfaces by storey. Storeys are listed from lowest to highest elevation followed by storey name.
				Operates in conjunction with surface type settings.
				<span id=VSTspnCount ></span>
			</p>
			<p>
				<input type=search id=VSTinpAttribute oninput=VGC.setSelectedIndex(this,VSTselStoreys) placeholder="Enter an attribute" >
			</p>
			<div id="VSTdivViewStoreys" >
				<select id=VSTselStoreys oninput=VST.selStoreys(this); onclick=VST.selStoreys(this); multiple style=width:100%; ></select>
			</div>
			<div id="VSTdivReportsLog" ></div>
			<!--
			<p>Attribute to show:
				<select id=VSTselAttribute oninput=VST.setViewStoreysOptions(); >${ selectOptions }</select>
			</p>
			-->
			<p>
				<button onclick=VGC.toggleViewSelectedMeshes(this,VSTselStoreys,VST.visible); >
					Show/hide all storeys
				</button>
			</p>
			<p>Select multiple storeys by pressing shift or control keys</p>
		</details>
	`;

	return htm;

};
// Populates the storey <select> when the details panel is opened: sizes the
// list (max 10 visible rows), builds one option per storey with alternating
// row colors, and reports the storey count.
VST.setViewStoreysOptions = function() {

	if ( VSTdetMenu.open === false ) { return; }

	VSTinpAttribute.value = "";
	VSTselStoreys.size = GBX.storeysJson.length > 10 ? 10 : GBX.storeysJson.length;

	let color;
	console.log( 'GBX.storeysJson', GBX.storeysJson );

	const options = GBX.storeysJson.map( storey => {

		// Alternate pink/default backgrounds for readability.
		color = color === 'pink' ? '' : 'pink';
		// BUG FIX: a stray '}' used to trail every </option>, leaking the
		// character into each list entry's visible text.
		return `<option style=background-color:${ color } value=${ storey.id } title="id: ${ storey.id }">${ storey.level } m / ${ storey.name }</option>`

	} );

	VSTselStoreys.innerHTML = options;
	VSTspnCount.innerHTML = `${ GBX.storeysJson.length } storeys found.`;

	// Keep arrow keys free for 3D navigation rather than list scrolling.
	THR.controls.enableKeys = false;

};
//////////
// Handles storey-list selection: refreshes the popup and attribute panes,
// records the selected storey ids on GBX, updates mesh visibility, and
// caches the indices of the currently visible surfaces in VST.visible.
VST.selStoreys = function( select ) {

	VGC.setPopupBlank();
	divDragMoveContent.innerHTML = PCO.getStoreyAttributes( VSTselStoreys.value );

	// Ids of every currently selected <option>.
	GBX.storeyIdsActive = Array.from( select.selectedOptions ).map( option => option.value );

	VST.setVisible();

	VST.visible = GBX.meshGroup.children.filter( mesh => mesh.visible === true )
		.map( mesh => mesh.userData.index );

	VSTdivReportsLog.innerHTML = `<p>${ VST.visible.length } surfaces visible</p>`;

	// Keep arrow keys free for 3D navigation rather than list scrolling.
	THR.controls.enableKeys = false;

};
// Shows only the meshes belonging to the active storeys (always including
// shading surfaces), intersected with the active surface types; placards
// follow the same storey filter.
VST.setVisible = function () {

	GBX.meshGroup.children.forEach( mesh => mesh.visible = false );

	GBX.meshGroup.children
		.filter( mesh => GBX.storeyIdsActive.includes( mesh.userData.storeyId ) || mesh.userData.surfaceType === "Shade" )
		.filter( mesh => GBX.surfaceTypesActive.includes( mesh.userData.surfaceType ) )
		.forEach( mesh => mesh.visible = true );

	GBX.placards.children.forEach( mesh => mesh.visible = false );

	// BUG FIX: this read `VST.storeyIdsActive`, which is never assigned
	// anywhere in this module — selStoreys stores the selection on
	// `GBX.storeyIdsActive`, so placards always threw/failed to update.
	GBX.placards.children
		.filter( mesh => GBX.storeyIdsActive.includes( mesh.userData.storeyId ) )
		.forEach( mesh => mesh.visible = true );

};
|
/* Compute checksum Author: Johan W. Stevenson */
#include <stdio.h>
int errs;
/* Entry point (K&R style). Three usage modes:
 *   crc            -- checksum standard input
 *   crc -          -- read file names to checksum from stdin, one per line
 *   crc f1 f2 ...  -- checksum each named file in turn
 * Exit status is nonzero when any file could not be opened. */
main(argc, argv)
char **argv;
{
  char line[256];

  if (argc <= 1)
	crc((char *) 0);
  else if (argc == 2 && strcmp(argv[1], "-") == 0)
	while (fgets(line, sizeof line, stdin) != NULL) {
		/* strip the trailing newline fgets leaves in place */
		if (line[strlen(line) - 1] == '\n')
			line[strlen(line) - 1] = '\0';
		crc(line);
	}
  else
	do {
		crc(argv[1]);
		argv++;
		argc--;
	} while (argc > 1);
  exit(errs != 0);
}
/* Crctab calculated by Mark G. Mendel, Network Systems Corporation */
/* 256-entry lookup table for the CRC-16/CCITT polynomial (0x1021),
 * consumed by the updcrc() macro below: one table step per input byte. */
static unsigned short crctab[256] = {
    0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50a5, 0x60c6, 0x70e7,
    0x8108, 0x9129, 0xa14a, 0xb16b, 0xc18c, 0xd1ad, 0xe1ce, 0xf1ef,
    0x1231, 0x0210, 0x3273, 0x2252, 0x52b5, 0x4294, 0x72f7, 0x62d6,
    0x9339, 0x8318, 0xb37b, 0xa35a, 0xd3bd, 0xc39c, 0xf3ff, 0xe3de,
    0x2462, 0x3443, 0x0420, 0x1401, 0x64e6, 0x74c7, 0x44a4, 0x5485,
    0xa56a, 0xb54b, 0x8528, 0x9509, 0xe5ee, 0xf5cf, 0xc5ac, 0xd58d,
    0x3653, 0x2672, 0x1611, 0x0630, 0x76d7, 0x66f6, 0x5695, 0x46b4,
    0xb75b, 0xa77a, 0x9719, 0x8738, 0xf7df, 0xe7fe, 0xd79d, 0xc7bc,
    0x48c4, 0x58e5, 0x6886, 0x78a7, 0x0840, 0x1861, 0x2802, 0x3823,
    0xc9cc, 0xd9ed, 0xe98e, 0xf9af, 0x8948, 0x9969, 0xa90a, 0xb92b,
    0x5af5, 0x4ad4, 0x7ab7, 0x6a96, 0x1a71, 0x0a50, 0x3a33, 0x2a12,
    0xdbfd, 0xcbdc, 0xfbbf, 0xeb9e, 0x9b79, 0x8b58, 0xbb3b, 0xab1a,
    0x6ca6, 0x7c87, 0x4ce4, 0x5cc5, 0x2c22, 0x3c03, 0x0c60, 0x1c41,
    0xedae, 0xfd8f, 0xcdec, 0xddcd, 0xad2a, 0xbd0b, 0x8d68, 0x9d49,
    0x7e97, 0x6eb6, 0x5ed5, 0x4ef4, 0x3e13, 0x2e32, 0x1e51, 0x0e70,
    0xff9f, 0xefbe, 0xdfdd, 0xcffc, 0xbf1b, 0xaf3a, 0x9f59, 0x8f78,
    0x9188, 0x81a9, 0xb1ca, 0xa1eb, 0xd10c, 0xc12d, 0xf14e, 0xe16f,
    0x1080, 0x00a1, 0x30c2, 0x20e3, 0x5004, 0x4025, 0x7046, 0x6067,
    0x83b9, 0x9398, 0xa3fb, 0xb3da, 0xc33d, 0xd31c, 0xe37f, 0xf35e,
    0x02b1, 0x1290, 0x22f3, 0x32d2, 0x4235, 0x5214, 0x6277, 0x7256,
    0xb5ea, 0xa5cb, 0x95a8, 0x8589, 0xf56e, 0xe54f, 0xd52c, 0xc50d,
    0x34e2, 0x24c3, 0x14a0, 0x0481, 0x7466, 0x6447, 0x5424, 0x4405,
    0xa7db, 0xb7fa, 0x8799, 0x97b8, 0xe75f, 0xf77e, 0xc71d, 0xd73c,
    0x26d3, 0x36f2, 0x0691, 0x16b0, 0x6657, 0x7676, 0x4615, 0x5634,
    0xd94c, 0xc96d, 0xf90e, 0xe92f, 0x99c8, 0x89e9, 0xb98a, 0xa9ab,
    0x5844, 0x4865, 0x7806, 0x6827, 0x18c0, 0x08e1, 0x3882, 0x28a3,
    0xcb7d, 0xdb5c, 0xeb3f, 0xfb1e, 0x8bf9, 0x9bd8, 0xabbb, 0xbb9a,
    0x4a75, 0x5a54, 0x6a37, 0x7a16, 0x0af1, 0x1ad0, 0x2ab3, 0x3a92,
    0xfd2e, 0xed0f, 0xdd6c, 0xcd4d, 0xbdaa, 0xad8b, 0x9de8, 0x8dc9,
    0x7c26, 0x6c07, 0x5c64, 0x4c45, 0x3ca2, 0x2c83, 0x1ce0, 0x0cc1,
    0xef1f, 0xff3e, 0xcf5d, 0xdf7c, 0xaf9b, 0xbfba, 0x8fd9, 0x9ff8,
    0x6e17, 0x7e36, 0x4e55, 0x5e74, 0x2e93, 0x3eb2, 0x0ed1, 0x1ef0
};
/* Updcrc macro derived from article Copyright (C) 1986 Stephen Satchell.
 * NOTE: First argument must be in range 0 to 255.
 * Second argument is referenced twice.
 *
 * Programmers may incorporate any or all code into their programs,
 * giving proper credit within the source. Publication of the
 * source routines is permitted so long as proper credit is given
 * to Stephen Satchell, Satchell Evaluations and Chuck Forsberg,
 * Omen Technology.
 */
#define updcrc(cp, crc) ( crctab[((crc >> 8) & 255)] ^ (crc << 8) ^ cp)

/* Checksums one input: the file named by `fname`, or stdin when fname is
 * NULL. Prints "<crc> <length> [name]" per input; an unopenable file is
 * reported to stderr and counted in the global `errs`. */
crc(fname)
char *fname;
{
  register int c;
  register int i;
  register long len = 0;
  register unsigned short crc = 0;
  register FILE *fp;

  if (fname == NULL)
	fp = stdin;
  else if ((fp = fopen(fname, "r")) == NULL) {
	fprintf(stderr, "crc: cannot open %s\n", fname);
	errs++;
	return;
  }
  /* fold every byte into the running CRC while counting the length */
  while ((c = getc(fp)) != EOF) {
	len++;
	crc = updcrc(c, crc);
  }
  printf("%05u %6ld", crc, len);
  if (fname) {
	printf(" %s", fname);
	fclose(fp);
  }
  printf("\n");
}
|
/**
* @fileoverview added by tsickle
* Generated from: src/cdk/tree/padding.ts
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { Directionality } from '@angular/cdk/bidi';
import { coerceNumberProperty } from '@angular/cdk/coercion';
import { Directive, ElementRef, Input, Optional, Renderer2 } from '@angular/core';
import { takeUntil } from 'rxjs/operators';
import { Subject } from 'rxjs';
import { CdkTree, CdkTreeNode } from './tree';
/**
* Regex used to split a string on its CSS units.
* @type {?}
*/
import * as ɵngcc0 from '@angular/core';
import * as ɵngcc1 from '@angular/cdk/bidi';
const cssUnitPattern = /([A-Za-z%]+)$/;
/**
* Indent for the children tree dataNodes.
* This directive will add left-padding to the node to show hierarchy.
* @template T
*/
export class CdkTreeNodePadding {
    /**
     * Stores the injected tree node, tree, renderer, host element and
     * (optional) directionality, applies the initial padding, and re-applies
     * it whenever the text direction or the node's data changes.
     * @param {?} _treeNode
     * @param {?} _tree
     * @param {?} _renderer
     * @param {?} _element
     * @param {?} _dir
     */
    constructor(_treeNode, _tree, _renderer, _element, _dir) {
        this._treeNode = _treeNode;
        this._tree = _tree;
        this._renderer = _renderer;
        this._element = _element;
        this._dir = _dir;
        /**
         * Subject that emits when the component has been destroyed.
         */
        this._destroyed = new Subject();
        /**
         * CSS units used for the indentation value.
         */
        this.indentUnits = 'px';
        this._indent = 40;
        this._setPadding();
        if (_dir) {
            // Direction flips swap paddingLeft/paddingRight, so force a re-apply.
            _dir.change.pipe(takeUntil(this._destroyed)).subscribe((/**
             * @return {?}
             */
            () => this._setPadding(true)));
        }
        // In Ivy the indentation binding might be set before the tree node's data has been added,
        // which means that we'll miss the first render. We have to subscribe to changes in the
        // data to ensure that everything is up to date.
        _treeNode._dataChanges.subscribe((/**
         * @return {?}
         */
        () => this._setPadding()));
    }
    /**
     * The level of depth of the tree node. The padding will be `level * indent` pixels.
     * @return {?}
     */
    get level() { return this._level; }
    /**
     * @param {?} value
     * @return {?}
     */
    set level(value) {
        // Set to null as the fallback value so that _setPadding can fall back to the node level if the
        // consumer set the directive as `cdkTreeNodePadding=""`. We still want to take this value if
        // they set 0 explicitly.
        this._level = (/** @type {?} */ (coerceNumberProperty(value, null)));
        this._setPadding();
    }
    /**
     * The indent for each level. Can be a number or a CSS string.
     * Default number 40px from material design menu sub-menu spec.
     * @return {?}
     */
    get indent() { return this._indent; }
    /**
     * Splits a string indent such as "2em" into its numeric value and units;
     * plain numbers keep the default 'px' units.
     * @param {?} indent
     * @return {?}
     */
    set indent(indent) {
        /** @type {?} */
        let value = indent;
        /** @type {?} */
        let units = 'px';
        if (typeof indent === 'string') {
            /** @type {?} */
            const parts = indent.split(cssUnitPattern);
            value = parts[0];
            units = parts[1] || units;
        }
        this.indentUnits = units;
        this._indent = coerceNumberProperty(value);
        this._setPadding();
    }
    /**
     * @return {?}
     */
    ngOnDestroy() {
        this._destroyed.next();
        this._destroyed.complete();
    }
    /**
     * The padding indent value for the tree node. Returns a string with px numbers if not null.
     * @return {?}
     */
    _paddingIndent() {
        /** @type {?} */
        const nodeLevel = (this._treeNode.data && this._tree.treeControl.getLevel)
            ? this._tree.treeControl.getLevel(this._treeNode.data)
            : null;
        // An explicitly-set level wins over the level derived from the tree control.
        /** @type {?} */
        const level = this._level == null ? nodeLevel : this._level;
        return typeof level === 'number' ? `${level * this._indent}${this.indentUnits}` : null;
    }
    /**
     * Applies the computed padding on the direction-appropriate side and
     * clears the opposite side; skips the DOM write when nothing changed
     * unless `forceChange` is set (e.g. after a direction flip).
     * @param {?=} forceChange
     * @return {?}
     */
    _setPadding(forceChange = false) {
        /** @type {?} */
        const padding = this._paddingIndent();
        if (padding !== this._currentPadding || forceChange) {
            /** @type {?} */
            const element = this._element.nativeElement;
            /** @type {?} */
            const paddingProp = this._dir && this._dir.value === 'rtl' ? 'paddingRight' : 'paddingLeft';
            /** @type {?} */
            const resetProp = paddingProp === 'paddingLeft' ? 'paddingRight' : 'paddingLeft';
            this._renderer.setStyle(element, paddingProp, padding);
            this._renderer.setStyle(element, resetProp, null);
            this._currentPadding = padding;
        }
    }
}
// ngcc-generated Ivy factory and directive definitions. The `8` flag on the
// Directionality injection mirrors the @Optional decorator recorded below.
CdkTreeNodePadding.ɵfac = function CdkTreeNodePadding_Factory(t) { return new (t || CdkTreeNodePadding)(ɵngcc0.ɵɵdirectiveInject(CdkTreeNode), ɵngcc0.ɵɵdirectiveInject(CdkTree), ɵngcc0.ɵɵdirectiveInject(ɵngcc0.Renderer2), ɵngcc0.ɵɵdirectiveInject(ɵngcc0.ElementRef), ɵngcc0.ɵɵdirectiveInject(ɵngcc1.Directionality, 8)); };
CdkTreeNodePadding.ɵdir = ɵngcc0.ɵɵdefineDirective({ type: CdkTreeNodePadding, selectors: [["", "cdkTreeNodePadding", ""]], inputs: { level: ["cdkTreeNodePadding", "level"], indent: ["cdkTreeNodePaddingIndent", "indent"] } });
/** @nocollapse */
CdkTreeNodePadding.ctorParameters = () => [
    { type: CdkTreeNode },
    { type: CdkTree },
    { type: Renderer2 },
    { type: ElementRef },
    { type: Directionality, decorators: [{ type: Optional }] }
];
CdkTreeNodePadding.propDecorators = {
    level: [{ type: Input, args: ['cdkTreeNodePadding',] }],
    indent: [{ type: Input, args: ['cdkTreeNodePaddingIndent',] }]
};
// Attaches decorator metadata for dev-mode tooling; the @__PURE__ marker lets
// production builds tree-shake this IIFE away.
/*@__PURE__*/ (function () { ɵngcc0.ɵsetClassMetadata(CdkTreeNodePadding, [{
        type: Directive,
        args: [{
                selector: '[cdkTreeNodePadding]'
            }]
    }], function () { return [{ type: CdkTreeNode }, { type: CdkTree }, { type: ɵngcc0.Renderer2 }, { type: ɵngcc0.ElementRef }, { type: ɵngcc1.Directionality, decorators: [{
                type: Optional
            }] }]; }, { level: [{
            type: Input,
            args: ['cdkTreeNodePadding']
        }], indent: [{
            type: Input,
            args: ['cdkTreeNodePaddingIndent']
        }] }); })();
// tsickle type-information block: guarded by `if (false)` so it never
// executes — it only declares property types for the Closure compiler.
if (false) {
    /** @type {?} */
    CdkTreeNodePadding.ngAcceptInputType_level;
    /**
     * Current padding value applied to the element. Used to avoid unnecessarily hitting the DOM.
     * @type {?}
     * @private
     */
    CdkTreeNodePadding.prototype._currentPadding;
    /**
     * Subject that emits when the component has been destroyed.
     * @type {?}
     * @private
     */
    CdkTreeNodePadding.prototype._destroyed;
    /**
     * CSS units used for the indentation value.
     * @type {?}
     */
    CdkTreeNodePadding.prototype.indentUnits;
    /** @type {?} */
    CdkTreeNodePadding.prototype._level;
    /** @type {?} */
    CdkTreeNodePadding.prototype._indent;
    /**
     * @type {?}
     * @private
     */
    CdkTreeNodePadding.prototype._treeNode;
    /**
     * @type {?}
     * @private
     */
    CdkTreeNodePadding.prototype._tree;
    /**
     * @type {?}
     * @private
     */
    CdkTreeNodePadding.prototype._renderer;
    /**
     * @type {?}
     * @private
     */
    CdkTreeNodePadding.prototype._element;
    /**
     * @type {?}
     * @private
     */
    CdkTreeNodePadding.prototype._dir;
}
//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"sources":["C:/Users/user/Desktop/PROJECTS/STUANA/IdeyPay/tovo-angular-9-app-landing-page/node_modules/@angular/cdk/esm2015/tree/padding.js"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;AAsBA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4HC,iOAKC;;;;;;;;;;;;;;;;;;;;;;;;;;oBAYA","file":"padding.js","sourcesContent":["/**\n * @fileoverview added by tsickle\n * Generated from: src/cdk/tree/padding.ts\n * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc\n */\n/**\n * @license\n * Copyright Google LLC All Rights Reserved.\n *\n * Use of this source code is governed by an MIT-style license that can be\n * found in the LICENSE file at https://angular.io/license\n */\nimport { Directionality } from '@angular/cdk/bidi';\nimport { coerceNumberProperty } from '@angular/cdk/coercion';\nimport { Directive, ElementRef, Input, Optional, Renderer2 } from '@angular/core';\nimport { takeUntil } from 'rxjs/operators';\nimport { Subject } from 'rxjs';\nimport { CdkTree, CdkTreeNode } from './tree';\n/**\n * Regex used to split a string on its CSS units.\n * @type {?}\n */\nconst cssUnitPattern = /([A-Za-z%]+)$/;\n/**\n * Indent for the children tree dataNodes.\n * This directive will add left-padding to the node to show hierarchy.\n * @template T\n */\nexport class CdkTreeNodePadding {\n    /**\n     * @param {?} _treeNode\n     * @param {?} _tree\n     * @param {?} _renderer\n     * @param {?} _element\n     * @param {?} _dir\n     */\n    constructor(_treeNode, _tree, _renderer, _element, _dir) {\n        this._treeNode = _treeNode;\n        this._tree = _tree;\n        this._renderer = _renderer;\n        this._element = _element;\n        this._dir = _dir;\n        /**\n         * Subject that emits when the component has been 
destroyed.\n         */\n        this._destroyed = new Subject();\n        /**\n         * CSS units used for the indentation value.\n         */\n        this.indentUnits = 'px';\n        this._indent = 40;\n        this._setPadding();\n        if (_dir) {\n            _dir.change.pipe(takeUntil(this._destroyed)).subscribe((/**\n             * @return {?}\n             */\n            () => this._setPadding(true)));\n        }\n        // In Ivy the indentation binding might be set before the tree node's data has been added,\n        // which means that we'll miss the first render. We have to subscribe to changes in the\n        // data to ensure that everything is up to date.\n        _treeNode._dataChanges.subscribe((/**\n         * @return {?}\n         */\n        () => this._setPadding()));\n    }\n    /**\n     * The level of depth of the tree node. The padding will be `level * indent` pixels.\n     * @return {?}\n     */\n    get level() { return this._level; }\n    /**\n     * @param {?} value\n     * @return {?}\n     */\n    set level(value) {\n        // Set to null as the fallback value so that _setPadding can fall back to the node level if the\n        // consumer set the directive as `cdkTreeNodePadding=\"\"`. We still want to take this value if\n        // they set 0 explicitly.\n        this._level = (/** @type {?} */ (coerceNumberProperty(value, null)));\n        this._setPadding();\n    }\n    /**\n     * The indent for each level. 
Can be a number or a CSS string.\n     * Default number 40px from material design menu sub-menu spec.\n     * @return {?}\n     */\n    get indent() { return this._indent; }\n    /**\n     * @param {?} indent\n     * @return {?}\n     */\n    set indent(indent) {\n        /** @type {?} */\n        let value = indent;\n        /** @type {?} */\n        let units = 'px';\n        if (typeof indent === 'string') {\n            /** @type {?} */\n            const parts = indent.split(cssUnitPattern);\n            value = parts[0];\n            units = parts[1] || units;\n        }\n        this.indentUnits = units;\n        this._indent = coerceNumberProperty(value);\n        this._setPadding();\n    }\n    /**\n     * @return {?}\n     */\n    ngOnDestroy() {\n        this._destroyed.next();\n        this._destroyed.complete();\n    }\n    /**\n     * The padding indent value for the tree node. Returns a string with px numbers if not null.\n     * @return {?}\n     */\n    _paddingIndent() {\n        /** @type {?} */\n        const nodeLevel = (this._treeNode.data && this._tree.treeControl.getLevel)\n            ? this._tree.treeControl.getLevel(this._treeNode.data)\n            : null;\n        /** @type {?} */\n        const level = this._level == null ? nodeLevel : this._level;\n        return typeof level === 'number' ? `${level * this._indent}${this.indentUnits}` : null;\n    }\n    /**\n     * @param {?=} forceChange\n     * @return {?}\n     */\n    _setPadding(forceChange = false) {\n        /** @type {?} */\n        const padding = this._paddingIndent();\n        if (padding !== this._currentPadding || forceChange) {\n            /** @type {?} */\n            const element = this._element.nativeElement;\n            /** @type {?} */\n            const paddingProp = this._dir && this._dir.value === 'rtl' ? 'paddingRight' : 'paddingLeft';\n            /** @type {?} */\n            const resetProp = paddingProp === 'paddingLeft' ? 
'paddingRight' : 'paddingLeft';\n            this._renderer.setStyle(element, paddingProp, padding);\n            this._renderer.setStyle(element, resetProp, null);\n            this._currentPadding = padding;\n        }\n    }\n}\nCdkTreeNodePadding.decorators = [\n    { type: Directive, args: [{\n                selector: '[cdkTreeNodePadding]',\n            },] }\n];\n/** @nocollapse */\nCdkTreeNodePadding.ctorParameters = () => [\n    { type: CdkTreeNode },\n    { type: CdkTree },\n    { type: Renderer2 },\n    { type: ElementRef },\n    { type: Directionality, decorators: [{ type: Optional }] }\n];\nCdkTreeNodePadding.propDecorators = {\n    level: [{ type: Input, args: ['cdkTreeNodePadding',] }],\n    indent: [{ type: Input, args: ['cdkTreeNodePaddingIndent',] }]\n};\nif (false) {\n    /** @type {?} */\n    CdkTreeNodePadding.ngAcceptInputType_level;\n    /**\n     * Current padding value applied to the element. Used to avoid unnecessarily hitting the DOM.\n     * @type {?}\n     * @private\n     */\n    CdkTreeNodePadding.prototype._currentPadding;\n    /**\n     * Subject that emits when the component has been destroyed.\n     * @type {?}\n     * @private\n     */\n    CdkTreeNodePadding.prototype._destroyed;\n    /**\n     * CSS units used for the indentation value.\n     * @type {?}\n     */\n    CdkTreeNodePadding.prototype.indentUnits;\n    /** @type {?} */\n    CdkTreeNodePadding.prototype._level;\n    /** @type {?} */\n    CdkTreeNodePadding.prototype._indent;\n    /**\n     * @type {?}\n     * @private\n     */\n    CdkTreeNodePadding.prototype._treeNode;\n    /**\n     * @type {?}\n     * @private\n     */\n    CdkTreeNodePadding.prototype._tree;\n    /**\n     * @type {?}\n     * @private\n     */\n    CdkTreeNodePadding.prototype._renderer;\n    /**\n     * @type {?}\n     * @private\n     */\n    CdkTreeNodePadding.prototype._element;\n    /**\n     * @type {?}\n     * @private\n     */\n    CdkTreeNodePadding.prototype._dir;\n}\n"]}
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import atexit
import ctypes
import re
import os
import platform
import sys
if os.getenv("TERMUX_VERSION"):
try:
import cv2
except Exception as exc:
print("Run MegEngine python interface at Android/Termux env")
print("!!!You need build opencv-python manually!!!, by run sh:")
print(
"https://github.com/MegEngine/MegEngine/blob/master/scripts/whl/android/android_opencv_python.sh"
)
raise exc
if sys.platform == "win32":
lib_path = os.path.join(os.path.dirname(__file__), "core/lib")
dll_paths = list(filter(os.path.exists, [lib_path,]))
assert len(dll_paths) > 0
kernel32 = ctypes.WinDLL("kernel32.dll", use_last_error=True)
has_load_library_attr = hasattr(kernel32, "AddDllDirectory")
old_error_mode = kernel32.SetErrorMode(0x0001)
kernel32.LoadLibraryW.restype = ctypes.c_void_p
if has_load_library_attr:
kernel32.AddDllDirectory.restype = ctypes.c_void_p
kernel32.LoadLibraryExW.restype = ctypes.c_void_p
for dll_path in dll_paths:
if sys.version_info >= (3, 8):
os.add_dll_directory(dll_path)
elif has_load_library_attr:
res = kernel32.AddDllDirectory(dll_path)
if res is None:
err = ctypes.WinError(ctypes.get_last_error())
err.strerror += ' Error adding "{}" to the DLL search PATH.'.format(
dll_path
)
raise err
else:
print("WARN: python or OS env have some issue, may load DLL failed!!!")
import glob
dlls = glob.glob(os.path.join(lib_path, "*.dll"))
path_patched = False
for dll in dlls:
is_loaded = False
if has_load_library_attr:
res = kernel32.LoadLibraryExW(dll, None, 0x00001100)
last_error = ctypes.get_last_error()
if res is None and last_error != 126:
err = ctypes.WinError(last_error)
err.strerror += ' Error loading "{}" or one of its dependencies.'.format(
dll
)
err.strerror += " \nplease install VC runtime from: "
err.strerror += " \nhttps://docs.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-160"
raise err
elif res is not None:
is_loaded = True
if not is_loaded:
if not path_patched:
os.environ["PATH"] = ";".join(dll_paths + [os.environ["PATH"]])
path_patched = True
res = kernel32.LoadLibraryW(dll)
if res is None:
err = ctypes.WinError(ctypes.get_last_error())
err.strerror += ' Error loading "{}" or one of its dependencies.'.format(
dll
)
err.strerror += " \nplease install VC runtime from: "
err.strerror += " \nhttps://docs.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-160"
raise err
kernel32.SetErrorMode(old_error_mode)
from .core._imperative_rt.core2 import close as _close
from .core._imperative_rt.core2 import full_sync as _full_sync
from .core._imperative_rt.core2 import sync as _sync
from .core._imperative_rt.common import (
get_supported_sm_versions as _get_supported_sm_versions,
)
from .core._imperative_rt.utils import _set_fork_exec_path_for_timed_func
from .config import *
from .device import *
from .logger import enable_debug_log, get_logger, set_log_file, set_log_level
from .serialization import load, save
from .tensor import Parameter, Tensor, tensor
from .utils import comp_graph_tools as cgtools
from .utils.persistent_cache import PersistentCacheOnServer as _PersistentCacheOnServer
from .version import __version__
def _check_sm_version():
    """Warn about installed GPUs whose CUDA compute capability is not
    supported by this MegEngine build."""
    cur_logger = get_logger(__name__)
    supported = re.findall(r"sm_(\d+)", _get_supported_sm_versions())
    for dev_idx in range(get_device_count("gpu")):
        prop = get_cuda_device_property(dev_idx)
        device_sm = str(prop.major * 10 + prop.minor)
        if device_sm in supported:
            continue
        cur_logger.warning(
            "{} with CUDA capability sm_{} is not compatible with the current MegEngine installation. The current MegEngine install supports CUDA {} {}. If you want to use the {} with MegEngine, please check the instructions at https://github.com/MegEngine/MegEngine/blob/master/scripts/cmake-build/BUILD_README.md".format(
                prop.name,
                device_sm,
                "capabilities" if len(supported) > 1 else "capability",
                " ".join("sm_" + v for v in supported),
                prop.name,
            )
        )
# Warn at import time if a GPU is present that this build cannot use.
_check_sm_version()

# Tell the C++ runtime how to re-exec the timed-function helper process.
_set_fork_exec_path_for_timed_func(
    sys.executable,
    os.path.join(os.path.dirname(__file__), "utils", "_timed_func_fork_exec_entry.py"),
)
del _set_fork_exec_path_for_timed_func

# Handlers registered via _atexit() run in LIFO order, either at interpreter
# shutdown (via atexit) or through the explicit _exit() helper below.
_exit_handlers = []


def _run_exit_handlers():
    # Run in reverse registration order, then clear so handlers cannot fire
    # twice (once via _exit(), once via atexit).
    for handler in reversed(_exit_handlers):
        handler()
    _exit_handlers.clear()


atexit.register(_run_exit_handlers)


def _exit(code):
    # Flush all registered handlers before terminating the process.
    _run_exit_handlers()
    sys.exit(code)


def _atexit(handler):
    # Register *handler* to run at shutdown (LIFO).
    _exit_handlers.append(handler)


_atexit(_close)

_persistent_cache = _PersistentCacheOnServer()
_persistent_cache.reg()
_atexit(_persistent_cache.flush)
# subpackages
import megengine.amp
import megengine.autodiff
import megengine.data
import megengine.distributed
import megengine.dtr
import megengine.functional
import megengine.hub
import megengine.jit
import megengine.module
import megengine.optimizer
import megengine.quantization
import megengine.random
import megengine.utils
import megengine.traced_module
|
'''
ModernGL: High performance rendering for Python 3
'''
from .error import *
from .buffer import *
from .compute_shader import *
from .conditional_render import *
from .context import *
from .framebuffer import *
from .program import *
from .program_members import *
from .query import *
from .renderbuffer import *
from .scope import *
from .texture import *
from .texture_3d import *
from .texture_array import *
from .texture_cube import *
from .vertex_array import *
from .sampler import *
__version__ = '5.6.2'
|
// Disable jest's automocking and use the real implementation of http.js.
jest.autoMockOff();
jest.unmock('../http.js');
// NOTE(review): loaded for its side effects only; no assertion uses it yet.
const httpfs = require('../http.js');
describe('http', () => {
  // TODO: placeholder spec — currently only verifies that http.js can be
  // required without throwing. Add real assertions.
  it('does a lot of cool stuff', () => {
  });
});
|
import Collection from '../Collection';
import { deleteCachedLocRange } from '../util/misc';
/**
 * @extends {Collection<StaticTrace>}
 */
export default class StaticTraceCollection extends Collection {
  // lastStaticContextId = 0;
  // lastStaticCodeChunkId = 0;

  constructor(dp) {
    super('staticTraces', dp);
  }

  /**
   * Produce a serializable copy of `staticTrace` with the cached
   * loc-range data stripped from its `loc`.
   */
  serialize(staticTrace) {
    const serialized = Object.assign({}, staticTrace);
    deleteCachedLocRange(serialized.loc);
    return serialized;
  }

  // handleEntryAdded(staticTrace) {
  //   const {
  //     staticContextId
  //   } = staticTrace;
  //   // TODO: add new StaticCodeChunkCollection to also manage code-chunk related information, especially: `loc`
  //   if (staticContextId !== this.lastStaticContextId) {
  //     // new code chunk
  //     ++this.lastStaticCodeChunkId;
  //     this.lastStaticContextId = staticContextId;
  //   }
  //   staticTrace.staticCodeChunkId = this.lastStaticCodeChunkId;
  // }
}
|
// 3x3 board; 'x' marks an empty cell, 0/1 record each player's moves.
let choiceArray = new Array(3).fill('x').map(() => new Array(3).fill('x'));
// Total number of moves played so far (max 9).
let count = 0;
// Cell ids ("xy" strings) to highlight when a player wins.
let highlightArray = [];
/**
 * Check whether the cell at (x, y) has already been played.
 * Returns true for a duplicate (already-occupied) cell.
 *
 * @param {number} x X coordinate in the grid
 * @param {number} y Y coordinate in the grid
 */
function isDuplicateClick (x, y) {
    const occupied = choiceArray[x][y] != 'x';
    if (occupied) {
        console.log("Duplicate move x=", x , ", y=", y);
    }
    return occupied;
}
/**
 * Record a player's move in the board model.
 * Once both players have started playing (5 or more moves), check whether
 * the move completed a winning line. Returns true when there is a winner.
 *
 * @param {number} x X coordinate in the grid
 * @param {number} y Y coordinate in the grid
 * @param {number} choice value 0 or 1
 */
function updateMove (x, y, choice) {
    const outOfBounds = x > 2 || x < 0 || y > 2 || y < 0;
    const badChoice = choice < 0 || choice > 1;
    if (outOfBounds || badChoice) {
        console.log ("Move for invalid pos x=", x , ", y=", y, ", choice =", choice);
        return false;
    }
    choiceArray[x][y] = choice;
    count = count + 1;
    // A win is only possible once at least 5 moves have been made.
    if (count < 5) {
        return false;
    }
    // Row/column through the last move first; diagonals only if that failed.
    return startMatchingAround(x, y, choice) || validateDiagonalWinner(choice);
}
/**
 * Check the row and the column passing through (x, y) for a complete line
 * of `matchVal`. On success the winning cell ids are stored in
 * `highlightArray`. Returns true if either line is complete.
 *
 * @param {number} x X coordinate of the cell selected
 * @param {number} y Y coordinate of the cell selected
 * @param {*} matchVal value to be matched, 0 or 1
 */
function startMatchingAround(x, y, matchVal) {
    let match = true;
    let arr = [];
    // Row x: every cell must hold matchVal.
    for (let col = 0; col < choiceArray.length; col++) {
        arr.push(x + "" + col);
        if (choiceArray[x][col] != matchVal) {
            match = false;
            break;
        }
    }
    if (!match) {
        console.log ("lets iterate over y axis for x=", x, "y=", y);
        match = true;
        arr = [];
        // Column y: every cell must hold matchVal.
        for (let row = 0; row < choiceArray.length; row++) {
            arr.push(row + "" + y);
            if (choiceArray[row][y] != matchVal) {
                match = false;
                break;
            }
        }
    }
    if (match) {
        highlightArray = [...arr];
    }
    console.log ("Returning ", match);
    return match;
}
/**
 * Check both diagonals for a complete line of `choice`.
 * On success the winning cell ids are stored in `highlightArray`.
 * Returns true if either diagonal is complete.
 *
 * @param {number} choice option selected by user (0 or 1)
 */
function validateDiagonalWinner (choice) {
    let match = true;
    let arr = [];
    // Main diagonal (top-left to bottom-right).
    for (let d = 0; d < choiceArray.length; d++) {
        arr.push(d + "" + d);
        if (choiceArray[d][d] != choice) {
            match = false;
            break;
        }
    }
    if (!match) {
        match = true;
        arr = [];
        // Anti-diagonal (top-right to bottom-left).
        for (let r = 0, c = choiceArray.length - 1; r < choiceArray.length; r++, c--) {
            arr.push(r + "" + c);
            if (choiceArray[r][c] != choice) {
                match = false;
                break;
            }
        }
    }
    if (match) {
        highlightArray = [...arr];
    }
    console.log ("Found diagonally ", match);
    return match;
}
/**
 * Handle a click on cell (x, y). Updates the board model, shows the
 * tick/close icon for the current player, and when the move produces a
 * winner highlights the winning line and reveals the result banner.
 * No-op once the board is full or a winner has already been found.
 *
 * @param {number} x X coordinate location
 * @param {number} y Y coordinate location
 */
function toggle (x, y) {
    if (count < 9 && highlightArray[0] == null) {
        let loc = x + "" + y;
        document.getElementById(loc).style.display = "none";
        let choice = 1;
        if (!isDuplicateClick(x, y)) {
            // Even move count => first player (tick, choice 1);
            // odd => second player (close, choice 0).
            if (count % 2 == 0) {
                document.getElementById(loc + "_tick").style.display = "inline";
            } else {
                document.getElementById(loc + "_close").style.display = "inline";
                choice = 0;
            }
            // FIX: `match` was assigned without declaration, leaking a global
            // variable (and throwing a ReferenceError in strict mode).
            let match = updateMove(x, y, choice);
            if (match) {
                if (choice == 1) {
                    highlightArray.forEach(item => {
                        document.getElementById(item + "_tick").style = "border:2px solid red";
                    });
                } else {
                    highlightArray.forEach(item => {
                        document.getElementById(item + "_close").style= "border:2px solid blue";
                    });
                }
                document.getElementById("result").hidden = false;
            }
        }
    }
}
/**
 * Reload the page, resetting the game state.
 * Invoked by pressing the refresh button on the UI.
 */
function refresh () {
    return window.location.reload();
}
|
/*!
@file textstream_reader.h
@brief textstream_reader header file
@date 2004/12/17
Time: 23:20:25
@author Ryan Ginstrom
*/
#pragma once
#include "stringex.h"
/** CLASS textstream_reader.
This class is for parsing streams of characters.
It is templated to allow both wide and narrow-character streams.
In order to ensure safety the multibyte versions have to
be used for the narrow-character routines, which will compromise
speed to some extent. However, this class should still be quite fast
compared to higher-level implementations.
*/
template< class CHAR_T >
class textstream_reader
{
    typedef CHAR_T char_type ;
    typedef CHAR_T* pointer_type ;
    typedef const CHAR_T* const_pointer_type ;
    typedef textstream_reader< CHAR_T > reader_type ;
    typedef std::basic_string< CHAR_T > string_type ;

    const_pointer_type m_buffer ; ///< The buffer (not owned; the caller keeps it alive)
    const_pointer_type m_pos ;    ///< The current read position within m_buffer

public:
    typedef const CHAR_T* bookmark_type ; ///< For remembering points in buffer

    /** Copy constructor.
    Shares the same (non-owned) buffer and copies the read position.
    @param reader the reader to copy.
    */
    textstream_reader( const reader_type &reader ) :
        m_buffer( reader.m_buffer ),
        m_pos( reader.m_pos )
    {
    }
    /** Constructor.
    @param buffer A text buffer. The caller must not free this buffer until the reader is done with it.
    */
    explicit textstream_reader( const_pointer_type buffer = NULL ) :
        m_buffer( buffer ),
        m_pos( buffer )
    {
    }
    /** Get the length (in CHAR_T units) of the whole buffer.
    @return size_t The length; 0 if no buffer has been set.
    */
    size_t length()
    {
        if ( m_buffer == NULL )
            return 0 ;
        return str::generic_charlen( m_buffer ) ;
    }
    /** Get the length from the current pos to the end of the buffer.
    @return size_t The remaining length; 0 if the reader is empty.
    */
    size_t length_at_pos()
    {
        if ( this->empty() )
            return 0 ;
        return str::generic_charlen( m_pos ) ;
    }
    /** Set the buffer and rewind the read position to its start.
    The caller must not free this buffer until the reader is done with it.
    @param buffer parameter.
    */
    void set_buffer( const_pointer_type buffer )
    {
        m_pos = m_buffer = buffer ;
    }

    // assignment operators

    /** Assignment from a raw buffer; equivalent to set_buffer().
    @return reader_type& reference to self.
    @param buffer parameter.
    */
    reader_type &operator =( const_pointer_type buffer )
    {
        set_buffer( buffer ) ;
        return *this ;
    }
    /** Assignment operator. Shares the buffer and copies the read position.
    @return reader_type& reference to self.
    @param reader parameter.
    */
    reader_type &operator =( const reader_type &reader )
    {
        m_buffer = reader.m_buffer ;
        m_pos = reader.m_pos ;
        return *this ;
    }
    /** Whether there is nothing left to read (no buffer, or positioned at the
    terminating NUL).
    @return bool description.
    */
    bool empty() const
    {
        if ( m_pos == NULL )
        {
            return true ;
        }
        if ( *m_pos == char_type(0) )
        {
            return true ;
        }
        return false ;
    }
    /** Determines if the present character is a whitespace character.
    @return bool description.
    */
    bool is_space() const
    {
        return str::generic_isspace( peek() ) ;
    }
    /** Determines if the present character is a punctuation character.
    @return bool description.
    */
    bool is_punct() const
    {
        return str::generic_ispunct( peek() ) ;
    }
    /** Determines if the present character is a digit.
    @return bool description.
    */
    bool is_digit() const
    {
        return str::generic_isdigit( peek() ) ;
    }
    /** Determines if the present character is a hexadecimal digit
    (0-9, a-f, A-F).
    */
    bool is_hexint_digit() const
    {
        if ( str::generic_isdigit( peek() ) )
        {
            return true ;
        }
        switch( peek() )
        {
        case CHAR_T('a'):
        case CHAR_T('b'):
        case CHAR_T('c'):
        case CHAR_T('d'):
        case CHAR_T('e'):
        case CHAR_T('f'):
        case CHAR_T('A'):
        case CHAR_T('B'):
        case CHAR_T('C'):
        case CHAR_T('D'):
        case CHAR_T('E'):
        case CHAR_T('F'):
            return true ;
        default:
            return false ;
        }
    }
    /** Determines if the present character is the specified character.
    @return bool The answer.
    @param comp_char The character to check.
    */
    bool current_is( char_type comp_char ) const
    {
        if ( m_pos == NULL )
        {
            return false ;
        }
        return *m_pos == comp_char ;
    }
    /** Determines if the string at the current pos matches the specified string.
    Only the first strlen(comp_str) characters are compared (prefix match).
    @return bool description.
    @param comp_str A string to match. Must not be NULL (throws std::invalid_argument).
    */
    bool current_is( const_pointer_type comp_str ) const
    {
        if ( comp_str == NULL )
        {
            throw std::invalid_argument("Null pointer") ;
        }
        if ( m_pos == comp_str )
        {
            return true ;
        }
        if ( this->empty() )
        {
            return false ;
        }
        return ( 0 == str::generic_strncmp( m_pos, comp_str, str::generic_strlen( comp_str ) ) ) ;
    }
    /** Take a peek at the current character, without eating it.
    @return char_type The current character, or NUL if no buffer is set.
    */
    char_type peek( ) const
    {
        if ( m_pos == NULL )
        {
            return char_type(0) ;
        }
        else
        {
            return *m_pos ;
        }
    }
    /** Advance the specified number of steps (characters, multibyte-aware).
    @return bool Success; false once the end of the buffer is reached.
    @param steps The number of steps to advance.
    */
    bool advance( size_t steps=1 )
    {
        if ( this->empty() )
        {
            return false ;
        }
        const_pointer_type temp ;
        for ( size_t i=0 ; i<steps ; ++i )
        {
            temp = m_pos ;
            m_pos = str::generic_char_next( temp ) ;
            if ( ! m_pos )
                return false ;
            if ( ! *m_pos )
                return false ;
        }
        if ( ! m_pos ) return false ;
        return *m_pos != char_type(0) ;
    }
    /** Advance to the next character and return it.
    NOTE: unlike get(), this advances first and then returns the *new*
    current character; returns NUL if the reader is already empty.
    @return char_type The next character.
    */
    char_type nextc()
    {
        char_type c ;
        if ( this->empty() )
        {
            c = char_type(0) ;
        }
        else
        {
            m_pos = str::generic_char_next( m_pos ) ;
            c = *m_pos ;
        }
        return c ;
    }
    /** Get the current character, and advance.
    @return char_type The current character (NUL if the reader is empty).
    */
    char_type get()
    {
        char_type c ;
        get( c ) ;
        return c ;
    }
    /** Get the current character, and advance.
    @return bool Success.
    @param c The current character (set to NUL on failure).
    */
    bool get( char_type &c )
    {
        if ( this->empty() )
        {
            c = char_type(0) ;
            return false ;
        }
        c = *m_pos ;
        m_pos = str::generic_char_next( m_pos ) ;
        return true ;
    }
    /** Get the current (possibly multibyte) character, and advance.
    @return bool Success.
    @param str The current character is appended to this string.
    */
    bool get( string_type &str )
    {
        if ( this->empty() )
        {
            return false ;
        }
        const_pointer_type next_pos = str::generic_char_next( m_pos ) ;
        if ( next_pos == NULL )
        {
            return false ;
        }
        string_type current_char( m_pos, next_pos ) ;
        str.append( current_char ) ;
        m_pos = next_pos ;
        return true ;
    }
    /** Get the specified number of characters, appending them to str.
    Broken into a separate function, instead of a default param, for speed
    of the base case.
    @return bool Always true (stops silently at end of buffer).
    @param str The characters are appended to this string.
    @param num_steps How many characters to read.
    */
    bool get( string_type &str, size_t num_steps )
    {
        for( size_t i=0 ; i<num_steps ; ++i )
        {
            get( str ) ;
        }
        return true ;
    }
    /** Back up the specified number of steps (never before the buffer start).
    @return char_type The character at the new position, or NUL if already
    at the start or no buffer is set.
    @param steps How many characters to back up.
    */
    char_type unget( size_t steps = 1 )
    {
        char_type c = 0 ;
        if ( m_pos == NULL || m_pos == m_buffer )
        {
            c = char_type(0) ;
        }
        else
        {
            for ( size_t i=0 ; i<steps ; ++i )
            {
                if ( m_pos != m_buffer )
                    m_pos = str::generic_char_prev( m_buffer, m_pos ) ;
            }
            if (m_pos)
            {
                c = *m_pos;
            }
        }
        return c;
    }
    /** Get the buffer.
    @return const_pointer_type The start of the (non-owned) buffer.
    */
    const_pointer_type get_buffer()
    {
        return m_buffer ;
    }
    /** Eat (advance past) the current character if it equals c.
    @return bool Whether the character was eaten.
    @param c The character to test for.
    */
    bool eat_if( char_type c )
    {
        if( current_is( c ) )
        {
            advance() ;
            return true ;
        }
        return false ;
    }
    /** Eat until the next non-whitespace character.
    @return bool Success. False if m_pos is NULL
    */
    bool eat_whitespace()
    {
        if ( m_pos == NULL )
        {
            return false ;
        }
        while ( str::generic_isspace( *m_pos ) )
        {
            ++m_pos ; // mb spaces are only one byte, so this is safe
        }
        return true ;
    }
/** Jump to the first of the specified character.
If the function fails, no jump is made and returns false.
Set eat to true to jump past char being jumped to
@return bool description.
@param delim parameter.
@param eat parameter.
*/
bool jump_to_first_of( char_type delim, bool eat=false )
{
if ( this->empty() )
return false ;
const_pointer_type new_pos = str::generic_strchr( m_pos, delim ) ;
if ( new_pos == NULL )
return false ;
m_pos = new_pos ;
if ( eat ) m_pos = str::generic_char_next( m_pos ) ;
return true ;
}
/** Jump to the first of the specified string.
If the function fails, no jump is made and returns false.
Set eat to true to jump past string being jumped to
@return bool description.
@param delims parameter.
@param eat parameter.
*/
bool jump_to_first_of( const_pointer_type delims, bool eat=false )
{
if ( this->empty() )
return false ;
const_pointer_type new_pos = str::generic_strpbrk( m_pos, delims ) ;
if ( new_pos == NULL )
return false ;
m_pos = new_pos ;
if ( eat )
m_pos = str::generic_char_next( m_pos ) ;
return true ;
}
/** Gets the text up to the end-of-line marker (specified by user).
If the delim is not found, gets the rest of the buffer.
The pos ends up 1 beyond the characters in the line.
@return bool description.
@param str parameter.
@param '\n' parameter.
@param eat parameter.
*/
bool getline( string_type &str, char_type delim = char_type('\n'), bool eat=true )
{
if ( this->empty() )
{
str.erase() ;
return false ;
}
const_pointer_type end_pos = str::generic_strchr( m_pos, delim ) ;
return str_from_endpos( str, end_pos, eat ) ;
}
string_type getline(char_type delim = char_type('\n'), bool eat=true )
{
string_type str ;
this->getline(str, delim, eat) ;
return str ;
}
std::vector<string_type> &split(std::vector<string_type> &items,
char_type delim = char_type(' '))
{
while (! this->empty())
{
string_type item ;
this->getline(item, delim, true) ;
items.push_back(item) ;
while (this->peek() == delim)
{
this->advance() ;
}
}
return items ;
}
std::vector<string_type> &split(std::vector<string_type> &items,
const_pointer_type delims)
{
while (! this->empty())
{
string_type item ;
this->getline(item, delims, true) ;
items.push_back(item) ;
while(str::contains(delims, this->peek()) && ! this->empty())
{
this->advance() ;
}
}
return items ;
}
/** Gets the text up to the end-of-line marker (specified by user).
If the delim is not found, gets the rest of the buffer.
The pos ends up 1 beyond the characters in the line.
Any text already in the string will be erased.
@return bool true if any text was extracted; false on an empty buffer or NULL delims.
@param str Receives the extracted text.
@param delims Set of characters, any one of which terminates the line.
@param eat Whether to consume the terminating delimiter as well.
*/
bool getline( string_type &str, const_pointer_type delims, bool eat=true )
{
    ATLASSERT( delims != NULL ) ;
    if ( delims==NULL || this->empty())
    {
        str.erase() ;   // contract: str is cleared on failure
        return false ;
    }
    // A NULL end_pos makes str_from_endpos consume the rest of the buffer.
    const_pointer_type end_pos = str::generic_strpbrk( m_pos, delims ) ;
    return str_from_endpos( str, end_pos, eat ) ;
}
string_type getline_as_string(const_pointer_type delims, bool eat=true )
{
    // By-value convenience wrapper over getline(string_type&, delims, eat).
    string_type result ;
    this->getline(result, delims, eat) ;
    return result ;
}
string_type getline_delims(const_pointer_type delims, bool eat=true )
{
    // Behaves exactly like getline_as_string(); retained so existing
    // callers of either name keep working.
    string_type result ;
    this->getline(result, delims, eat) ;
    return result ;
}
string_type get_until( const_pointer_type endPos, bool eat=true )
{
    // Bookmark the entry position: on a failed search the reader is
    // restored and an empty string is returned.
    bookmark_type bookmarkStart = get_current_pos() ;
    if ( ! find( endPos, false ) )
    {
        set_pos( bookmarkStart ) ;
        return string_type() ;
    }
    bookmark_type bookmarkEnd = get_current_pos() ;
    if ( eat )
    {
        // Re-run the search with eat=true to step past the marker itself.
        find( endPos, true ) ;
    }
    // Return the span between entry point and the found marker.
    return string_type( bookmarkStart, bookmarkEnd ) ;
}
/** Set the buffer back to the beginning.
*/
void rewind()
{
    m_pos = m_buffer ;  // reset the read position to the start of the buffer
}
/** Find the specified string.
If the string is not found, the pos does not move.
@return bool Found.
@param substring The string to search for.
@param eat Whether to advance beyond the string if found.
*/
bool find( const_pointer_type substring, bool eat=false )
{
    ATLASSERT( substring != NULL ) ;
    if ( substring == NULL ) return false ;
    if ( this->empty() ) return false ;
    // Keep the old position so a failed search is side-effect free.
    bookmark_type old_pos = m_pos ;
    m_pos = str::generic_strstr( m_pos, substring ) ;
    if ( m_pos == NULL )
    {
        m_pos = old_pos ;
        return false ;
    }
    if ( eat )
    {
        // Advance past the matched substring itself.
        m_pos += str::generic_strlen( substring ) ;
    }
    return true ;
}
/** Jump the specified offset.
For textstream_reader< char >, this is in bytes, rather than characters.
To jump forward the specified number of characters, use @ref advance( size_t ).
There is no difference for wchar_t, except that unlike advance,
this function allows you to jump forwards or backwards.
@param offset The offset to jump. Can be positive or negative.
*/
void jump( int offset )
{
    // No bounds checking: the caller is responsible for keeping the
    // position inside the buffer.
    m_pos += offset ;
}
// for bookmarking positions in the buffer
/** The current pos.
@return bookmark_type The current pos.
*/
bookmark_type get_current_pos()
{
    return m_pos ;  // raw bookmark into the buffer
}
/** Set the current pos. Usually with a bookmark that was previously saved.
@param pos parameter.
*/
void set_pos( bookmark_type pos )
{
    // No validation: pos is expected to be a bookmark previously saved
    // from this reader (see get_current_pos).
    m_pos = pos ;
}
/** Get the offset of the specified bookmark_type from the beginning of the buffer.
A bit computation-intensive...
@return size_t The zero-based offset of the bookmark from the buffer start.
@param bookmark A bookmark previously obtained from get_current_pos().
*/
size_t get_offset( bookmark_type bookmark ) const
{
    size_t offset = 0 ;
    // Linear scan from the start of the buffer (O(n)); see the header
    // comment about this being computation-intensive.
    for ( offset=0 ; m_buffer+offset != bookmark ; ++offset ) ;
    return offset ;
}
/** Get the offset of the current pos from the specified offset from the beginning of the buffer.
For example, if you already know that the pos is at least 10, you can specify 10 so that
it does not count back from the beginning.
A bit computation-intensive...
@return size_t The zero-based offset of the current pos from the buffer start.
@param start_offset A known lower bound for the offset; scanning starts there.
*/
size_t get_offset( size_t start_offset = 0 )
{
    // Guard: a start_offset beyond the current position would make the
    // scan below run past the position and never terminate correctly.
    if ( m_buffer + start_offset > m_pos )
    {
        throw std::out_of_range("Invalid index") ;
    }
    size_t offset = 0 ;
    // Linear scan starting at the caller-supplied lower bound.
    for ( offset=start_offset ; m_buffer+offset != m_pos ; ++offset ) ;
    return offset ;
}
private:
/** Get the string from the buffer pos up to a specified end-pos.
For example, the position of a period or newline.
@return bool Success.
@param s Holds the result.
@param end_pos The end pos.
@param eat Whether to eat the characters.
*/
bool str_from_endpos( string_type &s, const_pointer_type end_pos, bool eat )
{
    // delim not found
    if ( end_pos == NULL || *end_pos == char_type(0) )
    {
        // Take everything that is left and mark the reader as exhausted.
        s = m_pos ;
        m_pos = NULL ;
        return true ;
    }
    // Copy [m_pos, end_pos) and leave the position on the delimiter.
    s = string_type( m_pos, end_pos ) ;
    m_pos = end_pos ;
    if ( eat ) m_pos = str::generic_char_next( m_pos ) ; // skip the delimiter
    return true ;
}
} ;
typedef textstream_reader< char > c_reader ;
typedef textstream_reader< wchar_t > wc_reader ;
|
import React from "react";
import ReactDOM from "react-dom";
import "@patternfly/react-core/dist/styles/base.css";
import OperatorWizard from "./component/operator-wizard/OperatorWizard";
document.addEventListener("DOMContentLoaded", function() {
ReactDOM.render(
React.createElement(OperatorWizard),
document.getElementById("mount")
);
});
|
import React from 'react';
import iconSvg from '../icons/normalized/music-note-bluetooth-off.svg';
function IconRender(props) {
const paths = /^\<svg [^>]+\>(.*)<\/svg>/ig.exec(iconSvg)[1]
return (
<svg
{...props}
xmlns="http://www.w3.org/2000/svg"
baseProfile="full"
viewBox="0 0 24 24"
className={`react-pretty-icons react-pretty-icons__music-note-bluetooth-off ${props.className}`}
dangerouslySetInnerHTML={{__html: paths}} />
)
}
export default IconRender
|
# all the imports
import os
import sqlite3
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
app = Flask(__name__)  # create the application instance :)
app.config.from_object(__name__)  # load config from this file , wp_sorter.py

# Load default config and override config from an environment variable.
# NOTE(review): the SECRET_KEY and admin/admin credentials below are
# development placeholders; override them via WP_SORTER_SETTINGS before
# deploying anywhere real.
app.config.update(dict(
    DATABASE=os.path.join(app.root_path, 'wp_sorter.db'),
    SECRET_KEY='fake secret key that is top secret',
    USERNAME='admin',
    PASSWORD='admin'
))
app.config.from_envvar('WP_SORTER_SETTINGS', silent=True)
# DATABASE STUFF
def get_db():
    """Return the request-scoped SQLite connection, creating it on first use.

    The connection is cached on ``g`` so repeated calls within one
    application context share a single connection.
    """
    if hasattr(g, 'sqlite_db'):
        return g.sqlite_db
    g.sqlite_db = connect_db()
    return g.sqlite_db
def connect_db():
    """Open a new connection to the configured SQLite database.

    Rows are returned as :class:`sqlite3.Row`, so columns can be accessed
    by name as well as by index.
    """
    connection = sqlite3.connect(app.config['DATABASE'])
    connection.row_factory = sqlite3.Row
    return connection
# Decorator is run automagically when the app dies
@app.teardown_appcontext
def close_db(error):
    """Close the per-request database connection, if one was opened."""
    db = getattr(g, 'sqlite_db', None)
    if db is not None:
        db.close()
def init_db():
    """Create (or recreate) the database schema by executing schema.sql."""
    db = get_db()
    with app.open_resource('schema.sql', mode='r') as f:
        db.cursor().executescript(f.read())
    db.commit()
# Exposed on the Flask CLI as `flask initdb`.
@app.cli.command('initdb')
def initdb_command():
    """Initializes the database."""
    init_db()
    print('Initialized the database.')
# VIEW FUNCTIONS
@app.route('/')
def show_entries():
    """Render the landing page.

    NOTE(review): the connection fetched here is never queried and the
    template receives no entries -- presumably a query and template
    context are still to be added; confirm intent.
    """
    db = get_db()
    return render_template('layout.html')
@app.route('/add', methods=['POST'])
def add_entry():
    """Insert a new entry from the posted form; requires a logged-in session.

    Aborts with 401 when not authenticated.  The INSERT uses `?`
    placeholders, so the form input is never interpolated into the SQL.
    """
    if not session.get('logged_in'):
        abort(401)
    db = get_db()
    db.execute('INSERT INTO entries (title, text) VALUES (?, ?)',
               [request.form['title'], request.form['text']])
    db.commit()
    flash('New entry was successfully posted')
    return redirect(url_for('show_entries'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form (GET) or validate the credentials (POST).

    NOTE(review): credentials are compared in plain text against config
    values -- acceptable for this tutorial-style app, not for production.
    """
    error = None
    if request.method == 'POST':
        if request.form['username'] != app.config['USERNAME']:
            error = 'Invalid username'
        elif request.form['password'] != app.config['PASSWORD']:
            error = 'Invalid password'
        else:
            session['logged_in'] = True
            flash('You were logged in')
            return redirect(url_for('show_entries'))
    return render_template('login.html', error=error)
@app.route('/logout')
def logout():
    """Drop the login flag and redirect back to the entry list."""
    session.pop('logged_in', None)
    flash('You were logged out')
    return redirect(url_for('show_entries'))
|
/* Price Table Server | tradekit.io
*
* @mudule: timers
*
* Copyright (c) 2019 Milen Bilyanov
* Licensed under the MIT license.
*
*/
'use strict';
// Local Imports
var logging = require('./logging');
var MODULE = 'timers';
// Logging
const log = logging.getLogger();
/** @private getTime() {{{1
*
 * Constructs a time string for debugging purposes.
*/
const getTime = () => {
var now = new Date();
var h = now.getHours().toString().padStart(2, '0');
var m = now.getMinutes().toString().padStart(2, '0');
var s = now.getSeconds().toString().padStart(2, '0');
const timeSignature = h + ':' + m + ':' + s;
return timeSignature;
};
// }}}1
/** @public Interval (name) {{{1
*
* Creates an Interval object with the required methods. The name of the
 * interval is initialized through the _name_ parameter.
*
* @param {String} name
*/
function Interval(name = 'default') {
    const CONTEXT = MODULE + '.' + 'interval';
    // Private Data
    // Counter
    let _counter = 0;
    // State Container
    // isFirstRun: true only until the first runInterval() cycle completes.
    // isRequestActive: set by callers (via setState) to suppress overlapping
    // interval tasks.  counter is read-only, backed by _counter above.
    let state = {
        isFirstRun: true,
        isRequestActive: false,
        get counter() {
            return _counter;
        }
    };
    log.info({
        context: CONTEXT,
        verbosity: 3,
        message: 'Interval initialised as [' + name + ']',
    });
    // Public Access
    let self = {};
// Private Methods
/** @private updateCounter(value) {{{2
*
* Increments the counter state.
*
* @param {Integer} value
*/
const updateCounter = function(value) {
_counter += value;
};
// }}}2
/** @private getState(item) {{{2
*
* Returns the state of the requested item.
*
* @param {String} item
*/
const getState = function(item) {
return state[item];
};
// }}}2
/** @private getNextTick(mySkip, myInterval, currentSeconds) {{{2
*
* Calculates the time to the next tick in milliseconds.
*
* @param {Number} mySkip
* @param {Number} myInterval
* @param {Number} currentSeconds
*/
const getNextTick = function(mySkip, myInterval, currentSeconds) {
let CONTEXT = MODULE + '.' + 'getNextTick';
// Local Data
let myGap, pivotIsLocal, mode, nextUpdateMilliseconds, identifier;
log.debug({
context: CONTEXT,
verbosity: 7,
message: '[{0}] Parameters: TIME <{1}>, INDEX <{2}>, SKIP (MINUTES) <{3}>, INTERVAL (SECONDS) <{4}>, ENTRY TIME (SECONDS) <{5}>'
.stringFormatter(
name,
getTime(),
getState('counter'),
mySkip,
myInterval,
currentSeconds
)
});
log.debug({
context: CONTEXT,
verbosity: 7,
message: '[{0}] State flags: IS_FIRST_RUN <{1}>, IS_REQUEST_ACTIVE <{2}>'
.stringFormatter(
name,
getState('isFirstRun').toString(),
getState('isRequestActive').toString()
)
});
// Pivot detection for our entry point.
// For cases where our entry is before the INTERVAL or right at the INTERVAL,
// we consider the pivot to be local as the temporal resolution will take place
// within the current minute. For situations where we are beyond the INTERVAL,
// we will be waiting for the start of the next minute for the resolution to be
// started.
if (currentSeconds <= myInterval) {
// Just a debug.
if (currentSeconds == myInterval) {
identifier = 'aligned';
} else {
identifier = 'ahead';
}
pivotIsLocal = true;
} else {
identifier = 'on_next_minute';
pivotIsLocal = false;
}
log.debug({
context: CONTEXT,
verbosity: 7,
message: '[{0}] Identifier: START_TIME <{1}>'
.stringFormatter(
name,
identifier
)
});
if (currentSeconds % 60 == 0) {
// We need to move $INTERVAL seconds forward.
mode = 0;
} else {
if (currentSeconds == myInterval) {
// We need to move 60 seconds forward.
mode = 1;
} else {
// We need to calculate how much we will be jumping forward.
mode = 2;
}
}
if (mode == 2) {
// mode 2
// Reset skip only if we are in the first cycle.
if (getState('isFirstRun')) {
// Need to reset skip.
log.debug({
context: CONTEXT,
verbosity: 7,
message: '[{0}] Resetting any SKIP values on the first run.'
.stringFormatter(
name
)
});
mySkip = 0;
}
if (pivotIsLocal) {
myGap = (myInterval - currentSeconds);
} else {
myGap = (60 - currentSeconds) + myInterval;
}
} else {
// mode 0 and mode 1
if (mode == 0) {
// mode 0
myGap = myInterval;
} else {
// mode 1
myGap = 60;
}
}
log.debug({
context: CONTEXT,
verbosity: 7,
message: '[{0}] Interval values: LOCAL_PIVOT <{1}>, MODE <{2}>, SKIP <{3}>, GAP (SECONDS) <{4}>'
.stringFormatter(
name,
pivotIsLocal.toString(),
mode,
mySkip,
myGap
)
});
nextUpdateMilliseconds = (myGap * 1000) + (mySkip * 60 * 1000);
return nextUpdateMilliseconds;
};
// }}}2
/** @private startInterval(timeToNextTick, skip, interval, callback) {{{2
*
* This is a recursive function and is the main timer responsible for the cycle.
*
* @param {Number} timeToNextTick
* @param {Number} skip
* @param {Number} interval
* @param {Function} callback
*/
const startInterval = function(timeToNextTick, skip, interval, callback) {
let CONTEXT = MODULE + '.' + 'startInterval';
// Plant the next run.
setTimeout(function() {
if(getState('isRequestActive')){
// Skip the interval task if previous task still active.
log.warning({
context: CONTEXT,
verbosity: 3,
message: '[{0}] Previous interval task is still active, skipping most recent request at {1}'
.stringFormatter(
name,
getTime().toString()
)
});
}else{
// Perform the interval task.
log.info({
context: CONTEXT,
verbosity: 3,
message: '[{0}] Performing interval task at {1}'
.stringFormatter(
name,
getTime().toString()
)
});
// Execute the callback.
callback();
}
log.info({
context: CONTEXT,
verbosity: 3,
message: '[{0}] Restarting interval cycle.'
.stringFormatter(
name,
)
});
// Stick to the raw parameters.
self.runInterval(skip, interval, callback);
}, timeToNextTick);
};
// }}}2
// Public Methods
/** @public setState(item, value) {{{2
*
* A public method dedicated to the control of the states.
*
* @param {String} item
* @param {String} value
*/
self.setState = function(item, value) {
state[item] = value;
};
// }}}2
/** @public runInterval(skip, interval, callback) {{{2
*
* This is the main starting point for the interval.
*
* @param {Number} skip
* @param {Number} interval
* @param {Function} callback
*/
self.runInterval = function(skip, interval, callback) {
let CONTEXT = MODULE + '.' + 'runInterval';
var date = new Date();
let myInterval = interval;
let mySkip = skip;
let currentSeconds = date.getSeconds();
// Start
updateCounter(1);
let timeToNextTick = getNextTick(mySkip, myInterval, currentSeconds);
log.info({
context: CONTEXT,
verbosity: 3,
message: '[{0}] Next interval task will be called in {1} milliseconds.'
.stringFormatter(
name,
timeToNextTick.toString()
)
});
startInterval(timeToNextTick, mySkip, myInterval, callback);
// Rest first run state flag.
if(getState('isFirstRun')){
self.setState('isFirstRun', false);
}
};
// }}}2
// Publish public end-points.
return self;
}
// }}}1
/* EXPORTS */
module.exports = {
Interval: Interval,
};
// vim: fdm=marker ts=4
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.chromeos.moblab_v1beta1.types import resources
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
    package='google.chromeos.moblab.v1beta1',
    # Every proto.Message subclass defined in this module must be listed
    # here so proto-plus registers it under the package above.
    manifest={
        'FindMostStableBuildRequest',
        'FindMostStableBuildResponse',
        'ListBuildTargetsRequest',
        'ListBuildTargetsResponse',
        'ListModelsRequest',
        'ListModelsResponse',
        'ListBuildsRequest',
        'ListBuildsResponse',
        'CheckBuildStageStatusRequest',
        'CheckBuildStageStatusResponse',
        'StageBuildRequest',
        'StageBuildResponse',
        'StageBuildMetadata',
    },
)
class FindMostStableBuildRequest(proto.Message):
    r"""Request message for finding the most stable build.

    Attributes:
        build_target (str):
            Required. The full resource name of the build
            target. For example,
            'buildTargets/octopus'.
    """
    build_target = proto.Field(
        proto.STRING,
        number=1,
    )
class FindMostStableBuildResponse(proto.Message):
    r"""Response message for finding the most stable build.

    Attributes:
        build (google.chromeos.moblab_v1beta1.types.Build):
            The most stable build.
    """
    build = proto.Field(
        proto.MESSAGE,
        number=1,
        message=resources.Build,
    )
class ListBuildTargetsRequest(proto.Message):
    r"""Request message for listing build targets.

    Attributes:
        page_size (int):
            Optional. The number of build targets to
            return in a page.
        page_token (str):
            Optional. A page token, received from a previous
            ``ListBuildTargets`` call. Provide this to retrieve the
            subsequent page.
    """
    page_size = proto.Field(
        proto.INT32,
        number=1,
    )
    page_token = proto.Field(
        proto.STRING,
        number=2,
    )
class ListBuildTargetsResponse(proto.Message):
    r"""Response message for listing build targets.

    Attributes:
        build_targets (Sequence[google.chromeos.moblab_v1beta1.types.BuildTarget]):
            The list of build targets.
        next_page_token (str):
            Token to retrieve the next page of build
            targets. If this field is omitted, there are no
            subsequent pages.
        total_size (int):
            Total number of build targets.
    """
    @property
    def raw_page(self):
        # NOTE(review): presumably consumed by the API-core pager
        # machinery, which expects the raw response here -- confirm.
        return self
    build_targets = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=resources.BuildTarget,
    )
    next_page_token = proto.Field(
        proto.STRING,
        number=2,
    )
    total_size = proto.Field(
        proto.INT32,
        number=3,
    )
class ListModelsRequest(proto.Message):
    r"""Request message for listing models.

    Attributes:
        parent (str):
            Required. The full resource name of build
            target.
        page_size (int):
            Optional. The number of models to return in a
            page.
        page_token (str):
            Optional. A page token, received from a previous
            ``ListModels`` call. Provide this to retrieve the subsequent
            page.
    """
    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size = proto.Field(
        proto.INT32,
        number=2,
    )
    page_token = proto.Field(
        proto.STRING,
        number=3,
    )
class ListModelsResponse(proto.Message):
    r"""Response message for listing models.

    Attributes:
        models (Sequence[google.chromeos.moblab_v1beta1.types.Model]):
            The list of models.
        next_page_token (str):
            Token to retrieve the next page of models. If
            this field is omitted, there are no subsequent
            pages.
        total_size (int):
            Total number of models.
    """
    @property
    def raw_page(self):
        # NOTE(review): presumably consumed by the API-core pager
        # machinery, which expects the raw response here -- confirm.
        return self
    models = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=resources.Model,
    )
    next_page_token = proto.Field(
        proto.STRING,
        number=2,
    )
    total_size = proto.Field(
        proto.INT32,
        number=3,
    )
class ListBuildsRequest(proto.Message):
    r"""Request message for listing builds.

    Attributes:
        parent (str):
            Required. The full resource name of the
            model. The model id is the same as the build
            target id for non-unified builds. For example,
            'buildTargets/octopus/models/bobba'.
        page_size (int):
            Optional. The number of builds to return in a
            page.
        page_token (str):
            Optional. A page token, received from a previous
            ``ListBuilds`` call. Provide this to retrieve the subsequent
            page.
        filter (str):
            Optional. Filter that specifies value
            constraints of fields. For example, the filter
            can be set as "filter='milestone=milestones/80'"
            to only select builds in milestone 80.
        read_mask (google.protobuf.field_mask_pb2.FieldMask):
            Optional. Read mask that specifies which Build fields to
            return. If empty, all Build fields will be returned. Valid
            fields: name, milestone, build_version. For example, if the
            read_mask is set as "read_mask='milestone'", the ListBuilds
            will return a list of Builds object with only the milestone
            field.
        group_by (google.protobuf.field_mask_pb2.FieldMask):
            Optional. The operation that groups by all the Build fields
            specified in the read mask. The group_by field should be the
            same as the read_mask field in convention of SQL.
    """
    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size = proto.Field(
        proto.INT32,
        number=2,
    )
    page_token = proto.Field(
        proto.STRING,
        number=3,
    )
    # Shadows the builtin `filter`; the name is fixed by the proto schema.
    filter = proto.Field(
        proto.STRING,
        number=4,
    )
    read_mask = proto.Field(
        proto.MESSAGE,
        number=5,
        message=field_mask_pb2.FieldMask,
    )
    group_by = proto.Field(
        proto.MESSAGE,
        number=6,
        message=field_mask_pb2.FieldMask,
    )
class ListBuildsResponse(proto.Message):
    r"""Response message for listing builds.

    Attributes:
        builds (Sequence[google.chromeos.moblab_v1beta1.types.Build]):
            The list of builds.
        next_page_token (str):
            Token to retrieve the next page of builds. If
            this field is omitted, there are no subsequent
            pages.
        total_size (int):
            Total number of builds.
    """
    @property
    def raw_page(self):
        # NOTE(review): presumably consumed by the API-core pager
        # machinery, which expects the raw response here -- confirm.
        return self
    builds = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=resources.Build,
    )
    next_page_token = proto.Field(
        proto.STRING,
        number=2,
    )
    total_size = proto.Field(
        proto.INT32,
        number=3,
    )
class CheckBuildStageStatusRequest(proto.Message):
    r"""Request message for checking if the build artifact is staged.

    Attributes:
        name (str):
            Required. The full resource name of the build
            artifact. For example,
            'buildTargets/octopus/models/bobba/builds/12607.6.0/artifacts/chromeos-
            moblab-peng-staging'.
        filter (str):
            Optional. Filter that specifies value
            constraints of fields. For example, the filter
            can be set as "filter='type=release'" to only
            check the release builds.
    """
    name = proto.Field(
        proto.STRING,
        number=1,
    )
    # Shadows the builtin `filter`; the name is fixed by the proto schema.
    filter = proto.Field(
        proto.STRING,
        number=2,
    )
class CheckBuildStageStatusResponse(proto.Message):
    r"""Response message for checking the stage status of a build
    artifact.

    Attributes:
        is_build_staged (bool):
            The status to represent if the build is
            staged or not.
        staged_build_artifact (google.chromeos.moblab_v1beta1.types.BuildArtifact):
            The staged build artifact in the destination
            bucket.
        source_build_artifact (google.chromeos.moblab_v1beta1.types.BuildArtifact):
            The source build artifact in the source
            bucket.
    """
    is_build_staged = proto.Field(
        proto.BOOL,
        number=1,
    )
    staged_build_artifact = proto.Field(
        proto.MESSAGE,
        number=2,
        message=resources.BuildArtifact,
    )
    source_build_artifact = proto.Field(
        proto.MESSAGE,
        number=3,
        message=resources.BuildArtifact,
    )
class StageBuildRequest(proto.Message):
    r"""Request message for staging a build artifact.

    Attributes:
        name (str):
            Required. The full resource name of the build
            artifact. For example,
            'buildTargets/octopus/models/bobba/builds/12607.6.0/artifacts/chromeos-
            moblab-peng-staging'.
        filter (str):
            Optional. Filter that specifies value
            constraints of fields. For example, the filter
            can be set as "filter='type=release'" to only
            check the release builds.
    """
    name = proto.Field(
        proto.STRING,
        number=1,
    )
    # Shadows the builtin `filter`; the name is fixed by the proto schema.
    filter = proto.Field(
        proto.STRING,
        number=2,
    )
class StageBuildResponse(proto.Message):
    r"""Response message for staging a build artifact.

    Attributes:
        staged_build_artifact (google.chromeos.moblab_v1beta1.types.BuildArtifact):
            The staged build in the destination bucket.
    """
    staged_build_artifact = proto.Field(
        proto.MESSAGE,
        number=1,
        message=resources.BuildArtifact,
    )
class StageBuildMetadata(proto.Message):
    r"""Metadata message for staging a build artifact.

    Attributes:
        progress_percent (float):
            Approximate percentage of progress, e.g. "50"
            means 50%.
        start_time (google.protobuf.timestamp_pb2.Timestamp):
            Build stage start time.
        end_time (google.protobuf.timestamp_pb2.Timestamp):
            Build stage end time.
    """
    progress_percent = proto.Field(
        proto.FLOAT,
        number=1,
    )
    start_time = proto.Field(
        proto.MESSAGE,
        number=2,
        message=timestamp_pb2.Timestamp,
    )
    end_time = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
__all__ = tuple(sorted(__protobuf__.manifest))
|
"use strict";function compressTemplateAttr(e){if(e){if(e.children)for(var t=0,r=e.children.length;t<r;t++){var a=e.children[t];compressTemplateAttr(a)}for(var s in e)if(templateAttrMap[s]){var p=templateAttrMap[s];e[p]=e[s],delete e[s]}}}Object.defineProperty(exports,"__esModule",{value:!0});var templateAttrMap={type:"t",attr:"a",classList:"cL",style:"s",events:"e",children:"c"};exports.compressTemplateAttr=compressTemplateAttr;
|
from .models import *
from rest_framework import serializers
from .serializer_comments import *
read_only_fields_global = (['author'])
class FilmSerializer(serializers.ModelSerializer):
    """Full serializer for film-review posts.

    Supports an optional ``fields`` kwarg (see ``__init__``) so views can
    request a trimmed-down field set.
    """
    # Computed rating; see Film.get_rate().
    rate_show = serializers.SerializerMethodField()
    author_username = serializers.ReadOnlyField(source='author.username')
    postfrom = serializers.SerializerMethodField()
    def get_postfrom(self,obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '영화리뷰'
    def get_rate_show(self, instance):
        return instance.get_rate()
    class Meta:
        model = Film
        fields = '__all__'
        read_only_fields = read_only_fields_global
    # Custom hook: lets views.py pass fields=[...] to restrict the output fields.
    def __init__(self, *args, **kwargs):
        # Don't pass the 'fields' arg up to the superclass
        fields = kwargs.pop('fields', None)
        # Instantiate the superclass normally
        super(FilmSerializer, self).__init__(*args, **kwargs)
        if fields is not None:
            # Drop any fields that are not specified in the `fields` argument.
            allowed = set(fields)
            existing = set(self.fields)
            for field_name in existing - allowed:
                self.fields.pop(field_name)
class FreeBoardSerializer(serializers.ModelSerializer, object):
    """Full serializer for free-board posts, including comments and likes.

    Requires 'request' in the serializer context (used by is_like_user).
    """
    # user = serializers.ReadOnlyField(source='user.nickname')
    get_likes = serializers.SerializerMethodField()
    tag_set = serializers.SerializerMethodField()
    author_username = serializers.ReadOnlyField(source='author.username')
    is_like_user = serializers.SerializerMethodField()
    CommentFreeBoard = CommentFreeBoardSerializer(many=True,read_only=True)
    postfrom = serializers.SerializerMethodField()
    def get_postfrom(self,obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '자유게시판'
    def get_get_likes(self, obj):
        return obj.get_likes()
    def get_tag_set(self, obj):
        return obj.extract_tag_list()
    def get_is_like_user(self, instance):
        # Whether the requesting user has liked this post.
        return instance.is_like_user(self.context['request'].user)
    class Meta:
        model = FreeBoard
        fields = ('id','hit','author_username','get_likes','created_at',
                  'updated_at','title','context','image','category',
                  'tag_set','is_like_user','like_user_set','CommentFreeBoard','postfrom',
                  )
        read_only_fields = read_only_fields_global
    # Custom hook: lets views.py pass fields=[...] to restrict the output fields.
    def __init__(self, *args, **kwargs):
        # Don't pass the 'fields' arg up to the superclass
        fields = kwargs.pop('fields', None)
        # Instantiate the superclass normally
        super(FreeBoardSerializer, self).__init__(*args, **kwargs)
        if fields is not None:
            # Drop any fields that are not specified in the `fields` argument.
            allowed = set(fields)
            existing = set(self.fields)
            for field_name in existing - allowed:
                self.fields.pop(field_name)
class FreeBoard_SubSerializer(serializers.ModelSerializer, object):
    """Compact free-board serializer for list views (no likes/comments).

    NOTE(review): 'tag_set' appears in Meta.fields without a matching
    SerializerMethodField here -- presumably resolved by a model attribute;
    confirm against the FreeBoard model.
    """
    postfrom = serializers.SerializerMethodField()
    author_username = serializers.ReadOnlyField(source='author.username')
    def get_postfrom(self,obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '자유게시판'
    class Meta:
        model = FreeBoard
        fields = ('id','hit','created_at','author_username',
                  'updated_at','title','context','image','category',
                  'tag_set','postfrom',
                  )
        read_only_fields = read_only_fields_global
class HirePostStaffSerializer(serializers.ModelSerializer):
    """Serializer for staff-recruitment posts.

    Requires 'request' in the serializer context (used by the
    is_like_user / is_applied_user method fields).
    """
    author_username = serializers.ReadOnlyField(source='author.username')
    tag_set = serializers.SerializerMethodField()
    postfrom = serializers.SerializerMethodField()
    is_like_user = serializers.SerializerMethodField()
    is_applied_user = serializers.SerializerMethodField()
    def get_is_applied_user(self, instance):
        # Whether the requesting user already applied to this posting.
        return instance.is_applied_user(self.context['request'].user)
    def get_is_like_user(self, instance):
        return instance.is_like_user(self.context['request'].user)
    def get_postfrom(self, obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '스탭 구인'
    def get_tag_set(self, obj):
        return obj.extract_tag_list()
    class Meta:
        model = HirePostStaff
        fields = ('id', 'hit', 'author_username', 'thumbs', 'created_at',
                  'updated_at', 'title', 'context', 'image', 'category',
                  'tag_set', 'like_user_set', 'payment', 'requirement', 'advantage',
                  'job_loca', 'company', 'company_loca', 'company_desc', 'deadline',
                  'company_url', 'job_position','postfrom','is_like_user','is_applied_user',
                  )
        read_only_fields = read_only_fields_global
    # Custom hook: lets views.py pass fields=[...] to restrict the output fields.
    def __init__(self, *args, **kwargs):
        # Don't pass the 'fields' arg up to the superclass
        fields = kwargs.pop('fields', None)
        # Instantiate the superclass normally
        super(HirePostStaffSerializer, self).__init__(*args, **kwargs)
        if fields is not None:
            # Drop any fields that are not specified in the `fields` argument.
            allowed = set(fields)
            existing = set(self.fields)
            for field_name in existing - allowed:
                self.fields.pop(field_name)
class HirePostActorSerializer(serializers.ModelSerializer):
    """Serializer for actor-recruitment posts.

    Requires 'request' in the serializer context (used by is_like_user).
    """
    author_username = serializers.ReadOnlyField(source='author.username')
    tag_set = serializers.SerializerMethodField()
    postfrom = serializers.SerializerMethodField()
    is_like_user = serializers.SerializerMethodField()
    def get_is_like_user(self, instance):
        return instance.is_like_user(self.context['request'].user)
    def get_postfrom(self, obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '액터 구인'
    def get_tag_set(self, obj):
        return obj.extract_tag_list()
    class Meta:
        model = HirePostActor
        fields = ('id', 'hit', 'author_username', 'thumbs', 'created_at',
                  'updated_at', 'title', 'context', 'image', 'category',
                  'tag_set', 'like_user_set', 'payment', 'requirement', 'advantage',
                  'job_loca', 'company', 'company_loca', 'company_desc', 'deadline',
                  'company_url', 'job_position','postfrom','is_like_user',
                  )
        read_only_fields = read_only_fields_global
    # Custom hook: lets views.py pass fields=[...] to restrict the output fields.
    def __init__(self, *args, **kwargs):
        # Don't pass the 'fields' arg up to the superclass
        fields = kwargs.pop('fields', None)
        # Instantiate the superclass normally
        super(HirePostActorSerializer, self).__init__(*args, **kwargs)
        if fields is not None:
            # Drop any fields that are not specified in the `fields` argument.
            allowed = set(fields)
            existing = set(self.fields)
            for field_name in existing - allowed:
                self.fields.pop(field_name)
class ResumeStaffSerializer(serializers.ModelSerializer):
    """Serializer for staff resumes; exposes all model fields."""
    author_username = serializers.ReadOnlyField(source='author.username')
    postfrom = serializers.SerializerMethodField()
    def get_postfrom(self, obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '스탭 이력서'
    class Meta:
        model = ResumeStaff
        fields = '__all__'
        read_only_fields = read_only_fields_global
class ResumeActorSerializer(serializers.ModelSerializer):
    """Serializer for actor resumes; exposes all model fields."""
    author_username = serializers.ReadOnlyField(source='author.username')
    postfrom = serializers.SerializerMethodField()
    def get_postfrom(self, obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '액터 이력서'
    class Meta:
        model = ResumeActor
        fields = '__all__'
        read_only_fields = read_only_fields_global
class QnASerializer(serializers.ModelSerializer):
    """Serializer for Q&A posts; exposes all model fields."""
    author_username = serializers.ReadOnlyField(source='author.username')
    class Meta:
        model = QnA
        fields = '__all__'
        read_only_fields = read_only_fields_global
class MyHirePostStaffSerializer(serializers.ModelSerializer):
    """Staff-recruitment serializer for the "my posts" views.

    Same field set as HirePostStaffSerializer minus the per-request
    is_like_user / is_applied_user fields, so no request context is needed.
    """
    author_username = serializers.ReadOnlyField(source='author.username')
    tag_set = serializers.SerializerMethodField()
    postfrom = serializers.SerializerMethodField()
    def get_postfrom(self, obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '스탭 구인'
    def get_tag_set(self, obj):
        return obj.extract_tag_list()
    class Meta:
        model = HirePostStaff
        fields = ('id', 'hit', 'author_username', 'thumbs', 'created_at',
                  'updated_at', 'title', 'context', 'image', 'category',
                  'tag_set', 'like_user_set', 'payment', 'requirement', 'advantage',
                  'job_loca', 'company', 'company_loca', 'company_desc', 'deadline',
                  'company_url', 'job_position','postfrom',
                  )
        read_only_fields = read_only_fields_global
class MyHirePostActorSerializer(serializers.ModelSerializer):
    """Actor-recruitment serializer for the "my posts" views.

    Same field set as HirePostActorSerializer minus the per-request
    is_like_user field, so no request context is needed.
    """
    author_username = serializers.ReadOnlyField(source='author.username')
    tag_set = serializers.SerializerMethodField()
    postfrom = serializers.SerializerMethodField()
    def get_postfrom(self, obj):
        # Board label shown to clients (user-facing string, kept verbatim).
        return '액터 구인'
    def get_tag_set(self, obj):
        return obj.extract_tag_list()
    class Meta:
        model = HirePostActor
        fields = ('id', 'hit', 'author_username', 'thumbs', 'created_at',
                  'updated_at', 'title', 'context', 'image', 'category',
                  'tag_set', 'like_user_set', 'payment', 'requirement', 'advantage',
                  'job_loca', 'company', 'company_loca', 'company_desc', 'deadline',
                  'company_url', 'job_position','postfrom',
                  )
        read_only_fields = read_only_fields_global
|
/* -*- Mode: C; tab-width: 8; c-basic-offset: 2; indent-tabs-mode: nil; -*- */
#include "rrutil.h"
#define NUM_ITERATIONS (1 << 26)
/* Burn CPU for NUM_ITERATIONS iterations.  The modulo results are summed
 * and returned so the compiler cannot discard the loop as dead code. */
int spin(void) {
  int iteration;
  int accumulator = 0;
  atomic_puts("spinning");
  for (iteration = 1; iteration < NUM_ITERATIONS; ++iteration) {
    accumulator += iteration % (1 << 20);
    accumulator += iteration % (79 * (1 << 20));
  }
  return accumulator;
}
int main(int argc, char* argv[]) {
  pid_t pid;
  int status;
  pid = fork();
  if (0 == pid) {
    /* Child: ignore SIGINT so the self-delivered signal below does not
       terminate the process; spin, then exit with the sentinel code 77. */
    signal(SIGINT, SIG_IGN);
    spin();
    kill(getpid(), SIGINT);
    return 77;
  }
  /* Parent: the child must exit *normally* (signal was ignored) with 77. */
  test_assert(pid == wait(&status));
  test_assert(WIFEXITED(status) && WEXITSTATUS(status) == 77);
  atomic_puts("EXIT-SUCCESS");
  return 0;
}
|
import matplotlib.pyplot as plt
import numpy as np
# Fixed mixing coefficient read by fun() below.
y = 0.5


def fun(b, a):
    """Evaluate ``-1.5 * a * (b*y + y) + 1.5`` for scalar (or array) b, a.

    The intermediate is computed in the same order as the original
    expression so floating-point results are bit-identical.
    """
    weighted = b * y + y
    return -1.5 * a * weighted + 1.5
# Sample fun() on a 200x200 grid covering b in (-1, 1] and a in [-1, 1),
# then report the value range and render the grid as a heat map.
a = np.zeros((200, 200))
for i in range(-100, 100):
    for j in range(-100, 100):
        # Map i, j in [-100, 100) directly onto rows/cols 0..199.
        # The original wrote a[i - 100, j - 100], which produced indices
        # in [-200, -1] and only landed on the right cells via numpy's
        # negative-index wraparound (i - 100 is congruent to i + 100
        # mod 200).  i + 100 / j + 100 is the intended, explicit index
        # and fills exactly the same cells with the same values.
        a[i + 100, j + 100] = fun(-i / 100, j / 100)
print(np.min(a), np.max(a))
plt.imshow(a, cmap='hot', interpolation='nearest')
plt.show()
|
# coding: utf-8
"""
LUSID API
# Introduction This page documents the [LUSID APIs](https://www.lusid.com/api/swagger), which allows authorised clients to query and update their data within the LUSID platform. SDKs to interact with the LUSID APIs are available in the following languages : * [C#](https://github.com/finbourne/lusid-sdk-csharp) * [Java](https://github.com/finbourne/lusid-sdk-java) * [JavaScript](https://github.com/finbourne/lusid-sdk-js) * [Python](https://github.com/finbourne/lusid-sdk-python) # Data Model The LUSID API has a relatively lightweight but extremely powerful data model. One of the goals of LUSID was not to enforce on clients a single rigid data model but rather to provide a flexible foundation onto which clients can map their own data models. The core entities in LUSID provide a minimal structure and set of relationships, and the data model can be extended using Properties. The LUSID data model is exposed through the LUSID APIs. The APIs provide access to both business objects and the meta data used to configure the systems behaviours. The key business entities are: - * **Portfolios** A portfolio is a container for transactions and holdings (a **Transaction Portfolio**) or constituents (a **Reference Portfolio**). * **Derived Portfolios**. Derived Portfolios allow Portfolios to be created based on other Portfolios, by overriding or adding specific items. * **Holdings** A Holding is a quantity of an Instrument or a balance of cash within a Portfolio. Holdings can only be adjusted via Transactions. * **Transactions** A Transaction is an economic event that occurs in a Portfolio, causing its holdings to change. * **Corporate Actions** A corporate action is a market event which occurs to an Instrument and thus applies to all portfolios which holding the instrument. Examples are stock splits or mergers. * **Constituents** A constituent is a record in a Reference Portfolio containing an Instrument and an associated weight. 
* **Instruments** An instrument represents a currency, tradable instrument or OTC contract that is attached to a transaction and a holding. * **Properties** All major entities allow additional user defined properties to be associated with them. For example, a Portfolio manager may be associated with a portfolio. Meta data includes: - * **Transaction Types** Transactions are booked with a specific transaction type. The types are client defined and are used to map the Transaction to a series of movements which update the portfolio holdings. * **Properties Types** Types of user defined properties used within the system. ## Scope All data in LUSID is segregated at the client level. Entities in LUSID are identifiable by a unique code. Every entity lives within a logical data partition known as a Scope. Scope is an identity namespace allowing two entities with the same unique code to co-exist within individual address spaces. For example, prices for equities from different vendors may be uploaded into different scopes such as `client/vendor1` and `client/vendor2`. A portfolio may then be valued using either of the price sources by referencing the appropriate scope. LUSID Clients cannot access scopes of other clients. ## Instruments LUSID has its own built-in instrument master which you can use to master your own instrument universe. Every instrument must be created with one or more unique market identifiers, such as [FIGI](https://openfigi.com/). For any non-listed instruments (eg OTCs), you can upload an instrument against a custom ID of your choosing. In addition, LUSID will allocate each instrument a unique 'LUSID instrument identifier'. The LUSID instrument identifier is what is used when uploading transactions, holdings, prices, etc. The API exposes an `instrument/lookup` endpoint which can be used to lookup these LUSID identifiers using their market identifiers. Cash can be referenced using the ISO currency code prefixed with \"`CCY_`\" e.g. 
`CCY_GBP` ## Instrument Data Instrument data can be uploaded to the system using the [Instrument Properties](#tag/InstrumentProperties) endpoint. | Field|Type|Description | | ---|---|--- | | Key|propertykey|The key of the property. This takes the format {domain}/{scope}/{code} e.g. 'Instrument/system/Name' or 'Transaction/strategy/quantsignal'. | | Value|string|The value of the property. | | EffectiveFrom|datetimeoffset|The effective datetime from which the property is valid. | | EffectiveUntil|datetimeoffset|The effective datetime until which the property is valid. If not supplied this will be valid indefinitely, potentially overwriting values with EffectiveFrom's in the future. | ## Transaction Portfolios Portfolios are the top-level entity containers within LUSID, containing transactions, corporate actions and holdings. The transactions build up the portfolio holdings on which valuations, analytics profit & loss and risk can be calculated. Properties can be associated with Portfolios to add in additional data. Portfolio properties can be changed over time, for example to allow a Portfolio Manager to be linked with a Portfolio. Additionally, portfolios can be securitised and held by other portfolios, allowing LUSID to perform \"drill-through\" into underlying fund holdings ### Derived Portfolios LUSID also allows for a portfolio to be composed of another portfolio via derived portfolios. A derived portfolio can contain its own transactions and also inherits any transactions from its parent portfolio. Any changes made to the parent portfolio are automatically reflected in derived portfolio. Derived portfolios in conjunction with scopes are a powerful construct. For example, to do pre-trade what-if analysis, a derived portfolio could be created a new namespace linked to the underlying live (parent) portfolio. Analysis can then be undertaken on the derived portfolio without affecting the live portfolio. 
### Transactions A transaction represents an economic activity against a Portfolio. Transactions are processed according to a configuration. This will tell the LUSID engine how to interpret the transaction and correctly update the holdings. LUSID comes with a set of transaction types you can use out of the box, or you can configure your own set(s) of transactions. For more details see the [LUSID Getting Started Guide for transaction configuration.](https://support.lusid.com/configuring-transaction-types) | Field|Type|Description | | ---|---|--- | | TransactionId|string|The unique identifier for the transaction. | | Type|string|The type of the transaction e.g. 'Buy', 'Sell'. The transaction type should have been pre-configured via the System Configuration API endpoint. If it hasn't been pre-configured the transaction will still be updated or inserted however you will be unable to generate the resultant holdings for the portfolio that contains this transaction as LUSID does not know how to process it. | | InstrumentIdentifiers|map|A set of instrument identifiers to use to resolve the transaction to a unique instrument. | | TransactionDate|dateorcutlabel|The date of the transaction. | | SettlementDate|dateorcutlabel|The settlement date of the transaction. | | Units|decimal|The number of units transacted in the associated instrument. | | TransactionPrice|transactionprice|The price for each unit of the transacted instrument in the transaction currency. | | TotalConsideration|currencyandamount|The total value of the transaction in the settlement currency. | | ExchangeRate|decimal|The exchange rate between the transaction and settlement currency. For example if the transaction currency is in USD and the settlement currency is in GBP this this the USD/GBP rate. | | TransactionCurrency|currency|The transaction currency. | | Properties|map|Set of unique transaction properties and associated values to store with the transaction. 
Each property must be from the 'Transaction' domain. | | CounterpartyId|string|The identifier for the counterparty of the transaction. | | Source|string|The source of the transaction. This is used to look up the appropriate transaction group set in the transaction type configuration. | From these fields, the following values can be calculated * **Transaction value in Transaction currency**: TotalConsideration / ExchangeRate * **Transaction value in Portfolio currency**: Transaction value in Transaction currency * TradeToPortfolioRate #### Example Transactions ##### A Common Purchase Example Three example transactions are shown in the table below. They represent a purchase of USD denominated IBM shares within a Sterling denominated portfolio. * The first two transactions are for separate buy and fx trades * Buying 500 IBM shares for $71,480.00 * A spot foreign exchange conversion to fund the IBM purchase. (Buy $71,480.00 for £54,846.60) * The third transaction is an alternate version of the above trades. Buying 500 IBM shares and settling directly in Sterling. 
| Column | Buy Trade | Fx Trade | Buy Trade with foreign Settlement | | ----- | ----- | ----- | ----- | | TransactionId | FBN00001 | FBN00002 | FBN00003 | | Type | Buy | FxBuy | Buy | | InstrumentIdentifiers | { \"figi\", \"BBG000BLNNH6\" } | { \"CCY\", \"CCY_USD\" } | { \"figi\", \"BBG000BLNNH6\" } | | TransactionDate | 2018-08-02 | 2018-08-02 | 2018-08-02 | | SettlementDate | 2018-08-06 | 2018-08-06 | 2018-08-06 | | Units | 500 | 71480 | 500 | | TransactionPrice | 142.96 | 1 | 142.96 | | TradeCurrency | USD | USD | USD | | ExchangeRate | 1 | 0.7673 | 0.7673 | | TotalConsideration.Amount | 71480.00 | 54846.60 | 54846.60 | | TotalConsideration.Currency | USD | GBP | GBP | | Trade/default/TradeToPortfolioRate* | 0.7673 | 0.7673 | 0.7673 | [* This is a property field] ##### A Forward FX Example LUSID has a flexible transaction modelling system, meaning there are a number of different ways of modelling forward fx trades. The default LUSID transaction types are FwdFxBuy and FwdFxSell. Using these transaction types, LUSID will generate two holdings for each Forward FX trade, one for each currency in the trade. 
An example Forward Fx trade to sell GBP for USD in a JPY-denominated portfolio is shown below: | Column | Forward 'Sell' Trade | Notes | | ----- | ----- | ---- | | TransactionId | FBN00004 | | | Type | FwdFxSell | | | InstrumentIdentifiers | { \"Instrument/default/Currency\", \"GBP\" } | | | TransactionDate | 2018-08-02 | | | SettlementDate | 2019-02-06 | Six month forward | | Units | 10000.00 | Units of GBP | | TransactionPrice | 1 | | | TradeCurrency | GBP | Currency being sold | | ExchangeRate | 1.3142 | Agreed rate between GBP and USD | | TotalConsideration.Amount | 13142.00 | Amount in the settlement currency, USD | | TotalConsideration.Currency | USD | Settlement currency | | Trade/default/TradeToPortfolioRate | 142.88 | Rate between trade currency, GBP and portfolio base currency, JPY | Please note that exactly the same economic behaviour could be modelled using the FwdFxBuy Transaction Type with the amounts and rates reversed. ### Holdings A holding represents a position in an instrument or cash on a given date. | Field|Type|Description | | ---|---|--- | | InstrumentUid|string|The unqiue Lusid Instrument Id (LUID) of the instrument that the holding is in. | | SubHoldingKeys|map|The sub-holding properties which identify the holding. Each property will be from the 'Transaction' domain. These are configured when a transaction portfolio is created. | | Properties|map|The properties which have been requested to be decorated onto the holding. These will be from the 'Instrument' or 'Holding' domain. | | HoldingType|string|The type of the holding e.g. Position, Balance, CashCommitment, Receivable, ForwardFX etc. | | Units|decimal|The total number of units of the holding. | | SettledUnits|decimal|The total number of settled units of the holding. | | Cost|currencyandamount|The total cost of the holding in the transaction currency. | | CostPortfolioCcy|currencyandamount|The total cost of the holding in the portfolio currency. 
| | Transaction|transaction|The transaction associated with an unsettled holding. | ## Corporate Actions Corporate actions are represented within LUSID in terms of a set of instrument-specific 'transitions'. These transitions are used to specify the participants of the corporate action, and the effect that the corporate action will have on holdings in those participants. ### Corporate Action | Field|Type|Description | | ---|---|--- | | CorporateActionCode|code|The unique identifier of this corporate action | | Description|string| | | AnnouncementDate|datetimeoffset|The announcement date of the corporate action | | ExDate|datetimeoffset|The ex date of the corporate action | | RecordDate|datetimeoffset|The record date of the corporate action | | PaymentDate|datetimeoffset|The payment date of the corporate action | | Transitions|corporateactiontransition[]|The transitions that result from this corporate action | ### Transition | Field|Type|Description | | ---|---|--- | | InputTransition|corporateactiontransitioncomponent|Indicating the basis of the corporate action - which security and how many units | | OutputTransitions|corporateactiontransitioncomponent[]|What will be generated relative to the input transition | ### Example Corporate Action Transitions #### A Dividend Action Transition In this example, for each share of IBM, 0.20 units (or 20 pence) of GBP are generated. | Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"ccy\" : \"CCY_GBP\" } | | Units Factor | 1 | 0.20 | | Cost Factor | 1 | 0 | #### A Split Action Transition In this example, for each share of IBM, we end up with 2 units (2 shares) of IBM, with total value unchanged. 
| Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | | Units Factor | 1 | 2 | | Cost Factor | 1 | 1 | #### A Spinoff Action Transition In this example, for each share of IBM, we end up with 1 unit (1 share) of IBM and 3 units (3 shares) of Celestica, with 85% of the value remaining on the IBM share, and 5% in each Celestica share (15% total). | Column | Input Transition | Output Transition 1 | Output Transition 2 | | ----- | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000HBGRF3\" } | | Units Factor | 1 | 1 | 3 | | Cost Factor | 1 | 0.85 | 0.15 | ## Reference Portfolios Reference portfolios are portfolios that contain constituents with weights. They are designed to represent entities such as indices and benchmarks. ### Constituents | Field|Type|Description | | ---|---|--- | | InstrumentIdentifiers|map|Unique instrument identifiers | | InstrumentUid|string|LUSID's internal unique instrument identifier, resolved from the instrument identifiers | | Currency|decimal| | | Weight|decimal| | | FloatingWeight|decimal| | ## Portfolio Groups Portfolio groups allow the construction of a hierarchy from portfolios and groups. Portfolio operations on the group are executed on an aggregated set of portfolios in the hierarchy. For example: * Global Portfolios _(group)_ * APAC _(group)_ * Hong Kong _(portfolio)_ * Japan _(portfolio)_ * Europe _(group)_ * France _(portfolio)_ * Germany _(portfolio)_ * UK _(portfolio)_ In this example **Global Portfolios** is a group that consists of an aggregate of **Hong Kong**, **Japan**, **France**, **Germany** and **UK** portfolios. ## Properties Properties are key-value pairs that can be applied to any entity within a domain (where a domain is `trade`, `portfolio`, `security` etc). 
Properties must be defined before use with a `PropertyDefinition` and can then subsequently be added to entities. ## Schema A detailed description of the entities used by the API and parameters for endpoints which take a JSON document can be retrieved via the `schema` endpoint. ## Meta data The following headers are returned on all responses from LUSID | Name | Purpose | | --- | --- | | lusid-meta-duration | Duration of the request | | lusid-meta-success | Whether or not LUSID considered the request to be successful | | lusid-meta-requestId | The unique identifier for the request | | lusid-schema-url | Url of the schema for the data being returned | | lusid-property-schema-url | Url of the schema for any properties | # Error Codes | Code|Name|Description | | ---|---|--- | | <a name=\"-10\">-10</a>|Server Configuration Error| | | <a name=\"-1\">-1</a>|Unknown error|An unexpected error was encountered on our side. | | <a name=\"102\">102</a>|Version Not Found| | | <a name=\"103\">103</a>|Api Rate Limit Violation| | | <a name=\"104\">104</a>|Instrument Not Found| | | <a name=\"105\">105</a>|Property Not Found| | | <a name=\"106\">106</a>|Portfolio Recursion Depth| | | <a name=\"108\">108</a>|Group Not Found| | | <a name=\"109\">109</a>|Portfolio Not Found| | | <a name=\"110\">110</a>|Property Schema Not Found| | | <a name=\"111\">111</a>|Portfolio Ancestry Not Found| | | <a name=\"112\">112</a>|Portfolio With Id Already Exists| | | <a name=\"113\">113</a>|Orphaned Portfolio| | | <a name=\"119\">119</a>|Missing Base Claims| | | <a name=\"121\">121</a>|Property Not Defined| | | <a name=\"122\">122</a>|Cannot Delete System Property| | | <a name=\"123\">123</a>|Cannot Modify Immutable Property Field| | | <a name=\"124\">124</a>|Property Already Exists| | | <a name=\"125\">125</a>|Invalid Property Life Time| | | <a name=\"126\">126</a>|Property Constraint Style Excludes Properties| | | <a name=\"127\">127</a>|Cannot Modify Default Data Type| | | <a 
name=\"128\">128</a>|Group Already Exists| | | <a name=\"129\">129</a>|No Such Data Type| | | <a name=\"130\">130</a>|Undefined Value For Data Type| | | <a name=\"131\">131</a>|Unsupported Value Type Defined On Data Type| | | <a name=\"132\">132</a>|Validation Error| | | <a name=\"133\">133</a>|Loop Detected In Group Hierarchy| | | <a name=\"134\">134</a>|Undefined Acceptable Values| | | <a name=\"135\">135</a>|Sub Group Already Exists| | | <a name=\"138\">138</a>|Price Source Not Found| | | <a name=\"139\">139</a>|Analytic Store Not Found| | | <a name=\"141\">141</a>|Analytic Store Already Exists| | | <a name=\"143\">143</a>|Client Instrument Already Exists| | | <a name=\"144\">144</a>|Duplicate In Parameter Set| | | <a name=\"147\">147</a>|Results Not Found| | | <a name=\"148\">148</a>|Order Field Not In Result Set| | | <a name=\"149\">149</a>|Operation Failed| | | <a name=\"150\">150</a>|Elastic Search Error| | | <a name=\"151\">151</a>|Invalid Parameter Value| | | <a name=\"153\">153</a>|Command Processing Failure| | | <a name=\"154\">154</a>|Entity State Construction Failure| | | <a name=\"155\">155</a>|Entity Timeline Does Not Exist| | | <a name=\"156\">156</a>|Concurrency Conflict Failure| | | <a name=\"157\">157</a>|Invalid Request| | | <a name=\"158\">158</a>|Event Publish Unknown| | | <a name=\"159\">159</a>|Event Query Failure| | | <a name=\"160\">160</a>|Blob Did Not Exist| | | <a name=\"162\">162</a>|Sub System Request Failure| | | <a name=\"163\">163</a>|Sub System Configuration Failure| | | <a name=\"165\">165</a>|Failed To Delete| | | <a name=\"166\">166</a>|Upsert Client Instrument Failure| | | <a name=\"167\">167</a>|Illegal As At Interval| | | <a name=\"168\">168</a>|Illegal Bitemporal Query| | | <a name=\"169\">169</a>|Invalid Alternate Id| | | <a name=\"170\">170</a>|Cannot Add Source Portfolio Property Explicitly| | | <a name=\"171\">171</a>|Entity Already Exists In Group| | | <a name=\"173\">173</a>|Entity With Id Already Exists| | | <a 
name=\"174\">174</a>|Derived Portfolio Details Do Not Exist| | | <a name=\"176\">176</a>|Portfolio With Name Already Exists| | | <a name=\"177\">177</a>|Invalid Transactions| | | <a name=\"178\">178</a>|Reference Portfolio Not Found| | | <a name=\"179\">179</a>|Duplicate Id| | | <a name=\"180\">180</a>|Command Retrieval Failure| | | <a name=\"181\">181</a>|Data Filter Application Failure| | | <a name=\"182\">182</a>|Search Failed| | | <a name=\"183\">183</a>|Movements Engine Configuration Key Failure| | | <a name=\"184\">184</a>|Fx Rate Source Not Found| | | <a name=\"185\">185</a>|Accrual Source Not Found| | | <a name=\"186\">186</a>|Access Denied| | | <a name=\"187\">187</a>|Invalid Identity Token| | | <a name=\"188\">188</a>|Invalid Request Headers| | | <a name=\"189\">189</a>|Price Not Found| | | <a name=\"190\">190</a>|Invalid Sub Holding Keys Provided| | | <a name=\"191\">191</a>|Duplicate Sub Holding Keys Provided| | | <a name=\"192\">192</a>|Cut Definition Not Found| | | <a name=\"193\">193</a>|Cut Definition Invalid| | | <a name=\"194\">194</a>|Time Variant Property Deletion Date Unspecified| | | <a name=\"195\">195</a>|Perpetual Property Deletion Date Specified| | | <a name=\"196\">196</a>|Time Variant Property Upsert Date Unspecified| | | <a name=\"197\">197</a>|Perpetual Property Upsert Date Specified| | | <a name=\"200\">200</a>|Invalid Unit For Data Type| | | <a name=\"201\">201</a>|Invalid Type For Data Type| | | <a name=\"202\">202</a>|Invalid Value For Data Type| | | <a name=\"203\">203</a>|Unit Not Defined For Data Type| | | <a name=\"204\">204</a>|Units Not Supported On Data Type| | | <a name=\"205\">205</a>|Cannot Specify Units On Data Type| | | <a name=\"206\">206</a>|Unit Schema Inconsistent With Data Type| | | <a name=\"207\">207</a>|Unit Definition Not Specified| | | <a name=\"208\">208</a>|Duplicate Unit Definitions Specified| | | <a name=\"209\">209</a>|Invalid Units Definition| | | <a name=\"210\">210</a>|Invalid Instrument Identifier 
Unit| | | <a name=\"211\">211</a>|Holdings Adjustment Does Not Exist| | | <a name=\"212\">212</a>|Could Not Build Excel Url| | | <a name=\"213\">213</a>|Could Not Get Excel Version| | | <a name=\"214\">214</a>|Instrument By Code Not Found| | | <a name=\"215\">215</a>|Entity Schema Does Not Exist| | | <a name=\"216\">216</a>|Feature Not Supported On Portfolio Type| | | <a name=\"217\">217</a>|Quote Not Found| | | <a name=\"218\">218</a>|Invalid Quote Identifier| | | <a name=\"219\">219</a>|Invalid Metric For Data Type| | | <a name=\"220\">220</a>|Invalid Instrument Definition| | | <a name=\"221\">221</a>|Instrument Upsert Failure| | | <a name=\"222\">222</a>|Reference Portfolio Request Not Supported| | | <a name=\"223\">223</a>|Transaction Portfolio Request Not Supported| | | <a name=\"224\">224</a>|Invalid Property Value Assignment| | | <a name=\"230\">230</a>|Transaction Type Not Found| | | <a name=\"231\">231</a>|Transaction Type Duplication| | | <a name=\"232\">232</a>|Portfolio Does Not Exist At Given Date| | | <a name=\"233\">233</a>|Query Parser Failure| | | <a name=\"234\">234</a>|Duplicate Constituent| | | <a name=\"235\">235</a>|Unresolved Instrument Constituent| | | <a name=\"236\">236</a>|Unresolved Instrument In Transition| | | <a name=\"237\">237</a>|Missing Side Definitions| | | <a name=\"299\">299</a>|Invalid Recipe| | | <a name=\"300\">300</a>|Missing Recipe| | | <a name=\"301\">301</a>|Dependencies| | | <a name=\"304\">304</a>|Portfolio Preprocess Failure| | | <a name=\"310\">310</a>|Valuation Engine Failure| | | <a name=\"311\">311</a>|Task Factory Failure| | | <a name=\"312\">312</a>|Task Evaluation Failure| | | <a name=\"313\">313</a>|Task Generation Failure| | | <a name=\"314\">314</a>|Engine Configuration Failure| | | <a name=\"315\">315</a>|Model Specification Failure| | | <a name=\"320\">320</a>|Market Data Key Failure| | | <a name=\"321\">321</a>|Market Resolver Failure| | | <a name=\"322\">322</a>|Market Data Failure| | | <a 
name=\"330\">330</a>|Curve Failure| | | <a name=\"331\">331</a>|Volatility Surface Failure| | | <a name=\"332\">332</a>|Volatility Cube Failure| | | <a name=\"350\">350</a>|Instrument Failure| | | <a name=\"351\">351</a>|Cash Flows Failure| | | <a name=\"352\">352</a>|Reference Data Failure| | | <a name=\"360\">360</a>|Aggregation Failure| | | <a name=\"361\">361</a>|Aggregation Measure Failure| | | <a name=\"370\">370</a>|Result Retrieval Failure| | | <a name=\"371\">371</a>|Result Processing Failure| | | <a name=\"372\">372</a>|Vendor Result Processing Failure| | | <a name=\"373\">373</a>|Vendor Result Mapping Failure| | | <a name=\"374\">374</a>|Vendor Library Unauthorised| | | <a name=\"375\">375</a>|Vendor Connectivity Error| | | <a name=\"376\">376</a>|Vendor Interface Error| | | <a name=\"377\">377</a>|Vendor Pricing Failure| | | <a name=\"378\">378</a>|Vendor Translation Failure| | | <a name=\"379\">379</a>|Vendor Key Mapping Failure| | | <a name=\"380\">380</a>|Vendor Reflection Failure| | | <a name=\"390\">390</a>|Attempt To Upsert Duplicate Quotes| | | <a name=\"391\">391</a>|Corporate Action Source Does Not Exist| | | <a name=\"392\">392</a>|Corporate Action Source Already Exists| | | <a name=\"393\">393</a>|Instrument Identifier Already In Use| | | <a name=\"394\">394</a>|Properties Not Found| | | <a name=\"395\">395</a>|Batch Operation Aborted| | | <a name=\"400\">400</a>|Invalid Iso4217 Currency Code| | | <a name=\"401\">401</a>|Cannot Assign Instrument Identifier To Currency| | | <a name=\"402\">402</a>|Cannot Assign Currency Identifier To Non Currency| | | <a name=\"403\">403</a>|Currency Instrument Cannot Be Deleted| | | <a name=\"404\">404</a>|Currency Instrument Cannot Have Economic Definition| | | <a name=\"405\">405</a>|Currency Instrument Cannot Have Lookthrough Portfolio| | | <a name=\"406\">406</a>|Cannot Create Currency Instrument With Multiple Identifiers| | | <a name=\"407\">407</a>|Specified Currency Is Undefined| | | <a 
name=\"410\">410</a>|Index Does Not Exist| | | <a name=\"411\">411</a>|Sort Field Does Not Exist| | | <a name=\"413\">413</a>|Negative Pagination Parameters| | | <a name=\"414\">414</a>|Invalid Search Syntax| | | <a name=\"415\">415</a>|Filter Execution Timeout| | | <a name=\"420\">420</a>|Side Definition Inconsistent| | | <a name=\"450\">450</a>|Invalid Quote Access Metadata Rule| | | <a name=\"451\">451</a>|Access Metadata Not Found| | | <a name=\"452\">452</a>|Invalid Access Metadata Identifier| | | <a name=\"460\">460</a>|Standard Resource Not Found| | | <a name=\"461\">461</a>|Standard Resource Conflict| | | <a name=\"462\">462</a>|Calendar Not Found| | | <a name=\"463\">463</a>|Date In A Calendar Not Found| | | <a name=\"464\">464</a>|Invalid Date Source Data| | | <a name=\"465\">465</a>|Invalid Timezone| | | <a name=\"601\">601</a>|Person Identifier Already In Use| | | <a name=\"602\">602</a>|Person Not Found| | | <a name=\"603\">603</a>|Cannot Set Identifier| | | <a name=\"617\">617</a>|Invalid Recipe Specification In Request| | | <a name=\"618\">618</a>|Inline Recipe Deserialisation Failure| | | <a name=\"619\">619</a>|Identifier Types Not Set For Entity| | | <a name=\"620\">620</a>|Cannot Delete All Client Defined Identifiers| | | <a name=\"650\">650</a>|The Order requested was not found.| | | <a name=\"654\">654</a>|The Allocation requested was not found.| | | <a name=\"655\">655</a>|Cannot build the fx forward target with the given holdings.| | | <a name=\"656\">656</a>|Group does not contain expected entities.| | | <a name=\"667\">667</a>|Relation definition already exists| | | <a name=\"673\">673</a>|Missing entitlements for entities in Group| | | <a name=\"674\">674</a>|Next Best Action not found| | | <a name=\"676\">676</a>|Relation definition not defined| | | <a name=\"677\">677</a>|Invalid entity identifier for relation| | | <a name=\"681\">681</a>|Sorting by specified field not supported|One or more of the provided fields to order by were either 
invalid or not supported. | | <a name=\"682\">682</a>|Too many fields to sort by|The number of fields to sort the data by exceeds the number allowed by the endpoint | | <a name=\"684\">684</a>|Sequence Not Found| | | <a name=\"685\">685</a>|Sequence Already Exists| | | <a name=\"686\">686</a>|Non-cycling sequence has been exhausted| | | <a name=\"687\">687</a>|Legal Entity Identifier Already In Use| | | <a name=\"688\">688</a>|Legal Entity Not Found| | | <a name=\"689\">689</a>|The supplied pagination token is invalid| | | <a name=\"690\">690</a>|Property Type Is Not Supported| | | <a name=\"691\">691</a>|Multiple Tax-lots For Currency Type Is Not Supported| | # noqa: E501
The version of the OpenAPI document: 0.11.2275
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class DataType(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
      required_map (dict): The key is attribute name
                           and the value is whether it is 'required' or 'optional'.
    """
    # Python attribute name -> declared OpenAPI type, used by the SDK's
    # (de)serialisation machinery.
    openapi_types = {
        'href': 'str',
        'type_value_range': 'str',
        'id': 'ResourceId',
        'display_name': 'str',
        'description': 'str',
        'value_type': 'str',
        'acceptable_values': 'list[str]',
        'unit_schema': 'str',
        'acceptable_units': 'list[IUnitDefinitionDto]',
        'links': 'list[Link]'
    }

    # Python attribute name -> JSON key in the API payload (camelCase).
    attribute_map = {
        'href': 'href',
        'type_value_range': 'typeValueRange',
        'id': 'id',
        'display_name': 'displayName',
        'description': 'description',
        'value_type': 'valueType',
        'acceptable_values': 'acceptableValues',
        'unit_schema': 'unitSchema',
        'acceptable_units': 'acceptableUnits',
        'links': 'links'
    }

    # Python attribute name -> 'required' | 'optional' per the API schema;
    # the property setters below enforce the 'required' entries.
    required_map = {
        'href': 'optional',
        'type_value_range': 'required',
        'id': 'required',
        'display_name': 'required',
        'description': 'required',
        'value_type': 'required',
        'acceptable_values': 'optional',
        'unit_schema': 'optional',
        'acceptable_units': 'optional',
        'links': 'optional'
    }
def __init__(self, href=None, type_value_range=None, id=None, display_name=None, description=None, value_type=None, acceptable_values=None, unit_schema=None, acceptable_units=None, links=None):  # noqa: E501
    """
    DataType - a model defined in OpenAPI

    :param href:
    :type href: str
    :param type_value_range: The available values are: Open, Closed (required)
    :type type_value_range: str
    :param id: (required)
    :type id: lusid.ResourceId
    :param display_name: (required)
    :type display_name: str
    :param description: (required)
    :type description: str
    :param value_type: The available values are: String, Int, Decimal, DateTime, Boolean, Map, List, PropertyArray, Percentage, Code, Id, Uri, CurrencyAndAmount, TradePrice, Currency, MetricValue, ResourceId, ResultValue, CutLocalTime, DateOrCutLabel (required)
    :type value_type: str
    :param acceptable_values:
    :type acceptable_values: list[str]
    :param unit_schema: The available values are: NoUnits, Basic, Iso4217Currency
    :type unit_schema: str
    :param acceptable_units:
    :type acceptable_units: list[lusid.IUnitDefinitionDto]
    :param links:
    :type links: list[lusid.Link]
    """  # noqa: E501
    # Create every private backing field up front so the property setters
    # below always have a slot to write into.
    for attr in ('href', 'type_value_range', 'id', 'display_name',
                 'description', 'value_type', 'acceptable_values',
                 'unit_schema', 'acceptable_units', 'links'):
        setattr(self, '_' + attr, None)
    self.discriminator = None

    # Route every value through its property setter so the generated
    # validation (required-field and enum checks) runs on construction.
    self.href = href
    self.type_value_range = type_value_range
    self.id = id
    self.display_name = display_name
    self.description = description
    self.value_type = value_type
    self.acceptable_values = acceptable_values
    # unit_schema is optional: only assign when supplied, matching the
    # generated behaviour (its setter may reject None).
    if unit_schema is not None:
        self.unit_schema = unit_schema
    self.acceptable_units = acceptable_units
    self.links = links
@property
def href(self):
    """Gets the href of this DataType.  # noqa: E501

    # NOTE(review): presumably the canonical URL of this DataType
    # resource — no validation is applied here; confirm against the API.

    :return: The href of this DataType.  # noqa: E501
    :rtype: str
    """
    return self._href

@href.setter
def href(self, href):
    """Sets the href of this DataType.

    Optional per required_map, so None is accepted unchecked.

    :param href: The href of this DataType.  # noqa: E501
    :type: str
    """
    self._href = href
@property
def type_value_range(self):
"""Gets the type_value_range of this DataType. # noqa: E501
The available values are: Open, Closed # noqa: E501
:return: The type_value_range of this DataType. # noqa: E501
:rtype: str
"""
return self._type_value_range
@type_value_range.setter
def type_value_range(self, type_value_range):
"""Sets the type_value_range of this DataType.
The available values are: Open, Closed # noqa: E501
:param type_value_range: The type_value_range of this DataType. # noqa: E501
:type: str
"""
if type_value_range is None:
raise ValueError("Invalid value for `type_value_range`, must not be `None`") # noqa: E501
allowed_values = ["Open", "Closed"] # noqa: E501
if type_value_range not in allowed_values:
raise ValueError(
"Invalid value for `type_value_range` ({0}), must be one of {1}" # noqa: E501
.format(type_value_range, allowed_values)
)
self._type_value_range = type_value_range
    @property
    def id(self):
        """Gets the id of this DataType.  # noqa: E501
        :return: The id of this DataType.  # noqa: E501
        :rtype: ResourceId
        """
        return self._id
    @id.setter
    def id(self, id):
        """Sets the id of this DataType.
        id is a required field, so ``None`` is rejected.
        :param id: The id of this DataType.  # noqa: E501
        :type: ResourceId
        :raises ValueError: if ``id`` is ``None``.
        """
        if id is None:
            raise ValueError("Invalid value for `id`, must not be `None`")  # noqa: E501
        self._id = id
    @property
    def display_name(self):
        """Gets the display_name of this DataType.  # noqa: E501
        :return: The display_name of this DataType.  # noqa: E501
        :rtype: str
        """
        return self._display_name
    @display_name.setter
    def display_name(self, display_name):
        """Sets the display_name of this DataType.
        display_name is a required field, so ``None`` is rejected.
        :param display_name: The display_name of this DataType.  # noqa: E501
        :type: str
        :raises ValueError: if ``display_name`` is ``None``.
        """
        if display_name is None:
            raise ValueError("Invalid value for `display_name`, must not be `None`")  # noqa: E501
        self._display_name = display_name
@property
def description(self):
"""Gets the description of this DataType. # noqa: E501
:return: The description of this DataType. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this DataType.
:param description: The description of this DataType. # noqa: E501
:type: str
"""
if description is None:
raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501
self._description = description
    @property
    def value_type(self):
        """Gets the value_type of this DataType.  # noqa: E501
        The available values are: String, Int, Decimal, DateTime, Boolean, Map, List, PropertyArray, Percentage, Code, Id, Uri, CurrencyAndAmount, TradePrice, Currency, MetricValue, ResourceId, ResultValue, CutLocalTime, DateOrCutLabel  # noqa: E501
        :return: The value_type of this DataType.  # noqa: E501
        :rtype: str
        """
        return self._value_type
    @value_type.setter
    def value_type(self, value_type):
        """Sets the value_type of this DataType.
        The available values are: String, Int, Decimal, DateTime, Boolean, Map, List, PropertyArray, Percentage, Code, Id, Uri, CurrencyAndAmount, TradePrice, Currency, MetricValue, ResourceId, ResultValue, CutLocalTime, DateOrCutLabel  # noqa: E501
        :param value_type: The value_type of this DataType.  # noqa: E501
        :type: str
        :raises ValueError: if ``value_type`` is ``None`` or not an allowed value.
        """
        if value_type is None:
            raise ValueError("Invalid value for `value_type`, must not be `None`")  # noqa: E501
        # this list mirrors the enum declared by the API specification
        allowed_values = ["String", "Int", "Decimal", "DateTime", "Boolean", "Map", "List", "PropertyArray", "Percentage", "Code", "Id", "Uri", "CurrencyAndAmount", "TradePrice", "Currency", "MetricValue", "ResourceId", "ResultValue", "CutLocalTime", "DateOrCutLabel"]  # noqa: E501
        if value_type not in allowed_values:
            raise ValueError(
                "Invalid value for `value_type` ({0}), must be one of {1}"  # noqa: E501
                .format(value_type, allowed_values)
            )
        self._value_type = value_type
    @property
    def acceptable_values(self):
        """Gets the acceptable_values of this DataType.  # noqa: E501
        :return: The acceptable_values of this DataType.  # noqa: E501
        :rtype: list[str]
        """
        return self._acceptable_values
    @acceptable_values.setter
    def acceptable_values(self, acceptable_values):
        """Sets the acceptable_values of this DataType.
        acceptable_values is optional, so no validation is performed.
        :param acceptable_values: The acceptable_values of this DataType.  # noqa: E501
        :type: list[str]
        """
        self._acceptable_values = acceptable_values
@property
def unit_schema(self):
"""Gets the unit_schema of this DataType. # noqa: E501
The available values are: NoUnits, Basic, Iso4217Currency # noqa: E501
:return: The unit_schema of this DataType. # noqa: E501
:rtype: str
"""
return self._unit_schema
@unit_schema.setter
def unit_schema(self, unit_schema):
"""Sets the unit_schema of this DataType.
The available values are: NoUnits, Basic, Iso4217Currency # noqa: E501
:param unit_schema: The unit_schema of this DataType. # noqa: E501
:type: str
"""
allowed_values = ["NoUnits", "Basic", "Iso4217Currency"] # noqa: E501
if unit_schema not in allowed_values:
raise ValueError(
"Invalid value for `unit_schema` ({0}), must be one of {1}" # noqa: E501
.format(unit_schema, allowed_values)
)
self._unit_schema = unit_schema
    @property
    def acceptable_units(self):
        """Gets the acceptable_units of this DataType.  # noqa: E501
        :return: The acceptable_units of this DataType.  # noqa: E501
        :rtype: list[IUnitDefinitionDto]
        """
        return self._acceptable_units
    @acceptable_units.setter
    def acceptable_units(self, acceptable_units):
        """Sets the acceptable_units of this DataType.
        acceptable_units is optional, so no validation is performed.
        :param acceptable_units: The acceptable_units of this DataType.  # noqa: E501
        :type: list[IUnitDefinitionDto]
        """
        self._acceptable_units = acceptable_units
    @property
    def links(self):
        """Gets the links of this DataType.  # noqa: E501
        :return: The links of this DataType.  # noqa: E501
        :rtype: list[Link]
        """
        return self._links
    @links.setter
    def links(self, links):
        """Sets the links of this DataType.
        links is optional, so no validation is performed.
        :param links: The links of this DataType.  # noqa: E501
        :type: list[Link]
        """
        self._links = links
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
    def to_str(self):
        """Returns the string representation of the model"""
        # pprint gives a stable, readable rendering of the dict form
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DataType):
return False
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        # defined explicitly — presumably for Python 2 compatibility, which
        # does not derive __ne__ from __eq__
        return not self == other
|
"""
////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) NVIDIA Corporation. All rights reserved.
//
// NVIDIA Sample Code
//
// Please refer to the NVIDIA end user license agreement (EULA) associated
// with this source code for terms and conditions that govern your use of
// this software. Any use, reproduction, disclosure, or distribution of
// this software and related documentation outside the terms of the EULA
// is strictly prohibited.
//
////////////////////////////////////////////////////////////////////////////
"""
from greenflow.dataframe_flow import (ConfSchema, PortsSpecSchema)
from greenflow.dataframe_flow.template_node_mixin import TemplateNodeMixin
from greenflow.dataframe_flow import Node
from greenflow.dataframe_flow.metaSpec import MetaDataSchema
class DiffNode(TemplateNodeMixin, Node):
    """Compute the element-wise difference of two dataframes keyed on
    ``sample_id``."""
    def init(self):
        TemplateNodeMixin.init(self)
        self.delayed_process = True
        self.infer_meta = False
        self.OUTPUT_PORT_NAME = 'out'
        self.DIFF_A = 'diff_a'
        self.DIFF_B = 'diff_b'
        # Both inputs accept any of the supported dataframe flavours; the
        # output port mirrors whatever frame type arrives on `diff_a`.
        frame_types = [
            "pandas.DataFrame", "cudf.DataFrame",
            "dask_cudf.DataFrame", "dask.dataframe.DataFrame"
        ]
        inports = {
            self.DIFF_A: {PortsSpecSchema.port_type: frame_types},
            self.DIFF_B: {PortsSpecSchema.port_type: frame_types},
        }
        outports = {
            self.OUTPUT_PORT_NAME: {
                PortsSpecSchema.port_type: "${port:diff_a}"
            },
        }
        # both inputs must carry these columns; the output retains them
        required_columns = {
            'sample_id': 'int64',
            'portfolio': 'float64',
        }
        meta_in = {
            self.DIFF_A: required_columns,
            self.DIFF_B: required_columns
        }
        meta_out = {
            self.OUTPUT_PORT_NAME: {
                MetaDataSchema.META_OP: MetaDataSchema.META_OP_RETENTION,
                MetaDataSchema.META_DATA: {
                    'sample_id': 'int64',
                    'portfolio': 'float64',
                }
            }
        }
        self.template_ports_setup(in_ports=inports, out_ports=outports)
        self.template_meta_setup(in_ports=meta_in, out_ports=meta_out)
    def conf_schema(self):
        """Configuration UI schema: this node exposes no options."""
        schema = {
            "title": "Calculate Sharpe diff",
            "type": "object",
            "properties": {
            },
        }
        return ConfSchema(json=schema, ui={})
    def process(self, inputs):
        """Return the per-sample difference of the two inputs.
        Both frames are aligned on ``sample_id`` before subtracting, and the
        index is restored to a plain column afterwards.
        """
        left = inputs[self.DIFF_A].set_index('sample_id')
        right = inputs[self.DIFF_B].set_index('sample_id')
        difference = (left - right).reset_index()
        return {self.OUTPUT_PORT_NAME: difference}
|
# pylint: disable=W0231,E1101
import collections
from datetime import timedelta
import functools
import gc
import json
import operator
from textwrap import dedent
import warnings
import weakref
import numpy as np
from pandas._libs import Timestamp, iNaT, properties
import pandas.compat as compat
from pandas.compat import (
cPickle as pkl, isidentifier, lrange, lzip, map, set_function_name,
string_types, to_str, zip)
from pandas.compat.numpy import function as nv
from pandas.errors import AbstractMethodError
from pandas.util._decorators import (
Appender, Substitution, rewrite_axis_style_signature)
from pandas.util._validators import validate_bool_kwarg, validate_fillna_kwargs
from pandas.core.dtypes.cast import maybe_promote, maybe_upcast_putmask
from pandas.core.dtypes.common import (
ensure_int64, ensure_object, is_bool, is_bool_dtype,
is_datetime64_any_dtype, is_datetime64tz_dtype, is_dict_like,
is_extension_array_dtype, is_integer, is_list_like, is_number,
is_numeric_dtype, is_object_dtype, is_period_arraylike, is_re_compilable,
is_scalar, is_timedelta64_dtype, pandas_dtype)
from pandas.core.dtypes.generic import ABCDataFrame, ABCPanel, ABCSeries
from pandas.core.dtypes.inference import is_hashable
from pandas.core.dtypes.missing import isna, notna
import pandas as pd
from pandas.core import config, missing, nanops
import pandas.core.algorithms as algos
from pandas.core.base import PandasObject, SelectionMixin
import pandas.core.common as com
from pandas.core.index import (
Index, InvalidIndexError, MultiIndex, RangeIndex, ensure_index)
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.period import Period, PeriodIndex
import pandas.core.indexing as indexing
from pandas.core.internals import BlockManager
from pandas.core.ops import _align_method_FRAME
from pandas.io.formats.format import DataFrameFormatter, format_percentiles
from pandas.io.formats.printing import pprint_thing
from pandas.tseries.frequencies import to_offset
# goal is to be able to define the docs close to function, while still being
# able to share
_shared_docs = {}
# substitution values used when rendering the shared docstrings
_shared_doc_kwargs = {
    'axes': 'keywords for axes',
    'klass': 'NDFrame',
    'axes_single_arg': 'int or labels for object',
    'args_transpose': 'axes to permute (int or label for object)',
    'optional_by': """
    by : str or list of str
        Name or list of names to sort by""",
}
# sentinel value to use as kwarg in place of None when None has special meaning
# and needs to be distinguished from a user explicitly passing None.
sentinel = object()
def _single_replace(self, to_replace, method, inplace, limit):
    """
    Replaces values in a Series using the fill method specified when no
    replacement value is given in the replace method
    Parameters
    ----------
    self : Series
        Despite the name, this is a free function applied to a Series.
    to_replace : scalar or list-like
        Values to locate (passed to ``missing.mask_missing``).
    method : str
        Fill method name understood by ``missing.get_fill_func``.
    inplace : bool
        Mutate ``self`` instead of returning a new Series.
    limit : int or None
        Maximum number of consecutive fills.
    """
    if self.ndim != 1:
        raise TypeError('cannot replace {0} with method {1} on a {2}'
                        .format(to_replace, method, type(self).__name__))
    orig_dtype = self.dtype
    result = self if inplace else self.copy()
    fill_f = missing.get_fill_func(method)
    # mask marks positions holding the values to replace; the fill function
    # then propagates neighbouring values into those positions
    mask = missing.mask_missing(result.values, to_replace)
    values = fill_f(result.values, limit=limit, mask=mask)
    if values.dtype == orig_dtype and inplace:
        # NOTE(review): this early return relies on fill_f having written
        # into result.values in place when the dtype is unchanged — confirm
        # against the fill function implementations.
        return
    result = pd.Series(values, index=self.index,
                       dtype=self.dtype).__finalize__(self)
    if inplace:
        self._update_inplace(result._data)
        return
    return result
class NDFrame(PandasObject, SelectionMixin):
    """
    N-dimensional analogue of DataFrame. Store multi-dimensional in a
    size-mutable, labeled data structure
    Parameters
    ----------
    data : BlockManager
    axes : list
    copy : boolean, default False
    """
    # Instance attribute names managed directly on the object (presumably so
    # attribute routing leaves them alone — see __getattr__/__setattr__,
    # not shown in this chunk).
    _internal_names = ['_data', '_cacher', '_item_cache', '_cache', '_is_copy',
                       '_subtyp', '_name', '_index', '_default_kind',
                       '_default_fill_value', '_metadata', '__array_struct__',
                       '__array_interface__']
    _internal_names_set = set(_internal_names)
    # registered custom accessor names (empty at this level)
    _accessors = frozenset()
    # public attributes scheduled for removal
    _deprecations = frozenset(['as_blocks', 'blocks',
                               'convert_objects', 'is_copy'])
    # per-instance metadata names propagated across operations
    _metadata = []
    _is_copy = None
    # dummy attribute so that datetime.__eq__(Series/DataFrame) defers
    # by returning NotImplemented
    timetuple = None
    # ----------------------------------------------------------------------
    # Constructors
    def __init__(self, data, axes=None, copy=False, dtype=None,
                 fastpath=False):
        """Wrap a BlockManager; optionally cast, copy, and reindex it.
        Parameters
        ----------
        data : BlockManager
        axes : list of Index, optional
            Reindexed onto ``data`` positionally when given.
        copy : bool, default False
            Copy ``data`` (only consulted when no dtype cast happens,
            since astype already produces a new manager).
        dtype : optional
            Cast ``data`` to this dtype.
        fastpath : bool, default False
            Skip casting/copying/reindexing and wrap ``data`` as-is.
        """
        if not fastpath:
            if dtype is not None:
                data = data.astype(dtype)
            elif copy:
                data = data.copy()
            if axes is not None:
                for i, ax in enumerate(axes):
                    data = data.reindex_axis(ax, axis=i)
        # object.__setattr__ bypasses this class's attribute machinery
        object.__setattr__(self, '_is_copy', None)
        object.__setattr__(self, '_data', data)
        object.__setattr__(self, '_item_cache', {})
    def _init_mgr(self, mgr, axes=None, dtype=None, copy=False):
        """Prepare a BlockManager: reindex to ``axes``, then copy/cast.
        ``axes`` maps axis name -> new labels; None entries are skipped.
        """
        for a, axe in axes.items():
            if axe is not None:
                mgr = mgr.reindex_axis(axe,
                                       axis=self._get_block_manager_axis(a),
                                       copy=False)
        # make a copy if explicitly requested
        if copy:
            mgr = mgr.copy()
        if dtype is not None:
            # avoid further copies if we can
            if len(mgr.blocks) > 1 or mgr.blocks[0].values.dtype != dtype:
                mgr = mgr.astype(dtype=dtype)
        return mgr
# ----------------------------------------------------------------------
    @property
    def is_copy(self):
        """
        Return the copy.
        .. deprecated:: public access warns; internal code should read the
           private ``_is_copy`` attribute directly.
        """
        warnings.warn("Attribute 'is_copy' is deprecated and will be removed "
                      "in a future version.", FutureWarning, stacklevel=2)
        return self._is_copy
    @is_copy.setter
    def is_copy(self, msg):
        # deprecated, but still assigns so existing callers keep working
        warnings.warn("Attribute 'is_copy' is deprecated and will be removed "
                      "in a future version.", FutureWarning, stacklevel=2)
        self._is_copy = msg
def _validate_dtype(self, dtype):
""" validate the passed dtype """
if dtype is not None:
dtype = pandas_dtype(dtype)
# a compound dtype
if dtype.kind == 'V':
raise NotImplementedError("compound dtypes are not implemented"
" in the {0} constructor"
.format(self.__class__.__name__))
return dtype
    # ----------------------------------------------------------------------
    # Construction
    @property
    def _constructor(self):
        """Used when a manipulation result has the same dimensions as the
        original.
        Subclasses must override; the base class raises.
        """
        raise AbstractMethodError(self)
    @property
    def _constructor_sliced(self):
        """Used when a manipulation result has one lower dimension(s) as the
        original, such as DataFrame single columns slicing.
        """
        raise AbstractMethodError(self)
    @property
    def _constructor_expanddim(self):
        """Used when a manipulation result has one higher dimension as the
        original, such as Series.to_frame() and DataFrame.to_panel()
        """
        # NOTE(review): raises NotImplementedError rather than
        # AbstractMethodError like the two above — presumably because not
        # every subclass has a higher-dimensional counterpart.
        raise NotImplementedError
# ----------------------------------------------------------------------
# Axis
    @classmethod
    def _setup_axes(cls, axes, info_axis=None, stat_axis=None, aliases=None,
                    slicers=None, axes_are_reversed=False, build_axes=True,
                    ns=None, docs=None):
        """Provide axes setup for the major PandasObjects.
        Mutates ``cls`` in place, installing the ``_AXIS_*`` lookup tables
        and (optionally) one AxisProperty per axis name.
        Parameters
        ----------
        axes : the names of the axes in order (lowest to highest)
        info_axis : the axis of the selector dimension (int)
        stat_axis : the number of axis for the default stats (int)
        aliases : other names for a single axis (dict)
        slicers : how axes slice to others (dict)
        axes_are_reversed : boolean whether to treat passed axes as
            reversed (DataFrame)
        build_axes : setup the axis properties (default True)
        ns : must not be a dict (asserted at the end); otherwise unused here
        docs : dict of per-axis docstrings for the generated properties
        """
        cls._AXIS_ORDERS = axes
        cls._AXIS_NUMBERS = {a: i for i, a in enumerate(axes)}
        cls._AXIS_LEN = len(axes)
        cls._AXIS_ALIASES = aliases or dict()
        # inverse alias map: canonical name -> alias
        cls._AXIS_IALIASES = {v: k for k, v in cls._AXIS_ALIASES.items()}
        cls._AXIS_NAMES = dict(enumerate(axes))
        cls._AXIS_SLICEMAP = slicers or None
        cls._AXIS_REVERSED = axes_are_reversed
        # typ
        setattr(cls, '_typ', cls.__name__.lower())
        # indexing support
        cls._ix = None
        if info_axis is not None:
            cls._info_axis_number = info_axis
            cls._info_axis_name = axes[info_axis]
        if stat_axis is not None:
            cls._stat_axis_number = stat_axis
            cls._stat_axis_name = axes[stat_axis]
        # setup the actual axis
        if build_axes:
            def set_axis(a, i):
                # install a class-level AxisProperty and register its name
                # in _internal_names_set
                setattr(cls, a, properties.AxisProperty(i, docs.get(a, a)))
                cls._internal_names_set.add(a)
            if axes_are_reversed:
                # storage order is the reverse of the user-facing order
                m = cls._AXIS_LEN - 1
                for i, a in cls._AXIS_NAMES.items():
                    set_axis(a, m - i)
            else:
                for i, a in cls._AXIS_NAMES.items():
                    set_axis(a, i)
        assert not isinstance(ns, dict)
def _construct_axes_dict(self, axes=None, **kwargs):
"""Return an axes dictionary for myself."""
d = {a: self._get_axis(a) for a in (axes or self._AXIS_ORDERS)}
d.update(kwargs)
return d
@staticmethod
def _construct_axes_dict_from(self, axes, **kwargs):
"""Return an axes dictionary for the passed axes."""
d = {a: ax for a, ax in zip(self._AXIS_ORDERS, axes)}
d.update(kwargs)
return d
def _construct_axes_dict_for_slice(self, axes=None, **kwargs):
"""Return an axes dictionary for myself."""
d = {self._AXIS_SLICEMAP[a]: self._get_axis(a)
for a in (axes or self._AXIS_ORDERS)}
d.update(kwargs)
return d
    def _construct_axes_from_arguments(
            self, args, kwargs, require_all=False, sentinel=None):
        """Construct and returns axes if supplied in args/kwargs.
        If require_all, raise if all axis arguments are not supplied
        return a tuple of (axes, kwargs).
        sentinel specifies the default parameter when an axis is not
        supplied; useful to distinguish when a user explicitly passes None
        in scenarios where None has special meaning.
        """
        # construct the args
        args = list(args)
        for a in self._AXIS_ORDERS:
            # if we have an alias for this axis
            alias = self._AXIS_IALIASES.get(a)
            if alias is not None:
                if a in kwargs:
                    if alias in kwargs:
                        raise TypeError("arguments are mutually exclusive "
                                        "for [%s,%s]" % (a, alias))
                    continue
                if alias in kwargs:
                    # normalise the alias onto the canonical axis name
                    kwargs[a] = kwargs.pop(alias)
                    continue
            # look for a argument by position
            if a not in kwargs:
                try:
                    kwargs[a] = args.pop(0)
                except IndexError:
                    if require_all:
                        raise TypeError("not enough/duplicate arguments "
                                        "specified!")
        # remaining (unsupplied) axes get the sentinel
        axes = {a: kwargs.pop(a, sentinel) for a in self._AXIS_ORDERS}
        return axes, kwargs
    @classmethod
    def _from_axes(cls, data, axes, **kwargs):
        """Construct an instance from raw data plus a list of axes."""
        # for construction from BlockManager
        if isinstance(data, BlockManager):
            return cls(data, **kwargs)
        else:
            if cls._AXIS_REVERSED:
                # axes arrive in user-facing order; storage order is reversed
                axes = axes[::-1]
            d = cls._construct_axes_dict_from(cls, axes, copy=False)
            d.update(kwargs)
            return cls(data, **d)
@classmethod
def _get_axis_number(cls, axis):
axis = cls._AXIS_ALIASES.get(axis, axis)
if is_integer(axis):
if axis in cls._AXIS_NAMES:
return axis
else:
try:
return cls._AXIS_NUMBERS[axis]
except KeyError:
pass
raise ValueError('No axis named {0} for object type {1}'
.format(axis, type(cls)))
@classmethod
def _get_axis_name(cls, axis):
axis = cls._AXIS_ALIASES.get(axis, axis)
if isinstance(axis, string_types):
if axis in cls._AXIS_NUMBERS:
return axis
else:
try:
return cls._AXIS_NAMES[axis]
except KeyError:
pass
raise ValueError('No axis named {0} for object type {1}'
.format(axis, type(cls)))
def _get_axis(self, axis):
name = self._get_axis_name(axis)
return getattr(self, name)
@classmethod
def _get_block_manager_axis(cls, axis):
"""Map the axis to the block_manager axis."""
axis = cls._get_axis_number(axis)
if cls._AXIS_REVERSED:
m = cls._AXIS_LEN - 1
return m - axis
return axis
    def _get_axis_resolvers(self, axis):
        """Build a name -> Series mapping for one axis and its levels.
        Each named index level maps to a Series of its values; unnamed
        levels get synthetic names like ``ilevel_0`` / ``clevel_0``. The
        axis itself is included under its own name. NOTE(review): these
        resolvers appear intended for query/eval name resolution — confirm
        with callers of _get_index_resolvers.
        """
        # index or columns
        axis_index = getattr(self, axis)
        d = dict()
        prefix = axis[0]
        for i, name in enumerate(axis_index.names):
            if name is not None:
                key = level = name
            else:
                # prefix with 'i' or 'c' depending on the input axis
                # e.g., you must do ilevel_0 for the 0th level of an unnamed
                # multiiindex
                key = '{prefix}level_{i}'.format(prefix=prefix, i=i)
                level = i
            level_values = axis_index.get_level_values(level)
            s = level_values.to_series()
            # re-key the Series by the full axis, not just the level values
            s.index = axis_index
            d[key] = s
        # put the index/columns itself in the dict
        if isinstance(axis_index, MultiIndex):
            dindex = axis_index
        else:
            dindex = axis_index.to_series()
        d[axis] = dindex
        return d
def _get_index_resolvers(self):
d = {}
for axis_name in self._AXIS_ORDERS:
d.update(self._get_axis_resolvers(axis_name))
return d
    @property
    def _info_axis(self):
        # the axis named by _info_axis_name (installed in _setup_axes)
        return getattr(self, self._info_axis_name)
    @property
    def _stat_axis(self):
        # the axis named by _stat_axis_name (installed in _setup_axes)
        return getattr(self, self._stat_axis_name)
@property
def shape(self):
"""
Return a tuple of axis dimensions
"""
return tuple(len(self._get_axis(a)) for a in self._AXIS_ORDERS)
    @property
    def axes(self):
        """
        Return index label(s) of the internal NDFrame
        Returned in ``_AXIS_ORDERS`` order (user-facing order).
        """
        # we do it this way because if we have reversed axes, then
        # the block manager shows then reversed
        return [self._get_axis(a) for a in self._AXIS_ORDERS]
    @property
    def ndim(self):
        """
        Return an int representing the number of axes / array dimensions.
        Return 1 if Series. Otherwise return 2 if DataFrame.
        See Also
        --------
        ndarray.ndim : Number of array dimensions.
        Examples
        --------
        >>> s = pd.Series({'a': 1, 'b': 2, 'c': 3})
        >>> s.ndim
        1
        >>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
        >>> df.ndim
        2
        """
        # delegate to the underlying BlockManager
        return self._data.ndim
@property
def size(self):
"""
Return an int representing the number of elements in this object.
Return the number of rows if Series. Otherwise return the number of
rows times number of columns if DataFrame.
See Also
--------
ndarray.size : Number of elements in the array.
Examples
--------
>>> s = pd.Series({'a': 1, 'b': 2, 'c': 3})
>>> s.size
3
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> df.size
4
"""
return np.prod(self.shape)
    # NDFrame carries no selection state, so both SelectionMixin hooks
    # simply hand back the object itself.
    @property
    def _selected_obj(self):
        """ internal compat with SelectionMixin """
        return self
    @property
    def _obj_with_exclusions(self):
        """ internal compat with SelectionMixin """
        return self
    def _expand_axes(self, key):
        """Return new axes with each missing key appended to its axis.
        NOTE(review): presumably used for setting-with-enlargement —
        confirm with callers (not visible in this chunk).
        """
        new_axes = []
        for k, ax in zip(key, self.axes):
            if k not in ax:
                # exact type mismatch upcasts the axis to object dtype;
                # deliberately not isinstance (subclasses don't match)
                if type(k) != ax.dtype.type:
                    ax = ax.astype('O')
                new_axes.append(ax.insert(len(ax), k))
            else:
                new_axes.append(ax)
        return new_axes
    def set_axis(self, labels, axis=0, inplace=None):
        """
        Assign desired index to given axis.
        Indexes for column or row labels can be changed by assigning
        a list-like or Index.
        .. versionchanged:: 0.21.0
           The signature is now `labels` and `axis`, consistent with
           the rest of pandas API. Previously, the `axis` and `labels`
           arguments were respectively the first and second positional
           arguments.
        Parameters
        ----------
        labels : list-like, Index
            The values for the new index.
        axis : {0 or 'index', 1 or 'columns'}, default 0
            The axis to update. The value 0 identifies the rows, and 1
            identifies the columns.
        inplace : bool, default None
            Whether to return a new %(klass)s instance.
            .. warning::
               ``inplace=None`` currently falls back to to True, but in a
               future version, will default to False. Use inplace=True
               explicitly rather than relying on the default.
        Returns
        -------
        renamed : %(klass)s or None
            An object of same type as caller if inplace=False, None otherwise.
        See Also
        --------
        DataFrame.rename_axis : Alter the name of the index or columns.
        Examples
        --------
        **Series**
        >>> s = pd.Series([1, 2, 3])
        >>> s
        0    1
        1    2
        2    3
        dtype: int64
        >>> s.set_axis(['a', 'b', 'c'], axis=0, inplace=False)
        a    1
        b    2
        c    3
        dtype: int64
        The original object is not modified.
        >>> s
        0    1
        1    2
        2    3
        dtype: int64
        **DataFrame**
        >>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
        Change the row labels.
        >>> df.set_axis(['a', 'b', 'c'], axis='index', inplace=False)
           A  B
        a  1  4
        b  2  5
        c  3  6
        Change the column labels.
        >>> df.set_axis(['I', 'II'], axis='columns', inplace=False)
           I  II
        0  1   4
        1  2   5
        2  3   6
        Now, update the labels inplace.
        >>> df.set_axis(['i', 'ii'], axis='columns', inplace=True)
        >>> df
           i  ii
        0  1   4
        1  2   5
        2  3   6
        """
        # Legacy calling convention set_axis(axis, labels): a scalar first
        # argument is taken to be the axis, so swap and warn.
        if is_scalar(labels):
            warnings.warn(
                'set_axis now takes "labels" as first argument, and '
                '"axis" as named parameter. The old form, with "axis" as '
                'first parameter and \"labels\" as second, is still supported '
                'but will be deprecated in a future version of pandas.',
                FutureWarning, stacklevel=2)
            labels, axis = axis, labels
        # inplace=None means "not specified": warn and keep the historical
        # in-place default for now.
        if inplace is None:
            warnings.warn(
                'set_axis currently defaults to operating inplace.\nThis '
                'will change in a future version of pandas, use '
                'inplace=True to avoid this warning.',
                FutureWarning, stacklevel=2)
            inplace = True
        if inplace:
            setattr(self, self._get_axis_name(axis), labels)
        else:
            obj = self.copy()
            obj.set_axis(labels, axis=axis, inplace=True)
            return obj
    def _set_axis(self, axis, labels):
        """Replace axis ``axis`` with ``labels`` and drop cached items."""
        self._data.set_axis(axis, labels)
        self._clear_item_cache()
    def transpose(self, *args, **kwargs):
        """
        Permute the dimensions of the %(klass)s
        Parameters
        ----------
        args : %(args_transpose)s
        copy : boolean, default False
            Make a copy of the underlying data. Mixed-dtype data will
            always result in a copy
        Returns
        -------
        y : same as input
        Examples
        --------
        >>> p.transpose(2, 0, 1)
        >>> p.transpose(2, 0, 1, copy=True)
        """
        # construct the args
        axes, kwargs = self._construct_axes_from_arguments(args, kwargs,
                                                           require_all=True)
        axes_names = tuple(self._get_axis_name(axes[a])
                           for a in self._AXIS_ORDERS)
        axes_numbers = tuple(self._get_axis_number(axes[a])
                             for a in self._AXIS_ORDERS)
        # we must have unique axes
        if len(axes) != len(set(axes)):
            raise ValueError('Must specify %s unique axes' % self._AXIS_LEN)
        new_axes = self._construct_axes_dict_from(self, [self._get_axis(x)
                                                         for x in axes_names])
        new_values = self.values.transpose(axes_numbers)
        # Copy when requested by keyword, or — NOTE(review) — when a truthy
        # trailing positional argument is given (legacy copy flag; confirm).
        if kwargs.pop('copy', None) or (len(args) and args[-1]):
            new_values = new_values.copy()
        # reject any leftover unexpected keyword arguments
        nv.validate_transpose_for_generic(self, kwargs)
        return self._constructor(new_values, **new_axes).__finalize__(self)
    def swapaxes(self, axis1, axis2, copy=True):
        """
        Interchange axes and swap values axes appropriately.
        Returns
        -------
        y : same as input
        """
        i = self._get_axis_number(axis1)
        j = self._get_axis_number(axis2)
        if i == j:
            # nothing to swap; still honour the copy request
            if copy:
                return self.copy()
            return self
        mapping = {i: j, j: i}
        new_axes = (self._get_axis(mapping.get(k, k))
                    for k in range(self._AXIS_LEN))
        new_values = self.values.swapaxes(i, j)
        if copy:
            new_values = new_values.copy()
        return self._constructor(new_values, *new_axes).__finalize__(self)
    def droplevel(self, level, axis=0):
        """
        Return DataFrame with requested index / column level(s) removed.
        .. versionadded:: 0.24.0
        Parameters
        ----------
        level : int, str, or list-like
            If a string is given, must be the name of a level
            If list-like, elements must be names or positional indexes
            of levels.
        axis : {0 or 'index', 1 or 'columns'}, default 0
        Returns
        -------
        DataFrame
            DataFrame with the requested index / column level(s) removed.
        Examples
        --------
        >>> df = pd.DataFrame([
        ...     [1, 2, 3, 4],
        ...     [5, 6, 7, 8],
        ...     [9, 10, 11, 12]
        ... ]).set_index([0, 1]).rename_axis(['a', 'b'])
        >>> df.columns = pd.MultiIndex.from_tuples([
        ...    ('c', 'e'), ('d', 'f')
        ... ], names=['level_1', 'level_2'])
        >>> df
        level_1   c   d
        level_2   e   f
        a b
        1 2      3   4
        5 6      7   8
        9 10    11  12
        >>> df.droplevel('a')
        level_1   c   d
        level_2   e   f
        b
        2        3   4
        6        7   8
        10      11  12
        >>> df.droplevel('level_2', axis=1)
        level_1   c   d
        a b
        1 2      3   4
        5 6      7   8
        9 10    11  12
        """
        labels = self._get_axis(axis)
        new_labels = labels.droplevel(level)
        # always returns a new object (inplace=False)
        result = self.set_axis(new_labels, axis=axis, inplace=False)
        return result
    def pop(self, item):
        """
        Return item and drop from frame. Raise KeyError if not found.
        Parameters
        ----------
        item : str
            Column label to be popped
        Returns
        -------
        popped : Series
        Examples
        --------
        >>> df = pd.DataFrame([('falcon', 'bird',    389.0),
        ...                    ('parrot', 'bird',     24.0),
        ...                    ('lion',   'mammal',   80.5),
        ...                    ('monkey', 'mammal', np.nan)],
        ...                   columns=('name', 'class', 'max_speed'))
        >>> df
             name   class  max_speed
        0  falcon    bird      389.0
        1  parrot    bird       24.0
        2    lion  mammal       80.5
        3  monkey  mammal        NaN
        >>> df.pop('class')
        0      bird
        1      bird
        2    mammal
        3    mammal
        Name: class, dtype: object
        >>> df
             name  max_speed
        0  falcon      389.0
        1  parrot       24.0
        2    lion       80.5
        3  monkey        NaN
        """
        result = self[item]
        del self[item]
        try:
            # detach the result from this frame's cache so it no longer
            # writes back; not every result type has this method
            result._reset_cacher()
        except AttributeError:
            pass
        return result
    def squeeze(self, axis=None):
        """
        Squeeze 1 dimensional axis objects into scalars.
        Series or DataFrames with a single element are squeezed to a scalar.
        DataFrames with a single column or a single row are squeezed to a
        Series. Otherwise the object is unchanged.
        This method is most useful when you don't know if your
        object is a Series or DataFrame, but you do know it has just a single
        column. In that case you can safely call `squeeze` to ensure you have a
        Series.
        Parameters
        ----------
        axis : {0 or 'index', 1 or 'columns', None}, default None
            A specific axis to squeeze. By default, all length-1 axes are
            squeezed.
            .. versionadded:: 0.20.0
        Returns
        -------
        DataFrame, Series, or scalar
            The projection after squeezing `axis` or all the axes.
        See Also
        --------
        Series.iloc : Integer-location based indexing for selecting scalars.
        DataFrame.iloc : Integer-location based indexing for selecting Series.
        Series.to_frame : Inverse of DataFrame.squeeze for a
            single-column DataFrame.
        Examples
        --------
        >>> primes = pd.Series([2, 3, 5, 7])
        Slicing might produce a Series with a single value:
        >>> even_primes = primes[primes % 2 == 0]
        >>> even_primes
        0    2
        dtype: int64
        >>> even_primes.squeeze()
        2
        Squeezing objects with more than one value in every axis does nothing:
        >>> odd_primes = primes[primes % 2 == 1]
        >>> odd_primes
        1    3
        2    5
        3    7
        dtype: int64
        >>> odd_primes.squeeze()
        1    3
        2    5
        3    7
        dtype: int64
        Squeezing is even more effective when used with DataFrames.
        >>> df = pd.DataFrame([[1, 2], [3, 4]], columns=['a', 'b'])
        >>> df
           a  b
        0  1  2
        1  3  4
        Slicing a single column will produce a DataFrame with the columns
        having only one value:
        >>> df_a = df[['a']]
        >>> df_a
           a
        0  1
        1  3
        So the columns can be squeezed down, resulting in a Series:
        >>> df_a.squeeze('columns')
        0    1
        1    3
        Name: a, dtype: int64
        Slicing a single row from a single column will produce a single
        scalar DataFrame:
        >>> df_0a = df.loc[df.index < 1, ['a']]
        >>> df_0a
           a
        0  1
        Squeezing the rows produces a single scalar Series:
        >>> df_0a.squeeze('rows')
        a    1
        Name: 0, dtype: int64
        Squeezing all axes will project directly into a scalar:
        >>> df_0a.squeeze()
        1
        """
        # normalise to a collection of axis numbers eligible for squeezing
        axis = (self._AXIS_NAMES if axis is None else
                (self._get_axis_number(axis),))
        try:
            # select position 0 on every eligible length-1 axis, a full
            # slice elsewhere
            return self.iloc[
                tuple(0 if i in axis and len(a) == 1 else slice(None)
                      for i, a in enumerate(self.axes))]
        except Exception:
            # NOTE(review): broad fallback — any iloc failure silently
            # returns self unchanged
            return self
    def swaplevel(self, i=-2, j=-1, axis=0):
        """
        Swap levels i and j in a MultiIndex on a particular axis
        Parameters
        ----------
        i, j : int, string (can be mixed)
            Level of index to be swapped. Can pass level name as string.
        Returns
        -------
        swapped : same type as caller (new object)
        .. versionchanged:: 0.18.1
           The indexes ``i`` and ``j`` are now optional, and default to
           the two innermost levels of the index.
        """
        axis = self._get_axis_number(axis)
        result = self.copy()
        labels = result._data.axes[axis]
        # mutate the copy's manager directly with the swapped MultiIndex
        result._data.set_axis(axis, labels.swaplevel(i, j))
        return result
# ----------------------------------------------------------------------
# Rename
def rename(self, *args, **kwargs):
    """
    Alter axes input function or functions. Function / dict values must be
    unique (1-to-1). Labels not contained in a dict / Series will be left
    as-is. Extra labels listed don't throw an error. Alternatively, change
    ``Series.name`` with a scalar value (Series only).

    Parameters
    ----------
    %(axes)s : scalar, list-like, dict-like or function, optional
        Scalar or list-like will alter the ``Series.name`` attribute,
        and raise on DataFrame or Panel.
        dict-like or functions are transformations to apply to
        that axis' values
    copy : boolean, default True
        Also copy underlying data
    inplace : boolean, default False
        Whether to return a new %(klass)s. If True then value of copy is
        ignored.
    level : int or level name, default None
        In case of a MultiIndex, only rename labels in the specified
        level.

    Returns
    -------
    renamed : %(klass)s (new object)

    See Also
    --------
    NDFrame.rename_axis

    Examples
    --------
    >>> s = pd.Series([1, 2, 3])
    >>> s
    0    1
    1    2
    2    3
    dtype: int64
    >>> s.rename("my_name") # scalar, changes Series.name
    0    1
    1    2
    2    3
    Name: my_name, dtype: int64
    >>> s.rename(lambda x: x ** 2)  # function, changes labels
    0    1
    1    2
    4    3
    dtype: int64
    >>> s.rename({1: 3, 2: 5})  # mapping, changes labels
    0    1
    3    2
    5    3
    dtype: int64

    Since ``DataFrame`` doesn't have a ``.name`` attribute,
    only mapping-type arguments are allowed.

    >>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
    >>> df.rename(2)
    Traceback (most recent call last):
    ...
    TypeError: 'int' object is not callable

    ``DataFrame.rename`` supports two calling conventions

    * ``(index=index_mapper, columns=columns_mapper, ...)``
    * ``(mapper, axis={'index', 'columns'}, ...)``

    We *highly* recommend using keyword arguments to clarify your
    intent.

    >>> df.rename(index=str, columns={"A": "a", "B": "c"})
       a  c
    0  1  4
    1  2  5
    2  3  6

    >>> df.rename(index=str, columns={"A": "a", "C": "c"})
       a  B
    0  1  4
    1  2  5
    2  3  6

    Using axis-style parameters

    >>> df.rename(str.lower, axis='columns')
       a  b
    0  1  4
    1  2  5
    2  3  6

    >>> df.rename({1: 2, 2: 4}, axis='index')
       A  B
    0  1  4
    2  2  5
    4  3  6

    See the :ref:`user guide <basics.rename>` for more.
    """
    # Split *args/**kwargs into a per-axis mapping plus remaining kwargs.
    axes, kwargs = self._construct_axes_from_arguments(args, kwargs)
    copy = kwargs.pop('copy', True)
    inplace = kwargs.pop('inplace', False)
    level = kwargs.pop('level', None)
    axis = kwargs.pop('axis', None)
    if axis is not None:
        # Validate the axis
        self._get_axis_number(axis)

    # Any kwargs left over at this point were not recognized.
    if kwargs:
        raise TypeError('rename() got an unexpected keyword '
                        'argument "{0}"'.format(list(kwargs.keys())[0]))

    if com.count_not_none(*axes.values()) == 0:
        raise TypeError('must pass an index to rename')

    self._consolidate_inplace()
    result = self if inplace else self.copy(deep=copy)

    # start in the axis order to eliminate too many copies
    for axis in lrange(self._AXIS_LEN):
        v = axes.get(self._AXIS_NAMES[axis])
        if v is None:
            continue
        f = com._get_rename_function(v)
        baxis = self._get_block_manager_axis(axis)
        if level is not None:
            # NOTE(review): `level` is rebound to its resolved integer here,
            # so later axes in this loop see the already-resolved number —
            # confirm this is intended when renaming on multiple axes.
            level = self.axes[axis]._get_level_number(level)
        result._data = result._data.rename_axis(f, axis=baxis, copy=copy,
                                                level=level)
        result._clear_item_cache()

    if inplace:
        self._update_inplace(result._data)
    else:
        return result.__finalize__(self)
@rewrite_axis_style_signature('mapper', [('copy', True),
                                         ('inplace', False)])
def rename_axis(self, mapper=sentinel, **kwargs):
    """
    Set the name of the axis for the index or columns.

    Parameters
    ----------
    mapper : scalar, list-like, optional
        Value to set the axis name attribute.
    index, columns : scalar, list-like, dict-like or function, optional
        A scalar, list-like, dict-like or functions transformations to
        apply to that axis' values.

        Use either ``mapper`` and ``axis`` to
        specify the axis to target with ``mapper``, or ``index``
        and/or ``columns``.

        .. versionchanged:: 0.24.0

    axis : {0 or 'index', 1 or 'columns'}, default 0
        The axis to rename.
    copy : bool, default True
        Also copy underlying data.
    inplace : bool, default False
        Modifies the object directly, instead of creating a new Series
        or DataFrame.

    Returns
    -------
    Series, DataFrame, or None
        The same type as the caller or None if `inplace` is True.

    See Also
    --------
    Series.rename : Alter Series index labels or name.
    DataFrame.rename : Alter DataFrame index labels or name.
    Index.rename : Set new names on index.

    Notes
    -----
    Prior to version 0.21.0, ``rename_axis`` could also be used to change
    the axis *labels* by passing a mapping or scalar. This behavior is
    deprecated and will be removed in a future version. Use ``rename``
    instead.

    ``DataFrame.rename_axis`` supports two calling conventions

    * ``(index=index_mapper, columns=columns_mapper, ...)``
    * ``(mapper, axis={'index', 'columns'}, ...)``

    The first calling convention will only modify the names of
    the index and/or the names of the Index object that is the columns.
    In this case, the parameter ``copy`` is ignored.

    The second calling convention will modify the names of the
    the corresponding index if mapper is a list or a scalar.
    However, if mapper is dict-like or a function, it will use the
    deprecated behavior of modifying the axis *labels*.

    We *highly* recommend using keyword arguments to clarify your
    intent.

    Examples
    --------
    **Series**

    >>> s = pd.Series(["dog", "cat", "monkey"])
    >>> s
    0       dog
    1       cat
    2    monkey
    dtype: object
    >>> s.rename_axis("animal")
    animal
    0    dog
    1    cat
    2    monkey
    dtype: object

    **DataFrame**

    >>> df = pd.DataFrame({"num_legs": [4, 4, 2],
    ...                    "num_arms": [0, 0, 2]},
    ...                   ["dog", "cat", "monkey"])
    >>> df
            num_legs  num_arms
    dog            4         0
    cat            4         0
    monkey         2         2
    >>> df = df.rename_axis("animal")
    >>> df
            num_legs  num_arms
    animal
    dog            4         0
    cat            4         0
    monkey         2         2
    >>> df = df.rename_axis("limbs", axis="columns")
    >>> df
    limbs   num_legs  num_arms
    animal
    dog            4         0
    cat            4         0
    monkey         2         2

    **MultiIndex**

    >>> df.index = pd.MultiIndex.from_product([['mammal'],
    ...                                        ['dog', 'cat', 'monkey']],
    ...                                       names=['type', 'name'])
    >>> df
                   limbs  num_legs  num_arms
    type   name
    mammal dog         4         0
           cat         4         0
           monkey      2         2

    >>> df.rename_axis(index={'type': 'class'})
                   limbs  num_legs  num_arms
    class  name
    mammal dog         4         0
           cat         4         0
           monkey      2         2

    >>> df.rename_axis(columns=str.upper)
                   LIMBS  num_legs  num_arms
    type   name
    mammal dog         4         0
           cat         4         0
           monkey      2         2
    """
    # `sentinel` distinguishes "argument not given" from an explicit None.
    axes, kwargs = self._construct_axes_from_arguments(
        (), kwargs, sentinel=sentinel)
    copy = kwargs.pop('copy', True)
    inplace = kwargs.pop('inplace', False)
    axis = kwargs.pop('axis', 0)
    if axis is not None:
        axis = self._get_axis_number(axis)

    if kwargs:
        raise TypeError('rename_axis() got an unexpected keyword '
                        'argument "{0}"'.format(list(kwargs.keys())[0]))

    inplace = validate_bool_kwarg(inplace, 'inplace')

    if (mapper is not sentinel):
        # Use v0.23 behavior if a scalar or list
        non_mapper = is_scalar(mapper) or (is_list_like(mapper) and not
                                           is_dict_like(mapper))
        if non_mapper:
            return self._set_axis_name(mapper, axis=axis, inplace=inplace)
        else:
            # Deprecated (v0.21) behavior is if mapper is specified,
            # and not a list or scalar, then call rename
            msg = ("Using 'rename_axis' to alter labels is deprecated. "
                   "Use '.rename' instead")
            warnings.warn(msg, FutureWarning, stacklevel=3)
            axis = self._get_axis_name(axis)
            d = {'copy': copy, 'inplace': inplace}
            d[axis] = mapper
            return self.rename(**d)
    else:
        # Use new behavior.  Means that index and/or columns
        # is specified
        result = self if inplace else self.copy(deep=copy)

        for axis in lrange(self._AXIS_LEN):
            v = axes.get(self._AXIS_NAMES[axis])
            if v is sentinel:
                continue
            # Scalars/lists set the axis *name*; dicts/functions are
            # applied to the existing names.
            non_mapper = is_scalar(v) or (is_list_like(v) and not
                                          is_dict_like(v))
            if non_mapper:
                newnames = v
            else:
                f = com._get_rename_function(v)
                curnames = self._get_axis(axis).names
                newnames = [f(name) for name in curnames]
            result._set_axis_name(newnames, axis=axis,
                                  inplace=True)
        if not inplace:
            return result
def _set_axis_name(self, name, axis=0, inplace=False):
    """
    Set the name(s) of the axis.

    Parameters
    ----------
    name : str or list of str
        Name(s) to set.
    axis : {0 or 'index', 1 or 'columns'}, default 0
        The axis to set the label. The value 0 or 'index' specifies index,
        and the value 1 or 'columns' specifies columns.
    inplace : bool, default False
        If `True`, do operation inplace and return None.

        .. versionadded:: 0.21.0

    Returns
    -------
    Series, DataFrame, or None
        The same type as the caller or `None` if `inplace` is `True`.

    See Also
    --------
    DataFrame.rename : Alter the axis labels of :class:`DataFrame`.
    Series.rename : Alter the index labels or set the index name
        of :class:`Series`.
    Index.rename : Set the name of :class:`Index` or :class:`MultiIndex`.

    Examples
    --------
    >>> df = pd.DataFrame({"num_legs": [4, 4, 2]},
    ...                   ["dog", "cat", "monkey"])
    >>> df
            num_legs
    dog            4
    cat            4
    monkey         2
    >>> df._set_axis_name("animal")
            num_legs
    animal
    dog            4
    cat            4
    monkey         2
    >>> df.index = pd.MultiIndex.from_product(
    ...                [["mammal"], ['dog', 'cat', 'monkey']])
    >>> df._set_axis_name(["type", "name"])
                 legs
    type   name
    mammal dog      4
           cat      4
           monkey   2
    """
    # (A stray, side-effect-free ``pd.MultiIndex.from_product(...)`` call —
    # a leftover from the docstring example whose result was discarded —
    # was removed here.)
    axis = self._get_axis_number(axis)
    # ``set_names`` returns a *new* Index with the requested name(s).
    idx = self._get_axis(axis).set_names(name)

    inplace = validate_bool_kwarg(inplace, 'inplace')
    renamed = self if inplace else self.copy()
    renamed.set_axis(idx, axis=axis, inplace=True)
    if not inplace:
        return renamed
# ----------------------------------------------------------------------
# Comparison Methods
def _indexed_same(self, other):
    """Return True when `other` has equal axis labels on every axis."""
    for axis_name in self._AXIS_ORDERS:
        if not self._get_axis(axis_name).equals(other._get_axis(axis_name)):
            return False
    return True
def equals(self, other):
    """
    Test whether two objects contain the same elements.

    This function allows two Series or DataFrames to be compared against
    each other to see if they have the same shape and elements. NaNs in
    the same location are considered equal. The column headers do not
    need to have the same type, but the elements within the columns must
    be the same dtype.

    Parameters
    ----------
    other : Series or DataFrame
        The other Series or DataFrame to be compared with the first.

    Returns
    -------
    bool
        True if all elements are the same in both objects, False
        otherwise.

    See Also
    --------
    Series.eq : Element-wise equality between two Series.
    DataFrame.eq : Element-wise equality between two DataFrames.
    assert_series_equal : Return True if left and right Series are equal,
        False otherwise.
    assert_frame_equal : Return True if left and right DataFrames are
        equal, False otherwise.
    numpy.array_equal : Return True if two arrays have the same shape
        and elements, False otherwise.

    Notes
    -----
    This function requires that the elements have the same dtype as their
    respective elements in the other Series or DataFrame. However, the
    column labels do not need to have the same type, as long as they are
    still considered equal.

    Examples
    --------
    >>> df = pd.DataFrame({1: [10], 2: [20]})
    >>> df.equals(pd.DataFrame({1: [10], 2: [20]}))
    True

    Column labels of a different type that compare equal still match:

    >>> df.equals(pd.DataFrame({1.0: [10], 2.0: [20]}))
    True

    Same values but different element dtypes do not:

    >>> df.equals(pd.DataFrame({1: [10.0], 2: [20.0]}))
    False
    """
    # Only objects of the same container type can be equal; the actual
    # comparison is delegated to the block managers.
    if isinstance(other, self._constructor):
        return self._data.equals(other._data)
    return False
# -------------------------------------------------------------------------
# Unary Methods
def __neg__(self):
    """Unary minus: negate numeric values; invert boolean values."""
    values = com.values_from_object(self)
    if is_bool_dtype(values):
        # Negation of a boolean array is defined as logical inversion.
        negated = operator.inv(values)
    else:
        if not (is_numeric_dtype(values) or is_timedelta64_dtype(values)
                or is_object_dtype(values)):
            raise TypeError("Unary negative expects numeric dtype, not {}"
                            .format(values.dtype))
        negated = operator.neg(values)
    return self.__array_wrap__(negated)
def __pos__(self):
    """Unary plus: identity for bool/period values, ``+x`` otherwise."""
    values = com.values_from_object(self)
    if is_bool_dtype(values) or is_period_arraylike(values):
        # Unary plus is a no-op for boolean and period-like data.
        result = values
    else:
        if not (is_numeric_dtype(values) or is_timedelta64_dtype(values)
                or is_object_dtype(values)):
            raise TypeError("Unary plus expects numeric dtype, not {}"
                            .format(values.dtype))
        result = operator.pos(values)
    return self.__array_wrap__(result)
def __invert__(self):
    """Element-wise inversion (``~``); an empty object is returned as-is."""
    try:
        arr = operator.inv(com.values_from_object(self))
        return self.__array_wrap__(arr)
    except Exception:
        # inv fails with 0 len
        if not np.prod(self.shape):
            return self
        raise
def __nonzero__(self):
    # Truth-testing a container is ambiguous by design; force callers to
    # be explicit about the reduction they want.
    raise ValueError("The truth value of a {0} is ambiguous. "
                     "Use a.empty, a.bool(), a.item(), a.any() or a.all()."
                     .format(self.__class__.__name__))

# Python 3 name for the same protocol method.
__bool__ = __nonzero__
def bool(self):
    """
    Return the bool of a single element PandasObject.

    This must be a boolean scalar value, either True or False.  Raise a
    ValueError if the PandasObject does not have exactly 1 element, or
    that element is not boolean.
    """
    squeezed = self.squeeze()
    if isinstance(squeezed, (bool, np.bool_)):
        return bool(squeezed)
    if is_scalar(squeezed):
        # Exactly one element, but it is not a boolean.
        raise ValueError("bool cannot act on a non-boolean single element "
                         "{0}".format(self.__class__.__name__))
    # More than one element: __nonzero__ raises the ambiguity error.
    self.__nonzero__()
def __abs__(self):
    """Support ``abs(obj)`` by delegating to the public ``abs`` method."""
    return self.abs()
def __round__(self, decimals=0):
    """Support ``round(obj, n)`` by delegating to the public ``round``."""
    return self.round(decimals)
# -------------------------------------------------------------------------
# Label or Level Combination Helpers
#
# A collection of helper methods for DataFrame/Series operations that
# accept a combination of column/index labels and levels. All such
# operations should utilize/extend these methods when possible so that we
# have consistent precedence and validation logic throughout the library.
def _is_level_reference(self, key, axis=0):
    """
    Test whether a key is a level reference for a given axis.

    To be considered a level reference, `key` must be a string that:

    - (axis=0): Matches the name of an index level and does NOT match
      a column label.
    - (axis=1): Matches the name of a column level and does NOT match
      an index label.

    Parameters
    ----------
    key : str
        Potential level name for the given axis
    axis : int, default 0
        Axis that levels are associated with (0 for index, 1 for columns)

    Returns
    -------
    is_level : bool
    """
    axis = self._get_axis_number(axis)

    if self.ndim > 2:
        raise NotImplementedError(
            "_is_level_reference is not implemented for {type}"
            .format(type=type(self)))

    # Guard clauses: non-hashable or missing keys cannot name a level.
    if key is None or not is_hashable(key):
        return False
    if key not in self.axes[axis].names:
        return False
    # A key that is also a label on the other axis is a label reference,
    # not a level reference.
    return not self._is_label_reference(key, axis=axis)
def _is_label_reference(self, key, axis=0):
    """
    Test whether a key is a label reference for a given axis.

    To be considered a label reference, `key` must be a string that:

    - (axis=0): Matches a column label
    - (axis=1): Matches an index label

    Parameters
    ----------
    key: str
        Potential label name
    axis: int, default 0
        Axis perpendicular to the axis that labels are associated with
        (0 means search for column labels, 1 means search for index labels)

    Returns
    -------
    is_label: bool
    """
    axis = self._get_axis_number(axis)
    other_axes = [ax for ax in range(self._AXIS_LEN) if ax != axis]

    if self.ndim > 2:
        raise NotImplementedError(
            "_is_label_reference is not implemented for {type}"
            .format(type=type(self)))

    if key is None or not is_hashable(key):
        return False
    # A label reference appears on at least one of the *other* axes.
    for ax in other_axes:
        if key in self.axes[ax]:
            return True
    return False
def _is_label_or_level_reference(self, key, axis=0):
    """
    Test whether a key is a label or level reference for a given axis.

    To be considered either a label or a level reference, `key` must be a
    string that:

    - (axis=0): Matches a column label or an index level
    - (axis=1): Matches an index label or a column level

    Parameters
    ----------
    key: str
        Potential label or level name
    axis: int, default 0
        Axis that levels are associated with (0 for index, 1 for columns)

    Returns
    -------
    is_label_or_level: bool
    """
    if self.ndim > 2:
        raise NotImplementedError(
            "_is_label_or_level_reference is not implemented for {type}"
            .format(type=type(self)))

    # Either interpretation makes the key usable.
    if self._is_level_reference(key, axis=axis):
        return True
    return self._is_label_reference(key, axis=axis)
def _check_label_or_level_ambiguity(self, key, axis=0):
    """
    Check whether `key` is ambiguous.

    By ambiguous, we mean that it matches both a level of the input
    `axis` and a label of the other axis.

    Parameters
    ----------
    key: str or object
        label or level name
    axis: int, default 0
        Axis that levels are associated with (0 for index, 1 for columns)

    Raises
    ------
    ValueError: `key` is ambiguous
    """
    axis = self._get_axis_number(axis)
    other_axes = [ax for ax in range(self._AXIS_LEN) if ax != axis]

    # Higher-dimensional containers are not supported here.
    if self.ndim > 2:
        raise NotImplementedError(
            "_check_label_or_level_ambiguity is not implemented for {type}"
            .format(type=type(self)))

    # Ambiguous: `key` names a level on `axis` AND appears as a label on
    # some other axis.
    if (key is not None and
            is_hashable(key) and
            key in self.axes[axis].names and
            any(key in self.axes[ax] for ax in other_axes)):

        # Build an informative and grammatical warning
        level_article, level_type = (('an', 'index')
                                     if axis == 0 else
                                     ('a', 'column'))

        label_article, label_type = (('a', 'column')
                                     if axis == 0 else
                                     ('an', 'index'))

        msg = ("'{key}' is both {level_article} {level_type} level and "
               "{label_article} {label_type} label, which is ambiguous."
               ).format(key=key,
                        level_article=level_article,
                        level_type=level_type,
                        label_article=label_article,
                        label_type=label_type)
        raise ValueError(msg)
def _get_label_or_level_values(self, key, axis=0):
    """
    Return a 1-D array of values associated with `key`, a label or level
    from the given `axis`.

    Retrieval logic:

    - (axis=0): Return column values if `key` matches a column label.
      Otherwise return index level values if `key` matches an index
      level.
    - (axis=1): Return row values if `key` matches an index label.
      Otherwise return column level values if 'key' matches a column
      level

    Parameters
    ----------
    key: str
        Label or level name.
    axis: int, default 0
        Axis that levels are associated with (0 for index, 1 for columns)

    Returns
    -------
    values: np.ndarray

    Raises
    ------
    KeyError
        if `key` matches neither a label nor a level
    ValueError
        if `key` matches multiple labels
    FutureWarning
        if `key` is ambiguous. This will become an ambiguity error in a
        future version
    """
    axis = self._get_axis_number(axis)
    other_axes = [ax for ax in range(self._AXIS_LEN) if ax != axis]

    if self.ndim > 2:
        raise NotImplementedError(
            "_get_label_or_level_values is not implemented for {type}"
            .format(type=type(self)))

    # Labels win over levels; raise first if the key is ambiguous.
    if self._is_label_reference(key, axis=axis):
        self._check_label_or_level_ambiguity(key, axis=axis)
        values = self.xs(key, axis=other_axes[0])._values
    elif self._is_level_reference(key, axis=axis):
        values = self.axes[axis].get_level_values(key)._values
    else:
        raise KeyError(key)

    # Check for duplicates
    if values.ndim > 1:
        # A 2-D result means the label matched more than one row/column.
        if other_axes and isinstance(
                self._get_axis(other_axes[0]), MultiIndex):
            multi_message = ('\n'
                             'For a multi-index, the label must be a '
                             'tuple with elements corresponding to '
                             'each level.')
        else:
            multi_message = ''

        label_axis_name = 'column' if axis == 0 else 'index'
        raise ValueError(("The {label_axis_name} label '{key}' "
                          "is not unique.{multi_message}")
                         .format(key=key,
                                 label_axis_name=label_axis_name,
                                 multi_message=multi_message))

    return values
def _drop_labels_or_levels(self, keys, axis=0):
    """
    Drop labels and/or levels for the given `axis`.

    For each key in `keys`:

    - (axis=0): If key matches a column label then drop the column.
      Otherwise if key matches an index level then drop the level.
    - (axis=1): If key matches an index label then drop the row.
      Otherwise if key matches a column level then drop the level.

    Parameters
    ----------
    keys: str or list of str
        labels or levels to drop
    axis: int, default 0
        Axis that levels are associated with (0 for index, 1 for columns)

    Returns
    -------
    dropped: DataFrame

    Raises
    ------
    ValueError
        if any `keys` match neither a label nor a level
    """
    axis = self._get_axis_number(axis)

    if self.ndim > 2:
        raise NotImplementedError(
            "_drop_labels_or_levels is not implemented for {type}"
            .format(type=type(self)))

    # Validate keys
    keys = com.maybe_make_list(keys)
    invalid_keys = [k for k in keys if not
                    self._is_label_or_level_reference(k, axis=axis)]

    if invalid_keys:
        raise ValueError(("The following keys are not valid labels or "
                          "levels for axis {axis}: {invalid_keys}")
                         .format(axis=axis,
                                 invalid_keys=invalid_keys))

    # Compute levels and labels to drop
    levels_to_drop = [k for k in keys
                      if self._is_level_reference(k, axis=axis)]

    labels_to_drop = [k for k in keys
                      if not self._is_level_reference(k, axis=axis)]

    # Perform copy upfront and then use inplace operations below.
    # This ensures that we always perform exactly one copy.
    # ``copy`` and/or ``inplace`` options could be added in the future.
    dropped = self.copy()

    if axis == 0:
        # Handle dropping index levels
        if levels_to_drop:
            dropped.reset_index(levels_to_drop, drop=True, inplace=True)

        # Handle dropping columns labels
        if labels_to_drop:
            dropped.drop(labels_to_drop, axis=1, inplace=True)
    else:
        # Handle dropping column levels
        if levels_to_drop:
            if isinstance(dropped.columns, MultiIndex):
                # Drop the specified levels from the MultiIndex
                dropped.columns = dropped.columns.droplevel(levels_to_drop)
            else:
                # Drop the last level of Index by replacing with
                # a RangeIndex
                dropped.columns = RangeIndex(dropped.columns.size)

        # Handle dropping index labels
        if labels_to_drop:
            dropped.drop(labels_to_drop, axis=0, inplace=True)

    return dropped
# ----------------------------------------------------------------------
# Iteration
def __hash__(self):
    # Mutable containers must not be hashable (they cannot be dict keys).
    raise TypeError('{0!r} objects are mutable, thus they cannot be'
                    ' hashed'.format(self.__class__.__name__))
def __iter__(self):
    """Iterate over info axis"""
    return iter(self._info_axis)
# can we get a better explanation of this?
def keys(self):
    """Get the 'info axis' (see Indexing for more)

    This is index for Series, columns for DataFrame and major_axis for
    Panel.
    """
    return self._info_axis
def iteritems(self):
    """Iterate over (label, values) on info axis

    This is index for Series, columns for DataFrame, major_axis for Panel,
    and so on.
    """
    for label in self._info_axis:
        yield label, self[label]
def __len__(self):
    """Returns length of info axis"""
    # info axis: index for Series, columns for DataFrame (see ``keys``).
    return len(self._info_axis)
def __contains__(self, key):
    """True if the key is in the info axis"""
    return key in self._info_axis
@property
def empty(self):
    """
    Indicator whether DataFrame is empty.

    True if DataFrame is entirely empty (no items), meaning any of the
    axes are of length 0.

    Returns
    -------
    bool
        If DataFrame is empty, return True, if not return False.

    See Also
    --------
    Series.dropna
    DataFrame.dropna

    Notes
    -----
    If DataFrame contains only NaNs, it is still not considered empty. See
    the example below.

    Examples
    --------
    An example of an actual empty DataFrame. Notice the index is empty:

    >>> df_empty = pd.DataFrame({'A' : []})
    >>> df_empty
    Empty DataFrame
    Columns: [A]
    Index: []
    >>> df_empty.empty
    True

    If we only have NaNs in our DataFrame, it is not considered empty! We
    will need to drop the NaNs to make the DataFrame empty:

    >>> df = pd.DataFrame({'A' : [np.nan]})
    >>> df
        A
    0 NaN
    >>> df.empty
    False
    >>> df.dropna().empty
    True
    """
    # Empty as soon as any single axis has zero length.
    for axis_name in self._AXIS_ORDERS:
        if len(self._get_axis(axis_name)) == 0:
            return True
    return False
# ----------------------------------------------------------------------
# Array Interface

# This is also set in IndexOpsMixin
# GH#23114 Ensure ndarray.__op__(DataFrame) returns NotImplemented
# (a high priority makes numpy defer binary ops to the pandas object)
__array_priority__ = 1000
def __array__(self, dtype=None):
    # NOTE(review): ``dtype`` is accepted for the numpy protocol but is
    # not used in this body — conversion is left to numpy.
    return com.values_from_object(self)
def __array_wrap__(self, result, context=None):
    # Re-wrap a raw ndarray (e.g. a ufunc result) in the caller's
    # container type, re-using this object's axes without copying them.
    d = self._construct_axes_dict(self._AXIS_ORDERS, copy=False)
    return self._constructor(result, **d).__finalize__(self)
# ideally we would define this to avoid the getattr checks, but
# is slower
# @property
# def __array_interface__(self):
# """ provide numpy array interface method """
# values = self.values
# return dict(typestr=values.dtype.str,shape=values.shape,data=values)
def to_dense(self):
    """
    Return dense representation of NDFrame (as opposed to sparse).
    """
    # compat: non-sparse objects are already dense, so return unchanged.
    return self
# ----------------------------------------------------------------------
# Picklability
def __getstate__(self):
    """Return picklable state: block manager, type tag, and metadata."""
    meta = {}
    for key in self._metadata:
        meta[key] = getattr(self, key, None)
    return dict(_data=self._data, _typ=self._typ, _metadata=self._metadata,
                **meta)
def __setstate__(self, state):
    # Restore pickled state.  Several historical pickle layouts are
    # supported for backwards compatibility.
    if isinstance(state, BlockManager):
        # Modern format: the state is the block manager itself.
        self._data = state
    elif isinstance(state, dict):
        typ = state.get('_typ')
        if typ is not None:
            # set in the order of internal names
            # to avoid definitional recursion
            # e.g. say fill_value needing _data to be
            # defined
            meta = set(self._internal_names + self._metadata)
            for k in list(meta):
                if k in state:
                    v = state[k]
                    object.__setattr__(self, k, v)

            for k, v in state.items():
                if k not in meta:
                    object.__setattr__(self, k, v)

        else:
            self._unpickle_series_compat(state)
    elif isinstance(state[0], dict):
        # Legacy tuple-of-dicts formats.
        if len(state) == 5:
            self._unpickle_sparse_frame_compat(state)
        else:
            self._unpickle_frame_compat(state)
    elif len(state) == 4:
        self._unpickle_panel_compat(state)
    elif len(state) == 2:
        self._unpickle_series_compat(state)
    else:  # pragma: no cover
        # old pickling format, for compatibility
        self._unpickle_matrix_compat(state)

    # Item cache is never pickled; start fresh.
    self._item_cache = {}
# ----------------------------------------------------------------------
# Rendering Methods
def __unicode__(self):
    # unicode representation based upon iterating over self
    # (since, by definition, `PandasContainers` are iterable)
    labels = ','.join(map(pprint_thing, self))
    return '%s([%s])' % (self.__class__.__name__, labels)
def _repr_latex_(self):
    """
    Returns a LaTeX representation for a particular object.
    Mainly for use with nbconvert (jupyter notebook conversion to pdf).
    """
    # Only produce LaTeX when the user has opted in via options.
    if not config.get_option('display.latex.repr'):
        return None
    return self.to_latex()
def _repr_data_resource_(self):
    """
    Not a real Jupyter special repr method, but we use the same
    naming convention.
    """
    if not config.get_option("display.html.table_schema"):
        return None
    # Serialize only the head (bounded by display.max_rows) as a
    # Table Schema payload, preserving key order.
    head = self.head(config.get_option('display.max_rows'))
    as_json = head.to_json(orient='table')
    return json.loads(as_json, object_pairs_hook=collections.OrderedDict)
# ----------------------------------------------------------------------
# I/O Methods
_shared_docs['to_excel'] = """
Write %(klass)s to an Excel sheet.
To write a single %(klass)s to an Excel .xlsx file it is only necessary to
specify a target file name. To write to multiple sheets it is necessary to
create an `ExcelWriter` object with a target file name, and specify a sheet
in the file to write to.
Multiple sheets may be written to by specifying unique `sheet_name`.
With all data written to the file it is necessary to save the changes.
Note that creating an `ExcelWriter` object with a file name that already
exists will result in the contents of the existing file being erased.
Parameters
----------
excel_writer : str or ExcelWriter object
File path or existing ExcelWriter.
sheet_name : str, default 'Sheet1'
Name of sheet which will contain DataFrame.
na_rep : str, default ''
Missing data representation.
float_format : str, optional
Format string for floating point numbers. For example
``float_format="%%.2f"`` will format 0.1234 to 0.12.
columns : sequence or list of str, optional
Columns to write.
header : bool or list of str, default True
Write out the column names. If a list of string is given it is
assumed to be aliases for the column names.
index : bool, default True
Write row names (index).
index_label : str or sequence, optional
Column label for index column(s) if desired. If not specified, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the DataFrame uses MultiIndex.
startrow : int, default 0
Upper left cell row to dump data frame.
startcol : int, default 0
Upper left cell column to dump data frame.
engine : str, optional
Write engine to use, 'openpyxl' or 'xlsxwriter'. You can also set this
via the options ``io.excel.xlsx.writer``, ``io.excel.xls.writer``, and
``io.excel.xlsm.writer``.
merge_cells : bool, default True
Write MultiIndex and Hierarchical Rows as merged cells.
encoding : str, optional
Encoding of the resulting excel file. Only necessary for xlwt,
other writers support unicode natively.
inf_rep : str, default 'inf'
Representation for infinity (there is no native representation for
infinity in Excel).
verbose : bool, default True
Display more information in the error logs.
freeze_panes : tuple of int (length 2), optional
Specifies the one-based bottommost row and rightmost column that
is to be frozen.
.. versionadded:: 0.20.0.
See Also
--------
to_csv : Write DataFrame to a comma-separated values (csv) file.
ExcelWriter : Class for writing DataFrame objects into excel sheets.
read_excel : Read an Excel file into a pandas DataFrame.
read_csv : Read a comma-separated values (csv) file into DataFrame.
Notes
-----
For compatibility with :meth:`~DataFrame.to_csv`,
to_excel serializes lists and dicts to strings before writing.
Once a workbook has been saved it is not possible write further data
without rewriting the whole workbook.
Examples
--------
Create, write to and save a workbook:
>>> df1 = pd.DataFrame([['a', 'b'], ['c', 'd']],
... index=['row 1', 'row 2'],
... columns=['col 1', 'col 2'])
>>> df1.to_excel("output.xlsx") # doctest: +SKIP
To specify the sheet name:
>>> df1.to_excel("output.xlsx",
... sheet_name='Sheet_name_1') # doctest: +SKIP
If you wish to write to more than one sheet in the workbook, it is
necessary to specify an ExcelWriter object:
>>> df2 = df1.copy()
>>> with pd.ExcelWriter('output.xlsx') as writer: # doctest: +SKIP
... df1.to_excel(writer, sheet_name='Sheet_name_1')
... df2.to_excel(writer, sheet_name='Sheet_name_2')
To set the library that is used to write the Excel file,
you can pass the `engine` keyword (the default engine is
automatically chosen depending on the file extension):
>>> df1.to_excel('output1.xlsx', engine='xlsxwriter') # doctest: +SKIP
"""
@Appender(_shared_docs["to_excel"] % dict(klass="object"))
def to_excel(self, excel_writer, sheet_name="Sheet1", na_rep="",
             float_format=None, columns=None, header=True, index=True,
             index_label=None, startrow=0, startcol=0, engine=None,
             merge_cells=True, encoding=None, inf_rep="inf", verbose=True,
             freeze_panes=None):
    # A Series is converted to a one-column frame so a single Excel code
    # path handles both container types.
    df = self if isinstance(self, ABCDataFrame) else self.to_frame()

    from pandas.io.formats.excel import ExcelFormatter
    # NOTE(review): ``encoding`` and ``verbose`` are accepted here but are
    # not forwarded to ExcelFormatter or ``write`` in this body — confirm
    # whether that is intentional.
    formatter = ExcelFormatter(df, na_rep=na_rep, cols=columns,
                               header=header,
                               float_format=float_format, index=index,
                               index_label=index_label,
                               merge_cells=merge_cells,
                               inf_rep=inf_rep)
    formatter.write(excel_writer, sheet_name=sheet_name, startrow=startrow,
                    startcol=startcol, freeze_panes=freeze_panes,
                    engine=engine)
def to_json(self, path_or_buf=None, orient=None, date_format=None,
double_precision=10, force_ascii=True, date_unit='ms',
default_handler=None, lines=False, compression='infer',
index=True):
"""
Convert the object to a JSON string.
Note NaN's and None will be converted to null and datetime objects
will be converted to UNIX timestamps.
Parameters
----------
path_or_buf : string or file handle, optional
File path or object. If not specified, the result is returned as
a string.
orient : string
Indication of expected JSON string format.
* Series
- default is 'index'
- allowed values are: {'split','records','index','table'}
* DataFrame
- default is 'columns'
- allowed values are:
{'split','records','index','columns','values','table'}
* The format of the JSON string
- 'split' : dict like {'index' -> [index],
'columns' -> [columns], 'data' -> [values]}
- 'records' : list like
[{column -> value}, ... , {column -> value}]
- 'index' : dict like {index -> {column -> value}}
- 'columns' : dict like {column -> {index -> value}}
- 'values' : just the values array
- 'table' : dict like {'schema': {schema}, 'data': {data}}
describing the data, and the data component is
like ``orient='records'``.
.. versionchanged:: 0.20.0
date_format : {None, 'epoch', 'iso'}
Type of date conversion. 'epoch' = epoch milliseconds,
'iso' = ISO8601. The default depends on the `orient`. For
``orient='table'``, the default is 'iso'. For all other orients,
the default is 'epoch'.
double_precision : int, default 10
The number of decimal places to use when encoding
floating point values.
force_ascii : bool, default True
Force encoded string to be ASCII.
date_unit : string, default 'ms' (milliseconds)
The time unit to encode to, governs timestamp and ISO8601
precision. One of 's', 'ms', 'us', 'ns' for second, millisecond,
microsecond, and nanosecond respectively.
default_handler : callable, default None
Handler to call if object cannot otherwise be converted to a
suitable format for JSON. Should receive a single argument which is
the object to convert and return a serialisable object.
lines : bool, default False
If 'orient' is 'records' write out line delimited json format. Will
throw ValueError if incorrect 'orient' since others are not list
like.
.. versionadded:: 0.19.0
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}
A string representing the compression to use in the output file,
only used when the first argument is a filename. By default, the
compression is inferred from the filename.
.. versionadded:: 0.21.0
.. versionchanged:: 0.24.0
'infer' option added and set to default
index : bool, default True
Whether to include the index values in the JSON string. Not
including the index (``index=False``) is only supported when
orient is 'split' or 'table'.
.. versionadded:: 0.23.0
See Also
--------
read_json
Examples
--------
>>> df = pd.DataFrame([['a', 'b'], ['c', 'd']],
... index=['row 1', 'row 2'],
... columns=['col 1', 'col 2'])
>>> df.to_json(orient='split')
'{"columns":["col 1","col 2"],
"index":["row 1","row 2"],
"data":[["a","b"],["c","d"]]}'
Encoding/decoding a Dataframe using ``'records'`` formatted JSON.
Note that index labels are not preserved with this encoding.
>>> df.to_json(orient='records')
'[{"col 1":"a","col 2":"b"},{"col 1":"c","col 2":"d"}]'
Encoding/decoding a Dataframe using ``'index'`` formatted JSON:
>>> df.to_json(orient='index')
'{"row 1":{"col 1":"a","col 2":"b"},"row 2":{"col 1":"c","col 2":"d"}}'
Encoding/decoding a Dataframe using ``'columns'`` formatted JSON:
>>> df.to_json(orient='columns')
'{"col 1":{"row 1":"a","row 2":"c"},"col 2":{"row 1":"b","row 2":"d"}}'
Encoding/decoding a Dataframe using ``'values'`` formatted JSON:
>>> df.to_json(orient='values')
'[["a","b"],["c","d"]]'
Encoding with Table Schema
>>> df.to_json(orient='table')
'{"schema": {"fields": [{"name": "index", "type": "string"},
{"name": "col 1", "type": "string"},
{"name": "col 2", "type": "string"}],
"primaryKey": "index",
"pandas_version": "0.20.0"},
"data": [{"index": "row 1", "col 1": "a", "col 2": "b"},
{"index": "row 2", "col 1": "c", "col 2": "d"}]}'
"""
from pandas.io import json
if date_format is None and orient == 'table':
date_format = 'iso'
elif date_format is None:
date_format = 'epoch'
return json.to_json(path_or_buf=path_or_buf, obj=self, orient=orient,
date_format=date_format,
double_precision=double_precision,
force_ascii=force_ascii, date_unit=date_unit,
default_handler=default_handler,
lines=lines, compression=compression,
index=index)
def to_hdf(self, path_or_buf, key, **kwargs):
"""
Write the contained data to an HDF5 file using HDFStore.
Hierarchical Data Format (HDF) is self-describing, allowing an
application to interpret the structure and contents of a file with
no outside information. One HDF file can hold a mix of related objects
which can be accessed as a group or as individual objects.
In order to add another DataFrame or Series to an existing HDF file
please use append mode and a different a key.
For more information see the :ref:`user guide <io.hdf5>`.
Parameters
----------
path_or_buf : str or pandas.HDFStore
File path or HDFStore object.
key : str
Identifier for the group in the store.
mode : {'a', 'w', 'r+'}, default 'a'
Mode to open file:
- 'w': write, a new file is created (an existing file with
the same name would be deleted).
- 'a': append, an existing file is opened for reading and
writing, and if the file does not exist it is created.
- 'r+': similar to 'a', but the file must already exist.
format : {'fixed', 'table'}, default 'fixed'
Possible values:
- 'fixed': Fixed format. Fast writing/reading. Not-appendable,
nor searchable.
- 'table': Table format. Write as a PyTables Table structure
which may perform worse but allow more flexible operations
like searching / selecting subsets of the data.
append : bool, default False
For Table formats, append the input data to the existing.
data_columns : list of columns or True, optional
List of columns to create as indexed data columns for on-disk
queries, or True to use all columns. By default only the axes
of the object are indexed. See :ref:`io.hdf5-query-data-columns`.
Applicable only to format='table'.
complevel : {0-9}, optional
Specifies a compression level for data.
A value of 0 disables compression.
complib : {'zlib', 'lzo', 'bzip2', 'blosc'}, default 'zlib'
Specifies the compression library to be used.
As of v0.20.2 these additional compressors for Blosc are supported
(default if no compressor specified: 'blosc:blosclz'):
{'blosc:blosclz', 'blosc:lz4', 'blosc:lz4hc', 'blosc:snappy',
'blosc:zlib', 'blosc:zstd'}.
Specifying a compression library which is not available issues
a ValueError.
fletcher32 : bool, default False
If applying compression use the fletcher32 checksum.
dropna : bool, default False
If true, ALL nan rows will not be written to store.
errors : str, default 'strict'
Specifies how encoding and decoding errors are to be handled.
See the errors argument for :func:`open` for a full list
of options.
See Also
--------
DataFrame.read_hdf : Read from HDF file.
DataFrame.to_parquet : Write a DataFrame to the binary parquet format.
DataFrame.to_sql : Write to a sql table.
DataFrame.to_feather : Write out feather-format for DataFrames.
DataFrame.to_csv : Write out to a csv file.
Examples
--------
>>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]},
... index=['a', 'b', 'c'])
>>> df.to_hdf('data.h5', key='df', mode='w')
We can add another object to the same file:
>>> s = pd.Series([1, 2, 3, 4])
>>> s.to_hdf('data.h5', key='s')
Reading from HDF file:
>>> pd.read_hdf('data.h5', 'df')
A B
a 1 4
b 2 5
c 3 6
>>> pd.read_hdf('data.h5', 's')
0 1
1 2
2 3
3 4
dtype: int64
Deleting file with data:
>>> import os
>>> os.remove('data.h5')
"""
from pandas.io import pytables
return pytables.to_hdf(path_or_buf, key, self, **kwargs)
def to_msgpack(self, path_or_buf=None, encoding='utf-8', **kwargs):
"""
Serialize object to input file path using msgpack format.
THIS IS AN EXPERIMENTAL LIBRARY and the storage format
may not be stable until a future release.
Parameters
----------
path : string File path, buffer-like, or None
if None, return generated string
append : bool whether to append to an existing msgpack
(default is False)
compress : type of compressor (zlib or blosc), default to None (no
compression)
"""
from pandas.io import packers
return packers.to_msgpack(path_or_buf, self, encoding=encoding,
**kwargs)
def to_sql(self, name, con, schema=None, if_exists='fail', index=True,
index_label=None, chunksize=None, dtype=None, method=None):
"""
Write records stored in a DataFrame to a SQL database.
Databases supported by SQLAlchemy [1]_ are supported. Tables can be
newly created, appended to, or overwritten.
Parameters
----------
name : string
Name of SQL table.
con : sqlalchemy.engine.Engine or sqlite3.Connection
Using SQLAlchemy makes it possible to use any DB supported by that
library. Legacy support is provided for sqlite3.Connection objects.
schema : string, optional
Specify the schema (if database flavor supports this). If None, use
default schema.
if_exists : {'fail', 'replace', 'append'}, default 'fail'
How to behave if the table already exists.
* fail: Raise a ValueError.
* replace: Drop the table before inserting new values.
* append: Insert new values to the existing table.
index : bool, default True
Write DataFrame index as a column. Uses `index_label` as the column
name in the table.
index_label : string or sequence, default None
Column label for index column(s). If None is given (default) and
`index` is True, then the index names are used.
A sequence should be given if the DataFrame uses MultiIndex.
chunksize : int, optional
Rows will be written in batches of this size at a time. By default,
all rows will be written at once.
dtype : dict, optional
Specifying the datatype for columns. The keys should be the column
names and the values should be the SQLAlchemy types or strings for
the sqlite3 legacy mode.
method : {None, 'multi', callable}, default None
Controls the SQL insertion clause used:
* None : Uses standard SQL ``INSERT`` clause (one per row).
* 'multi': Pass multiple values in a single ``INSERT`` clause.
* callable with signature ``(pd_table, conn, keys, data_iter)``.
Details and a sample callable implementation can be found in the
section :ref:`insert method <io.sql.method>`.
.. versionadded:: 0.24.0
Raises
------
ValueError
When the table already exists and `if_exists` is 'fail' (the
default).
See Also
--------
read_sql : Read a DataFrame from a table.
Notes
-----
Timezone aware datetime columns will be written as
``Timestamp with timezone`` type with SQLAlchemy if supported by the
database. Otherwise, the datetimes will be stored as timezone unaware
timestamps local to the original timezone.
.. versionadded:: 0.24.0
References
----------
.. [1] http://docs.sqlalchemy.org
.. [2] https://www.python.org/dev/peps/pep-0249/
Examples
--------
Create an in-memory SQLite database.
>>> from sqlalchemy import create_engine
>>> engine = create_engine('sqlite://', echo=False)
Create a table from scratch with 3 rows.
>>> df = pd.DataFrame({'name' : ['User 1', 'User 2', 'User 3']})
>>> df
name
0 User 1
1 User 2
2 User 3
>>> df.to_sql('users', con=engine)
>>> engine.execute("SELECT * FROM users").fetchall()
[(0, 'User 1'), (1, 'User 2'), (2, 'User 3')]
>>> df1 = pd.DataFrame({'name' : ['User 4', 'User 5']})
>>> df1.to_sql('users', con=engine, if_exists='append')
>>> engine.execute("SELECT * FROM users").fetchall()
[(0, 'User 1'), (1, 'User 2'), (2, 'User 3'),
(0, 'User 4'), (1, 'User 5')]
Overwrite the table with just ``df1``.
>>> df1.to_sql('users', con=engine, if_exists='replace',
... index_label='id')
>>> engine.execute("SELECT * FROM users").fetchall()
[(0, 'User 4'), (1, 'User 5')]
Specify the dtype (especially useful for integers with missing values).
Notice that while pandas is forced to store the data as floating point,
the database supports nullable integers. When fetching the data with
Python, we get back integer scalars.
>>> df = pd.DataFrame({"A": [1, None, 2]})
>>> df
A
0 1.0
1 NaN
2 2.0
>>> from sqlalchemy.types import Integer
>>> df.to_sql('integers', con=engine, index=False,
... dtype={"A": Integer()})
>>> engine.execute("SELECT * FROM integers").fetchall()
[(1,), (None,), (2,)]
"""
from pandas.io import sql
sql.to_sql(self, name, con, schema=schema, if_exists=if_exists,
index=index, index_label=index_label, chunksize=chunksize,
dtype=dtype, method=method)
def to_pickle(self, path, compression='infer',
protocol=pkl.HIGHEST_PROTOCOL):
"""
Pickle (serialize) object to file.
Parameters
----------
path : str
File path where the pickled object will be stored.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, \
default 'infer'
A string representing the compression to use in the output file. By
default, infers from the file extension in specified path.
.. versionadded:: 0.20.0
protocol : int
Int which indicates which protocol should be used by the pickler,
default HIGHEST_PROTOCOL (see [1]_ paragraph 12.1.2). The possible
values for this parameter depend on the version of Python. For
Python 2.x, possible values are 0, 1, 2. For Python>=3.0, 3 is a
valid value. For Python >= 3.4, 4 is a valid value. A negative
value for the protocol parameter is equivalent to setting its value
to HIGHEST_PROTOCOL.
.. [1] https://docs.python.org/3/library/pickle.html
.. versionadded:: 0.21.0
See Also
--------
read_pickle : Load pickled pandas object (or any object) from file.
DataFrame.to_hdf : Write DataFrame to an HDF5 file.
DataFrame.to_sql : Write DataFrame to a SQL database.
DataFrame.to_parquet : Write a DataFrame to the binary parquet format.
Examples
--------
>>> original_df = pd.DataFrame({"foo": range(5), "bar": range(5, 10)})
>>> original_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> original_df.to_pickle("./dummy.pkl")
>>> unpickled_df = pd.read_pickle("./dummy.pkl")
>>> unpickled_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> import os
>>> os.remove("./dummy.pkl")
"""
from pandas.io.pickle import to_pickle
return to_pickle(self, path, compression=compression,
protocol=protocol)
def to_clipboard(self, excel=True, sep=None, **kwargs):
r"""
Copy object to the system clipboard.
Write a text representation of object to the system clipboard.
This can be pasted into Excel, for example.
Parameters
----------
excel : bool, default True
- True, use the provided separator, writing in a csv format for
allowing easy pasting into excel.
- False, write a string representation of the object to the
clipboard.
sep : str, default ``'\t'``
Field delimiter.
**kwargs
These parameters will be passed to DataFrame.to_csv.
See Also
--------
DataFrame.to_csv : Write a DataFrame to a comma-separated values
(csv) file.
read_clipboard : Read text from clipboard and pass to read_table.
Notes
-----
Requirements for your platform.
- Linux : `xclip`, or `xsel` (with `gtk` or `PyQt4` modules)
- Windows : none
- OS X : none
Examples
--------
Copy the contents of a DataFrame to the clipboard.
>>> df = pd.DataFrame([[1, 2, 3], [4, 5, 6]], columns=['A', 'B', 'C'])
>>> df.to_clipboard(sep=',')
... # Wrote the following to the system clipboard:
... # ,A,B,C
... # 0,1,2,3
... # 1,4,5,6
We can omit the the index by passing the keyword `index` and setting
it to false.
>>> df.to_clipboard(sep=',', index=False)
... # Wrote the following to the system clipboard:
... # A,B,C
... # 1,2,3
... # 4,5,6
"""
from pandas.io import clipboards
clipboards.to_clipboard(self, excel=excel, sep=sep, **kwargs)
def to_xarray(self):
"""
Return an xarray object from the pandas object.
Returns
-------
xarray.DataArray or xarray.Dataset
Data in the pandas structure converted to Dataset if the object is
a DataFrame, or a DataArray if the object is a Series.
See Also
--------
DataFrame.to_hdf : Write DataFrame to an HDF5 file.
DataFrame.to_parquet : Write a DataFrame to the binary parquet format.
Notes
-----
See the `xarray docs <http://xarray.pydata.org/en/stable/>`__
Examples
--------
>>> df = pd.DataFrame([('falcon', 'bird', 389.0, 2),
... ('parrot', 'bird', 24.0, 2),
... ('lion', 'mammal', 80.5, 4),
... ('monkey', 'mammal', np.nan, 4)],
... columns=['name', 'class', 'max_speed',
... 'num_legs'])
>>> df
name class max_speed num_legs
0 falcon bird 389.0 2
1 parrot bird 24.0 2
2 lion mammal 80.5 4
3 monkey mammal NaN 4
>>> df.to_xarray()
<xarray.Dataset>
Dimensions: (index: 4)
Coordinates:
* index (index) int64 0 1 2 3
Data variables:
name (index) object 'falcon' 'parrot' 'lion' 'monkey'
class (index) object 'bird' 'bird' 'mammal' 'mammal'
max_speed (index) float64 389.0 24.0 80.5 nan
num_legs (index) int64 2 2 4 4
>>> df['max_speed'].to_xarray()
<xarray.DataArray 'max_speed' (index: 4)>
array([389. , 24. , 80.5, nan])
Coordinates:
* index (index) int64 0 1 2 3
>>> dates = pd.to_datetime(['2018-01-01', '2018-01-01',
... '2018-01-02', '2018-01-02'])
>>> df_multiindex = pd.DataFrame({'date': dates,
... 'animal': ['falcon', 'parrot', 'falcon',
... 'parrot'],
... 'speed': [350, 18, 361, 15]}).set_index(['date',
... 'animal'])
>>> df_multiindex
speed
date animal
2018-01-01 falcon 350
parrot 18
2018-01-02 falcon 361
parrot 15
>>> df_multiindex.to_xarray()
<xarray.Dataset>
Dimensions: (animal: 2, date: 2)
Coordinates:
* date (date) datetime64[ns] 2018-01-01 2018-01-02
* animal (animal) object 'falcon' 'parrot'
Data variables:
speed (date, animal) int64 350 18 361 15
"""
try:
import xarray
except ImportError:
# Give a nice error message
raise ImportError("the xarray library is not installed\n"
"you can install via conda\n"
"conda install xarray\n"
"or via pip\n"
"pip install xarray\n")
if self.ndim == 1:
return xarray.DataArray.from_series(self)
elif self.ndim == 2:
return xarray.Dataset.from_dataframe(self)
# > 2 dims
coords = [(a, self._get_axis(a)) for a in self._AXIS_ORDERS]
return xarray.DataArray(self,
coords=coords,
)
def to_latex(self, buf=None, columns=None, col_space=None, header=True,
index=True, na_rep='NaN', formatters=None, float_format=None,
sparsify=None, index_names=True, bold_rows=False,
column_format=None, longtable=None, escape=None,
encoding=None, decimal='.', multicolumn=None,
multicolumn_format=None, multirow=None):
r"""
Render an object to a LaTeX tabular environment table.
Render an object to a tabular environment table. You can splice
this into a LaTeX document. Requires \usepackage{booktabs}.
.. versionchanged:: 0.20.2
Added to Series
Parameters
----------
buf : file descriptor or None
Buffer to write to. If None, the output is returned as a string.
columns : list of label, optional
The subset of columns to write. Writes all columns by default.
col_space : int, optional
The minimum width of each column.
header : bool or list of str, default True
Write out the column names. If a list of strings is given,
it is assumed to be aliases for the column names.
index : bool, default True
Write row names (index).
na_rep : str, default 'NaN'
Missing data representation.
formatters : list of functions or dict of {str: function}, optional
Formatter functions to apply to columns' elements by position or
name. The result of each function must be a unicode string.
List must be of length equal to the number of columns.
float_format : str, optional
Format string for floating point numbers.
sparsify : bool, optional
Set to False for a DataFrame with a hierarchical index to print
every multiindex key at each row. By default, the value will be
read from the config module.
index_names : bool, default True
Prints the names of the indexes.
bold_rows : bool, default False
Make the row labels bold in the output.
column_format : str, optional
The columns format as specified in `LaTeX table format
<https://en.wikibooks.org/wiki/LaTeX/Tables>`__ e.g. 'rcl' for 3
columns. By default, 'l' will be used for all columns except
columns of numbers, which default to 'r'.
longtable : bool, optional
By default, the value will be read from the pandas config
module. Use a longtable environment instead of tabular. Requires
adding a \usepackage{longtable} to your LaTeX preamble.
escape : bool, optional
By default, the value will be read from the pandas config
module. When set to False prevents from escaping latex special
characters in column names.
encoding : str, optional
A string representing the encoding to use in the output file,
defaults to 'ascii' on Python 2 and 'utf-8' on Python 3.
decimal : str, default '.'
Character recognized as decimal separator, e.g. ',' in Europe.
.. versionadded:: 0.18.0
multicolumn : bool, default True
Use \multicolumn to enhance MultiIndex columns.
The default will be read from the config module.
.. versionadded:: 0.20.0
multicolumn_format : str, default 'l'
The alignment for multicolumns, similar to `column_format`
The default will be read from the config module.
.. versionadded:: 0.20.0
multirow : bool, default False
Use \multirow to enhance MultiIndex rows. Requires adding a
\usepackage{multirow} to your LaTeX preamble. Will print
centered labels (instead of top-aligned) across the contained
rows, separating groups via clines. The default will be read
from the pandas config module.
.. versionadded:: 0.20.0
Returns
-------
str or None
If buf is None, returns the resulting LateX format as a
string. Otherwise returns None.
See Also
--------
DataFrame.to_string : Render a DataFrame to a console-friendly
tabular output.
DataFrame.to_html : Render a DataFrame as an HTML table.
Examples
--------
>>> df = pd.DataFrame({'name': ['Raphael', 'Donatello'],
... 'mask': ['red', 'purple'],
... 'weapon': ['sai', 'bo staff']})
>>> df.to_latex(index=False) # doctest: +NORMALIZE_WHITESPACE
'\\begin{tabular}{lll}\n\\toprule\n name & mask & weapon
\\\\\n\\midrule\n Raphael & red & sai \\\\\n Donatello &
purple & bo staff \\\\\n\\bottomrule\n\\end{tabular}\n'
"""
# Get defaults from the pandas config
if self.ndim == 1:
self = self.to_frame()
if longtable is None:
longtable = config.get_option("display.latex.longtable")
if escape is None:
escape = config.get_option("display.latex.escape")
if multicolumn is None:
multicolumn = config.get_option("display.latex.multicolumn")
if multicolumn_format is None:
multicolumn_format = config.get_option(
"display.latex.multicolumn_format")
if multirow is None:
multirow = config.get_option("display.latex.multirow")
formatter = DataFrameFormatter(self, buf=buf, columns=columns,
col_space=col_space, na_rep=na_rep,
header=header, index=index,
formatters=formatters,
float_format=float_format,
bold_rows=bold_rows,
sparsify=sparsify,
index_names=index_names,
escape=escape, decimal=decimal)
formatter.to_latex(column_format=column_format, longtable=longtable,
encoding=encoding, multicolumn=multicolumn,
multicolumn_format=multicolumn_format,
multirow=multirow)
if buf is None:
return formatter.buf.getvalue()
def to_csv(self, path_or_buf=None, sep=",", na_rep='', float_format=None,
columns=None, header=True, index=True, index_label=None,
mode='w', encoding=None, compression='infer', quoting=None,
quotechar='"', line_terminator=None, chunksize=None,
tupleize_cols=None, date_format=None, doublequote=True,
escapechar=None, decimal='.'):
r"""
Write object to a comma-separated values (csv) file.
.. versionchanged:: 0.24.0
The order of arguments for Series was changed.
Parameters
----------
path_or_buf : str or file handle, default None
File path or object, if None is provided the result is returned as
a string.
.. versionchanged:: 0.24.0
Was previously named "path" for Series.
sep : str, default ','
String of length 1. Field delimiter for the output file.
na_rep : str, default ''
Missing data representation.
float_format : str, default None
Format string for floating point numbers.
columns : sequence, optional
Columns to write.
header : bool or list of str, default True
Write out the column names. If a list of strings is given it is
assumed to be aliases for the column names.
.. versionchanged:: 0.24.0
Previously defaulted to False for Series.
index : bool, default True
Write row names (index).
index_label : str or sequence, or False, default None
Column label for index column(s) if desired. If None is given, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the object uses MultiIndex. If
False do not print fields for index names. Use index_label=False
for easier importing in R.
mode : str
Python write mode, default 'w'.
encoding : str, optional
A string representing the encoding to use in the output file,
defaults to 'ascii' on Python 2 and 'utf-8' on Python 3.
compression : str, default 'infer'
Compression mode among the following possible values: {'infer',
'gzip', 'bz2', 'zip', 'xz', None}. If 'infer' and `path_or_buf`
is path-like, then detect compression from the following
extensions: '.gz', '.bz2', '.zip' or '.xz'. (otherwise no
compression).
.. versionchanged:: 0.24.0
'infer' option added and set to default.
quoting : optional constant from csv module
Defaults to csv.QUOTE_MINIMAL. If you have set a `float_format`
then floats are converted to strings and thus csv.QUOTE_NONNUMERIC
will treat them as non-numeric.
quotechar : str, default '\"'
String of length 1. Character used to quote fields.
line_terminator : string, optional
The newline character or character sequence to use in the output
file. Defaults to `os.linesep`, which depends on the OS in which
this method is called ('\n' for linux, '\r\n' for Windows, i.e.).
.. versionchanged:: 0.24.0
chunksize : int or None
Rows to write at a time.
tupleize_cols : bool, default False
Write MultiIndex columns as a list of tuples (if True) or in
the new, expanded format, where each MultiIndex column is a row
in the CSV (if False).
.. deprecated:: 0.21.0
This argument will be removed and will always write each row
of the multi-index as a separate row in the CSV file.
date_format : str, default None
Format string for datetime objects.
doublequote : bool, default True
Control quoting of `quotechar` inside a field.
escapechar : str, default None
String of length 1. Character used to escape `sep` and `quotechar`
when appropriate.
decimal : str, default '.'
Character recognized as decimal separator. E.g. use ',' for
European data.
Returns
-------
None or str
If path_or_buf is None, returns the resulting csv format as a
string. Otherwise returns None.
See Also
--------
read_csv : Load a CSV file into a DataFrame.
to_excel : Load an Excel file into a DataFrame.
Examples
--------
>>> df = pd.DataFrame({'name': ['Raphael', 'Donatello'],
... 'mask': ['red', 'purple'],
... 'weapon': ['sai', 'bo staff']})
>>> df.to_csv(index=False)
'name,mask,weapon\nRaphael,red,sai\nDonatello,purple,bo staff\n'
"""
df = self if isinstance(self, ABCDataFrame) else self.to_frame()
if tupleize_cols is not None:
warnings.warn("The 'tupleize_cols' parameter is deprecated and "
"will be removed in a future version",
FutureWarning, stacklevel=2)
else:
tupleize_cols = False
from pandas.io.formats.csvs import CSVFormatter
formatter = CSVFormatter(df, path_or_buf,
line_terminator=line_terminator, sep=sep,
encoding=encoding,
compression=compression, quoting=quoting,
na_rep=na_rep, float_format=float_format,
cols=columns, header=header, index=index,
index_label=index_label, mode=mode,
chunksize=chunksize, quotechar=quotechar,
tupleize_cols=tupleize_cols,
date_format=date_format,
doublequote=doublequote,
escapechar=escapechar, decimal=decimal)
formatter.save()
if path_or_buf is None:
return formatter.path_or_buf.getvalue()
# ----------------------------------------------------------------------
# Fancy Indexing
@classmethod
def _create_indexer(cls, name, indexer):
"""Create an indexer like _name in the class."""
if getattr(cls, name, None) is None:
_indexer = functools.partial(indexer, name)
setattr(cls, name, property(_indexer, doc=indexer.__doc__))
def get(self, key, default=None):
"""
Get item from object for given key (DataFrame column, Panel slice,
etc.). Returns default value if not found.
Parameters
----------
key : object
Returns
-------
value : same type as items contained in object
"""
try:
return self[key]
except (KeyError, ValueError, IndexError):
return default
    def __getitem__(self, item):
        # Label-based lookup, memoized via _item_cache so repeated access
        # to the same item returns the same boxed object.
        return self._get_item_cache(item)
    def _get_item_cache(self, item):
        """Return the cached item, item represents a label indexer."""
        cache = self._item_cache
        res = cache.get(item)
        if res is None:
            # Cache miss: pull the raw values from the block manager, box
            # them into a pandas object, and remember the result so the
            # next lookup returns the same object.
            values = self._data.get(item)
            res = self._box_item_values(item, values)
            cache[item] = res
            # Record us as the child's parent so it can write back changes.
            res._set_as_cached(item, self)
            # for a chain
            res._is_copy = self._is_copy
        return res
def _set_as_cached(self, item, cacher):
"""Set the _cacher attribute on the calling object with a weakref to
cacher.
"""
self._cacher = (item, weakref.ref(cacher))
    def _reset_cacher(self):
        """Reset the cacher."""
        # The attribute may never have been set, so guard before deleting.
        if hasattr(self, '_cacher'):
            del self._cacher
def _iget_item_cache(self, item):
"""Return the cached item, item represents a positional indexer."""
ax = self._info_axis
if ax.is_unique:
lower = self._get_item_cache(ax[item])
else:
lower = self._take(item, axis=self._info_axis_number)
return lower
    def _box_item_values(self, key, values):
        # Abstract: subclasses wrap raw block-manager values for a single
        # item in the appropriate pandas container.
        raise AbstractMethodError(self)
    def _maybe_cache_changed(self, item, value):
        """The object has called back to us saying maybe it has changed.
        """
        # Write the (possibly modified) child values back into our block
        # manager under the same label.
        self._data.set(item, value)
    @property
    def _is_cached(self):
        """Return boolean indicating if self is cached or not."""
        # ``_cacher`` is set by _set_as_cached when this object is handed
        # out from a parent's item cache.
        return getattr(self, '_cacher', None) is not None
def _get_cacher(self):
"""return my cacher or None"""
cacher = getattr(self, '_cacher', None)
if cacher is not None:
cacher = cacher[1]()
return cacher
    @property
    def _is_view(self):
        """Return boolean indicating if self is view of another array """
        # Delegates to the block manager, which tracks whether its blocks
        # share memory with another object.
        return self._data.is_view
    def _maybe_update_cacher(self, clear=False, verify_is_copy=True):
        """
        See if we need to update our parent cacher if clear, then clear our
        cache.

        Parameters
        ----------
        clear : boolean, default False
            clear the item cache
        verify_is_copy : boolean, default True
            provide is_copy checks
        """
        cacher = getattr(self, '_cacher', None)
        if cacher is not None:
            ref = cacher[1]()

            # we are trying to reference a dead referant, hence
            # a copy
            if ref is None:
                del self._cacher
            else:
                try:
                    ref._maybe_cache_changed(cacher[0], self)
                except Exception:
                    # NOTE(review): deliberate best-effort write-back --
                    # failures here are swallowed so cache maintenance
                    # never breaks the user's operation.
                    pass

        if verify_is_copy:
            self._check_setitem_copy(stacklevel=5, t='referant')

        if clear:
            self._clear_item_cache()
def _clear_item_cache(self, i=None):
if i is not None:
self._item_cache.pop(i, None)
else:
self._item_cache.clear()
    def _slice(self, slobj, axis=0, kind=None):
        """
        Construct a slice of this container.

        kind parameter is maintained for compatibility with Series slicing.
        """
        # Translate the user-facing axis number to the block manager's
        # internal axis ordering before slicing.
        axis = self._get_block_manager_axis(axis)
        result = self._constructor(self._data.get_slice(slobj, axis=axis))
        result = result.__finalize__(self)

        # this could be a view
        # but only in a single-dtyped view slicable case
        is_copy = axis != 0 or result._is_view
        result._set_is_copy(self, copy=is_copy)
        return result
    def _set_item(self, key, value):
        # Write through to the block manager, then invalidate the item
        # cache so stale boxed children are not handed out.
        self._data.set(key, value)
        self._clear_item_cache()
def _set_is_copy(self, ref=None, copy=True):
if not copy:
self._is_copy = None
else:
if ref is not None:
self._is_copy = weakref.ref(ref)
else:
self._is_copy = None
    def _check_is_chained_assignment_possible(self):
        """
        Check if we are a view, have a cacher, and are of mixed type.
        If so, then force a setitem_copy check.

        Should be called just near setting a value

        Will return a boolean if it we are a view and are cached, but a
        single-dtype meaning that the cacher should be updated following
        setting.
        """
        if self._is_view and self._is_cached:
            ref = self._get_cacher()
            if ref is not None and ref._is_mixed_type:
                # Mixed-type parent: a chained assignment can silently miss
                # the parent, so always emit the warning/error here.
                self._check_setitem_copy(stacklevel=4, t='referant',
                                         force=True)
            return True
        elif self._is_copy:
            self._check_setitem_copy(stacklevel=4, t='referant')
        return False
    def _check_setitem_copy(self, stacklevel=4, t='setting', force=False):
        """
        Validate if we are doing a setitem on a chained copy.

        Parameters
        ----------
        stacklevel : integer, default 4
            the level to show of the stack when the error is output
        t : string, the type of setting error
        force : boolean, default False
            if True, then force showing an error

        If you call this function, be sure to set the stacklevel such that the
        user will see the error *at the level of setting*

        It is technically possible to figure out that we are setting on
        a copy even WITH a multi-dtyped pandas object. In other words, some
        blocks may be views while other are not. Currently _is_view will ALWAYS
        return False for multi-blocks to avoid having to handle this case.

        df = DataFrame(np.arange(0,9), columns=['count'])
        df['group'] = 'b'

        # This technically need not raise SettingWithCopy if both are view
        # (which is not generally guaranteed but is usually True. However,
        # this is in general not a good practice and we recommend using .loc.
        df.iloc[0:5]['group'] = 'a'
        """
        if force or self._is_copy:
            value = config.get_option('mode.chained_assignment')
            if value is None:
                # chained-assignment checking is disabled
                return
            # see if the copy is not actually referred; if so, then dissolve
            # the copy weakref
            try:
                gc.collect(2)
                if not gc.get_referents(self._is_copy()):
                    self._is_copy = None
                    return
            except Exception:
                pass
            # we might be a false positive
            try:
                if self._is_copy().shape == self.shape:
                    self._is_copy = None
                    return
            except Exception:
                pass
            # a custom message
            if isinstance(self._is_copy, string_types):
                t = self._is_copy
            elif t == 'referant':
                t = ("\n"
                     "A value is trying to be set on a copy of a slice from a "
                     "DataFrame\n\n"
                     "See the caveats in the documentation: "
                     "http://pandas.pydata.org/pandas-docs/stable/"
                     "indexing.html#indexing-view-versus-copy"
                     )
            else:
                t = ("\n"
                     "A value is trying to be set on a copy of a slice from a "
                     "DataFrame.\n"
                     "Try using .loc[row_indexer,col_indexer] = value "
                     "instead\n\nSee the caveats in the documentation: "
                     "http://pandas.pydata.org/pandas-docs/stable/"
                     "indexing.html#indexing-view-versus-copy"
                     )
            # either raise or warn, per the mode.chained_assignment option
            if value == 'raise':
                raise com.SettingWithCopyError(t)
            elif value == 'warn':
                warnings.warn(t, com.SettingWithCopyWarning,
                              stacklevel=stacklevel)
    def __delitem__(self, key):
        """
        Delete item.

        For a MultiIndex-columned frame, ``key`` may be a prefix: every
        column whose leading elements match ``key`` is deleted.
        """
        deleted = False

        maybe_shortcut = False
        if hasattr(self, 'columns') and isinstance(self.columns, MultiIndex):
            try:
                # key absent from the engine may still be a valid prefix
                maybe_shortcut = key not in self.columns._engine
            except TypeError:
                # unhashable keys can't be looked up in the engine
                pass

        if maybe_shortcut:
            # Allow shorthand to delete all columns whose first len(key)
            # elements match key:
            if not isinstance(key, tuple):
                key = (key, )
            for col in self.columns:
                if isinstance(col, tuple) and col[:len(key)] == key:
                    # recursive delete of the fully-specified column
                    del self[col]
                    deleted = True
        if not deleted:
            # If the above loop ran and didn't delete anything because
            # there was no match, this call should raise the appropriate
            # exception:
            self._data.delete(key)

        # delete from the caches
        try:
            del self._item_cache[key]
        except KeyError:
            pass
def _take(self, indices, axis=0, is_copy=True):
"""
Return the elements in the given *positional* indices along an axis.
This means that we are not indexing according to actual values in
the index attribute of the object. We are indexing according to the
actual position of the element in the object.
This is the internal version of ``.take()`` and will contain a wider
selection of parameters useful for internal use but not as suitable
for public usage.
Parameters
----------
indices : array-like
An array of ints indicating which positions to take.
axis : int, default 0
The axis on which to select elements. "0" means that we are
selecting rows, "1" means that we are selecting columns, etc.
is_copy : bool, default True
Whether to return a copy of the original object or not.
Returns
-------
taken : same type as caller
An array-like containing the elements taken from the object.
See Also
--------
numpy.ndarray.take
numpy.take
"""
self._consolidate_inplace()
new_data = self._data.take(indices,
axis=self._get_block_manager_axis(axis),
verify=True)
result = self._constructor(new_data).__finalize__(self)
# Maybe set copy if we didn't actually change the index.
if is_copy:
if not result._get_axis(axis).equals(self._get_axis(axis)):
result._set_is_copy(self)
return result
    def take(self, indices, axis=0, convert=None, is_copy=True, **kwargs):
        """
        Return the elements in the given *positional* indices along an axis.

        This means that we are not indexing according to actual values in
        the index attribute of the object. We are indexing according to the
        actual position of the element in the object.

        Parameters
        ----------
        indices : array-like
            An array of ints indicating which positions to take.
        axis : {0 or 'index', 1 or 'columns', None}, default 0
            The axis on which to select elements. ``0`` means that we are
            selecting rows, ``1`` means that we are selecting columns.
        convert : bool, default True
            Whether to convert negative indices into positive ones.
            For example, ``-1`` would map to the ``len(axis) - 1``.
            The conversions are similar to the behavior of indexing a
            regular Python list.

            .. deprecated:: 0.21.0
               In the future, negative indices will always be converted.
        is_copy : bool, default True
            Whether to return a copy of the original object or not.
        **kwargs
            For compatibility with :meth:`numpy.take`. Has no effect on the
            output.

        Returns
        -------
        taken : same type as caller
            An array-like containing the elements taken from the object.

        See Also
        --------
        DataFrame.loc : Select a subset of a DataFrame by labels.
        DataFrame.iloc : Select a subset of a DataFrame by positions.
        numpy.take : Take elements from an array along an axis.

        Examples
        --------
        >>> df = pd.DataFrame([('falcon', 'bird', 389.0),
        ...                    ('parrot', 'bird', 24.0),
        ...                    ('lion', 'mammal', 80.5),
        ...                    ('monkey', 'mammal', np.nan)],
        ...                   columns=['name', 'class', 'max_speed'],
        ...                   index=[0, 2, 3, 1])
        >>> df
             name   class  max_speed
        0  falcon    bird      389.0
        2  parrot    bird       24.0
        3    lion  mammal       80.5
        1  monkey  mammal        NaN

        Take elements at positions 0 and 3 along the axis 0 (default).

        Note how the actual indices selected (0 and 1) do not correspond to
        our selected indices 0 and 3. That's because we are selecting the 0th
        and 3rd rows, not rows whose indices equal 0 and 3.

        >>> df.take([0, 3])
             name   class  max_speed
        0  falcon    bird      389.0
        1  monkey  mammal        NaN

        Take elements at indices 1 and 2 along the axis 1 (column selection).

        >>> df.take([1, 2], axis=1)
            class  max_speed
        0    bird      389.0
        2    bird       24.0
        3  mammal       80.5
        1  mammal        NaN

        We may take elements using negative integers for positive indices,
        starting from the end of the object, just like with Python lists.

        >>> df.take([-1, -2])
             name   class  max_speed
        1  monkey  mammal        NaN
        3    lion  mammal       80.5
        """
        if convert is not None:
            # 'convert' no longer has any effect; warn and then ignore it.
            msg = ("The 'convert' parameter is deprecated "
                   "and will be removed in a future version.")
            warnings.warn(msg, FutureWarning, stacklevel=2)
        # reject any unexpected numpy-compat kwargs
        nv.validate_take(tuple(), kwargs)
        return self._take(indices, axis=axis, is_copy=is_copy)
    def xs(self, key, axis=0, level=None, drop_level=True):
        """
        Return cross-section from the Series/DataFrame.

        This method takes a `key` argument to select data at a particular
        level of a MultiIndex.

        Parameters
        ----------
        key : label or tuple of label
            Label contained in the index, or partially in a MultiIndex.
        axis : {0 or 'index', 1 or 'columns'}, default 0
            Axis to retrieve cross-section on.
        level : object, defaults to first n levels (n=1 or len(key))
            In case of a key partially contained in a MultiIndex, indicate
            which levels are used. Levels can be referred by label or position.
        drop_level : bool, default True
            If False, returns object with same levels as self.

        Returns
        -------
        Series or DataFrame
            Cross-section from the original Series or DataFrame
            corresponding to the selected index levels.

        See Also
        --------
        DataFrame.loc : Access a group of rows and columns
            by label(s) or a boolean array.
        DataFrame.iloc : Purely integer-location based indexing
            for selection by position.

        Notes
        -----
        `xs` can not be used to set values.

        MultiIndex Slicers is a generic way to get/set values on
        any level or levels.
        It is a superset of `xs` functionality, see
        :ref:`MultiIndex Slicers <advanced.mi_slicers>`.

        Examples
        --------
        >>> d = {'num_legs': [4, 4, 2, 2],
        ...      'num_wings': [0, 0, 2, 2],
        ...      'class': ['mammal', 'mammal', 'mammal', 'bird'],
        ...      'animal': ['cat', 'dog', 'bat', 'penguin'],
        ...      'locomotion': ['walks', 'walks', 'flies', 'walks']}
        >>> df = pd.DataFrame(data=d)
        >>> df = df.set_index(['class', 'animal', 'locomotion'])
        >>> df
                                   num_legs  num_wings
        class  animal  locomotion
        mammal cat     walks              4          0
               dog     walks              4          0
               bat     flies              2          2
        bird   penguin walks              2          2

        Get values at specified index

        >>> df.xs('mammal')
                           num_legs  num_wings
        animal locomotion
        cat    walks              4          0
        dog    walks              4          0
        bat    flies              2          2

        Get values at several indexes

        >>> df.xs(('mammal', 'dog'))
                    num_legs  num_wings
        locomotion
        walks              4          0

        Get values at specified index and level

        >>> df.xs('cat', level=1)
                           num_legs  num_wings
        class  locomotion
        mammal walks              4          0

        Get values at several indexes and levels

        >>> df.xs(('bird', 'walks'),
        ...       level=[0, 'locomotion'])
                 num_legs  num_wings
        animal
        penguin         2          2

        Get values at specified column and axis

        >>> df.xs('num_wings', axis=1)
        class   animal   locomotion
        mammal  cat      walks         0
                dog      walks         0
                bat      flies         2
        bird    penguin  walks         2
        Name: num_wings, dtype: int64
        """
        axis = self._get_axis_number(axis)
        labels = self._get_axis(axis)
        if level is not None:
            loc, new_ax = labels.get_loc_level(key, level=level,
                                               drop_level=drop_level)
            # create the tuple of the indexer
            indexer = [slice(None)] * self.ndim
            indexer[axis] = loc
            indexer = tuple(indexer)
            result = self.iloc[indexer]
            setattr(result, result._get_axis_name(axis), new_ax)
            return result
        if axis == 1:
            # column cross-section reduces to plain __getitem__
            return self[key]
        self._consolidate_inplace()
        index = self.index
        if isinstance(index, MultiIndex):
            loc, new_index = self.index.get_loc_level(key,
                                                      drop_level=drop_level)
        else:
            loc = self.index.get_loc(key)
            if isinstance(loc, np.ndarray):
                # duplicate labels: get_loc returned a mask or positions
                if loc.dtype == np.bool_:
                    inds, = loc.nonzero()
                    return self._take(inds, axis=axis)
                else:
                    return self._take(loc, axis=axis)
            if not is_scalar(loc):
                new_index = self.index[loc]
        if is_scalar(loc):
            new_values = self._data.fast_xs(loc)
            # may need to box a datelike-scalar
            #
            # if we encounter an array-like and we only have 1 dim
            # that means that there are list/ndarrays inside the Series!
            # so just return them (GH 6394)
            if not is_list_like(new_values) or self.ndim == 1:
                return com.maybe_box_datetimelike(new_values)
            result = self._constructor_sliced(
                new_values, index=self.columns,
                name=self.index[loc], dtype=new_values.dtype)
        else:
            result = self.iloc[loc]
            result.index = new_index
        # this could be a view
        # but only in a single-dtyped view slicable case
        result._set_is_copy(self, copy=not result._is_view)
        return result

    # alias kept for internal callers
    _xs = xs
def select(self, crit, axis=0):
"""
Return data corresponding to axis labels matching criteria.
.. deprecated:: 0.21.0
Use df.loc[df.index.map(crit)] to select via labels
Parameters
----------
crit : function
To be called on each index (label). Should return True or False
axis : int
Returns
-------
selection : same type as caller
"""
warnings.warn("'select' is deprecated and will be removed in a "
"future release. You can use "
".loc[labels.map(crit)] as a replacement",
FutureWarning, stacklevel=2)
axis = self._get_axis_number(axis)
axis_name = self._get_axis_name(axis)
axis_values = self._get_axis(axis)
if len(axis_values) > 0:
new_axis = axis_values[
np.asarray([bool(crit(label)) for label in axis_values])]
else:
new_axis = axis_values
return self.reindex(**{axis_name: new_axis})
    def reindex_like(self, other, method=None, copy=True, limit=None,
                     tolerance=None):
        """
        Return an object with matching indices as other object.

        Conform the object to the same index on all axes. Optional
        filling logic, placing NaN in locations having no value
        in the previous index. A new object is produced unless the
        new index is equivalent to the current one and copy=False.

        Parameters
        ----------
        other : Object of the same data type
            Its row and column indices are used to define the new indices
            of this object.
        method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
            Method to use for filling holes in reindexed DataFrame.
            Please note: this is only applicable to DataFrames/Series with a
            monotonically increasing/decreasing index.

            * None (default): don't fill gaps
            * pad / ffill: propagate last valid observation forward to next
              valid
            * backfill / bfill: use next valid observation to fill gap
            * nearest: use nearest valid observations to fill gap
        copy : bool, default True
            Return a new object, even if the passed indexes are the same.
        limit : int, default None
            Maximum number of consecutive labels to fill for inexact matches.
        tolerance : optional
            Maximum distance between original and new labels for inexact
            matches. The values of the index at the matching locations must
            satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
            Tolerance may be a scalar value, which applies the same tolerance
            to all values, or list-like, which applies variable tolerance per
            element. List-like includes list, tuple, array, Series, and must be
            the same size as the index and its dtype must exactly match the
            index's type.

            .. versionadded:: 0.21.0 (list-like tolerance)

        Returns
        -------
        Series or DataFrame
            Same type as caller, but with changed indices on each axis.

        See Also
        --------
        DataFrame.set_index : Set row labels.
        DataFrame.reset_index : Remove row labels or move them to new columns.
        DataFrame.reindex : Change to new indices or expand indices.

        Notes
        -----
        Same as calling
        ``.reindex(index=other.index, columns=other.columns,...)``.

        Examples
        --------
        >>> df1 = pd.DataFrame([[24.3, 75.7, 'high'],
        ...                     [31, 87.8, 'high'],
        ...                     [22, 71.6, 'medium'],
        ...                     [35, 95, 'medium']],
        ...     columns=['temp_celsius', 'temp_fahrenheit', 'windspeed'],
        ...     index=pd.date_range(start='2014-02-12',
        ...                         end='2014-02-15', freq='D'))
        >>> df1
                    temp_celsius  temp_fahrenheit windspeed
        2014-02-12          24.3             75.7      high
        2014-02-13          31.0             87.8      high
        2014-02-14          22.0             71.6    medium
        2014-02-15          35.0             95.0    medium

        >>> df2 = pd.DataFrame([[28, 'low'],
        ...                     [30, 'low'],
        ...                     [35.1, 'medium']],
        ...     columns=['temp_celsius', 'windspeed'],
        ...     index=pd.DatetimeIndex(['2014-02-12', '2014-02-13',
        ...                             '2014-02-15']))
        >>> df2
                    temp_celsius windspeed
        2014-02-12          28.0       low
        2014-02-13          30.0       low
        2014-02-15          35.1    medium

        >>> df2.reindex_like(df1)
                    temp_celsius  temp_fahrenheit windspeed
        2014-02-12          28.0              NaN       low
        2014-02-13          30.0              NaN       low
        2014-02-14           NaN              NaN       NaN
        2014-02-15          35.1              NaN    medium
        """
        # build per-axis reindex kwargs from other's axes, then delegate
        d = other._construct_axes_dict(axes=self._AXIS_ORDERS, method=method,
                                       copy=copy, limit=limit,
                                       tolerance=tolerance)
        return self.reindex(**d)
    def drop(self, labels=None, axis=0, index=None, columns=None, level=None,
             inplace=False, errors='raise'):
        """
        Drop the requested labels from an axis.

        Either ``labels`` (combined with ``axis``) or one/both of ``index``
        and ``columns`` may be given, but not both styles at once.

        Parameters
        ----------
        labels : single label or list-like
            Labels to drop from the axis given by ``axis``.
        axis : int or axis name, default 0
            Axis from which to drop ``labels``.
        index, columns : single label or list-like, optional
            Alternative to specifying ``labels`` with ``axis``.
        level : int or level name, optional
            For a MultiIndex, the level from which to drop labels.
        inplace : bool, default False
            If True, modify this object in place and return None.
        errors : {'ignore', 'raise'}, default 'raise'
            If 'ignore', suppress errors for labels that are absent.

        Raises
        ------
        ValueError
            If both ``labels`` and ``index``/``columns`` are given, or if
            none of them are.
        """
        inplace = validate_bool_kwarg(inplace, 'inplace')
        if labels is not None:
            if index is not None or columns is not None:
                raise ValueError("Cannot specify both 'labels' and "
                                 "'index'/'columns'")
            axis_name = self._get_axis_name(axis)
            axes = {axis_name: labels}
        elif index is not None or columns is not None:
            axes, _ = self._construct_axes_from_arguments((index, columns), {})
        else:
            raise ValueError("Need to specify at least one of 'labels', "
                             "'index' or 'columns'")
        obj = self
        # drop from each requested axis in turn
        for axis, labels in axes.items():
            if labels is not None:
                obj = obj._drop_axis(labels, axis, level=level, errors=errors)
        if inplace:
            self._update_inplace(obj)
        else:
            return obj
    def _drop_axis(self, labels, axis, level=None, errors='raise'):
        """
        Drop labels from specified axis. Used in the ``drop`` method
        internally.

        Parameters
        ----------
        labels : single label or list-like
        axis : int or axis name
        level : int or level name, default None
            For MultiIndex
        errors : {'ignore', 'raise'}, default 'raise'
            If 'ignore', suppress error and existing labels are dropped.
        """
        axis = self._get_axis_number(axis)
        axis_name = self._get_axis_name(axis)
        axis = self._get_axis(axis)
        if axis.is_unique:
            # unique axis: Index.drop handles missing-label errors itself
            if level is not None:
                if not isinstance(axis, MultiIndex):
                    raise AssertionError('axis must be a MultiIndex')
                new_axis = axis.drop(labels, level=level, errors=errors)
            else:
                new_axis = axis.drop(labels, errors=errors)
            result = self.reindex(**{axis_name: new_axis})
        # Case for non-unique axis
        else:
            # build a boolean keep-mask instead of reindexing by label
            labels = ensure_object(com.index_labels_to_array(labels))
            if level is not None:
                if not isinstance(axis, MultiIndex):
                    raise AssertionError('axis must be a MultiIndex')
                indexer = ~axis.get_level_values(level).isin(labels)
                # GH 18561 MultiIndex.drop should raise if label is absent
                if errors == 'raise' and indexer.all():
                    raise KeyError('{} not found in axis'.format(labels))
            else:
                indexer = ~axis.isin(labels)
                # Check if label doesn't exist along axis
                labels_missing = (axis.get_indexer_for(labels) == -1).any()
                if errors == 'raise' and labels_missing:
                    raise KeyError('{} not found in axis'.format(labels))
            slicer = [slice(None)] * self.ndim
            slicer[self._get_axis_number(axis_name)] = indexer
            result = self.loc[tuple(slicer)]
        return result
    def _update_inplace(self, result, verify_is_copy=True):
        """
        Replace self internals with result.

        Parameters
        ----------
        result : same type as self
            The object whose internals (``_data``) replace ours.
        verify_is_copy : boolean, default True
            provide is_copy checks
        """
        # NOTE: This does *not* call __finalize__ and that's an explicit
        # decision that we may revisit in the future.
        self._reset_cache()
        self._clear_item_cache()
        self._data = getattr(result, '_data', result)
        # propagate the change to any parent cacher
        self._maybe_update_cacher(verify_is_copy=verify_is_copy)
def add_prefix(self, prefix):
"""
Prefix labels with string `prefix`.
For Series, the row labels are prefixed.
For DataFrame, the column labels are prefixed.
Parameters
----------
prefix : str
The string to add before each label.
Returns
-------
Series or DataFrame
New Series or DataFrame with updated labels.
See Also
--------
Series.add_suffix: Suffix row labels with string `suffix`.
DataFrame.add_suffix: Suffix column labels with string `suffix`.
Examples
--------
>>> s = pd.Series([1, 2, 3, 4])
>>> s
0 1
1 2
2 3
3 4
dtype: int64
>>> s.add_prefix('item_')
item_0 1
item_1 2
item_2 3
item_3 4
dtype: int64
>>> df = pd.DataFrame({'A': [1, 2, 3, 4], 'B': [3, 4, 5, 6]})
>>> df
A B
0 1 3
1 2 4
2 3 5
3 4 6
>>> df.add_prefix('col_')
col_A col_B
0 1 3
1 2 4
2 3 5
3 4 6
"""
f = functools.partial('{prefix}{}'.format, prefix=prefix)
mapper = {self._info_axis_name: f}
return self.rename(**mapper)
def add_suffix(self, suffix):
"""
Suffix labels with string `suffix`.
For Series, the row labels are suffixed.
For DataFrame, the column labels are suffixed.
Parameters
----------
suffix : str
The string to add after each label.
Returns
-------
Series or DataFrame
New Series or DataFrame with updated labels.
See Also
--------
Series.add_prefix: Prefix row labels with string `prefix`.
DataFrame.add_prefix: Prefix column labels with string `prefix`.
Examples
--------
>>> s = pd.Series([1, 2, 3, 4])
>>> s
0 1
1 2
2 3
3 4
dtype: int64
>>> s.add_suffix('_item')
0_item 1
1_item 2
2_item 3
3_item 4
dtype: int64
>>> df = pd.DataFrame({'A': [1, 2, 3, 4], 'B': [3, 4, 5, 6]})
>>> df
A B
0 1 3
1 2 4
2 3 5
3 4 6
>>> df.add_suffix('_col')
A_col B_col
0 1 3
1 2 4
2 3 5
3 4 6
"""
f = functools.partial('{}{suffix}'.format, suffix=suffix)
mapper = {self._info_axis_name: f}
return self.rename(**mapper)
    def sort_values(self, by=None, axis=0, ascending=True, inplace=False,
                    kind='quicksort', na_position='last'):
        """
        Sort by the values along either axis.

        Parameters
        ----------%(optional_by)s
        axis : %(axes_single_arg)s, default 0
            Axis to be sorted.
        ascending : bool or list of bool, default True
            Sort ascending vs. descending. Specify list for multiple sort
            orders. If this is a list of bools, must match the length of
            the by.
        inplace : bool, default False
            If True, perform operation in-place.
        kind : {'quicksort', 'mergesort', 'heapsort'}, default 'quicksort'
            Choice of sorting algorithm. See also ndarray.np.sort for more
            information. `mergesort` is the only stable algorithm. For
            DataFrames, this option is only applied when sorting on a single
            column or label.
        na_position : {'first', 'last'}, default 'last'
            Puts NaNs at the beginning if `first`; `last` puts NaNs at the
            end.

        Returns
        -------
        sorted_obj : DataFrame or None
            DataFrame with sorted values if inplace=False, None otherwise.

        Examples
        --------
        >>> df = pd.DataFrame({
        ...     'col1': ['A', 'A', 'B', np.nan, 'D', 'C'],
        ...     'col2': [2, 1, 9, 8, 7, 4],
        ...     'col3': [0, 1, 9, 4, 2, 3],
        ... })
        >>> df
          col1  col2  col3
        0    A     2     0
        1    A     1     1
        2    B     9     9
        3  NaN     8     4
        4    D     7     2
        5    C     4     3

        Sort by col1

        >>> df.sort_values(by=['col1'])
          col1  col2  col3
        0    A     2     0
        1    A     1     1
        2    B     9     9
        5    C     4     3
        4    D     7     2
        3  NaN     8     4

        Sort by multiple columns

        >>> df.sort_values(by=['col1', 'col2'])
          col1  col2  col3
        1    A     1     1
        0    A     2     0
        2    B     9     9
        5    C     4     3
        4    D     7     2
        3  NaN     8     4

        Sort Descending

        >>> df.sort_values(by='col1', ascending=False)
          col1  col2  col3
        4    D     7     2
        5    C     4     3
        2    B     9     9
        0    A     2     0
        1    A     1     1
        3  NaN     8     4

        Putting NAs first

        >>> df.sort_values(by='col1', ascending=False, na_position='first')
          col1  col2  col3
        3  NaN     8     4
        4    D     7     2
        5    C     4     3
        2    B     9     9
        0    A     2     0
        1    A     1     1
        """
        # Base-class stub: concrete axes-aware subclasses override this.
        raise NotImplementedError("sort_values has not been implemented "
                                  "on Panel or Panel4D objects.")
    def sort_index(self, axis=0, level=None, ascending=True, inplace=False,
                   kind='quicksort', na_position='last', sort_remaining=True):
        """
        Sort object by labels (along an axis).

        Parameters
        ----------
        axis : {0 or 'index', 1 or 'columns'}, default 0
            The axis along which to sort. The value 0 identifies the rows,
            and 1 identifies the columns.
        level : int or level name or list of ints or list of level names
            If not None, sort on values in specified index level(s).
        ascending : bool, default True
            Sort ascending vs. descending.
        inplace : bool, default False
            If True, perform operation in-place.
        kind : {'quicksort', 'mergesort', 'heapsort'}, default 'quicksort'
            Choice of sorting algorithm. See also ndarray.np.sort for more
            information. `mergesort` is the only stable algorithm. For
            DataFrames, this option is only applied when sorting on a single
            column or label.
        na_position : {'first', 'last'}, default 'last'
            Puts NaNs at the beginning if `first`; `last` puts NaNs at the end.
            Not implemented for MultiIndex.
        sort_remaining : bool, default True
            If True and sorting by level and index is multilevel, sort by other
            levels too (in order) after sorting by specified level.

        Returns
        -------
        sorted_obj : DataFrame or None
            DataFrame with sorted index if inplace=False, None otherwise.
        """
        inplace = validate_bool_kwarg(inplace, 'inplace')
        axis = self._get_axis_number(axis)
        axis_name = self._get_axis_name(axis)
        labels = self._get_axis(axis)
        # this generic implementation supports neither level-based sorting
        # nor in-place operation; subclasses provide those
        if level is not None:
            raise NotImplementedError("level is not implemented")
        if inplace:
            raise NotImplementedError("inplace is not implemented")
        sort_index = labels.argsort()
        if not ascending:
            sort_index = sort_index[::-1]
        new_axis = labels.take(sort_index)
        return self.reindex(**{axis_name: new_axis})
    def reindex(self, *args, **kwargs):
        """
        Conform %(klass)s to new index with optional filling logic, placing
        NA/NaN in locations having no value in the previous index. A new object
        is produced unless the new index is equivalent to the current one and
        ``copy=False``.

        Parameters
        ----------
        %(optional_labels)s
        %(axes)s : array-like, optional
            New labels / index to conform to, should be specified using
            keywords. Preferably an Index object to avoid duplicating data
        %(optional_axis)s
        method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
            Method to use for filling holes in reindexed DataFrame.
            Please note: this is only applicable to DataFrames/Series with a
            monotonically increasing/decreasing index.

            * None (default): don't fill gaps
            * pad / ffill: propagate last valid observation forward to next
              valid
            * backfill / bfill: use next valid observation to fill gap
            * nearest: use nearest valid observations to fill gap
        copy : bool, default True
            Return a new object, even if the passed indexes are the same.
        level : int or name
            Broadcast across a level, matching Index values on the
            passed MultiIndex level.
        fill_value : scalar, default np.NaN
            Value to use for missing values. Defaults to NaN, but can be any
            "compatible" value.
        limit : int, default None
            Maximum number of consecutive elements to forward or backward fill.
        tolerance : optional
            Maximum distance between original and new labels for inexact
            matches. The values of the index at the matching locations must
            satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
            Tolerance may be a scalar value, which applies the same tolerance
            to all values, or list-like, which applies variable tolerance per
            element. List-like includes list, tuple, array, Series, and must be
            the same size as the index and its dtype must exactly match the
            index's type.

            .. versionadded:: 0.21.0 (list-like tolerance)

        Returns
        -------
        %(klass)s with changed index.

        See Also
        --------
        DataFrame.set_index : Set row labels.
        DataFrame.reset_index : Remove row labels or move them to new columns.
        DataFrame.reindex_like : Change to same indices as other DataFrame.

        Examples
        --------
        ``DataFrame.reindex`` supports two calling conventions

        * ``(index=index_labels, columns=column_labels, ...)``
        * ``(labels, axis={'index', 'columns'}, ...)``

        We *highly* recommend using keyword arguments to clarify your
        intent.

        Create a dataframe with some fictional data.

        >>> index = ['Firefox', 'Chrome', 'Safari', 'IE10', 'Konqueror']
        >>> df = pd.DataFrame({
        ...      'http_status': [200,200,404,404,301],
        ...      'response_time': [0.04, 0.02, 0.07, 0.08, 1.0]},
        ...       index=index)
        >>> df
                   http_status  response_time
        Firefox            200           0.04
        Chrome             200           0.02
        Safari             404           0.07
        IE10               404           0.08
        Konqueror          301           1.00

        Create a new index and reindex the dataframe. By default
        values in the new index that do not have corresponding
        records in the dataframe are assigned ``NaN``.

        >>> new_index= ['Safari', 'Iceweasel', 'Comodo Dragon', 'IE10',
        ...             'Chrome']
        >>> df.reindex(new_index)
                       http_status  response_time
        Safari               404.0           0.07
        Iceweasel              NaN            NaN
        Comodo Dragon          NaN            NaN
        IE10                 404.0           0.08
        Chrome               200.0           0.02

        We can fill in the missing values by passing a value to
        the keyword ``fill_value``. Because the index is not monotonically
        increasing or decreasing, we cannot use arguments to the keyword
        ``method`` to fill the ``NaN`` values.

        >>> df.reindex(new_index, fill_value=0)
                       http_status  response_time
        Safari                 404           0.07
        Iceweasel                0           0.00
        Comodo Dragon            0           0.00
        IE10                   404           0.08
        Chrome                 200           0.02

        >>> df.reindex(new_index, fill_value='missing')
                      http_status response_time
        Safari                404          0.07
        Iceweasel         missing       missing
        Comodo Dragon     missing       missing
        IE10                  404          0.08
        Chrome                200          0.02

        We can also reindex the columns.

        >>> df.reindex(columns=['http_status', 'user_agent'])
                   http_status  user_agent
        Firefox            200         NaN
        Chrome             200         NaN
        Safari             404         NaN
        IE10               404         NaN
        Konqueror          301         NaN

        Or we can use "axis-style" keyword arguments

        >>> df.reindex(['http_status', 'user_agent'], axis="columns")
                   http_status  user_agent
        Firefox            200         NaN
        Chrome             200         NaN
        Safari             404         NaN
        IE10               404         NaN
        Konqueror          301         NaN

        To further illustrate the filling functionality in
        ``reindex``, we will create a dataframe with a
        monotonically increasing index (for example, a sequence
        of dates).

        >>> date_index = pd.date_range('1/1/2010', periods=6, freq='D')
        >>> df2 = pd.DataFrame({"prices": [100, 101, np.nan, 100, 89, 88]},
        ...                    index=date_index)
        >>> df2
                    prices
        2010-01-01   100.0
        2010-01-02   101.0
        2010-01-03     NaN
        2010-01-04   100.0
        2010-01-05    89.0
        2010-01-06    88.0

        Suppose we decide to expand the dataframe to cover a wider
        date range.

        >>> date_index2 = pd.date_range('12/29/2009', periods=10, freq='D')
        >>> df2.reindex(date_index2)
                    prices
        2009-12-29     NaN
        2009-12-30     NaN
        2009-12-31     NaN
        2010-01-01   100.0
        2010-01-02   101.0
        2010-01-03     NaN
        2010-01-04   100.0
        2010-01-05    89.0
        2010-01-06    88.0
        2010-01-07     NaN

        The index entries that did not have a value in the original data frame
        (for example, '2009-12-29') are by default filled with ``NaN``.
        If desired, we can fill in the missing values using one of several
        options.

        For example, to back-propagate the last valid value to fill the ``NaN``
        values, pass ``bfill`` as an argument to the ``method`` keyword.

        >>> df2.reindex(date_index2, method='bfill')
                    prices
        2009-12-29   100.0
        2009-12-30   100.0
        2009-12-31   100.0
        2010-01-01   100.0
        2010-01-02   101.0
        2010-01-03     NaN
        2010-01-04   100.0
        2010-01-05    89.0
        2010-01-06    88.0
        2010-01-07     NaN

        Please note that the ``NaN`` value present in the original dataframe
        (at index value 2010-01-03) will not be filled by any of the
        value propagation schemes. This is because filling while reindexing
        does not look at dataframe values, but only compares the original and
        desired indexes. If you do want to fill in the ``NaN`` values present
        in the original dataframe, use the ``fillna()`` method.

        See the :ref:`user guide <basics.reindexing>` for more.
        """
        # TODO: Decide if we care about having different examples for different
        # kinds
        # construct the args
        axes, kwargs = self._construct_axes_from_arguments(args, kwargs)
        method = missing.clean_reindex_fill_method(kwargs.pop('method', None))
        level = kwargs.pop('level', None)
        copy = kwargs.pop('copy', True)
        limit = kwargs.pop('limit', None)
        tolerance = kwargs.pop('tolerance', None)
        fill_value = kwargs.pop('fill_value', None)
        # Series.reindex doesn't use / need the axis kwarg
        # We pop and ignore it here, to make writing Series/Frame generic code
        # easier
        kwargs.pop("axis", None)
        # anything left over is an unsupported keyword
        if kwargs:
            raise TypeError('reindex() got an unexpected keyword '
                            'argument "{0}"'.format(list(kwargs.keys())[0]))
        self._consolidate_inplace()
        # if all axes that are requested to reindex are equal, then only copy
        # if indicated must have index names equal here as well as values
        if all(self._get_axis(axis).identical(ax)
               for axis, ax in axes.items() if ax is not None):
            if copy:
                return self.copy()
            return self
        # check if we are a multi reindex
        if self._needs_reindex_multi(axes, method, level):
            try:
                return self._reindex_multi(axes, copy, fill_value)
            except Exception:
                # fall back to the generic axis-by-axis path
                pass
        # perform the reindex on the axes
        return self._reindex_axes(axes, level, limit, tolerance, method,
                                  fill_value, copy).__finalize__(self)
def _reindex_axes(self, axes, level, limit, tolerance, method, fill_value,
copy):
"""Perform the reindex for all the axes."""
obj = self
for a in self._AXIS_ORDERS:
labels = axes[a]
if labels is None:
continue
ax = self._get_axis(a)
new_index, indexer = ax.reindex(labels, level=level, limit=limit,
tolerance=tolerance, method=method)
axis = self._get_axis_number(a)
obj = obj._reindex_with_indexers({axis: [new_index, indexer]},
fill_value=fill_value,
copy=copy, allow_dups=False)
return obj
def _needs_reindex_multi(self, axes, method, level):
"""Check if we do need a multi reindex."""
return ((com.count_not_none(*axes.values()) == self._AXIS_LEN) and
method is None and level is None and not self._is_mixed_type)
    def _reindex_multi(self, axes, copy, fill_value):
        # Overridden by subclasses that support a fast multi-axis reindex;
        # the base implementation signals "not supported".
        return NotImplemented
_shared_docs['reindex_axis'] = ("""
Conform input object to new index.
.. deprecated:: 0.21.0
Use `reindex` instead.
By default, places NaN in locations having no value in the
previous index. A new object is produced unless the new index
is equivalent to the current one and copy=False.
Parameters
----------
labels : array-like
New labels / index to conform to. Preferably an Index object to
avoid duplicating data.
axis : %(axes_single_arg)s
Indicate whether to use rows or columns.
method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}, optional
Method to use for filling holes in reindexed DataFrame:
* default: don't fill gaps.
* pad / ffill: propagate last valid observation forward to next
valid.
* backfill / bfill: use next valid observation to fill gap.
* nearest: use nearest valid observations to fill gap.
level : int or str
Broadcast across a level, matching Index values on the
passed MultiIndex level.
copy : bool, default True
Return a new object, even if the passed indexes are the same.
limit : int, optional
Maximum number of consecutive elements to forward or backward fill.
fill_value : float, default NaN
Value used to fill in locations having no value in the previous
index.
.. versionadded:: 0.21.0 (list-like tolerance)
Returns
-------
%(klass)s
Returns a new DataFrame object with new indices, unless the new
index is equivalent to the current one and copy=False.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex : Change to new indices or expand indices.
DataFrame.reindex_like : Change to same indices as other DataFrame.
Examples
--------
>>> df = pd.DataFrame({'num_legs': [4, 2], 'num_wings': [0, 2]},
... index=['dog', 'hawk'])
>>> df
num_legs num_wings
dog 4 0
hawk 2 2
>>> df.reindex(['num_wings', 'num_legs', 'num_heads'],
... axis='columns')
num_wings num_legs num_heads
dog 0 4 NaN
hawk 2 2 NaN
""")
@Appender(_shared_docs['reindex_axis'] % _shared_doc_kwargs)
def reindex_axis(self, labels, axis=0, method=None, level=None, copy=True,
                 limit=None, fill_value=None):
    # Deprecated single-axis reindex; kept as a thin shim over
    # _reindex_with_indexers. See the shared docstring template above.
    deprecation_msg = ("'.reindex_axis' is deprecated and will be removed "
                       "in a future "
                       "version. Use '.reindex' instead.")
    self._consolidate_inplace()
    target = self._get_axis(self._get_axis_name(axis))
    method = missing.clean_reindex_fill_method(method)
    warnings.warn(deprecation_msg, FutureWarning, stacklevel=3)
    new_index, indexer = target.reindex(labels, method, level, limit=limit)
    return self._reindex_with_indexers(
        {axis: [new_index, indexer]}, fill_value=fill_value, copy=copy)
def _reindex_with_indexers(self, reindexers, fill_value=None, copy=False,
                           allow_dups=False):
    """Reindex the underlying block manager along one or more axes.

    Parameters
    ----------
    reindexers : dict of {axis number -> [new_index, indexer]}
        An ``index`` of None skips that axis; an ``indexer`` of None
        means the new index is taken as-is.
    fill_value : object, default None
        Value placed in locations introduced by the reindex.
    copy : bool, default False
        Force a copy even when no axis actually changed.
    allow_dups : bool, default False
        allow_dups indicates an internal call here; passed straight
        through to ``reindex_indexer``.

    Returns
    -------
    Same type as self, constructed from the reindexed block manager.
    """
    # reindex doing multiple operations on different axes if indicated
    new_data = self._data
    for axis in sorted(reindexers.keys()):
        index, indexer = reindexers[axis]
        # translate the public axis number into block-manager orientation
        baxis = self._get_block_manager_axis(axis)
        if index is None:
            continue
        index = ensure_index(index)
        if indexer is not None:
            indexer = ensure_int64(indexer)
        # TODO: speed up on homogeneous DataFrame objects
        new_data = new_data.reindex_indexer(index, indexer, axis=baxis,
                                            fill_value=fill_value,
                                            allow_dups=allow_dups,
                                            copy=copy)
    # honor the copy request even if every axis was a no-op above
    if copy and new_data is self._data:
        new_data = new_data.copy()
    return self._constructor(new_data).__finalize__(self)
def filter(self, items=None, like=None, regex=None, axis=None):
    """
    Subset rows or columns of dataframe according to labels in
    the specified index.

    The filter is applied to the labels of the axis, never to the
    contents. Exactly one of ``items``, ``like`` and ``regex`` may be
    supplied.

    Parameters
    ----------
    items : list-like
        Keep the axis labels that are present in this list.
    like : string
        Keep axis labels for which ``like in label`` is True.
    regex : string (regular expression)
        Keep axis labels for which ``re.search(regex, label)`` matches.
    axis : int or string axis name
        The axis to filter on. By default this is the info axis,
        'index' for Series, 'columns' for DataFrame.

    Returns
    -------
    same type as input object

    See Also
    --------
    DataFrame.loc
    """
    import re

    if com.count_not_none(items, like, regex) > 1:
        raise TypeError('Keyword arguments `items`, `like`, or `regex` '
                        'are mutually exclusive')

    if axis is None:
        axis = self._info_axis_name
    labels = self._get_axis(axis)

    if items is not None:
        axis_name = self._get_axis_name(axis)
        kept = [label for label in items if label in labels]
        return self.reindex(**{axis_name: kept})
    if like:
        mask = labels.map(lambda label: like in to_str(label))
        return self.loc(axis=axis)[mask]
    if regex:
        matcher = re.compile(regex)
        mask = labels.map(
            lambda label: matcher.search(to_str(label)) is not None)
        return self.loc(axis=axis)[mask]
    raise TypeError('Must pass either `items`, `like`, or `regex`')
def head(self, n=5):
    """
    Return the first `n` rows.

    Selection is purely positional; useful for quickly checking that
    an object holds the right kind of data.

    Parameters
    ----------
    n : int, default 5
        Number of rows to select.

    Returns
    -------
    obj_head : same type as caller
        The first `n` rows of the caller object.

    See Also
    --------
    DataFrame.tail: Returns the last `n` rows.
    """
    first_rows = self.iloc[:n]
    return first_rows
def tail(self, n=5):
    """
    Return the last `n` rows.

    Selection is purely positional; handy for verifying data after
    sorting or appending rows.

    Parameters
    ----------
    n : int, default 5
        Number of rows to select.

    Returns
    -------
    type of caller
        The last `n` rows of the caller object.

    See Also
    --------
    DataFrame.head : The first `n` rows of the caller object.
    """
    # ``iloc[-0:]`` would select everything, so n == 0 is special-cased
    # to return an empty slice.
    return self.iloc[0:0] if n == 0 else self.iloc[-n:]
def sample(self, n=None, frac=None, replace=False, weights=None,
           random_state=None, axis=None):
    """
    Return a random sample of items from an axis of object.

    You can use `random_state` for reproducibility.

    Parameters
    ----------
    n : int, optional
        Number of items from axis to return. Cannot be used with `frac`.
        Default = 1 if `frac` = None.
    frac : float, optional
        Fraction of axis items to return. Cannot be used with `n`.
    replace : bool, default False
        Sample with or without replacement.
    weights : str or ndarray-like, optional
        Default 'None' results in equal probability weighting. A Series
        is aligned with the sampled axis (labels missing from the
        weights get weight zero). A string names a DataFrame column and
        is only valid when sampling rows (axis=0). Weights must match
        the sampled axis length, may not contain inf/-inf or negative
        values; NaNs are treated as zero and the vector is normalized
        to sum to 1.
    random_state : int or numpy.random.RandomState, optional
        Seed for the random number generator (if int), or numpy
        RandomState object.
    axis : int or string, optional
        Axis to sample. Accepts axis number or name. Default is stat
        axis for given data type (0 for Series and DataFrames, 1 for
        Panels).

    Returns
    -------
    Series or DataFrame
        A new object of same type as caller containing `n` items
        randomly sampled from the caller object.

    See Also
    --------
    numpy.random.choice: Generates a random sample from a given 1-D numpy
        array.
    """
    if axis is None:
        axis = self._stat_axis_number

    axis = self._get_axis_number(axis)
    axis_length = self.shape[axis]

    # Process random_state argument
    rs = com.random_state(random_state)

    # Check weights for compliance
    if weights is not None:

        # If a series, align with frame
        if isinstance(weights, pd.Series):
            weights = weights.reindex(self.axes[axis])

        # Strings acceptable if a dataframe and axis = 0
        if isinstance(weights, string_types):
            if isinstance(self, pd.DataFrame):
                if axis == 0:
                    try:
                        weights = self[weights]
                    except KeyError:
                        raise KeyError("String passed to weights not a "
                                       "valid column")
                else:
                    raise ValueError("Strings can only be passed to "
                                     "weights when sampling from rows on "
                                     "a DataFrame")
            else:
                raise ValueError("Strings cannot be passed as weights "
                                 "when sampling from a Series or Panel.")

        weights = pd.Series(weights, dtype='float64')

        if len(weights) != axis_length:
            raise ValueError("Weights and axis to be sampled must be of "
                             "same length")

        if (weights == np.inf).any() or (weights == -np.inf).any():
            raise ValueError("weight vector may not include `inf` values")

        if (weights < 0).any():
            # BUG FIX: error message previously read "many not".
            raise ValueError("weight vector may not include negative "
                             "values")

        # If has nan, set to zero.
        weights = weights.fillna(0)

        # Renormalize if don't sum to 1
        if weights.sum() != 1:
            if weights.sum() != 0:
                weights = weights / weights.sum()
            else:
                raise ValueError("Invalid weights: weights sum to zero")

        weights = weights.values

    # If no frac or n, default to n=1.
    if n is None and frac is None:
        n = 1
    elif n is not None and frac is None and n % 1 != 0:
        raise ValueError("Only integers accepted as `n` values")
    elif n is None and frac is not None:
        n = int(round(frac * axis_length))
    elif n is not None and frac is not None:
        raise ValueError('Please enter a value for `frac` OR `n`, not '
                         'both')

    # Check for negative sizes
    if n < 0:
        raise ValueError("A negative number of rows requested. Please "
                         "provide positive value.")

    locs = rs.choice(axis_length, size=n, replace=replace, p=weights)
    return self.take(locs, axis=axis, is_copy=False)
_shared_docs['pipe'] = (r"""
Apply func(self, \*args, \*\*kwargs).
Parameters
----------
func : function
function to apply to the %(klass)s.
``args``, and ``kwargs`` are passed into ``func``.
Alternatively a ``(callable, data_keyword)`` tuple where
``data_keyword`` is a string indicating the keyword of
``callable`` that expects the %(klass)s.
args : iterable, optional
positional arguments passed into ``func``.
kwargs : mapping, optional
a dictionary of keyword arguments passed into ``func``.
Returns
-------
object : the return type of ``func``.
See Also
--------
DataFrame.apply
DataFrame.applymap
Series.map
Notes
-----
Use ``.pipe`` when chaining together functions that expect
Series, DataFrames or GroupBy objects. Instead of writing
>>> f(g(h(df), arg1=a), arg2=b, arg3=c)
You can write
>>> (df.pipe(h)
... .pipe(g, arg1=a)
... .pipe(f, arg2=b, arg3=c)
... )
If you have a function that takes the data as (say) the second
argument, pass a tuple indicating which keyword expects the
data. For example, suppose ``f`` takes its data as ``arg2``:
>>> (df.pipe(h)
... .pipe(g, arg1=a)
... .pipe((f, 'arg2'), arg1=a, arg3=c)
... )
""")
@Appender(_shared_docs['pipe'] % _shared_doc_kwargs)
def pipe(self, func, *args, **kwargs):
    # Thin wrapper: all logic, including the (callable, data_keyword)
    # tuple form described in the shared template, lives in com._pipe.
    return com._pipe(self, func, *args, **kwargs)
_shared_docs['aggregate'] = dedent("""
Aggregate using one or more operations over the specified axis.
%(versionadded)s
Parameters
----------
func : function, str, list or dict
Function to use for aggregating the data. If a function, must either
work when passed a %(klass)s or when passed to %(klass)s.apply.
Accepted combinations are:
- function
- string function name
- list of functions and/or function names, e.g. ``[np.sum, 'mean']``
- dict of axis labels -> functions, function names or list of such.
%(axis)s
*args
Positional arguments to pass to `func`.
**kwargs
Keyword arguments to pass to `func`.
Returns
-------
DataFrame, Series or scalar
If DataFrame.agg is called with a single function, returns a Series
If DataFrame.agg is called with several functions, returns a DataFrame
If Series.agg is called with single function, returns a scalar
If Series.agg is called with several functions, returns a Series
%(see_also)s
Notes
-----
`agg` is an alias for `aggregate`. Use the alias.
A passed user-defined-function will be passed a Series for evaluation.
%(examples)s
""")
# Shared docstring template for ``.transform``; %(klass)s and %(axis)s
# are filled per class.
# FIX(review): the example list previously read ``[np.exp. 'sqrt']``
# (a period instead of a comma) -- corrected below.
_shared_docs['transform'] = ("""
    Call ``func`` on self producing a %(klass)s with transformed values
    and that has the same axis length as self.
    .. versionadded:: 0.20.0
    Parameters
    ----------
    func : function, str, list or dict
        Function to use for transforming the data. If a function, must either
        work when passed a %(klass)s or when passed to %(klass)s.apply.
        Accepted combinations are:
        - function
        - string function name
        - list of functions and/or function names, e.g. ``[np.exp, 'sqrt']``
        - dict of axis labels -> functions, function names or list of such.
    %(axis)s
    *args
        Positional arguments to pass to `func`.
    **kwargs
        Keyword arguments to pass to `func`.
    Returns
    -------
    %(klass)s
        A %(klass)s that must have the same length as self.
    Raises
    ------
    ValueError : If the returned %(klass)s has a different length than self.
    See Also
    --------
    %(klass)s.agg : Only perform aggregating type operations.
    %(klass)s.apply : Invoke function on a %(klass)s.
    Examples
    --------
    >>> df = pd.DataFrame({'A': range(3), 'B': range(1, 4)})
    >>> df
       A  B
    0  0  1
    1  1  2
    2  2  3
    >>> df.transform(lambda x: x + 1)
       A  B
    0  1  2
    1  2  3
    2  3  4
    Even though the resulting %(klass)s must have the same length as the
    input %(klass)s, it is possible to provide several input functions:
    >>> s = pd.Series(range(3))
    >>> s
    0    0
    1    1
    2    2
    dtype: int64
    >>> s.transform([np.sqrt, np.exp])
           sqrt        exp
    0  0.000000   1.000000
    1  1.000000   2.718282
    2  1.414214   7.389056
    """)
# ----------------------------------------------------------------------
# Attribute access
def __finalize__(self, other, method=None, **kwargs):
    """
    Propagate metadata from *other* to *self* and return *self*.

    Parameters
    ----------
    other : the object from which to get the attributes that we are going
        to propagate
    method : optional, a passed method name ; possibly to take different
        types of propagation actions based on this
    """
    # Only NDFrame sources carry propagatable metadata; anything else
    # (e.g. a BlockManager) is ignored.
    if isinstance(other, NDFrame):
        for attr in self._metadata:
            object.__setattr__(self, attr, getattr(other, attr, None))
    return self
def __getattr__(self, name):
    """After regular attribute access, try looking up the name

    This allows simpler access to columns for interactive use: a miss
    on normal attribute lookup falls back to item access (``self[name]``)
    when the info axis can hold the name as an identifier.
    """
    # Note: obj.x will always call obj.__getattribute__('x') prior to
    # calling obj.__getattr__('x').
    if (name in self._internal_names_set or name in self._metadata or
            name in self._accessors):
        return object.__getattribute__(self, name)
    else:
        if self._info_axis._can_hold_identifiers_and_holds_name(name):
            # e.g. ``df.colname`` for an existing column label
            return self[name]
        return object.__getattribute__(self, name)
def __setattr__(self, name, value):
    """After regular attribute access, try setting the name

    This allows simpler access to columns for interactive use:
    assigning to a label present on the info axis is routed through
    item assignment, while unknown names become plain instance
    attributes (with a warning for list-likes on DataFrame).
    """
    # first try regular attribute access via __getattribute__, so that
    # e.g. ``obj.x`` and ``obj.x = 4`` will always reference/modify
    # the same attribute.
    try:
        object.__getattribute__(self, name)
        return object.__setattr__(self, name, value)
    except AttributeError:
        pass
    # if this fails, go on to more involved attribute setting
    # (note that this matches __getattr__, above).
    if name in self._internal_names_set:
        object.__setattr__(self, name, value)
    elif name in self._metadata:
        object.__setattr__(self, name, value)
    else:
        try:
            existing = getattr(self, name)
            if isinstance(existing, Index):
                # axis objects (e.g. ``.index``) stay plain attributes
                object.__setattr__(self, name, value)
            elif name in self._info_axis:
                # a label on the info axis: route through __setitem__
                self[name] = value
            else:
                object.__setattr__(self, name, value)
        except (AttributeError, TypeError):
            # creating a column via a brand-new attribute name is not
            # supported; warn but still set the attribute.
            if isinstance(self, ABCDataFrame) and (is_list_like(value)):
                warnings.warn("Pandas doesn't allow columns to be "
                              "created via a new attribute name - see "
                              "https://pandas.pydata.org/pandas-docs/"
                              "stable/indexing.html#attribute-access",
                              stacklevel=2)
            object.__setattr__(self, name, value)
def _dir_additions(self):
    """Add the string-like attributes from the info_axis.

    If info_axis is a MultiIndex, its first level values are used.
    Capped at the first 100 unique labels.
    """
    labels = self._info_axis.unique(level=0)[:100]
    additions = {label for label in labels
                 if isinstance(label, string_types) and isidentifier(label)}
    return super(NDFrame, self)._dir_additions().union(additions)
# ----------------------------------------------------------------------
# Getting and setting elements
# ----------------------------------------------------------------------
# Consolidation of internals
def _protect_consolidate(self, f):
    """Run ``f`` and clear the item cache if the block layout changed.

    Consolidation can merge blocks inside ``_data``; any cached items
    would then be stale, so the cache is invalidated.
    """
    n_blocks_before = len(self._data.blocks)
    result = f()
    if len(self._data.blocks) != n_blocks_before:
        self._clear_item_cache()
    return result
def _consolidate_inplace(self):
    """Consolidate data in place and return None."""
    def _do_consolidate():
        self._data = self._data.consolidate()

    self._protect_consolidate(_do_consolidate)
def _consolidate(self, inplace=False):
    """
    Compute NDFrame with "consolidated" internals (data of each dtype
    grouped together in a single ndarray).

    Parameters
    ----------
    inplace : boolean, default False
        If False return new object, otherwise modify existing object

    Returns
    -------
    consolidated : same type as caller (None when ``inplace=True``)
    """
    inplace = validate_bool_kwarg(inplace, 'inplace')
    if inplace:
        self._consolidate_inplace()
        return None
    consolidated = self._protect_consolidate(
        lambda: self._data.consolidate())
    return self._constructor(consolidated).__finalize__(self)
@property
def _is_mixed_type(self):
    # Delegates to the block manager; wrapped in _protect_consolidate
    # so a consolidation side effect also clears the item cache.
    return self._protect_consolidate(lambda: self._data.is_mixed_type)
@property
def _is_numeric_mixed_type(self):
    # Block-manager flag, read through _protect_consolidate (see above).
    return self._protect_consolidate(
        lambda: self._data.is_numeric_mixed_type)
@property
def _is_datelike_mixed_type(self):
    # Block-manager flag, read through _protect_consolidate (see above).
    return self._protect_consolidate(
        lambda: self._data.is_datelike_mixed_type)
def _check_inplace_setting(self, value):
    """Check whether we allow in-place setting with this type of value.

    Returns True when allowed; raises TypeError for non-NaN values on
    mixed, non-numeric frames.
    """
    if self._is_mixed_type and not self._is_numeric_mixed_type:
        # allow an actual np.nan through; np.isnan raises on
        # non-numeric input, which we treat as "not NaN"
        try:
            if np.isnan(value):
                return True
        except Exception:
            pass
        raise TypeError('Cannot do inplace boolean setting on '
                        'mixed-types with a non np.nan value')
    return True
def _get_numeric_data(self):
    # New object holding only the numeric-dtype blocks.
    numeric = self._data.get_numeric_data()
    return self._constructor(numeric).__finalize__(self)
def _get_bool_data(self):
    # New object holding only the boolean-dtype blocks.
    booleans = self._data.get_bool_data()
    return self._constructor(booleans).__finalize__(self)
# ----------------------------------------------------------------------
# Internal Interface Methods
def as_matrix(self, columns=None):
    """
    Convert the frame to its Numpy-array representation.

    .. deprecated:: 0.23.0
        Use :meth:`DataFrame.values` instead.

    Parameters
    ----------
    columns : list, optional, default:None
        If None, return all columns, otherwise, returns specified columns.

    Returns
    -------
    values : ndarray
        The dtype is the lowest common denominator of the selected
        columns' dtypes (object when booleans or objects are mixed in).
        Return is NOT a Numpy-matrix, rather, a Numpy-array.

    See Also
    --------
    DataFrame.values
    """
    warnings.warn("Method .as_matrix will be removed in a future version. "
                  "Use .values instead.", FutureWarning, stacklevel=2)
    self._consolidate_inplace()
    return self._data.as_array(transpose=self._AXIS_REVERSED,
                               items=columns)
@property
def values(self):
    """
    Return a Numpy representation of the DataFrame.

    .. warning::
        We recommend using :meth:`DataFrame.to_numpy` instead.

    Only the values are returned; the axis labels are dropped. The
    dtype is the lowest common denominator of the column dtypes
    (implicit upcasting): mixed float16/float32 gives float32, mixed
    int32/uint8 gives int32, int64 with uint64 gives float64, and
    anything mixed with str/object gives an object array.

    Returns
    -------
    numpy.ndarray
        The values of the DataFrame.

    See Also
    --------
    DataFrame.to_numpy : Recommended alternative to this method.
    DataFrame.index : Retrieve the index labels.
    DataFrame.columns : Retrieving the column names.
    """
    # Consolidate first so the block manager can produce a single array.
    self._consolidate_inplace()
    return self._data.as_array(transpose=self._AXIS_REVERSED)
@property
def _values(self):
    """internal implementation; alias of ``.values``"""
    return self.values
@property
def _get_values(self):
    # compat: legacy property alias of ``.values``
    return self.values
def get_values(self):
    """
    Return an ndarray after converting sparse values to dense.

    This is the same as ``.values`` for non-sparse data. For sparse
    data contained in a `SparseArray`, the data are first converted
    to a dense representation.

    Returns
    -------
    numpy.ndarray
        Numpy representation of DataFrame

    See Also
    --------
    values : Numpy representation of DataFrame.
    SparseArray : Container for sparse data.
    """
    return self.values
def get_dtype_counts(self):
    """
    Return counts of unique dtypes in this object.

    Returns
    -------
    dtype : Series
        Series with the count of columns with each dtype, indexed by
        dtype name.

    See Also
    --------
    dtypes : Return the dtypes in this object.
    """
    from pandas import Series
    counts = self._data.get_dtype_counts()
    return Series(counts)
def get_ftype_counts(self):
    """
    Return counts of unique ftypes in this object.

    .. deprecated:: 0.23.0

    Useful for SparseDataFrame or for DataFrames containing sparse
    arrays.

    Returns
    -------
    dtype : Series
        Series with the count of columns with each type and
        sparsity (dense/sparse)

    See Also
    --------
    ftypes : Return ftypes (indication of sparse/dense and dtype) in
        this object.
    """
    warnings.warn("get_ftype_counts is deprecated and will "
                  "be removed in a future version",
                  FutureWarning, stacklevel=2)
    from pandas import Series
    counts = self._data.get_ftype_counts()
    return Series(counts)
@property
def dtypes(self):
    """
    Return the dtypes in the DataFrame.

    The result is a Series with the data type of each column, indexed
    by the original DataFrame's columns. Columns with mixed types are
    stored with the ``object`` dtype.

    Returns
    -------
    pandas.Series
        The data type of each column.

    See Also
    --------
    DataFrame.ftypes : Dtype and sparsity information.
    """
    from pandas import Series
    per_column = self._data.get_dtypes()
    return Series(per_column, index=self._info_axis, dtype=np.object_)
@property
def ftypes(self):
    """
    Return the ftypes (indication of sparse/dense and dtype) in DataFrame.

    The result is a Series indexed by the original DataFrame's columns;
    columns with mixed types are stored with the ``object`` dtype.
    Sparse data should have the same dtypes as its dense representation.

    Returns
    -------
    pandas.Series
        The data type and indication of sparse/dense of each column.

    See Also
    --------
    DataFrame.dtypes: Series with just dtype information.
    SparseDataFrame : Container for sparse tabular data.
    """
    from pandas import Series
    per_column = self._data.get_ftypes()
    return Series(per_column, index=self._info_axis, dtype=np.object_)
def as_blocks(self, copy=True):
    """
    Convert the frame to a dict of dtype -> Constructor Types that each has
    a homogeneous dtype.

    .. deprecated:: 0.21.0

    NOTE: the dtypes of the blocks WILL BE PRESERVED HERE (unlike in
    as_matrix)

    Parameters
    ----------
    copy : boolean, default True

    Returns
    -------
    values : a dict of dtype -> Constructor Types
    """
    warnings.warn("as_blocks is deprecated and will "
                  "be removed in a future version",
                  FutureWarning, stacklevel=2)
    return self._to_dict_of_blocks(copy=copy)
@property
def blocks(self):
    """
    Internal property, property synonym for as_blocks().

    .. deprecated:: 0.21.0
    """
    # Inherits the FutureWarning raised inside as_blocks().
    return self.as_blocks()
def _to_dict_of_blocks(self, copy=True):
    """
    Return a dict of dtype -> Constructor Types that
    each is a homogeneous dtype.

    Internal ONLY
    """
    block_dict = self._data.to_dict(copy=copy)
    return {dtype: self._constructor(data).__finalize__(self)
            for dtype, data in block_dict.items()}
def astype(self, dtype, copy=True, errors='raise', **kwargs):
    """
    Cast a pandas object to a specified dtype ``dtype``.

    Parameters
    ----------
    dtype : data type, or dict of column name -> data type
        Use a numpy.dtype or Python type to cast entire pandas object to
        the same type. Alternatively, use {col: dtype, ...}, where col is a
        column label and dtype is a numpy.dtype or Python type to cast one
        or more of the DataFrame's columns to column-specific types.
    copy : bool, default True
        Return a copy when ``copy=True`` (be very careful setting
        ``copy=False`` as changes to values then may propagate to other
        pandas objects).
    errors : {'raise', 'ignore'}, default 'raise'
        Control raising of exceptions on invalid data for provided dtype.
        - ``raise`` : allow exceptions to be raised
        - ``ignore`` : suppress exceptions. On error return original object
        .. versionadded:: 0.20.0
    kwargs : keyword arguments to pass on to the constructor

    Returns
    -------
    casted : same type as caller

    See Also
    --------
    to_datetime : Convert argument to datetime.
    to_timedelta : Convert argument to timedelta.
    to_numeric : Convert argument to a numeric type.
    numpy.ndarray.astype : Cast a numpy array to a specified type.
    """
    if is_dict_like(dtype):
        if self.ndim == 1:  # i.e. Series
            if len(dtype) > 1 or self.name not in dtype:
                raise KeyError('Only the Series name can be used for '
                               'the key in Series dtype mappings.')
            new_type = dtype[self.name]
            return self.astype(new_type, copy, errors, **kwargs)
        elif self.ndim > 2:
            raise NotImplementedError(
                'astype() only accepts a dtype arg of type dict when '
                'invoked on Series and DataFrames. A single dtype must be '
                'specified when invoked on a Panel.'
            )

        # Validate every key up front so we fail before doing any work.
        for col_name in dtype.keys():
            if col_name not in self:
                raise KeyError('Only a column name can be used for the '
                               'key in a dtype mappings argument.')

        results = []
        # .items() rather than the deprecated .iteritems()
        for col_name, col in self.items():
            if col_name in dtype:
                results.append(col.astype(dtype[col_name], copy=copy))
            else:
                # BUG FIX: was ``results.append(results.append(...))``,
                # which appended the column AND a stray ``None``.
                results.append(col.copy() if copy else col)

    elif is_extension_array_dtype(dtype) and self.ndim > 1:
        # GH 18099/22869: columnwise conversion to extension dtype
        # GH 24704: use iloc to handle duplicate column names
        results = (self.iloc[:, i].astype(dtype, copy=copy)
                   for i in range(len(self.columns)))

    else:
        # else, only a single dtype is given
        new_data = self._data.astype(dtype=dtype, copy=copy, errors=errors,
                                     **kwargs)
        return self._constructor(new_data).__finalize__(self)

    # GH 19920: retain column metadata after concat
    result = pd.concat(results, axis=1, copy=False)
    result.columns = self.columns
    return result
def copy(self, deep=True):
    """
    Make a copy of this object's indices and data.

    With ``deep=True`` (default), a new object is created with a copy
    of the calling object's data and indices; modifications to the
    copy are not reflected in the original. With ``deep=False`` only
    references are copied, so changes to the shared data appear in
    both objects.

    Note that a deep copy is not recursive: contained Python objects
    are shared by reference, and the underlying numpy array of an
    ``Index`` is never duplicated (indexes are immutable, so sharing
    is safe).

    Parameters
    ----------
    deep : bool, default True
        Make a deep copy, including a copy of the data and the indices.
        With ``deep=False`` neither the indices nor the data are copied.

    Returns
    -------
    copy : Series, DataFrame or Panel
        Object type matches caller.
    """
    copied_data = self._data.copy(deep=deep)
    return self._constructor(copied_data).__finalize__(self)
def __copy__(self, deep=True):
    """
    Support for :func:`copy.copy`; delegates to :meth:`copy`.

    Parameters
    ----------
    deep : bool, default True
        Passed through to :meth:`copy`.
    """
    copied = self.copy(deep=deep)
    return copied
def __deepcopy__(self, memo=None):
    """
    Support for :func:`copy.deepcopy`; always performs a deep copy.

    Parameters
    ----------
    memo, default None
        Standard signature. Unused
    """
    # ``memo`` is part of the __deepcopy__ protocol but is not consulted
    # here; a fresh deep copy is returned unconditionally.
    memo = {} if memo is None else memo
    return self.copy(deep=True)
def _convert(self, datetime=False, numeric=False, timedelta=False,
             coerce=False, copy=True):
    """
    Attempt to infer better dtype for object columns.

    Parameters
    ----------
    datetime : boolean, default False
        If True, convert to date where possible.
    numeric : boolean, default False
        If True, attempt to convert to numbers (including strings), with
        unconvertible values becoming NaN.
    timedelta : boolean, default False
        If True, convert to timedelta where possible.
    coerce : boolean, default False
        If True, force conversion with unconvertible values converted to
        nulls (NaN or NaT)
    copy : boolean, default True
        If True, return a copy even if no copy is necessary (e.g. no
        conversion was done). Note: This is meant for internal use, and
        should not be confused with inplace.

    Returns
    -------
    converted : same as input object
    """
    # Delegate the per-block dtype inference to the internal manager,
    # then re-wrap the result and propagate metadata from ``self``.
    converted = self._data.convert(datetime=datetime, numeric=numeric,
                                   timedelta=timedelta, coerce=coerce,
                                   copy=copy)
    return self._constructor(converted).__finalize__(self)
def convert_objects(self, convert_dates=True, convert_numeric=False,
                    convert_timedeltas=True, copy=True):
    """
    Attempt to infer better dtype for object columns.

    .. deprecated:: 0.21.0

    Parameters
    ----------
    convert_dates : boolean, default True
        If True, convert to date where possible. If 'coerce', force
        conversion, with unconvertible values becoming NaT.
    convert_numeric : boolean, default False
        If True, attempt to coerce to numbers (including strings), with
        unconvertible values becoming NaN.
    convert_timedeltas : boolean, default True
        If True, convert to timedelta where possible. If 'coerce', force
        conversion, with unconvertible values becoming NaT.
    copy : boolean, default True
        If True, return a copy even if no copy is necessary (e.g. no
        conversion was done). Note: This is meant for internal use, and
        should not be confused with inplace.

    Returns
    -------
    converted : same as input object

    See Also
    --------
    to_datetime : Convert argument to datetime.
    to_timedelta : Convert argument to timedelta.
    to_numeric : Convert argument to numeric type.
    """
    # Deprecated entry point: warn first, then delegate the actual
    # conversion to the internal block manager.
    klass = self.__class__.__name__
    msg = ("convert_objects is deprecated. To re-infer data dtypes for "
           "object columns, use {klass}.infer_objects()\nFor all "
           "other conversions use the data-type specific converters "
           "pd.to_datetime, pd.to_timedelta and pd.to_numeric."
           ).format(klass=klass)
    warnings.warn(msg, FutureWarning, stacklevel=2)

    converted = self._data.convert(convert_dates=convert_dates,
                                   convert_numeric=convert_numeric,
                                   convert_timedeltas=convert_timedeltas,
                                   copy=copy)
    return self._constructor(converted).__finalize__(self)
def infer_objects(self):
    """
    Attempt to infer better dtypes for object columns.

    Attempts soft conversion of object-dtyped columns, leaving
    non-object and unconvertible columns unchanged. The inference
    rules are the same as during normal Series/DataFrame construction.

    .. versionadded:: 0.21.0

    Returns
    -------
    converted : same type as input object

    See Also
    --------
    to_datetime : Convert argument to datetime.
    to_timedelta : Convert argument to timedelta.
    to_numeric : Convert argument to numeric type.

    Examples
    --------
    >>> df = pd.DataFrame({"A": ["a", 1, 2, 3]})
    >>> df = df.iloc[1:]
    >>> df
       A
    1  1
    2  2
    3  3

    >>> df.dtypes
    A    object
    dtype: object

    >>> df.infer_objects().dtypes
    A    int64
    dtype: object
    """
    # numeric=False necessary to only soft convert;
    # python objects will still be converted to
    # native numpy numeric types
    inferred = self._data.convert(datetime=True, numeric=False,
                                  timedelta=True, coerce=False,
                                  copy=True)
    return self._constructor(inferred).__finalize__(self)
# ----------------------------------------------------------------------
# Filling NA's
def fillna(self, value=None, method=None, axis=None, inplace=False,
           limit=None, downcast=None):
    """
    Fill NA/NaN values using the specified method.

    Parameters
    ----------
    value : scalar, dict, Series, or DataFrame
        Value to use to fill holes (e.g. 0), alternately a
        dict/Series/DataFrame of values specifying which value to use for
        each index (for a Series) or column (for a DataFrame). (values not
        in the dict/Series/DataFrame will not be filled). This value cannot
        be a list.
    method : {'backfill', 'bfill', 'pad', 'ffill', None}, default None
        Method to use for filling holes in reindexed Series
        pad / ffill: propagate last valid observation forward to next valid
        backfill / bfill: use NEXT valid observation to fill gap
    axis : %(axes_single_arg)s
    inplace : boolean, default False
        If True, fill in place. Note: this will modify any
        other views on this object, (e.g. a no-copy slice for a column in a
        DataFrame).
    limit : int, default None
        If method is specified, this is the maximum number of consecutive
        NaN values to forward/backward fill. In other words, if there is
        a gap with more than this number of consecutive NaNs, it will only
        be partially filled. If method is not specified, this is the
        maximum number of entries along the entire axis where NaNs will be
        filled. Must be greater than 0 if not None.
    downcast : dict, default is None
        a dict of item->dtype of what to downcast if possible,
        or the string 'infer' which will try to downcast to an appropriate
        equal type (e.g. float64 to int64 if possible)

    Returns
    -------
    filled : %(klass)s

    See Also
    --------
    interpolate : Fill NaN values using interpolation.
    reindex, asfreq

    Examples
    --------
    >>> df = pd.DataFrame([[np.nan, 2, np.nan, 0],
    ...                    [3, 4, np.nan, 1],
    ...                    [np.nan, np.nan, np.nan, 5],
    ...                    [np.nan, 3, np.nan, 4]],
    ...                   columns=list('ABCD'))
    >>> df
         A    B   C  D
    0  NaN  2.0 NaN  0
    1  3.0  4.0 NaN  1
    2  NaN  NaN NaN  5
    3  NaN  3.0 NaN  4

    Replace all NaN elements with 0s.

    >>> df.fillna(0)
        A   B   C   D
    0   0.0 2.0 0.0 0
    1   3.0 4.0 0.0 1
    2   0.0 0.0 0.0 5
    3   0.0 3.0 0.0 4

    We can also propagate non-null values forward or backward.

    >>> df.fillna(method='ffill')
        A   B   C   D
    0   NaN 2.0 NaN 0
    1   3.0 4.0 NaN 1
    2   3.0 4.0 NaN 5
    3   3.0 3.0 NaN 4

    Replace all NaN elements in column 'A', 'B', 'C', and 'D', with 0, 1,
    2, and 3 respectively.

    >>> values = {'A': 0, 'B': 1, 'C': 2, 'D': 3}
    >>> df.fillna(value=values)
        A   B   C   D
    0   0.0 2.0 2.0 0
    1   3.0 4.0 2.0 1
    2   0.0 1.0 2.0 5
    3   0.0 3.0 2.0 4

    Only replace the first NaN element.

    >>> df.fillna(value=values, limit=1)
        A   B   C   D
    0   0.0 2.0 2.0 0
    1   3.0 4.0 NaN 1
    2   NaN 1.0 NaN 5
    3   NaN 3.0 NaN 4
    """
    inplace = validate_bool_kwarg(inplace, 'inplace')
    # Normalizes the value/method pair (they are mutually exclusive).
    value, method = validate_fillna_kwargs(value, method)

    self._consolidate_inplace()

    # set the default here, so functions examining the signature
    # can detect if something was set (e.g. in groupby) (GH9221)
    if axis is None:
        axis = 0
    axis = self._get_axis_number(axis)

    from pandas import DataFrame
    if value is None:
        # Method-based fill (ffill/bfill); routed by dimensionality.
        if self._is_mixed_type and axis == 1:
            # Mixed dtypes along columns: fill via double transpose.
            if inplace:
                raise NotImplementedError()
            result = self.T.fillna(method=method, limit=limit).T

            # need to downcast here because of all of the transposes
            result._data = result._data.downcast()

            return result

        # > 3d
        if self.ndim > 3:
            raise NotImplementedError('Cannot fillna with a method for > '
                                      '3dims')

        # 3d
        elif self.ndim == 3:
            # fill in 2d chunks
            result = {col: s.fillna(method=method, value=value)
                      for col, s in self.iteritems()}
            prelim_obj = self._constructor.from_dict(result)
            new_obj = prelim_obj.__finalize__(self)
            new_data = new_obj._data
        else:
            # 2d or less: a method-based fill is an 'interpolation' at
            # the block-manager level with coerce=True.
            new_data = self._data.interpolate(method=method, axis=axis,
                                              limit=limit, inplace=inplace,
                                              coerce=True,
                                              downcast=downcast)
    else:
        # Value-based fill; an empty axis means nothing to do.
        if len(self._get_axis(axis)) == 0:
            return self

        if self.ndim == 1:
            # Series: a dict/Series value is aligned by index; scalars
            # pass straight through; anything list-like is rejected.
            if isinstance(value, (dict, ABCSeries)):
                from pandas import Series
                value = Series(value)
            elif not is_list_like(value):
                pass
            else:
                raise TypeError('"value" parameter must be a scalar, dict '
                                'or Series, but you passed a '
                                '"{0}"'.format(type(value).__name__))

            new_data = self._data.fillna(value=value, limit=limit,
                                         inplace=inplace,
                                         downcast=downcast)

        elif isinstance(value, (dict, ABCSeries)):
            # DataFrame with a per-column mapping of fill values.
            if axis == 1:
                raise NotImplementedError('Currently only can fill '
                                          'with dict/Series column '
                                          'by column')

            result = self if inplace else self.copy()
            for k, v in compat.iteritems(value):
                # Keys absent from the frame are silently skipped.
                if k not in result:
                    continue
                obj = result[k]
                obj.fillna(v, limit=limit, inplace=True, downcast=downcast)
            return result if not inplace else None

        elif not is_list_like(value):
            # Scalar fill value for an n-dim object.
            new_data = self._data.fillna(value=value, limit=limit,
                                        inplace=inplace,
                                        downcast=downcast)
        elif isinstance(value, DataFrame) and self.ndim == 2:
            # Frame-valued fill: take from ``value`` wherever self is NA.
            new_data = self.where(self.notna(), value)
        else:
            raise ValueError("invalid fill value with a %s" % type(value))

    if inplace:
        self._update_inplace(new_data)
    else:
        return self._constructor(new_data).__finalize__(self)
def ffill(self, axis=None, inplace=False, limit=None, downcast=None):
    """
    Synonym for :meth:`DataFrame.fillna` with ``method='ffill'``.
    """
    # Forward-fill is fillna with the 'ffill' method pre-selected.
    kwargs = dict(axis=axis, inplace=inplace, limit=limit,
                  downcast=downcast)
    return self.fillna(method='ffill', **kwargs)
def bfill(self, axis=None, inplace=False, limit=None, downcast=None):
    """
    Synonym for :meth:`DataFrame.fillna` with ``method='bfill'``.
    """
    # Backward-fill is fillna with the 'bfill' method pre-selected.
    kwargs = dict(axis=axis, inplace=inplace, limit=limit,
                  downcast=downcast)
    return self.fillna(method='bfill', **kwargs)
_shared_docs['replace'] = ("""
Replace values given in `to_replace` with `value`.
Values of the %(klass)s are replaced with other values dynamically.
This differs from updating with ``.loc`` or ``.iloc``, which require
you to specify a location to update with some value.
Parameters
----------
to_replace : str, regex, list, dict, Series, int, float, or None
How to find the values that will be replaced.
* numeric, str or regex:
- numeric: numeric values equal to `to_replace` will be
replaced with `value`
- str: string exactly matching `to_replace` will be replaced
with `value`
- regex: regexs matching `to_replace` will be replaced with
`value`
* list of str, regex, or numeric:
- First, if `to_replace` and `value` are both lists, they
**must** be the same length.
- Second, if ``regex=True`` then all of the strings in **both**
lists will be interpreted as regexs otherwise they will match
directly. This doesn't matter much for `value` since there
are only a few possible substitution regexes you can use.
- str, regex and numeric rules apply as above.
* dict:
- Dicts can be used to specify different replacement values
for different existing values. For example,
``{'a': 'b', 'y': 'z'}`` replaces the value 'a' with 'b' and
'y' with 'z'. To use a dict in this way the `value`
parameter should be `None`.
- For a DataFrame a dict can specify that different values
should be replaced in different columns. For example,
``{'a': 1, 'b': 'z'}`` looks for the value 1 in column 'a'
and the value 'z' in column 'b' and replaces these values
with whatever is specified in `value`. The `value` parameter
should not be ``None`` in this case. You can treat this as a
special case of passing two lists except that you are
specifying the column to search in.
- For a DataFrame nested dictionaries, e.g.,
``{'a': {'b': np.nan}}``, are read as follows: look in column
'a' for the value 'b' and replace it with NaN. The `value`
parameter should be ``None`` to use a nested dict in this
way. You can nest regular expressions as well. Note that
column names (the top-level dictionary keys in a nested
dictionary) **cannot** be regular expressions.
* None:
- This means that the `regex` argument must be a string,
compiled regular expression, or list, dict, ndarray or
Series of such elements. If `value` is also ``None`` then
this **must** be a nested dictionary or Series.
See the examples section for examples of each of these.
value : scalar, dict, list, str, regex, default None
Value to replace any values matching `to_replace` with.
For a DataFrame a dict of values can be used to specify which
value to use for each column (columns not in the dict will not be
filled). Regular expressions, strings and lists or dicts of such
objects are also allowed.
inplace : bool, default False
If True, in place. Note: this will modify any
other views on this object (e.g. a column from a DataFrame).
Returns the caller if this is True.
limit : int, default None
Maximum size gap to forward or backward fill.
regex : bool or same types as `to_replace`, default False
Whether to interpret `to_replace` and/or `value` as regular
expressions. If this is ``True`` then `to_replace` *must* be a
string. Alternatively, this could be a regular expression or a
list, dict, or array of regular expressions in which case
`to_replace` must be ``None``.
method : {'pad', 'ffill', 'bfill', `None`}
The method to use when for replacement, when `to_replace` is a
scalar, list or tuple and `value` is ``None``.
.. versionchanged:: 0.23.0
Added to DataFrame.
Returns
-------
%(klass)s
Object after replacement.
Raises
------
AssertionError
* If `regex` is not a ``bool`` and `to_replace` is not
``None``.
TypeError
* If `to_replace` is a ``dict`` and `value` is not a ``list``,
``dict``, ``ndarray``, or ``Series``
* If `to_replace` is ``None`` and `regex` is not compilable
into a regular expression or is a list, dict, ndarray, or
Series.
* When replacing multiple ``bool`` or ``datetime64`` objects and
the arguments to `to_replace` does not match the type of the
value being replaced
ValueError
* If a ``list`` or an ``ndarray`` is passed to `to_replace` and
`value` but they are not the same length.
See Also
--------
%(klass)s.fillna : Fill NA values.
%(klass)s.where : Replace values based on boolean condition.
Series.str.replace : Simple string replacement.
Notes
-----
* Regex substitution is performed under the hood with ``re.sub``. The
rules for substitution for ``re.sub`` are the same.
* Regular expressions will only substitute on strings, meaning you
cannot provide, for example, a regular expression matching floating
point numbers and expect the columns in your frame that have a
numeric dtype to be matched. However, if those floating point
numbers *are* strings, then you can do this.
* This method has *a lot* of options. You are encouraged to experiment
and play with this method to gain intuition about how it works.
* When dict is used as the `to_replace` value, it is like
key(s) in the dict are the to_replace part and
value(s) in the dict are the value parameter.
Examples
--------
**Scalar `to_replace` and `value`**
>>> s = pd.Series([0, 1, 2, 3, 4])
>>> s.replace(0, 5)
0 5
1 1
2 2
3 3
4 4
dtype: int64
>>> df = pd.DataFrame({'A': [0, 1, 2, 3, 4],
... 'B': [5, 6, 7, 8, 9],
... 'C': ['a', 'b', 'c', 'd', 'e']})
>>> df.replace(0, 5)
A B C
0 5 5 a
1 1 6 b
2 2 7 c
3 3 8 d
4 4 9 e
**List-like `to_replace`**
>>> df.replace([0, 1, 2, 3], 4)
A B C
0 4 5 a
1 4 6 b
2 4 7 c
3 4 8 d
4 4 9 e
>>> df.replace([0, 1, 2, 3], [4, 3, 2, 1])
A B C
0 4 5 a
1 3 6 b
2 2 7 c
3 1 8 d
4 4 9 e
>>> s.replace([1, 2], method='bfill')
0 0
1 3
2 3
3 3
4 4
dtype: int64
**dict-like `to_replace`**
>>> df.replace({0: 10, 1: 100})
A B C
0 10 5 a
1 100 6 b
2 2 7 c
3 3 8 d
4 4 9 e
>>> df.replace({'A': 0, 'B': 5}, 100)
A B C
0 100 100 a
1 1 6 b
2 2 7 c
3 3 8 d
4 4 9 e
>>> df.replace({'A': {0: 100, 4: 400}})
A B C
0 100 5 a
1 1 6 b
2 2 7 c
3 3 8 d
4 400 9 e
**Regular expression `to_replace`**
>>> df = pd.DataFrame({'A': ['bat', 'foo', 'bait'],
... 'B': ['abc', 'bar', 'xyz']})
>>> df.replace(to_replace=r'^ba.$', value='new', regex=True)
A B
0 new abc
1 foo new
2 bait xyz
>>> df.replace({'A': r'^ba.$'}, {'A': 'new'}, regex=True)
A B
0 new abc
1 foo bar
2 bait xyz
>>> df.replace(regex=r'^ba.$', value='new')
A B
0 new abc
1 foo new
2 bait xyz
>>> df.replace(regex={r'^ba.$': 'new', 'foo': 'xyz'})
A B
0 new abc
1 xyz new
2 bait xyz
>>> df.replace(regex=[r'^ba.$', 'foo'], value='new')
A B
0 new abc
1 new new
2 bait xyz
Note that when replacing multiple ``bool`` or ``datetime64`` objects,
the data types in the `to_replace` parameter must match the data
type of the value being replaced:
>>> df = pd.DataFrame({'A': [True, False, True],
... 'B': [False, True, False]})
>>> df.replace({'a string': 'new value', True: False}) # raises
Traceback (most recent call last):
...
TypeError: Cannot compare types 'ndarray(dtype=bool)' and 'str'
This raises a ``TypeError`` because one of the ``dict`` keys is not of
the correct type for replacement.
Compare the behavior of ``s.replace({'a': None})`` and
``s.replace('a', None)`` to understand the peculiarities
of the `to_replace` parameter:
>>> s = pd.Series([10, 'a', 'a', 'b', 'a'])
When one uses a dict as the `to_replace` value, it is like the
value(s) in the dict are equal to the `value` parameter.
``s.replace({'a': None})`` is equivalent to
``s.replace(to_replace={'a': None}, value=None, method=None)``:
>>> s.replace({'a': None})
0 10
1 None
2 None
3 b
4 None
dtype: object
When ``value=None`` and `to_replace` is a scalar, list or
tuple, `replace` uses the method parameter (default 'pad') to do the
replacement. So this is why the 'a' values are being replaced by 10
in rows 1 and 2 and 'b' in row 4 in this case.
The command ``s.replace('a', None)`` is actually equivalent to
``s.replace(to_replace='a', value=None, method='pad')``:
>>> s.replace('a', None)
0 10
1 10
2 10
3 b
4 b
dtype: object
""")
@Appender(_shared_docs['replace'] % _shared_doc_kwargs)
def replace(self, to_replace=None, value=None, inplace=False, limit=None,
            regex=False, method='pad'):
    # Replace occurrences of ``to_replace`` with ``value``; the full
    # contract is documented by the appended _shared_docs['replace'].
    inplace = validate_bool_kwarg(inplace, 'inplace')
    if not is_bool(regex) and to_replace is not None:
        raise AssertionError("'to_replace' must be 'None' if 'regex' is "
                             "not a bool")

    self._consolidate_inplace()

    if value is None:
        # passing a single value that is scalar like
        # when value is None (GH5319), for compat
        if not is_dict_like(to_replace) and not is_dict_like(regex):
            to_replace = [to_replace]

        if isinstance(to_replace, (tuple, list)):
            # Method-based replacement (pad/ffill/bfill) path.
            if isinstance(self, pd.DataFrame):
                return self.apply(_single_replace,
                                  args=(to_replace, method, inplace,
                                        limit))
            return _single_replace(self, to_replace, method, inplace,
                                   limit)

        if not is_dict_like(to_replace):
            if not is_dict_like(regex):
                raise TypeError('If "to_replace" and "value" are both None'
                                ' and "to_replace" is not a list, then '
                                'regex must be a mapping')
            # ``regex`` carried the mapping; treat it as to_replace.
            to_replace = regex
            regex = True

        items = list(compat.iteritems(to_replace))
        keys, values = lzip(*items) or ([], [])

        are_mappings = [is_dict_like(v) for v in values]

        if any(are_mappings):
            if not all(are_mappings):
                raise TypeError("If a nested mapping is passed, all values"
                                " of the top level mapping must be "
                                "mappings")
            # passed a nested dict/Series
            to_rep_dict = {}
            value_dict = {}

            for k, v in items:
                # Split each column's inner mapping into parallel
                # to_replace / value lists.
                keys, values = lzip(*v.items()) or ([], [])
                if set(keys) & set(values):
                    raise ValueError("Replacement not allowed with "
                                     "overlapping keys and values")
                to_rep_dict[k] = list(keys)
                value_dict[k] = list(values)

            to_replace, value = to_rep_dict, value_dict
        else:
            # Flat dict: keys are the targets, values the replacements.
            to_replace, value = keys, values

        # Re-enter with a normalized (to_replace, value) pair.
        return self.replace(to_replace, value, inplace=inplace,
                            limit=limit, regex=regex)
    else:
        # need a non-zero len on all axes
        for a in self._AXIS_ORDERS:
            if not len(self._get_axis(a)):
                return self

        new_data = self._data
        if is_dict_like(to_replace):
            if is_dict_like(value):  # {'A' : NA} -> {'A' : 0}
                res = self if inplace else self.copy()
                for c, src in compat.iteritems(to_replace):
                    if c in value and c in self:
                        # object conversion is handled in
                        # series.replace which is called recursively
                        res[c] = res[c].replace(to_replace=src,
                                                value=value[c],
                                                inplace=False,
                                                regex=regex)
                return None if inplace else res

            # {'A': NA} -> 0
            elif not is_list_like(value):
                keys = [(k, src) for k, src in compat.iteritems(to_replace)
                        if k in self]
                keys_len = len(keys) - 1
                for i, (k, src) in enumerate(keys):
                    # Only convert dtypes on the final pass.
                    convert = i == keys_len
                    new_data = new_data.replace(to_replace=src,
                                                value=value,
                                                filter=[k],
                                                inplace=inplace,
                                                regex=regex,
                                                convert=convert)
            else:
                raise TypeError('value argument must be scalar, dict, or '
                                'Series')

        elif is_list_like(to_replace):  # [NA, ''] -> [0, 'missing']
            if is_list_like(value):
                if len(to_replace) != len(value):
                    raise ValueError('Replacement lists must match '
                                     'in length. Expecting %d got %d ' %
                                     (len(to_replace), len(value)))

                new_data = self._data.replace_list(src_list=to_replace,
                                                   dest_list=value,
                                                   inplace=inplace,
                                                   regex=regex)

            else:  # [NA, ''] -> 0
                new_data = self._data.replace(to_replace=to_replace,
                                              value=value, inplace=inplace,
                                              regex=regex)
        elif to_replace is None:
            if not (is_re_compilable(regex) or
                    is_list_like(regex) or is_dict_like(regex)):
                raise TypeError("'regex' must be a string or a compiled "
                                "regular expression or a list or dict of "
                                "strings or regular expressions, you "
                                "passed a"
                                " {0!r}".format(type(regex).__name__))
            # ``regex`` held the pattern(s); recurse with regex=True.
            return self.replace(regex, value, inplace=inplace, limit=limit,
                                regex=True)
        else:

            # dest iterable dict-like
            if is_dict_like(value):  # NA -> {'A' : 0, 'B' : -1}
                new_data = self._data

                for k, v in compat.iteritems(value):
                    if k in self:
                        new_data = new_data.replace(to_replace=to_replace,
                                                    value=v, filter=[k],
                                                    inplace=inplace,
                                                    regex=regex)

            elif not is_list_like(value):  # NA -> 0
                new_data = self._data.replace(to_replace=to_replace,
                                              value=value, inplace=inplace,
                                              regex=regex)
            else:
                msg = ('Invalid "to_replace" type: '
                       '{0!r}').format(type(to_replace).__name__)
                raise TypeError(msg)  # pragma: no cover

    if inplace:
        self._update_inplace(new_data)
    else:
        return self._constructor(new_data).__finalize__(self)
_shared_docs['interpolate'] = """
Please note that only ``method='linear'`` is supported for
DataFrame/Series with a MultiIndex.
Parameters
----------
method : str, default 'linear'
Interpolation technique to use. One of:
* 'linear': Ignore the index and treat the values as equally
spaced. This is the only method supported on MultiIndexes.
* 'time': Works on daily and higher resolution data to interpolate
given length of interval.
* 'index', 'values': use the actual numerical values of the index.
* 'pad': Fill in NaNs using existing values.
* 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', 'spline',
'barycentric', 'polynomial': Passed to
`scipy.interpolate.interp1d`. Both 'polynomial' and 'spline'
require that you also specify an `order` (int),
e.g. ``df.interpolate(method='polynomial', order=5)``.
These use the numerical values of the index.
* 'krogh', 'piecewise_polynomial', 'spline', 'pchip', 'akima':
Wrappers around the SciPy interpolation methods of similar
names. See `Notes`.
* 'from_derivatives': Refers to
`scipy.interpolate.BPoly.from_derivatives` which
replaces 'piecewise_polynomial' interpolation method in
scipy 0.18.
.. versionadded:: 0.18.1
Added support for the 'akima' method.
Added interpolate method 'from_derivatives' which replaces
'piecewise_polynomial' in SciPy 0.18; backwards-compatible with
SciPy < 0.18
axis : {0 or 'index', 1 or 'columns', None}, default None
Axis to interpolate along.
limit : int, optional
Maximum number of consecutive NaNs to fill. Must be greater than
0.
inplace : bool, default False
Update the data in place if possible.
limit_direction : {'forward', 'backward', 'both'}, default 'forward'
If limit is specified, consecutive NaNs will be filled in this
direction.
limit_area : {`None`, 'inside', 'outside'}, default None
If limit is specified, consecutive NaNs will be filled with this
restriction.
* ``None``: No fill restriction.
* 'inside': Only fill NaNs surrounded by valid values
(interpolate).
* 'outside': Only fill NaNs outside valid values (extrapolate).
.. versionadded:: 0.21.0
downcast : optional, 'infer' or None, defaults to None
Downcast dtypes if possible.
**kwargs
Keyword arguments to pass on to the interpolating function.
Returns
-------
Series or DataFrame
Returns the same object type as the caller, interpolated at
some or all ``NaN`` values
See Also
--------
fillna : Fill missing values using different methods.
scipy.interpolate.Akima1DInterpolator : Piecewise cubic polynomials
(Akima interpolator).
scipy.interpolate.BPoly.from_derivatives : Piecewise polynomial in the
Bernstein basis.
scipy.interpolate.interp1d : Interpolate a 1-D function.
scipy.interpolate.KroghInterpolator : Interpolate polynomial (Krogh
interpolator).
scipy.interpolate.PchipInterpolator : PCHIP 1-d monotonic cubic
interpolation.
scipy.interpolate.CubicSpline : Cubic spline data interpolator.
Notes
-----
The 'krogh', 'piecewise_polynomial', 'spline', 'pchip' and 'akima'
methods are wrappers around the respective SciPy implementations of
similar names. These use the actual numerical values of the index.
For more information on their behavior, see the
`SciPy documentation
<http://docs.scipy.org/doc/scipy/reference/interpolate.html#univariate-interpolation>`__
and `SciPy tutorial
<http://docs.scipy.org/doc/scipy/reference/tutorial/interpolate.html>`__.
Examples
--------
Filling in ``NaN`` in a :class:`~pandas.Series` via linear
interpolation.
>>> s = pd.Series([0, 1, np.nan, 3])
>>> s
0 0.0
1 1.0
2 NaN
3 3.0
dtype: float64
>>> s.interpolate()
0 0.0
1 1.0
2 2.0
3 3.0
dtype: float64
Filling in ``NaN`` in a Series by padding, but filling at most two
consecutive ``NaN`` at a time.
>>> s = pd.Series([np.nan, "single_one", np.nan,
... "fill_two_more", np.nan, np.nan, np.nan,
... 4.71, np.nan])
>>> s
0 NaN
1 single_one
2 NaN
3 fill_two_more
4 NaN
5 NaN
6 NaN
7 4.71
8 NaN
dtype: object
>>> s.interpolate(method='pad', limit=2)
0 NaN
1 single_one
2 single_one
3 fill_two_more
4 fill_two_more
5 fill_two_more
6 NaN
7 4.71
8 4.71
dtype: object
Filling in ``NaN`` in a Series via polynomial interpolation or splines:
Both 'polynomial' and 'spline' methods require that you also specify
an ``order`` (int).
>>> s = pd.Series([0, 2, np.nan, 8])
>>> s.interpolate(method='polynomial', order=2)
0 0.000000
1 2.000000
2 4.666667
3 8.000000
dtype: float64
Fill the DataFrame forward (that is, going down) along each column
using linear interpolation.
Note how the last entry in column 'a' is interpolated differently,
because there is no entry after it to use for interpolation.
Note how the first entry in column 'b' remains ``NaN``, because there
is no entry befofe it to use for interpolation.
>>> df = pd.DataFrame([(0.0, np.nan, -1.0, 1.0),
... (np.nan, 2.0, np.nan, np.nan),
... (2.0, 3.0, np.nan, 9.0),
... (np.nan, 4.0, -4.0, 16.0)],
... columns=list('abcd'))
>>> df
a b c d
0 0.0 NaN -1.0 1.0
1 NaN 2.0 NaN NaN
2 2.0 3.0 NaN 9.0
3 NaN 4.0 -4.0 16.0
>>> df.interpolate(method='linear', limit_direction='forward', axis=0)
a b c d
0 0.0 NaN -1.0 1.0
1 1.0 2.0 -2.0 5.0
2 2.0 3.0 -3.0 9.0
3 2.0 4.0 -4.0 16.0
Using polynomial interpolation.
>>> df['d'].interpolate(method='polynomial', order=2)
0 1.0
1 4.0
2 9.0
3 16.0
Name: d, dtype: float64
"""
@Appender(_shared_docs['interpolate'] % _shared_doc_kwargs)
def interpolate(self, method='linear', axis=0, limit=None, inplace=False,
                limit_direction='forward', limit_area=None,
                downcast=None, **kwargs):
    """
    Interpolate values according to different methods.
    """
    inplace = validate_bool_kwarg(inplace, 'inplace')

    if self.ndim > 2:
        raise NotImplementedError("Interpolate has not been implemented "
                                  "on Panel and Panel 4D objects.")

    # Work on a transposed view for axis=1 so the interpolation itself
    # always runs along the block manager's natural orientation.
    if axis == 0:
        ax = self._info_axis_name
        _maybe_transposed_self = self
    elif axis == 1:
        _maybe_transposed_self = self.T
        ax = 1
    else:
        # NOTE(review): when ``axis`` is neither 0 nor 1 (e.g. an axis
        # name such as 'index'), ``ax`` is never assigned above, so the
        # next line raises NameError. Confirm whether axis should be
        # normalized via self._get_axis_number(axis) up front.
        _maybe_transposed_self = self
    ax = _maybe_transposed_self._get_axis_number(ax)

    if _maybe_transposed_self.ndim == 2:
        # The alternate axis is the one we interpolate along.
        alt_ax = 1 - ax
    else:
        alt_ax = ax

    if (isinstance(_maybe_transposed_self.index, MultiIndex) and
            method != 'linear'):
        raise ValueError("Only `method=linear` interpolation is supported "
                         "on MultiIndexes.")

    if _maybe_transposed_self._data.get_dtype_counts().get(
            'object') == len(_maybe_transposed_self.T):
        raise TypeError("Cannot interpolate with all object-dtype columns "
                        "in the DataFrame. Try setting at least one "
                        "column to a numeric dtype.")

    # create/use the index
    if method == 'linear':
        # prior default: positional spacing, ignoring the actual index
        index = np.arange(len(_maybe_transposed_self._get_axis(alt_ax)))
    else:
        index = _maybe_transposed_self._get_axis(alt_ax)

    if isna(index).any():
        raise NotImplementedError("Interpolation with NaNs in the index "
                                  "has not been implemented. Try filling "
                                  "those NaNs before interpolating.")
    data = _maybe_transposed_self._data
    new_data = data.interpolate(method=method, axis=ax, index=index,
                                values=_maybe_transposed_self, limit=limit,
                                limit_direction=limit_direction,
                                limit_area=limit_area,
                                inplace=inplace, downcast=downcast,
                                **kwargs)

    if inplace:
        if axis == 1:
            # Undo the transpose before writing back into self.
            new_data = self._constructor(new_data).T._data
        self._update_inplace(new_data)
    else:
        res = self._constructor(new_data).__finalize__(self)
        if axis == 1:
            res = res.T
        return res
# ----------------------------------------------------------------------
# Timeseries methods Methods
def asof(self, where, subset=None):
    """
    Return the last row(s) without any NaNs before `where`.

    The last row (for each element in `where`, if list) without any
    NaN is taken.
    In case of a :class:`~pandas.DataFrame`, the last row without NaN
    considering only the subset of columns (if not `None`)

    .. versionadded:: 0.19.0 For DataFrame

    If there is no good value, NaN is returned for a Series or
    a Series of NaN values for a DataFrame

    Parameters
    ----------
    where : date or array-like of dates
        Date(s) before which the last row(s) are returned.
    subset : str or array-like of str, default `None`
        For DataFrame, if not `None`, only use these columns to
        check for NaNs.

    Returns
    -------
    scalar, Series, or DataFrame
        Scalar : when `self` is a Series and `where` is a scalar
        Series: when `self` is a Series and `where` is an array-like,
        or when `self` is a DataFrame and `where` is a scalar
        DataFrame : when `self` is a DataFrame and `where` is an
        array-like

    See Also
    --------
    merge_asof : Perform an asof merge. Similar to left join.

    Notes
    -----
    Dates are assumed to be sorted. Raises if this is not the case.

    Examples
    --------
    A Series and a scalar `where`.

    >>> s = pd.Series([1, 2, np.nan, 4], index=[10, 20, 30, 40])
    >>> s
    10    1.0
    20    2.0
    30    NaN
    40    4.0
    dtype: float64

    >>> s.asof(20)
    2.0

    For a sequence `where`, a Series is returned. The first value is
    NaN, because the first element of `where` is before the first
    index value.

    >>> s.asof([5, 20])
    5     NaN
    20    2.0
    dtype: float64

    Missing values are not considered. The following is ``2.0``, not
    NaN, even though NaN is at the index location for ``30``.

    >>> s.asof(30)
    2.0

    Take all columns into consideration

    >>> df = pd.DataFrame({'a': [10, 20, 30, 40, 50],
    ...                    'b': [None, None, None, None, 500]},
    ...                   index=pd.DatetimeIndex(['2018-02-27 09:01:00',
    ...                                           '2018-02-27 09:02:00',
    ...                                           '2018-02-27 09:03:00',
    ...                                           '2018-02-27 09:04:00',
    ...                                           '2018-02-27 09:05:00']))
    >>> df.asof(pd.DatetimeIndex(['2018-02-27 09:03:30',
    ...                           '2018-02-27 09:04:30']))
                          a   b
    2018-02-27 09:03:30 NaN NaN
    2018-02-27 09:04:30 NaN NaN

    Take a single column into consideration

    >>> df.asof(pd.DatetimeIndex(['2018-02-27 09:03:30',
    ...                           '2018-02-27 09:04:30']),
    ...         subset=['a'])
                             a   b
    2018-02-27 09:03:30   30.0 NaN
    2018-02-27 09:04:30   40.0 NaN
    """
    # Coerce a string 'where' into a timestamp before comparisons.
    if isinstance(where, compat.string_types):
        from pandas import to_datetime
        where = to_datetime(where)

    if not self.index.is_monotonic:
        raise ValueError("asof requires a sorted index")

    is_series = isinstance(self, ABCSeries)
    if is_series:
        if subset is not None:
            raise ValueError("subset is not valid for Series")
    elif self.ndim > 2:
        raise NotImplementedError("asof is not implemented "
                                  "for {type}".format(type=type(self)))
    else:
        # DataFrame: default to checking all columns for NaNs.
        if subset is None:
            subset = self.columns
        if not is_list_like(subset):
            subset = [subset]

    is_list = is_list_like(where)
    if not is_list:
        start = self.index[0]
        if isinstance(self.index, PeriodIndex):
            # Compare by ordinals so Period and index are comparable.
            where = Period(where, freq=self.index.freq).ordinal
            start = start.ordinal

        if where < start:
            # 'where' precedes all data: nothing to return.
            if not is_series:
                from pandas import Series
                return Series(index=self.columns, name=where)
            return np.nan

        # It's always much faster to use a *while* loop here for
        # Series than pre-computing all the NAs. However a
        # *while* loop is extremely expensive for DataFrame
        # so we later pre-compute all the NAs and use the same
        # code path whether *where* is a scalar or list.
        # See PR: https://github.com/pandas-dev/pandas/pull/14476
        if is_series:
            loc = self.index.searchsorted(where, side='right')
            if loc > 0:
                loc -= 1

            values = self._values
            # Walk backwards past NaNs to the last valid observation.
            while loc > 0 and isna(values[loc]):
                loc -= 1
            return values[loc]

    if not isinstance(where, Index):
        where = Index(where) if is_list else Index([where])

    nulls = self.isna() if is_series else self[subset].isna().any(1)
    if nulls.all():
        # Every row has a NaN in the checked columns: all-NaN result.
        if is_series:
            return self._constructor(np.nan, index=where, name=self.name)
        elif is_list:
            from pandas import DataFrame
            return DataFrame(np.nan, index=where, columns=self.columns)
        else:
            from pandas import Series
            return Series(np.nan, index=self.columns, name=where[0])

    locs = self.index.asof_locs(where, ~(nulls.values))

    # mask the missing
    missing = locs == -1
    data = self.take(locs, is_copy=False)
    data.index = where
    data.loc[missing] = np.nan
    return data if is_list else data.iloc[-1]
# ----------------------------------------------------------------------
# Action Methods
# Shared docstring template for isna()/isnull().  The %(klass)s
# placeholders are substituted per class (Series/DataFrame) via the
# @Appender decorators on the methods below, so this string is part of
# the rendered user-facing documentation — keep its content stable.
_shared_docs['isna'] = """
Detect missing values.
Return a boolean same-sized object indicating if the values are NA.
NA values, such as None or :attr:`numpy.NaN`, gets mapped to True
values.
Everything else gets mapped to False values. Characters such as empty
strings ``''`` or :attr:`numpy.inf` are not considered NA values
(unless you set ``pandas.options.mode.use_inf_as_na = True``).
Returns
-------
%(klass)s
Mask of bool values for each element in %(klass)s that
indicates whether an element is not an NA value.
See Also
--------
%(klass)s.isnull : Alias of isna.
%(klass)s.notna : Boolean inverse of isna.
%(klass)s.dropna : Omit axes labels with missing values.
isna : Top-level isna.
Examples
--------
Show which entries in a DataFrame are NA.
>>> df = pd.DataFrame({'age': [5, 6, np.NaN],
... 'born': [pd.NaT, pd.Timestamp('1939-05-27'),
... pd.Timestamp('1940-04-25')],
... 'name': ['Alfred', 'Batman', ''],
... 'toy': [None, 'Batmobile', 'Joker']})
>>> df
age born name toy
0 5.0 NaT Alfred None
1 6.0 1939-05-27 Batman Batmobile
2 NaN 1940-04-25 Joker
>>> df.isna()
age born name toy
0 False True False True
1 False False False False
2 True False False False
Show which entries in a Series are NA.
>>> ser = pd.Series([5, 6, np.NaN])
>>> ser
0 5.0
1 6.0
2 NaN
dtype: float64
>>> ser.isna()
0 False
1 False
2 True
dtype: bool
"""
@Appender(_shared_docs['isna'] % _shared_doc_kwargs)
def isna(self):
    # Delegate to the top-level isna function, then copy metadata
    # (e.g. Series.name) from self onto the boolean result.
    mask = isna(self)
    return mask.__finalize__(self)
@Appender(_shared_docs['isna'] % _shared_doc_kwargs)
def isnull(self):
    # Alias of isna(): identical implementation, kept separate so both
    # names appear in the API with the shared docstring attached.
    mask = isna(self)
    return mask.__finalize__(self)
# Shared docstring template for notna()/notnull() — the boolean inverse
# of the isna template above.  %(klass)s is substituted per class via
# @Appender, so this string is rendered user-facing documentation.
_shared_docs['notna'] = """
Detect existing (non-missing) values.
Return a boolean same-sized object indicating if the values are not NA.
Non-missing values get mapped to True. Characters such as empty
strings ``''`` or :attr:`numpy.inf` are not considered NA values
(unless you set ``pandas.options.mode.use_inf_as_na = True``).
NA values, such as None or :attr:`numpy.NaN`, get mapped to False
values.
Returns
-------
%(klass)s
Mask of bool values for each element in %(klass)s that
indicates whether an element is not an NA value.
See Also
--------
%(klass)s.notnull : Alias of notna.
%(klass)s.isna : Boolean inverse of notna.
%(klass)s.dropna : Omit axes labels with missing values.
notna : Top-level notna.
Examples
--------
Show which entries in a DataFrame are not NA.
>>> df = pd.DataFrame({'age': [5, 6, np.NaN],
... 'born': [pd.NaT, pd.Timestamp('1939-05-27'),
... pd.Timestamp('1940-04-25')],
... 'name': ['Alfred', 'Batman', ''],
... 'toy': [None, 'Batmobile', 'Joker']})
>>> df
age born name toy
0 5.0 NaT Alfred None
1 6.0 1939-05-27 Batman Batmobile
2 NaN 1940-04-25 Joker
>>> df.notna()
age born name toy
0 True False True False
1 True True True True
2 False True True True
Show which entries in a Series are not NA.
>>> ser = pd.Series([5, 6, np.NaN])
>>> ser
0 5.0
1 6.0
2 NaN
dtype: float64
>>> ser.notna()
0 True
1 True
2 False
dtype: bool
"""
@Appender(_shared_docs['notna'] % _shared_doc_kwargs)
def notna(self):
    # Delegate to the top-level notna function, propagating metadata
    # from self onto the boolean result.
    mask = notna(self)
    return mask.__finalize__(self)
@Appender(_shared_docs['notna'] % _shared_doc_kwargs)
def notnull(self):
    # Alias of notna(): identical implementation under the legacy name.
    mask = notna(self)
    return mask.__finalize__(self)
def _clip_with_scalar(self, lower, upper, inplace=False):
    """
    Clip against scalar bound(s).

    Fast path used by ``clip`` when both thresholds are plain numbers.
    NA values present in the data before clipping are preserved as NaN
    in the result; NA thresholds are rejected outright.
    """
    # Reject NA thresholds up front (lower checked first, as before).
    for bound in (lower, upper):
        if bound is not None and np.any(isna(bound)):
            raise ValueError("Cannot use an NA value as a clip threshold")
    # Remember where the original NAs were; comparisons below would
    # otherwise replace them with the bound value.
    na_mask = isna(self.values)
    clipped = self
    with np.errstate(all='ignore'):
        # Upper bound first, then lower — same order as the original.
        if upper is not None:
            in_bounds = self.to_numpy() <= upper
            clipped = clipped.where(in_bounds, upper, axis=None,
                                    inplace=False)
        if lower is not None:
            in_bounds = self.to_numpy() >= lower
            clipped = clipped.where(in_bounds, lower, axis=None,
                                    inplace=False)
    if np.any(na_mask):
        # Restore the original missing values.
        clipped[na_mask] = np.nan
    if not inplace:
        return clipped
    self._update_inplace(clipped)
def _clip_with_one_bound(self, threshold, method, axis, inplace):
    """
    Clip against a single bound.

    ``method`` is ``self.le`` for an upper bound and ``self.ge`` for a
    lower bound; it selects the values that are already in range.
    """
    if axis is not None:
        axis = self._get_axis_number(axis)
    # Scalar numeric thresholds take the fast scalar path.
    if is_scalar(threshold) and is_number(threshold):
        is_upper = method.__name__ == 'le'
        lo, hi = (None, threshold) if is_upper else (threshold, None)
        return self._clip_with_scalar(lo, hi, inplace=inplace)
    # Keep in-range values; NAs are kept as-is (not clipped).
    keep = method(threshold, axis=axis) | isna(self)
    # GH #15390
    # ``where`` needs an NDFrame-like threshold, so wrap array-like
    # thresholds in a Series (or align them against the frame) first.
    if is_list_like(threshold) and not isinstance(threshold, ABCSeries):
        if isinstance(self, ABCSeries):
            threshold = pd.Series(threshold, index=self.index)
        else:
            threshold = _align_method_FRAME(self, threshold, axis)
    return self.where(keep, threshold, axis=axis, inplace=inplace)
def clip(self, lower=None, upper=None, axis=None, inplace=False,
         *args, **kwargs):
    """
    Trim values at input threshold(s).
    Assigns values outside boundary to boundary values. Thresholds
    can be singular values or array like, and in the latter case
    the clipping is performed element-wise in the specified axis.
    Parameters
    ----------
    lower : float or array_like, default None
    Minimum threshold value. All values below this
    threshold will be set to it.
    upper : float or array_like, default None
    Maximum threshold value. All values above this
    threshold will be set to it.
    axis : int or string axis name, optional
    Align object with lower and upper along the given axis.
    inplace : boolean, default False
    Whether to perform the operation in place on the data.
    .. versionadded:: 0.21.0
    *args, **kwargs
    Additional keywords have no effect but might be accepted
    for compatibility with numpy.
    Returns
    -------
    Series or DataFrame
    Same type as calling object with the values outside the
    clip boundaries replaced
    Examples
    --------
    >>> data = {'col_0': [9, -3, 0, -1, 5], 'col_1': [-2, -7, 6, 8, -5]}
    >>> df = pd.DataFrame(data)
    >>> df
    col_0 col_1
    0 9 -2
    1 -3 -7
    2 0 6
    3 -1 8
    4 5 -5
    Clips per column using lower and upper thresholds:
    >>> df.clip(-4, 6)
    col_0 col_1
    0 6 -2
    1 -3 -4
    2 0 6
    3 -1 6
    4 5 -4
    Clips using specific lower and upper thresholds per column element:
    >>> t = pd.Series([2, -4, -1, 6, 3])
    >>> t
    0 2
    1 -4
    2 -1
    3 6
    4 3
    dtype: int64
    >>> df.clip(t, t + 4, axis=0)
    col_0 col_1
    0 6 2
    1 -3 -4
    2 0 3
    3 6 8
    4 5 3
    """
    # Panel never gained clip support.
    if isinstance(self, ABCPanel):
        raise NotImplementedError("clip is not supported yet for panels")
    inplace = validate_bool_kwarg(inplace, 'inplace')
    # numpy-compat: *args/**kwargs may only carry what numpy.clip allows.
    axis = nv.validate_clip_with_axis(axis, args, kwargs)
    if axis is not None:
        axis = self._get_axis_number(axis)
    # GH 17276
    # numpy doesn't like NaN as a clip value
    # so ignore
    # GH 19992
    # numpy doesn't drop a list-like bound containing NaN
    if not is_list_like(lower) and np.any(pd.isnull(lower)):
        lower = None
    if not is_list_like(upper) and np.any(pd.isnull(upper)):
        upper = None
    # GH 2747 (arguments were reversed)
    if lower is not None and upper is not None:
        if is_scalar(lower) and is_scalar(upper):
            lower, upper = min(lower, upper), max(lower, upper)
    # fast-path for scalars
    if ((lower is None or (is_scalar(lower) and is_number(lower))) and
            (upper is None or (is_scalar(upper) and is_number(upper)))):
        return self._clip_with_scalar(lower, upper, inplace=inplace)
    result = self
    if lower is not None:
        result = result._clip_with_one_bound(lower, method=self.ge,
                                             axis=axis, inplace=inplace)
    if upper is not None:
        if inplace:
            # The lower-bound step above already mutated self in place,
            # so the upper-bound step must start from self again.
            result = self
        result = result._clip_with_one_bound(upper, method=self.le,
                                             axis=axis, inplace=inplace)
    return result
def clip_upper(self, threshold, axis=None, inplace=False):
    """
    Trim values above a given threshold.
    .. deprecated:: 0.24.0
    Use clip(upper=threshold) instead.
    Elements above the `threshold` will be changed to match the
    `threshold` value(s). Threshold can be a single value or an array,
    in the latter case it performs the truncation element-wise.
    Parameters
    ----------
    threshold : numeric or array-like
    Maximum value allowed. All values above threshold will be set to
    this value.
    * float : every value is compared to `threshold`.
    * array-like : The shape of `threshold` should match the object
    it's compared to. When `self` is a Series, `threshold` should be
    the length. When `self` is a DataFrame, `threshold` should 2-D
    and the same shape as `self` for ``axis=None``, or 1-D and the
    same length as the axis being compared.
    axis : {0 or 'index', 1 or 'columns'}, default 0
    Align object with `threshold` along the given axis.
    inplace : boolean, default False
    Whether to perform the operation in place on the data.
    .. versionadded:: 0.21.0
    Returns
    -------
    Series or DataFrame
    Original data with values trimmed.
    See Also
    --------
    Series.clip : General purpose method to trim Series values to given
    threshold(s).
    DataFrame.clip : General purpose method to trim DataFrame values to
    given threshold(s).
    Examples
    --------
    >>> s = pd.Series([1, 2, 3, 4, 5])
    >>> s
    0 1
    1 2
    2 3
    3 4
    4 5
    dtype: int64
    >>> s.clip(upper=3)
    0 1
    1 2
    2 3
    3 3
    4 3
    dtype: int64
    >>> elemwise_thresholds = [5, 4, 3, 2, 1]
    >>> elemwise_thresholds
    [5, 4, 3, 2, 1]
    >>> s.clip(upper=elemwise_thresholds)
    0 1
    1 2
    2 3
    3 2
    4 1
    dtype: int64
    """
    # Deprecated shim: warn, then delegate to the shared one-bound
    # implementation with self.le (i.e. clip from above).
    warnings.warn('clip_upper(threshold) is deprecated, '
                  'use clip(upper=threshold) instead',
                  FutureWarning, stacklevel=2)
    return self._clip_with_one_bound(threshold, method=self.le,
                                     axis=axis, inplace=inplace)
def clip_lower(self, threshold, axis=None, inplace=False):
    """
    Trim values below a given threshold.
    .. deprecated:: 0.24.0
    Use clip(lower=threshold) instead.
    Elements below the `threshold` will be changed to match the
    `threshold` value(s). Threshold can be a single value or an array,
    in the latter case it performs the truncation element-wise.
    Parameters
    ----------
    threshold : numeric or array-like
    Minimum value allowed. All values below threshold will be set to
    this value.
    * float : every value is compared to `threshold`.
    * array-like : The shape of `threshold` should match the object
    it's compared to. When `self` is a Series, `threshold` should be
    the length. When `self` is a DataFrame, `threshold` should 2-D
    and the same shape as `self` for ``axis=None``, or 1-D and the
    same length as the axis being compared.
    axis : {0 or 'index', 1 or 'columns'}, default 0
    Align `self` with `threshold` along the given axis.
    inplace : boolean, default False
    Whether to perform the operation in place on the data.
    .. versionadded:: 0.21.0
    Returns
    -------
    Series or DataFrame
    Original data with values trimmed.
    See Also
    --------
    Series.clip : General purpose method to trim Series values to given
    threshold(s).
    DataFrame.clip : General purpose method to trim DataFrame values to
    given threshold(s).
    Examples
    --------
    Series single threshold clipping:
    >>> s = pd.Series([5, 6, 7, 8, 9])
    >>> s.clip(lower=8)
    0 8
    1 8
    2 8
    3 8
    4 9
    dtype: int64
    Series clipping element-wise using an array of thresholds. `threshold`
    should be the same length as the Series.
    >>> elemwise_thresholds = [4, 8, 7, 2, 5]
    >>> s.clip(lower=elemwise_thresholds)
    0 5
    1 8
    2 7
    3 8
    4 9
    dtype: int64
    DataFrames can be compared to a scalar.
    >>> df = pd.DataFrame({"A": [1, 3, 5], "B": [2, 4, 6]})
    >>> df
    A B
    0 1 2
    1 3 4
    2 5 6
    >>> df.clip(lower=3)
    A B
    0 3 3
    1 3 4
    2 5 6
    Or to an array of values. By default, `threshold` should be the same
    shape as the DataFrame.
    >>> df.clip(lower=np.array([[3, 4], [2, 2], [6, 2]]))
    A B
    0 3 4
    1 3 4
    2 6 6
    Control how `threshold` is broadcast with `axis`. In this case
    `threshold` should be the same length as the axis specified by
    `axis`.
    >>> df.clip(lower=[3, 3, 5], axis='index')
    A B
    0 3 3
    1 3 4
    2 5 6
    >>> df.clip(lower=[4, 5], axis='columns')
    A B
    0 4 5
    1 4 5
    2 5 6
    """
    # Deprecated shim: warn, then delegate to the shared one-bound
    # implementation with self.ge (i.e. clip from below).
    warnings.warn('clip_lower(threshold) is deprecated, '
                  'use clip(lower=threshold) instead',
                  FutureWarning, stacklevel=2)
    return self._clip_with_one_bound(threshold, method=self.ge,
                                     axis=axis, inplace=inplace)
def groupby(self, by=None, axis=0, level=None, as_index=True, sort=True,
            group_keys=True, squeeze=False, observed=False, **kwargs):
    """
    Group DataFrame or Series using a mapper or by a Series of columns.
    A groupby operation involves some combination of splitting the
    object, applying a function, and combining the results. This can be
    used to group large amounts of data and compute operations on these
    groups.
    Parameters
    ----------
    by : mapping, function, label, or list of labels
    Used to determine the groups for the groupby.
    If ``by`` is a function, it's called on each value of the object's
    index. If a dict or Series is passed, the Series or dict VALUES
    will be used to determine the groups (the Series' values are first
    aligned; see ``.align()`` method). If an ndarray is passed, the
    values are used as-is determine the groups. A label or list of
    labels may be passed to group by the columns in ``self``. Notice
    that a tuple is interpreted a (single) key.
    axis : {0 or 'index', 1 or 'columns'}, default 0
    Split along rows (0) or columns (1).
    level : int, level name, or sequence of such, default None
    If the axis is a MultiIndex (hierarchical), group by a particular
    level or levels.
    as_index : bool, default True
    For aggregated output, return object with group labels as the
    index. Only relevant for DataFrame input. as_index=False is
    effectively "SQL-style" grouped output.
    sort : bool, default True
    Sort group keys. Get better performance by turning this off.
    Note this does not influence the order of observations within each
    group. Groupby preserves the order of rows within each group.
    group_keys : bool, default True
    When calling apply, add group keys to index to identify pieces.
    squeeze : bool, default False
    Reduce the dimensionality of the return type if possible,
    otherwise return a consistent type.
    observed : bool, default False
    This only applies if any of the groupers are Categoricals.
    If True: only show observed values for categorical groupers.
    If False: show all values for categorical groupers.
    .. versionadded:: 0.23.0
    **kwargs
    Optional, only accepts keyword argument 'mutated' and is passed
    to groupby.
    Returns
    -------
    DataFrameGroupBy or SeriesGroupBy
    Depends on the calling object and returns groupby object that
    contains information about the groups.
    See Also
    --------
    resample : Convenience method for frequency conversion and resampling
    of time series.
    Notes
    -----
    See the `user guide
    <http://pandas.pydata.org/pandas-docs/stable/groupby.html>`_ for more.
    Examples
    --------
    >>> df = pd.DataFrame({'Animal' : ['Falcon', 'Falcon',
    ... 'Parrot', 'Parrot'],
    ... 'Max Speed' : [380., 370., 24., 26.]})
    >>> df
    Animal Max Speed
    0 Falcon 380.0
    1 Falcon 370.0
    2 Parrot 24.0
    3 Parrot 26.0
    >>> df.groupby(['Animal']).mean()
    Max Speed
    Animal
    Falcon 375.0
    Parrot 25.0
    **Hierarchical Indexes**
    We can groupby different levels of a hierarchical index
    using the `level` parameter:
    >>> arrays = [['Falcon', 'Falcon', 'Parrot', 'Parrot'],
    ... ['Capitve', 'Wild', 'Capitve', 'Wild']]
    >>> index = pd.MultiIndex.from_arrays(arrays, names=('Animal', 'Type'))
    >>> df = pd.DataFrame({'Max Speed' : [390., 350., 30., 20.]},
    ... index=index)
    >>> df
    Max Speed
    Animal Type
    Falcon Capitve 390.0
    Wild 350.0
    Parrot Capitve 30.0
    Wild 20.0
    >>> df.groupby(level=0).mean()
    Max Speed
    Animal
    Falcon 370.0
    Parrot 25.0
    >>> df.groupby(level=1).mean()
    Max Speed
    Type
    Capitve 210.0
    Wild 185.0
    """
    # Local import: the groupby module imports from this module, so a
    # top-level import would be circular.
    from pandas.core.groupby.groupby import groupby
    # At least one grouping specification is required.
    if level is None and by is None:
        raise TypeError("You have to supply one of 'by' and 'level'")
    axis = self._get_axis_number(axis)
    return groupby(self, by=by, axis=axis, level=level, as_index=as_index,
                   sort=sort, group_keys=group_keys, squeeze=squeeze,
                   observed=observed, **kwargs)
def asfreq(self, freq, method=None, how=None, normalize=False,
           fill_value=None):
    """
    Convert TimeSeries to specified frequency.
    Optionally provide filling method to pad/backfill missing values.
    Returns the original data conformed to a new index with the specified
    frequency. ``resample`` is more appropriate if an operation, such as
    summarization, is necessary to represent the data at the new frequency.
    Parameters
    ----------
    freq : DateOffset object, or string
    method : {'backfill'/'bfill', 'pad'/'ffill'}, default None
    Method to use for filling holes in reindexed Series (note this
    does not fill NaNs that already were present):
    * 'pad' / 'ffill': propagate last valid observation forward to next
    valid
    * 'backfill' / 'bfill': use NEXT valid observation to fill
    how : {'start', 'end'}, default end
    For PeriodIndex only, see PeriodIndex.asfreq
    normalize : bool, default False
    Whether to reset output index to midnight
    fill_value : scalar, optional
    Value to use for missing values, applied during upsampling (note
    this does not fill NaNs that already were present).
    .. versionadded:: 0.20.0
    Returns
    -------
    converted : same type as caller
    See Also
    --------
    reindex
    Notes
    -----
    To learn more about the frequency strings, please see `this link
    <http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases>`__.
    Examples
    --------
    Start by creating a series with 4 one minute timestamps.
    >>> index = pd.date_range('1/1/2000', periods=4, freq='T')
    >>> series = pd.Series([0.0, None, 2.0, 3.0], index=index)
    >>> df = pd.DataFrame({'s':series})
    >>> df
    s
    2000-01-01 00:00:00 0.0
    2000-01-01 00:01:00 NaN
    2000-01-01 00:02:00 2.0
    2000-01-01 00:03:00 3.0
    Upsample the series into 30 second bins.
    >>> df.asfreq(freq='30S')
    s
    2000-01-01 00:00:00 0.0
    2000-01-01 00:00:30 NaN
    2000-01-01 00:01:00 NaN
    2000-01-01 00:01:30 NaN
    2000-01-01 00:02:00 2.0
    2000-01-01 00:02:30 NaN
    2000-01-01 00:03:00 3.0
    Upsample again, providing a ``fill value``.
    >>> df.asfreq(freq='30S', fill_value=9.0)
    s
    2000-01-01 00:00:00 0.0
    2000-01-01 00:00:30 9.0
    2000-01-01 00:01:00 NaN
    2000-01-01 00:01:30 9.0
    2000-01-01 00:02:00 2.0
    2000-01-01 00:02:30 9.0
    2000-01-01 00:03:00 3.0
    Upsample again, providing a ``method``.
    >>> df.asfreq(freq='30S', method='bfill')
    s
    2000-01-01 00:00:00 0.0
    2000-01-01 00:00:30 NaN
    2000-01-01 00:01:00 NaN
    2000-01-01 00:01:30 2.0
    2000-01-01 00:02:00 2.0
    2000-01-01 00:02:30 3.0
    2000-01-01 00:03:00 3.0
    """
    # Thin delegation to the resample machinery's asfreq helper
    # (local import avoids a circular dependency at module load).
    from pandas.core.resample import asfreq
    return asfreq(self, freq, method=method, how=how, normalize=normalize,
                  fill_value=fill_value)
def at_time(self, time, asof=False, axis=None):
    """
    Select values at particular time of day (e.g. 9:30AM).

    Parameters
    ----------
    time : datetime.time or string
    asof : bool, default False
        Passed through to ``DatetimeIndex.indexer_at_time``.
    axis : {0 or 'index', 1 or 'columns'}, default 0
        .. versionadded:: 0.24.0

    Returns
    -------
    values_at_time : same type as caller

    Raises
    ------
    TypeError
        If the index is not a :class:`DatetimeIndex`

    See Also
    --------
    between_time : Select values between particular times of the day.
    first : Select initial periods of time series based on a date offset.
    last : Select final periods of time series based on a date offset.
    DatetimeIndex.indexer_at_time : Get just the index locations for
        values at particular time of the day.

    Examples
    --------
    >>> i = pd.date_range('2018-04-09', periods=4, freq='12H')
    >>> ts = pd.DataFrame({'A': [1,2,3,4]}, index=i)
    >>> ts
                         A
    2018-04-09 00:00:00  1
    2018-04-09 12:00:00  2
    2018-04-10 00:00:00  3
    2018-04-10 12:00:00  4
    >>> ts.at_time('12:00')
                         A
    2018-04-09 12:00:00  2
    2018-04-10 12:00:00  4
    """
    # Default to the statistics axis (rows for Series/DataFrame).
    axis = self._stat_axis_number if axis is None else axis
    axis = self._get_axis_number(axis)
    target_index = self._get_axis(axis)
    try:
        # Only DatetimeIndex provides indexer_at_time; anything else
        # raises AttributeError, which we surface as a TypeError.
        locs = target_index.indexer_at_time(time, asof=asof)
    except AttributeError:
        raise TypeError('Index must be DatetimeIndex')
    return self._take(locs, axis=axis)
def between_time(self, start_time, end_time, include_start=True,
                 include_end=True, axis=None):
    """
    Select values between particular times of the day (e.g., 9:00-9:30 AM).

    By setting ``start_time`` to be later than ``end_time``,
    you can get the times that are *not* between the two times.

    Parameters
    ----------
    start_time : datetime.time or string
    end_time : datetime.time or string
    include_start : boolean, default True
    include_end : boolean, default True
    axis : {0 or 'index', 1 or 'columns'}, default 0
        .. versionadded:: 0.24.0

    Returns
    -------
    values_between_time : same type as caller

    Raises
    ------
    TypeError
        If the index is not a :class:`DatetimeIndex`

    See Also
    --------
    at_time : Select values at a particular time of the day.
    first : Select initial periods of time series based on a date offset.
    last : Select final periods of time series based on a date offset.
    DatetimeIndex.indexer_between_time : Get just the index locations for
        values between particular times of the day.

    Examples
    --------
    >>> i = pd.date_range('2018-04-09', periods=4, freq='1D20min')
    >>> ts = pd.DataFrame({'A': [1,2,3,4]}, index=i)
    >>> ts
                         A
    2018-04-09 00:00:00  1
    2018-04-10 00:20:00  2
    2018-04-11 00:40:00  3
    2018-04-12 01:00:00  4
    >>> ts.between_time('0:15', '0:45')
                         A
    2018-04-10 00:20:00  2
    2018-04-11 00:40:00  3

    You get the times that are *not* between two times by setting
    ``start_time`` later than ``end_time``:

    >>> ts.between_time('0:45', '0:15')
                         A
    2018-04-09 00:00:00  1
    2018-04-12 01:00:00  4
    """
    # Default to the statistics axis (rows for Series/DataFrame).
    axis = self._stat_axis_number if axis is None else axis
    axis = self._get_axis_number(axis)
    target_index = self._get_axis(axis)
    try:
        # Only DatetimeIndex provides indexer_between_time; anything
        # else raises AttributeError, surfaced here as a TypeError.
        locs = target_index.indexer_between_time(
            start_time, end_time, include_start=include_start,
            include_end=include_end)
    except AttributeError:
        raise TypeError('Index must be DatetimeIndex')
    return self._take(locs, axis=axis)
def resample(self, rule, how=None, axis=0, fill_method=None, closed=None,
             label=None, convention='start', kind=None, loffset=None,
             limit=None, base=0, on=None, level=None):
    """
    Resample time-series data.
    Convenience method for frequency conversion and resampling of time
    series. Object must have a datetime-like index (`DatetimeIndex`,
    `PeriodIndex`, or `TimedeltaIndex`), or pass datetime-like values
    to the `on` or `level` keyword.
    Parameters
    ----------
    rule : str
    The offset string or object representing target conversion.
    how : str
    Method for down/re-sampling, default to 'mean' for downsampling.
    .. deprecated:: 0.18.0
    The new syntax is ``.resample(...).mean()``, or
    ``.resample(...).apply(<func>)``
    axis : {0 or 'index', 1 or 'columns'}, default 0
    Which axis to use for up- or down-sampling. For `Series` this
    will default to 0, i.e. along the rows. Must be
    `DatetimeIndex`, `TimedeltaIndex` or `PeriodIndex`.
    fill_method : str, default None
    Filling method for upsampling.
    .. deprecated:: 0.18.0
    The new syntax is ``.resample(...).<func>()``,
    e.g. ``.resample(...).pad()``
    closed : {'right', 'left'}, default None
    Which side of bin interval is closed. The default is 'left'
    for all frequency offsets except for 'M', 'A', 'Q', 'BM',
    'BA', 'BQ', and 'W' which all have a default of 'right'.
    label : {'right', 'left'}, default None
    Which bin edge label to label bucket with. The default is 'left'
    for all frequency offsets except for 'M', 'A', 'Q', 'BM',
    'BA', 'BQ', and 'W' which all have a default of 'right'.
    convention : {'start', 'end', 's', 'e'}, default 'start'
    For `PeriodIndex` only, controls whether to use the start or
    end of `rule`.
    kind : {'timestamp', 'period'}, optional, default None
    Pass 'timestamp' to convert the resulting index to a
    `DateTimeIndex` or 'period' to convert it to a `PeriodIndex`.
    By default the input representation is retained.
    loffset : timedelta, default None
    Adjust the resampled time labels.
    limit : int, default None
    Maximum size gap when reindexing with `fill_method`.
    .. deprecated:: 0.18.0
    base : int, default 0
    For frequencies that evenly subdivide 1 day, the "origin" of the
    aggregated intervals. For example, for '5min' frequency, base could
    range from 0 through 4. Defaults to 0.
    on : str, optional
    For a DataFrame, column to use instead of index for resampling.
    Column must be datetime-like.
    .. versionadded:: 0.19.0
    level : str or int, optional
    For a MultiIndex, level (name or number) to use for
    resampling. `level` must be datetime-like.
    .. versionadded:: 0.19.0
    Returns
    -------
    Resampler object
    See Also
    --------
    groupby : Group by mapping, function, label, or list of labels.
    Series.resample : Resample a Series.
    DataFrame.resample: Resample a DataFrame.
    Notes
    -----
    See the `user guide
    <http://pandas.pydata.org/pandas-docs/stable/timeseries.html#resampling>`_
    for more.
    To learn more about the offset strings, please see `this link
    <http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases>`__.
    Examples
    --------
    Start by creating a series with 9 one minute timestamps.
    >>> index = pd.date_range('1/1/2000', periods=9, freq='T')
    >>> series = pd.Series(range(9), index=index)
    >>> series
    2000-01-01 00:00:00 0
    2000-01-01 00:01:00 1
    2000-01-01 00:02:00 2
    2000-01-01 00:03:00 3
    2000-01-01 00:04:00 4
    2000-01-01 00:05:00 5
    2000-01-01 00:06:00 6
    2000-01-01 00:07:00 7
    2000-01-01 00:08:00 8
    Freq: T, dtype: int64
    Downsample the series into 3 minute bins and sum the values
    of the timestamps falling into a bin.
    >>> series.resample('3T').sum()
    2000-01-01 00:00:00 3
    2000-01-01 00:03:00 12
    2000-01-01 00:06:00 21
    Freq: 3T, dtype: int64
    Downsample the series into 3 minute bins as above, but label each
    bin using the right edge instead of the left. Please note that the
    value in the bucket used as the label is not included in the bucket,
    which it labels. For example, in the original series the
    bucket ``2000-01-01 00:03:00`` contains the value 3, but the summed
    value in the resampled bucket with the label ``2000-01-01 00:03:00``
    does not include 3 (if it did, the summed value would be 6, not 3).
    To include this value close the right side of the bin interval as
    illustrated in the example below this one.
    >>> series.resample('3T', label='right').sum()
    2000-01-01 00:03:00 3
    2000-01-01 00:06:00 12
    2000-01-01 00:09:00 21
    Freq: 3T, dtype: int64
    Downsample the series into 3 minute bins as above, but close the right
    side of the bin interval.
    >>> series.resample('3T', label='right', closed='right').sum()
    2000-01-01 00:00:00 0
    2000-01-01 00:03:00 6
    2000-01-01 00:06:00 15
    2000-01-01 00:09:00 15
    Freq: 3T, dtype: int64
    Upsample the series into 30 second bins.
    >>> series.resample('30S').asfreq()[0:5] # Select first 5 rows
    2000-01-01 00:00:00 0.0
    2000-01-01 00:00:30 NaN
    2000-01-01 00:01:00 1.0
    2000-01-01 00:01:30 NaN
    2000-01-01 00:02:00 2.0
    Freq: 30S, dtype: float64
    Upsample the series into 30 second bins and fill the ``NaN``
    values using the ``pad`` method.
    >>> series.resample('30S').pad()[0:5]
    2000-01-01 00:00:00 0
    2000-01-01 00:00:30 0
    2000-01-01 00:01:00 1
    2000-01-01 00:01:30 1
    2000-01-01 00:02:00 2
    Freq: 30S, dtype: int64
    Upsample the series into 30 second bins and fill the
    ``NaN`` values using the ``bfill`` method.
    >>> series.resample('30S').bfill()[0:5]
    2000-01-01 00:00:00 0
    2000-01-01 00:00:30 1
    2000-01-01 00:01:00 1
    2000-01-01 00:01:30 2
    2000-01-01 00:02:00 2
    Freq: 30S, dtype: int64
    Pass a custom function via ``apply``
    >>> def custom_resampler(array_like):
    ... return np.sum(array_like) + 5
    ...
    >>> series.resample('3T').apply(custom_resampler)
    2000-01-01 00:00:00 8
    2000-01-01 00:03:00 17
    2000-01-01 00:06:00 26
    Freq: 3T, dtype: int64
    For a Series with a PeriodIndex, the keyword `convention` can be
    used to control whether to use the start or end of `rule`.
    Resample a year by quarter using 'start' `convention`. Values are
    assigned to the first quarter of the period.
    >>> s = pd.Series([1, 2], index=pd.period_range('2012-01-01',
    ... freq='A',
    ... periods=2))
    >>> s
    2012 1
    2013 2
    Freq: A-DEC, dtype: int64
    >>> s.resample('Q', convention='start').asfreq()
    2012Q1 1.0
    2012Q2 NaN
    2012Q3 NaN
    2012Q4 NaN
    2013Q1 2.0
    2013Q2 NaN
    2013Q3 NaN
    2013Q4 NaN
    Freq: Q-DEC, dtype: float64
    Resample quarters by month using 'end' `convention`. Values are
    assigned to the last month of the period.
    >>> q = pd.Series([1, 2, 3, 4], index=pd.period_range('2018-01-01',
    ... freq='Q',
    ... periods=4))
    >>> q
    2018Q1 1
    2018Q2 2
    2018Q3 3
    2018Q4 4
    Freq: Q-DEC, dtype: int64
    >>> q.resample('M', convention='end').asfreq()
    2018-03 1.0
    2018-04 NaN
    2018-05 NaN
    2018-06 2.0
    2018-07 NaN
    2018-08 NaN
    2018-09 3.0
    2018-10 NaN
    2018-11 NaN
    2018-12 4.0
    Freq: M, dtype: float64
    For DataFrame objects, the keyword `on` can be used to specify the
    column instead of the index for resampling.
    >>> d = dict({'price': [10, 11, 9, 13, 14, 18, 17, 19],
    ... 'volume': [50, 60, 40, 100, 50, 100, 40, 50]})
    >>> df = pd.DataFrame(d)
    >>> df['week_starting'] = pd.date_range('01/01/2018',
    ... periods=8,
    ... freq='W')
    >>> df
    price volume week_starting
    0 10 50 2018-01-07
    1 11 60 2018-01-14
    2 9 40 2018-01-21
    3 13 100 2018-01-28
    4 14 50 2018-02-04
    5 18 100 2018-02-11
    6 17 40 2018-02-18
    7 19 50 2018-02-25
    >>> df.resample('M', on='week_starting').mean()
    price volume
    week_starting
    2018-01-31 10.75 62.5
    2018-02-28 17.00 60.0
    For a DataFrame with MultiIndex, the keyword `level` can be used to
    specify on which level the resampling needs to take place.
    >>> days = pd.date_range('1/1/2000', periods=4, freq='D')
    >>> d2 = dict({'price': [10, 11, 9, 13, 14, 18, 17, 19],
    ... 'volume': [50, 60, 40, 100, 50, 100, 40, 50]})
    >>> df2 = pd.DataFrame(d2,
    ... index=pd.MultiIndex.from_product([days,
    ... ['morning',
    ... 'afternoon']]
    ... ))
    >>> df2
    price volume
    2000-01-01 morning 10 50
    afternoon 11 60
    2000-01-02 morning 9 40
    afternoon 13 100
    2000-01-03 morning 14 50
    afternoon 18 100
    2000-01-04 morning 17 40
    afternoon 19 50
    >>> df2.resample('D', level=0).sum()
    price volume
    2000-01-01 21 110
    2000-01-02 22 140
    2000-01-03 32 150
    2000-01-04 36 90
    """
    # Local import avoids a circular dependency at module load time.
    from pandas.core.resample import (resample,
                                      _maybe_process_deprecations)
    axis = self._get_axis_number(axis)
    r = resample(self, freq=rule, label=label, closed=closed,
                 axis=axis, kind=kind, loffset=loffset,
                 convention=convention,
                 base=base, key=on, level=level)
    # Translate the deprecated how/fill_method/limit keywords
    # (deprecated since 0.18.0) into calls on the Resampler object.
    return _maybe_process_deprecations(r,
                                       how=how,
                                       fill_method=fill_method,
                                       limit=limit)
def first(self, offset):
    """
    Convenience method for subsetting initial periods of time series data
    based on a date offset.

    Parameters
    ----------
    offset : string, DateOffset, dateutil.relativedelta

    Returns
    -------
    subset : same type as caller

    Raises
    ------
    TypeError
        If the index is not a :class:`DatetimeIndex`

    See Also
    --------
    last : Select final periods of time series based on a date offset.
    at_time : Select values at a particular time of the day.
    between_time : Select values between particular times of the day.

    Examples
    --------
    >>> i = pd.date_range('2018-04-09', periods=4, freq='2D')
    >>> ts = pd.DataFrame({'A': [1,2,3,4]}, index=i)
    >>> ts
                A
    2018-04-09  1
    2018-04-11  2
    2018-04-13  3
    2018-04-15  4

    Get the rows for the first 3 days:

    >>> ts.first('3D')
                A
    2018-04-09  1
    2018-04-11  2

    Notice the data for 3 first calender days were returned, not the first
    3 days observed in the dataset, and therefore data for 2018-04-13 was
    not returned.
    """
    if not isinstance(self.index, DatetimeIndex):
        raise TypeError("'first' only supports a DatetimeIndex index")
    if not len(self.index):
        return self
    delta = to_offset(offset)
    end = end_date = self.index[0] + delta
    # For tick-like (unanchored) offsets, e.g. 3 weeks, whose computed
    # end point is an actual label, slice positionally so that the
    # boundary row itself is excluded.
    is_tick_like = not delta.isAnchored() and hasattr(delta, '_inc')
    if is_tick_like and end_date in self.index:
        end_pos = self.index.searchsorted(end_date, side='left')
        return self.iloc[:end_pos]
    return self.loc[:end]
def last(self, offset):
    """
    Convenience method for subsetting final periods of time series data
    based on a date offset.

    Parameters
    ----------
    offset : string, DateOffset, dateutil.relativedelta

    Returns
    -------
    subset : same type as caller

    Raises
    ------
    TypeError
        If the index is not a :class:`DatetimeIndex`

    See Also
    --------
    first : Select initial periods of time series based on a date offset.
    at_time : Select values at a particular time of the day.
    between_time : Select values between particular times of the day.

    Examples
    --------
    >>> i = pd.date_range('2018-04-09', periods=4, freq='2D')
    >>> ts = pd.DataFrame({'A': [1,2,3,4]}, index=i)
    >>> ts
                A
    2018-04-09  1
    2018-04-11  2
    2018-04-13  3
    2018-04-15  4

    Get the rows for the last 3 days:

    >>> ts.last('3D')
                A
    2018-04-13  3
    2018-04-15  4

    Notice the data for 3 last calender days were returned, not the last
    3 observed days in the dataset, and therefore data for 2018-04-11 was
    not returned.
    """
    if not isinstance(self.index, DatetimeIndex):
        raise TypeError("'last' only supports a DatetimeIndex index")
    if not len(self.index):
        return self
    delta = to_offset(offset)
    # Locate the first position strictly after index[-1] - offset and
    # keep everything from there to the end.
    cutoff = self.index[-1] - delta
    start_pos = self.index.searchsorted(cutoff, side='right')
    return self.iloc[start_pos:]
def rank(self, axis=0, method='average', numeric_only=None,
na_option='keep', ascending=True, pct=False):
"""
Compute numerical data ranks (1 through n) along axis. Equal values are
assigned a rank that is the average of the ranks of those values.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
index to direct ranking
method : {'average', 'min', 'max', 'first', 'dense'}
* average: average rank of group
* min: lowest rank in group
* max: highest rank in group
* first: ranks assigned in order they appear in the array
* dense: like 'min', but rank always increases by 1 between groups
numeric_only : boolean, default None
Include only float, int, boolean data. Valid only for DataFrame or
Panel objects
na_option : {'keep', 'top', 'bottom'}
* keep: leave NA values where they are
* top: smallest rank if ascending
* bottom: smallest rank if descending
ascending : boolean, default True
False for ranks by high (1) to low (N)
pct : boolean, default False
Computes percentage rank of data
Returns
-------
ranks : same type as caller
"""
axis = self._get_axis_number(axis)
if self.ndim > 2:
msg = "rank does not make sense when ndim > 2"
raise NotImplementedError(msg)
if na_option not in {'keep', 'top', 'bottom'}:
msg = "na_option must be one of 'keep', 'top', or 'bottom'"
raise ValueError(msg)
def ranker(data):
ranks = algos.rank(data.values, axis=axis, method=method,
ascending=ascending, na_option=na_option,
pct=pct)
ranks = self._constructor(ranks, **data._construct_axes_dict())
return ranks.__finalize__(self)
# if numeric_only is None, and we can't get anything, we try with
# numeric_only=True
if numeric_only is None:
try:
return ranker(self)
except TypeError:
numeric_only = True
if numeric_only:
data = self._get_numeric_data()
else:
data = self
return ranker(data)
_shared_docs['align'] = ("""
Align two objects on their axes with the
specified join method for each axis Index.
Parameters
----------
other : DataFrame or Series
join : {'outer', 'inner', 'left', 'right'}, default 'outer'
axis : allowed axis of the other object, default None
Align on index (0), columns (1), or both (None)
level : int or level name, default None
Broadcast across a level, matching Index values on the
passed MultiIndex level
copy : boolean, default True
Always returns new objects. If copy=False and no reindexing is
required then original objects are returned.
fill_value : scalar, default np.NaN
Value to use for missing values. Defaults to NaN, but can be any
"compatible" value
method : {'backfill', 'bfill', 'pad', 'ffill', None}, default None
Method to use for filling holes in reindexed Series
pad / ffill: propagate last valid observation forward to next valid
backfill / bfill: use NEXT valid observation to fill gap
limit : int, default None
If method is specified, this is the maximum number of consecutive
NaN values to forward/backward fill. In other words, if there is
a gap with more than this number of consecutive NaNs, it will only
be partially filled. If method is not specified, this is the
maximum number of entries along the entire axis where NaNs will be
filled. Must be greater than 0 if not None.
fill_axis : %(axes_single_arg)s, default 0
Filling axis, method and limit
broadcast_axis : %(axes_single_arg)s, default None
Broadcast values along this axis, if aligning two objects of
different dimensions
Returns
-------
(left, right) : (%(klass)s, type of other)
Aligned objects
""")
    @Appender(_shared_docs['align'] % _shared_doc_kwargs)
    def align(self, other, join='outer', axis=None, level=None, copy=True,
              fill_value=None, method=None, limit=None, fill_axis=0,
              broadcast_axis=None):
        # Public entry point: dispatches to _align_frame / _align_series
        # based on the types of self and other.  With broadcast_axis=1 a
        # Series on either side is first expanded to a DataFrame so that
        # the frame/frame path can be used.
        from pandas import DataFrame, Series
        method = missing.clean_fill_method(method)

        if broadcast_axis == 1 and self.ndim != other.ndim:
            if isinstance(self, Series):
                # this means other is a DataFrame, and we need to broadcast
                # self
                cons = self._constructor_expanddim
                df = cons({c: self for c in other.columns},
                          **other._construct_axes_dict())
                return df._align_frame(other, join=join, axis=axis,
                                       level=level, copy=copy,
                                       fill_value=fill_value, method=method,
                                       limit=limit, fill_axis=fill_axis)
            elif isinstance(other, Series):
                # this means self is a DataFrame, and we need to broadcast
                # other
                cons = other._constructor_expanddim
                df = cons({c: other for c in self.columns},
                          **self._construct_axes_dict())
                return self._align_frame(df, join=join, axis=axis, level=level,
                                         copy=copy, fill_value=fill_value,
                                         method=method, limit=limit,
                                         fill_axis=fill_axis)

        if axis is not None:
            axis = self._get_axis_number(axis)

        # No broadcasting required: pick the aligner matching other's type.
        if isinstance(other, DataFrame):
            return self._align_frame(other, join=join, axis=axis, level=level,
                                     copy=copy, fill_value=fill_value,
                                     method=method, limit=limit,
                                     fill_axis=fill_axis)
        elif isinstance(other, Series):
            return self._align_series(other, join=join, axis=axis, level=level,
                                      copy=copy, fill_value=fill_value,
                                      method=method, limit=limit,
                                      fill_axis=fill_axis)
        else:  # pragma: no cover
            raise TypeError('unsupported type: %s' % type(other))
    def _align_frame(self, other, join='outer', axis=None, level=None,
                     copy=True, fill_value=None, method=None, limit=None,
                     fill_axis=0):
        """
        Align self (Series or DataFrame) against a DataFrame `other`.

        Joins the requested axes, reindexes both sides onto the joined
        labels, optionally fills, and returns ``(left, right)`` with
        metadata propagated via ``__finalize__``.
        """
        # defaults
        join_index, join_columns = None, None
        ilidx, iridx = None, None
        clidx, cridx = None, None

        is_series = isinstance(self, ABCSeries)

        # Join rows (axis 0) unless restricted to columns only.
        if axis is None or axis == 0:
            if not self.index.equals(other.index):
                join_index, ilidx, iridx = self.index.join(
                    other.index, how=join, level=level, return_indexers=True)

        # Join columns (axis 1); a Series has no columns to join.
        if axis is None or axis == 1:
            if not is_series and not self.columns.equals(other.columns):
                join_columns, clidx, cridx = self.columns.join(
                    other.columns, how=join, level=level, return_indexers=True)

        if is_series:
            reindexers = {0: [join_index, ilidx]}
        else:
            reindexers = {0: [join_index, ilidx], 1: [join_columns, clidx]}

        left = self._reindex_with_indexers(reindexers, copy=copy,
                                           fill_value=fill_value,
                                           allow_dups=True)
        # other must be always DataFrame
        right = other._reindex_with_indexers({0: [join_index, iridx],
                                              1: [join_columns, cridx]},
                                             copy=copy, fill_value=fill_value,
                                             allow_dups=True)

        if method is not None:
            left = left.fillna(axis=fill_axis, method=method, limit=limit)
            right = right.fillna(axis=fill_axis, method=method, limit=limit)

        # if DatetimeIndex have different tz, convert to UTC
        if is_datetime64tz_dtype(left.index):
            if left.index.tz != right.index.tz:
                if join_index is not None:
                    left.index = join_index
                    right.index = join_index

        return left.__finalize__(self), right.__finalize__(other)
    def _align_series(self, other, join='outer', axis=None, level=None,
                      copy=True, fill_value=None, method=None, limit=None,
                      fill_axis=0):
        """
        Align self (Series or DataFrame) against a Series `other`.

        Series/Series alignment joins the two indexes; DataFrame/Series
        alignment joins other's index against the axis selected by
        ``axis`` (0 = index, 1 = columns) and reindexes the underlying
        block manager directly. Returns ``(left, right)``.
        """
        is_series = isinstance(self, ABCSeries)

        # series/series compat, other must always be a Series
        if is_series:
            if axis:
                raise ValueError('cannot align series to a series other than '
                                 'axis 0')

            # equal
            if self.index.equals(other.index):
                join_index, lidx, ridx = None, None, None
            else:
                join_index, lidx, ridx = self.index.join(other.index, how=join,
                                                         level=level,
                                                         return_indexers=True)

            left = self._reindex_indexer(join_index, lidx, copy)
            right = other._reindex_indexer(join_index, ridx, copy)

        else:
            # one has > 1 ndim
            fdata = self._data
            if axis == 0:
                join_index = self.index
                lidx, ridx = None, None
                if not self.index.equals(other.index):
                    join_index, lidx, ridx = self.index.join(
                        other.index, how=join, level=level,
                        return_indexers=True)

                # NOTE: block-manager axis 1 corresponds to the frame's
                # index (rows) here.
                if lidx is not None:
                    fdata = fdata.reindex_indexer(join_index, lidx, axis=1)

            elif axis == 1:
                join_index = self.columns
                lidx, ridx = None, None
                if not self.columns.equals(other.index):
                    join_index, lidx, ridx = self.columns.join(
                        other.index, how=join, level=level,
                        return_indexers=True)

                if lidx is not None:
                    fdata = fdata.reindex_indexer(join_index, lidx, axis=0)
            else:
                raise ValueError('Must specify axis=0 or 1')

            if copy and fdata is self._data:
                fdata = fdata.copy()

            left = self._constructor(fdata)

            if ridx is None:
                right = other
            else:
                right = other.reindex(join_index, level=level)

        # fill
        fill_na = notna(fill_value) or (method is not None)
        if fill_na:
            left = left.fillna(fill_value, method=method, limit=limit,
                               axis=fill_axis)
            right = right.fillna(fill_value, method=method, limit=limit)

        # if DatetimeIndex have different tz, convert to UTC
        if is_series or (not is_series and axis == 0):
            if is_datetime64tz_dtype(left.index):
                if left.index.tz != right.index.tz:
                    if join_index is not None:
                        left.index = join_index
                        right.index = join_index

        return left.__finalize__(self), right.__finalize__(other)
def _where(self, cond, other=np.nan, inplace=False, axis=None, level=None,
errors='raise', try_cast=False):
"""
Equivalent to public method `where`, except that `other` is not
applied as a function even if callable. Used in __setitem__.
"""
inplace = validate_bool_kwarg(inplace, 'inplace')
# align the cond to same shape as myself
cond = com.apply_if_callable(cond, self)
if isinstance(cond, NDFrame):
cond, _ = cond.align(self, join='right', broadcast_axis=1)
else:
if not hasattr(cond, 'shape'):
cond = np.asanyarray(cond)
if cond.shape != self.shape:
raise ValueError('Array conditional must be same shape as '
'self')
cond = self._constructor(cond, **self._construct_axes_dict())
# make sure we are boolean
fill_value = True if inplace else False
cond = cond.fillna(fill_value)
msg = "Boolean array expected for the condition, not {dtype}"
if not isinstance(cond, pd.DataFrame):
# This is a single-dimensional object.
if not is_bool_dtype(cond):
raise ValueError(msg.format(dtype=cond.dtype))
elif not cond.empty:
for dt in cond.dtypes:
if not is_bool_dtype(dt):
raise ValueError(msg.format(dtype=dt))
cond = -cond if inplace else cond
# try to align with other
try_quick = True
if hasattr(other, 'align'):
# align with me
if other.ndim <= self.ndim:
_, other = self.align(other, join='left', axis=axis,
level=level, fill_value=np.nan)
# if we are NOT aligned, raise as we cannot where index
if (axis is None and
not all(other._get_axis(i).equals(ax)
for i, ax in enumerate(self.axes))):
raise InvalidIndexError
# slice me out of the other
else:
raise NotImplementedError("cannot align with a higher "
"dimensional NDFrame")
if isinstance(other, np.ndarray):
if other.shape != self.shape:
if self.ndim == 1:
icond = cond.values
# GH 2745 / GH 4192
# treat like a scalar
if len(other) == 1:
other = np.array(other[0])
# GH 3235
# match True cond to other
elif len(cond[icond]) == len(other):
# try to not change dtype at first (if try_quick)
if try_quick:
try:
new_other = com.values_from_object(self)
new_other = new_other.copy()
new_other[icond] = other
other = new_other
except Exception:
try_quick = False
# let's create a new (if we failed at the above
# or not try_quick
if not try_quick:
dtype, fill_value = maybe_promote(other.dtype)
new_other = np.empty(len(icond), dtype=dtype)
new_other.fill(fill_value)
maybe_upcast_putmask(new_other, icond, other)
other = new_other
else:
raise ValueError('Length of replacements must equal '
'series length')
else:
raise ValueError('other must be the same shape as self '
'when an ndarray')
# we are the same shape, so create an actual object for alignment
else:
other = self._constructor(other, **self._construct_axes_dict())
if axis is None:
axis = 0
if self.ndim == getattr(other, 'ndim', 0):
align = True
else:
align = (self._get_axis_number(axis) == 1)
block_axis = self._get_block_manager_axis(axis)
if inplace:
# we may have different type blocks come out of putmask, so
# reconstruct the block manager
self._check_inplace_setting(other)
new_data = self._data.putmask(mask=cond, new=other, align=align,
inplace=True, axis=block_axis,
transpose=self._AXIS_REVERSED)
self._update_inplace(new_data)
else:
new_data = self._data.where(other=other, cond=cond, align=align,
errors=errors,
try_cast=try_cast, axis=block_axis,
transpose=self._AXIS_REVERSED)
return self._constructor(new_data).__finalize__(self)
_shared_docs['where'] = ("""
Replace values where the condition is %(cond_rev)s.
Parameters
----------
cond : boolean %(klass)s, array-like, or callable
Where `cond` is %(cond)s, keep the original value. Where
%(cond_rev)s, replace with corresponding value from `other`.
If `cond` is callable, it is computed on the %(klass)s and
should return boolean %(klass)s or array. The callable must
not change input %(klass)s (though pandas doesn't check it).
.. versionadded:: 0.18.1
A callable can be used as cond.
other : scalar, %(klass)s, or callable
Entries where `cond` is %(cond_rev)s are replaced with
corresponding value from `other`.
If other is callable, it is computed on the %(klass)s and
should return scalar or %(klass)s. The callable must not
change input %(klass)s (though pandas doesn't check it).
.. versionadded:: 0.18.1
A callable can be used as other.
inplace : boolean, default False
Whether to perform the operation in place on the data.
axis : int, default None
Alignment axis if needed.
level : int, default None
Alignment level if needed.
errors : str, {'raise', 'ignore'}, default `raise`
Note that currently this parameter won't affect
the results and will always coerce to a suitable dtype.
- `raise` : allow exceptions to be raised.
- `ignore` : suppress exceptions. On error return original object.
try_cast : boolean, default False
Try to cast the result back to the input type (if possible).
raise_on_error : boolean, default True
Whether to raise on invalid data types (e.g. trying to where on
strings).
.. deprecated:: 0.21.0
Use `errors`.
Returns
-------
wh : same type as caller
See Also
--------
:func:`DataFrame.%(name_other)s` : Return an object of same shape as
self.
Notes
-----
The %(name)s method is an application of the if-then idiom. For each
element in the calling DataFrame, if ``cond`` is ``%(cond)s`` the
element is used; otherwise the corresponding element from the DataFrame
``other`` is used.
The signature for :func:`DataFrame.where` differs from
:func:`numpy.where`. Roughly ``df1.where(m, df2)`` is equivalent to
``np.where(m, df1, df2)``.
For further details and examples see the ``%(name)s`` documentation in
:ref:`indexing <indexing.where_mask>`.
Examples
--------
>>> s = pd.Series(range(5))
>>> s.where(s > 0)
0 NaN
1 1.0
2 2.0
3 3.0
4 4.0
dtype: float64
>>> s.mask(s > 0)
0 0.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
>>> s.where(s > 1, 10)
0 10
1 10
2 2
3 3
4 4
dtype: int64
>>> df = pd.DataFrame(np.arange(10).reshape(-1, 2), columns=['A', 'B'])
>>> m = df %% 3 == 0
>>> df.where(m, -df)
A B
0 0 -1
1 -2 3
2 -4 -5
3 6 -7
4 -8 9
>>> df.where(m, -df) == np.where(m, df, -df)
A B
0 True True
1 True True
2 True True
3 True True
4 True True
>>> df.where(m, -df) == df.mask(~m, -df)
A B
0 True True
1 True True
2 True True
3 True True
4 True True
""")
@Appender(_shared_docs['where'] % dict(_shared_doc_kwargs, cond="True",
cond_rev="False", name='where',
name_other='mask'))
def where(self, cond, other=np.nan, inplace=False, axis=None, level=None,
errors='raise', try_cast=False, raise_on_error=None):
if raise_on_error is not None:
warnings.warn(
"raise_on_error is deprecated in "
"favor of errors='raise|ignore'",
FutureWarning, stacklevel=2)
if raise_on_error:
errors = 'raise'
else:
errors = 'ignore'
other = com.apply_if_callable(other, self)
return self._where(cond, other, inplace, axis, level,
errors=errors, try_cast=try_cast)
@Appender(_shared_docs['where'] % dict(_shared_doc_kwargs, cond="False",
cond_rev="True", name='mask',
name_other='where'))
def mask(self, cond, other=np.nan, inplace=False, axis=None, level=None,
errors='raise', try_cast=False, raise_on_error=None):
if raise_on_error is not None:
warnings.warn(
"raise_on_error is deprecated in "
"favor of errors='raise|ignore'",
FutureWarning, stacklevel=2)
if raise_on_error:
errors = 'raise'
else:
errors = 'ignore'
inplace = validate_bool_kwarg(inplace, 'inplace')
cond = com.apply_if_callable(cond, self)
# see gh-21891
if not hasattr(cond, "__invert__"):
cond = np.array(cond)
return self.where(~cond, other=other, inplace=inplace, axis=axis,
level=level, try_cast=try_cast,
errors=errors)
_shared_docs['shift'] = ("""
Shift index by desired number of periods with an optional time `freq`.
When `freq` is not passed, shift the index without realigning the data.
If `freq` is passed (in this case, the index must be date or datetime,
or it will raise a `NotImplementedError`), the index will be
increased using the periods and the `freq`.
Parameters
----------
periods : int
Number of periods to shift. Can be positive or negative.
freq : DateOffset, tseries.offsets, timedelta, or str, optional
Offset to use from the tseries module or time rule (e.g. 'EOM').
If `freq` is specified then the index values are shifted but the
data is not realigned. That is, use `freq` if you would like to
extend the index when shifting and preserve the original data.
axis : {0 or 'index', 1 or 'columns', None}, default None
Shift direction.
fill_value : object, optional
The scalar value to use for newly introduced missing values.
the default depends on the dtype of `self`.
For numeric data, ``np.nan`` is used.
For datetime, timedelta, or period data, etc. :attr:`NaT` is used.
For extension dtypes, ``self.dtype.na_value`` is used.
.. versionchanged:: 0.24.0
Returns
-------
%(klass)s
Copy of input object, shifted.
See Also
--------
Index.shift : Shift values of Index.
DatetimeIndex.shift : Shift values of DatetimeIndex.
PeriodIndex.shift : Shift values of PeriodIndex.
tshift : Shift the time index, using the index's frequency if
available.
Examples
--------
>>> df = pd.DataFrame({'Col1': [10, 20, 15, 30, 45],
... 'Col2': [13, 23, 18, 33, 48],
... 'Col3': [17, 27, 22, 37, 52]})
>>> df.shift(periods=3)
Col1 Col2 Col3
0 NaN NaN NaN
1 NaN NaN NaN
2 NaN NaN NaN
3 10.0 13.0 17.0
4 20.0 23.0 27.0
>>> df.shift(periods=1, axis='columns')
Col1 Col2 Col3
0 NaN 10.0 13.0
1 NaN 20.0 23.0
2 NaN 15.0 18.0
3 NaN 30.0 33.0
4 NaN 45.0 48.0
>>> df.shift(periods=3, fill_value=0)
Col1 Col2 Col3
0 0 0 0
1 0 0 0
2 0 0 0
3 10 13 17
4 20 23 27
""")
@Appender(_shared_docs['shift'] % _shared_doc_kwargs)
def shift(self, periods=1, freq=None, axis=0, fill_value=None):
if periods == 0:
return self.copy()
block_axis = self._get_block_manager_axis(axis)
if freq is None:
new_data = self._data.shift(periods=periods, axis=block_axis,
fill_value=fill_value)
else:
return self.tshift(periods, freq)
return self._constructor(new_data).__finalize__(self)
def slice_shift(self, periods=1, axis=0):
"""
Equivalent to `shift` without copying data. The shifted data will
not include the dropped periods and the shifted axis will be smaller
than the original.
Parameters
----------
periods : int
Number of periods to move, can be positive or negative
Returns
-------
shifted : same type as caller
Notes
-----
While the `slice_shift` is faster than `shift`, you may pay for it
later during alignment.
"""
if periods == 0:
return self
if periods > 0:
vslicer = slice(None, -periods)
islicer = slice(periods, None)
else:
vslicer = slice(-periods, None)
islicer = slice(None, periods)
new_obj = self._slice(vslicer, axis=axis)
shifted_axis = self._get_axis(axis)[islicer]
new_obj.set_axis(shifted_axis, axis=axis, inplace=True)
return new_obj.__finalize__(self)
    def tshift(self, periods=1, freq=None, axis=0):
        """
        Shift the time index, using the index's frequency if available.

        Parameters
        ----------
        periods : int
            Number of periods to move, can be positive or negative.
        freq : DateOffset, timedelta, or time rule string, default None
            Increment to use from the tseries module or time rule (e.g.
            'EOM'). When None, falls back to the index's ``freq`` and then
            its ``inferred_freq``.
        axis : int or basestring
            Corresponds to the axis that contains the Index.

        Returns
        -------
        shifted : NDFrame

        Raises
        ------
        ValueError
            If no freq was given and none can be taken from the index, or
            if an explicit freq conflicts with a PeriodIndex's own freq.

        Notes
        -----
        If freq is not specified then tries to use the freq or inferred_freq
        attributes of the index. If neither of those attributes exist, a
        ValueError is thrown
        """
        index = self._get_axis(axis)
        # Resolve freq: explicit argument, then index.freq, then the
        # inferred frequency; give up if all three are missing.
        if freq is None:
            freq = getattr(index, 'freq', None)

        if freq is None:
            freq = getattr(index, 'inferred_freq', None)

        if freq is None:
            msg = 'Freq was not given and was not set in the index'
            raise ValueError(msg)

        if periods == 0:
            return self

        if isinstance(freq, string_types):
            freq = to_offset(freq)

        block_axis = self._get_block_manager_axis(axis)
        if isinstance(index, PeriodIndex):
            # A PeriodIndex can only be shifted by its own freq; a
            # mismatching explicit freq is an error rather than a resample.
            orig_freq = to_offset(index.freq)
            if freq == orig_freq:
                new_data = self._data.copy()
                new_data.axes[block_axis] = index.shift(periods)
            else:
                msg = ('Given freq %s does not match PeriodIndex freq %s' %
                       (freq.rule_code, orig_freq.rule_code))
                raise ValueError(msg)
        else:
            # Only the axis labels move; the data blocks are untouched.
            new_data = self._data.copy()
            new_data.axes[block_axis] = index.shift(periods, freq)

        return self._constructor(new_data).__finalize__(self)
def truncate(self, before=None, after=None, axis=None, copy=True):
"""
Truncate a Series or DataFrame before and after some index value.
This is a useful shorthand for boolean indexing based on index
values above or below certain thresholds.
Parameters
----------
before : date, string, int
Truncate all rows before this index value.
after : date, string, int
Truncate all rows after this index value.
axis : {0 or 'index', 1 or 'columns'}, optional
Axis to truncate. Truncates the index (rows) by default.
copy : boolean, default is True,
Return a copy of the truncated section.
Returns
-------
type of caller
The truncated Series or DataFrame.
See Also
--------
DataFrame.loc : Select a subset of a DataFrame by label.
DataFrame.iloc : Select a subset of a DataFrame by position.
Notes
-----
If the index being truncated contains only datetime values,
`before` and `after` may be specified as strings instead of
Timestamps.
Examples
--------
>>> df = pd.DataFrame({'A': ['a', 'b', 'c', 'd', 'e'],
... 'B': ['f', 'g', 'h', 'i', 'j'],
... 'C': ['k', 'l', 'm', 'n', 'o']},
... index=[1, 2, 3, 4, 5])
>>> df
A B C
1 a f k
2 b g l
3 c h m
4 d i n
5 e j o
>>> df.truncate(before=2, after=4)
A B C
2 b g l
3 c h m
4 d i n
The columns of a DataFrame can be truncated.
>>> df.truncate(before="A", after="B", axis="columns")
A B
1 a f
2 b g
3 c h
4 d i
5 e j
For Series, only rows can be truncated.
>>> df['A'].truncate(before=2, after=4)
2 b
3 c
4 d
Name: A, dtype: object
The index values in ``truncate`` can be datetimes or string
dates.
>>> dates = pd.date_range('2016-01-01', '2016-02-01', freq='s')
>>> df = pd.DataFrame(index=dates, data={'A': 1})
>>> df.tail()
A
2016-01-31 23:59:56 1
2016-01-31 23:59:57 1
2016-01-31 23:59:58 1
2016-01-31 23:59:59 1
2016-02-01 00:00:00 1
>>> df.truncate(before=pd.Timestamp('2016-01-05'),
... after=pd.Timestamp('2016-01-10')).tail()
A
2016-01-09 23:59:56 1
2016-01-09 23:59:57 1
2016-01-09 23:59:58 1
2016-01-09 23:59:59 1
2016-01-10 00:00:00 1
Because the index is a DatetimeIndex containing only dates, we can
specify `before` and `after` as strings. They will be coerced to
Timestamps before truncation.
>>> df.truncate('2016-01-05', '2016-01-10').tail()
A
2016-01-09 23:59:56 1
2016-01-09 23:59:57 1
2016-01-09 23:59:58 1
2016-01-09 23:59:59 1
2016-01-10 00:00:00 1
Note that ``truncate`` assumes a 0 value for any unspecified time
component (midnight). This differs from partial string slicing, which
returns any partially matching dates.
>>> df.loc['2016-01-05':'2016-01-10', :].tail()
A
2016-01-10 23:59:55 1
2016-01-10 23:59:56 1
2016-01-10 23:59:57 1
2016-01-10 23:59:58 1
2016-01-10 23:59:59 1
"""
if axis is None:
axis = self._stat_axis_number
axis = self._get_axis_number(axis)
ax = self._get_axis(axis)
# GH 17935
# Check that index is sorted
if not ax.is_monotonic_increasing and not ax.is_monotonic_decreasing:
raise ValueError("truncate requires a sorted index")
# if we have a date index, convert to dates, otherwise
# treat like a slice
if ax.is_all_dates:
from pandas.core.tools.datetimes import to_datetime
before = to_datetime(before)
after = to_datetime(after)
if before is not None and after is not None:
if before > after:
raise ValueError('Truncate: %s must be after %s' %
(after, before))
slicer = [slice(None, None)] * self._AXIS_LEN
slicer[axis] = slice(before, after)
result = self.loc[tuple(slicer)]
if isinstance(ax, MultiIndex):
setattr(result, self._get_axis_name(axis),
ax.truncate(before, after))
if copy:
result = result.copy()
return result
def tz_convert(self, tz, axis=0, level=None, copy=True):
"""
Convert tz-aware axis to target time zone.
Parameters
----------
tz : string or pytz.timezone object
axis : the axis to convert
level : int, str, default None
If axis ia a MultiIndex, convert a specific level. Otherwise
must be None
copy : boolean, default True
Also make a copy of the underlying data
Returns
-------
Raises
------
TypeError
If the axis is tz-naive.
"""
axis = self._get_axis_number(axis)
ax = self._get_axis(axis)
def _tz_convert(ax, tz):
if not hasattr(ax, 'tz_convert'):
if len(ax) > 0:
ax_name = self._get_axis_name(axis)
raise TypeError('%s is not a valid DatetimeIndex or '
'PeriodIndex' % ax_name)
else:
ax = DatetimeIndex([], tz=tz)
else:
ax = ax.tz_convert(tz)
return ax
# if a level is given it must be a MultiIndex level or
# equivalent to the axis name
if isinstance(ax, MultiIndex):
level = ax._get_level_number(level)
new_level = _tz_convert(ax.levels[level], tz)
ax = ax.set_levels(new_level, level=level)
else:
if level not in (None, 0, ax.name):
raise ValueError("The level {0} is not valid".format(level))
ax = _tz_convert(ax, tz)
result = self._constructor(self._data, copy=copy)
result = result.set_axis(ax, axis=axis, inplace=False)
return result.__finalize__(self)
def tz_localize(self, tz, axis=0, level=None, copy=True,
ambiguous='raise', nonexistent='raise'):
"""
Localize tz-naive index of a Series or DataFrame to target time zone.
This operation localizes the Index. To localize the values in a
timezone-naive Series, use :meth:`Series.dt.tz_localize`.
Parameters
----------
tz : string or pytz.timezone object
axis : the axis to localize
level : int, str, default None
If axis ia a MultiIndex, localize a specific level. Otherwise
must be None
copy : boolean, default True
Also make a copy of the underlying data
ambiguous : 'infer', bool-ndarray, 'NaT', default 'raise'
When clocks moved backward due to DST, ambiguous times may arise.
For example in Central European Time (UTC+01), when going from
03:00 DST to 02:00 non-DST, 02:30:00 local time occurs both at
00:30:00 UTC and at 01:30:00 UTC. In such a situation, the
`ambiguous` parameter dictates how ambiguous times should be
handled.
- 'infer' will attempt to infer fall dst-transition hours based on
order
- bool-ndarray where True signifies a DST time, False designates
a non-DST time (note that this flag is only applicable for
ambiguous times)
- 'NaT' will return NaT where there are ambiguous times
- 'raise' will raise an AmbiguousTimeError if there are ambiguous
times
nonexistent : str, default 'raise'
A nonexistent time does not exist in a particular timezone
where clocks moved forward due to DST. Valid valuse are:
- 'shift_forward' will shift the nonexistent time forward to the
closest existing time
- 'shift_backward' will shift the nonexistent time backward to the
closest existing time
- 'NaT' will return NaT where there are nonexistent times
- timedelta objects will shift nonexistent times by the timedelta
- 'raise' will raise an NonExistentTimeError if there are
nonexistent times
.. versionadded:: 0.24.0
Returns
-------
Series or DataFrame
Same type as the input.
Raises
------
TypeError
If the TimeSeries is tz-aware and tz is not None.
Examples
--------
Localize local times:
>>> s = pd.Series([1],
... index=pd.DatetimeIndex(['2018-09-15 01:30:00']))
>>> s.tz_localize('CET')
2018-09-15 01:30:00+02:00 1
dtype: int64
Be careful with DST changes. When there is sequential data, pandas
can infer the DST time:
>>> s = pd.Series(range(7), index=pd.DatetimeIndex([
... '2018-10-28 01:30:00',
... '2018-10-28 02:00:00',
... '2018-10-28 02:30:00',
... '2018-10-28 02:00:00',
... '2018-10-28 02:30:00',
... '2018-10-28 03:00:00',
... '2018-10-28 03:30:00']))
>>> s.tz_localize('CET', ambiguous='infer')
2018-10-28 01:30:00+02:00 0
2018-10-28 02:00:00+02:00 1
2018-10-28 02:30:00+02:00 2
2018-10-28 02:00:00+01:00 3
2018-10-28 02:30:00+01:00 4
2018-10-28 03:00:00+01:00 5
2018-10-28 03:30:00+01:00 6
dtype: int64
In some cases, inferring the DST is impossible. In such cases, you can
pass an ndarray to the ambiguous parameter to set the DST explicitly
>>> s = pd.Series(range(3), index=pd.DatetimeIndex([
... '2018-10-28 01:20:00',
... '2018-10-28 02:36:00',
... '2018-10-28 03:46:00']))
>>> s.tz_localize('CET', ambiguous=np.array([True, True, False]))
2018-10-28 01:20:00+02:00 0
2018-10-28 02:36:00+02:00 1
2018-10-28 03:46:00+01:00 2
dtype: int64
If the DST transition causes nonexistent times, you can shift these
dates forward or backwards with a timedelta object or `'shift_forward'`
or `'shift_backwards'`.
>>> s = pd.Series(range(2), index=pd.DatetimeIndex([
... '2015-03-29 02:30:00',
... '2015-03-29 03:30:00']))
>>> s.tz_localize('Europe/Warsaw', nonexistent='shift_forward')
2015-03-29 03:00:00+02:00 0
2015-03-29 03:30:00+02:00 1
dtype: int64
>>> s.tz_localize('Europe/Warsaw', nonexistent='shift_backward')
2015-03-29 01:59:59.999999999+01:00 0
2015-03-29 03:30:00+02:00 1
dtype: int64
>>> s.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H'))
2015-03-29 03:30:00+02:00 0
2015-03-29 03:30:00+02:00 1
dtype: int64
"""
nonexistent_options = ('raise', 'NaT', 'shift_forward',
'shift_backward')
if nonexistent not in nonexistent_options and not isinstance(
nonexistent, timedelta):
raise ValueError("The nonexistent argument must be one of 'raise',"
" 'NaT', 'shift_forward', 'shift_backward' or"
" a timedelta object")
axis = self._get_axis_number(axis)
ax = self._get_axis(axis)
def _tz_localize(ax, tz, ambiguous, nonexistent):
if not hasattr(ax, 'tz_localize'):
if len(ax) > 0:
ax_name = self._get_axis_name(axis)
raise TypeError('%s is not a valid DatetimeIndex or '
'PeriodIndex' % ax_name)
else:
ax = DatetimeIndex([], tz=tz)
else:
ax = ax.tz_localize(
tz, ambiguous=ambiguous, nonexistent=nonexistent
)
return ax
# if a level is given it must be a MultiIndex level or
# equivalent to the axis name
if isinstance(ax, MultiIndex):
level = ax._get_level_number(level)
new_level = _tz_localize(
ax.levels[level], tz, ambiguous, nonexistent
)
ax = ax.set_levels(new_level, level=level)
else:
if level not in (None, 0, ax.name):
raise ValueError("The level {0} is not valid".format(level))
ax = _tz_localize(ax, tz, ambiguous, nonexistent)
result = self._constructor(self._data, copy=copy)
result = result.set_axis(ax, axis=axis, inplace=False)
return result.__finalize__(self)
# ----------------------------------------------------------------------
# Numeric Methods
def abs(self):
"""
Return a Series/DataFrame with absolute numeric value of each element.
This function only applies to elements that are all numeric.
Returns
-------
abs
Series/DataFrame containing the absolute value of each element.
See Also
--------
numpy.absolute : Calculate the absolute value element-wise.
Notes
-----
For ``complex`` inputs, ``1.2 + 1j``, the absolute value is
:math:`\\sqrt{ a^2 + b^2 }`.
Examples
--------
Absolute numeric values in a Series.
>>> s = pd.Series([-1.10, 2, -3.33, 4])
>>> s.abs()
0 1.10
1 2.00
2 3.33
3 4.00
dtype: float64
Absolute numeric values in a Series with complex numbers.
>>> s = pd.Series([1.2 + 1j])
>>> s.abs()
0 1.56205
dtype: float64
Absolute numeric values in a Series with a Timedelta element.
>>> s = pd.Series([pd.Timedelta('1 days')])
>>> s.abs()
0 1 days
dtype: timedelta64[ns]
Select rows with data closest to certain value using argsort (from
`StackOverflow <https://stackoverflow.com/a/17758115>`__).
>>> df = pd.DataFrame({
... 'a': [4, 5, 6, 7],
... 'b': [10, 20, 30, 40],
... 'c': [100, 50, -30, -50]
... })
>>> df
a b c
0 4 10 100
1 5 20 50
2 6 30 -30
3 7 40 -50
>>> df.loc[(df.c - 43).abs().argsort()]
a b c
1 5 20 50
0 4 10 100
2 6 30 -30
3 7 40 -50
"""
return np.abs(self)
def describe(self, percentiles=None, include=None, exclude=None):
    """
    Generate descriptive statistics that summarize the central tendency,
    dispersion and shape of a dataset's distribution, excluding ``NaN``
    values.

    Analyzes both numeric and object series, as well as ``DataFrame``
    column sets of mixed data types. The output varies depending on
    what is provided.

    Parameters
    ----------
    percentiles : list-like of numbers, optional
        The percentiles to include in the output. All should fall
        between 0 and 1. The default is ``[.25, .5, .75]``, which
        returns the 25th, 50th, and 75th percentiles.
    include : 'all', list-like of dtypes or None (default), optional
        A white list of data types to include in the result. Ignored
        for ``Series``. ``'all'`` includes every column; a list of
        dtypes (or ``select_dtypes``-style strings) limits the result;
        ``None`` keeps all numeric columns.
    exclude : list-like of dtypes or None (default), optional
        A black list of data types to omit from the result. Ignored
        for ``Series``. ``None`` excludes nothing.

    Returns
    -------
    Series or DataFrame
        Summary statistics of the Series or Dataframe provided.

    See Also
    --------
    DataFrame.count : Count number of non-NA/null observations.
    DataFrame.max : Maximum of the values in the object.
    DataFrame.min : Minimum of the values in the object.
    DataFrame.mean : Mean of the values.
    DataFrame.std : Standard deviation of the observations.
    DataFrame.select_dtypes : Subset of a DataFrame including/excluding
        columns based on their dtype.

    Notes
    -----
    For numeric data, the result's index will include ``count``,
    ``mean``, ``std``, ``min``, ``max`` as well as the requested
    percentiles (the median, ``50%``, is always included).

    For object data (e.g. strings or timestamps), the result's index
    will include ``count``, ``unique``, ``top`` (most common value) and
    ``freq`` (its frequency); timestamps also include ``first`` and
    ``last``. Ties for ``top`` are broken arbitrarily.

    For mixed data types in a ``DataFrame``, the default is to analyze
    only numeric columns; if none exist, all columns are analyzed.
    Use ``include``/``exclude`` to control which columns are analyzed.
    """
    if self.ndim >= 3:
        raise NotImplementedError(
            "describe is not implemented on Panel objects.")
    elif self.ndim == 2 and self.columns.size == 0:
        raise ValueError("Cannot describe a DataFrame without columns")

    if percentiles is None:
        percentiles = np.array([0.25, 0.5, 0.75])
    else:
        # Explicit conversion so we can append; validate range [0, 1].
        percentiles = list(percentiles)
        self._check_percentile(percentiles)
        # The median is always reported.
        if 0.5 not in percentiles:
            percentiles.append(0.5)
        percentiles = np.asarray(percentiles)

    # Sort and reject duplicates.
    unique_pcts = np.unique(percentiles)
    if len(unique_pcts) < len(percentiles):
        raise ValueError("percentiles cannot contain duplicates")
    percentiles = unique_pcts

    pct_labels = format_percentiles(percentiles)

    def _describe_numeric_1d(series):
        # count/mean/std/min, the percentiles, then max.
        labels = (['count', 'mean', 'std', 'min'] +
                  pct_labels + ['max'])
        values = ([series.count(), series.mean(), series.std(),
                   series.min()] +
                  series.quantile(percentiles).tolist() +
                  [series.max()])
        return pd.Series(values, index=labels, name=series.name)

    def _describe_categorical_1d(data):
        labels = ['count', 'unique']
        counts = data.value_counts()
        n_unique = len(counts[counts != 0])
        values = [data.count(), n_unique]
        if n_unique > 0:
            top, freq = counts.index[0], counts.iloc[0]
            if is_datetime64_any_dtype(data):
                tz = data.dt.tz
                asint = data.dropna().values.view('i8')
                top = Timestamp(top)
                if top.tzinfo is not None and tz is not None:
                    # Don't tz_localize(None) if key is already tz-aware
                    top = top.tz_convert(tz)
                else:
                    top = top.tz_localize(tz)
                labels += ['top', 'freq', 'first', 'last']
                values += [top, freq,
                           Timestamp(asint.min(), tz=tz),
                           Timestamp(asint.max(), tz=tz)]
            else:
                labels += ['top', 'freq']
                values += [top, freq]
        return pd.Series(values, index=labels, name=data.name)

    def _describe_1d(data):
        # Booleans are summarized like categoricals; numeric and
        # timedelta get the numeric summary; everything else is
        # treated as categorical.
        if is_bool_dtype(data):
            return _describe_categorical_1d(data)
        if is_numeric_dtype(data) or is_timedelta64_dtype(data):
            return _describe_numeric_1d(data)
        return _describe_categorical_1d(data)

    if self.ndim == 1:
        return _describe_1d(self)

    if (include is None) and (exclude is None):
        # When some numerics are found, keep only numerics.
        data = self.select_dtypes(include=[np.number])
        if len(data.columns) == 0:
            data = self
    elif include == 'all':
        if exclude is not None:
            raise ValueError("exclude must be None when include is 'all'")
        data = self
    else:
        data = self.select_dtypes(include=include, exclude=exclude)

    ldesc = [_describe_1d(col) for _, col in data.iteritems()]

    # Build a convenient row order: shorter per-column indexes first,
    # preserving first-seen order of each statistic name.
    names = []
    for col_index in sorted((x.index for x in ldesc), key=len):
        for stat_name in col_index:
            if stat_name not in names:
                names.append(stat_name)

    d = pd.concat(ldesc, join_axes=pd.Index([names]), axis=1)
    d.columns = data.columns.copy()
    return d
def _check_percentile(self, q):
"""
Validate percentiles (used by describe and quantile).
"""
msg = ("percentiles should all be in the interval [0, 1]. "
"Try {0} instead.")
q = np.asarray(q)
if q.ndim == 0:
if not 0 <= q <= 1:
raise ValueError(msg.format(q / 100.0))
else:
if not all(0 <= qs <= 1 for qs in q):
raise ValueError(msg.format(q / 100.0))
return q
_shared_docs['pct_change'] = """
Percentage change between the current and a prior element.
Computes the percentage change from the immediately previous row by
default. This is useful in comparing the percentage of change in a time
series of elements.
Parameters
----------
periods : int, default 1
Periods to shift for forming percent change.
fill_method : str, default 'pad'
How to handle NAs before computing percent changes.
limit : int, default None
The number of consecutive NAs to fill before stopping.
freq : DateOffset, timedelta, or offset alias string, optional
Increment to use from time series API (e.g. 'M' or BDay()).
**kwargs
Additional keyword arguments are passed into
`DataFrame.shift` or `Series.shift`.
Returns
-------
chg : Series or DataFrame
The same type as the calling object.
See Also
--------
Series.diff : Compute the difference of two elements in a Series.
DataFrame.diff : Compute the difference of two elements in a DataFrame.
Series.shift : Shift the index by some number of periods.
DataFrame.shift : Shift the index by some number of periods.
Examples
--------
**Series**
>>> s = pd.Series([90, 91, 85])
>>> s
0 90
1 91
2 85
dtype: int64
>>> s.pct_change()
0 NaN
1 0.011111
2 -0.065934
dtype: float64
>>> s.pct_change(periods=2)
0 NaN
1 NaN
2 -0.055556
dtype: float64
See the percentage change in a Series where filling NAs with last
valid observation forward to next valid.
>>> s = pd.Series([90, 91, None, 85])
>>> s
0 90.0
1 91.0
2 NaN
3 85.0
dtype: float64
>>> s.pct_change(fill_method='ffill')
0 NaN
1 0.011111
2 0.000000
3 -0.065934
dtype: float64
**DataFrame**
Percentage change in French franc, Deutsche Mark, and Italian lira from
1980-01-01 to 1980-03-01.
>>> df = pd.DataFrame({
... 'FR': [4.0405, 4.0963, 4.3149],
... 'GR': [1.7246, 1.7482, 1.8519],
... 'IT': [804.74, 810.01, 860.13]},
... index=['1980-01-01', '1980-02-01', '1980-03-01'])
>>> df
FR GR IT
1980-01-01 4.0405 1.7246 804.74
1980-02-01 4.0963 1.7482 810.01
1980-03-01 4.3149 1.8519 860.13
>>> df.pct_change()
FR GR IT
1980-01-01 NaN NaN NaN
1980-02-01 0.013810 0.013684 0.006549
1980-03-01 0.053365 0.059318 0.061876
Percentage of change in GOOG and APPL stock volume. Shows computing
the percentage change between columns.
>>> df = pd.DataFrame({
... '2016': [1769950, 30586265],
... '2015': [1500923, 40912316],
... '2014': [1371819, 41403351]},
... index=['GOOG', 'APPL'])
>>> df
2016 2015 2014
GOOG 1769950 1500923 1371819
APPL 30586265 40912316 41403351
>>> df.pct_change(axis='columns')
2016 2015 2014
GOOG NaN -0.151997 -0.086016
APPL NaN 0.337604 0.012002
"""
@Appender(_shared_docs['pct_change'] % _shared_doc_kwargs)
def pct_change(self, periods=1, fill_method='pad', limit=None, freq=None,
               **kwargs):
    # TODO: Not sure if above is correct - need someone to confirm.
    # Fractional change between each element and the one ``periods``
    # steps earlier: (x_t / x_{t-periods}) - 1.
    axis = self._get_axis_number(kwargs.pop('axis', self._stat_axis_name))
    if fill_method is None:
        data = self
    else:
        # Fill NAs first so gaps don't produce spurious NaN changes.
        data = self.fillna(method=fill_method, limit=limit, axis=axis)

    rs = (data.div(data.shift(periods=periods, freq=freq, axis=axis,
                              **kwargs)) - 1)
    rs = rs.reindex_like(data)
    if freq is None:
        # Re-mask positions that were NA in the (possibly filled) input
        # so filling does not invent percentage changes at NA slots.
        mask = isna(com.values_from_object(data))
        np.putmask(rs.values, mask, np.nan)
    return rs
def _agg_by_level(self, name, axis=0, level=0, skipna=True, **kwargs):
if axis is None:
raise ValueError("Must specify 'axis' when aggregating by level.")
grouped = self.groupby(level=level, axis=axis, sort=False)
if hasattr(grouped, name) and skipna:
return getattr(grouped, name)(**kwargs)
axis = self._get_axis_number(axis)
method = getattr(type(self), name)
applyf = lambda x: method(x, axis=axis, skipna=skipna, **kwargs)
return grouped.aggregate(applyf)
@classmethod
def _add_numeric_operations(cls):
    """
    Add the operations to the cls; evaluate the doc strings again
    """
    # Names and axis description used to render the shared docstrings.
    axis_descr, name, name2 = _doc_parms(cls)

    # Logical reductions.
    cls.any = _make_logical_function(
        cls, 'any', name, name2, axis_descr, _any_desc, nanops.nanany,
        _any_see_also, _any_examples, empty_value=False)
    cls.all = _make_logical_function(
        cls, 'all', name, name2, axis_descr, _all_desc, nanops.nanall,
        _all_see_also, _all_examples, empty_value=True)

    @Substitution(desc="Return the mean absolute deviation of the values "
                       "for the requested axis.",
                  name1=name, name2=name2, axis_descr=axis_descr,
                  min_count='', see_also='', examples='')
    @Appender(_num_doc)
    def mad(self, axis=None, skipna=None, level=None):
        # Mean absolute deviation around the mean along ``axis``.
        if skipna is None:
            skipna = True
        if axis is None:
            axis = self._stat_axis_number
        if level is not None:
            return self._agg_by_level('mad', axis=axis, level=level,
                                      skipna=skipna)

        data = self._get_numeric_data()
        if axis == 0:
            demeaned = data - data.mean(axis=0)
        else:
            demeaned = data.sub(data.mean(axis=1), axis=0)
        return np.abs(demeaned).mean(axis=axis, skipna=skipna)

    cls.mad = mad

    # ddof-aware dispersion statistics.
    cls.sem = _make_stat_function_ddof(
        cls, 'sem', name, name2, axis_descr,
        "Return unbiased standard error of the mean over requested "
        "axis.\n\nNormalized by N-1 by default. This can be changed "
        "using the ddof argument",
        nanops.nansem)
    cls.var = _make_stat_function_ddof(
        cls, 'var', name, name2, axis_descr,
        "Return unbiased variance over requested axis.\n\nNormalized by "
        "N-1 by default. This can be changed using the ddof argument",
        nanops.nanvar)
    cls.std = _make_stat_function_ddof(
        cls, 'std', name, name2, axis_descr,
        "Return sample standard deviation over requested axis."
        "\n\nNormalized by N-1 by default. This can be changed using the "
        "ddof argument",
        nanops.nanstd)

    @Substitution(desc="Return the compound percentage of the values for "
                       "the requested axis.", name1=name, name2=name2,
                  axis_descr=axis_descr,
                  min_count='', see_also='', examples='')
    @Appender(_num_doc)
    def compound(self, axis=None, skipna=None, level=None):
        # Compound return: product of (1 + x) minus 1.
        if skipna is None:
            skipna = True
        return (1 + self).prod(axis=axis, skipna=skipna, level=level) - 1

    cls.compound = compound

    # Cumulative accumulators.
    cls.cummin = _make_cum_function(
        cls, 'cummin', name, name2, axis_descr, "minimum",
        lambda y, axis: np.minimum.accumulate(y, axis), "min",
        np.inf, np.nan, _cummin_examples)
    cls.cumsum = _make_cum_function(
        cls, 'cumsum', name, name2, axis_descr, "sum",
        lambda y, axis: y.cumsum(axis), "sum", 0.,
        np.nan, _cumsum_examples)
    cls.cumprod = _make_cum_function(
        cls, 'cumprod', name, name2, axis_descr, "product",
        lambda y, axis: y.cumprod(axis), "prod", 1.,
        np.nan, _cumprod_examples)
    cls.cummax = _make_cum_function(
        cls, 'cummax', name, name2, axis_descr, "maximum",
        lambda y, axis: np.maximum.accumulate(y, axis), "max",
        -np.inf, np.nan, _cummax_examples)

    # Reductions (sum/prod honour ``min_count``).
    cls.sum = _make_min_count_stat_function(
        cls, 'sum', name, name2, axis_descr,
        """Return the sum of the values for the requested axis.\n
        This is equivalent to the method ``numpy.sum``.""",
        nanops.nansum, _stat_func_see_also, _sum_examples)
    cls.mean = _make_stat_function(
        cls, 'mean', name, name2, axis_descr,
        'Return the mean of the values for the requested axis.',
        nanops.nanmean)
    cls.skew = _make_stat_function(
        cls, 'skew', name, name2, axis_descr,
        'Return unbiased skew over requested axis\nNormalized by N-1.',
        nanops.nanskew)
    cls.kurt = _make_stat_function(
        cls, 'kurt', name, name2, axis_descr,
        "Return unbiased kurtosis over requested axis using Fisher's "
        "definition of\nkurtosis (kurtosis of normal == 0.0). Normalized "
        "by N-1.",
        nanops.nankurt)
    cls.kurtosis = cls.kurt
    cls.prod = _make_min_count_stat_function(
        cls, 'prod', name, name2, axis_descr,
        'Return the product of the values for the requested axis.',
        nanops.nanprod, examples=_prod_examples)
    cls.product = cls.prod
    cls.median = _make_stat_function(
        cls, 'median', name, name2, axis_descr,
        'Return the median of the values for the requested axis.',
        nanops.nanmedian)
    cls.max = _make_stat_function(
        cls, 'max', name, name2, axis_descr,
        """Return the maximum of the values for the requested axis.\n
        If you want the *index* of the maximum, use ``idxmax``. This is
        the equivalent of the ``numpy.ndarray`` method ``argmax``.""",
        nanops.nanmax, _stat_func_see_also, _max_examples)
    cls.min = _make_stat_function(
        cls, 'min', name, name2, axis_descr,
        """Return the minimum of the values for the requested axis.\n
        If you want the *index* of the minimum, use ``idxmin``. This is
        the equivalent of the ``numpy.ndarray`` method ``argmin``.""",
        nanops.nanmin, _stat_func_see_also, _min_examples)
@classmethod
def _add_series_only_operations(cls):
    """
    Add the series only operations to the cls; evaluate the doc
    strings again.
    """
    axis_descr, name, name2 = _doc_parms(cls)

    def nanptp(values, axis=0, skipna=True):
        # Peak-to-peak (max - min); deprecated in favour of numpy.ptp.
        nmax = nanops.nanmax(values, axis, skipna)
        nmin = nanops.nanmin(values, axis, skipna)
        warnings.warn("Method .ptp is deprecated and will be removed "
                      "in a future version. Use numpy.ptp instead.",
                      FutureWarning, stacklevel=4)
        return nmax - nmin

    cls.ptp = _make_stat_function(
        cls, 'ptp', name, name2, axis_descr,
        """Return the difference between the maximum value and the
        minimum value in the object. This is the equivalent of the
        ``numpy.ndarray`` method ``ptp``.\n\n.. deprecated:: 0.24.0
        Use numpy.ptp instead""",
        nanptp)
@classmethod
def _add_series_or_dataframe_operations(cls):
    """
    Add the series or dataframe only operations to the cls; evaluate
    the doc strings again.
    """
    # Imported here to avoid a circular import at module load time.
    from pandas.core import window as rwindow

    @Appender(rwindow.rolling.__doc__)
    def rolling(self, window, min_periods=None, center=False,
                win_type=None, on=None, axis=0, closed=None):
        axis = self._get_axis_number(axis)
        return rwindow.rolling(self, window=window,
                               min_periods=min_periods,
                               center=center, win_type=win_type,
                               on=on, axis=axis, closed=closed)

    cls.rolling = rolling

    @Appender(rwindow.expanding.__doc__)
    def expanding(self, min_periods=1, center=False, axis=0):
        axis = self._get_axis_number(axis)
        return rwindow.expanding(self, min_periods=min_periods,
                                 center=center, axis=axis)

    cls.expanding = expanding

    @Appender(rwindow.ewm.__doc__)
    def ewm(self, com=None, span=None, halflife=None, alpha=None,
            min_periods=0, adjust=True, ignore_na=False,
            axis=0):
        axis = self._get_axis_number(axis)
        return rwindow.ewm(self, com=com, span=span, halflife=halflife,
                           alpha=alpha, min_periods=min_periods,
                           adjust=adjust, ignore_na=ignore_na, axis=axis)

    cls.ewm = ewm
@Appender(_shared_docs['transform'] % dict(axis="", **_shared_doc_kwargs))
def transform(self, func, *args, **kwargs):
    # Apply ``func`` through .agg; a transform must return an object of
    # the same length as the input, so scalar or length-changing
    # (aggregated) results are rejected.
    result = self.agg(func, *args, **kwargs)
    if is_scalar(result) or len(result) != len(self):
        raise ValueError("transforms cannot produce "
                         "aggregated results")

    return result
# ----------------------------------------------------------------------
# Misc methods
# Shared docstring template for first_valid_index / last_valid_index.
# Fix: numpydoc section underlines must match the title length
# ("Returns"/"Notes" were underlined with too many dashes).
_shared_docs['valid_index'] = """
    Return index for %(position)s non-NA/null value.

    Returns
    -------
    scalar : type of index

    Notes
    -----
    If all elements are non-NA/null, returns None.
    Also returns None for empty %(klass)s.
    """
def _find_valid_index(self, how):
"""
Retrieves the index of the first valid value.
Parameters
----------
how : {'first', 'last'}
Use this parameter to change between the first or last valid index.
Returns
-------
idx_first_valid : type of index
"""
assert how in ['first', 'last']
if len(self) == 0: # early stop
return None
is_valid = ~self.isna()
if self.ndim == 2:
is_valid = is_valid.any(1) # reduce axis 1
if how == 'first':
idxpos = is_valid.values[::].argmax()
if how == 'last':
idxpos = len(self) - 1 - is_valid.values[::-1].argmax()
chk_notna = is_valid.iat[idxpos]
idx = self.index[idxpos]
if not chk_notna:
return None
return idx
@Appender(_shared_docs['valid_index'] % {'position': 'first',
                                         'klass': 'NDFrame'})
def first_valid_index(self):
    # Delegate to the shared scan helper, searching from the front.
    return self._find_valid_index('first')
@Appender(_shared_docs['valid_index'] % {'position': 'last',
                                         'klass': 'NDFrame'})
def last_valid_index(self):
    # Delegate to the shared scan helper, searching from the back.
    return self._find_valid_index('last')
def _doc_parms(cls):
"""Return a tuple of the doc parms."""
axis_descr = "{%s}" % ', '.join(["{0} ({1})".format(a, i)
for i, a in enumerate(cls._AXIS_ORDERS)])
name = (cls._constructor_sliced.__name__
if cls._AXIS_LEN > 1 else 'scalar')
name2 = cls.__name__
return axis_descr, name, name2
_num_doc = """
%(desc)s
Parameters
----------
axis : %(axis_descr)s
Axis for the function to be applied on.
skipna : bool, default True
Exclude NA/null values when computing the result.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a %(name1)s.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
%(min_count)s\
**kwargs
Additional keyword arguments to be passed to the function.
Returns
-------
%(name1)s or %(name2)s (if level specified)
%(see_also)s
%(examples)s\
"""
_num_ddof_doc = """
%(desc)s
Parameters
----------
axis : %(axis_descr)s
skipna : boolean, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a %(name1)s
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations is N - ddof,
where N represents the number of elements.
numeric_only : boolean, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
Returns
-------
%(name1)s or %(name2)s (if level specified)\n"""
_bool_doc = """
%(desc)s
Parameters
----------
axis : {0 or 'index', 1 or 'columns', None}, default 0
Indicate which axis or axes should be reduced.
* 0 / 'index' : reduce the index, return a Series whose index is the
original column labels.
* 1 / 'columns' : reduce the columns, return a Series whose index is the
original index.
* None : reduce all axes, return a scalar.
bool_only : bool, default None
Include only boolean columns. If None, will attempt to use everything,
then use only boolean data. Not implemented for Series.
skipna : bool, default True
Exclude NA/null values. If the entire row/column is NA and skipna is
True, then the result will be %(empty_value)s, as for an empty row/column.
If skipna is False, then NA are treated as True, because these are not
equal to zero.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a %(name1)s.
**kwargs : any, default None
Additional keywords have no effect but might be accepted for
compatibility with NumPy.
Returns
-------
%(name1)s or %(name2)s
If level is specified, then, %(name2)s is returned; otherwise, %(name1)s
is returned.
%(see_also)s
%(examples)s"""
_all_desc = """\
Return whether all elements are True, potentially over an axis.
Returns True unless there at least one element within a series or
along a Dataframe axis that is False or equivalent (e.g. zero or
empty)."""
_all_examples = """\
Examples
--------
**Series**
>>> pd.Series([True, True]).all()
True
>>> pd.Series([True, False]).all()
False
>>> pd.Series([]).all()
True
>>> pd.Series([np.nan]).all()
True
>>> pd.Series([np.nan]).all(skipna=False)
True
**DataFrames**
Create a dataframe from a dictionary.
>>> df = pd.DataFrame({'col1': [True, True], 'col2': [True, False]})
>>> df
col1 col2
0 True True
1 True False
Default behaviour checks if column-wise values all return True.
>>> df.all()
col1 True
col2 False
dtype: bool
Specify ``axis='columns'`` to check if row-wise values all return True.
>>> df.all(axis='columns')
0 True
1 False
dtype: bool
Or ``axis=None`` for whether every value is True.
>>> df.all(axis=None)
False
"""
_all_see_also = """\
See Also
--------
Series.all : Return True if all elements are True.
DataFrame.any : Return True if one (or more) elements are True.
"""
_cnum_doc = """
Return cumulative %(desc)s over a DataFrame or Series axis.
Returns a DataFrame or Series of the same size containing the cumulative
%(desc)s.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
The index or the name of the axis. 0 is equivalent to None or 'index'.
skipna : boolean, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
*args, **kwargs :
Additional keywords have no effect but might be accepted for
compatibility with NumPy.
Returns
-------
%(name1)s or %(name2)s\n
See Also
--------
core.window.Expanding.%(accum_func_name)s : Similar functionality
but ignores ``NaN`` values.
%(name2)s.%(accum_func_name)s : Return the %(desc)s over
%(name2)s axis.
%(name2)s.cummax : Return cumulative maximum over %(name2)s axis.
%(name2)s.cummin : Return cumulative minimum over %(name2)s axis.
%(name2)s.cumsum : Return cumulative sum over %(name2)s axis.
%(name2)s.cumprod : Return cumulative product over %(name2)s axis.
%(examples)s
"""
_cummin_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cummin()
0 2.0
1 NaN
2 2.0
3 -1.0
4 -1.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cummin(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the minimum
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cummin()
A B
0 2.0 1.0
1 2.0 NaN
2 1.0 0.0
To iterate over columns and find the minimum in each row,
use ``axis=1``
>>> df.cummin(axis=1)
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
"""
_cumsum_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cumsum()
0 2.0
1 NaN
2 7.0
3 6.0
4 6.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cumsum(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the sum
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cumsum()
A B
0 2.0 1.0
1 5.0 NaN
2 6.0 1.0
To iterate over columns and find the sum in each row,
use ``axis=1``
>>> df.cumsum(axis=1)
A B
0 2.0 3.0
1 3.0 NaN
2 1.0 1.0
"""
_cumprod_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cumprod()
0 2.0
1 NaN
2 10.0
3 -10.0
4 -0.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cumprod(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the product
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cumprod()
A B
0 2.0 1.0
1 6.0 NaN
2 6.0 0.0
To iterate over columns and find the product in each row,
use ``axis=1``
>>> df.cumprod(axis=1)
A B
0 2.0 2.0
1 3.0 NaN
2 1.0 0.0
"""
_cummax_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cummax()
0 2.0
1 NaN
2 5.0
3 5.0
4 5.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cummax(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the maximum
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cummax()
A B
0 2.0 1.0
1 3.0 NaN
2 3.0 1.0
To iterate over columns and find the maximum in each row,
use ``axis=1``
>>> df.cummax(axis=1)
A B
0 2.0 2.0
1 3.0 NaN
2 1.0 1.0
"""
_any_see_also = """\
See Also
--------
numpy.any : Numpy version of this method.
Series.any : Return whether any element is True.
Series.all : Return whether all elements are True.
DataFrame.any : Return whether any element is True over requested axis.
DataFrame.all : Return whether all elements are True over requested axis.
"""
_any_desc = """\
Return whether any element is True, potentially over an axis.
Returns False unless there at least one element within a series or
along a Dataframe axis that is True or equivalent (e.g. non-zero or
non-empty)."""
_any_examples = """\
Examples
--------
**Series**
For Series input, the output is a scalar indicating whether any element
is True.
>>> pd.Series([False, False]).any()
False
>>> pd.Series([True, False]).any()
True
>>> pd.Series([]).any()
False
>>> pd.Series([np.nan]).any()
False
>>> pd.Series([np.nan]).any(skipna=False)
True
**DataFrame**
Whether each column contains at least one True element (the default).
>>> df = pd.DataFrame({"A": [1, 2], "B": [0, 2], "C": [0, 0]})
>>> df
A B C
0 1 0 0
1 2 2 0
>>> df.any()
A True
B True
C False
dtype: bool
Aggregating over the columns.
>>> df = pd.DataFrame({"A": [True, False], "B": [1, 2]})
>>> df
A B
0 True 1
1 False 2
>>> df.any(axis='columns')
0 True
1 True
dtype: bool
>>> df = pd.DataFrame({"A": [True, False], "B": [1, 0]})
>>> df
A B
0 True 1
1 False 0
>>> df.any(axis='columns')
0 True
1 False
dtype: bool
Aggregating over the entire DataFrame with ``axis=None``.
>>> df.any(axis=None)
True
`any` for an empty DataFrame is an empty Series.
>>> pd.DataFrame([]).any()
Series([], dtype: bool)
"""
_shared_docs['stat_func_example'] = """\
Examples
--------
>>> idx = pd.MultiIndex.from_arrays([
... ['warm', 'warm', 'cold', 'cold'],
... ['dog', 'falcon', 'fish', 'spider']],
... names=['blooded', 'animal'])
>>> s = pd.Series([4, 2, 0, 8], name='legs', index=idx)
>>> s
blooded animal
warm dog 4
falcon 2
cold fish 0
spider 8
Name: legs, dtype: int64
>>> s.{stat_func}()
{default_output}
{verb} using level names, as well as indices.
>>> s.{stat_func}(level='blooded')
blooded
warm {level_output_0}
cold {level_output_1}
Name: legs, dtype: int64
>>> s.{stat_func}(level=0)
blooded
warm {level_output_0}
cold {level_output_1}
Name: legs, dtype: int64
"""
_sum_examples = _shared_docs['stat_func_example'].format(
stat_func='sum',
verb='Sum',
default_output=14,
level_output_0=6,
level_output_1=8)
_sum_examples += """
By default, the sum of an empty or all-NA Series is ``0``.
>>> pd.Series([]).sum() # min_count=0 is the default
0.0
This can be controlled with the ``min_count`` parameter. For example, if
you'd like the sum of an empty series to be NaN, pass ``min_count=1``.
>>> pd.Series([]).sum(min_count=1)
nan
Thanks to the ``skipna`` parameter, ``min_count`` handles all-NA and
empty series identically.
>>> pd.Series([np.nan]).sum()
0.0
>>> pd.Series([np.nan]).sum(min_count=1)
nan
"""
_max_examples = _shared_docs['stat_func_example'].format(
stat_func='max',
verb='Max',
default_output=8,
level_output_0=4,
level_output_1=8)
_min_examples = _shared_docs['stat_func_example'].format(
stat_func='min',
verb='Min',
default_output=0,
level_output_0=2,
level_output_1=0)
_stat_func_see_also = """
See Also
--------
Series.sum : Return the sum.
Series.min : Return the minimum.
Series.max : Return the maximum.
Series.idxmin : Return the index of the minimum.
Series.idxmax : Return the index of the maximum.
DataFrame.min : Return the sum over the requested axis.
DataFrame.min : Return the minimum over the requested axis.
DataFrame.max : Return the maximum over the requested axis.
DataFrame.idxmin : Return the index of the minimum over the requested axis.
DataFrame.idxmax : Return the index of the maximum over the requested axis.
"""
_prod_examples = """\
Examples
--------
By default, the product of an empty or all-NA Series is ``1``
>>> pd.Series([]).prod()
1.0
This can be controlled with the ``min_count`` parameter
>>> pd.Series([]).prod(min_count=1)
nan
Thanks to the ``skipna`` parameter, ``min_count`` handles all-NA and
empty series identically.
>>> pd.Series([np.nan]).prod()
1.0
>>> pd.Series([np.nan]).prod(min_count=1)
nan
"""
_min_count_stub = """\
min_count : int, default 0
The required number of valid values to perform the operation. If fewer than
``min_count`` non-NA values are present the result will be NA.
.. versionadded :: 0.22.0
Added with the default being 0. This means the sum of an all-NA
or empty Series is 0, and the product of an all-NA or empty
Series is 1.
"""
def _make_min_count_stat_function(cls, name, name1, name2, axis_descr, desc,
                                  f, see_also='', examples=''):
    """Build a reduction method (sum/prod) that supports ``min_count``.

    Parameters
    ----------
    cls : type
        Class the generated method is named for.
    name : str
        Public name of the reduction ('sum' or 'prod').
    name1, name2, axis_descr, desc : str
        Fragments substituted into the shared ``_num_doc`` template.
    f : callable
        Underlying reduction implementation handed to ``_reduce``.
    see_also, examples : str
        Optional extra docstring sections.
    """
    @Substitution(desc=desc, name1=name1, name2=name2,
                  axis_descr=axis_descr, min_count=_min_count_stub,
                  see_also=see_also, examples=examples)
    @Appender(_num_doc)
    def stat_func(self, axis=None, skipna=None, level=None, numeric_only=None,
                  min_count=0,
                  **kwargs):
        # Reject numpy-compat kwargs that pandas does not actually support.
        if name == 'sum':
            nv.validate_sum(tuple(), kwargs)
        elif name == 'prod':
            nv.validate_prod(tuple(), kwargs)
        else:
            nv.validate_stat_func(tuple(), kwargs, fname=name)
        if skipna is None:
            skipna = True
        if axis is None:
            axis = self._stat_axis_number
        if level is not None:
            # MultiIndex level reductions are delegated to groupby machinery.
            return self._agg_by_level(name, axis=axis, level=level,
                                      skipna=skipna, min_count=min_count)
        return self._reduce(f, name, axis=axis, skipna=skipna,
                            numeric_only=numeric_only, min_count=min_count)

    # Rename the closure so docs/repr show the public name.
    return set_function_name(stat_func, name, cls)
def _make_stat_function(cls, name, name1, name2, axis_descr, desc, f,
                        see_also='', examples=''):
    """Build a plain reduction method (mean/median/...) named ``name``."""
    @Substitution(desc=desc, name1=name1, name2=name2,
                  axis_descr=axis_descr, min_count='', see_also=see_also,
                  examples=examples)
    @Appender(_num_doc)
    def stat_func(self, axis=None, skipna=None, level=None, numeric_only=None,
                  **kwargs):
        # Validate numpy-compat kwargs before doing any work.
        if name == 'median':
            nv.validate_median(tuple(), kwargs)
        else:
            nv.validate_stat_func(tuple(), kwargs, fname=name)

        # Resolve defaults.
        skipna = True if skipna is None else skipna
        axis = self._stat_axis_number if axis is None else axis

        if level is None:
            return self._reduce(f, name, axis=axis, skipna=skipna,
                                numeric_only=numeric_only)
        # Reduce within MultiIndex levels via the groupby machinery.
        return self._agg_by_level(name, axis=axis, level=level,
                                  skipna=skipna)

    return set_function_name(stat_func, name, cls)
def _make_stat_function_ddof(cls, name, name1, name2, axis_descr, desc, f):
    """Build a reduction method that takes ``ddof`` (std/var/sem)."""
    @Substitution(desc=desc, name1=name1, name2=name2,
                  axis_descr=axis_descr)
    @Appender(_num_ddof_doc)
    def stat_func(self, axis=None, skipna=None, level=None, ddof=1,
                  numeric_only=None, **kwargs):
        nv.validate_stat_ddof_func(tuple(), kwargs, fname=name)

        # Resolve defaults.
        skipna = True if skipna is None else skipna
        axis = self._stat_axis_number if axis is None else axis

        if level is None:
            return self._reduce(f, name, axis=axis, numeric_only=numeric_only,
                                skipna=skipna, ddof=ddof)
        # Level-wise reduction for MultiIndex axes.
        return self._agg_by_level(name, axis=axis, level=level,
                                  skipna=skipna, ddof=ddof)

    return set_function_name(stat_func, name, cls)
def _make_cum_function(cls, name, name1, name2, axis_descr, desc,
                       accum_func, accum_func_name, mask_a, mask_b, examples):
    """Build a cumulative reduction method (cumsum/cumprod/cummin/cummax).

    ``accum_func`` is the numpy accumulation function.  For skipna handling,
    ``mask_a`` is the neutral value written over NaNs *before* accumulating
    and ``mask_b`` is written back over the originally-missing positions
    *after* accumulating.
    """
    @Substitution(desc=desc, name1=name1, name2=name2,
                  axis_descr=axis_descr, accum_func_name=accum_func_name,
                  examples=examples)
    @Appender(_cnum_doc)
    def cum_func(self, axis=None, skipna=True, *args, **kwargs):
        skipna = nv.validate_cum_func_with_skipna(skipna, args, kwargs, name)
        if axis is None:
            axis = self._stat_axis_number
        else:
            axis = self._get_axis_number(axis)

        # Work on a copy of the raw ndarray: the accumulation mutates it.
        y = com.values_from_object(self).copy()

        if (skipna and
                issubclass(y.dtype.type, (np.datetime64, np.timedelta64))):
            # Datetime-likes: accumulate first, then restore NaT slots.
            result = accum_func(y, axis)
            mask = isna(self)
            np.putmask(result, mask, iNaT)
        elif skipna and not issubclass(y.dtype.type, (np.integer, np.bool_)):
            # Float-likes: neutralise NaNs with mask_a so they don't poison
            # the accumulation, then put mask_b back over the missing slots.
            mask = isna(self)
            np.putmask(y, mask, mask_a)
            result = accum_func(y, axis)
            np.putmask(result, mask, mask_b)
        else:
            # Integer/bool dtypes cannot hold NaN, or skipna=False.
            result = accum_func(y, axis)

        d = self._construct_axes_dict()
        d['copy'] = False
        return self._constructor(result, **d).__finalize__(self)

    return set_function_name(cum_func, name, cls)
def _make_logical_function(cls, name, name1, name2, axis_descr, desc, f,
                           see_also, examples, empty_value):
    """Build a boolean reduction method (any/all) named ``name``."""
    @Substitution(desc=desc, name1=name1, name2=name2,
                  axis_descr=axis_descr, see_also=see_also, examples=examples,
                  empty_value=empty_value)
    @Appender(_bool_doc)
    def logical_func(self, axis=0, bool_only=None, skipna=True, level=None,
                     **kwargs):
        nv.validate_logical_func(tuple(), kwargs, fname=name)

        if level is None:
            return self._reduce(f, name, axis=axis, skipna=skipna,
                                numeric_only=bool_only, filter_type='bool')

        # Level-wise reduction: bool_only cannot be honoured on this path.
        if bool_only is not None:
            raise NotImplementedError("Option bool_only is not "
                                      "implemented with option level.")
        return self._agg_by_level(name, axis=axis, level=level,
                                  skipna=skipna)

    return set_function_name(logical_func, name, cls)
# install the indexes
# Attach each indexing accessor (.loc, .iloc, .at, .iat, ...) as a
# cached property on NDFrame so all subclasses pick them up.
for _name, _indexer in indexing.get_indexers_list():
    NDFrame._create_indexer(_name, _indexer)
|
import getUsdValue from 'utils/getUsdValue'
import useHederaPrice from 'hooks/useHederaPrice'
import fetchEnvironment from 'utils/getEnvironment'
// DragonGlass URLs want the transaction id with the '.', '@' and '-'
// separators stripped out.
const getIdForDragonglassExplorer = transactionid =>
  transactionid.replace(/\./g, '').replace(/@/g, '').replace(/-/g, '')
const FirstBidItem = ({
auctionaccountid,
createdAt,
reserve,
isLive,
transactionId,
}) => {
const handleTransactoinViewClick = async () => {
if (!transactionId) return
const { network } = await fetchEnvironment()
const isTestNet = network === 'testnet'
const idForDragonGlass = getIdForDragonglassExplorer(transactionId)
const subdomain = isTestNet ? 'testnet.' : ''
const dragonGlassBaseUrl = `https://${subdomain}dragonglass.me/hedera/transactions/`
window.open(dragonGlassBaseUrl + idForDragonGlass)
}
const { currentPrice, isFetching: isFetchingHederaData } = useHederaPrice()
return (
<div
className={`mb-8 shadow-bid-item sm:h-16 h-full relative flex justify-between`}
>
{isLive && <div className='bg-purple-gradient w-2 h-full absolute' />}
<div className='flex sm:flex-row flex-col sm:items-center items-left w-full justify-between sm:ml-5 ml-7 sm:mt-0 mt-3'>
<div className='sm:pb-0 pb-4 w-1/4'>
<p className='font-light text-xs text-gray-400 whitespace-nowrap'>
Listing Transaction
</p>
<p className='font-bold text-sm'>{auctionaccountid}</p>
</div>
<div className='flex flex-grow justify-between sm:items-center items-left sm:flex-row flex-col '>
<div className='sm:pb-0 pb-4 w-3/4'>
<p className='font-light text-xs text-gray-400'>Date created</p>
<p className='font-bold text-sm'>{createdAt}</p>
</div>
<div className='sm:pb-0 pb-3 w-1/2'>
<p className='font-bold text-md mx-0'>
{reserve} <span className='font-light text-md'>HBAR</span>
</p>
<p className='font-semibold text-xs mx-0 text-gray-400'>
${getUsdValue(reserve, currentPrice)}
</p>
</div>
<img
src='/assets/view-transaction.svg'
onClick={handleTransactoinViewClick}
className='h-6 w-6 sm:ml-12 ml-2 cursor-pointer sm:relative absolute top-1 right-3 sm:mt-0 mt-3'
/>
</div>
</div>
</div>
)
}
export default FirstBidItem
|
import express from 'express'
import getRequestData from '../utils/get-request-data'
import { httpHeader, statusCode } from '../utils/http-header'
import quizService from './quiz-service'
const quizRouter = express.Router()
quizRouter.post('/createquiz', async (req, res)=> {
console.log("POST /createquiz")
const request = getRequestData(req)
try {
const response = await quizService.createQuiz(request)
res
.set(httpHeader.json)
.status(statusCode.success)
.send(response)
} catch(err) {
res
.set(httpHeader.json)
.status(statusCode.badRequest)
.send({
response: {
success: false,
error: err.errorMessage
}
})
}
})
quizRouter.get('/quiz/:id', quizService.getQuiz)
module.exports = quizRouter
|
import React, { useContext, useState } from 'react';
import './index.css';
import DataAreaContext from '../../utils/DataAreaContext';
import Lib from '../../utils/Lib';
import { Offcanvas, Button, Form, FloatingLabel } from 'react-bootstrap';
import { orderBy } from "lodash";
import moment from 'moment';
let isEmpty = require('lodash.isempty');
function FiltersOffCanvas(props) {
const { showFilters, setShowFilters, setFilterValue, filterValue } = useContext(DataAreaContext);
const [filterObject, setFilterObject] = useState({
year: moment().format('YYYY'),
region: "",
round: "",
golfClub: ""
});
let golfClubListByYear;
if (filterValue.year === filterObject.year) {
golfClubListByYear = props.matches.filter(function(match) {
if (filterValue.year === moment(match.matchDateTime).format('YYYY')) {
return match;
}
});
} else {
golfClubListByYear = props.matches.filter(function(match) {
if (filterObject.year === moment(match.matchDateTime).format('YYYY')) {
return match;
}
});
};
let golfClubsTeamOneName = golfClubListByYear.map(({ teamOneName }) => teamOneName);
let golfClubsTeamTwoName = golfClubListByYear.map(({ teamTwoName }) => teamTwoName);
let golfClubs = golfClubsTeamOneName.concat(golfClubsTeamTwoName);
let removedDuplicateGolfClubs = Lib.eliminateDuplicates(golfClubs);
let sortedGolfClubs = removedDuplicateGolfClubs.sort();
let matchYears = Lib.eliminateDuplicates(orderBy(props.matches, "matchDateTime", "asc").map(({ matchDateTime }) => moment(matchDateTime).format('YYYY'))).sort(function(a, b) {
return b - a;
});
let matchRegions = golfClubListByYear.map(({ competitionConcatRegion }) => competitionConcatRegion);
let removedDuplicateMatchRegions = Lib.eliminateDuplicates(matchRegions);
let sortedMatchRegions = removedDuplicateMatchRegions.sort();
let matchRounds = golfClubListByYear.map(({ competitionRound }) => competitionRound.round);
let removedDuplicateMatchRounds = Lib.eliminateDuplicates(matchRounds);
let sortedMatchRounds = removedDuplicateMatchRounds.sort();
// Handles updating component state when the user types into the input field
function handleInputChange(event, key) {
event.preventDefault();
if (key === "year") {
setFilterValue({...filterValue,
"golfClub": "",
"region": "",
"round": ""
});
setFilterObject({
year: event.target.value,
region: "",
round: "",
golfClub: ""
});
} else {
setFilterObject({...filterObject, [key]: event.target.value});
}
};
function handleApplyFiltersSubmit(event) {
event.preventDefault();
setFilterValue({...filterObject});
setShowFilters(false);
};
const handleClose = () => {
setFilterValue({
year: moment().format('YYYY'),
region: "",
round: "",
golfClub: ""
});
setFilterObject({
year: moment().format('YYYY'),
region: "",
round: "",
golfClub: ""
});
setShowFilters(false);
};
function handleClearFiltersSubmit(event) {
event.preventDefault();
setFilterValue({
year: moment().format('YYYY'),
region: "",
round: "",
golfClub: ""
});
setFilterObject({
year: moment().format('YYYY'),
region: "",
round: "",
golfClub: ""
});
setShowFilters(false);
};
return (
<>
<Offcanvas show={showFilters} onHide={handleClose}>
<Offcanvas.Header>
<Offcanvas.Title>Filters</Offcanvas.Title>
</Offcanvas.Header>
<Offcanvas.Body>
<Form>
<FloatingLabel controlId="floatingSelect" label="Filter by year">
<Form.Select aria-label="Filter by year" onChange={(e) => handleInputChange(e, "year")}>
<option>{filterValue.year}</option>
{matchYears.map(function(year) {
if (filterValue.year !== year) {
return (
<option value={year}>{year}</option>
)
}
})}
</Form.Select>
</FloatingLabel>
<br />
<FloatingLabel controlId="floatingSelect" label="Filter by region">
<Form.Select aria-label="Filter by region" onChange={(e) => handleInputChange(e, "region")}>
{filterValue.year === filterObject.year ? (
<>
{!isEmpty(filterValue.region) ?
<>
<option>{Lib.capitalize(filterValue.region)}</option>
<option>{""}</option>
</>
:
<option>{""}</option>
}
{sortedMatchRegions.map(function(region) {
if (filterValue.region !== region) {
return (
<option value={region}>{Lib.capitalize(region)}</option>
)
}
})}
</>
) : (
<>
{!isEmpty(filterObject.region) ?
<>
<option>{Lib.capitalize(filterObject.region)}</option>
<option>{""}</option>
</>
:
<option>{""}</option>
}
{sortedMatchRegions.map(function(region) {
if (filterObject.region !== region) {
return (
<option value={region}>{Lib.capitalize(region)}</option>
)
}
})}
</>
)}
</Form.Select>
</FloatingLabel>
<br />
<FloatingLabel controlId="floatingSelect" label="Filter by round">
<Form.Select aria-label="Filter by round" onChange={(e) => handleInputChange(e, "round")}>
{filterValue.year === filterObject.year ? (
<>
{!isEmpty(filterValue.round) ?
<>
<option>{Lib.capitalize(filterValue.round)}</option>
<option>{""}</option>
</>
:
<option>{""}</option>
}
{sortedMatchRounds.map(function(round) {
if (filterValue.round !== round) {
return (
<option value={round}>{Lib.capitalize(round)}</option>
)
}
})}
</>
) : (
<>
{!isEmpty(filterObject.round) ?
<>
<option>{Lib.capitalize(filterObject.round)}</option>
<option>{""}</option>
</>
:
<option>{""}</option>
}
{sortedMatchRounds.map(function(round) {
if (filterObject.round !== round) {
return (
<option value={round}>{Lib.capitalize(round)}</option>
)
}
})}
</>
)}
</Form.Select>
</FloatingLabel>
<br />
<FloatingLabel controlId="floatingSelect" label="Filter matches by golf club">
<Form.Select aria-label="Filter matches by golf club" onChange={(e) => handleInputChange(e, "golfClub")}>
{filterValue.year === filterObject.year ? (
<>
{!isEmpty(filterValue.golfClub) ?
<>
<option>{Lib.capitalize(filterValue.golfClub)}</option>
<option>{""}</option>
</>
:
<option>{""}</option>
}
{sortedGolfClubs.map(function(golfClub) {
if (filterValue.golfClub !== golfClub) {
return (
<option value={golfClub}>{Lib.capitalize(golfClub)}</option>
)
}
})}
</>
) : (
<>
{!isEmpty(filterObject.golfClub) ?
<>
<option>{Lib.capitalize(filterObject.golfClub)}</option>
<option>{""}</option>
</>
:
<option>{""}</option>
}
{sortedGolfClubs.map(function(golfClub) {
if (filterObject.golfClub !== golfClub) {
return (
<option value={golfClub}>{Lib.capitalize(golfClub)}</option>
)
}
})}
</>
)}
</Form.Select>
</FloatingLabel>
<br />
<Button variant="outline-primary" onClick={handleApplyFiltersSubmit} className="me-2">
Apply filters
</Button>
<Button variant="outline-danger" onClick={handleClearFiltersSubmit} className="me-2">
Clear filters
</Button>
</Form>
</Offcanvas.Body>
</Offcanvas>
</>
);
}
export default FiltersOffCanvas;
|
import hashlib, os, requests, struct, wx, webbrowser
import winreg as reg
from bs4 import BeautifulSoup
# path = input("Enter file path")
# print("Remove same files")
class FilePro:
    """A file wrapper: hashing, renaming, and malware-reputation lookups.

    Combines local metadata (directory, name, extension) with remote
    reputation queries against 360 and VirusTotal.
    """

    # Directory containing the file (no trailing separator).
    path = None
    # File name including extension.
    name = None
    # Windows-style path: path + "\\" + name.
    full_path = None
    # Extension including the leading dot, or "" when there is none.
    type = None

    def __init__(self, path, name):
        self.path = path
        self.name = name
        self.full_path = self.path + "\\" + self.name
        try:
            self.type = name[name.rindex("."):]
        except ValueError:
            # No "." in the name -> no extension.
            self.type = ""

    def get_file_md5(self):
        """Return the hex MD5 digest of the file contents."""
        with open(self.full_path, "rb") as handle:
            return hashlib.md5(handle.read()).hexdigest()

    def get_file_sha256(self):
        """Return the hex SHA-256 digest of the file contents."""
        with open(self.full_path, "rb") as handle:
            return hashlib.sha256(handle.read()).hexdigest()

    def rename(self, name):
        """Rename the file to ``name`` + original extension.

        An existing file at the target path is removed first so the rename
        cannot fail on Windows.
        """
        new_full_path = self.path + "\\" + name + self.type
        if os.path.exists(new_full_path) and new_full_path != self.full_path:
            os.remove(new_full_path)
        os.rename(self.full_path, new_full_path)
        self.name = name
        self.full_path = new_full_path

    def modify(self):
        """Append 12 zero bytes so the file's hashes change."""
        with open(self.full_path, "ab+") as handle:
            handle.write(b'000000000000')

    def get_360(self):
        """Query 360's file-health service by MD5; return the raw response."""
        params = {"md5s": (None, self.get_file_md5()),
                  "format": (None, "XML"),
                  "product": (None, "360zip"),
                  "combo": (None, "360zip_main"),
                  "v": (None, "2"),
                  "osver": (None, "5.1"),
                  "vk": (None, "a03bc211"),
                  "mid": (None, "8a40d9eff408a78fe9ec10a0e7e60f62")}
        return requests.post("http://qup.f.360.cn/file_health_info.php", files=params)

    def upload_vt(self, apikey=None):
        """Upload the file to VirusTotal for scanning.

        Returns True when the scan was queued, False otherwise, "wrongkey"
        for an invalid API key and "Fail" when the user gives up retrying.
        Files above 30 MB are rejected locally (public API limit).

        Bug fixes vs. the original: the default used to be ``apikey=dict()``,
        so the ``is None`` fallback never ran and ``apikey[1]`` raised
        KeyError; and the 400-branch also matched 403, making the IP-ban
        branch unreachable.
        """
        if os.path.getsize(self.full_path) > 31457280:
            return False
        url = 'https://www.virustotal.com/vtapi/v2/file/scan'
        if apikey is None:
            apikey = {1: "8dd0c36fd4ef57dc1effd53d580a2d2c4413c65041abcc103fe60641dc001ea4",
                      2: "a2b51c4511a5da05b595cc57e57aad2428db72ed28d66d9c72ca394f6ce47963",
                      3: "e08d3ae2419f5a7f27b37db6adaf27b6d31d06d1c522b71d9b0ad8f25b542702"}
        i = 1
        params = {'apikey': apikey[i]}
        upload_handle = open(self.full_path, 'rb')
        file = {'file': upload_handle}
        try:
            while True:
                try:
                    response = requests.post(url, params=params, files=file)
                    if response.status_code == 200:
                        break
                    if response.status_code == 204:
                        # Rate limited: rotate to the next key and retry.
                        if i >= len(apikey):
                            i = 1
                        else:
                            i = i + 1
                        params = {'apikey': apikey[i]}
                        print("Upload: 204")
                        continue
                    if response.status_code == 400:
                        wx.MessageBox("Invalid API key, Please enter again!", "Error")
                        print(400)
                        return "wrongkey"
                    if response.status_code == 403:
                        wx.MessageBox("Your IP Address is banned by VirusTotal. You may change your IP Address by using proxy." + "\n" + "Click to try again.")
                        continue
                except Exception:
                    # wx.YES == 2 -> user chose to retry.
                    if wx.MessageBox("Uploading files fail. Please Check your Internet Connection." + "\n" + "Do you want to try again?", caption="Error", style=wx.YES_NO) == 2:
                        continue
                    else:
                        return "Fail"
        finally:
            upload_handle.close()
        report = response.json()
        if report['response_code'] == 1:
            return True
        else:
            return False

    def get_vt_report(self, apikey=None, use_crawler=True):
        """Fetch this file's VirusTotal report via the v2 API.

        Returns the report dict (with a lower-cased ``detections`` summary of
        the Kaspersky/ESET/Malwarebytes/Microsoft verdicts), or one of the
        strings "Analyzing", "Unknown", "wrongkey" or "Fail".  When every
        key is rate-limited and ``use_crawler`` is true, falls back to
        scraping the web page.

        Bug fix vs. the original: ``apikey`` defaulted to a (mutable, empty)
        dict, so the fallback keys were never installed and ``apikey[1]``
        raised KeyError.
        """
        url = 'https://www.virustotal.com/vtapi/v2/file/report'
        if apikey is None:
            apikey = {1: "8dd0c36fd4ef57dc1effd53d580a2d2c4413c65041abcc103fe60641dc001ea4",
                      2: "a2b51c4511a5da05b595cc57e57aad2428db72ed28d66d9c72ca394f6ce47963",
                      3: "e08d3ae2419f5a7f27b37db6adaf27b6d31d06d1c522b71d9b0ad8f25b542702"}
        i = 1
        params = {'apikey': apikey[i], 'resource': self.get_file_sha256()}
        while True:
            try:
                response = requests.get(url, params=params)
                if response.status_code == 200:
                    report = response.json()
                    if report["response_code"] == -2:
                        return "Analyzing"
                    if report["response_code"] == 0:
                        return "Unknown"
                    else:
                        break
                if response.status_code == 204:
                    # Rate limited: rotate keys; once all are exhausted,
                    # optionally fall back to the HTML crawler.
                    if i >= len(apikey):
                        if use_crawler:
                            return self.get_vt_report_html()
                        else:
                            i = 1
                    else:
                        i = i + 1
                    params = {'apikey': apikey[i], 'resource': self.get_file_sha256()}
                    print("Get:204")
                    continue
                if response.status_code == 400:
                    wx.MessageBox("Invalid API key, Please enter again!", "Error")
                    print(400)
                    return "wrongkey"
                if response.status_code == 403:
                    wx.MessageBox("Your IP Address is banned by VirusTotal. You may change your IP Address by using proxy." + "\n" + "Click to try again.")
                    continue
            except Exception:
                if wx.MessageBox("Getting report fails. Please Check your Internet Connection." + "\n" + "Do you want to try again?", caption="Error", style=wx.YES_NO) == 2:
                    continue
                else:
                    return "Fail"
        result = response.json()
        if result["response_code"] == 0:
            return result
        # Summarise the four "reference" engines into one searchable string.
        kaspersky = ""
        eset = ""
        malwarebytes = ""
        microsoft = ""
        if "Kaspersky" in report["scans"]:
            kaspersky = str(report["scans"]["Kaspersky"]["result"])
        if "ESET-NOD32" in report["scans"]:
            eset = str(report["scans"]["ESET-NOD32"]["result"])
        if "Malwarebytes" in report["scans"]:
            malwarebytes = str(report["scans"]["Malwarebytes"]["result"])
        if "Microsoft" in report["scans"]:
            microsoft = str(report["scans"]["Microsoft"]["result"])
        threat = kaspersky + eset + malwarebytes + microsoft
        threat = threat.lower()
        result['detections'] = threat
        return result

    def get_vt_report_html(self):
        """Scrape the VirusTotal web page for this file's report.

        Fallback used when every API key is rate-limited.  Returns a dict
        shaped like the API report, or "Analyzing"/"Unknown"/"Fail".
        """
        print("use crawler")
        report = {'response_code': 0, 'positives': 0, 'detections': "", 'total': 0}
        while True:
            try:
                url = "https://www.virustotal.com/en/file/" + self.get_file_sha256() + "/analysis/"
                response = requests.get(url)
            except Exception:
                if wx.MessageBox("Getting report fails. Please Check your Internet Connection." + "\n" + "Do you want to try again?", caption="Error", style=wx.YES_NO) == 2:
                    continue
                else:
                    return "Fail"
            soup = BeautifulSoup(response.text, 'lxml')
            try:
                # A reCAPTCHA widget means VT wants human verification: open
                # the page in a browser and let the user solve it, then retry.
                recap = soup.find(src='https://www.google.com/recaptcha/api.js')
                if recap is not None:
                    print(recap)
                    wx.MessageBox("Click to open browser to pass the CAPTCHA")
                    webbrowser.open(url)
                    wx.MessageBox('When you finished, Click OK')
                    continue
            except Exception:
                wx.MessageBox("Your IP Address is banned by VirusTotal. You may change your IP Address by using proxy." + "\n" + "Click to try again.")
                continue
            a = soup.find(class_='text-green')
            if a is not None:
                if str(a.string).find('0 / 0') != -1:
                    return "Analyzing"
                else:
                    break
            else:
                break
        if soup.find(class_="alert-heading") is not None:
            print("Unknown")
            return "Unknown"
        report['response_code'] = 1
        try:
            t = str(soup.find(class_='row').div.table.find(class_='text-red').string).strip()
            report['positives'] = t[0:t.find('/')].strip()
            report['total'] = t[t.find('/') + 2: len(t)].strip()
        except Exception:
            # Page layout changed or the detections table is missing.
            return report
        results = soup.find(id="active-tab").find(id="antivirus-results").find_all(class_="ltr text-red")
        for result in results:
            name = str(result.parent.td.string).rstrip().strip().strip("\n")
            if name == "Kaspersky" or name == "ESET-NOD32" or name == "Malwarebytes" or name == "Microsoft":
                report['detections'] = str(report['detections']) + str(result.string).strip().strip("\n")
        report['detections'] = report['detections'].lower()
        return report

    def get_threat_type(self, report, sensitivity, is_grayware=True):
        """Classify a VirusTotal report into a threat-name string.

        ``report`` is a report dict (or the strings "Fail"/"Unknown", which
        pass straight through).  ``sensitivity`` (0-100) sets the detection
        ratio above which an otherwise-unmatched sample is still flagged.
        ``is_grayware`` additionally enables the PUA/risktool categories.
        """
        if report == "Fail":
            return "Fail"
        if report == "Unknown":
            return 'Unknown'
        detected_num = report["positives"]
        threat = report['detections']
        if int(detected_num) == 0:
            return "Clean"
        final_verdict = ""
        # Platform/format prefix inferred from the vendor detection names;
        # the first matching category wins.
        type_category = {
            "Win32.": ["win32"],
            "Win64.": ["win64"],
            "JS.": ["js"],
            "VBS.": ["vba", "vbs"],
            "Shell.": ["shell"],
            "Html.": ["html"],
            "Macro.": ["doc", "macro", "office"],
            "PDF.": ["pdf"],
            "Script.": ["script", "swf"],
            "Email.": ["email"],
            "Java.": ["java"],
            "Linux.": ["linux"],
            "Android.": ["android"]
        }
        for name, rules in type_category.items():
            for rule in rules:
                if threat.find(rule) != -1:
                    final_verdict = final_verdict + name
                    break
            else:
                continue
            break
        print(final_verdict)
        # Threat family, ordered from most to least specific.
        threat_category = {
            "Phishing.Generic": ["phishing"],
            "Exploit.Generic": ["exp", "cve"],
            "Worm.Generic": ["worm"],
            "Ransom.Generic": ["ransom", "code", "mbr"],
            "Rootkit.Generic": ["root", "uefi", "boot"],
            "Backdoor.Bot": ["bot", "fareit", "rat"],
            "Backdoor.Generic": ["backdoor", "bds"],
            "Trojan.Banker": ["banker", "emotet"],
            "Trojan.Spy": ["spy"],
            "Trojan.Downloader": ["downloader"],
            "Trojan.PasswordStealer": ["pws", "psw", "passwordstealer"],
            "Trojan.Dropper": ["drop"],
            "Trojan.Injector": ["inject"],
            "Trojan.CoinMiner": ["coin", "mine"],
            "Trojan.Generic": ["trojan", "virtool", "vho", "kry", "msil", "dangerous", "generik", "adwin"]
        }
        print(int(detected_num) / int(report['total']))
        if is_grayware:
            threat_category["Grayware.Unwanted"] = ["potentially unwanted", "adware", "pua", "pup", "unwan"]
            threat_category["Grayware.RiskTool"] = ["potentially unsafe", "hacktool", "risk", "not-a-virus"]
        for name, rules in threat_category.items():
            for rule in rules:
                if threat.find(rule) != -1:
                    if final_verdict == "":
                        return "Win32." + name
                    else:
                        return final_verdict + name
        if final_verdict != "":
            return final_verdict + "Trojan.Generic"
        # No family matched: flag by raw detection ratio against sensitivity.
        threshold = int(detected_num) / int(report['total'])
        if threshold > (1 - (sensitivity / 100)):
            return "Malware.Confidence:" + str(int(threshold * 100)) + "%"
        else:
            return "NoRisk"

    def classify(self, threatname):
        """Rename the file to '<threatname>_<md5>' (extension preserved)."""
        self.rename(str(threatname) + "_" + self.get_file_md5())
def readFileChar(path):
    """Return the first two bytes of *path* as a little-endian signed short.

    Returns None when the file cannot be read or is shorter than two bytes.
    Used to check the DOS 'MZ' magic (0x5A4D == 23117).
    """
    try:
        # Context manager closes the handle even if unpack fails
        # (the original leaked the handle on a short read).
        with open(path, "rb") as handle:
            return struct.unpack("h", handle.read(2))[0]
    except (OSError, ValueError, struct.error):
        return None
def getShifting(path):
    """Return the 16-bit word at the offset stored at 0x3C, or None on error.

    At 0x3C a PE file stores the offset of its PE header; for a PE file the
    word found there is 'PE' (0x4550 == 17744).
    """
    try:
        # One open instead of the original's two; the handle is always closed.
        with open(path, "rb") as handle:
            handle.seek(60, 0)
            # NOTE: read as a 16-bit short like the original; the field is
            # really 32 bits, but small offsets dominate in practice.
            pe_offset = struct.unpack("h", handle.read(2))[0]
            handle.seek(pe_offset, 0)
            return struct.unpack("h", handle.read(2))[0]
    except (OSError, ValueError, struct.error):
        return None
def isPE(path):
    """Return True when *path* has both the DOS 'MZ' magic (0x5A4D) and the
    'PE' signature (0x4550) at the pointed-to header offset; None otherwise
    (falsy, matching the original's implicit return)."""
    return True if (readFileChar(path) == 23117 and getShifting(path) == 17744) else None
def emul(path, PE=True):
    """Recursively collect files under *path* as FilePro objects.

    When ``PE`` is true, only files with a valid PE header are included.
    """
    collected = set()
    for dirpath, _dirnames, filenames in os.walk(path):
        for fname in filenames:
            if not PE:
                collected.add(FilePro(dirpath, fname))
            elif isPE(dirpath + "\\" + fname):
                collected.add(FilePro(dirpath, fname))
    return collected
def print_result(allfiles=set()):
    """Print each file's full path and MD5 digest, one per line.

    NOTE(review): the mutable default is shared across calls, but it is only
    read here, so it is harmless.
    """
    for entry in allfiles:
        print(f"{entry.full_path} {entry.get_file_md5()}")
def remove_same(path):
    """Delete duplicate files (by MD5) under *path*, keeping the first seen.

    Returns a fresh scan of the directory after deletion.
    """
    seen = set()
    for file in emul(path):
        digest = file.get_file_md5()
        # A set gives O(1) exact lookups; the original concatenated every
        # digest into one string and used O(n) substring search per file.
        if digest in seen:
            os.remove(file.full_path)
        else:
            seen.add(digest)
    return emul(path)
def add_context_menu(menu_name, command, reg_root_key_path, reg_key_path, shortcut_key):
    """Register a Windows Explorer context-menu entry via the registry.

    Creates ``<reg_key_path>\\<menu_name>\\command`` under the given root and
    points it at ``command "%1"``.
    NOTE(review): OpenKey is called without KEY_SET_VALUE access — confirm
    this works outside elevated processes.
    """
    key = reg.OpenKey(reg_root_key_path, reg_key_path)
    # "(&X)" appends a keyboard accelerator to the displayed menu text.
    reg.SetValue(key, menu_name, reg.REG_SZ, menu_name + '(&{0})'.format(shortcut_key))
    sub_key = reg.OpenKey(key, menu_name)
    # "%1" expands to the path of the file that was right-clicked.
    reg.SetValue(sub_key, 'command', reg.REG_SZ, command + ' "%1"')
    reg.CloseKey(sub_key)
    reg.CloseKey(key)
def delete_reg_key(root_key, key, menu_name):
    """Remove a context-menu entry previously added by add_context_menu.

    Deletes the 'command' subkey first; registry keys with subkeys cannot be
    deleted directly. Errors are printed and swallowed.
    """
    try:
        parent_key = reg.OpenKey(root_key, key)
    except Exception as msg:
        print(msg)
        return
    if parent_key:
        try:
            menu_key = reg.OpenKey(parent_key, menu_name)
        except Exception as msg:
            print(msg)
            return
        if menu_key:
            try:
                reg.DeleteKey(menu_key, 'command')
            except Exception as msg:
                print(msg)
                return
            else:
                # Only reached when 'command' was deleted successfully;
                # now the (empty) menu key itself can be removed.
                # NOTE(review): the menu key handle itself is never closed.
                reg.DeleteKey(parent_key, menu_name)
def get_file_sha256(path):
    """Return the hex SHA-256 digest of the file at *path*.

    Uses a context manager so the handle is closed even if hashing fails.
    """
    with open(path, "rb") as handle:
        return hashlib.sha256(handle.read()).hexdigest()
|
// Data types
// - Primitive: single values (number, string, null, undefined, Symbol)
// - Object: compound data (object, function)
let integer = 123;
let negative = -123;
let double = 1.23;
console.log(integer, negative, double);

// The same value (123) written in binary, octal and hexadecimal literals.
let binary = 0b1111011;
let octal = 0o173;
let hex = 0x7b;
console.log(binary, octal, hex);

// Dividing a number by a non-numeric string yields NaN.
console.log(123 / "text"); // NaN
|
#ifndef VALUE_DETAIL_ITERATOR_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define VALUE_DETAIL_ITERATOR_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#if defined(_MSC_VER) || \
(defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
(__GNUC__ >= 4)) // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif
#include "yaml-cpp/dll.h"
#include "yaml-cpp/node/ptr.h"
#include "yaml-cpp/node/detail/node_iterator.h"
#include <cstddef>
#include <iterator>
namespace YAML {
namespace detail {
// Forward declaration: the element type yielded by iteration (a node for
// sequences, a key/value pair for maps).
struct iterator_value;

// Forward iterator over a YAML node's children, templated on the value type
// V so one implementation serves both iterator and const_iterator.
// NOTE(review): std::iterator is deprecated in C++17; kept because the
// inherited typedefs are part of this header's interface.
template <typename V>
class iterator_base : public std::iterator<std::forward_iterator_tag, V,
                                           std::ptrdiff_t, V*, V> {
 private:
  // Grant the other instantiation (const/non-const) access to m_iterator
  // for the converting constructor and cross comparisons.
  template <typename>
  friend class iterator_base;
  struct enabler {};  // tag type for the enable_if-gated constructor
  typedef node_iterator base_type;

  // operator* returns by value, so operator-> must keep a copy alive;
  // this proxy owns that copy for the duration of the expression.
  struct proxy {
    explicit proxy(const V& x) : m_ref(x) {}
    V* operator->() { return std::addressof(m_ref); }
    operator V*() { return std::addressof(m_ref); }

    V m_ref;
  };

 public:
  typedef typename iterator_base::value_type value_type;

 public:
  iterator_base() : m_iterator(), m_pMemory() {}
  explicit iterator_base(base_type rhs, shared_memory_holder pMemory)
      : m_iterator(rhs), m_pMemory(pMemory) {}

  // Converting copy constructor (e.g. iterator -> const_iterator), enabled
  // only when W* converts to V*.
  template <class W>
  iterator_base(const iterator_base<W>& rhs,
                typename std::enable_if<std::is_convertible<W*, V*>::value,
                                        enabler>::type = enabler())
      : m_iterator(rhs.m_iterator), m_pMemory(rhs.m_pMemory) {}

  iterator_base<V>& operator++() {
    ++m_iterator;
    return *this;
  }

  iterator_base<V> operator++(int) {
    iterator_base<V> iterator_pre(*this);
    ++(*this);
    return iterator_pre;
  }

  // Comparison ignores m_pMemory: equality is purely positional.
  template <typename W>
  bool operator==(const iterator_base<W>& rhs) const {
    return m_iterator == rhs.m_iterator;
  }

  template <typename W>
  bool operator!=(const iterator_base<W>& rhs) const {
    return m_iterator != rhs.m_iterator;
  }

  // Returns by value: one node for sequence elements, a (first, second)
  // pair for map entries, a default value otherwise.
  value_type operator*() const {
    const typename base_type::value_type& v = *m_iterator;
    if (v.pNode)
      return value_type(Node(*v, m_pMemory));
    if (v.first && v.second)
      return value_type(Node(*v.first, m_pMemory), Node(*v.second, m_pMemory));
    return value_type();
  }

  proxy operator->() const { return proxy(**this); }

 private:
  base_type m_iterator;
  shared_memory_holder m_pMemory;  // keeps the owning document's memory alive
};
}
}
#endif // VALUE_DETAIL_ITERATOR_H_62B23520_7C8E_11DE_8A39_0800200C9A66
|
import { StyleSheet } from 'react-native';
// Shared styles for the import screen: a vertically centered column with a
// 30pt horizontal gutter and an underlined text input.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    flexDirection: 'column',
    justifyContent: 'center',
    alignItems: 'stretch',
    marginLeft: 30,
    marginRight: 30,
  },
  importButton: {
    marginTop: 50,
  },
  topText: {
    height: 30,
    textAlign: 'left',
  },
  descText: {
    height: 30,
    textAlign: 'left',
  },
  textbox: {
    borderColor: 'black',
    borderBottomWidth: 1,
    textAlign: 'center',
    borderRadius: 5,
    padding: 10,
    marginTop: 10,
    marginBottom: 40,
  },
});

export default styles;
|
// Auto-generated quest record (id 17453, "Frigoris 1st Prize") for the
// in-game quest database; do not hand-edit individual fields.
g_db.quests[17453]={id:17453,name:"Frigoris 1st Prize",type:0,trigger_policy:0,on_give_up_parent_fail:1,on_success_parent_success:0,can_give_up:1,can_retake:1,can_retake_after_failure:1,on_fail_parent_fail:0,fail_on_death:0,simultaneous_player_limit:0,ai_trigger:0,ai_trigger_enable:0,auto_trigger:0,trigger_on_death:0,remove_obtained_items:1,recommended_level:0,show_quest_title:1,show_as_gold_quest:0,start_npc:0,finish_npc:0,is_craft_skill_quest:0,can_be_found:0,show_direction:1,level_min:90,level_max:150,dontshow_under_level_min:1,premise_coins:0,dontshow_without_premise_coins:1,req_reputation_min:0,req_reputation_max:0,dontshow_without_req_reputation:1,premise_quests:[],req_cultivation:0,dontshow_without_req_cultivation:1,req_faction_role:0,dontshow_without_req_faction_role:1,req_gender:0,dontshow_wrong_gender:1,req_class:0,dontshow_wrong_class:1,req_be_married:0,dontshow_without_marriage:0,req_be_gm:0,req_global_quest:0,req_global_quest_cond:0,quests_mutex:[],req_blacksmith_level:0,req_tailor_level:0,req_craftsman_level:0,req_apothecary_level:0,special_award_type:0,is_team_task:0,recv_in_team_only:0,req_success_type:2,req_npc_type:0,briefing:"You can't get your prize without Token of Trophy.",parent_quest:0,previous_quest:0,next_quest:0,sub_quest_first:0,dialogue:{},on_success:{normal:{xp:0,sp:0,coins:0,rep:0,culti:0,chi:0,level_multiplier:0,new_waypoint:0,storage_slots:0,inventory_slots:0,petbag_slots:0,tp:{world:127,x:-268.00000000,y:218.00000000,z:-57.00000000,},ai_trigger:0,ai_trigger_enable:0,divorce:0,item_groups:[{chosen_randomly:0,items:[{id:22646,is_common:1,amount:1,probability:1.00000000,},{id:21493,is_common:1,amount:1,probability:1.00000000,},]},],},by_time:[],by_item_cnt:[],},on_failure:{normal:{xp:0,sp:0,coins:0,rep:0,culti:0,chi:0,level_multiplier:0,new_waypoint:0,storage_slots:0,inventory_slots:0,petbag_slots:0,ai_trigger:0,ai_trigger_enable:0,divorce:0,item_groups:[],},by_time:[],by_item_cnt:[],},children:[]};
|
// @flow
import * as actions from '../actions/actionTypes';
type PostAction = {
type: string,
posts?: Array<any>,
postData?: any,
deletedPostId?: string
};
const postsReducer = (state: any = {}, action: PostAction) => {
switch (action.type) {
case actions.NEW_POST:
case actions.UPDATE_POST:
case actions.UPDATE_POST_SCORE:
return { ...state, [action.postData.id]: action.postData };
case actions.POSTS_LOADED:
return action.posts.reduce((postCache, post) => {
if (!post.deleted) {
postCache[post.id] = post;
}
return postCache;
}, {});
case actions.DELETE_POST:
return Object.keys(state).reduce((postCache, post_id) => {
if (action.deletedPostId !== post_id) {
postCache[post_id] = state[post_id];
}
return postCache;
}, {});
default:
return state;
}
};
export default postsReducer;
|
import React, { useState } from "react";
import Spinner from "../Spinner/Spinner";
import LoginForm from "../LoginForm/LoginForm";
import Chart from "../Chart/Chart";
import FieldContainer from "../FieldContainer/FieldContainer";
import Styles from "./Admin.module.css";
const Admin = (props) => {
const [fieldType, setFieldType] = useState("all");
if (props.isLoading) {
return <Spinner />;
}
if (!props.isLoggedin) {
return <LoginForm submitHandler={() => props.setLoggedin()} />;
}
return (
<div className={Styles.admin}>
<Chart setType={(val) => setFieldType(val)} />
<FieldContainer type={fieldType} setType={(val) => setFieldType(val)} />
</div>
);
};
export default Admin;
|
"""
Generate and compile C modules for Python.
"""
import atexit
import distutils.sysconfig
import importlib
import logging
import os
import pickle
import platform
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import textwrap
import time
import warnings
from io import BytesIO, StringIO
from typing import Dict, List, Set
import numpy.distutils
import aesara
# we will abuse the lockfile mechanism when reading and writing the registry
from aesara.compile.compilelock import lock_ctx
from aesara.configdefaults import config, gcc_version_str
from aesara.link.c.exceptions import MissingGXX
from aesara.utils import (
LOCAL_BITWIDTH,
flatten,
hash_from_code,
output_subprocess_Popen,
subprocess_Popen,
)
# Module-level logger for the C-module compilation machinery.
_logger = logging.getLogger("aesara.link.c.cmodule")

# CPython method-table calling conventions used when generating modules.
METH_VARARGS = "METH_VARARGS"
METH_NOARGS = "METH_NOARGS"

# global variable that represents the total time spent importing compiled
# modules (accumulated elsewhere in this module).
import_time = 0
def debug_counter(name, every=1):
    """
    Count how often a given piece of code runs.

    Each call bumps a counter stored as an attribute named `name` on this
    function itself; on every `every`-th hit the current count is printed
    to stderr. This is a utility function one may use when debugging.

    Examples
    --------
    debug_counter('I want to know how often I run this line')
    """
    count = getattr(debug_counter, name, 0) + 1
    setattr(debug_counter, name, count)
    if count % every == 0:
        print(f"debug_counter [{name}]: {count}", file=sys.stderr)
class ExtFunction:
    """
    A single C function destined for a DynamicModule.

    Bundles everything needed to register the function in a CPython
    extension module: its name, its full C source, its calling
    convention, and a docstring.
    """

    # str - function's name.
    name = ""
    # str - the entire code for the function; has the form
    # ``static PyObject* <name>([...]){ ... }``. See Python's C API
    # Reference for how to write C functions for python modules.
    code_block = ""
    # str - calling method for this function (i.e. 'METH_VARARGS',
    # 'METH_NOARGS').
    method = ""
    # str - documentation string for this function.
    doc = ""

    def __init__(self, name, code_block, method, doc="undocumented"):
        self.name = name
        self.code_block = code_block
        self.method = method
        self.doc = doc

    def method_decl(self):
        """
        Build this function's entry for the DynamicModule's PyMethodDef
        method table.
        """
        entry = f'"{self.name}", {self.name}, {self.method}, "{self.doc}"'
        return "\t{" + entry + "}"
class DynamicModule:
    """
    Assemble the C source of a CPython extension module.

    Fragments (includes, support code, functions, init blocks) are
    accumulated through the ``add_*`` methods; ``code()`` then renders
    the complete module source, names the module after a hash of that
    source, and freezes the instance against further additions.
    """

    def __init__(self, name=None):
        assert name is None, (
            "The 'name' parameter of DynamicModule"
            " cannot be specified anymore. Instead, 'code_hash'"
            " will be automatically computed and can be used as"
            " the module's name."
        )
        # Until finalized, add_* calls are accepted. Finalizing computes a
        # hash that replaces the placeholder and serves as module name.
        self.finalized = False
        self.code_hash = None
        self.hash_placeholder = "<<<<HASH_PLACEHOLDER>>>>"
        self.includes = ["<Python.h>", "<iostream>", '"aesara_mod_helper.h"']
        self.support_code = []
        self.functions = []
        self.init_blocks = []

    def print_methoddef(self, stream):
        """Write the PyMethodDef table for all registered functions."""
        print("static PyMethodDef MyMethods[] = {", file=stream)
        for fn in self.functions:
            print(fn.method_decl(), ",", file=stream)
        print("\t{NULL, NULL, 0, NULL}", file=stream)
        print("};", file=stream)

    def print_init(self, stream):
        """Write the PyModuleDef struct and the PyInit entry point."""
        print(
            f"""static struct PyModuleDef moduledef = {{
      PyModuleDef_HEAD_INIT,
      "{self.hash_placeholder}",
      NULL,
      -1,
      MyMethods,
}};
""",
            file=stream,
        )
        print(
            f"PyMODINIT_FUNC PyInit_{self.hash_placeholder}(void) {{",
            file=stream,
        )
        for block in self.init_blocks:
            print("  ", block, file=stream)
        print("  PyObject *m = PyModule_Create(&moduledef);", file=stream)
        print("  return m;", file=stream)
        print("}", file=stream)

    def add_include(self, str):
        # Accepts either a fully-formed include ("<x.h>" / '"x.h"') or a
        # bare header name, which `code()` will quote.
        assert not self.finalized
        self.includes.append(str)

    def add_init_code(self, code):
        assert not self.finalized
        self.init_blocks.append(code)

    def add_support_code(self, code):
        # Duplicate fragments are silently dropped. TODO: KLUDGE
        assert not self.finalized
        if code and code not in self.support_code:
            self.support_code.append(code)

    def add_function(self, fn):
        assert not self.finalized
        self.functions.append(fn)

    def code(self):
        """
        Render the full C source, substitute the source hash for the
        module-name placeholder, and finalize this instance.
        """
        buf = StringIO()
        for inc in self.includes:
            if not inc:
                continue
            if inc.startswith(("<", '"')):
                print("#include", inc, file=buf)
            else:
                print(f'#include "{inc}"', file=buf)

        def banner(title):
            # Section separator comment in the generated C source.
            print("//////////////////////", file=buf)
            print(title, file=buf)
            print("//////////////////////", file=buf)

        banner("//// Support Code")
        for chunk in self.support_code:
            print(chunk, file=buf)
        banner("//// Functions")
        for fn in self.functions:
            print(fn.code_block, file=buf)
        banner("//// Module init")
        self.print_methoddef(buf)
        self.print_init(buf)

        rval = buf.getvalue()
        # Make sure the hash of the code hasn't changed across renderings.
        h = hash_from_code(rval)
        assert self.code_hash is None or self.code_hash == h
        self.code_hash = h
        rval = re.sub(self.hash_placeholder, self.code_hash, rval)
        # Finalize the Module, so no support code or function can be added.
        self.finalized = True
        return rval

    def list_code(self, ofile=sys.stdout):
        """
        Print out the code with line numbers to `ofile`.
        """
        for lineno, line in enumerate(self.code().split("\n"), start=1):
            print(f"{lineno}", line, file=ofile)
        ofile.flush()

    # TODO: add_type
# TODO: add_type
def _get_ext_suffix():
"""Get the suffix for compiled extensions"""
dist_suffix = distutils.sysconfig.get_config_var("EXT_SUFFIX")
if dist_suffix is None:
dist_suffix = distutils.sysconfig.get_config_var("SO")
return dist_suffix
def dlimport(fullpath, suffix=None):
    """
    Dynamically load a .so, .pyd, .dll, or .py file.

    Parameters
    ----------
    fullpath : str
        A fully-qualified path to a compiled python module.
    suffix : str
        A suffix to strip from the end of fullpath to get the
        import name.

    Returns
    -------
    object
        The dynamically loaded module (from __import__).
    """
    if not os.path.isabs(fullpath):
        raise ValueError("`fullpath` must be an absolute path", fullpath)
    if suffix is None:
        # Infer the suffix: prefer this interpreter's extension suffix,
        # then fall back on common shared-library / Python extensions.
        suffix = ""
        dist_suffix = _get_ext_suffix()
        if dist_suffix is not None and dist_suffix != "":
            if fullpath.endswith(dist_suffix):
                suffix = dist_suffix
        if suffix == "":
            if fullpath.endswith(".so"):
                suffix = ".so"
            elif fullpath.endswith(".pyd"):
                suffix = ".pyd"
            elif fullpath.endswith(".dll"):
                suffix = ".dll"
            elif fullpath.endswith(".py"):
                suffix = ".py"
    rval = None
    if fullpath.endswith(suffix):
        # The import name is "<containing dir>.<file stem>".
        module_name = ".".join(fullpath.split(os.path.sep)[-2:])[: -len(suffix)]
    else:
        raise ValueError("path has wrong suffix", (fullpath, suffix))
    # Directory that must be on sys.path for the import to resolve.
    workdir = fullpath[: -len(module_name) - 1 - len(suffix)]

    _logger.debug(f"WORKDIR {workdir}")
    _logger.debug(f"module_name {module_name}")

    sys.path[0:0] = [workdir]  # insert workdir at beginning (temporarily)
    global import_time
    try:
        # Required so that a module file freshly written to disk is
        # visible to the import system.
        importlib.invalidate_caches()
        t0 = time.time()
        with warnings.catch_warnings():
            # NumPy emits this warning when a compiled extension was built
            # against a different NumPy version; it is noise here.
            warnings.filterwarnings("ignore", message="numpy.ndarray size changed")
            rval = __import__(module_name, {}, {}, [module_name])
        t1 = time.time()
        # Accumulate the total time this process spent importing modules.
        import_time += t1 - t0
        if not rval:
            raise Exception("__import__ failed", fullpath)
    finally:
        # Always undo the temporary sys.path insertion.
        del sys.path[0]

    # Sanity check: the loaded module should come from `fullpath`.
    assert fullpath.startswith(rval.__file__)
    return rval
def dlimport_workdir(basedir):
    """
    Create and return a fresh temporary sub-directory of `basedir`.

    The returned directory is where a .so file should be placed so that
    `dlimport` can load it; `basedir` should normally be
    config.compiledir.
    """
    workdir = tempfile.mkdtemp(dir=basedir)
    return workdir
def last_access_time(path):
    """
    Return the time of last access of `path`, in seconds since the epoch.

    Indexing the stat result with ``stat.ST_ATIME`` yields the
    integer-truncated timestamp (unlike the float ``st_atime`` attribute).
    """
    stat_result = os.stat(path)
    return stat_result[stat.ST_ATIME]
def module_name_from_dir(dirname, err=True, files=None):
    """
    Scan the contents of a cache directory and return full path of the
    dynamic lib in it.

    Parameters
    ----------
    dirname : str
        Directory to scan.
    err : bool
        If True, raise on a missing directory and on an unexpected number
        of compiled modules; if False, return None for a missing directory
        or an empty directory instead.
    files : list of str, optional
        Pre-computed listing of `dirname`; when None, the directory is
        listed here.

    Returns
    -------
    str or None
        Path of the single .so/.pyd found, or None (only when `err` is
        False).
    """
    if files is None:
        try:
            files = os.listdir(dirname)
        except OSError as e:
            if e.errno == errno.ENOENT and not err:  # No such file or directory
                return None
            # Previously, other OSErrors (and ENOENT with err=True) were
            # silently swallowed here, leaving `files` as None and causing
            # a confusing TypeError below. Propagate them instead.
            raise
    names = [fname for fname in files if fname.endswith((".so", ".pyd"))]
    if len(names) == 0:
        if not err:
            return None
        # This used to raise the misleading "More than 1 compiled module"
        # message; report the actual problem. ValueError is kept because
        # callers (e.g. ModuleCache.refresh) catch that type.
        raise ValueError("No compiled module in this directory: " + dirname)
    elif len(names) == 1:
        return os.path.join(dirname, names[0])
    else:
        raise ValueError("More than 1 compiled module in this directory:" + dirname)
def is_same_entry(entry_1, entry_2):
    """
    Return True iff both paths can be considered to point to the same module.

    Holds when at least one of the following is true:
      - the paths are equal,
      - their real (symlink-resolved) paths are equal,
      - they are files with the same name inside same-named temporary
        work directories (name starting with "tmp").
    """
    if entry_1 == entry_2 or os.path.realpath(entry_1) == os.path.realpath(entry_2):
        return True
    dir_1 = os.path.dirname(entry_1)
    dir_2 = os.path.dirname(entry_2)
    same_file = os.path.basename(entry_1) == os.path.basename(entry_2)
    same_workdir = os.path.basename(dir_1) == os.path.basename(dir_2)
    in_tmp_workdir = os.path.basename(dir_1).startswith("tmp")
    return same_file and same_workdir and in_tmp_workdir
def get_module_hash(src_code, key):
    """
    Return a SHA256 hash that uniquely identifies a module.

    The hash takes into account:

    1. The C source code of the module (`src_code`).
    2. The version part of the key.
    3. The compiler options defined in `key` (command line parameters and
       libraries to link against).
    4. The NumPy ABI version.
    """
    # Gather every element that must force a different module hash when it
    # changes. The source lines are stripped so that a pure re-indentation
    # does not trigger a recompilation.
    to_hash = [line.strip() for line in src_code.split("\n")]
    # Version part of the key (skipped when the key is unversioned).
    if key[0]:
        to_hash += [str(el) for el in key[0]]
    c_link_key = key[1]
    # Be deliberately strict about the key structure so that any change to
    # the CLinker key format is caught early instead of silently producing
    # bogus hashes. If the key structure changes, `get_safe_part` below may
    # also need to be modified.
    error_msg = (
        "This should not happen unless someone modified the code "
        "that defines the CLinker key, in which case you should "
        "ensure this piece of code is still valid (and this "
        "AssertionError may be removed or modified to accommodate "
        "this change)"
    )
    assert c_link_key[0] == "CLinker.cmodule_key", error_msg
    for key_element in c_link_key[1:]:
        if isinstance(key_element, tuple):
            # C++ compilation command-line parameters, or the libraries
            # to link against.
            to_hash += list(key_element)
        elif isinstance(key_element, str):
            if key_element.startswith(("md5:", "hash:")):
                # SHA256 hash of the config options; 'md5:' is still
                # accepted so caches from old Aesara versions keep working.
                break
            elif key_element.startswith(
                ("NPY_ABI_VERSION=0x", "c_compiler_str=")
            ):
                to_hash.append(key_element)
            else:
                raise AssertionError(error_msg)
        else:
            raise AssertionError(error_msg)
    return hash_from_code("\n".join(to_hash))
def get_safe_part(key):
    """
    Return a tuple containing a subset of `key`, to be used to find equal keys.

    The result contains only objects whose __eq__ and __hash__ methods can
    be trusted: the version part of the key plus the SHA256 hash of the
    config options. This narrows the number of key comparisons needed when
    hunting for keys with broken __eq__ / __hash__ implementations.
    """
    version = key[0]
    # This function should only be called on versioned keys.
    assert version
    # Locate the config-options hash inside the link key. It is a SHA256
    # hash; the 'md5:' prefix is still recognized so that caches written
    # by old Aesara versions keep loading. When no hash element exists
    # (e.g. a future key format found in an old cache), None is used and
    # the cache machinery will simply skip the key.
    c_link_key = key[1]
    config_hash = None
    for element in c_link_key[1:]:
        if not isinstance(element, str):
            continue
        if element.startswith("md5:"):
            config_hash = element[len("md5:"):]
            break
        if element.startswith("hash:"):
            config_hash = element[len("hash:"):]
            break
    return key[0] + (config_hash,)
class KeyData:
    """
    Store the association between a set of keys and one compiled module.

    Parameters
    ----------
    keys
        Set of keys that are associated to the exact same module.
    module_hash
        Hash identifying the module (it should hash both the code and the
        compilation options).
    key_pkl
        Path to the file in which this KeyData object should be pickled.
    entry
        Path to the compiled module file.
    """

    def __init__(self, keys, module_hash, key_pkl, entry):
        self.keys = keys
        self.module_hash = module_hash
        self.key_pkl = key_pkl
        self.entry = entry

    def add_key(self, key, save_pkl=True):
        """
        Register `key` with this module, updating the pickled file if asked.
        """
        assert key not in self.keys
        self.keys.add(key)
        if save_pkl:
            self.save_pkl()

    def remove_key(self, key, save_pkl=True):
        """
        Forget `key`, updating the pickled file if asked.
        """
        self.keys.remove(key)
        if save_pkl:
            self.save_pkl()

    def save_pkl(self):
        """
        Dump this object into its `key_pkl` file.

        May raise a pickle.PicklingError if such an exception is raised at
        pickle time (in which case a warning is also displayed).
        """
        # Note that writing in binary mode is important under Windows.
        try:
            with open(self.key_pkl, "wb") as f:
                pickle.dump(self, f, protocol=pickle.HIGHEST_PROTOCOL)
        except pickle.PicklingError:
            _logger.warning(f"Cache leak due to unpickle-able key data {self.keys}")
            os.remove(self.key_pkl)
            raise

    def get_entry(self):
        """
        Return path to the module file.
        """
        # KeyData objects pickled before the 'entry' field existed lack the
        # attribute; recover it from the directory holding the pickle.
        try:
            return self.entry
        except AttributeError:
            self.entry = module_name_from_dir(os.path.dirname(self.key_pkl))
            return self.entry

    def delete_keys_from(self, entry_from_key, do_manual_check=True):
        """
        Delete from `entry_from_key` all keys associated to this KeyData.

        Broken keys never make it into ``self.keys``, so when
        `do_manual_check` is True we additionally purge any key mapping to
        the same module entry.
        """
        entry = self.get_entry()
        for key in self.keys:
            # A missing key means the compiledir was wiped while this
            # process was running; nothing to do.
            try:
                del entry_from_key[key]
            except KeyError:
                pass
        if do_manual_check:
            stale = [k for k, e in entry_from_key.items() if e == entry]
            for k in stale:
                try:
                    del entry_from_key[k]
                except KeyError:
                    # Same compiledir-deleted-underneath-us situation.
                    pass
class ModuleCache:
"""
Interface to the cache of dynamically compiled modules on disk.
Note that this interface does not assume exclusive use of the cache
directory. It is built to handle the case where multiple programs are also
using instances of this class to manage the same directory.
The cache works on the basis of keys. Each key is mapped to only one
dynamic module, but multiple keys may be mapped to the same module (see
below for details). Each module is a dynamic library file, that Python
can import.
The cache contains one directory for each module, containing:
- the dynamic library file itself (.so/.pyd),
- an empty __init__.py file, so Python can import it,
- a file containing the source code for the module (mod.cpp/mod.cu),
- a key.pkl file, containing a KeyData object with all the keys
associated with that module,
- possibly a delete.me file, meaning this directory has been marked
for deletion.
Keys should be tuples of length 2: (version, rest). The
``rest`` can be anything hashable and picklable, that uniquely
identifies the computation in the module. The key is returned by
``CLinker.cmodule_key_``.
The ``version`` should be a hierarchy of tuples of integers.
If the ``version`` is either 0 or (), then the key is unversioned, and its
corresponding module will be marked for deletion in an atexit() handler.
If the ``version`` is neither 0 nor (), then the module will be kept in the
cache between processes.
An unversioned module is not always deleted by the process that
creates it. Deleting such modules may not work on NFS filesystems
because the tmpdir in which the library resides is in use until the
end of the process' lifetime. In this case, unversioned modules
are left in their tmpdirs without corresponding .pkl files. These
modules and their directories are erased by subsequent processes'
refresh() functions.
Two different keys are mapped to the same module when all conditions below
are met:
- They have the same version.
- They share the same compilation options in their ``rest`` part (see
``CLinker.cmodule_key_`` for how this part is built).
- They share the same C code.
These three elements uniquely identify a module, and are summarized
in a single "module hash".
Parameters
----------
check_for_broken_eq
A bad __eq__ implementation can break this cache mechanism.
This option turns on a not-too-expensive sanity check every
time a new key is added to the cache.
do_refresh : bool
If True, then the ``refresh`` method will be called
in the constructor.
"""
dirname: str = ""
"""
The working directory that is managed by this interface.
"""
module_from_name: Dict = {}
"""
Maps a module filename to the loaded module object.
"""
entry_from_key: Dict = {}
"""
Maps keys to the filename of a .so/.pyd.
"""
similar_keys: Dict = {}
"""
Maps a part-of-key to all keys that share this same part.
"""
module_hash_to_key_data: Dict = {}
"""
Maps a module hash to its corresponding KeyData object.
"""
stats: List = []
"""
A list with counters for the number of hits, loads, compiles issued by
module_from_key().
"""
loaded_key_pkl: Set = set()
"""
Set of all key.pkl files that have been loaded.
"""
    def __init__(self, dirname, check_for_broken_eq=True, do_refresh=True):
        """
        See the class docstring for parameter descriptions.
        """
        self.dirname = dirname
        # Copy the class-level dicts so each instance works on its own
        # mappings (the class attributes only provide the defaults).
        self.module_from_name = dict(self.module_from_name)
        self.entry_from_key = dict(self.entry_from_key)
        self.module_hash_to_key_data = dict(self.module_hash_to_key_data)
        self.similar_keys = dict(self.similar_keys)
        # Counters for [hits, loads, compiles] issued by module_from_key().
        self.stats = [0, 0, 0]
        self.check_for_broken_eq = check_for_broken_eq
        self.loaded_key_pkl = set()
        self.time_spent_in_check_key = 0

        if do_refresh:
            self.refresh()
age_thresh_use = config.cmodule__age_thresh_use # default 24 days
"""
The default age threshold (in seconds) for cache files we want to use.
Older modules will be deleted in ``clear_old``.
"""
    def _get_module(self, name):
        """
        Fetch a compiled module from the loaded cache or the disk.
        """
        if name not in self.module_from_name:
            _logger.debug(f"loading name {name}")
            # First time this process sees the module: load it from disk.
            self.module_from_name[name] = dlimport(name)
            self.stats[1] += 1  # count a load
        else:
            _logger.debug(f"returning compiled module from cache {name}")
            self.stats[0] += 1  # count a hit
        return self.module_from_name[name]
    def refresh(self, age_thresh_use=None, delete_if_problem=False, cleanup=True):
        """
        Update cache data by walking the cache directory structure.

        Load key.pkl files that have not been loaded yet.
        Remove entries which have been removed from the filesystem.
        Also, remove malformed cache directories.

        Parameters
        ----------
        age_thresh_use
            Do not use modules older than this. Defaults to
            self.age_thresh_use.
        delete_if_problem : bool
            If True, cache entries that meet one of those two conditions are
            deleted:
            - Those for which unpickling the KeyData file fails with
              an unknown exception.
            - Duplicated modules, regardless of their age.
        cleanup : bool
            Do a cleanup of the cache removing expired and broken modules.

        Returns
        -------
        list
            A list of modules of age higher than age_thresh_use.
        """
        if age_thresh_use is None:
            age_thresh_use = self.age_thresh_use
        start_time = time.time()
        too_old_to_use = []

        # Deletions are only recorded while walking; they are performed at
        # the very end, under the compile lock (see bottom of this method).
        to_delete = []
        to_delete_empty = []

        def rmtree(*args, **kwargs):
            # Record a directory for deferred deletion (no-op if cleanup
            # is disabled).
            if cleanup:
                to_delete.append((args, kwargs))

        def rmtree_empty(*args, **kwargs):
            # Same as rmtree, but the directory is re-checked for emptiness
            # right before deletion.
            if cleanup:
                to_delete_empty.append((args, kwargs))

        # add entries that are not in the entry_from_key dictionary
        time_now = time.time()
        # Go through directories in alphabetical order to ensure consistent
        # behavior.
        try:
            subdirs = sorted(os.listdir(self.dirname))
        except OSError:
            # This can happen if the dir don't exist.
            subdirs = []
        files, root = None, None  # To make sure the "del" below works
        for subdirs_elem in subdirs:
            # Never clean/remove lock_dir
            if subdirs_elem == "lock_dir":
                continue
            root = os.path.join(self.dirname, subdirs_elem)
            # Don't delete the gpuarray kernel cache
            if root == config.gpuarray__cache_path:
                continue
            key_pkl = os.path.join(root, "key.pkl")
            if key_pkl in self.loaded_key_pkl:
                continue
            if not os.path.isdir(root):
                continue
            files = os.listdir(root)
            if not files:
                rmtree_empty(root, ignore_nocleanup=True, msg="empty dir")
                continue
            if "delete.me" in files:
                rmtree(root, ignore_nocleanup=True, msg="delete.me found in dir")
                continue
            elif "key.pkl" in files:
                try:
                    entry = module_name_from_dir(root, files=files)
                except ValueError:  # there is a key but no dll!
                    if not root.startswith("/tmp"):
                        # Under /tmp, file are removed periodically by the
                        # os. So it is normal that this happens from time
                        # to time.
                        _logger.warning(
                            "ModuleCache.refresh() Found key "
                            f"without dll in cache, deleting it. {key_pkl}",
                        )
                    rmtree(
                        root,
                        ignore_nocleanup=True,
                        msg="missing module file",
                        level=logging.INFO,
                    )
                    continue
                if (time_now - last_access_time(entry)) < age_thresh_use:
                    _logger.debug(f"refresh adding {key_pkl}")

                    def unpickle_failure():
                        _logger.info(
                            f"ModuleCache.refresh() Failed to unpickle cache file {key_pkl}",
                        )

                    try:
                        with open(key_pkl, "rb") as f:
                            key_data = pickle.load(f)
                    except EOFError:
                        # Happened once... not sure why (would be worth
                        # investigating if it ever happens again).
                        unpickle_failure()
                        rmtree(
                            root,
                            ignore_nocleanup=True,
                            msg="broken cache directory [EOF]",
                            level=logging.WARNING,
                        )
                        continue
                    except Exception:
                        unpickle_failure()
                        if delete_if_problem:
                            rmtree(
                                root,
                                ignore_nocleanup=True,
                                msg="broken cache directory",
                                level=logging.INFO,
                            )
                        else:
                            # This exception is often triggered by keys
                            # that contain references to classes that have
                            # not yet been imported (e.g. when running two
                            # different Aesara-based scripts). They are not
                            # necessarily broken, but we cannot load them
                            # now. They will be loaded later if needed.
                            pass
                        continue

                    if not isinstance(key_data, KeyData):
                        # This is some old cache data, that does not fit
                        # the new cache format. It would be possible to
                        # update it, but it is not entirely safe since we
                        # do not know the config options that were used.
                        # As a result, we delete it instead (which is also
                        # simpler to implement).
                        rmtree(
                            root,
                            ignore_nocleanup=True,
                            msg=(
                                "invalid cache entry format -- this "
                                "should not happen unless your cache "
                                "was really old"
                            ),
                            level=logging.WARN,
                        )
                        continue

                    # Check the path to the module stored in the KeyData
                    # object matches the path to `entry`. There may be
                    # a mismatch e.g. due to symlinks, or some directory
                    # being renamed since last time cache was created.
                    kd_entry = key_data.get_entry()
                    if kd_entry != entry:
                        if is_same_entry(entry, kd_entry):
                            # Update KeyData object. Note that we also need
                            # to update the key_pkl field, because it is
                            # likely to be incorrect if the entry itself
                            # was wrong.
                            key_data.entry = entry
                            key_data.key_pkl = key_pkl
                        else:
                            # This is suspicious. Better get rid of it.
                            rmtree(
                                root,
                                ignore_nocleanup=True,
                                msg="module file path mismatch",
                                level=logging.INFO,
                            )
                            continue

                    # Find unversioned keys from other processes.
                    # TODO: check if this can happen at all
                    to_del = [key for key in key_data.keys if not key[0]]
                    if to_del:
                        _logger.warning(
                            "ModuleCache.refresh() Found unversioned "
                            f"key in cache, removing it. {key_pkl}",
                        )
                        # Since the version is in the module hash, all
                        # keys should be unversioned.
                        if len(to_del) != len(key_data.keys):
                            _logger.warning(
                                "Found a mix of unversioned and "
                                "versioned keys for the same "
                                f"module {key_pkl}",
                            )
                        rmtree(
                            root,
                            ignore_nocleanup=True,
                            msg="unversioned key(s) in cache",
                            level=logging.INFO,
                        )
                        continue

                    mod_hash = key_data.module_hash
                    if mod_hash in self.module_hash_to_key_data:
                        # This may happen when two processes running
                        # simultaneously compiled the same module, one
                        # after the other. We delete one once it is old
                        # enough (to be confident there is no other process
                        # using it), or if `delete_if_problem` is True.
                        # Note that it is important to walk through
                        # directories in alphabetical order so as to make
                        # sure all new processes only use the first one.
                        if cleanup:
                            age = time.time() - last_access_time(entry)
                            if delete_if_problem or age > self.age_thresh_del:
                                rmtree(
                                    root,
                                    ignore_nocleanup=True,
                                    msg="duplicated module",
                                    level=logging.DEBUG,
                                )
                            else:
                                _logger.debug(
                                    "Found duplicated module not "
                                    "old enough yet to be deleted "
                                    f"(age: {age}): {entry}",
                                )
                        continue

                    # Remember the map from a module's hash to the KeyData
                    # object associated with it.
                    self.module_hash_to_key_data[mod_hash] = key_data

                    for key in key_data.keys:
                        if key not in self.entry_from_key:
                            self.entry_from_key[key] = entry
                            # Assert that we have not already got this
                            # entry somehow.
                            assert entry not in self.module_from_name
                            # Store safe part of versioned keys.
                            if key[0]:
                                self.similar_keys.setdefault(
                                    get_safe_part(key), []
                                ).append(key)
                        else:
                            dir1 = os.path.dirname(self.entry_from_key[key])
                            dir2 = os.path.dirname(entry)
                            _logger.warning(
                                "The same cache key is associated to "
                                f"different modules ({dir1} and {dir2}). This "
                                "is not supposed to happen! You may "
                                "need to manually delete your cache "
                                "directory to fix this.",
                            )
                    # Clean up the name space to prevent bug.
                    if key_data.keys:
                        del key

                    self.loaded_key_pkl.add(key_pkl)
                else:
                    too_old_to_use.append(entry)
            # If the compilation failed, no key.pkl is in that
            # directory, but a mod.* should be there.
            # We do nothing here.

        # Clean up the name space to prevent bug.
        del root, files, subdirs

        # Remove entries that are not in the filesystem.
        items_copy = list(self.module_hash_to_key_data.items())
        for module_hash, key_data in items_copy:
            entry = key_data.get_entry()
            try:
                # Test to see that the file is [present and] readable.
                open(entry).close()
                gone = False
            except OSError:
                gone = True
            if gone:
                # Assert that we did not have one of the deleted files
                # loaded up and in use.
                # If so, it should not have been deleted. This should be
                # considered a failure of the OTHER process, that deleted
                # it.
                if entry in self.module_from_name:
                    _logger.warning(
                        "A module that was loaded by this "
                        "ModuleCache can no longer be read from file "
                        f"{entry}... this could lead to problems.",
                    )
                    del self.module_from_name[entry]
                _logger.info(f"deleting ModuleCache entry {entry}")
                key_data.delete_keys_from(self.entry_from_key)
                del self.module_hash_to_key_data[module_hash]
                if key_data.keys and list(key_data.keys)[0][0]:
                    # this is a versioned entry, so should have been on
                    # disk. Something weird happened to cause this, so we
                    # are responding by printing a warning, removing
                    # evidence that we ever saw this mystery key.
                    pkl_file_to_remove = key_data.key_pkl
                    if not key_data.key_pkl.startswith("/tmp"):
                        # Under /tmp, file are removed periodically by the
                        # os. So it is normal that this happen from time to
                        # time.
                        _logger.warning(
                            f"Removing key file {pkl_file_to_remove} because the "
                            "corresponding module is gone from the "
                            "file system."
                        )
                    self.loaded_key_pkl.remove(pkl_file_to_remove)

        # Perform the deferred deletions, under the compile lock so no
        # other process is working in those directories.
        # NOTE(review): `_rmtree` is defined elsewhere in this module.
        if to_delete or to_delete_empty:
            with lock_ctx():
                for a, kw in to_delete:
                    _rmtree(*a, **kw)
                for a, kw in to_delete_empty:
                    files = os.listdir(a[0])
                    if not files:
                        _rmtree(*a, **kw)

        _logger.debug(f"Time needed to refresh cache: {time.time() - start_time}")

        return too_old_to_use
    def _get_from_key(self, key, key_data=None):
        """
        Returns a module if the passed-in key is found in the cache
        and None otherwise.

        Exactly one of `key` and `key_data` must be provided: either the
        entry is looked up by key, or it is taken directly from a KeyData
        object.

        May raise ValueError if the key is malformed.
        """
        name = None
        if key is not None:
            assert key_data is None
            try:
                _version, _rest = key
            except (TypeError, ValueError):
                raise ValueError("Invalid key. key must have form (version, rest)", key)
            if key in self.entry_from_key:
                name = self.entry_from_key[key]
        else:
            assert key_data is not None
            name = key_data.get_entry()
        if name is None:
            return None
        return self._get_module(name)
    def _get_from_hash(self, module_hash, key):
        """
        Return the cached module identified by `module_hash`, registering
        `key` as an additional key for it; None when the hash is unknown.
        """
        if module_hash in self.module_hash_to_key_data:
            key_data = self.module_hash_to_key_data[module_hash]
            module = self._get_from_key(None, key_data)
            with lock_ctx():
                try:
                    # Only persist the pickle for versioned keys (key[0]).
                    key_data.add_key(key, save_pkl=bool(key[0]))
                    key_broken = False
                except pickle.PicklingError:
                    key_data.remove_key(key)
                    key_broken = True
                # We need the lock while we check in case of parallel
                # process that could be changing the file at the same
                # time.
                if key[0] and not key_broken and self.check_for_broken_eq:
                    self.check_key(key, key_data.key_pkl)
            self._update_mappings(
                key, key_data, module.__file__, check_in_keys=not key_broken
            )
            return module
        else:
            return None
    def _update_mappings(self, key, key_data, name, check_in_keys):
        """
        Propagate every key known for this module into `entry_from_key`
        and, for versioned keys, into `similar_keys`.
        """
        all_keys = key_data.keys
        if not all_keys:
            # Nothing was recorded in the KeyData (e.g. the key could not
            # be pickled): fall back on the single key we were given.
            all_keys = [key]
        if check_in_keys:
            assert key in all_keys
        for k in all_keys:
            if k in self.entry_from_key:
                assert self.entry_from_key[k] == name, (self.entry_from_key[k], name)
            else:
                self.entry_from_key[k] = name
                if key[0]:
                    # NOTE(review): this appends `key` (not `k`) under the
                    # safe part of `k` — presumably fine since all keys
                    # here map to the same module, but worth confirming.
                    self.similar_keys.setdefault(get_safe_part(k), []).append(key)
    def _add_to_cache(self, module, key, module_hash):
        """
        Record a freshly compiled `module` under `key` / `module_hash` and
        return the new KeyData.

        This function expects the compile lock to be held.
        """
        name = module.__file__
        _logger.debug(f"Adding module to cache {key} {name}")
        # Changing the hash of the key is not allowed during
        # compilation. That is the only cause found that makes
        # the following assert fail.
        assert key not in self.entry_from_key

        location = os.path.dirname(name)
        key_pkl = os.path.join(location, "key.pkl")
        assert not os.path.exists(key_pkl)
        key_data = KeyData(
            keys={key}, module_hash=module_hash, key_pkl=key_pkl, entry=name
        )

        key_broken = False
        if key[0]:
            # Versioned key: persist the KeyData next to the module.
            try:
                key_data.save_pkl()
            except pickle.PicklingError:
                # The key itself cannot be pickled: drop it and re-save so
                # the cache file stays usable.
                key_broken = True
                key_data.remove_key(key)
                key_data.save_pkl()

            if not key_broken and self.check_for_broken_eq:
                self.check_key(key, key_pkl)
            self.loaded_key_pkl.add(key_pkl)
        elif config.cmodule__warn_no_version:
            key_flat = flatten(key)
            ops = [k for k in key_flat if isinstance(k, aesara.graph.op.Op)]
            _logger.warning(
                "not all the"
                " following op(s) implement"
                " c_code_cache_version(). This makes them"
                " recompiled for each process." + str(ops)
            )
        self._update_mappings(key, key_data, module.__file__, not key_broken)
        return key_data
    def module_from_key(self, key, lnk=None):
        """
        Return a module from the cache, compiling it if necessary.

        Parameters
        ----------
        key
            The key object associated with the module. If this hits a match,
            we avoid compilation.
        lnk
            Usually a CLinker instance, but it can be any object that defines
            the `get_src_code()` and `compile_cmodule(location)` functions. The
            first one returns the source code of the module to load/compile and
            the second performs the actual compilation.
        """
        # Is the module in the cache?
        module = self._get_from_key(key)
        if module is not None:
            return module

        src_code = lnk.get_src_code()
        # Is the source code already in the cache?
        module_hash = get_module_hash(src_code, key)
        module = self._get_from_hash(module_hash, key)
        if module is not None:
            return module

        with lock_ctx():
            # 1) Maybe somebody else compiled it for us while we
            #    were waiting for the lock. Try to load it again.
            # 2) If other repos that import Aesara have Aesara ops defined,
            #    we need to refresh the cache here. Otherwise, there are
            #    import order problems.
            #    When device=gpu, we compile during Aesara
            #    import. This triggers the loading of the cache. But
            #    unpickling the cache asks that the external Ops are
            #    completely loaded, which isn't always the case!
            #    If a module isn't completely loaded and its unpickling
            #    fails, it means it is safe for this function
            #    compilation to skip them, but not for future
            #    compilations. So reloading the cache here
            #    fixes this problem. (we could do that only once)
            self.refresh(cleanup=False)

            module = self._get_from_key(key)
            if module is not None:
                return module

            module = self._get_from_hash(module_hash, key)
            if module is not None:
                return module

            hash_key = hash(key)

            nocleanup = False
            try:
                location = dlimport_workdir(self.dirname)
                # Delegate the actual compilation to the linker object.
                module = lnk.compile_cmodule(location)
                name = module.__file__
                assert name.startswith(location)
                assert name not in self.module_from_name
                self.module_from_name[name] = module
                nocleanup = True
            except OSError as e:
                _logger.error(e)
                if e.errno == 31:
                    # errno 31 — presumably the compiledir being overfull;
                    # report its size to help diagnosis. TODO confirm.
                    _logger.error(
                        f"There are {len(os.listdir(config.compiledir))} files in {config.compiledir}"
                    )
                raise
            finally:
                if not nocleanup:
                    # Compilation failed: drop the half-built work dir.
                    _rmtree(
                        location,
                        ignore_if_missing=True,
                        msg="exception during compilation",
                    )

            # Changing the hash of the key is not allowed during
            # compilation.
            assert hash(key) == hash_key

            key_data = self._add_to_cache(module, key, module_hash)
            self.module_hash_to_key_data[module_hash] = key_data

        self.stats[2] += 1  # count a compile
        return module
    def check_key(self, key, key_pkl):
        """
        Perform checks to detect broken `__eq__` / `__hash__` implementations.

        Parameters
        ----------
        key
            The key to be checked.
        key_pkl
            Its associated pickled file containing a KeyData.

        Raises
        ------
        AssertionError
            If `key` is missing from (or equal to more than one entry of)
            the unpickled KeyData, or if two equal keys hash differently.
        """
        start_time = time.time()
        # Verify that when we reload the KeyData from the pickled file, the
        # same key can be found in it, and is not equal to more than one
        # other key.
        for i in range(3):
            try:
                with open(key_pkl, "rb") as f:
                    key_data = pickle.load(f)
                break
            except EOFError:
                # This file is probably getting written/updated at the
                # same time. This can happen as we read the cache
                # without taking the lock.
                if i == 2:
                    # Last retry: take the lock so we read a fully
                    # written file.
                    with lock_ctx():
                        with open(key_pkl, "rb") as f:
                            key_data = pickle.load(f)
                time.sleep(2)
        found = sum(key == other_key for other_key in key_data.keys)
        msg = ""
        if found == 0:
            msg = "Key not found in unpickled KeyData file"
            if key_data.keys:
                # This is to make debugging in pdb easier, by providing
                # the offending keys in the local context.
                # key_data_keys = list(key_data.keys)
                # import pdb; pdb.set_trace()
                pass
        elif found > 1:
            msg = "Multiple equal keys found in unpickled KeyData file"
        if msg:
            raise AssertionError(
                f"{msg}. Verify the __eq__ and __hash__ functions of your "
                f"Ops. The file is: {key_pkl}. The key is: {key}"
            )
        # Also verify that there exists no other loaded key that would be equal
        # to this key. In order to speed things up, we only compare to keys
        # with the same version part and config hash, since we can assume this
        # part of the key is not broken.
        for other in self.similar_keys.get(get_safe_part(key), []):
            if other is not key and other == key and hash(other) != hash(key):
                raise AssertionError(
                    "Found two keys that are equal but have a different hash. "
                    "Verify the __eq__ and __hash__ functions of your Ops. "
                    f"The keys are:\n {other}\nand\n {key}\n(found in {key_pkl})."
                )
        # Accumulate wall-clock time spent here for cache statistics.
        self.time_spent_in_check_key += time.time() - start_time
# default 31 days
age_thresh_del = config.cmodule__age_thresh_use + 60 * 60 * 24 * 7
age_thresh_del_unversioned = 60 * 60 * 24 * 7 # 7 days
"""
The default age threshold for `clear_old` (in seconds).
"""
    def clear_old(self, age_thresh_del=None, delete_if_problem=False):
        """Delete entries from the filesystem for cache entries that are too old.

        This refreshes the content of the cache. Don't hold the lock
        while calling this method, this is useless. It will be taken
        if needed.

        Parameters
        ----------
        age_thresh_del
            Dynamic modules whose last access time is more than
            ``age_thresh_del`` seconds ago will be erased.
            Defaults to 31-day age if not provided.
        delete_if_problem
            See help of refresh() method.
        """
        if age_thresh_del is None:
            age_thresh_del = self.age_thresh_del
        # Ensure that the too_old_to_use list return by refresh() will
        # contain all modules older than age_thresh_del.
        if age_thresh_del < self.age_thresh_use:
            if age_thresh_del > 0:
                _logger.warning(
                    "Clearing modules that were not deemed "
                    f"too old to use: age_thresh_del={age_thresh_del}, "
                    f"self.age_thresh_use={self.age_thresh_use}"
                )
            else:
                # A non-positive threshold means "everything is too old".
                _logger.info("Clearing all modules.")
            age_thresh_use = age_thresh_del
        else:
            age_thresh_use = None
        too_old_to_use = self.refresh(
            age_thresh_use=age_thresh_use,
            delete_if_problem=delete_if_problem,
            # The clean up is done at init, no need to trigger it again
            cleanup=False,
        )
        if not too_old_to_use:
            return
        with lock_ctx():
            # Update the age of modules that have been accessed by other
            # processes and get all module that are too old to use
            # (not loaded in self.entry_from_key).
            for entry in too_old_to_use:
                # TODO: we are assuming that modules that haven't been
                # accessed in over age_thresh_del are not currently in
                # use by other processes, but that could be false for
                # long-running jobs, or if age_thresh_del < 0.
                assert entry not in self.module_from_name
                parent = os.path.dirname(entry)
                assert parent.startswith(os.path.join(self.dirname, "tmp"))
                _rmtree(
                    parent,
                    msg="old cache directory",
                    level=logging.INFO,
                    ignore_nocleanup=True,
                )
    def clear(
        self, unversioned_min_age=None, clear_base_files=False, delete_if_problem=False
    ):
        """
        Clear all elements in the cache.

        Parameters
        ----------
        unversioned_min_age
            Forwarded to `clear_unversioned`. In particular, you can set it
            to -1 in order to delete all unversioned cached modules regardless
            of their age.
        clear_base_files : bool
            If True, then delete base directories 'cutils_ext',
            'lazylinker_ext' and 'scan_perform' if they are present.
            If False, those directories are left intact.
        delete_if_problem
            See help of refresh() method.
        """
        with lock_ctx():
            # A negative age threshold makes every versioned module "too old",
            # so clear_old removes all of them.
            self.clear_old(age_thresh_del=-1.0, delete_if_problem=delete_if_problem)
            self.clear_unversioned(min_age=unversioned_min_age)
            if clear_base_files:
                self.clear_base_files()
    def clear_base_files(self):
        """
        Remove base directories 'cutils_ext', 'lazylinker_ext' and
        'scan_perform' if present.

        Note that we do not delete them outright because it may not work on
        some systems due to these modules being currently in use. Instead we
        rename them with the '.delete.me' extension, to mark them to be deleted
        next time we clear the cache.
        """
        with lock_ctx():
            for base_dir in ("cutils_ext", "lazylinker_ext", "scan_perform"):
                # First try to remove a previously marked directory; if that
                # fails we skip renaming the current one into its place.
                to_delete = os.path.join(self.dirname, base_dir + ".delete.me")
                if os.path.isdir(to_delete):
                    try:
                        shutil.rmtree(to_delete)
                        _logger.debug(f"Deleted: {to_delete}")
                    except Exception:
                        _logger.warning(f"Could not delete {to_delete}")
                        continue
                to_rename = os.path.join(self.dirname, base_dir)
                if os.path.isdir(to_rename):
                    try:
                        shutil.move(to_rename, to_delete)
                    except Exception:
                        _logger.warning(f"Could not move {to_rename} to {to_delete}")
    def clear_unversioned(self, min_age=None):
        """Delete unversioned dynamic modules.

        They are deleted both from the internal dictionaries and from the
        filesystem.

        No need to have the lock when calling this method. It does not
        take the lock as unversioned module aren't shared.

        This method does not refresh the cache content, it just
        accesses the in-memory known module(s).

        Parameters
        ----------
        min_age
            Minimum age to be deleted, in seconds. Defaults to
            7-day age if not provided.
        """
        if min_age is None:
            min_age = self.age_thresh_del_unversioned
        # As this delete object that we build and other don't use, we
        # don't need the lock.
        all_key_datas = list(self.module_hash_to_key_data.values())
        for key_data in all_key_datas:
            if not key_data.keys:
                # May happen for broken versioned keys.
                continue
            for key_idx, key in enumerate(key_data.keys):
                # A key is a (version, rest) pair; an empty/falsy version
                # marks an unversioned module.
                version, rest = key
                if version:
                    # Since the version is included in the module hash,
                    # it should not be possible to mix versioned and
                    # unversioned keys in the same KeyData object.
                    assert key_idx == 0
                    break
            if not version:
                # Note that unversioned keys cannot be broken, so we can
                # set do_manual_check to False to speed things up.
                key_data.delete_keys_from(self.entry_from_key, do_manual_check=False)
                entry = key_data.get_entry()
                # Entry is guaranteed to be in this dictionary, because
                # an unversioned entry should never have been loaded via
                # refresh.
                assert entry in self.module_from_name
                del self.module_from_name[entry]
                del self.module_hash_to_key_data[key_data.module_hash]
                parent = os.path.dirname(entry)
                assert parent.startswith(os.path.join(self.dirname, "tmp"))
                _rmtree(
                    parent, msg="unversioned", level=logging.INFO, ignore_nocleanup=True
                )
        # Sanity check: all unversioned keys should have been removed at
        # this point.
        for key in self.entry_from_key:
            assert key[0]
        to_del = []
        time_now = time.time()
        for filename in os.listdir(self.dirname):
            if filename.startswith("tmp"):
                try:
                    fname = os.path.join(self.dirname, filename, "key.pkl")
                    # Cheap existence/readability probe for key.pkl.
                    open(fname).close()
                    has_key = True
                except OSError:
                    has_key = False
                if not has_key:
                    # Use the compiled file by default
                    path = module_name_from_dir(
                        os.path.join(self.dirname, filename), False
                    )
                    # If it don't exist, use any file in the directory.
                    if path is None:
                        path = os.path.join(self.dirname, filename)
                        try:
                            files = os.listdir(path)
                        except OSError as e:
                            if e.errno == 2:  # No such file or directory
                                # if it don't exist anymore, it mean
                                # the clean up was already done by
                                # someone else, so nothing to do about
                                # it.
                                continue
                        if files:
                            path = os.path.join(path, files[0])
                        else:
                            # If the directory is empty skip it.
                            # They are deleted elsewhere.
                            continue
                    age = time_now - last_access_time(path)
                    # In normal case, the processus that created this
                    # directory will delete it. However, if this processus
                    # crashes, it will not be cleaned up.
                    # As we don't know if this directory is still used,
                    # we wait one week and suppose that the processus
                    # crashed, and we take care of the clean-up.
                    if age > min_age:
                        to_del.append(os.path.join(self.dirname, filename))
        # No need to take the lock as it isn't shared.
        for f in to_del:
            _rmtree(f, msg="old unversioned", level=logging.INFO, ignore_nocleanup=True)
    def _on_atexit(self):
        """Clean-up hook run at interpreter exit (registered via `atexit`)."""
        # Note: no need to call refresh() since it is called by clear_old().
        # Note: no need to take the lock. For unversioned files, we
        # don't need it as they aren't shared. For old unversioned
        # files, this happen rarely, so we take the lock only when
        # this happen.
        # Note: for clear_old(), as this happen unfrequently, we only
        # take the lock when it happen.
        self.clear_old()
        self.clear_unversioned()
        _logger.debug(f"Time spent checking keys: {self.time_spent_in_check_key}")
def _rmtree(
parent, ignore_nocleanup=False, msg="", level=logging.DEBUG, ignore_if_missing=False
):
"""
On NFS filesystems, it is impossible to delete a directory with open
files in it.
So instead, some commands in this file will respond to a
failed rmtree() by touching a 'delete.me' file. This file is a message
for a future process to try deleting the directory.
Parameters:
----------
parent
Root node to start deleting from
ignore_nocleanup
Delete the tree if flag is TRUE
level
Python Logging level. Set to "DEBUG" by default
ignore_if_missing
If set to True, just return without any issue if parent is NULL
"""
if ignore_if_missing and not os.path.exists(parent):
return
try:
if ignore_nocleanup or not config.nocleanup:
log_msg = "Deleting"
if msg:
log_msg += f" ({msg})"
_logger.log(level, f"{log_msg}: {parent}")
shutil.rmtree(parent)
except Exception as e:
# If parent still exists, mark it for deletion by a future refresh()
_logger.debug(f"In _rmtree, encountered exception: {type(e)}({e})")
if os.path.exists(parent):
try:
_logger.info(f'placing "delete.me" in {parent}')
open(os.path.join(parent, "delete.me"), "w").close()
except Exception as ee:
_logger.warning(
f"Failed to remove or mark cache directory {parent} for removal {ee}"
)
_module_cache = None  # Process-wide singleton ModuleCache instance.
def get_module_cache(dirname, init_args=None):
    """
    Return the singleton `ModuleCache`, creating it on first call.

    Parameters
    ----------
    dirname
        Directory for the cache. Only honored when the cache is first
        created; later calls with a different `dirname` log a warning and
        return the existing cache.
    init_args
        If not None, the (k, v) pairs in this dictionary will be forwarded to
        the ModuleCache constructor as keyword arguments. Ignored (with a
        warning) when the cache already exists.
    """
    global _module_cache
    if init_args is None:
        init_args = {}
    if _module_cache is None:
        _module_cache = ModuleCache(dirname, **init_args)
        # Ensure the cache cleans up after itself at interpreter exit.
        atexit.register(_module_cache._on_atexit)
    elif init_args:
        _logger.warning(
            "Ignoring init arguments for module cache because it "
            "was created prior to this call"
        )
    if _module_cache.dirname != dirname:
        _logger.warning(
            "Returning module cache instance with different "
            f"dirname ({_module_cache.dirname}) than you requested ({dirname})"
        )
    return _module_cache
def get_lib_extension():
    """
    Return the platform-dependent extension for compiled modules.
    """
    # "pyd" on native Windows, "dll" under Cygwin, "so" everywhere else.
    extension_by_platform = {"win32": "pyd", "cygwin": "dll"}
    return extension_by_platform.get(sys.platform, "so")
def get_gcc_shared_library_arg():
    """
    Return the platform-dependent GCC argument for shared libraries.
    """
    # macOS uses -dynamiclib; every other platform uses -shared.
    return "-dynamiclib" if sys.platform == "darwin" else "-shared"
def std_include_dirs():
    """
    Return the standard C include directories: NumPy headers, Python
    headers, and the ``c_code`` directory that sits next to this file.
    """
    # NOTE(review): numpy.distutils is deprecated and absent in recent
    # NumPy on Python >= 3.12 — confirm availability on upgrade.
    numpy_inc_dirs = numpy.distutils.misc_util.get_numpy_include_dirs()
    py_inc = distutils.sysconfig.get_python_inc()
    py_plat_spec_inc = distutils.sysconfig.get_python_inc(plat_specific=True)
    # Avoid listing the same directory twice when the platform-specific
    # include dir equals the generic one.
    python_inc_dirs = (
        [py_inc] if py_inc == py_plat_spec_inc else [py_inc, py_plat_spec_inc]
    )
    gof_inc_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "c_code")
    return numpy_inc_dirs + python_inc_dirs + [gof_inc_dir]
def std_lib_dirs_and_libs():
    """
    Return a ``(libraries, library_dirs)`` pair needed to link compiled
    modules against the Python runtime on the current platform.

    The result is memoized on the function attribute
    ``std_lib_dirs_and_libs.data``.
    """
    # We cache the results as on Windows, this trigger file access and
    # this method is called many times.
    if std_lib_dirs_and_libs.data is not None:
        return std_lib_dirs_and_libs.data
    python_inc = distutils.sysconfig.get_python_inc()
    if sys.platform == "win32":
        # Obtain the library name from the Python version instead of the
        # installation directory, in case the user defined a custom
        # installation directory.
        python_version = distutils.sysconfig.get_python_version()
        libname = "python" + python_version.replace(".", "")
        # Also add directory containing the Python library to the library
        # directories.
        python_lib_dirs = [os.path.join(os.path.dirname(python_inc), "libs")]
        if "Canopy" in python_lib_dirs[0]:
            # Canopy stores libpython27.a and libmsccr90.a in this directory.
            # For some reason, these files are needed when compiling Python
            # modules, even when libpython27.lib and python27.dll are
            # available, and the *.a files have to be found earlier than
            # the other ones.
            # When Canopy is installed for the user:
            # sys.prefix:C:\Users\username\AppData\Local\Enthought\Canopy\User
            # sys.base_prefix:C:\Users\username\AppData\Local\Enthought\Canopy\App\appdata\canopy-1.1.0.1371.win-x86_64
            # When Canopy is installed for all users:
            # sys.base_prefix: C:\Program Files\Enthought\Canopy\App\appdata\canopy-1.1.0.1371.win-x86_64
            # sys.prefix: C:\Users\username\AppData\Local\Enthought\Canopy\User
            # So we need to use sys.prefix as it support both cases.
            # sys.base_prefix support only one case
            libdir = os.path.join(sys.prefix, "libs")
            for f, lib in [("libpython27.a", "libpython 1.2")]:
                if not os.path.exists(os.path.join(libdir, f)):
                    print(
                        "Your Python version is from Canopy. "
                        + "You need to install the package '"
                        + lib
                        + "' from Canopy package manager."
                    )
            libdirs = [
                # Used in older Canopy
                os.path.join(sys.prefix, "libs"),
                # Used in newer Canopy
                os.path.join(sys.prefix, r"EGG-INFO\mingw\usr\x86_64-w64-mingw32\lib"),
            ]
            for f, lib in [
                ("libmsvcr90.a", "mingw 4.5.2 or 4.8.1-2 (newer could work)")
            ]:
                if not any(
                    [
                        os.path.exists(os.path.join(tmp_libdir, f))
                        for tmp_libdir in libdirs
                    ]
                ):
                    print(
                        "Your Python version is from Canopy. "
                        + "You need to install the package '"
                        + lib
                        + "' from Canopy package manager."
                    )
            python_lib_dirs.insert(0, libdir)
        std_lib_dirs_and_libs.data = [libname], python_lib_dirs
    # Suppress -lpython2.x on OS X since the `-undefined dynamic_lookup`
    # makes it unnecessary.
    elif sys.platform == "darwin":
        std_lib_dirs_and_libs.data = [], []
    else:
        if platform.python_implementation() == "PyPy":
            # Assume Linux (note: Ubuntu doesn't ship this .so)
            libname = "pypy3-c"
            # Unfortunately the only convention of this .so is that it appears
            # next to the location of the interpreter binary.
            libdir = os.path.dirname(os.path.realpath(sys.executable))
        else:
            # Assume Linux
            # Typical include directory: /usr/include/python2.6
            # get the name of the python library (shared object)
            libname = distutils.sysconfig.get_config_var("LDLIBRARY")
            if libname.startswith("lib"):
                libname = libname[3:]
            # remove extension if present
            if libname.endswith(".so"):
                libname = libname[:-3]
            elif libname.endswith(".a"):
                libname = libname[:-2]
            libdir = distutils.sysconfig.get_config_var("LIBDIR")
        std_lib_dirs_and_libs.data = [libname], [libdir]
    # sometimes, the linker cannot find -lpython so we need to tell it
    # explicitly where it is located this returns
    # somepath/lib/python2.x
    python_lib = distutils.sysconfig.get_python_lib(plat_specific=1, standard_lib=1)
    python_lib = os.path.dirname(python_lib)
    if python_lib not in std_lib_dirs_and_libs.data[1]:
        std_lib_dirs_and_libs.data[1].append(python_lib)
    return std_lib_dirs_and_libs.data
# Memoization slot for std_lib_dirs_and_libs (filled on first call).
std_lib_dirs_and_libs.data = None
def std_libs():
    """Return the standard libraries to link with (see `std_lib_dirs_and_libs`)."""
    return std_lib_dirs_and_libs()[0]
def std_lib_dirs():
    """Return the standard library search dirs (see `std_lib_dirs_and_libs`)."""
    return std_lib_dirs_and_libs()[1]
def gcc_version():
    """Return the detected compiler version string (module-level `gcc_version_str`)."""
    return gcc_version_str
def gcc_llvm():
    """
    Detect if the g++ version used is the llvm one or not.

    It don't support all g++ parameters even if it support many of them.

    Returns
    -------
    bool
        True when ``config.cxx --version`` output mentions "llvm".
        Memoized on the function attribute ``gcc_llvm.is_llvm``.
    """
    if gcc_llvm.is_llvm is None:
        try:
            p_out = output_subprocess_Popen([config.cxx, "--version"])
            # Search both stdout and stderr of the compiler.
            output = p_out[0] + p_out[1]
        except OSError:
            # Typically means g++ cannot be found.
            # So it is not an llvm compiler.
            # Normally this should not happen as we should not try to
            # compile when g++ is not available. If this happen, it
            # will crash later so supposing it is not llvm is "safe".
            output = b""
        gcc_llvm.is_llvm = b"llvm" in output
    return gcc_llvm.is_llvm
# Memoization slot for gcc_llvm (None = not yet determined).
gcc_llvm.is_llvm = None
class Compiler:
    """
    Meta compiler that offer some generic function.
    """
    @classmethod
    def _try_compile_tmp(
        cls,
        src_code,
        tmp_prefix="",
        flags=(),
        try_run=False,
        output=False,
        compiler=None,
        comp_args=True,
    ):
        """
        Try to compile (and run) a test program.

        This is useful in various occasions, to check if libraries
        or compilers are behaving as expected.

        If try_run is True, the src_code is assumed to be executable,
        and will be run.

        If try_run is False, returns the compilation status.
        If try_run is True, returns a (compile_status, run_status) pair.
        If output is there, we append the stdout and stderr to the output.

        Compile arguments from the Compiler's compile_args() method are added
        if comp_args=True.
        """
        if not compiler:
            # No compiler configured: report failure without trying.
            return False
        flags = list(flags)
        # Get compile arguments from compiler method if required
        if comp_args:
            args = cls.compile_args()
        else:
            args = []
        compilation_ok = True
        run_ok = False
        out, err = None, None
        try:
            # Write the source to a temporary .c file; the executable gets
            # the same path without the ".c" suffix.
            fd, path = tempfile.mkstemp(suffix=".c", prefix=tmp_prefix)
            exe_path = path[:-2]
            if os.name == "nt":
                path = '"' + path + '"'
                exe_path = '"' + exe_path + '"'
            try:
                try:
                    src_code = src_code.encode()
                except AttributeError:  # src_code was already bytes
                    pass
                os.write(fd, src_code)
                os.close(fd)
                # Mark the descriptor as already closed so the cleanup in
                # the finally block does not close it a second time.
                fd = None
                out, err, p_ret = output_subprocess_Popen(
                    [compiler] + args + [path, "-o", exe_path] + flags
                )
                if p_ret != 0:
                    compilation_ok = False
                elif try_run:
                    out, err, p_ret = output_subprocess_Popen([exe_path])
                    run_ok = p_ret == 0
            finally:
                # Always close the descriptor (if still open) and remove
                # the temporary source and executable files.
                try:
                    if fd is not None:
                        os.close(fd)
                finally:
                    if os.path.exists(path):
                        os.remove(path)
                    if os.path.exists(exe_path):
                        os.remove(exe_path)
                    if os.path.exists(exe_path + ".exe"):
                        os.remove(exe_path + ".exe")
        except OSError as e:
            # Failure to create/write the temp file counts as a failed
            # compilation; keep any compiler output already collected.
            if err is None:
                err = str(e)
            else:
                err = str(err) + "\n" + str(e)
            compilation_ok = False
        if not try_run and not output:
            return compilation_ok
        elif not try_run and output:
            return (compilation_ok, out, err)
        elif not output:
            return (compilation_ok, run_ok)
        else:
            return (compilation_ok, run_ok, out, err)
    @classmethod
    def _try_flags(
        cls,
        flag_list,
        preambule="",
        body="",
        try_run=False,
        output=False,
        compiler=None,
        comp_args=True,
    ):
        """
        Try to compile a dummy file with these flags.

        Returns True if compilation was successful, False if there
        were errors.

        Compile arguments from the Compiler's compile_args() method are added
        if comp_args=True.
        """
        if not compiler:
            return False
        # Wrap the caller's preamble and body into a minimal C program.
        code = (
            """
        %(preambule)s
        int main(int argc, char** argv)
        {
            %(body)s
            return 0;
        }
        """
            % locals()
        ).encode()
        return cls._try_compile_tmp(
            code,
            tmp_prefix="try_flags_",
            flags=flag_list,
            try_run=try_run,
            output=output,
            compiler=compiler,
            comp_args=comp_args,
        )
def try_blas_flag(flags):
    """
    Check whether `flags` allow linking and running a call to BLAS ``ddot_``.

    Returns the flags joined into one space-separated string when the test
    program compiles and runs successfully, otherwise the empty string.
    """
    test_code = textwrap.dedent(
        """\
        extern "C" double ddot_(int*, double*, int*, double*, int*);
        int main(int argc, char** argv)
        {
            int Nx = 5;
            int Sx = 1;
            double x[5] = {0, 1, 2, 3, 4};
            double r = ddot_(&Nx, x, &Sx, x, &Sx);
            if ((r - 30.) > 1e-6 || (r - 30.) < -1e-6)
            {
                return -1;
            }
            return 0;
        }
        """
    )
    compile_flags = list(flags)
    # to support path that includes spaces, we need to wrap it with double quotes on Windows
    quote = '"' if os.name == "nt" else ""
    for lib_dir in std_lib_dirs():
        compile_flags.append(f"-L{quote}{lib_dir}{quote}")
    res = GCC_compiler.try_compile_tmp(
        test_code, tmp_prefix="try_blas_", flags=compile_flags, try_run=True
    )
    # res[0]: shows successful compilation
    # res[1]: shows successful execution
    if res and res[0] and res[1]:
        return " ".join(flags)
    return ""
def try_march_flag(flags):
    """
    Try to compile and run a simple C snippet using current flags.

    Returns
    -------
    (bool, bool)
        Compilation success and execution success.
    """
    test_code = textwrap.dedent(
        """\
        #include <cmath>
        using namespace std;
        int main(int argc, char** argv)
        {
            float Nx = -1.3787706641;
            float Sx = 25.0;
            double r = Nx + sqrt(Sx);
            if (abs(r - 3.621229) > 0.01)
            {
                return -1;
            }
            return 0;
        }
        """
    )
    link_flags = ["-L" + d for d in aesara.link.c.cmodule.std_lib_dirs()]
    # try_compile_tmp with try_run=True returns (compile_ok, run_ok).
    return GCC_compiler.try_compile_tmp(
        test_code, tmp_prefix="try_march_", flags=flags + link_flags, try_run=True
    )
class GCC_compiler(Compiler):
    # The equivalent flags of --march=native used by g++.
    # None until detected by compile_args(); then a (possibly empty) list.
    march_flags = None
    # NOTE(review): presumably toggled off when amdlibm is unavailable;
    # the flag's consumers are outside this chunk — confirm before relying
    # on it.
    supports_amdlibm = True
    @staticmethod
    def version_str():
        """Return the compiler command followed by the detected version string."""
        return config.cxx + " " + gcc_version_str
    @staticmethod
    def compile_args(march_flags=True):
        """
        Return the list of g++ flags used to compile Aesara C modules.

        Starts from ``config.gcc__cxxflags``, then (once per process)
        detects the flags equivalent to ``-march=native`` and caches them
        in ``GCC_compiler.march_flags``, and finally appends
        platform-specific flags (bitness, -fPIC, macOS dynamic lookup,
        Windows defines).

        Parameters
        ----------
        march_flags : bool
            If False, skip adding the ``-march=native``-equivalent flags.
        """
        cxxflags = [flag for flag in config.gcc__cxxflags.split(" ") if flag]
        if "-fopenmp" in cxxflags:
            raise ValueError(
                "Do not use -fopenmp in Aesara flag gcc__cxxflags."
                " To enable OpenMP, use the Aesara flag openmp=True"
            )
        # Add the equivalent of -march=native flag. We can't use
        # -march=native as when the compiledir is shared by multiple
        # computers (for example, if the home directory is on NFS), this
        # won't be optimum or cause crash depending if the file is compiled
        # on an older or more recent computer.
        # Those URL discuss how to find witch flags are used by -march=native.
        # http://en.gentoo-wiki.com/wiki/Safe_Cflags#-march.3Dnative
        # http://en.gentoo-wiki.com/wiki/Hardware_CFLAGS
        detect_march = GCC_compiler.march_flags is None and march_flags
        if detect_march:
            for f in cxxflags:
                # If the user give an -march=X parameter, don't add one ourself
                if f.startswith("--march=") or f.startswith("-march="):
                    detect_march = False
                    GCC_compiler.march_flags = []
                    break
            if (
                "g++" not in config.cxx
                and "clang++" not in config.cxx
                and "clang-omp++" not in config.cxx
                and "icpc" not in config.cxx
            ):
                _logger.warning(
                    "Your Aesara flag `cxx` seems not to be"
                    " the g++ compiler. So we disable the compiler optimization"
                    " specific to g++ that tell to compile for a specific CPU."
                    " At worst, this could cause slow down.\n"
                    "         You can add those parameters to the compiler yourself"
                    " via the Aesara flag `gcc__cxxflags`."
                )
                detect_march = False
        if detect_march:
            GCC_compiler.march_flags = []
            def get_lines(cmd, parse=True):
                # Run `cmd` through the shell and collect its stdout+stderr.
                p = subprocess_Popen(
                    cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    stdin=subprocess.PIPE,
                    shell=True,
                )
                # For mingw64 with GCC >= 4.7, passing os.devnull
                # as stdin (which is the default) results in the process
                # waiting forever without returning. For that reason,
                # we use a pipe, and use the empty string as input.
                (stdout, stderr) = p.communicate(input=b"")
                if p.returncode != 0:
                    return None
                lines = BytesIO(stdout + stderr).readlines()
                lines = (l.decode() for l in lines)
                if parse:
                    selected_lines = []
                    for line in lines:
                        if (
                            "COLLECT_GCC_OPTIONS=" in line
                            or "CFLAGS=" in line
                            or "CXXFLAGS=" in line
                            or "-march=native" in line
                        ):
                            continue
                        for reg in ["-march=", "-mtune=", "-target-cpu", "-mabi="]:
                            if reg in line:
                                selected_lines.append(line.strip())
                    lines = list(set(selected_lines))  # to remove duplicate
                return lines
            # The '-' at the end is needed. Otherwise, g++ do not output
            # enough information.
            native_lines = get_lines(f"{config.cxx} -march=native -E -v -")
            if native_lines is None:
                _logger.info(
                    "Call to 'g++ -march=native' failed," "not setting -march flag"
                )
                detect_march = False
            else:
                _logger.info(f"g++ -march=native selected lines: {native_lines}")
        if detect_march:
            if len(native_lines) != 1:
                if len(native_lines) == 0:
                    # That means we did not select the right lines, so
                    # we have to report all the lines instead
                    reported_lines = get_lines(
                        f"{config.cxx} -march=native -E -v -", parse=False
                    )
                else:
                    reported_lines = native_lines
                _logger.warning(
                    "Aesara was not able to find the"
                    " g++ parameters that tune the compilation to your "
                    " specific CPU. This can slow down the execution of Aesara"
                    " functions. Please submit the following lines to"
                    " Aesara's mailing list so that we can fix this"
                    f" problem:\n {reported_lines}"
                )
            else:
                default_lines = get_lines(f"{config.cxx} -E -v -")
                _logger.info(f"g++ default lines: {default_lines}")
                if len(default_lines) < 1:
                    _logger.warning(
                        "Aesara was not able to find the"
                        " default g++ parameters. This is needed to tune"
                        " the compilation to your specific"
                        " CPU. This can slow down the execution of Aesara"
                        " functions. Please submit the following lines to"
                        " Aesara's mailing list so that we can fix this"
                        " problem:\n %s",
                        get_lines(f"{config.cxx} -E -v -", parse=False),
                    )
                else:
                    # Some options are actually given as "-option value",
                    # we want to treat them as only one token when comparing
                    # different command lines.
                    # Heuristic: tokens not starting with a dash should be
                    # joined with the previous one.
                    def join_options(init_part):
                        new_part = []
                        for i in range(len(init_part)):
                            p = init_part[i]
                            if p.startswith("-"):
                                p_list = [p]
                                while (i + 1 < len(init_part)) and not init_part[
                                    i + 1
                                ].startswith("-"):
                                    # append that next part to p_list
                                    p_list.append(init_part[i + 1])
                                    i += 1
                                new_part.append(" ".join(p_list))
                            elif i == 0:
                                # The first argument does not usually start
                                # with "-", still add it
                                new_part.append(p)
                            # Else, skip it, as it was already included
                            # with the previous part.
                        return new_part
                    part = join_options(native_lines[0].split())
                    for line in default_lines:
                        if line.startswith(part[0]):
                            part2 = [
                                p
                                for p in join_options(line.split())
                                if (
                                    "march" not in p
                                    and "mtune" not in p
                                    and "target-cpu" not in p
                                )
                            ]
                            if sys.platform == "darwin":
                                # We only use translated target-cpu on
                                # mac since the other flags are not
                                # supported as compiler flags for the
                                # driver.
                                new_flags = [p for p in part if "target-cpu" in p]
                            else:
                                new_flags = [p for p in part if p not in part2]
                            # Replace '-target-cpu value', which is an option
                            # of clang, with '-march=value'.
                            for i, p in enumerate(new_flags):
                                if "target-cpu" in p:
                                    opt = p.split()
                                    if len(opt) == 2:
                                        opt_name, opt_val = opt
                                        new_flags[i] = f"-march={opt_val}"
                            # Some versions of GCC report the native arch
                            # as "corei7-avx", but it generates illegal
                            # instructions, and should be "corei7" instead.
                            # Affected versions are:
                            # - 4.6 before 4.6.4
                            # - 4.7 before 4.7.3
                            # - 4.8 before 4.8.1
                            # Earlier versions did not have arch "corei7-avx"
                            for i, p in enumerate(new_flags):
                                if "march" not in p:
                                    continue
                                opt = p.split("=")
                                if len(opt) != 2:
                                    # Inexpected, but do not crash
                                    continue
                                opt_val = opt[1]
                                if not opt_val.endswith("-avx"):
                                    # OK
                                    continue
                                # Check the version of GCC
                                version = gcc_version_str.split(".")
                                if len(version) != 3:
                                    # Unexpected, but should not be a problem
                                    continue
                                mj, mn, patch = [int(vp) for vp in version]
                                if (
                                    ((mj, mn) == (4, 6) and patch < 4)
                                    or ((mj, mn) == (4, 7) and patch <= 3)
                                    or ((mj, mn) == (4, 8) and patch < 1)
                                ):
                                    # NOTE(review): str.rstrip strips a
                                    # trailing *character set* ("-", "a",
                                    # "v", "x"), not the literal suffix.
                                    # It yields the intended "...corei7"
                                    # here only because the preceding
                                    # character ("7") is not in that set.
                                    new_flags[i] = p.rstrip("-avx")
                            # Go back to split arguments, like
                            # ["-option", "value"],
                            # as this is the way g++ expects them split.
                            split_flags = []
                            for p in new_flags:
                                split_flags.extend(p.split())
                            GCC_compiler.march_flags = split_flags
                            break
            _logger.info(
                f"g++ -march=native equivalent flags: {GCC_compiler.march_flags}"
            )
            # Find working march flag:
            # -- if current GCC_compiler.march_flags works, we're done.
            # -- else replace -march and -mtune with ['core-i7-avx', 'core-i7', 'core2']
            #    and retry with all other flags and arguments intact.
            # -- else remove all other flags and only try with -march = default + flags_to_try.
            # -- if none of that worked, set GCC_compiler.march_flags = [] (for x86).
            default_compilation_result, default_execution_result = try_march_flag(
                GCC_compiler.march_flags
            )
            if not default_compilation_result or not default_execution_result:
                march_success = False
                march_ind = None
                mtune_ind = None
                default_detected_flag = []
                march_flags_to_try = ["corei7-avx", "corei7", "core2"]
                # Locate the -march/-mtune entries in the detected flags so
                # they can be substituted in place.
                for m_ in range(len(GCC_compiler.march_flags)):
                    march_flag = GCC_compiler.march_flags[m_]
                    if "march" in march_flag:
                        march_ind = m_
                        default_detected_flag = [march_flag]
                    elif "mtune" in march_flag:
                        mtune_ind = m_
                for march_flag in march_flags_to_try:
                    if march_ind is not None:
                        GCC_compiler.march_flags[march_ind] = "-march=" + march_flag
                    if mtune_ind is not None:
                        GCC_compiler.march_flags[mtune_ind] = "-mtune=" + march_flag
                    compilation_result, execution_result = try_march_flag(
                        GCC_compiler.march_flags
                    )
                    if compilation_result and execution_result:
                        march_success = True
                        break
                if not march_success:
                    # perhaps one of the other flags was problematic; try default flag in isolation again:
                    march_flags_to_try = default_detected_flag + march_flags_to_try
                    for march_flag in march_flags_to_try:
                        compilation_result, execution_result = try_march_flag(
                            ["-march=" + march_flag]
                        )
                        if compilation_result and execution_result:
                            march_success = True
                            GCC_compiler.march_flags = ["-march=" + march_flag]
                            break
                if not march_success:
                    GCC_compiler.march_flags = []
        # Add the detected -march=native equivalent flags
        if march_flags and GCC_compiler.march_flags:
            cxxflags.extend(GCC_compiler.march_flags)
        # NumPy 1.7 Deprecate the old API.
        # The following macro asserts that we don't bring new code
        # that use the old API.
        cxxflags.append("-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION")
        # Platform-specific flags.
        # We put them here, rather than in compile_str(), so they en up
        # in the key of the compiled module, avoiding potential conflicts.
        # Figure out whether the current Python executable is 32
        # or 64 bit and compile accordingly. This step is ignored for
        # ARM (32-bit and 64-bit) architectures in order to make
        # Aesara compatible with the Raspberry Pi, Raspberry Pi 2, or
        # other systems with ARM processors.
        if not any(["arm" in flag for flag in cxxflags]) and not any(
            arch in platform.machine() for arch in ["arm", "aarch"]
        ):
            n_bits = LOCAL_BITWIDTH
            cxxflags.append(f"-m{int(n_bits)}")
            _logger.debug(f"Compiling for {n_bits} bit architecture")
        if sys.platform != "win32":
            # Under Windows it looks like fPIC is useless. Compiler warning:
            # '-fPIC ignored for target (all code is position independent)'
            cxxflags.append("-fPIC")
        if sys.platform == "win32" and LOCAL_BITWIDTH == 64:
            # Under 64-bit Windows installation, sys.platform is 'win32'.
            # We need to define MS_WIN64 for the preprocessor to be able to
            # link with libpython.
            cxxflags.append("-DMS_WIN64")
        if sys.platform == "darwin":
            # Use the already-loaded python symbols.
            cxxflags.extend(["-undefined", "dynamic_lookup"])
        if sys.platform == "win32":
            # Workaround for https://github.com/Theano/Theano/issues/4926.
            # https://github.com/python/cpython/pull/11283/ removed the "hypot"
            # redefinition for recent CPython versions (>=2.7.16 and >=3.7.3).
            # The following nullifies that redefinition, if it is found.
            python_version = sys.version_info[:3]
            if (3,) <= python_version < (3, 7, 3):
                config_h_filename = distutils.sysconfig.get_config_h_filename()
                try:
                    with open(config_h_filename) as config_h:
                        if any(
                            line.startswith("#define hypot _hypot") for line in config_h
                        ):
                            cxxflags.append("-D_hypot=hypot")
                except OSError:
                    pass
        return cxxflags
    @classmethod
    def try_compile_tmp(
        cls,
        src_code,
        tmp_prefix="",
        flags=(),
        try_run=False,
        output=False,
        comp_args=True,
    ):
        """Compile (and optionally run) `src_code` with ``config.cxx``.

        Thin wrapper around `Compiler._try_compile_tmp`; see it for the
        meaning of the parameters and return values.
        """
        return cls._try_compile_tmp(
            src_code, tmp_prefix, flags, try_run, output, config.cxx, comp_args
        )
    @classmethod
    def try_flags(
        cls,
        flag_list,
        preambule="",
        body="",
        try_run=False,
        output=False,
        comp_args=True,
    ):
        """Try compiling a dummy program with `flag_list` using ``config.cxx``.

        Thin wrapper around `Compiler._try_flags`; see it for the meaning
        of the parameters and return values.
        """
        return cls._try_flags(
            flag_list, preambule, body, try_run, output, config.cxx, comp_args
        )
@staticmethod
def compile_str(
    module_name,
    src_code,
    location=None,
    include_dirs=None,
    lib_dirs=None,
    libs=None,
    preargs=None,
    py_module=True,
    hide_symbols=True,
):
    """Compile C/C++ source into a shared library and (optionally) import it.

    Parameters
    ----------
    module_name : str
        This has been embedded in the src_code.
    src_code
        A complete c or c++ source listing for the module.
    location
        A pre-existing filesystem directory where the cpp file and .so will
        be written.
    include_dirs
        A list of include directory names (each gets prefixed with -I).
    lib_dirs
        A list of library search path directory names (each gets prefixed
        with -L).
    libs
        A list of libraries to link with (each gets prefixed with -l).
    preargs
        A list of extra compiler arguments.
    py_module
        If False, compile to a shared library, but do not import it as a
        Python module.
    hide_symbols
        If True (the default) all symbols will be hidden from the library
        symbol table (which means that other objects can't use them).

    Returns
    -------
    object
        Dynamically-imported python module of the compiled code (unless
        py_module is False, in that case returns None).

    Raises
    ------
    MissingGXX
        If no C++ compiler is configured (``config.cxx`` is falsy).
    Exception
        If the compiler exits with a non-zero status; the compiler's stderr
        is included in the message and a line-numbered copy of the source
        is written to a temporary file for inspection.
    """
    # TODO: Do not do the dlimport in this function

    if not config.cxx:
        raise MissingGXX("g++ not available! We can't compile c code.")

    # Normalize all optional list arguments to concrete lists so the
    # code below can concatenate/iterate without None checks.
    if include_dirs is None:
        include_dirs = []
    if lib_dirs is None:
        lib_dirs = []
    if libs is None:
        libs = []
    if preargs is None:
        preargs = []

    # Remove empty string directory
    include_dirs = [d for d in include_dirs if d]
    lib_dirs = [d for d in lib_dirs if d]

    # Append the standard search paths/libraries after the caller-supplied
    # ones so caller entries take precedence on the command line.
    include_dirs = include_dirs + std_include_dirs()
    libs = libs + std_libs()
    lib_dirs = lib_dirs + std_lib_dirs()

    # Write the source to <location>/mod.cpp before invoking the compiler.
    cppfilename = os.path.join(location, "mod.cpp")
    with open(cppfilename, "w") as cppfile:

        _logger.debug(f"Writing module C++ code to {cppfilename}")

        cppfile.write(src_code)
        # Avoid gcc warning "no newline at end of file".
        if not src_code.endswith("\n"):
            cppfile.write("\n")

    if platform.python_implementation() == "PyPy":
        # PyPy reports its own extension suffix via sysconfig; prefer it
        # over our default when available.
        suffix = "." + get_lib_extension()

        dist_suffix = distutils.sysconfig.get_config_var("SO")
        if dist_suffix is not None and dist_suffix != "":
            suffix = dist_suffix

        filepath = f"{module_name}{suffix}"
    else:
        filepath = f"{module_name}.{get_lib_extension()}"

    lib_filename = os.path.join(location, filepath)

    _logger.debug(f"Generating shared lib {lib_filename}")

    # Build the compiler command line piece by piece.
    cmd = [config.cxx, get_gcc_shared_library_arg(), "-g"]

    if config.cmodule__remove_gxx_opt:
        # Strip any optimization flags (-O...) from the preargs.
        cmd.extend(p for p in preargs if not p.startswith("-O"))
    else:
        cmd.extend(preargs)
    # to support path that includes spaces, we need to wrap it with double quotes on Windows
    path_wrapper = '"' if os.name == "nt" else ""
    cmd.extend([f"-I{path_wrapper}{idir}{path_wrapper}" for idir in include_dirs])
    cmd.extend([f"-L{path_wrapper}{ldir}{path_wrapper}" for ldir in lib_dirs])
    if hide_symbols and sys.platform != "win32":
        # This has been available since gcc 4.0 so we suppose it
        # is always available. We pass it here since it
        # significantly reduces the size of the symbol table for
        # the objects we want to share. This in turns leads to
        # improved loading times on most platforms (win32 is
        # different, as usual).
        cmd.append("-fvisibility=hidden")
    cmd.extend(["-o", f"{path_wrapper}{lib_filename}{path_wrapper}"])
    cmd.append(f"{path_wrapper}{cppfilename}{path_wrapper}")
    cmd.extend([f"-l{l}" for l in libs])
    # print >> sys.stderr, 'COMPILING W CMD', cmd
    _logger.debug(f"Running cmd: {' '.join(cmd)}")

    def print_command_line_error():
        # Print command line when a problem occurred.
        print(
            ("Problem occurred during compilation with the " "command line below:"),
            file=sys.stderr,
        )
        print(" ".join(cmd), file=sys.stderr)

    try:
        # p_out is (stdout, stderr, returncode) from the helper.
        p_out = output_subprocess_Popen(cmd)
        compile_stderr = p_out[1].decode()
    except Exception:
        # An exception can occur e.g. if `g++` is not found.
        print_command_line_error()
        raise

    status = p_out[2]

    if status:
        # Compilation failed: dump a line-numbered copy of the source,
        # the command line, and the compiler errors to a temp file so
        # the user can inspect them (delete=False keeps it around).
        tf = tempfile.NamedTemporaryFile(
            mode="w", prefix="aesara_compilation_error_", delete=False
        )
        # gcc put its messages to stderr, so we add ours now
        tf.write("===============================\n")
        for i, l in enumerate(src_code.split("\n")):
            tf.write(f"{i + 1}\t{l}\n")
        tf.write("===============================\n")
        tf.write(
            "Problem occurred during compilation with the " "command line below:\n"
        )
        tf.write(" ".join(cmd))
        # Print errors just below the command line.
        tf.write(compile_stderr)
        tf.close()
        print("\nYou can find the C code in this temporary file: " + tf.name)
        # Scan stderr for "-l<name>" tokens to give hints about missing
        # libraries, with a special hint for libpython (python-dev).
        not_found_libraries = re.findall('-l["."-_a-zA-Z0-9]*', compile_stderr)
        for nf_lib in not_found_libraries:
            print("library " + nf_lib[2:] + " is not found.")
            if re.search('-lPYTHON["."0-9]*', nf_lib, re.IGNORECASE):
                py_string = re.search(
                    '-lpython["."0-9]*', nf_lib, re.IGNORECASE
                ).group()[8:]
                if py_string != "":
                    print(
                        "Check if package python-dev "
                        + py_string
                        + " or python-devel "
                        + py_string
                        + " is installed."
                    )
                else:
                    print(
                        "Check if package python-dev or python-devel is installed."
                    )

        # We replace '\n' by '. ' in the error message because when Python
        # prints the exception, having '\n' in the text makes it more
        # difficult to read.
        compile_stderr = compile_stderr.replace("\n", ". ")
        raise Exception(
            f"Compilation failed (return status={status}): {compile_stderr}"
        )
    elif config.cmodule__compilation_warning and compile_stderr:
        # Print errors just below the command line.
        print(compile_stderr)

    if py_module:
        # touch the __init__ file
        open(os.path.join(location, "__init__.py"), "w").close()
        assert os.path.isfile(lib_filename)
        return dlimport(lib_filename)
def icc_module_compile_str(*args):
    """Compile a C module with the Intel C compiler (ICC).

    This entry point is a placeholder: ICC compilation is not implemented,
    and only the GCC/Clang path is supported.

    Raises
    ------
    NotImplementedError
        Always raised; the arguments are ignored.
    """
    raise NotImplementedError(
        "Compilation with ICC is not supported; use the g++/clang compiler path."
    )
def check_mkl_openmp():
    """Warn/raise about known-bad MKL 2018 + OpenMP configurations.

    Returns silently (i.e. the configuration is considered safe) when any
    of these hold: the check is disabled via ``config.blas__check_openmp``,
    we are on macOS, ``MKL_THREADING_LAYER=GNU`` is set in the environment,
    or ``numpy._mklinit`` is importable. Otherwise, ``mkl`` must be
    importable and must not be version 2018, or a ``RuntimeError`` with
    remediation instructions is raised.
    """
    # User explicitly disabled this check.
    if not config.blas__check_openmp:
        return

    # macOS is exempt from this check.
    if sys.platform == "darwin":
        return

    # The GNU threading layer is the recommended workaround; if it is
    # already set there is nothing to warn about.
    if (
        "MKL_THREADING_LAYER" in os.environ
        and os.environ["MKL_THREADING_LAYER"] == "GNU"
    ):
        return

    # Recent conda numpy builds expose numpy._mklinit; its presence is
    # treated as evidence of a safe MKL setup.
    try:
        import numpy._mklinit  # noqa

        return
    except ImportError:
        pass

    try:
        import mkl

        # MKL 2018 is the problematic release: instruct the user to
        # either upgrade numpy or force the GNU threading layer.
        if "2018" in mkl.get_version_string():
            raise RuntimeError(
                """
To use MKL 2018 with Aesara either update the numpy conda packages to
their latest build or set "MKL_THREADING_LAYER=GNU" in your
environment.
"""
            )
    except ImportError:
        # Without the `mkl` module we cannot determine the MKL version,
        # so fail loudly with instructions rather than risk wrong results.
        raise RuntimeError(
            """
Could not import 'mkl'. If you are using conda, update the numpy
packages to the latest build otherwise, set MKL_THREADING_LAYER=GNU in
your environment for MKL 2018.

If you have MKL 2017 install and are not in a conda environment you
can set the Aesara flag blas__check_openmp to False. Be warned that if
you set this flag and don't set the appropriate environment or make
sure you have the right version you *will* get wrong results.
"""
        )
def default_blas_ldflags():
    """Read local NumPy and MKL build settings and construct `ld` flags from them.

    Tries, in order: EPD/Canopy-specific MKL layouts, an importable ``mkl``
    module (conda's ``mkl-service``), the BLAS settings NumPy was built
    with, and finally a plain ``-lblas``. Each candidate flag set is
    validated with ``try_blas_flag`` before being returned.

    Returns
    -------
    str
        A space-separated linker flag string, or the result of
        ``try_blas_flag(["-lblas"])`` as a last resort.
    """
    import numpy.distutils  # noqa

    warn_record = []
    try:
        if hasattr(numpy.distutils, "__config__") and numpy.distutils.__config__:
            # If the old private interface is available use it as it
            # don't print information to the user.
            blas_info = numpy.distutils.__config__.blas_opt_info
        else:
            # We do this import only here, as in some setup, if we
            # just import aesara and exit, with the import at global
            # scope, we get this error at exit: "Exception TypeError:
            # "'NoneType' object is not callable" in <bound method
            # Popen.__del__ of <subprocess.Popen object at 0x21359d0>>
            # ignored"

            # This happen with Python 2.7.3 |EPD 7.3-1 and numpy 1.8.1
            # isort: off
            import numpy.distutils.system_info  # noqa

            # We need to catch warnings as in some cases NumPy print
            # stuff that we don't want the user to see.
            # I'm not able to remove all printed stuff
            with warnings.catch_warnings(record=True):
                numpy.distutils.system_info.system_info.verbosity = 0
                blas_info = numpy.distutils.system_info.get_info("blas_opt")

        # If we are in a EPD installation, mkl is available
        if "EPD" in sys.version:
            use_unix_epd = True
            if sys.platform == "win32":
                return " ".join(
                    ['-L"%s"' % os.path.join(sys.prefix, "Scripts")]
                    +
                    # Why on Windows, the library used are not the
                    # same as what is in
                    # blas_info['libraries']?
                    [f"-l{l}" for l in ["mk2_core", "mk2_intel_thread", "mk2_rt"]]
                )
            elif sys.platform == "darwin":
                # The env variable is needed to link with mkl
                new_path = os.path.join(sys.prefix, "lib")
                v = os.getenv("DYLD_FALLBACK_LIBRARY_PATH", None)
                if v is not None:
                    # Explicit version could be replaced by a symbolic
                    # link called 'Current' created by EPD installer
                    # This will resolve symbolic links
                    v = os.path.realpath(v)

                # The python __import__ don't seam to take into account
                # the new env variable "DYLD_FALLBACK_LIBRARY_PATH"
                # when we set with os.environ['...'] = X or os.putenv()
                # So we warn the user and tell him what todo.
                if v is None or new_path not in v.split(":"):
                    _logger.warning(
                        "The environment variable "
                        "'DYLD_FALLBACK_LIBRARY_PATH' does not contain "
                        # BUG FIX: this fragment was a plain string, so the
                        # literal text "{new_path}" was printed instead of
                        # the actual path; it must be an f-string.
                        f"the '{new_path}' path in its value. This will make "
                        "Aesara use a slow version of BLAS. Update "
                        "'DYLD_FALLBACK_LIBRARY_PATH' to contain the "
                        "said value, this will disable this warning."
                    )

                use_unix_epd = False
            if use_unix_epd:
                return " ".join(
                    ["-L%s" % os.path.join(sys.prefix, "lib")]
                    + ["-l%s" % l for l in blas_info["libraries"]]
                )

        # Canopy
        if "Canopy" in sys.prefix:
            subsub = "lib"
            if sys.platform == "win32":
                subsub = "Scripts"
            lib_path = os.path.join(sys.base_prefix, subsub)
            if not os.path.exists(lib_path):
                # Old logic to find the path. I don't think we still
                # need it, but I don't have the time to test all
                # installation configuration. So I keep this as a fall
                # back in case the current expectation don't work.

                # This old logic don't work when multiple version of
                # Canopy is installed.
                p = os.path.join(sys.base_prefix, "..", "..", "appdata")
                assert os.path.exists(p), "Canopy changed the location of MKL"
                lib_paths = os.listdir(p)
                # Try to remove subdir that can't contain MKL
                for sub in lib_paths:
                    if not os.path.exists(os.path.join(p, sub, subsub)):
                        lib_paths.remove(sub)
                assert len(lib_paths) == 1, (
                    "Unexpected case when looking for Canopy MKL libraries",
                    p,
                    lib_paths,
                    [os.listdir(os.path.join(p, sub)) for sub in lib_paths],
                )
                lib_path = os.path.join(p, lib_paths[0], subsub)
                assert os.path.exists(lib_path), "Canopy changed the location of MKL"

            if sys.platform == "linux2" or sys.platform == "darwin":
                return " ".join(
                    ["-L%s" % lib_path] + ["-l%s" % l for l in blas_info["libraries"]]
                )
            elif sys.platform == "win32":
                return " ".join(
                    ['-L"%s"' % lib_path]
                    +
                    # Why on Windows, the library used are not the
                    # same as what is in blas_info['libraries']?
                    [f"-l{l}" for l in ["mk2_core", "mk2_intel_thread", "mk2_rt"]]
                )

        # MKL
        # If mkl can be imported then use it. On conda:
        # "conda install mkl-service" installs the Python wrapper and
        # the low-level C libraries as well as optimised version of
        # numpy and scipy.
        try:
            import mkl  # noqa
        except ImportError as e:
            # Only worth a warning in a conda-like environment, where
            # `conda install mkl-service` would fix it. Recorded for
            # later so it is shown only if every candidate fails.
            if any(m in sys.version for m in ("conda", "Continuum")):
                warn_record.append(f"install mkl with `conda install mkl-service`: {e}")
        else:
            # This branch is executed if no exception was raised
            if sys.platform == "win32":
                lib_path = os.path.join(sys.prefix, "Library", "bin")
                flags = [f'-L"{lib_path}"']
            else:
                lib_path = blas_info.get("library_dirs", [])
                flags = []
                if lib_path:
                    flags = [f"-L{lib_path[0]}"]
            if "2018" in mkl.get_version_string():
                thr = "mkl_gnu_thread"
            else:
                thr = "mkl_intel_thread"
            base_flags = list(flags)
            flags += [f"-l{l}" for l in ["mkl_core", thr, "mkl_rt"]]
            res = try_blas_flag(flags)

            if not res and sys.platform == "win32" and thr == "mkl_gnu_thread":
                # Check if it would work for intel OpenMP on windows
                flags = base_flags + [
                    f"-l{l}" for l in ["mkl_core", "mkl_intel_thread", "mkl_rt"]
                ]
                res = try_blas_flag(flags)

            if res:
                check_mkl_openmp()
                return res

            # Retry with explicit rpath entries for the BLAS library dirs.
            flags.extend(["-Wl,-rpath," + l for l in blas_info.get("library_dirs", [])])
            res = try_blas_flag(flags)
            if res:
                check_mkl_openmp()
                # NOTE(review): on win32 `lib_path` is a str at this point,
                # so `lib_path[0]` is its first character — this looks
                # suspicious; confirm intended behavior before changing.
                aesara.utils.maybe_add_to_os_environ_pathlist("PATH", lib_path[0])
                return res

        # to support path that includes spaces, we need to wrap it with double quotes on Windows
        path_wrapper = '"' if os.name == "nt" else ""
        ret = (
            # TODO: the Gemm op below should separate the
            # -L and -l arguments into the two callbacks
            # that CLinker uses for that stuff. for now,
            # we just pass the whole ldflags as the -l
            # options part.
            [
                f"-L{path_wrapper}{l}{path_wrapper}"
                for l in blas_info.get("library_dirs", [])
            ]
            + [f"-l{l}" for l in blas_info.get("libraries", [])]
            + blas_info.get("extra_link_args", [])
        )
        # For some very strange reason, we need to specify -lm twice
        # to get mkl to link correctly. I have no idea why.
        if any("mkl" in fl for fl in ret):
            ret.extend(["-lm", "-lm"])

        res = try_blas_flag(ret)
        if res:
            if "mkl" in res:
                check_mkl_openmp()
            return res

        # If we are using conda and can't reuse numpy blas, then doing
        # the fallback and test -lblas could give slow computation, so
        # warn about this.
        for warn in warn_record:
            _logger.warning(warn)
        del warn_record

        # Some environment don't have the lib dir in LD_LIBRARY_PATH.
        # So add it.
        ret.extend(["-Wl,-rpath," + l for l in blas_info.get("library_dirs", [])])
        res = try_blas_flag(ret)
        if res:
            if "mkl" in res:
                check_mkl_openmp()
            return res

        # Add sys.prefix/lib to the runtime search path. On
        # non-system installations of Python that use the
        # system linker, this is generally necessary.
        if sys.platform in ("linux", "darwin"):
            lib_path = os.path.join(sys.prefix, "lib")
            ret.append("-Wl,-rpath," + lib_path)
            res = try_blas_flag(ret)
            if res:
                if "mkl" in res:
                    check_mkl_openmp()
                return res

    except KeyError:
        pass

    # Even if we could not detect what was used for numpy, or if these
    # libraries are not found, most Linux systems have a libblas.so
    # readily available. We try to see if that's the case, rather
    # than disable blas. To test it correctly, we must load a program.
    # Otherwise, there could be problem in the LD_LIBRARY_PATH.
    return try_blas_flag(["-lblas"])
def add_blas_configvars():
    """Register this module's BLAS-related options with the Aesara config.

    Adds ``blas__ldflags`` (its default computed lazily by
    ``default_blas_ldflags``) and ``blas__check_openmp``. Neither option
    participates in the C key.
    """
    ldflags_param = aesara.configparser.StrParam(default_blas_ldflags)
    config.add(
        "blas__ldflags",
        "lib[s] to include for [Fortran] level-3 blas implementation",
        ldflags_param,
        # Added elsewhere in the c key only when needed.
        in_c_key=False,
    )

    openmp_check_param = aesara.configparser.BoolParam(True)
    config.add(
        "blas__check_openmp",
        "Check for openmp library conflict.\nWARNING: Setting this to False leaves you open to wrong results in blas-related operations.",
        openmp_check_param,
        in_c_key=False,
    )
# Register config parameters that are specific to this module:
# (runs at import time so the blas__* options exist before first use)
add_blas_configvars()
|
// Harness includes supplied by the test runner (provide `assert` etc.).
load("bf4b12814bc95f34eeb130127d8438ab.js");
load("93fae755edd261212639eed30afa2ca4.js");
// Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.

/*---
es5id: 15.4.4.18-3-14
description: Array.prototype.forEach - 'length' is a string containing -Infinity
---*/

// Records whether the callback was ever invoked.
var accessed2 = false;

function callbackfn2(val, idx, obj) {
  accessed2 = true;
}

// A length of "-Infinity" coerces to 0, so forEach should visit no
// elements and the callback must never run.
var obj2 = { 0: 9, length: "-Infinity" };

Array.prototype.forEach.call(obj2, callbackfn2);

assert.sameValue(accessed2, false, 'accessed2');
|
# Ask the user for their birth city and print True when its first word,
# compared case-insensitively, is "SANTO".
cid = input('Digite a cidade que voce nasceu:').strip()
primeira_palavra = cid.split()[0]
print(primeira_palavra.upper() == 'SANTO')
|
from .datatypes import DATATYPES_STRUCTS
FIELDS = {
'ABS_1': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'DISCHARGE_CARE_PROVIDER', 'HL70010', -1),
'ABS_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRANSFER_MEDICAL_SERVICE_CODE', 'HL70069', -1),
'ABS_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SEVERITY_OF_ILLNESS_CODE', 'HL70421', -1),
'ABS_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_ATTESTATION', None, -1),
'ABS_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ATTESTED_BY', None, -1),
'ABS_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRIAGE_CODE', 'HL70422', -1),
'ABS_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ABSTRACT_COMPLETION_DATE_TIME', None, -1),
'ABS_8': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ABSTRACTED_BY', None, -1),
'ABS_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CASE_CATEGORY_CODE', 'HL70423', -1),
'ABS_10': ('leaf', None, 'ID', 'CAESARIAN_SECTION_INDICATOR', 'HL70136', -1),
'ABS_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GESTATION_CATEGORY_CODE', 'HL70424', -1),
'ABS_12': ('leaf', None, 'NM', 'GESTATION_PERIOD_WEEKS', None, -1),
'ABS_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NEWBORN_CODE', 'HL70425', -1),
'ABS_14': ('leaf', None, 'ID', 'STILLBORN_INDICATOR', 'HL70136', -1),
'ACC_1': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ACCIDENT_DATE_TIME', None, -1),
'ACC_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ACCIDENT_CODE', 'HL70050', -1),
'ACC_3': ('leaf', None, 'ST', 'ACCIDENT_LOCATION', None, -1),
'ACC_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'AUTO_ACCIDENT_STATE', 'HL70347', -1),
'ACC_5': ('leaf', None, 'ID', 'ACCIDENT_JOB_RELATED_INDICATOR', 'HL70136', -1),
'ACC_6': ('leaf', None, 'ID', 'ACCIDENT_DEATH_INDICATOR', 'HL70136', -1),
'ACC_7': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ENTERED_BY', None, -1),
'ACC_8': ('leaf', None, 'ST', 'ACCIDENT_DESCRIPTION', None, -1),
'ACC_9': ('leaf', None, 'ST', 'BROUGHT_IN_BY', None, -1),
'ACC_10': ('leaf', None, 'ID', 'POLICE_NOTIFIED_INDICATOR', 'HL70136', -1),
'ACC_11': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'ACCIDENT_ADDRESS', None, -1),
'ADD_1': ('leaf', None, 'ST', 'ADDENDUM_CONTINUATION_POINTER', None, -1),
'AFF_1': ('leaf', None, 'SI', 'SET_ID_AFF', None, -1),
'AFF_2': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'PROFESSIONAL_ORGANIZATION', None, -1),
'AFF_3': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'PROFESSIONAL_ORGANIZATION_ADDRESS', None, -1),
'AFF_4': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'PROFESSIONAL_ORGANIZATION_AFFILIATION_DATE_RANGE', None, -1),
'AFF_5': ('leaf', None, 'ST', 'PROFESSIONAL_AFFILIATION_ADDITIONAL_INFORMATION', None, -1),
'AIG_1': ('leaf', None, 'SI', 'SET_ID_AIG', None, -1),
'AIG_2': ('leaf', None, 'ID', 'SEGMENT_ACTION_CODE', 'HL70206', -1),
'AIG_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESOURCE_ID', None, -1),
'AIG_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESOURCE_TYPE', None, -1),
'AIG_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESOURCE_GROUP', None, -1),
'AIG_6': ('leaf', None, 'NM', 'RESOURCE_QUANTITY', None, -1),
'AIG_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESOURCE_QUANTITY_UNITS', None, -1),
'AIG_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'START_DATE_TIME', None, -1),
'AIG_9': ('leaf', None, 'NM', 'START_DATE_TIME_OFFSET', None, -1),
'AIG_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'START_DATE_TIME_OFFSET_UNITS', None, -1),
'AIG_11': ('leaf', None, 'NM', 'DURATION', None, -1),
'AIG_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DURATION_UNITS', None, -1),
'AIG_13': ('leaf', None, 'IS', 'ALLOW_SUBSTITUTION_CODE', 'HL70279', -1),
'AIG_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FILLER_STATUS_CODE', 'HL70278', -1),
'AIL_1': ('leaf', None, 'SI', 'SET_ID_AIL', None, -1),
'AIL_2': ('leaf', None, 'ID', 'SEGMENT_ACTION_CODE', 'HL70206', -1),
'AIL_3': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'LOCATION_RESOURCE_ID', None, -1),
'AIL_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_TYPE_AIL', 'HL70305', -1),
'AIL_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_GROUP', None, -1),
'AIL_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'START_DATE_TIME', None, -1),
'AIL_7': ('leaf', None, 'NM', 'START_DATE_TIME_OFFSET', None, -1),
'AIL_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'START_DATE_TIME_OFFSET_UNITS', None, -1),
'AIL_9': ('leaf', None, 'NM', 'DURATION', None, -1),
'AIL_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DURATION_UNITS', None, -1),
'AIL_11': ('leaf', None, 'IS', 'ALLOW_SUBSTITUTION_CODE', 'HL70279', -1),
'AIL_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FILLER_STATUS_CODE', 'HL70278', -1),
'AIP_1': ('leaf', None, 'SI', 'SET_ID_AIP', None, -1),
'AIP_2': ('leaf', None, 'ID', 'SEGMENT_ACTION_CODE', 'HL70206', -1),
'AIP_3': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PERSONNEL_RESOURCE_ID', None, -1),
'AIP_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESOURCE_TYPE', 'HL70182', -1),
'AIP_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESOURCE_GROUP', None, -1),
'AIP_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'START_DATE_TIME', None, -1),
'AIP_7': ('leaf', None, 'NM', 'START_DATE_TIME_OFFSET', None, -1),
'AIP_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'START_DATE_TIME_OFFSET_UNITS', None, -1),
'AIP_9': ('leaf', None, 'NM', 'DURATION', None, -1),
'AIP_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DURATION_UNITS', None, -1),
'AIP_11': ('leaf', None, 'IS', 'ALLOW_SUBSTITUTION_CODE', 'HL70279', -1),
'AIP_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FILLER_STATUS_CODE', 'HL70278', -1),
'AIS_1': ('leaf', None, 'SI', 'SET_ID_AIS', None, -1),
'AIS_2': ('leaf', None, 'ID', 'SEGMENT_ACTION_CODE', 'HL70206', -1),
'AIS_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNIVERSAL_SERVICE_IDENTIFIER', None, -1),
'AIS_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'START_DATE_TIME', None, -1),
'AIS_5': ('leaf', None, 'NM', 'START_DATE_TIME_OFFSET', None, -1),
'AIS_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'START_DATE_TIME_OFFSET_UNITS', None, -1),
'AIS_7': ('leaf', None, 'NM', 'DURATION', None, -1),
'AIS_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DURATION_UNITS', None, -1),
'AIS_9': ('leaf', None, 'IS', 'ALLOW_SUBSTITUTION_CODE', 'HL70279', -1),
'AIS_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FILLER_STATUS_CODE', 'HL70278', -1),
'AIS_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PLACER_SUPPLEMENTAL_SERVICE_INFORMATION', 'HL70411', -1),
'AIS_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FILLER_SUPPLEMENTAL_SERVICE_INFORMATION', 'HL70411', -1),
'AL1_1': ('leaf', None, 'SI', 'SET_ID_AL1', None, -1),
'AL1_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGEN_TYPE_CODE', 'HL70127', -1),
'AL1_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGEN_CODE_MNEMONIC_DESCRIPTION', None, -1),
'AL1_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGY_SEVERITY_CODE', 'HL70128', -1),
'AL1_5': ('leaf', None, 'ST', 'ALLERGY_REACTION_CODE', None, -1),
'AL1_6': ('leaf', None, 'DT', 'IDENTIFICATION_DATE', None, -1),
'APR_1': ('sequence', DATATYPES_STRUCTS['SCV'], 'SCV', 'TIME_SELECTION_CRITERIA', 'HL70294', -1),
'APR_2': ('sequence', DATATYPES_STRUCTS['SCV'], 'SCV', 'RESOURCE_SELECTION_CRITERIA', 'HL70294', -1),
'APR_3': ('sequence', DATATYPES_STRUCTS['SCV'], 'SCV', 'LOCATION_SELECTION_CRITERIA', 'HL70294', -1),
'APR_4': ('leaf', None, 'NM', 'SLOT_SPACING_CRITERIA', None, -1),
'APR_5': ('sequence', DATATYPES_STRUCTS['SCV'], 'SCV', 'FILLER_OVERRIDE_CRITERIA', None, -1),
'ARQ_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_APPOINTMENT_ID', None, -1),
'ARQ_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_APPOINTMENT_ID', None, -1),
'ARQ_3': ('leaf', None, 'NM', 'OCCURRENCE_NUMBER', None, -1),
'ARQ_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_GROUP_NUMBER', None, -1),
'ARQ_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SCHEDULE_ID', None, -1),
'ARQ_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUEST_EVENT_REASON', None, -1),
'ARQ_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPOINTMENT_REASON', 'HL70276', -1),
'ARQ_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPOINTMENT_TYPE', 'HL70277', -1),
'ARQ_9': ('leaf', None, 'NM', 'APPOINTMENT_DURATION', None, -1),
'ARQ_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPOINTMENT_DURATION_UNITS', None, -1),
'ARQ_11': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'REQUESTED_START_DATE_TIME_RANGE', None, -1),
'ARQ_12': ('leaf', None, 'ST', 'PRIORITY_ARQ', None, -1),
'ARQ_13': ('sequence', DATATYPES_STRUCTS['RI'], 'RI', 'REPEATING_INTERVAL', None, -1),
'ARQ_14': ('leaf', None, 'ST', 'REPEATING_INTERVAL_DURATION', None, -1),
'ARQ_15': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PLACER_CONTACT_PERSON', None, -1),
'ARQ_16': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PLACER_CONTACT_PHONE_NUMBER', None, -1),
'ARQ_17': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'PLACER_CONTACT_ADDRESS', None, -1),
'ARQ_18': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PLACER_CONTACT_LOCATION', None, -1),
'ARQ_19': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ENTERED_BY_PERSON', None, -1),
'ARQ_20': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'ENTERED_BY_PHONE_NUMBER', None, -1),
'ARQ_21': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'ENTERED_BY_LOCATION', None, -1),
'ARQ_22': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PARENT_PLACER_APPOINTMENT_ID', None, -1),
'ARQ_23': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PARENT_FILLER_APPOINTMENT_ID', None, -1),
'ARQ_24': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_ORDER_NUMBER', None, -1),
'ARQ_25': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_ORDER_NUMBER', None, -1),
'AUT_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'AUTHORIZING_PAYOR_PLAN_ID', 'HL70072', -1),
'AUT_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'AUTHORIZING_PAYOR_COMPANY_ID', 'HL70285', -1),
'AUT_3': ('leaf', None, 'ST', 'AUTHORIZING_PAYOR_COMPANY_NAME', None, -1),
'AUT_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'AUTHORIZATION_EFFECTIVE_DATE', None, -1),
'AUT_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'AUTHORIZATION_EXPIRATION_DATE', None, -1),
'AUT_6': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'AUTHORIZATION_IDENTIFIER', None, -1),
'AUT_7': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'REIMBURSEMENT_LIMIT', None, -1),
'AUT_8': ('leaf', None, 'NM', 'REQUESTED_NUMBER_OF_TREATMENTS', None, -1),
'AUT_9': ('leaf', None, 'NM', 'AUTHORIZED_NUMBER_OF_TREATMENTS', None, -1),
'AUT_10': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PROCESS_DATE', None, -1),
'BHS_1': ('leaf', None, 'ST', 'BATCH_FIELD_SEPARATOR', None, -1),
'BHS_2': ('leaf', None, 'ST', 'BATCH_ENCODING_CHARACTERS', None, -1),
'BHS_3': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'BATCH_SENDING_APPLICATION', None, -1),
'BHS_4': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'BATCH_SENDING_FACILITY', None, -1),
'BHS_5': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'BATCH_RECEIVING_APPLICATION', None, -1),
'BHS_6': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'BATCH_RECEIVING_FACILITY', None, -1),
'BHS_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BATCH_CREATION_DATE_TIME', None, -1),
'BHS_8': ('leaf', None, 'ST', 'BATCH_SECURITY', None, -1),
'BHS_9': ('leaf', None, 'ST', 'BATCH_NAME_ID_TYPE', None, -1),
'BHS_10': ('leaf', None, 'ST', 'BATCH_COMMENT', None, -1),
'BHS_11': ('leaf', None, 'ST', 'BATCH_CONTROL_ID', None, -1),
'BHS_12': ('leaf', None, 'ST', 'REFERENCE_BATCH_CONTROL_ID', None, -1),
'BLC_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'BLOOD_PRODUCT_CODE', 'HL70426', -1),
'BLC_2': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'BLOOD_AMOUNT', None, -1),
'BLG_1': ('sequence', DATATYPES_STRUCTS['CCD'], 'CCD', 'WHEN_TO_CHARGE', 'HL70100', -1),
'BLG_2': ('leaf', None, 'ID', 'CHARGE_TYPE', 'HL70122', -1),
'BLG_3': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'ACCOUNT_ID', None, -1),
'BLG_4': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CHARGE_TYPE_REASON', 'HL70475', -1),
'BPO_1': ('leaf', None, 'SI', 'SET_ID_BPO', None, -1),
'BPO_2': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BP_UNIVERSAL_SERVICE_ID', None, -1),
'BPO_3': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BP_PROCESSING_REQUIREMENTS', 'HL70508', -1),
'BPO_4': ('leaf', None, 'NM', 'BP_QUANTITY', None, -1),
'BPO_5': ('leaf', None, 'NM', 'BP_AMOUNT', None, -1),
'BPO_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'BP_UNITS', None, -1),
'BPO_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BP_INTENDED_USE_DATE_TIME', None, -1),
'BPO_8': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'BP_INTENDED_DISPENSE_FROM_LOCATION', None, -1),
'BPO_9': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'BP_INTENDED_DISPENSE_FROM_ADDRESS', None, -1),
'BPO_10': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BP_REQUESTED_DISPENSE_DATE_TIME', None, -1),
'BPO_11': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'BP_REQUESTED_DISPENSE_TO_LOCATION', None, -1),
'BPO_12': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'BP_REQUESTED_DISPENSE_TO_ADDRESS', None, -1),
'BPO_13': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BP_INDICATION_FOR_USE', 'HL70509', -1),
'BPO_14': ('leaf', None, 'ID', 'BP_INFORMED_CONSENT_INDICATOR', 'HL70136', -1),
'BPX_1': ('leaf', None, 'SI', 'SET_ID_BPX', None, -1),
'BPX_2': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BP_DISPENSE_STATUS', 'HL70510', -1),
'BPX_3': ('leaf', None, 'ID', 'BP_STATUS', 'HL70511', -1),
'BPX_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BP_DATE_TIME_OF_STATUS', None, -1),
'BPX_5': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'BC_DONATION_ID', None, -1),
'BPX_6': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'BC_COMPONENT', None, -1),
'BPX_7': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'BC_DONATION_TYPE_INTENDED_USE', None, -1),
'BPX_8': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CP_COMMERCIAL_PRODUCT', 'HL70512', -1),
'BPX_9': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'CP_MANUFACTURER', None, -1),
'BPX_10': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'CP_LOT_NUMBER', None, -1),
'BPX_11': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'BP_BLOOD_GROUP', None, -1),
'BPX_12': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'BC_SPECIAL_TESTING', None, -1),
'BPX_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BP_EXPIRATION_DATE_TIME', None, -1),
'BPX_14': ('leaf', None, 'NM', 'BP_QUANTITY', None, -1),
'BPX_15': ('leaf', None, 'NM', 'BP_AMOUNT', None, -1),
'BPX_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'BP_UNITS', None, -1),
'BPX_17': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'BP_UNIQUE_ID', None, -1),
'BPX_18': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'BP_ACTUAL_DISPENSED_TO_LOCATION', None, -1),
'BPX_19': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'BP_ACTUAL_DISPENSED_TO_ADDRESS', None, -1),
'BPX_20': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'BP_DISPENSED_TO_RECEIVER', None, -1),
'BPX_21': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'BP_DISPENSING_INDIVIDUAL', None, -1),
'BTS_1': ('leaf', None, 'ST', 'BATCH_MESSAGE_COUNT', None, -1),
'BTS_2': ('leaf', None, 'ST', 'BATCH_COMMENT', None, -1),
'BTS_3': ('leaf', None, 'NM', 'BATCH_TOTALS', None, -1),
'BTX_1': ('leaf', None, 'SI', 'SET_ID_BTX', None, -1),
'BTX_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'BC_DONATION_ID', None, -1),
'BTX_3': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'BC_COMPONENT', None, -1),
'BTX_4': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'BC_BLOOD_GROUP', None, -1),
'BTX_5': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CP_COMMERCIAL_PRODUCT', 'HL70512', -1),
'BTX_6': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'CP_MANUFACTURER', None, -1),
'BTX_7': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'CP_LOT_NUMBER', None, -1),
'BTX_8': ('leaf', None, 'NM', 'BP_QUANTITY', None, -1),
'BTX_9': ('leaf', None, 'NM', 'BP_AMOUNT', None, -1),
'BTX_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'BP_UNITS', None, -1),
'BTX_11': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BP_TRANSFUSION_DISPOSITION_STATUS', 'HL70513', -1),
'BTX_12': ('leaf', None, 'ID', 'BP_MESSAGE_STATUS', 'HL70511', -1),
'BTX_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BP_DATE_TIME_OF_STATUS', None, -1),
'BTX_14': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'BP_ADMINISTRATOR', None, -1),
'BTX_15': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'BP_VERIFIER', None, -1),
'BTX_16': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BP_TRANSFUSION_START_DATE_TIME_OF_STATUS', None, -1),
'BTX_17': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'BP_TRANSFUSION_END_DATE_TIME_OF_STATUS', None, -1),
'BTX_18': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BP_ADVERSE_REACTION_TYPE', 'HL70514', -1),
'BTX_19': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BP_TRANSFUSION_INTERRUPTED_REASON', 'HL70515', -1),
'CDM_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_KEY_VALUE_CDM', 'HL70132', -1),
'CDM_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CHARGE_CODE_ALIAS', None, -1),
'CDM_3': ('leaf', None, 'ST', 'CHARGE_DESCRIPTION_SHORT', None, -1),
'CDM_4': ('leaf', None, 'ST', 'CHARGE_DESCRIPTION_LONG', None, -1),
'CDM_5': ('leaf', None, 'IS', 'DESCRIPTION_OVERRIDE_INDICATOR', 'HL70268', -1),
'CDM_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EXPLODING_CHARGES', None, -1),
'CDM_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE', 'HL70088', -1),
'CDM_8': ('leaf', None, 'ID', 'ACTIVE_INACTIVE_FLAG', 'HL70183', -1),
'CDM_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INVENTORY_NUMBER', 'HL70463', -1),
'CDM_10': ('leaf', None, 'NM', 'RESOURCE_LOAD', None, -1),
'CDM_11': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'CONTRACT_NUMBER', None, -1),
'CDM_12': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'CONTRACT_ORGANIZATION', None, -1),
'CDM_13': ('leaf', None, 'ID', 'ROOM_FEE_INDICATOR', 'HL70136', -1),
'CER_1': ('leaf', None, 'SI', 'SET_ID_CER', None, -1),
'CER_2': ('leaf', None, 'ST', 'SERIAL_NUMBER', None, -1),
'CER_3': ('leaf', None, 'ST', 'VERSION', None, -1),
'CER_4': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'GRANTING_AUTHORITY', None, -1),
'CER_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ISSUING_AUTHORITY', None, -1),
'CER_6': ('sequence', DATATYPES_STRUCTS['ED'], 'ED', 'SIGNATURE_OF_ISSUING_AUTHORITY', None, -1),
'CER_7': ('leaf', None, 'ID', 'GRANTING_COUNTRY', 'HL70399', -1),
'CER_8': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'GRANTING_STATE_PROVINCE', 'HL70347', -1),
'CER_9': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'GRANTING_COUNTY_PARISH', 'HL70289', -1),
'CER_10': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CERTIFICATE_TYPE', None, -1),
'CER_11': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CERTIFICATE_DOMAIN', None, -1),
'CER_12': ('leaf', None, 'ID', 'SUBJECT_ID', None, -1),
'CER_13': ('leaf', None, 'ST', 'SUBJECT_NAME', None, -1),
'CER_14': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE',
'SUBJECT_DIRECTORY_ATTRIBUTE_EXTENSION_HEALTH_PROFESSIONAL_DATA', None, -1),
'CER_15': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SUBJECT_PUBLIC_KEY_INFO', None, -1),
'CER_16': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'AUTHORITY_KEY_IDENTIFIER', None, -1),
'CER_17': ('leaf', None, 'ID', 'BASIC_CONSTRAINT', 'HL70136', -1),
'CER_18': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CRL_DISTRIBUTION_POINT', None, -1),
'CER_19': ('leaf', None, 'ID', 'JURISDICTION_COUNTRY', 'HL70399', -1),
'CER_20': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'JURISDICTION_STATE_PROVINCE', 'HL70347', -1),
'CER_21': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'JURISDICTION_COUNTY_PARISH', 'HL70289', -1),
'CER_22': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'JURISDICTION_BREADTH', 'HL70547', -1),
'CER_23': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'GRANTING_DATE', None, -1),
'CER_24': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ISSUING_DATE', None, -1),
'CER_25': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ACTIVATION_DATE', None, -1),
'CER_26': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'INACTIVATION_DATE', None, -1),
'CER_27': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPIRATION_DATE', None, -1),
'CER_28': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'RENEWAL_DATE', None, -1),
'CER_29': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'REVOCATION_DATE', None, -1),
'CER_30': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REVOCATION_REASON_CODE', None, -1),
'CER_31': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CERTIFICATE_STATUS', 'HL70536', -1),
'CM0_1': ('leaf', None, 'SI', 'SET_ID_CM0', None, -1),
'CM0_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SPONSOR_STUDY_ID', None, -1),
'CM0_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'ALTERNATE_STUDY_ID', None, -1),
'CM0_4': ('leaf', None, 'ST', 'TITLE_OF_STUDY', None, -1),
'CM0_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'CHAIRMAN_OF_STUDY', None, -1),
'CM0_6': ('leaf', None, 'DT', 'LAST_IRB_APPROVAL_DATE', None, -1),
'CM0_7': ('leaf', None, 'NM', 'TOTAL_ACCRUAL_TO_DATE', None, -1),
'CM0_8': ('leaf', None, 'DT', 'LAST_ACCRUAL_DATE', None, -1),
'CM0_9': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'CONTACT_FOR_STUDY', None, -1),
'CM0_10': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CONTACT_S_TELEPHONE_NUMBER', None, -1),
'CM0_11': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'CONTACT_S_ADDRESS', None, -1),
'CM1_1': ('leaf', None, 'SI', 'SET_ID_CM1', None, -1),
'CM1_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STUDY_PHASE_IDENTIFIER', None, -1),
'CM1_3': ('leaf', None, 'ST', 'DESCRIPTION_OF_STUDY_PHASE', None, -1),
'CM2_1': ('leaf', None, 'SI', 'SET_ID_CM2', None, -1),
'CM2_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SCHEDULED_TIME_POINT', None, -1),
'CM2_3': ('leaf', None, 'ST', 'DESCRIPTION_OF_TIME_POINT', None, -1),
'CM2_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EVENTS_SCHEDULED_THIS_TIME_POINT', None, -1),
'CNS_1': ('leaf', None, 'NM', 'STARTING_NOTIFICATION_REFERENCE_NUMBER', None, -1),
'CNS_2': ('leaf', None, 'NM', 'ENDING_NOTIFICATION_REFERENCE_NUMBER', None, -1),
'CNS_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'STARTING_NOTIFICATION_DATE_TIME', None, -1),
'CNS_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ENDING_NOTIFICATION_DATE_TIME', None, -1),
'CNS_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STARTING_NOTIFICATION_CODE', None, -1),
'CNS_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ENDING_NOTIFICATION_CODE', None, -1),
'CON_1': ('leaf', None, 'SI', 'SET_ID_CON', None, -1),
'CON_2': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONSENT_TYPE', 'HL70496', -1),
'CON_3': ('leaf', None, 'ST', 'CONSENT_FORM_ID', None, -1),
'CON_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'CONSENT_FORM_NUMBER', None, -1),
'CON_5': ('leaf', None, 'FT', 'CONSENT_TEXT', None, -1),
'CON_6': ('leaf', None, 'FT', 'SUBJECT_SPECIFIC_CONSENT_TEXT', None, -1),
'CON_7': ('leaf', None, 'FT', 'CONSENT_BACKGROUND', None, -1),
'CON_8': ('leaf', None, 'FT', 'SUBJECT_SPECIFIC_CONSENT_BACKGROUND', None, -1),
'CON_9': ('leaf', None, 'FT', 'CONSENTER_IMPOSED_LIMITATIONS', None, -1),
'CON_10': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'CONSENT_MODE', 'HL70497', -1),
'CON_11': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'CONSENT_STATUS', 'HL70498', -1),
'CON_12': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CONSENT_DISCUSSION_DATE_TIME', None, -1),
'CON_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CONSENT_DECISION_DATE_TIME', None, -1),
'CON_14': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CONSENT_EFFECTIVE_DATE_TIME', None, -1),
'CON_15': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CONSENT_END_DATE_TIME', None, -1),
'CON_16': ('leaf', None, 'ID', 'SUBJECT_COMPETENCE_INDICATOR', 'HL70136', -1),
'CON_17': ('leaf', None, 'ID', 'TRANSLATOR_ASSISTANCE_INDICATOR', 'HL70136', -1),
'CON_18': ('leaf', None, 'ID', 'LANGUAGE_TRANSLATED_TO', 'HL70296', -1),
'CON_19': ('leaf', None, 'ID', 'INFORMATIONAL_MATERIAL_SUPPLIED_INDICATOR', 'HL70136', -1),
'CON_20': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONSENT_BYPASS_REASON', 'HL70499', -1),
'CON_21': ('leaf', None, 'ID', 'CONSENT_DISCLOSURE_LEVEL', 'HL70500', -1),
'CON_22': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONSENT_NON_DISCLOSURE_REASON', 'HL70501', -1),
'CON_23': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'NON_SUBJECT_CONSENTER_REASON', 'HL70502', -1),
'CON_24': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'CONSENTER_ID', None, -1),
'CON_25': ('leaf', None, 'IS', 'RELATIONSHIP_TO_SUBJECT_TABLE', 'HL70548', -1),
'CSP_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STUDY_PHASE_IDENTIFIER', None, -1),
'CSP_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_STUDY_PHASE_BEGAN', None, -1),
'CSP_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_STUDY_PHASE_ENDED', None, -1),
'CSP_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STUDY_PHASE_EVALUABILITY', None, -1),
'CSR_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SPONSOR_STUDY_ID', None, -1),
'CSR_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'ALTERNATE_STUDY_ID', None, -1),
'CSR_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INSTITUTION_REGISTERING_THE_PATIENT', None, -1),
'CSR_4': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'SPONSOR_PATIENT_ID', None, -1),
'CSR_5': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'ALTERNATE_PATIENT_ID_CSR', None, -1),
'CSR_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_PATIENT_STUDY_REGISTRATION', None, -1),
'CSR_7': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PERSON_PERFORMING_STUDY_REGISTRATION', None, -1),
'CSR_8': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'STUDY_AUTHORIZING_PROVIDER', None, -1),
'CSR_9': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_PATIENT_STUDY_CONSENT_SIGNED', None, -1),
'CSR_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PATIENT_STUDY_ELIGIBILITY_STATUS', None, -1),
'CSR_11': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'STUDY_RANDOMIZATION_DATE_TIME', None, -1),
'CSR_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RANDOMIZED_STUDY_ARM', None, -1),
'CSR_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STRATUM_FOR_STUDY_RANDOMIZATION', None, -1),
'CSR_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PATIENT_EVALUABILITY_STATUS', None, -1),
'CSR_15': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_ENDED_STUDY', None, -1),
'CSR_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REASON_ENDED_STUDY', None, -1),
'CSS_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STUDY_SCHEDULED_TIME_POINT', None, -1),
'CSS_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'STUDY_SCHEDULED_PATIENT_TIME_POINT', None, -1),
'CSS_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STUDY_QUALITY_CONTROL_CODES', None, -1),
'CTD_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTACT_ROLE', 'HL70131', -1),
'CTD_2': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'CONTACT_NAME', None, -1),
'CTD_3': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'CONTACT_ADDRESS', None, -1),
'CTD_4': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'CONTACT_LOCATION', None, -1),
'CTD_5': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CONTACT_COMMUNICATION_INFORMATION', None, -1),
'CTD_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PREFERRED_METHOD_OF_CONTACT', 'HL70185', -1),
'CTD_7': ('sequence', DATATYPES_STRUCTS['PLN'], 'PLN', 'CONTACT_IDENTIFIERS', None, -1),
'CTI_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SPONSOR_STUDY_ID', None, -1),
'CTI_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STUDY_PHASE_IDENTIFIER', None, -1),
'CTI_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STUDY_SCHEDULED_TIME_POINT', None, -1),
'DB1_1': ('leaf', None, 'SI', 'SET_ID_DB1', None, -1),
'DB1_2': ('leaf', None, 'IS', 'DISABLED_PERSON_CODE', 'HL70334', -1),
'DB1_3': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'DISABLED_PERSON_IDENTIFIER', None, -1),
'DB1_4': ('leaf', None, 'ID', 'DISABLED_INDICATOR', 'HL70136', -1),
'DB1_5': ('leaf', None, 'DT', 'DISABILITY_START_DATE', None, -1),
'DB1_6': ('leaf', None, 'DT', 'DISABILITY_END_DATE', None, -1),
'DB1_7': ('leaf', None, 'DT', 'DISABILITY_RETURN_TO_WORK_DATE', None, -1),
'DB1_8': ('leaf', None, 'DT', 'DISABILITY_UNABLE_TO_WORK_DATE', None, -1),
'DG1_1': ('leaf', None, 'SI', 'SET_ID_DG1', None, -1),
'DG1_2': ('leaf', None, 'ID', 'DIAGNOSIS_CODING_METHOD', 'HL70053', -1),
'DG1_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DIAGNOSIS_CODE_DG1', 'HL70051', -1),
'DG1_4': ('leaf', None, 'ST', 'DIAGNOSIS_DESCRIPTION', None, -1),
'DG1_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DIAGNOSIS_DATE_TIME', None, -1),
'DG1_6': ('leaf', None, 'IS', 'DIAGNOSIS_TYPE', 'HL70052', -1),
'DG1_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MAJOR_DIAGNOSTIC_CATEGORY', 'HL70118', -1),
'DG1_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DIAGNOSTIC_RELATED_GROUP', 'HL70055', -1),
'DG1_9': ('leaf', None, 'ID', 'DRG_APPROVAL_INDICATOR', 'HL70136', -1),
'DG1_10': ('leaf', None, 'IS', 'DRG_GROUPER_REVIEW_CODE', 'HL70056', -1),
'DG1_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'OUTLIER_TYPE', 'HL70083', -1),
'DG1_12': ('leaf', None, 'NM', 'OUTLIER_DAYS', None, -1),
'DG1_13': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'OUTLIER_COST', None, -1),
'DG1_14': ('leaf', None, 'ST', 'GROUPER_VERSION_AND_TYPE', None, -1),
'DG1_15': ('leaf', None, 'ID', 'DIAGNOSIS_PRIORITY', 'HL70359', -1),
'DG1_16': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'DIAGNOSING_CLINICIAN', None, -1),
'DG1_17': ('leaf', None, 'IS', 'DIAGNOSIS_CLASSIFICATION', 'HL70228', -1),
'DG1_18': ('leaf', None, 'ID', 'CONFIDENTIAL_INDICATOR', 'HL70136', -1),
'DG1_19': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ATTESTATION_DATE_TIME', None, -1),
'DG1_20': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'DIAGNOSIS_IDENTIFIER', None, -1),
'DG1_21': ('leaf', None, 'ID', 'DIAGNOSIS_ACTION_CODE', 'HL70206', -1),
'DRG_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DIAGNOSTIC_RELATED_GROUP', 'HL70055', -1),
'DRG_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DRG_ASSIGNED_DATE_TIME', None, -1),
'DRG_3': ('leaf', None, 'ID', 'DRG_APPROVAL_INDICATOR', 'HL70136', -1),
'DRG_4': ('leaf', None, 'IS', 'DRG_GROUPER_REVIEW_CODE', 'HL70056', -1),
'DRG_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'OUTLIER_TYPE', 'HL70083', -1),
'DRG_6': ('leaf', None, 'NM', 'OUTLIER_DAYS', None, -1),
'DRG_7': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'OUTLIER_COST', None, -1),
'DRG_8': ('leaf', None, 'IS', 'DRG_PAYOR', 'HL70229', -1),
'DRG_9': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'OUTLIER_REIMBURSEMENT', None, -1),
'DRG_10': ('leaf', None, 'ID', 'CONFIDENTIAL_INDICATOR', 'HL70136', -1),
'DRG_11': ('leaf', None, 'IS', 'DRG_TRANSFER_TYPE', 'HL70415', -1),
'DSC_1': ('leaf', None, 'ST', 'CONTINUATION_POINTER', None, -1),
'DSC_2': ('leaf', None, 'ID', 'CONTINUATION_STYLE', 'HL70398', -1),
'DSP_1': ('leaf', None, 'SI', 'SET_ID_DSP', None, -1),
'DSP_2': ('leaf', None, 'SI', 'DISPLAY_LEVEL', None, -1),
'DSP_3': ('leaf', None, 'TX', 'DATA_LINE', None, -1),
'DSP_4': ('leaf', None, 'ST', 'LOGICAL_BREAK_POINT', None, -1),
'DSP_5': ('leaf', None, 'TX', 'RESULT_ID', None, -1),
'ECD_1': ('leaf', None, 'NM', 'REFERENCE_COMMAND_NUMBER', None, -1),
'ECD_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REMOTE_CONTROL_COMMAND', 'HL70368', -1),
'ECD_3': ('leaf', None, 'ID', 'RESPONSE_REQUIRED', 'HL70136', -1),
'ECD_4': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'REQUESTED_COMPLETION_TIME', None, -1),
'ECD_5': ('leaf', None, 'TX', 'PARAMETERS', None, -1),
'ECR_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'COMMAND_RESPONSE', 'HL70387', -1),
'ECR_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_COMPLETED', None, -1),
'ECR_3': ('leaf', None, 'TX', 'COMMAND_RESPONSE_PARAMETERS', None, -1),
'EDU_1': ('leaf', None, 'SI', 'SET_ID_EDU', None, -1),
'EDU_2': ('leaf', None, 'IS', 'ACADEMIC_DEGREE', 'HL70360', -1),
'EDU_3': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'ACADEMIC_DEGREE_PROGRAM_DATE_RANGE', None, -1),
'EDU_4': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'ACADEMIC_DEGREE_PROGRAM_PARTICIPATION_DATE_RANGE', None, -1),
'EDU_5': ('leaf', None, 'DT', 'ACADEMIC_DEGREE_GRANTED_DATE', None, -1),
'EDU_6': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'SCHOOL', None, -1),
'EDU_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SCHOOL_TYPE_CODE', 'HL70402', -1),
'EDU_8': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'SCHOOL_ADDRESS', None, -1),
'EDU_9': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'MAJOR_FIELD_OF_STUDY', None, -1),
'EQL_1': ('leaf', None, 'ST', 'QUERY_TAG', None, -1),
'EQL_2': ('leaf', None, 'ID', 'QUERY_RESPONSE_FORMAT_CODE', 'HL70106', -1),
'EQL_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EQL_QUERY_NAME', None, -1),
'EQL_4': ('leaf', None, 'ST', 'EQL_QUERY_STATEMENT', None, -1),
'EQP_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EVENT_TYPE', 'HL70450', -1),
'EQP_2': ('leaf', None, 'ST', 'FILE_NAME', None, -1),
'EQP_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'START_DATE_TIME', None, -1),
'EQP_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'END_DATE_TIME', None, -1),
'EQP_5': ('leaf', None, 'FT', 'TRANSACTION_DATA', None, -1),
'EQU_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'EQUIPMENT_INSTANCE_IDENTIFIER', None, -1),
'EQU_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_DATE_TIME', None, -1),
'EQU_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EQUIPMENT_STATE', 'HL70365', -1),
'EQU_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCAL_REMOTE_CONTROL_STATE', 'HL70366', -1),
'EQU_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALERT_LEVEL', 'HL70367', -1),
'ERQ_1': ('leaf', None, 'ST', 'QUERY_TAG', None, -1),
'ERQ_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EVENT_IDENTIFIER', None, -1),
'ERQ_3': ('sequence', DATATYPES_STRUCTS['QIP'], 'QIP', 'INPUT_PARAMETER_LIST', None, -1),
'ERR_1': ('sequence', DATATYPES_STRUCTS['ELD'], 'ELD', 'ERROR_CODE_AND_LOCATION', None, -1),
'ERR_2': ('sequence', DATATYPES_STRUCTS['ERL'], 'ERL', 'ERROR_LOCATION', None, -1),
'ERR_3': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'HL7_ERROR_CODE', 'HL70357', -1),
'ERR_4': ('leaf', None, 'ID', 'SEVERITY', 'HL70516', -1),
'ERR_5': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'APPLICATION_ERROR_CODE', 'HL70533', -1),
'ERR_6': ('leaf', None, 'ST', 'APPLICATION_ERROR_PARAMETER', None, -1),
'ERR_7': ('leaf', None, 'TX', 'DIAGNOSTIC_INFORMATION', None, -1),
'ERR_8': ('leaf', None, 'TX', 'USER_MESSAGE', None, -1),
'ERR_9': ('leaf', None, 'IS', 'INFORM_PERSON_INDICATOR', 'HL70517', -1),
'ERR_10': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'OVERRIDE_TYPE', 'HL70518', -1),
'ERR_11': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'OVERRIDE_REASON_CODE', 'HL70519', -1),
'ERR_12': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'HELP_DESK_CONTACT_POINT', None, -1),
'EVN_1': ('leaf', None, 'ID', 'EVENT_TYPE_CODE', 'HL70003', -1),
'EVN_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'RECORDED_DATE_TIME', None, -1),
'EVN_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_PLANNED_EVENT', None, -1),
'EVN_4': ('leaf', None, 'IS', 'EVENT_REASON_CODE', 'HL70062', -1),
'EVN_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'OPERATOR_ID', 'HL70188', -1),
'EVN_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_OCCURRED', None, -1),
'EVN_7': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'EVENT_FACILITY', None, -1),
'FAC_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FACILITY_ID_FAC', None, -1),
'FAC_2': ('leaf', None, 'ID', 'FACILITY_TYPE', 'HL70331', -1),
'FAC_3': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'FACILITY_ADDRESS', None, -1),
'FAC_4': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'FACILITY_TELECOMMUNICATION', None, -1),
'FAC_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'CONTACT_PERSON', None, -1),
'FAC_6': ('leaf', None, 'ST', 'CONTACT_TITLE', None, -1),
'FAC_7': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'CONTACT_ADDRESS', None, -1),
'FAC_8': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CONTACT_TELECOMMUNICATION', None, -1),
'FAC_9': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'SIGNATURE_AUTHORITY', None, -1),
'FAC_10': ('leaf', None, 'ST', 'SIGNATURE_AUTHORITY_TITLE', None, -1),
'FAC_11': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'SIGNATURE_AUTHORITY_ADDRESS', None, -1),
'FAC_12': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'SIGNATURE_AUTHORITY_TELECOMMUNICATION', None, -1),
'FHS_1': ('leaf', None, 'ST', 'FILE_FIELD_SEPARATOR', None, -1),
'FHS_2': ('leaf', None, 'ST', 'FILE_ENCODING_CHARACTERS', None, -1),
'FHS_3': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'FILE_SENDING_APPLICATION', None, -1),
'FHS_4': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'FILE_SENDING_FACILITY', None, -1),
'FHS_5': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'FILE_RECEIVING_APPLICATION', None, -1),
'FHS_6': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'FILE_RECEIVING_FACILITY', None, -1),
'FHS_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'FILE_CREATION_DATE_TIME', None, -1),
'FHS_8': ('leaf', None, 'ST', 'FILE_SECURITY', None, -1),
'FHS_9': ('leaf', None, 'ST', 'FILE_NAME_ID', None, -1),
'FHS_10': ('leaf', None, 'ST', 'FILE_HEADER_COMMENT', None, -1),
'FHS_11': ('leaf', None, 'ST', 'FILE_CONTROL_ID', None, -1),
'FHS_12': ('leaf', None, 'ST', 'REFERENCE_FILE_CONTROL_ID', None, -1),
'FT1_1': ('leaf', None, 'SI', 'SET_ID_FT1', None, -1),
'FT1_2': ('leaf', None, 'ST', 'TRANSACTION_ID', None, -1),
'FT1_3': ('leaf', None, 'ST', 'TRANSACTION_BATCH_ID', None, -1),
'FT1_4': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'TRANSACTION_DATE', None, -1),
'FT1_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'TRANSACTION_POSTING_DATE', None, -1),
'FT1_6': ('leaf', None, 'IS', 'TRANSACTION_TYPE', 'HL70017', -1),
'FT1_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRANSACTION_CODE', 'HL70132', -1),
'FT1_8': ('leaf', None, 'ST', 'TRANSACTION_DESCRIPTION', None, -1),
'FT1_9': ('leaf', None, 'ST', 'TRANSACTION_DESCRIPTION_ALT', None, -1),
'FT1_10': ('leaf', None, 'NM', 'TRANSACTION_QUANTITY', None, -1),
'FT1_11': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'TRANSACTION_AMOUNT_EXTENDED', None, -1),
'FT1_12': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'TRANSACTION_AMOUNT_UNIT', None, -1),
'FT1_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DEPARTMENT_CODE', 'HL70049', -1),
'FT1_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INSURANCE_PLAN_ID', 'HL70072', -1),
'FT1_15': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'INSURANCE_AMOUNT', None, -1),
'FT1_16': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'ASSIGNED_PATIENT_LOCATION', None, -1),
'FT1_17': ('leaf', None, 'IS', 'FEE_SCHEDULE', 'HL70024', -1),
'FT1_18': ('leaf', None, 'IS', 'PATIENT_TYPE', 'HL70018', -1),
'FT1_19': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DIAGNOSIS_CODE_FT1', 'HL70051', -1),
'FT1_20': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PERFORMED_BY_CODE', 'HL70084', -1),
'FT1_21': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ORDERED_BY_CODE', None, -1),
'FT1_22': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'UNIT_COST', None, -1),
'FT1_23': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_ORDER_NUMBER', None, -1),
'FT1_24': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ENTERED_BY_CODE', None, -1),
'FT1_25': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE', 'HL70088', -1),
'FT1_26': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE_MODIFIER', 'HL70340', -1),
'FT1_27': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADVANCED_BENEFICIARY_NOTICE_CODE', 'HL70339', -1),
'FT1_28': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE',
'MEDICALLY_NECESSARY_DUPLICATE_PROCEDURE_REASON', 'HL70476', -1),
'FT1_29': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'NDC_CODE', 'HL70549', -1),
'FT1_30': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PAYMENT_REFERENCE_ID', None, -1),
'FT1_31': ('leaf', None, 'SI', 'TRANSACTION_REFERENCE_KEY', None, -1),
'FTS_1': ('leaf', None, 'NM', 'FILE_BATCH_COUNT', None, -1),
'FTS_2': ('leaf', None, 'ST', 'FILE_TRAILER_COMMENT', None, -1),
'GOL_1': ('leaf', None, 'ID', 'ACTION_CODE', 'HL70287', -1),
'GOL_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ACTION_DATE_TIME', None, -1),
'GOL_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GOAL_ID', None, -1),
'GOL_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'GOAL_INSTANCE_ID', None, -1),
'GOL_5': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'EPISODE_OF_CARE_ID', None, -1),
'GOL_6': ('leaf', None, 'NM', 'GOAL_LIST_PRIORITY', None, -1),
'GOL_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'GOAL_ESTABLISHED_DATE_TIME', None, -1),
'GOL_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPECTED_GOAL_ACHIEVE_DATE_TIME', None, -1),
'GOL_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GOAL_CLASSIFICATION', None, -1),
'GOL_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GOAL_MANAGEMENT_DISCIPLINE', None, -1),
'GOL_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CURRENT_GOAL_REVIEW_STATUS', None, -1),
'GOL_12': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CURRENT_GOAL_REVIEW_DATE_TIME', None, -1),
'GOL_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'NEXT_GOAL_REVIEW_DATE_TIME', None, -1),
'GOL_14': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PREVIOUS_GOAL_REVIEW_DATE_TIME', None, -1),
'GOL_15': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'GOAL_REVIEW_INTERVAL', None, -1),
'GOL_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GOAL_EVALUATION', None, -1),
'GOL_17': ('leaf', None, 'ST', 'GOAL_EVALUATION_COMMENT', None, -1),
'GOL_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GOAL_LIFE_CYCLE_STATUS', None, -1),
'GOL_19': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'GOAL_LIFE_CYCLE_STATUS_DATE_TIME', None, -1),
'GOL_20': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GOAL_TARGET_TYPE', None, -1),
'GOL_21': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'GOAL_TARGET_NAME', None, -1),
'GP1_1': ('leaf', None, 'IS', 'TYPE_OF_BILL_CODE', 'HL70455', -1),
'GP1_2': ('leaf', None, 'IS', 'REVENUE_CODE', 'HL70456', -1),
'GP1_3': ('leaf', None, 'IS', 'OVERALL_CLAIM_DISPOSITION_CODE', 'HL70457', -1),
'GP1_4': ('leaf', None, 'IS', 'OCE_EDITS_PER_VISIT_CODE', 'HL70458', -1),
'GP1_5': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'OUTLIER_COST', None, -1),
'GP2_1': ('leaf', None, 'IS', 'REVENUE_CODE', 'HL70456', -1),
'GP2_2': ('leaf', None, 'NM', 'NUMBER_OF_SERVICE_UNITS', None, -1),
'GP2_3': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'CHARGE', None, -1),
'GP2_4': ('leaf', None, 'IS', 'REIMBURSEMENT_ACTION_CODE', 'HL70459', -1),
'GP2_5': ('leaf', None, 'IS', 'DENIAL_OR_REJECTION_CODE', 'HL70460', -1),
'GP2_6': ('leaf', None, 'IS', 'OCE_EDIT_CODE', 'HL70458', -1),
'GP2_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'AMBULATORY_PAYMENT_CLASSIFICATION_CODE', 'HL70466', -1),
'GP2_8': ('leaf', None, 'IS', 'MODIFIER_EDIT_CODE', 'HL70467', -1),
'GP2_9': ('leaf', None, 'IS', 'PAYMENT_ADJUSTMENT_CODE', 'HL70468', -1),
'GP2_10': ('leaf', None, 'IS', 'PACKAGING_STATUS_CODE', 'HL70469', -1),
'GP2_11': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'EXPECTED_CMS_PAYMENT_AMOUNT', None, -1),
'GP2_12': ('leaf', None, 'IS', 'REIMBURSEMENT_TYPE_CODE', 'HL70470', -1),
'GP2_13': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'CO_PAY_AMOUNT', None, -1),
'GP2_14': ('leaf', None, 'NM', 'PAY_RATE_PER_SERVICE_UNIT', None, -1),
'GT1_1': ('leaf', None, 'SI', 'SET_ID_GT1', None, -1),
'GT1_2': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'GUARANTOR_NUMBER', None, -1),
'GT1_3': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'GUARANTOR_NAME', None, -1),
'GT1_4': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'GUARANTOR_SPOUSE_NAME', None, -1),
'GT1_5': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'GUARANTOR_ADDRESS', None, -1),
'GT1_6': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'GUARANTOR_PH_NUM_HOME', None, -1),
'GT1_7': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'GUARANTOR_PH_NUM_BUSINESS', None, -1),
'GT1_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'GUARANTOR_DATE_TIME_OF_BIRTH', None, -1),
'GT1_9': ('leaf', None, 'IS', 'GUARANTOR_ADMINISTRATIVE_SEX', 'HL70001', -1),
'GT1_10': ('leaf', None, 'IS', 'GUARANTOR_TYPE', 'HL70068', -1),
'GT1_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GUARANTOR_RELATIONSHIP', 'HL70063', -1),
'GT1_12': ('leaf', None, 'ST', 'GUARANTOR_SSN', None, -1),
'GT1_13': ('leaf', None, 'DT', 'GUARANTOR_DATE_BEGIN', None, -1),
'GT1_14': ('leaf', None, 'DT', 'GUARANTOR_DATE_END', None, -1),
'GT1_15': ('leaf', None, 'NM', 'GUARANTOR_PRIORITY', None, -1),
'GT1_16': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'GUARANTOR_EMPLOYER_NAME', None, -1),
'GT1_17': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'GUARANTOR_EMPLOYER_ADDRESS', None, -1),
'GT1_18': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'GUARANTOR_EMPLOYER_PHONE_NUMBER', None, -1),
'GT1_19': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'GUARANTOR_EMPLOYEE_ID_NUMBER', None, -1),
'GT1_20': ('leaf', None, 'IS', 'GUARANTOR_EMPLOYMENT_STATUS', 'HL70066', -1),
'GT1_21': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'GUARANTOR_ORGANIZATION_NAME', None, -1),
'GT1_22': ('leaf', None, 'ID', 'GUARANTOR_BILLING_HOLD_FLAG', 'HL70136', -1),
'GT1_23': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GUARANTOR_CREDIT_RATING_CODE', 'HL70341', -1),
'GT1_24': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'GUARANTOR_DEATH_DATE_AND_TIME', None, -1),
'GT1_25': ('leaf', None, 'ID', 'GUARANTOR_DEATH_FLAG', 'HL70136', -1),
'GT1_26': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GUARANTOR_CHARGE_ADJUSTMENT_CODE', 'HL70218', -1),
'GT1_27': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'GUARANTOR_HOUSEHOLD_ANNUAL_INCOME', None, -1),
'GT1_28': ('leaf', None, 'NM', 'GUARANTOR_HOUSEHOLD_SIZE', None, -1),
'GT1_29': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'GUARANTOR_EMPLOYER_ID_NUMBER', None, -1),
'GT1_30': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GUARANTOR_MARITAL_STATUS_CODE', 'HL70002', -1),
'GT1_31': ('leaf', None, 'DT', 'GUARANTOR_HIRE_EFFECTIVE_DATE', None, -1),
'GT1_32': ('leaf', None, 'DT', 'EMPLOYMENT_STOP_DATE', None, -1),
'GT1_33': ('leaf', None, 'IS', 'LIVING_DEPENDENCY', 'HL70223', -1),
'GT1_34': ('leaf', None, 'IS', 'AMBULATORY_STATUS', 'HL70009', -1),
'GT1_35': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CITIZENSHIP', 'HL70171', -1),
'GT1_36': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_LANGUAGE', 'HL70296', -1),
'GT1_37': ('leaf', None, 'IS', 'LIVING_ARRANGEMENT', 'HL70220', -1),
'GT1_38': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PUBLICITY_CODE', 'HL70215', -1),
'GT1_39': ('leaf', None, 'ID', 'PROTECTION_INDICATOR', 'HL70136', -1),
'GT1_40': ('leaf', None, 'IS', 'STUDENT_INDICATOR', 'HL70231', -1),
'GT1_41': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RELIGION', 'HL70006', -1),
'GT1_42': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'MOTHER_S_MAIDEN_NAME', None, -1),
'GT1_43': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NATIONALITY', 'HL70212', -1),
'GT1_44': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ETHNIC_GROUP', 'HL70189', -1),
'GT1_45': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'CONTACT_PERSON_S_NAME', None, -1),
'GT1_46': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CONTACT_PERSON_S_TELEPHONE_NUMBER', None, -1),
'GT1_47': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTACT_REASON', 'HL70222', -1),
'GT1_48': ('leaf', None, 'IS', 'CONTACT_RELATIONSHIP', 'HL70063', -1),
'GT1_49': ('leaf', None, 'ST', 'JOB_TITLE', None, -1),
'GT1_50': ('sequence', DATATYPES_STRUCTS['JCC'], 'JCC', 'JOB_CODE_CLASS', None, -1),
'GT1_51': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'GUARANTOR_EMPLOYER_S_ORGANIZATION_NAME', None, -1),
'GT1_52': ('leaf', None, 'IS', 'HANDICAP', 'HL70295', -1),
'GT1_53': ('leaf', None, 'IS', 'JOB_STATUS', 'HL70311', -1),
'GT1_54': ('sequence', DATATYPES_STRUCTS['FC'], 'FC', 'GUARANTOR_FINANCIAL_CLASS', None, -1),
'GT1_55': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GUARANTOR_RACE', 'HL70005', -1),
'GT1_56': ('leaf', None, 'ST', 'GUARANTOR_BIRTH_PLACE', None, -1),
'GT1_57': ('leaf', None, 'IS', 'VIP_INDICATOR', 'HL70099', -1),
'IAM_1': ('leaf', None, 'SI', 'SET_ID_IAM', None, -1),
'IAM_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGEN_TYPE_CODE', 'HL70127', -1),
'IAM_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGEN_CODE_MNEMONIC_DESCRIPTION', None, -1),
'IAM_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGY_SEVERITY_CODE', 'HL70128', -1),
'IAM_5': ('leaf', None, 'ST', 'ALLERGY_REACTION_CODE', None, -1),
'IAM_6': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'ALLERGY_ACTION_CODE', 'HL70323', -1),
'IAM_7': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'ALLERGY_UNIQUE_IDENTIFIER', None, -1),
'IAM_8': ('leaf', None, 'ST', 'ACTION_REASON', None, -1),
'IAM_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SENSITIVITY_TO_CAUSATIVE_AGENT_CODE', 'HL70436', -1),
'IAM_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGEN_GROUP_CODE_MNEMONIC_DESCRIPTION', None, -1),
'IAM_11': ('leaf', None, 'DT', 'ONSET_DATE', None, -1),
'IAM_12': ('leaf', None, 'ST', 'ONSET_DATE_TEXT', None, -1),
'IAM_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'REPORTED_DATE_TIME', None, -1),
'IAM_14': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'REPORTED_BY', None, -1),
'IAM_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RELATIONSHIP_TO_PATIENT_CODE', 'HL70063', -1),
'IAM_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALERT_DEVICE_CODE', 'HL70437', -1),
'IAM_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALLERGY_CLINICAL_STATUS_CODE', 'HL70438', -1),
'IAM_18': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'STATUSED_BY_PERSON', None, -1),
'IAM_19': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'STATUSED_BY_ORGANIZATION', None, -1),
'IAM_20': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'STATUSED_AT_DATE_TIME', None, -1),
'IIM_1': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'PRIMARY_KEY_VALUE_IIM', None, -1),
'IIM_2': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SERVICE_ITEM_CODE', None, -1),
'IIM_3': ('leaf', None, 'ST', 'INVENTORY_LOT_NUMBER', None, -1),
'IIM_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'INVENTORY_EXPIRATION_DATE', None, -1),
'IIM_5': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'INVENTORY_MANUFACTURER_NAME', None, -1),
'IIM_6': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'INVENTORY_LOCATION', None, -1),
'IIM_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'INVENTORY_RECEIVED_DATE', None, -1),
'IIM_8': ('leaf', None, 'NM', 'INVENTORY_RECEIVED_QUANTITY', None, -1),
'IIM_9': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'INVENTORY_RECEIVED_QUANTITY_UNIT', None, -1),
'IIM_10': ('sequence', DATATYPES_STRUCTS['MO'], 'MO', 'INVENTORY_RECEIVED_ITEM_COST', None, -1),
'IIM_11': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'INVENTORY_ON_HAND_DATE', None, -1),
'IIM_12': ('leaf', None, 'NM', 'INVENTORY_ON_HAND_QUANTITY', None, -1),
'IIM_13': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'INVENTORY_ON_HAND_QUANTITY_UNIT', None, -1),
'IIM_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE', 'HL70088', -1),
'IIM_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE_MODIFIER', 'HL70340', -1),
'IN1_1': ('leaf', None, 'SI', 'SET_ID_IN1', None, -1),
'IN1_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INSURANCE_PLAN_ID', 'HL70072', -1),
'IN1_3': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'INSURANCE_COMPANY_ID', None, -1),
'IN1_4': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'INSURANCE_COMPANY_NAME', None, -1),
'IN1_5': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'INSURANCE_COMPANY_ADDRESS', None, -1),
'IN1_6': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'INSURANCE_CO_CONTACT_PERSON', None, -1),
'IN1_7': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'INSURANCE_CO_PHONE_NUMBER', None, -1),
'IN1_8': ('leaf', None, 'ST', 'GROUP_NUMBER', None, -1),
'IN1_9': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'GROUP_NAME', None, -1),
'IN1_10': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'INSURED_S_GROUP_EMP_ID', None, -1),
'IN1_11': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'INSURED_S_GROUP_EMP_NAME', None, -1),
'IN1_12': ('leaf', None, 'DT', 'PLAN_EFFECTIVE_DATE', None, -1),
'IN1_13': ('leaf', None, 'DT', 'PLAN_EXPIRATION_DATE', None, -1),
'IN1_14': ('sequence', DATATYPES_STRUCTS['AUI'], 'AUI', 'AUTHORIZATION_INFORMATION', None, -1),
'IN1_15': ('leaf', None, 'IS', 'PLAN_TYPE', 'HL70086', -1),
'IN1_16': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'NAME_OF_INSURED', None, -1),
'IN1_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INSURED_S_RELATIONSHIP_TO_PATIENT', 'HL70063', -1),
'IN1_18': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'INSURED_S_DATE_OF_BIRTH', None, -1),
'IN1_19': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'INSURED_S_ADDRESS', None, -1),
'IN1_20': ('leaf', None, 'IS', 'ASSIGNMENT_OF_BENEFITS', 'HL70135', -1),
'IN1_21': ('leaf', None, 'IS', 'COORDINATION_OF_BENEFITS', 'HL70173', -1),
'IN1_22': ('leaf', None, 'ST', 'COORD_OF_BEN_PRIORITY', None, -1),
'IN1_23': ('leaf', None, 'ID', 'NOTICE_OF_ADMISSION_FLAG', 'HL70136', -1),
'IN1_24': ('leaf', None, 'DT', 'NOTICE_OF_ADMISSION_DATE', None, -1),
'IN1_25': ('leaf', None, 'ID', 'REPORT_OF_ELIGIBILITY_FLAG', 'HL70136', -1),
'IN1_26': ('leaf', None, 'DT', 'REPORT_OF_ELIGIBILITY_DATE', None, -1),
'IN1_27': ('leaf', None, 'IS', 'RELEASE_INFORMATION_CODE', 'HL70093', -1),
'IN1_28': ('leaf', None, 'ST', 'PRE_ADMIT_CERT_PAC', None, -1),
'IN1_29': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'VERIFICATION_DATE_TIME', None, -1),
'IN1_30': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'VERIFICATION_BY', None, -1),
'IN1_31': ('leaf', None, 'IS', 'TYPE_OF_AGREEMENT_CODE', 'HL70098', -1),
'IN1_32': ('leaf', None, 'IS', 'BILLING_STATUS', 'HL70022', -1),
'IN1_33': ('leaf', None, 'NM', 'LIFETIME_RESERVE_DAYS', None, -1),
'IN1_34': ('leaf', None, 'NM', 'DELAY_BEFORE_L_R_DAY', None, -1),
'IN1_35': ('leaf', None, 'IS', 'COMPANY_PLAN_CODE', 'HL70042', -1),
'IN1_36': ('leaf', None, 'ST', 'POLICY_NUMBER', None, -1),
'IN1_37': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'POLICY_DEDUCTIBLE', None, -1),
'IN1_38': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'POLICY_LIMIT_AMOUNT', None, -1),
'IN1_39': ('leaf', None, 'NM', 'POLICY_LIMIT_DAYS', None, -1),
'IN1_40': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'ROOM_RATE_SEMI_PRIVATE', None, -1),
'IN1_41': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'ROOM_RATE_PRIVATE', None, -1),
'IN1_42': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INSURED_S_EMPLOYMENT_STATUS', 'HL70066', -1),
'IN1_43': ('leaf', None, 'IS', 'INSURED_S_ADMINISTRATIVE_SEX', 'HL70001', -1),
'IN1_44': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'INSURED_S_EMPLOYER_S_ADDRESS', None, -1),
'IN1_45': ('leaf', None, 'ST', 'VERIFICATION_STATUS', None, -1),
'IN1_46': ('leaf', None, 'IS', 'PRIOR_INSURANCE_PLAN_ID', 'HL70072', -1),
'IN1_47': ('leaf', None, 'IS', 'COVERAGE_TYPE', 'HL70309', -1),
'IN1_48': ('leaf', None, 'IS', 'HANDICAP', 'HL70295', -1),
'IN1_49': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'INSURED_S_ID_NUMBER', None, -1),
'IN1_50': ('leaf', None, 'IS', 'SIGNATURE_CODE', 'HL70535', -1),
'IN1_51': ('leaf', None, 'DT', 'SIGNATURE_CODE_DATE', None, -1),
'IN1_52': ('leaf', None, 'ST', 'INSURED_S_BIRTH_PLACE', None, -1),
'IN1_53': ('leaf', None, 'IS', 'VIP_INDICATOR', 'HL70099', -1),
'IN2_1': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'INSURED_S_EMPLOYEE_ID', None, -1),
'IN2_2': ('leaf', None, 'ST', 'INSURED_S_SOCIAL_SECURITY_NUMBER', None, -1),
'IN2_3': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'INSURED_S_EMPLOYER_S_NAME_AND_ID', None, -1),
'IN2_4': ('leaf', None, 'IS', 'EMPLOYER_INFORMATION_DATA', 'HL70139', -1),
'IN2_5': ('leaf', None, 'IS', 'MAIL_CLAIM_PARTY', 'HL70137', -1),
'IN2_6': ('leaf', None, 'ST', 'MEDICARE_HEALTH_INS_CARD_NUMBER', None, -1),
'IN2_7': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'MEDICAID_CASE_NAME', None, -1),
'IN2_8': ('leaf', None, 'ST', 'MEDICAID_CASE_NUMBER', None, -1),
'IN2_9': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'MILITARY_SPONSOR_NAME', None, -1),
'IN2_10': ('leaf', None, 'ST', 'MILITARY_ID_NUMBER', None, -1),
'IN2_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DEPENDENT_OF_MILITARY_RECIPIENT', 'HL70342', -1),
'IN2_12': ('leaf', None, 'ST', 'MILITARY_ORGANIZATION', None, -1),
'IN2_13': ('leaf', None, 'ST', 'MILITARY_STATION', None, -1),
'IN2_14': ('leaf', None, 'IS', 'MILITARY_SERVICE', 'HL70140', -1),
'IN2_15': ('leaf', None, 'IS', 'MILITARY_RANK_GRADE', 'HL70141', -1),
'IN2_16': ('leaf', None, 'IS', 'MILITARY_STATUS', 'HL70142', -1),
'IN2_17': ('leaf', None, 'DT', 'MILITARY_RETIRE_DATE', None, -1),
'IN2_18': ('leaf', None, 'ID', 'MILITARY_NON_AVAIL_CERT_ON_FILE', 'HL70136', -1),
'IN2_19': ('leaf', None, 'ID', 'BABY_COVERAGE', 'HL70136', -1),
'IN2_20': ('leaf', None, 'ID', 'COMBINE_BABY_BILL', 'HL70136', -1),
'IN2_21': ('leaf', None, 'ST', 'BLOOD_DEDUCTIBLE', None, -1),
'IN2_22': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'SPECIAL_COVERAGE_APPROVAL_NAME', None, -1),
'IN2_23': ('leaf', None, 'ST', 'SPECIAL_COVERAGE_APPROVAL_TITLE', None, -1),
'IN2_24': ('leaf', None, 'IS', 'NON_COVERED_INSURANCE_CODE', 'HL70143', -1),
'IN2_25': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PAYOR_ID', None, -1),
'IN2_26': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PAYOR_SUBSCRIBER_ID', None, -1),
'IN2_27': ('leaf', None, 'IS', 'ELIGIBILITY_SOURCE', 'HL70144', -1),
'IN2_28': ('sequence', DATATYPES_STRUCTS['RMC'], 'RMC', 'ROOM_COVERAGE_TYPE_AMOUNT', None, -1),
'IN2_29': ('sequence', DATATYPES_STRUCTS['PTA'], 'PTA', 'POLICY_TYPE_AMOUNT', None, -1),
'IN2_30': ('sequence', DATATYPES_STRUCTS['DDI'], 'DDI', 'DAILY_DEDUCTIBLE', None, -1),
'IN2_31': ('leaf', None, 'IS', 'LIVING_DEPENDENCY', 'HL70223', -1),
'IN2_32': ('leaf', None, 'IS', 'AMBULATORY_STATUS', 'HL70009', -1),
'IN2_33': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CITIZENSHIP', 'HL70171', -1),
'IN2_34': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_LANGUAGE', 'HL70296', -1),
'IN2_35': ('leaf', None, 'IS', 'LIVING_ARRANGEMENT', 'HL70220', -1),
'IN2_36': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PUBLICITY_CODE', 'HL70215', -1),
'IN2_37': ('leaf', None, 'ID', 'PROTECTION_INDICATOR', 'HL70136', -1),
'IN2_38': ('leaf', None, 'IS', 'STUDENT_INDICATOR', 'HL70231', -1),
'IN2_39': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RELIGION', 'HL70006', -1),
'IN2_40': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'MOTHER_S_MAIDEN_NAME', None, -1),
'IN2_41': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NATIONALITY', 'HL70212', -1),
'IN2_42': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ETHNIC_GROUP', 'HL70189', -1),
'IN2_43': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MARITAL_STATUS', 'HL70002', -1),
'IN2_44': ('leaf', None, 'DT', 'INSURED_S_EMPLOYMENT_START_DATE', None, -1),
'IN2_45': ('leaf', None, 'DT', 'EMPLOYMENT_STOP_DATE', None, -1),
'IN2_46': ('leaf', None, 'ST', 'JOB_TITLE', None, -1),
'IN2_47': ('sequence', DATATYPES_STRUCTS['JCC'], 'JCC', 'JOB_CODE_CLASS', None, -1),
'IN2_48': ('leaf', None, 'IS', 'JOB_STATUS', 'HL70311', -1),
'IN2_49': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'EMPLOYER_CONTACT_PERSON_NAME', None, -1),
'IN2_50': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'EMPLOYER_CONTACT_PERSON_PHONE_NUMBER', None, -1),
'IN2_51': ('leaf', None, 'IS', 'EMPLOYER_CONTACT_REASON', 'HL70222', -1),
'IN2_52': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'INSURED_S_CONTACT_PERSON_S_NAME', None, -1),
'IN2_53': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'INSURED_S_CONTACT_PERSON_PHONE_NUMBER', None, -1),
'IN2_54': ('leaf', None, 'IS', 'INSURED_S_CONTACT_PERSON_REASON', 'HL70222', -1),
'IN2_55': ('leaf', None, 'DT', 'RELATIONSHIP_TO_THE_PATIENT_START_DATE', None, -1),
'IN2_56': ('leaf', None, 'DT', 'RELATIONSHIP_TO_THE_PATIENT_STOP_DATE', None, -1),
'IN2_57': ('leaf', None, 'IS', 'INSURANCE_CO_CONTACT_REASON', 'HL70232', -1),
'IN2_58': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'INSURANCE_CO_CONTACT_PHONE_NUMBER', None, -1),
'IN2_59': ('leaf', None, 'IS', 'POLICY_SCOPE', 'HL70312', -1),
'IN2_60': ('leaf', None, 'IS', 'POLICY_SOURCE', 'HL70313', -1),
'IN2_61': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PATIENT_MEMBER_NUMBER', None, -1),
'IN2_62': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GUARANTOR_S_RELATIONSHIP_TO_INSURED', 'HL70063', -1),
'IN2_63': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'INSURED_S_PHONE_NUMBER_HOME', None, -1),
'IN2_64': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'INSURED_S_EMPLOYER_PHONE_NUMBER', None, -1),
'IN2_65': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MILITARY_HANDICAPPED_PROGRAM', 'HL70343', -1),
'IN2_66': ('leaf', None, 'ID', 'SUSPEND_FLAG', 'HL70136', -1),
'IN2_67': ('leaf', None, 'ID', 'COPAY_LIMIT_FLAG', 'HL70136', -1),
'IN2_68': ('leaf', None, 'ID', 'STOPLOSS_LIMIT_FLAG', 'HL70136', -1),
'IN2_69': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'INSURED_ORGANIZATION_NAME_AND_ID', None, -1),
'IN2_70': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'INSURED_EMPLOYER_ORGANIZATION_NAME_AND_ID', None, -1),
'IN2_71': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RACE', 'HL70005', -1),
'IN2_72': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CMS_PATIENT_S_RELATIONSHIP_TO_INSURED', 'HL70344', -1),
'IN3_1': ('leaf', None, 'SI', 'SET_ID_IN3', None, -1),
'IN3_2': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'CERTIFICATION_NUMBER', None, -1),
'IN3_3': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'CERTIFIED_BY', None, -1),
'IN3_4': ('leaf', None, 'ID', 'CERTIFICATION_REQUIRED', 'HL70136', -1),
'IN3_5': ('sequence', DATATYPES_STRUCTS['MOP'], 'MOP', 'PENALTY', None, -1),
'IN3_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CERTIFICATION_DATE_TIME', None, -1),
'IN3_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CERTIFICATION_MODIFY_DATE_TIME', None, -1),
'IN3_8': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'OPERATOR', None, -1),
'IN3_9': ('leaf', None, 'DT', 'CERTIFICATION_BEGIN_DATE', None, -1),
'IN3_10': ('leaf', None, 'DT', 'CERTIFICATION_END_DATE', None, -1),
'IN3_11': ('sequence', DATATYPES_STRUCTS['DTN'], 'DTN', 'DAYS', None, -1),
'IN3_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NON_CONCUR_CODE_DESCRIPTION', 'HL70233', -1),
'IN3_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'NON_CONCUR_EFFECTIVE_DATE_TIME', None, -1),
'IN3_14': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PHYSICIAN_REVIEWER', 'HL70010', -1),
'IN3_15': ('leaf', None, 'ST', 'CERTIFICATION_CONTACT', None, -1),
'IN3_16': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CERTIFICATION_CONTACT_PHONE_NUMBER', None, -1),
'IN3_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPEAL_REASON', 'HL70345', -1),
'IN3_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CERTIFICATION_AGENCY', 'HL70346', -1),
'IN3_19': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CERTIFICATION_AGENCY_PHONE_NUMBER', None, -1),
'IN3_20': ('sequence', DATATYPES_STRUCTS['ICD'], 'ICD', 'PRE_CERTIFICATION_REQUIREMENT', None, -1),
'IN3_21': ('leaf', None, 'ST', 'CASE_MANAGER', None, -1),
'IN3_22': ('leaf', None, 'DT', 'SECOND_OPINION_DATE', None, -1),
'IN3_23': ('leaf', None, 'IS', 'SECOND_OPINION_STATUS', 'HL70151', -1),
'IN3_24': ('leaf', None, 'IS', 'SECOND_OPINION_DOCUMENTATION_RECEIVED', 'HL70152', -1),
'IN3_25': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'SECOND_OPINION_PHYSICIAN', 'HL70010', -1),
'INV_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_IDENTIFIER', 'HL70451', -1),
'INV_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_STATUS', 'HL70383', -1),
'INV_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_TYPE', 'HL70384', -1),
'INV_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INVENTORY_CONTAINER_IDENTIFIER', None, -1),
'INV_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTAINER_CARRIER_IDENTIFIER', None, -1),
'INV_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'POSITION_ON_CARRIER', None, -1),
'INV_7': ('leaf', None, 'NM', 'INITIAL_QUANTITY', None, -1),
'INV_8': ('leaf', None, 'NM', 'CURRENT_QUANTITY', None, -1),
'INV_9': ('leaf', None, 'NM', 'AVAILABLE_QUANTITY', None, -1),
'INV_10': ('leaf', None, 'NM', 'CONSUMPTION_QUANTITY', None, -1),
'INV_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'QUANTITY_UNITS', None, -1),
'INV_12': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPIRATION_DATE_TIME', None, -1),
'INV_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'FIRST_USED_DATE_TIME', None, -1),
'INV_14': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'ON_BOARD_STABILITY_DURATION', None, -1),
'INV_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TEST_FLUID_IDENTIFIER_S', None, -1),
'INV_16': ('leaf', None, 'ST', 'MANUFACTURER_LOT_NUMBER', None, -1),
'INV_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MANUFACTURER_IDENTIFIER', 'HL70385', -1),
'INV_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUPPLIER_IDENTIFIER', 'HL70386', -1),
'INV_19': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'ON_BOARD_STABILITY_TIME', None, -1),
'INV_20': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'TARGET_VALUE', None, -1),
'IPC_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'ACCESSION_IDENTIFIER', None, -1),
'IPC_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'REQUESTED_PROCEDURE_ID', None, -1),
'IPC_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'STUDY_INSTANCE_UID', None, -1),
'IPC_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SCHEDULED_PROCEDURE_STEP_ID', None, -1),
'IPC_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MODALITY', None, -1),
'IPC_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROTOCOL_CODE', None, -1),
'IPC_7': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SCHEDULED_STATION_NAME', None, -1),
'IPC_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SCHEDULED_PROCEDURE_STEP_LOCATION', None, -1),
'IPC_9': ('leaf', None, 'ST', 'SCHEDULED_AE_TITLE', None, -1),
'ISD_1': ('leaf', None, 'NM', 'REFERENCE_INTERACTION_NUMBER_UNIQUE_IDENTIFIER', None, -1),
'ISD_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INTERACTION_TYPE_IDENTIFIER', 'HL70368', -1),
'ISD_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INTERACTION_ACTIVE_STATE', 'HL70387', -1),
'LAN_1': ('leaf', None, 'SI', 'SET_ID_LAN', None, -1),
'LAN_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LANGUAGE_CODE', 'HL70296', -1),
'LAN_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LANGUAGE_ABILITY_CODE', 'HL70403', -1),
'LAN_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LANGUAGE_PROFICIENCY_CODE', 'HL70404', -1),
'LCC_1': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIMARY_KEY_VALUE_LCC', None, -1),
'LCC_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_DEPARTMENT', 'HL70264', -1),
'LCC_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ACCOMMODATION_TYPE', 'HL70129', -1),
'LCC_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CHARGE_CODE', 'HL70132', -1),
'LCH_1': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIMARY_KEY_VALUE_LCH', None, -1),
'LCH_2': ('leaf', None, 'ID', 'SEGMENT_ACTION_CODE', 'HL70206', -1),
'LCH_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SEGMENT_UNIQUE_KEY', None, -1),
'LCH_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_CHARACTERISTIC_ID', 'HL70324', -1),
'LCH_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_CHARACTERISTIC_VALUE_LCH', 'HL70136', -1),
'LDP_1': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIMARY_KEY_VALUE_LDP', None, -1),
'LDP_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_DEPARTMENT', 'HL70264', -1),
'LDP_3': ('leaf', None, 'IS', 'LOCATION_SERVICE', 'HL70069', -1),
'LDP_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SPECIALTY_TYPE', 'HL70265', -1),
'LDP_5': ('leaf', None, 'IS', 'VALID_PATIENT_CLASSES', 'HL70004', -1),
'LDP_6': ('leaf', None, 'ID', 'ACTIVE_INACTIVE_FLAG', 'HL70183', -1),
'LDP_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ACTIVATION_DATE_LDP', None, -1),
'LDP_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'INACTIVATION_DATE_LDP', None, -1),
'LDP_9': ('leaf', None, 'ST', 'INACTIVATED_REASON', None, -1),
'LDP_10': ('sequence', DATATYPES_STRUCTS['VH'], 'VH', 'VISITING_HOURS', 'HL70267', -1),
'LDP_11': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CONTACT_PHONE', None, -1),
'LDP_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_COST_CENTER', 'HL70462', -1),
'LOC_1': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIMARY_KEY_VALUE_LOC', None, -1),
'LOC_2': ('leaf', None, 'ST', 'LOCATION_DESCRIPTION', None, -1),
'LOC_3': ('leaf', None, 'IS', 'LOCATION_TYPE_LOC', 'HL70260', -1),
'LOC_4': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'ORGANIZATION_NAME_LOC', None, -1),
'LOC_5': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'LOCATION_ADDRESS', None, -1),
'LOC_6': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'LOCATION_PHONE', None, -1),
'LOC_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LICENSE_NUMBER', 'HL70461', -1),
'LOC_8': ('leaf', None, 'IS', 'LOCATION_EQUIPMENT', 'HL70261', -1),
'LOC_9': ('leaf', None, 'IS', 'LOCATION_SERVICE_CODE', 'HL70442', -1),
'LRL_1': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIMARY_KEY_VALUE_LRL', None, -1),
'LRL_2': ('leaf', None, 'ID', 'SEGMENT_ACTION_CODE', 'HL70206', -1),
'LRL_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SEGMENT_UNIQUE_KEY', None, -1),
'LRL_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION_RELATIONSHIP_ID', 'HL70325', -1),
'LRL_5': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'ORGANIZATIONAL_LOCATION_RELATIONSHIP_VALUE', None, -1),
'LRL_6': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PATIENT_LOCATION_RELATIONSHIP_VALUE', None, -1),
'MFA_1': ('leaf', None, 'ID', 'RECORD_LEVEL_EVENT_CODE', 'HL70180', -1),
'MFA_2': ('leaf', None, 'ST', 'MFN_CONTROL_ID', None, -1),
'MFA_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_COMPLETION_DATE_TIME', None, -1),
'MFA_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MFN_RECORD_LEVEL_ERROR_RETURN', 'HL70181', -1),
'MFA_5': ('leaf', None, 'varies', 'PRIMARY_KEY_VALUE_MFA', 'HL79999', -1),
'MFA_6': ('leaf', None, 'ID', 'PRIMARY_KEY_VALUE_TYPE_MFA', 'HL70355', -1),
'MFE_1': ('leaf', None, 'ID', 'RECORD_LEVEL_EVENT_CODE', 'HL70180', -1),
'MFE_2': ('leaf', None, 'ST', 'MFN_CONTROL_ID', None, -1),
'MFE_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_DATE_TIME', None, -1),
'MFE_4': ('leaf', None, 'varies', 'PRIMARY_KEY_VALUE_MFE', 'HL79999', -1),
'MFE_5': ('leaf', None, 'ID', 'PRIMARY_KEY_VALUE_TYPE', 'HL70355', -1),
'MFI_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MASTER_FILE_IDENTIFIER', 'HL70175', -1),
'MFI_2': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'MASTER_FILE_APPLICATION_IDENTIFIER', 'HL70361', -1),
'MFI_3': ('leaf', None, 'ID', 'FILE_LEVEL_EVENT_CODE', 'HL70178', -1),
'MFI_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ENTERED_DATE_TIME', None, -1),
'MFI_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_DATE_TIME', None, -1),
'MFI_6': ('leaf', None, 'ID', 'RESPONSE_LEVEL_CODE', 'HL70179', -1),
'MRG_1': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PRIOR_PATIENT_IDENTIFIER_LIST', None, -1),
'MRG_2': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PRIOR_ALTERNATE_PATIENT_ID', None, -1),
'MRG_3': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PRIOR_PATIENT_ACCOUNT_NUMBER', None, -1),
'MRG_4': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PRIOR_PATIENT_ID', None, -1),
'MRG_5': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PRIOR_VISIT_NUMBER', None, -1),
'MRG_6': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PRIOR_ALTERNATE_VISIT_ID', None, -1),
'MRG_7': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'PRIOR_PATIENT_NAME', None, -1),
'MSA_1': ('leaf', None, 'ID', 'ACKNOWLEDGMENT_CODE', 'HL70008', -1),
'MSA_2': ('leaf', None, 'ST', 'MESSAGE_CONTROL_ID', None, -1),
'MSA_3': ('leaf', None, 'ST', 'TEXT_MESSAGE', None, -1),
'MSA_4': ('leaf', None, 'NM', 'EXPECTED_SEQUENCE_NUMBER', None, -1),
'MSA_5': ('leaf', None, None, 'DELAYED_ACKNOWLEDGMENT_TYPE', None, -1),
'MSA_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ERROR_CONDITION', 'HL70357', -1),
'MSH_1': ('leaf', None, 'ST', 'FIELD_SEPARATOR', None, -1),
'MSH_2': ('leaf', None, 'ST', 'ENCODING_CHARACTERS', None, -1),
'MSH_3': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'SENDING_APPLICATION', 'HL70361', -1),
'MSH_4': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'SENDING_FACILITY', 'HL70362', -1),
'MSH_5': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'RECEIVING_APPLICATION', 'HL70361', -1),
'MSH_6': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'RECEIVING_FACILITY', 'HL70362', -1),
'MSH_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_MESSAGE', None, -1),
'MSH_8': ('leaf', None, 'ST', 'SECURITY', None, -1),
'MSH_9': ('sequence', DATATYPES_STRUCTS['MSG'], 'MSG', 'MESSAGE_TYPE', None, -1),
'MSH_10': ('leaf', None, 'ST', 'MESSAGE_CONTROL_ID', None, -1),
'MSH_11': ('sequence', DATATYPES_STRUCTS['PT'], 'PT', 'PROCESSING_ID', None, -1),
'MSH_12': ('sequence', DATATYPES_STRUCTS['VID'], 'VID', 'VERSION_ID', None, -1),
'MSH_13': ('leaf', None, 'NM', 'SEQUENCE_NUMBER', None, -1),
'MSH_14': ('leaf', None, 'ST', 'CONTINUATION_POINTER', None, -1),
'MSH_15': ('leaf', None, 'ID', 'ACCEPT_ACKNOWLEDGMENT_TYPE', 'HL70155', -1),
'MSH_16': ('leaf', None, 'ID', 'APPLICATION_ACKNOWLEDGMENT_TYPE', 'HL70155', -1),
'MSH_17': ('leaf', None, 'ID', 'COUNTRY_CODE', 'HL70399', -1),
'MSH_18': ('leaf', None, 'ID', 'CHARACTER_SET', 'HL70211', -1),
'MSH_19': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRINCIPAL_LANGUAGE_OF_MESSAGE', None, -1),
'MSH_20': ('leaf', None, 'ID', 'ALTERNATE_CHARACTER_SET_HANDLING_SCHEME', 'HL70356', -1),
'MSH_21': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'MESSAGE_PROFILE_IDENTIFIER', None, -1),
'NCK_1': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SYSTEM_DATE_TIME', None, -1),
'NDS_1': ('leaf', None, 'NM', 'NOTIFICATION_REFERENCE_NUMBER', None, -1),
'NDS_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'NOTIFICATION_DATE_TIME', None, -1),
'NDS_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NOTIFICATION_ALERT_SEVERITY', 'HL70367', -1),
'NDS_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NOTIFICATION_CODE', None, -1),
'NK1_1': ('leaf', None, 'SI', 'SET_ID_NK1', None, -1),
'NK1_2': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'NAME', None, -1),
'NK1_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RELATIONSHIP', 'HL70063', -1),
'NK1_4': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'ADDRESS', None, -1),
'NK1_5': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PHONE_NUMBER', None, -1),
'NK1_6': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'BUSINESS_PHONE_NUMBER', None, -1),
'NK1_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTACT_ROLE', 'HL70131', -1),
'NK1_8': ('leaf', None, 'DT', 'START_DATE', None, -1),
'NK1_9': ('leaf', None, 'DT', 'END_DATE', None, -1),
'NK1_10': ('leaf', None, 'ST', 'NEXT_OF_KIN_ASSOCIATED_PARTIES_JOB_TITLE', None, -1),
'NK1_11': ('sequence', DATATYPES_STRUCTS['JCC'], 'JCC',
'NEXT_OF_KIN_ASSOCIATED_PARTIES_JOB_CODE_CLASS', 'HL70327', -1),
'NK1_12': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'NEXT_OF_KIN_ASSOCIATED_PARTIES_EMPLOYEE_NUMBER', None, -1),
'NK1_13': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'ORGANIZATION_NAME_NK1', None, -1),
'NK1_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MARITAL_STATUS', 'HL70002', -1),
'NK1_15': ('leaf', None, 'IS', 'ADMINISTRATIVE_SEX', 'HL70001', -1),
'NK1_16': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_BIRTH', None, -1),
'NK1_17': ('leaf', None, 'IS', 'LIVING_DEPENDENCY', 'HL70223', -1),
'NK1_18': ('leaf', None, 'IS', 'AMBULATORY_STATUS', 'HL70009', -1),
'NK1_19': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CITIZENSHIP', 'HL70171', -1),
'NK1_20': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_LANGUAGE', 'HL70296', -1),
'NK1_21': ('leaf', None, 'IS', 'LIVING_ARRANGEMENT', 'HL70220', -1),
'NK1_22': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PUBLICITY_CODE', 'HL70215', -1),
'NK1_23': ('leaf', None, 'ID', 'PROTECTION_INDICATOR', 'HL70136', -1),
'NK1_24': ('leaf', None, 'IS', 'STUDENT_INDICATOR', 'HL70231', -1),
'NK1_25': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RELIGION', 'HL70006', -1),
'NK1_26': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'MOTHER_S_MAIDEN_NAME', None, -1),
'NK1_27': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NATIONALITY', 'HL70212', -1),
'NK1_28': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ETHNIC_GROUP', 'HL70189', -1),
'NK1_29': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTACT_REASON', 'HL70222', -1),
'NK1_30': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'CONTACT_PERSON_S_NAME', None, -1),
'NK1_31': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CONTACT_PERSON_S_TELEPHONE_NUMBER', None, -1),
'NK1_32': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'CONTACT_PERSON_S_ADDRESS', None, -1),
'NK1_33': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'NEXT_OF_KIN_ASSOCIATED_PARTY_S_IDENTIFIERS', None, -1),
'NK1_34': ('leaf', None, 'IS', 'JOB_STATUS', 'HL70311', -1),
'NK1_35': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RACE', 'HL70005', -1),
'NK1_36': ('leaf', None, 'IS', 'HANDICAP', 'HL70295', -1),
'NK1_37': ('leaf', None, 'ST', 'CONTACT_PERSON_SOCIAL_SECURITY_NUMBER', None, -1),
'NK1_38': ('leaf', None, 'ST', 'NEXT_OF_KIN_BIRTH_PLACE', None, -1),
'NK1_39': ('leaf', None, 'IS', 'VIP_INDICATOR', 'HL70099', -1),
'NPU_1': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'BED_LOCATION', None, -1),
'NPU_2': ('leaf', None, 'IS', 'BED_STATUS', 'HL70116', -1),
'NSC_1': ('leaf', None, 'IS', 'APPLICATION_CHANGE_TYPE', 'HL70409', -1),
'NSC_2': ('leaf', None, 'ST', 'CURRENT_CPU', None, -1),
'NSC_3': ('leaf', None, 'ST', 'CURRENT_FILESERVER', None, -1),
'NSC_4': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'CURRENT_APPLICATION', None, -1),
'NSC_5': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'CURRENT_FACILITY', None, -1),
'NSC_6': ('leaf', None, 'ST', 'NEW_CPU', None, -1),
'NSC_7': ('leaf', None, 'ST', 'NEW_FILESERVER', None, -1),
'NSC_8': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'NEW_APPLICATION', None, -1),
'NSC_9': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'NEW_FACILITY', None, -1),
'NST_1': ('leaf', None, 'ID', 'STATISTICS_AVAILABLE', 'HL70136', -1),
'NST_2': ('leaf', None, 'ST', 'SOURCE_IDENTIFIER', None, -1),
'NST_3': ('leaf', None, 'ID', 'SOURCE_TYPE', 'HL70332', -1),
'NST_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'STATISTICS_START', None, -1),
'NST_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'STATISTICS_END', None, -1),
'NST_6': ('leaf', None, 'NM', 'RECEIVE_CHARACTER_COUNT', None, -1),
'NST_7': ('leaf', None, 'NM', 'SEND_CHARACTER_COUNT', None, -1),
'NST_8': ('leaf', None, 'NM', 'MESSAGES_RECEIVED', None, -1),
'NST_9': ('leaf', None, 'NM', 'MESSAGES_SENT', None, -1),
'NST_10': ('leaf', None, 'NM', 'CHECKSUM_ERRORS_RECEIVED', None, -1),
'NST_11': ('leaf', None, 'NM', 'LENGTH_ERRORS_RECEIVED', None, -1),
'NST_12': ('leaf', None, 'NM', 'OTHER_ERRORS_RECEIVED', None, -1),
'NST_13': ('leaf', None, 'NM', 'CONNECT_TIMEOUTS', None, -1),
'NST_14': ('leaf', None, 'NM', 'RECEIVE_TIMEOUTS', None, -1),
'NST_15': ('leaf', None, 'NM', 'APPLICATION_CONTROL_LEVEL_ERRORS', None, -1),
'NTE_1': ('leaf', None, 'SI', 'SET_ID_NTE', None, -1),
'NTE_2': ('leaf', None, 'ID', 'SOURCE_OF_COMMENT', 'HL70105', -1),
'NTE_3': ('leaf', None, 'FT', 'COMMENT', None, -1),
'NTE_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'COMMENT_TYPE', 'HL70364', -1),
'OBR_1': ('leaf', None, 'SI', 'SET_ID_OBR', None, -1),
'OBR_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_ORDER_NUMBER', None, -1),
'OBR_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_ORDER_NUMBER', None, -1),
'OBR_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNIVERSAL_SERVICE_IDENTIFIER', None, -1),
'OBR_5': ('leaf', None, 'ID', 'PRIORITY_OBR', None, -1),
'OBR_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'REQUESTED_DATE_TIME', None, -1),
'OBR_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'OBSERVATION_DATE_TIME', None, -1),
'OBR_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'OBSERVATION_END_DATE_TIME', None, -1),
'OBR_9': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'COLLECTION_VOLUME', None, -1),
'OBR_10': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'COLLECTOR_IDENTIFIER', None, -1),
'OBR_11': ('leaf', None, 'ID', 'SPECIMEN_ACTION_CODE', 'HL70065', -1),
'OBR_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DANGER_CODE', None, -1),
'OBR_13': ('leaf', None, 'ST', 'RELEVANT_CLINICAL_INFORMATION', None, -1),
'OBR_14': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SPECIMEN_RECEIVED_DATE_TIME', None, -1),
'OBR_15': ('sequence', DATATYPES_STRUCTS['SPS'], 'SPS', 'SPECIMEN_SOURCE', None, -1),
'OBR_16': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ORDERING_PROVIDER', None, -1),
'OBR_17': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'ORDER_CALLBACK_PHONE_NUMBER', None, -1),
'OBR_18': ('leaf', None, 'ST', 'PLACER_FIELD_1', None, -1),
'OBR_19': ('leaf', None, 'ST', 'PLACER_FIELD_2', None, -1),
'OBR_20': ('leaf', None, 'ST', 'FILLER_FIELD_1', None, -1),
'OBR_21': ('leaf', None, 'ST', 'FILLER_FIELD_2', None, -1),
'OBR_22': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'RESULTS_RPT_STATUS_CHNG_DATE_TIME', None, -1),
'OBR_23': ('sequence', DATATYPES_STRUCTS['MOC'], 'MOC', 'CHARGE_TO_PRACTICE', None, -1),
'OBR_24': ('leaf', None, 'ID', 'DIAGNOSTIC_SERV_SECT_ID', 'HL70074', -1),
'OBR_25': ('leaf', None, 'ID', 'RESULT_STATUS', 'HL70123', -1),
'OBR_26': ('sequence', DATATYPES_STRUCTS['PRL'], 'PRL', 'PARENT_RESULT', None, -1),
'OBR_27': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'QUANTITY_TIMING', None, -1),
'OBR_28': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'RESULT_COPIES_TO', None, -1),
'OBR_29': ('sequence', DATATYPES_STRUCTS['EIP'], 'EIP', 'PARENT', None, -1),
'OBR_30': ('leaf', None, 'ID', 'TRANSPORTATION_MODE', 'HL70124', -1),
'OBR_31': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REASON_FOR_STUDY', None, -1),
'OBR_32': ('sequence', DATATYPES_STRUCTS['NDL'], 'NDL', 'PRINCIPAL_RESULT_INTERPRETER', None, -1),
'OBR_33': ('sequence', DATATYPES_STRUCTS['NDL'], 'NDL', 'ASSISTANT_RESULT_INTERPRETER', None, -1),
'OBR_34': ('sequence', DATATYPES_STRUCTS['NDL'], 'NDL', 'TECHNICIAN', None, -1),
'OBR_35': ('sequence', DATATYPES_STRUCTS['NDL'], 'NDL', 'TRANSCRIPTIONIST', None, -1),
'OBR_36': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SCHEDULED_DATE_TIME', None, -1),
'OBR_37': ('leaf', None, 'NM', 'NUMBER_OF_SAMPLE_CONTAINERS', None, -1),
'OBR_38': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRANSPORT_LOGISTICS_OF_COLLECTED_SAMPLE', None, -1),
'OBR_39': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'COLLECTOR_S_COMMENT', None, -1),
'OBR_40': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRANSPORT_ARRANGEMENT_RESPONSIBILITY', None, -1),
'OBR_41': ('leaf', None, 'ID', 'TRANSPORT_ARRANGED', 'HL70224', -1),
'OBR_42': ('leaf', None, 'ID', 'ESCORT_REQUIRED', 'HL70225', -1),
'OBR_43': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PLANNED_PATIENT_TRANSPORT_COMMENT', None, -1),
'OBR_44': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE', 'HL70088', -1),
'OBR_45': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE_MODIFIER', 'HL70340', -1),
'OBR_46': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PLACER_SUPPLEMENTAL_SERVICE_INFORMATION', 'HL70411', -1),
'OBR_47': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FILLER_SUPPLEMENTAL_SERVICE_INFORMATION', 'HL70411', -1),
'OBR_48': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE',
'MEDICALLY_NECESSARY_DUPLICATE_PROCEDURE_REASON', 'HL70476', -1),
'OBR_49': ('leaf', None, 'IS', 'RESULT_HANDLING', 'HL70507', -1),
'OBR_50': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'PARENT_UNIVERSAL_SERVICE_IDENTIFIER', None, -1),
'OBX_1': ('leaf', None, 'SI', 'SET_ID_OBX', None, -1),
'OBX_2': ('leaf', None, 'ID', 'VALUE_TYPE', 'HL70125', -1),
'OBX_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'OBSERVATION_IDENTIFIER', None, -1),
'OBX_4': ('leaf', None, 'ST', 'OBSERVATION_SUB_ID', None, -1),
'OBX_5': ('leaf', None, 'varies', 'OBSERVATION_VALUE', None, -1),
'OBX_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNITS', None, -1),
'OBX_7': ('leaf', None, 'ST', 'REFERENCES_RANGE', None, -1),
'OBX_8': ('leaf', None, 'IS', 'ABNORMAL_FLAGS', 'HL70078', -1),
'OBX_9': ('leaf', None, 'NM', 'PROBABILITY', None, -1),
'OBX_10': ('leaf', None, 'ID', 'NATURE_OF_ABNORMAL_TEST', 'HL70080', -1),
'OBX_11': ('leaf', None, 'ID', 'OBSERVATION_RESULT_STATUS', 'HL70085', -1),
'OBX_12': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_DATE_OF_REFERENCE_RANGE_VALUES', None, -1),
'OBX_13': ('leaf', None, 'ST', 'USER_DEFINED_ACCESS_CHECKS', None, -1),
'OBX_14': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_THE_OBSERVATION', None, -1),
'OBX_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCER_S_REFERENCE', None, -1),
'OBX_16': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'RESPONSIBLE_OBSERVER', None, -1),
'OBX_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'OBSERVATION_METHOD', None, -1),
'OBX_18': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'EQUIPMENT_INSTANCE_IDENTIFIER', None, -1),
'OBX_19': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_THE_ANALYSIS', None, -1),
'OBX_20': ('leaf', None, None, 'PERFORMING_ORGANIZATION_NAME', None, -1),
'OBX_21': ('leaf', None, None, 'PERFORMING_ORGANIZATION_ADDRESS', None, -1),
'OBX_22': ('leaf', None, None, 'PERFORMING_ORGANIZATION_MEDICAL_DIRECTOR', None, -1),
'OBX_23': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'PERFORMING_ORGANIZATION_NAME', None, -1),
'OBX_24': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'PERFORMING_ORGANIZATION_ADDRESS', None, -1),
'OBX_25': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PERFORMING_ORGANIZATION_MEDICAL_DIRECTOR', None, -1),
'ODS_1': ('leaf', None, 'ID', 'TYPE', 'HL70159', -1),
'ODS_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SERVICE_PERIOD', None, -1),
'ODS_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DIET_SUPPLEMENT_OR_PREFERENCE_CODE', None, -1),
'ODS_4': ('leaf', None, 'ST', 'TEXT_INSTRUCTION', None, -1),
'ODT_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRAY_TYPE', 'HL70160', -1),
'ODT_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SERVICE_PERIOD', None, -1),
'ODT_3': ('leaf', None, 'ST', 'TEXT_INSTRUCTION', None, -1),
'OM1_1': ('leaf', None, 'NM', 'SEQUENCE_NUMBER_TEST_OBSERVATION_MASTER_FILE', None, -1),
'OM1_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCER_S_SERVICE_TEST_OBSERVATION_ID', 'HL79999', -1),
'OM1_3': ('leaf', None, 'ID', 'PERMITTED_DATA_TYPES', 'HL70125', -1),
'OM1_4': ('leaf', None, 'ID', 'SPECIMEN_REQUIRED', 'HL70136', -1),
'OM1_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCER_ID', 'HL79999', -1),
'OM1_6': ('leaf', None, 'TX', 'OBSERVATION_DESCRIPTION', None, -1),
'OM1_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'OTHER_SERVICE_TEST_OBSERVATION_IDS_FOR_THE_OBSERVATION', 'HL79999', -1),
'OM1_8': ('leaf', None, 'ST', 'OTHER_NAMES', None, -1),
'OM1_9': ('leaf', None, 'ST', 'PREFERRED_REPORT_NAME_FOR_THE_OBSERVATION', None, -1),
'OM1_10': ('leaf', None, 'ST', 'PREFERRED_SHORT_NAME_OR_MNEMONIC_FOR_OBSERVATION', None, -1),
'OM1_11': ('leaf', None, 'ST', 'PREFERRED_LONG_NAME_FOR_THE_OBSERVATION', None, -1),
'OM1_12': ('leaf', None, 'ID', 'ORDERABILITY', 'HL70136', -1),
'OM1_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'IDENTITY_OF_INSTRUMENT_USED_TO_PERFORM_THIS_STUDY', 'HL79999', -1),
'OM1_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CODED_REPRESENTATION_OF_METHOD', 'HL79999', -1),
'OM1_15': ('leaf', None, 'ID', 'PORTABLE_DEVICE_INDICATOR', 'HL70136', -1),
'OM1_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'OBSERVATION_PRODUCING_DEPARTMENT_SECTION', 'HL79999', -1),
'OM1_17': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'TELEPHONE_NUMBER_OF_SECTION', None, -1),
'OM1_18': ('leaf', None, 'IS', 'NATURE_OF_SERVICE_TEST_OBSERVATION', 'HL70174', -1),
'OM1_19': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REPORT_SUBHEADER', 'HL79999', -1),
'OM1_20': ('leaf', None, 'ST', 'REPORT_DISPLAY_ORDER', None, -1),
'OM1_21': ('sequence', DATATYPES_STRUCTS['TS'], 'TS',
'DATE_TIME_STAMP_FOR_ANY_CHANGE_IN_DEFINITION_FOR_THE_OBSERVATION', None, -1),
'OM1_22': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_DATE_TIME_OF_CHANGE', None, -1),
'OM1_23': ('leaf', None, 'NM', 'TYPICAL_TURN_AROUND_TIME', None, -1),
'OM1_24': ('leaf', None, 'NM', 'PROCESSING_TIME', None, -1),
'OM1_25': ('leaf', None, 'ID', 'PROCESSING_PRIORITY', 'HL70168', -1),
'OM1_26': ('leaf', None, 'ID', 'REPORTING_PRIORITY', 'HL70169', -1),
'OM1_27': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'OUTSIDE_SITE_S_WHERE_OBSERVATION_MAY_BE_PERFORMED', 'HL79999', -1),
'OM1_28': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'ADDRESS_OF_OUTSIDE_SITE_S', None, -1),
'OM1_29': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PHONE_NUMBER_OF_OUTSIDE_SITE', None, -1),
'OM1_30': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONFIDENTIALITY_CODE', 'HL70177', -1),
'OM1_31': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'OBSERVATIONS_REQUIRED_TO_INTERPRET_THE_OBSERVATION', 'HL79999', -1),
'OM1_32': ('leaf', None, 'TX', 'INTERPRETATION_OF_OBSERVATIONS', None, -1),
'OM1_33': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTRAINDICATIONS_TO_OBSERVATIONS', 'HL79999', -1),
'OM1_34': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REFLEX_TESTS_OBSERVATIONS', 'HL79999', -1),
'OM1_35': ('leaf', None, 'TX', 'RULES_THAT_TRIGGER_REFLEX_TESTING', None, -1),
'OM1_36': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FIXED_CANNED_MESSAGE', 'HL79999', -1),
'OM1_37': ('leaf', None, 'TX', 'PATIENT_PREPARATION', None, -1),
'OM1_38': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_MEDICATION', 'HL79999', -1),
'OM1_39': ('leaf', None, 'TX', 'FACTORS_THAT_MAY_AFFECT_THE_OBSERVATION', None, -1),
'OM1_40': ('leaf', None, 'ST', 'SERVICE_TEST_OBSERVATION_PERFORMANCE_SCHEDULE', None, -1),
'OM1_41': ('leaf', None, 'TX', 'DESCRIPTION_OF_TEST_METHODS', None, -1),
'OM1_42': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'KIND_OF_QUANTITY_OBSERVED', 'HL70254', -1),
'OM1_43': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'POINT_VERSUS_INTERVAL', 'HL70255', -1),
'OM1_44': ('leaf', None, 'TX', 'CHALLENGE_INFORMATION', 'HL70256', -1),
'OM1_45': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RELATIONSHIP_MODIFIER', 'HL70258', -1),
'OM1_46': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TARGET_ANATOMIC_SITE_OF_TEST', 'HL79999', -1),
'OM1_47': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MODALITY_OF_IMAGING_MEASUREMENT', 'HL70259', -1),
'OM2_1': ('leaf', None, 'NM', 'SEQUENCE_NUMBER_TEST_OBSERVATION_MASTER_FILE', None, -1),
'OM2_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNITS_OF_MEASURE', 'HL79999', -1),
'OM2_3': ('leaf', None, 'NM', 'RANGE_OF_DECIMAL_PRECISION', None, -1),
'OM2_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CORRESPONDING_SI_UNITS_OF_MEASURE', 'HL79999', -1),
'OM2_5': ('leaf', None, 'TX', 'SI_CONVERSION_FACTOR', None, -1),
'OM2_6': ('sequence', DATATYPES_STRUCTS['RFR'], 'RFR',
'REFERENCE_NORMAL_RANGE_ORDINAL_AND_CONTINUOUS_OBSERVATIONS', None, -1),
'OM2_7': ('sequence', DATATYPES_STRUCTS['RFR'], 'RFR',
'CRITICAL_RANGE_FOR_ORDINAL_AND_CONTINUOUS_OBSERVATIONS', None, -1),
'OM2_8': ('sequence', DATATYPES_STRUCTS['RFR'], 'RFR',
'ABSOLUTE_RANGE_FOR_ORDINAL_AND_CONTINUOUS_OBSERVATIONS', None, -1),
'OM2_9': ('sequence', DATATYPES_STRUCTS['DLT'], 'DLT', 'DELTA_CHECK_CRITERIA', None, -1),
'OM2_10': ('leaf', None, 'NM', 'MINIMUM_MEANINGFUL_INCREMENTS', None, -1),
'OM3_1': ('leaf', None, 'NM', 'SEQUENCE_NUMBER_TEST_OBSERVATION_MASTER_FILE', None, -1),
'OM3_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PREFERRED_CODING_SYSTEM', 'HL79999', -1),
'OM3_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'VALID_CODED_ANSWERS', 'HL79999', -1),
'OM3_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'NORMAL_TEXT_CODES_FOR_CATEGORICAL_OBSERVATIONS', 'HL79999', -1),
'OM3_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'ABNORMAL_TEXT_CODES_FOR_CATEGORICAL_OBSERVATIONS', 'HL79999', -1),
'OM3_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'CRITICAL_TEXT_CODES_FOR_CATEGORICAL_OBSERVATIONS', 'HL79999', -1),
'OM3_7': ('leaf', None, 'ID', 'VALUE_TYPE', 'HL70125', -1),
'OM4_1': ('leaf', None, 'NM', 'SEQUENCE_NUMBER_TEST_OBSERVATION_MASTER_FILE', None, -1),
'OM4_2': ('leaf', None, 'ID', 'DERIVED_SPECIMEN', 'HL70170', -1),
'OM4_3': ('leaf', None, 'TX', 'CONTAINER_DESCRIPTION', None, -1),
'OM4_4': ('leaf', None, 'NM', 'CONTAINER_VOLUME', None, -1),
'OM4_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTAINER_UNITS', 'HL79999', -1),
'OM4_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SPECIMEN', 'HL79999', -1),
'OM4_7': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ADDITIVE', 'HL70371', -1),
'OM4_8': ('leaf', None, 'TX', 'PREPARATION', None, -1),
'OM4_9': ('leaf', None, 'TX', 'SPECIAL_HANDLING_REQUIREMENTS', None, -1),
'OM4_10': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'NORMAL_COLLECTION_VOLUME', None, -1),
'OM4_11': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'MINIMUM_COLLECTION_VOLUME', None, -1),
'OM4_12': ('leaf', None, 'TX', 'SPECIMEN_REQUIREMENTS', None, -1),
'OM4_13': ('leaf', None, 'ID', 'SPECIMEN_PRIORITIES', 'HL70027', -1),
'OM4_14': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'SPECIMEN_RETENTION_TIME', None, -1),
'OM5_1': ('leaf', None, 'NM', 'SEQUENCE_NUMBER_TEST_OBSERVATION_MASTER_FILE', None, -1),
'OM5_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'TEST_OBSERVATIONS_INCLUDED_WITHIN_AN_ORDERED_TEST_BATTERY', 'HL79999', -1),
'OM5_3': ('leaf', None, 'ST', 'OBSERVATION_ID_SUFFIXES', None, -1),
'OM6_1': ('leaf', None, 'NM', 'SEQUENCE_NUMBER_TEST_OBSERVATION_MASTER_FILE', None, -1),
'OM6_2': ('leaf', None, 'TX', 'DERIVATION_RULE', None, -1),
'OM7_1': ('leaf', None, 'NM', 'SEQUENCE_NUMBER_TEST_OBSERVATION_MASTER_FILE', None, -1),
'OM7_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNIVERSAL_SERVICE_IDENTIFIER', None, -1),
'OM7_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CATEGORY_IDENTIFIER', 'HL70412', -1),
'OM7_4': ('leaf', None, 'TX', 'CATEGORY_DESCRIPTION', None, -1),
'OM7_5': ('leaf', None, 'ST', 'CATEGORY_SYNONYM', None, -1),
'OM7_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_TEST_SERVICE_START_DATE_TIME', None, -1),
'OM7_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_TEST_SERVICE_END_DATE_TIME', None, -1),
'OM7_8': ('leaf', None, 'NM', 'TEST_SERVICE_DEFAULT_DURATION_QUANTITY', None, -1),
'OM7_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TEST_SERVICE_DEFAULT_DURATION_UNITS', 'HL79999', -1),
'OM7_10': ('leaf', None, 'IS', 'TEST_SERVICE_DEFAULT_FREQUENCY', 'HL70335', -1),
'OM7_11': ('leaf', None, 'ID', 'CONSENT_INDICATOR', 'HL70136', -1),
'OM7_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONSENT_IDENTIFIER', 'HL70413', -1),
'OM7_13': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CONSENT_EFFECTIVE_START_DATE_TIME', None, -1),
'OM7_14': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CONSENT_EFFECTIVE_END_DATE_TIME', None, -1),
'OM7_15': ('leaf', None, 'NM', 'CONSENT_INTERVAL_QUANTITY', None, -1),
'OM7_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONSENT_INTERVAL_UNITS', 'HL70414', -1),
'OM7_17': ('leaf', None, 'NM', 'CONSENT_WAITING_PERIOD_QUANTITY', None, -1),
'OM7_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONSENT_WAITING_PERIOD_UNITS', 'HL70414', -1),
'OM7_19': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_DATE_TIME_OF_CHANGE', None, -1),
'OM7_20': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ENTERED_BY', None, -1),
'OM7_21': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'ORDERABLE_AT_LOCATION', None, -1),
'OM7_22': ('leaf', None, 'IS', 'FORMULARY_STATUS', 'HL70473', -1),
'OM7_23': ('leaf', None, 'ID', 'SPECIAL_ORDER_INDICATOR', 'HL70136', -1),
'OM7_24': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_KEY_VALUE_CDM', 'HL70132', -1),
'ORC_1': ('leaf', None, 'ID', 'ORDER_CONTROL', 'HL70119', -1),
'ORC_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_ORDER_NUMBER', None, -1),
'ORC_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_ORDER_NUMBER', None, -1),
'ORC_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_GROUP_NUMBER', None, -1),
'ORC_5': ('leaf', None, 'ID', 'ORDER_STATUS', 'HL70038', -1),
'ORC_6': ('leaf', None, 'ID', 'RESPONSE_FLAG', 'HL70121', -1),
'ORC_7': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'QUANTITY_TIMING', None, -1),
'ORC_8': ('sequence', DATATYPES_STRUCTS['EIP'], 'EIP', 'PARENT', None, -1),
'ORC_9': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_TRANSACTION', None, -1),
'ORC_10': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ENTERED_BY', None, -1),
'ORC_11': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'VERIFIED_BY', None, -1),
'ORC_12': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ORDERING_PROVIDER', None, -1),
'ORC_13': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'ENTERER_S_LOCATION', None, -1),
'ORC_14': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'CALL_BACK_PHONE_NUMBER', None, -1),
'ORC_15': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ORDER_EFFECTIVE_DATE_TIME', None, -1),
'ORC_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ORDER_CONTROL_CODE_REASON', None, -1),
'ORC_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ENTERING_ORGANIZATION', None, -1),
'ORC_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ENTERING_DEVICE', None, -1),
'ORC_19': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ACTION_BY', None, -1),
'ORC_20': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADVANCED_BENEFICIARY_NOTICE_CODE', 'HL70339', -1),
'ORC_21': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'ORDERING_FACILITY_NAME', None, -1),
'ORC_22': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'ORDERING_FACILITY_ADDRESS', None, -1),
'ORC_23': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'ORDERING_FACILITY_PHONE_NUMBER', None, -1),
'ORC_24': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'ORDERING_PROVIDER_ADDRESS', None, -1),
'ORC_25': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ORDER_STATUS_MODIFIER', None, -1),
'ORC_26': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE',
'ADVANCED_BENEFICIARY_NOTICE_OVERRIDE_REASON', 'HL70552', -1),
'ORC_27': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'FILLER_S_EXPECTED_AVAILABILITY_DATE_TIME', None, -1),
'ORC_28': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONFIDENTIALITY_CODE', 'HL70177', -1),
'ORC_29': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ORDER_TYPE', 'HL70482', -1),
'ORC_30': ('sequence', DATATYPES_STRUCTS['CNE'], 'CNE', 'ENTERER_AUTHORIZATION_MODE', 'HL70483', -1),
'ORC_31': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'PARENT_UNIVERSAL_SERVICE_IDENTIFIER', None, -1),
'ORG_1': ('leaf', None, 'SI', 'SET_ID_ORG', None, -1),
'ORG_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ORGANIZATION_UNIT_CODE', 'HL70405', -1),
'ORG_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ORGANIZATION_UNIT_TYPE_CODE', 'HL70474', -1),
'ORG_4': ('leaf', None, 'ID', 'PRIMARY_ORG_UNIT_INDICATOR', 'HL70136', -1),
'ORG_5': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PRACTITIONER_ORG_UNIT_IDENTIFIER', None, -1),
'ORG_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'HEALTH_CARE_PROVIDER_TYPE_CODE', 'HL70452', -1),
'ORG_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'HEALTH_CARE_PROVIDER_CLASSIFICATION_CODE', 'HL70453', -1),
'ORG_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'HEALTH_CARE_PROVIDER_AREA_OF_SPECIALIZATION_CODE', 'HL70454', -1),
'ORG_9': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'EFFECTIVE_DATE_RANGE', None, -1),
'ORG_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EMPLOYMENT_STATUS_CODE', 'HL70066', -1),
'ORG_11': ('leaf', None, 'ID', 'BOARD_APPROVAL_INDICATOR', 'HL70136', -1),
'ORG_12': ('leaf', None, 'ID', 'PRIMARY_CARE_PHYSICIAN_INDICATOR', 'HL70136', -1),
'OVR_1': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BUSINESS_RULE_OVERRIDE_TYPE', 'HL70518', -1),
'OVR_2': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'BUSINESS_RULE_OVERRIDE_CODE', 'HL70521', -1),
'OVR_3': ('leaf', None, 'TX', 'OVERRIDE_COMMENTS', None, -1),
'OVR_4': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'OVERRIDE_ENTERED_BY', None, -1),
'OVR_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'OVERRIDE_AUTHORIZED_BY', None, -1),
'PCR_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'IMPLICATED_PRODUCT', None, -1),
'PCR_2': ('leaf', None, 'IS', 'GENERIC_PRODUCT', 'HL70249', -1),
'PCR_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCT_CLASS', None, -1),
'PCR_4': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'TOTAL_DURATION_OF_THERAPY', None, -1),
'PCR_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PRODUCT_MANUFACTURE_DATE', None, -1),
'PCR_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PRODUCT_EXPIRATION_DATE', None, -1),
'PCR_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PRODUCT_IMPLANTATION_DATE', None, -1),
'PCR_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PRODUCT_EXPLANTATION_DATE', None, -1),
'PCR_9': ('leaf', None, 'IS', 'SINGLE_USE_DEVICE', 'HL70244', -1),
'PCR_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INDICATION_FOR_PRODUCT_USE', None, -1),
'PCR_11': ('leaf', None, 'IS', 'PRODUCT_PROBLEM', 'HL70245', -1),
'PCR_12': ('leaf', None, 'ST', 'PRODUCT_SERIAL_LOT_NUMBER', None, -1),
'PCR_13': ('leaf', None, 'IS', 'PRODUCT_AVAILABLE_FOR_INSPECTION', 'HL70246', -1),
'PCR_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCT_EVALUATION_PERFORMED', None, -1),
'PCR_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCT_EVALUATION_STATUS', 'HL70247', -1),
'PCR_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCT_EVALUATION_RESULTS', None, -1),
'PCR_17': ('leaf', None, 'ID', 'EVALUATED_PRODUCT_SOURCE', 'HL70248', -1),
'PCR_18': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_PRODUCT_RETURNED_TO_MANUFACTURER', None, -1),
'PCR_19': ('leaf', None, 'ID', 'DEVICE_OPERATOR_QUALIFICATIONS', 'HL70242', -1),
'PCR_20': ('leaf', None, 'ID', 'RELATEDNESS_ASSESSMENT', 'HL70250', -1),
'PCR_21': ('leaf', None, 'ID', 'ACTION_TAKEN_IN_RESPONSE_TO_THE_EVENT', 'HL70251', -1),
'PCR_22': ('leaf', None, 'ID', 'EVENT_CAUSALITY_OBSERVATIONS', 'HL70252', -1),
'PCR_23': ('leaf', None, 'ID', 'INDIRECT_EXPOSURE_MECHANISM', 'HL70253', -1),
'PD1_1': ('leaf', None, 'IS', 'LIVING_DEPENDENCY', 'HL70223', -1),
'PD1_2': ('leaf', None, 'IS', 'LIVING_ARRANGEMENT', 'HL70220', -1),
'PD1_3': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'PATIENT_PRIMARY_FACILITY', None, -1),
'PD1_4': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PATIENT_PRIMARY_CARE_PROVIDER_NAME_ID_NO', None, -1),
'PD1_5': ('leaf', None, 'IS', 'STUDENT_INDICATOR', 'HL70231', -1),
'PD1_6': ('leaf', None, 'IS', 'HANDICAP', 'HL70295', -1),
'PD1_7': ('leaf', None, 'IS', 'LIVING_WILL_CODE', 'HL70315', -1),
'PD1_8': ('leaf', None, 'IS', 'ORGAN_DONOR_CODE', 'HL70316', -1),
'PD1_9': ('leaf', None, 'ID', 'SEPARATE_BILL', 'HL70136', -1),
'PD1_10': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'DUPLICATE_PATIENT', None, -1),
'PD1_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PUBLICITY_CODE', 'HL70215', -1),
'PD1_12': ('leaf', None, 'ID', 'PROTECTION_INDICATOR', 'HL70136', -1),
'PD1_13': ('leaf', None, 'DT', 'PROTECTION_INDICATOR_EFFECTIVE_DATE', None, -1),
'PD1_14': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'PLACE_OF_WORSHIP', None, -1),
'PD1_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADVANCE_DIRECTIVE_CODE', 'HL70435', -1),
'PD1_16': ('leaf', None, 'IS', 'IMMUNIZATION_REGISTRY_STATUS', 'HL70441', -1),
'PD1_17': ('leaf', None, 'DT', 'IMMUNIZATION_REGISTRY_STATUS_EFFECTIVE_DATE', None, -1),
'PD1_18': ('leaf', None, 'DT', 'PUBLICITY_CODE_EFFECTIVE_DATE', None, -1),
'PD1_19': ('leaf', None, 'IS', 'MILITARY_BRANCH', 'HL70140', -1),
'PD1_20': ('leaf', None, 'IS', 'MILITARY_RANK_GRADE', 'HL70141', -1),
'PD1_21': ('leaf', None, 'IS', 'MILITARY_STATUS', 'HL70142', -1),
'PDA_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DEATH_CAUSE_CODE', None, -1),
'PDA_2': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'DEATH_LOCATION', None, -1),
'PDA_3': ('leaf', None, 'ID', 'DEATH_CERTIFIED_INDICATOR', 'HL70136', -1),
'PDA_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DEATH_CERTIFICATE_SIGNED_DATE_TIME', None, -1),
'PDA_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'DEATH_CERTIFIED_BY', None, -1),
'PDA_6': ('leaf', None, 'ID', 'AUTOPSY_INDICATOR', 'HL70136', -1),
'PDA_7': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'AUTOPSY_START_AND_END_DATE_TIME', None, -1),
'PDA_8': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'AUTOPSY_PERFORMED_BY', None, -1),
'PDA_9': ('leaf', None, 'ID', 'CORONER_INDICATOR', 'HL70136', -1),
'PDC_1': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'MANUFACTURER_DISTRIBUTOR', None, -1),
'PDC_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'COUNTRY', None, -1),
'PDC_3': ('leaf', None, 'ST', 'BRAND_NAME', None, -1),
'PDC_4': ('leaf', None, 'ST', 'DEVICE_FAMILY_NAME', None, -1),
'PDC_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GENERIC_NAME', None, -1),
'PDC_6': ('leaf', None, 'ST', 'MODEL_IDENTIFIER', None, -1),
'PDC_7': ('leaf', None, 'ST', 'CATALOGUE_IDENTIFIER', None, -1),
'PDC_8': ('leaf', None, 'ST', 'OTHER_IDENTIFIER', None, -1),
'PDC_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCT_CODE', None, -1),
'PDC_10': ('leaf', None, 'ID', 'MARKETING_BASIS', 'HL70330', -1),
'PDC_11': ('leaf', None, 'ST', 'MARKETING_APPROVAL_ID', None, -1),
'PDC_12': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'LABELED_SHELF_LIFE', None, -1),
'PDC_13': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'EXPECTED_SHELF_LIFE', None, -1),
'PDC_14': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_FIRST_MARKETED', None, -1),
'PDC_15': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_LAST_MARKETED', None, -1),
'PEO_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EVENT_IDENTIFIERS_USED', None, -1),
'PEO_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EVENT_SYMPTOM_DIAGNOSIS_CODE', None, -1),
'PEO_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_ONSET_DATE_TIME', None, -1),
'PEO_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_EXACERBATION_DATE_TIME', None, -1),
'PEO_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_IMPROVED_DATE_TIME', None, -1),
'PEO_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_ENDED_DATA_TIME', None, -1),
'PEO_7': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'EVENT_LOCATION_OCCURRED_ADDRESS', None, -1),
'PEO_8': ('leaf', None, 'ID', 'EVENT_QUALIFICATION', 'HL70237', -1),
'PEO_9': ('leaf', None, 'ID', 'EVENT_SERIOUS', 'HL70238', -1),
'PEO_10': ('leaf', None, 'ID', 'EVENT_EXPECTED', 'HL70239', -1),
'PEO_11': ('leaf', None, 'ID', 'EVENT_OUTCOME', 'HL70240', -1),
'PEO_12': ('leaf', None, 'ID', 'PATIENT_OUTCOME', 'HL70241', -1),
'PEO_13': ('leaf', None, 'FT', 'EVENT_DESCRIPTION_FROM_OTHERS', None, -1),
'PEO_14': ('leaf', None, 'FT', 'EVENT_FROM_ORIGINAL_REPORTER', None, -1),
'PEO_15': ('leaf', None, 'FT', 'EVENT_DESCRIPTION_FROM_PATIENT', None, -1),
'PEO_16': ('leaf', None, 'FT', 'EVENT_DESCRIPTION_FROM_PRACTITIONER', None, -1),
'PEO_17': ('leaf', None, 'FT', 'EVENT_DESCRIPTION_FROM_AUTOPSY', None, -1),
'PEO_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CAUSE_OF_DEATH', None, -1),
'PEO_19': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'PRIMARY_OBSERVER_NAME', None, -1),
'PEO_20': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'PRIMARY_OBSERVER_ADDRESS', None, -1),
'PEO_21': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PRIMARY_OBSERVER_TELEPHONE', None, -1),
'PEO_22': ('leaf', None, 'ID', 'PRIMARY_OBSERVER_S_QUALIFICATION', 'HL70242', -1),
'PEO_23': ('leaf', None, 'ID', 'CONFIRMATION_PROVIDED_BY', 'HL70242', -1),
'PEO_24': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PRIMARY_OBSERVER_AWARE_DATE_TIME', None, -1),
'PEO_25': ('leaf', None, 'ID', 'PRIMARY_OBSERVER_S_IDENTITY_MAY_BE_DIVULGED', 'HL70243', -1),
'PES_1': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'SENDER_ORGANIZATION_NAME', None, -1),
'PES_2': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'SENDER_INDIVIDUAL_NAME', None, -1),
'PES_3': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'SENDER_ADDRESS', None, -1),
'PES_4': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'SENDER_TELEPHONE', None, -1),
'PES_5': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'SENDER_EVENT_IDENTIFIER', None, -1),
'PES_6': ('leaf', None, 'NM', 'SENDER_SEQUENCE_NUMBER', None, -1),
'PES_7': ('leaf', None, 'FT', 'SENDER_EVENT_DESCRIPTION', None, -1),
'PES_8': ('leaf', None, 'FT', 'SENDER_COMMENT', None, -1),
'PES_9': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SENDER_AWARE_DATE_TIME', None, -1),
'PES_10': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EVENT_REPORT_DATE', None, -1),
'PES_11': ('leaf', None, 'ID', 'EVENT_REPORT_TIMING_TYPE', 'HL70234', -1),
'PES_12': ('leaf', None, 'ID', 'EVENT_REPORT_SOURCE', 'HL70235', -1),
'PES_13': ('leaf', None, 'ID', 'EVENT_REPORTED_TO', 'HL70236', -1),
'PID_1': ('leaf', None, 'SI', 'SET_ID_PID', None, -1),
'PID_2': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PATIENT_ID', None, -1),
'PID_3': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PATIENT_IDENTIFIER_LIST', None, -1),
'PID_4': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'ALTERNATE_PATIENT_ID_PID', None, -1),
'PID_5': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'PATIENT_NAME', None, -1),
'PID_6': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'MOTHER_S_MAIDEN_NAME', None, -1),
'PID_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_BIRTH', None, -1),
'PID_8': ('leaf', None, 'IS', 'ADMINISTRATIVE_SEX', 'HL70001', -1),
'PID_9': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'PATIENT_ALIAS', None, -1),
'PID_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RACE', 'HL70005', -1),
'PID_11': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'PATIENT_ADDRESS', None, -1),
'PID_12': ('leaf', None, 'IS', 'COUNTY_CODE', 'HL70289', -1),
'PID_13': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PHONE_NUMBER_HOME', None, -1),
'PID_14': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PHONE_NUMBER_BUSINESS', None, -1),
'PID_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_LANGUAGE', 'HL70296', -1),
'PID_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MARITAL_STATUS', 'HL70002', -1),
'PID_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RELIGION', 'HL70006', -1),
'PID_18': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PATIENT_ACCOUNT_NUMBER', None, -1),
'PID_19': ('leaf', None, 'ST', 'SSN_NUMBER_PATIENT', None, -1),
'PID_20': ('sequence', DATATYPES_STRUCTS['DLN'], 'DLN', 'DRIVER_S_LICENSE_NUMBER_PATIENT', None, -1),
'PID_21': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'MOTHER_S_IDENTIFIER', None, -1),
'PID_22': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ETHNIC_GROUP', 'HL70189', -1),
'PID_23': ('leaf', None, 'ST', 'BIRTH_PLACE', None, -1),
'PID_24': ('leaf', None, 'ID', 'MULTIPLE_BIRTH_INDICATOR', 'HL70136', -1),
'PID_25': ('leaf', None, 'NM', 'BIRTH_ORDER', None, -1),
'PID_26': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CITIZENSHIP', 'HL70171', -1),
'PID_27': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'VETERANS_MILITARY_STATUS', 'HL70172', -1),
'PID_28': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'NATIONALITY', 'HL70212', -1),
'PID_29': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PATIENT_DEATH_DATE_AND_TIME', None, -1),
'PID_30': ('leaf', None, 'ID', 'PATIENT_DEATH_INDICATOR', 'HL70136', -1),
'PID_31': ('leaf', None, 'ID', 'IDENTITY_UNKNOWN_INDICATOR', 'HL70136', -1),
'PID_32': ('leaf', None, 'IS', 'IDENTITY_RELIABILITY_CODE', 'HL70445', -1),
'PID_33': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'LAST_UPDATE_DATE_TIME', None, -1),
'PID_34': ('sequence', DATATYPES_STRUCTS['HD'], 'HD', 'LAST_UPDATE_FACILITY', None, -1),
'PID_35': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SPECIES_CODE', 'HL70446', -1),
'PID_36': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'BREED_CODE', 'HL70447', -1),
'PID_37': ('leaf', None, 'ST', 'STRAIN', None, -1),
'PID_38': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRODUCTION_CLASS_CODE', 'HL70429', -1),
'PID_39': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'TRIBAL_CITIZENSHIP', 'HL70171', -1),
'PR1_1': ('leaf', None, 'SI', 'SET_ID_PR1', None, -1),
'PR1_2': ('leaf', None, 'IS', 'PROCEDURE_CODING_METHOD', 'HL70089', -1),
'PR1_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE', 'HL70088', -1),
'PR1_4': ('leaf', None, 'ST', 'PROCEDURE_DESCRIPTION', None, -1),
'PR1_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PROCEDURE_DATE_TIME', None, -1),
'PR1_6': ('leaf', None, 'IS', 'PROCEDURE_FUNCTIONAL_TYPE', 'HL70230', -1),
'PR1_7': ('leaf', None, 'NM', 'PROCEDURE_MINUTES', None, -1),
'PR1_8': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ANESTHESIOLOGIST', 'HL70010', -1),
'PR1_9': ('leaf', None, 'IS', 'ANESTHESIA_CODE', 'HL70019', -1),
'PR1_10': ('leaf', None, 'NM', 'ANESTHESIA_MINUTES', None, -1),
'PR1_11': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'SURGEON', 'HL70010', -1),
'PR1_12': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PROCEDURE_PRACTITIONER', 'HL70010', -1),
'PR1_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONSENT_CODE', 'HL70059', -1),
'PR1_14': ('leaf', None, 'ID', 'PROCEDURE_PRIORITY', 'HL70418', -1),
'PR1_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ASSOCIATED_DIAGNOSIS_CODE', 'HL70051', -1),
'PR1_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCEDURE_CODE_MODIFIER', 'HL70340', -1),
'PR1_17': ('leaf', None, 'IS', 'PROCEDURE_DRG_TYPE', 'HL70416', -1),
'PR1_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TISSUE_TYPE_CODE', 'HL70417', -1),
'PR1_19': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PROCEDURE_IDENTIFIER', None, -1),
'PR1_20': ('leaf', None, 'ID', 'PROCEDURE_ACTION_CODE', 'HL70206', -1),
'PRA_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_KEY_VALUE_PRA', 'HL79999', -1),
'PRA_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRACTITIONER_GROUP', 'HL70358', -1),
'PRA_3': ('leaf', None, 'IS', 'PRACTITIONER_CATEGORY', 'HL70186', -1),
'PRA_4': ('leaf', None, 'ID', 'PROVIDER_BILLING', 'HL70187', -1),
'PRA_5': ('sequence', DATATYPES_STRUCTS['SPD'], 'SPD', 'SPECIALTY', 'HL70337', -1),
'PRA_6': ('sequence', DATATYPES_STRUCTS['PLN'], 'PLN', 'PRACTITIONER_ID_NUMBERS', 'HL70338', -1),
'PRA_7': ('sequence', DATATYPES_STRUCTS['PIP'], 'PIP', 'PRIVILEGES', None, -1),
'PRA_8': ('leaf', None, 'DT', 'DATE_ENTERED_PRACTICE', None, -1),
'PRA_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INSTITUTION', 'HL70537', -1),
'PRA_10': ('leaf', None, 'DT', 'DATE_LEFT_PRACTICE', None, -1),
'PRA_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'GOVERNMENT_REIMBURSEMENT_BILLING_ELIGIBILITY', 'HL70401', -1),
'PRA_12': ('leaf', None, 'SI', 'SET_ID_PRA', None, -1),
'PRB_1': ('leaf', None, 'ID', 'ACTION_CODE', 'HL70287', -1),
'PRB_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ACTION_DATE_TIME', None, -1),
'PRB_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_ID', None, -1),
'PRB_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PROBLEM_INSTANCE_ID', None, -1),
'PRB_5': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'EPISODE_OF_CARE_ID', None, -1),
'PRB_6': ('leaf', None, 'NM', 'PROBLEM_LIST_PRIORITY', None, -1),
'PRB_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PROBLEM_ESTABLISHED_DATE_TIME', None, -1),
'PRB_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ANTICIPATED_PROBLEM_RESOLUTION_DATE_TIME', None, -1),
'PRB_9': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ACTUAL_PROBLEM_RESOLUTION_DATE_TIME', None, -1),
'PRB_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_CLASSIFICATION', None, -1),
'PRB_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_MANAGEMENT_DISCIPLINE', None, -1),
'PRB_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_PERSISTENCE', None, -1),
'PRB_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_CONFIRMATION_STATUS', None, -1),
'PRB_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_LIFE_CYCLE_STATUS', None, -1),
'PRB_15': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PROBLEM_LIFE_CYCLE_STATUS_DATE_TIME', None, -1),
'PRB_16': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PROBLEM_DATE_OF_ONSET', None, -1),
'PRB_17': ('leaf', None, 'ST', 'PROBLEM_ONSET_TEXT', None, -1),
'PRB_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_RANKING', None, -1),
'PRB_19': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CERTAINTY_OF_PROBLEM', None, -1),
'PRB_20': ('leaf', None, 'NM', 'PROBABILITY_OF_PROBLEM_0_1', None, -1),
'PRB_21': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INDIVIDUAL_AWARENESS_OF_PROBLEM', None, -1),
'PRB_22': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROBLEM_PROGNOSIS', None, -1),
'PRB_23': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INDIVIDUAL_AWARENESS_OF_PROGNOSIS', None, -1),
'PRB_24': ('leaf', None, 'ST', 'FAMILY_SIGNIFICANT_OTHER_AWARENESS_OF_PROBLEM_PROGNOSIS', None, -1),
'PRB_25': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SECURITY_SENSITIVITY', None, -1),
'PRC_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_KEY_VALUE_PRC', 'HL70132', -1),
'PRC_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FACILITY_ID_PRC', 'HL70464', -1),
'PRC_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DEPARTMENT', 'HL70184', -1),
'PRC_4': ('leaf', None, 'IS', 'VALID_PATIENT_CLASSES', 'HL70004', -1),
'PRC_5': ('sequence', DATATYPES_STRUCTS['CP'], 'CP', 'PRICE', None, -1),
'PRC_6': ('leaf', None, 'ST', 'FORMULA', None, -1),
'PRC_7': ('leaf', None, 'NM', 'MINIMUM_QUANTITY', None, -1),
'PRC_8': ('leaf', None, 'NM', 'MAXIMUM_QUANTITY', None, -1),
'PRC_9': ('sequence', DATATYPES_STRUCTS['MO'], 'MO', 'MINIMUM_PRICE', None, -1),
'PRC_10': ('sequence', DATATYPES_STRUCTS['MO'], 'MO', 'MAXIMUM_PRICE', None, -1),
'PRC_11': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_START_DATE', None, -1),
'PRC_12': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_END_DATE', None, -1),
'PRC_13': ('leaf', None, 'IS', 'PRICE_OVERRIDE_FLAG', 'HL70268', -1),
'PRC_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'BILLING_CATEGORY', 'HL70293', -1),
'PRC_15': ('leaf', None, 'ID', 'CHARGEABLE_FLAG', 'HL70136', -1),
'PRC_16': ('leaf', None, 'ID', 'ACTIVE_INACTIVE_FLAG', 'HL70183', -1),
'PRC_17': ('sequence', DATATYPES_STRUCTS['MO'], 'MO', 'COST', None, -1),
'PRC_18': ('leaf', None, 'IS', 'CHARGE_ON_INDICATOR', 'HL70269', -1),
'PRD_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROVIDER_ROLE', 'HL70286', -1),
'PRD_2': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'PROVIDER_NAME', None, -1),
'PRD_3': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'PROVIDER_ADDRESS', None, -1),
'PRD_4': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PROVIDER_LOCATION', None, -1),
'PRD_5': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PROVIDER_COMMUNICATION_INFORMATION', None, -1),
'PRD_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PREFERRED_METHOD_OF_CONTACT', 'HL70185', -1),
'PRD_7': ('sequence', DATATYPES_STRUCTS['PLN'], 'PLN', 'PROVIDER_IDENTIFIERS', None, -1),
'PRD_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_START_DATE_OF_PROVIDER_ROLE', None, -1),
'PRD_9': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_END_DATE_OF_PROVIDER_ROLE', None, -1),
'PSH_1': ('leaf', None, 'ST', 'REPORT_TYPE', None, -1),
'PSH_2': ('leaf', None, 'ST', 'REPORT_FORM_IDENTIFIER', None, -1),
'PSH_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'REPORT_DATE', None, -1),
'PSH_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'REPORT_INTERVAL_START_DATE', None, -1),
'PSH_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'REPORT_INTERVAL_END_DATE', None, -1),
'PSH_6': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'QUANTITY_MANUFACTURED', None, -1),
'PSH_7': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'QUANTITY_DISTRIBUTED', None, -1),
'PSH_8': ('leaf', None, 'ID', 'QUANTITY_DISTRIBUTED_METHOD', 'HL70329', -1),
'PSH_9': ('leaf', None, 'FT', 'QUANTITY_DISTRIBUTED_COMMENT', None, -1),
'PSH_10': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'QUANTITY_IN_USE', None, -1),
'PSH_11': ('leaf', None, 'ID', 'QUANTITY_IN_USE_METHOD', 'HL70329', -1),
'PSH_12': ('leaf', None, 'FT', 'QUANTITY_IN_USE_COMMENT', None, -1),
'PSH_13': ('leaf', None, 'NM', 'NUMBER_OF_PRODUCT_EXPERIENCE_REPORTS_FILED_BY_FACILITY', None, -1),
'PSH_14': ('leaf', None, 'NM', 'NUMBER_OF_PRODUCT_EXPERIENCE_REPORTS_FILED_BY_DISTRIBUTOR', None, -1),
'PTH_1': ('leaf', None, 'ID', 'ACTION_CODE', 'HL70287', -1),
'PTH_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PATHWAY_ID', None, -1),
'PTH_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PATHWAY_INSTANCE_ID', None, -1),
'PTH_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PATHWAY_ESTABLISHED_DATE_TIME', None, -1),
'PTH_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PATHWAY_LIFE_CYCLE_STATUS', None, -1),
'PTH_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'CHANGE_PATHWAY_LIFE_CYCLE_STATUS_DATE_TIME', None, -1),
'PV1_1': ('leaf', None, 'SI', 'SET_ID_PV1', None, -1),
'PV1_2': ('leaf', None, 'IS', 'PATIENT_CLASS', 'HL70004', -1),
'PV1_3': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'ASSIGNED_PATIENT_LOCATION', None, -1),
'PV1_4': ('leaf', None, 'IS', 'ADMISSION_TYPE', 'HL70007', -1),
'PV1_5': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'PREADMIT_NUMBER', None, -1),
'PV1_6': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIOR_PATIENT_LOCATION', None, -1),
'PV1_7': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ATTENDING_DOCTOR', 'HL70010', -1),
'PV1_8': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'REFERRING_DOCTOR', 'HL70010', -1),
'PV1_9': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'CONSULTING_DOCTOR', 'HL70010', -1),
'PV1_10': ('leaf', None, 'IS', 'HOSPITAL_SERVICE', 'HL70069', -1),
'PV1_11': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'TEMPORARY_LOCATION', None, -1),
'PV1_12': ('leaf', None, 'IS', 'PREADMIT_TEST_INDICATOR', 'HL70087', -1),
'PV1_13': ('leaf', None, 'IS', 'RE_ADMISSION_INDICATOR', 'HL70092', -1),
'PV1_14': ('leaf', None, 'IS', 'ADMIT_SOURCE', 'HL70023', -1),
'PV1_15': ('leaf', None, 'IS', 'AMBULATORY_STATUS', 'HL70009', -1),
'PV1_16': ('leaf', None, 'IS', 'VIP_INDICATOR', 'HL70099', -1),
'PV1_17': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ADMITTING_DOCTOR', 'HL70010', -1),
'PV1_18': ('leaf', None, 'IS', 'PATIENT_TYPE', 'HL70018', -1),
'PV1_19': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'VISIT_NUMBER', None, -1),
'PV1_20': ('sequence', DATATYPES_STRUCTS['FC'], 'FC', 'FINANCIAL_CLASS', 'HL70064', -1),
'PV1_21': ('leaf', None, 'IS', 'CHARGE_PRICE_INDICATOR', 'HL70032', -1),
'PV1_22': ('leaf', None, 'IS', 'COURTESY_CODE', 'HL70045', -1),
'PV1_23': ('leaf', None, 'IS', 'CREDIT_RATING', 'HL70046', -1),
'PV1_24': ('leaf', None, 'IS', 'CONTRACT_CODE', 'HL70044', -1),
'PV1_25': ('leaf', None, 'DT', 'CONTRACT_EFFECTIVE_DATE', None, -1),
'PV1_26': ('leaf', None, 'NM', 'CONTRACT_AMOUNT', None, -1),
'PV1_27': ('leaf', None, 'NM', 'CONTRACT_PERIOD', None, -1),
'PV1_28': ('leaf', None, 'IS', 'INTEREST_CODE', 'HL70073', -1),
'PV1_29': ('leaf', None, 'IS', 'TRANSFER_TO_BAD_DEBT_CODE', 'HL70110', -1),
'PV1_30': ('leaf', None, 'DT', 'TRANSFER_TO_BAD_DEBT_DATE', None, -1),
'PV1_31': ('leaf', None, 'IS', 'BAD_DEBT_AGENCY_CODE', 'HL70021', -1),
'PV1_32': ('leaf', None, 'NM', 'BAD_DEBT_TRANSFER_AMOUNT', None, -1),
'PV1_33': ('leaf', None, 'NM', 'BAD_DEBT_RECOVERY_AMOUNT', None, -1),
'PV1_34': ('leaf', None, 'IS', 'DELETE_ACCOUNT_INDICATOR', 'HL70111', -1),
'PV1_35': ('leaf', None, 'DT', 'DELETE_ACCOUNT_DATE', None, -1),
'PV1_36': ('leaf', None, 'IS', 'DISCHARGE_DISPOSITION', 'HL70112', -1),
'PV1_37': ('sequence', DATATYPES_STRUCTS['DLD'], 'DLD', 'DISCHARGED_TO_LOCATION', 'HL70113', -1),
'PV1_38': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DIET_TYPE', 'HL70114', -1),
'PV1_39': ('leaf', None, 'IS', 'SERVICING_FACILITY', 'HL70115', -1),
'PV1_40': ('leaf', None, 'IS', 'BED_STATUS', 'HL70116', -1),
'PV1_41': ('leaf', None, 'IS', 'ACCOUNT_STATUS', 'HL70117', -1),
'PV1_42': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PENDING_LOCATION', None, -1),
'PV1_43': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIOR_TEMPORARY_LOCATION', None, -1),
'PV1_44': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ADMIT_DATE_TIME', None, -1),
'PV1_45': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DISCHARGE_DATE_TIME', None, -1),
'PV1_46': ('leaf', None, 'NM', 'CURRENT_PATIENT_BALANCE', None, -1),
'PV1_47': ('leaf', None, 'NM', 'TOTAL_CHARGES', None, -1),
'PV1_48': ('leaf', None, 'NM', 'TOTAL_ADJUSTMENTS', None, -1),
'PV1_49': ('leaf', None, 'NM', 'TOTAL_PAYMENTS', None, -1),
'PV1_50': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'ALTERNATE_VISIT_ID', 'HL70203', -1),
'PV1_51': ('leaf', None, 'IS', 'VISIT_INDICATOR', 'HL70326', -1),
'PV1_52': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'OTHER_HEALTHCARE_PROVIDER', 'HL70010', -1),
'PV2_1': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PRIOR_PENDING_LOCATION', None, -1),
'PV2_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ACCOMMODATION_CODE', 'HL70129', -1),
'PV2_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMIT_REASON', None, -1),
'PV2_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRANSFER_REASON', None, -1),
'PV2_5': ('leaf', None, 'ST', 'PATIENT_VALUABLES', None, -1),
'PV2_6': ('leaf', None, 'ST', 'PATIENT_VALUABLES_LOCATION', None, -1),
'PV2_7': ('leaf', None, 'IS', 'VISIT_USER_CODE', 'HL70130', -1),
'PV2_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPECTED_ADMIT_DATE_TIME', None, -1),
'PV2_9': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPECTED_DISCHARGE_DATE_TIME', None, -1),
'PV2_10': ('leaf', None, 'NM', 'ESTIMATED_LENGTH_OF_INPATIENT_STAY', None, -1),
'PV2_11': ('leaf', None, 'NM', 'ACTUAL_LENGTH_OF_INPATIENT_STAY', None, -1),
'PV2_12': ('leaf', None, 'ST', 'VISIT_DESCRIPTION', None, -1),
'PV2_13': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'REFERRAL_SOURCE_CODE', None, -1),
'PV2_14': ('leaf', None, 'DT', 'PREVIOUS_SERVICE_DATE', None, -1),
'PV2_15': ('leaf', None, 'ID', 'EMPLOYMENT_ILLNESS_RELATED_INDICATOR', 'HL70136', -1),
'PV2_16': ('leaf', None, 'IS', 'PURGE_STATUS_CODE', 'HL70213', -1),
'PV2_17': ('leaf', None, 'DT', 'PURGE_STATUS_DATE', None, -1),
'PV2_18': ('leaf', None, 'IS', 'SPECIAL_PROGRAM_CODE', 'HL70214', -1),
'PV2_19': ('leaf', None, 'ID', 'RETENTION_INDICATOR', 'HL70136', -1),
'PV2_20': ('leaf', None, 'NM', 'EXPECTED_NUMBER_OF_INSURANCE_PLANS', None, -1),
'PV2_21': ('leaf', None, 'IS', 'VISIT_PUBLICITY_CODE', 'HL70215', -1),
'PV2_22': ('leaf', None, 'ID', 'VISIT_PROTECTION_INDICATOR', 'HL70136', -1),
'PV2_23': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'CLINIC_ORGANIZATION_NAME', None, -1),
'PV2_24': ('leaf', None, 'IS', 'PATIENT_STATUS_CODE', 'HL70216', -1),
'PV2_25': ('leaf', None, 'IS', 'VISIT_PRIORITY_CODE', 'HL70217', -1),
'PV2_26': ('leaf', None, 'DT', 'PREVIOUS_TREATMENT_DATE', None, -1),
'PV2_27': ('leaf', None, 'IS', 'EXPECTED_DISCHARGE_DISPOSITION', 'HL70112', -1),
'PV2_28': ('leaf', None, 'DT', 'SIGNATURE_ON_FILE_DATE', None, -1),
'PV2_29': ('leaf', None, 'DT', 'FIRST_SIMILAR_ILLNESS_DATE', None, -1),
'PV2_30': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PATIENT_CHARGE_ADJUSTMENT_CODE', 'HL70218', -1),
'PV2_31': ('leaf', None, 'IS', 'RECURRING_SERVICE_CODE', 'HL70219', -1),
'PV2_32': ('leaf', None, 'ID', 'BILLING_MEDIA_CODE', 'HL70136', -1),
'PV2_33': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPECTED_SURGERY_DATE_AND_TIME', None, -1),
'PV2_34': ('leaf', None, 'ID', 'MILITARY_PARTNERSHIP_CODE', 'HL70136', -1),
'PV2_35': ('leaf', None, 'ID', 'MILITARY_NON_AVAILABILITY_CODE', 'HL70136', -1),
'PV2_36': ('leaf', None, 'ID', 'NEWBORN_BABY_INDICATOR', 'HL70136', -1),
'PV2_37': ('leaf', None, 'ID', 'BABY_DETAINED_INDICATOR', 'HL70136', -1),
'PV2_38': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MODE_OF_ARRIVAL_CODE', 'HL70430', -1),
'PV2_39': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RECREATIONAL_DRUG_USE_CODE', 'HL70431', -1),
'PV2_40': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMISSION_LEVEL_OF_CARE_CODE', 'HL70432', -1),
'PV2_41': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRECAUTION_CODE', 'HL70433', -1),
'PV2_42': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PATIENT_CONDITION_CODE', 'HL70434', -1),
'PV2_43': ('leaf', None, 'IS', 'LIVING_WILL_CODE', 'HL70315', -1),
'PV2_44': ('leaf', None, 'IS', 'ORGAN_DONOR_CODE', 'HL70316', -1),
'PV2_45': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADVANCE_DIRECTIVE_CODE', 'HL70435', -1),
'PV2_46': ('leaf', None, 'DT', 'PATIENT_STATUS_EFFECTIVE_DATE', None, -1),
'PV2_47': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPECTED_LOA_RETURN_DATE_TIME', None, -1),
'PV2_48': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPECTED_PRE_ADMISSION_TESTING_DATE_TIME', None, -1),
'PV2_49': ('leaf', None, 'IS', 'NOTIFY_CLERGY_CODE', 'HL70534', -1),
'QAK_1': ('leaf', None, 'ST', 'QUERY_TAG', None, -1),
'QAK_2': ('leaf', None, 'ID', 'QUERY_RESPONSE_STATUS', 'HL70208', -1),
'QAK_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MESSAGE_QUERY_NAME', 'HL70471', -1),
'QAK_4': ('leaf', None, 'NM', 'HIT_COUNT', None, -1),
'QAK_5': ('leaf', None, 'NM', 'THIS_PAYLOAD', None, -1),
'QAK_6': ('leaf', None, 'NM', 'HITS_REMAINING', None, -1),
'QID_1': ('leaf', None, 'ST', 'QUERY_TAG', None, -1),
'QID_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MESSAGE_QUERY_NAME', 'HL70471', -1),
'QPD_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MESSAGE_QUERY_NAME', 'HL70471', -1),
'QPD_2': ('leaf', None, 'ST', 'QUERY_TAG', None, -1),
'QPD_3': ('leaf', None, 'varies', 'USER_PARAMETERS_IN_SUCCESSIVE_FIELDS', None, -1),
'QRD_1': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'QUERY_DATE_TIME', None, -1),
'QRD_2': ('leaf', None, 'ID', 'QUERY_FORMAT_CODE', 'HL70106', -1),
'QRD_3': ('leaf', None, 'ID', 'QUERY_PRIORITY', 'HL70091', -1),
'QRD_4': ('leaf', None, 'ST', 'QUERY_ID', None, -1),
'QRD_5': ('leaf', None, 'ID', 'DEFERRED_RESPONSE_TYPE', 'HL70107', -1),
'QRD_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DEFERRED_RESPONSE_DATE_TIME', None, -1),
'QRD_7': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'QUANTITY_LIMITED_REQUEST', 'HL70126', -1),
'QRD_8': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'WHO_SUBJECT_FILTER', None, -1),
'QRD_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'WHAT_SUBJECT_FILTER', 'HL70048', -1),
'QRD_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'WHAT_DEPARTMENT_DATA_CODE', None, -1),
'QRD_11': ('sequence', DATATYPES_STRUCTS['VR'], 'VR', 'WHAT_DATA_CODE_VALUE_QUAL', None, -1),
'QRD_12': ('leaf', None, 'ID', 'QUERY_RESULTS_LEVEL', 'HL70108', -1),
'QRF_1': ('leaf', None, 'ST', 'WHERE_SUBJECT_FILTER', None, -1),
'QRF_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'WHEN_DATA_START_DATE_TIME', None, -1),
'QRF_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'WHEN_DATA_END_DATE_TIME', None, -1),
'QRF_4': ('leaf', None, 'ST', 'WHAT_USER_QUALIFIER', None, -1),
'QRF_5': ('leaf', None, 'ST', 'OTHER_QRY_SUBJECT_FILTER', None, -1),
'QRF_6': ('leaf', None, 'ID', 'WHICH_DATE_TIME_QUALIFIER', 'HL70156', -1),
'QRF_7': ('leaf', None, 'ID', 'WHICH_DATE_TIME_STATUS_QUALIFIER', 'HL70157', -1),
'QRF_8': ('leaf', None, 'ID', 'DATE_TIME_SELECTION_QUALIFIER', 'HL70158', -1),
'QRF_9': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'WHEN_QUANTITY_TIMING_QUALIFIER', None, -1),
'QRF_10': ('leaf', None, 'NM', 'SEARCH_CONFIDENCE_THRESHOLD', None, -1),
'QRI_1': ('leaf', None, 'NM', 'CANDIDATE_CONFIDENCE', None, -1),
'QRI_2': ('leaf', None, 'IS', 'MATCH_REASON_CODE', 'HL70392', -1),
'QRI_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ALGORITHM_DESCRIPTOR', 'HL70393', -1),
'RCP_1': ('leaf', None, 'ID', 'QUERY_PRIORITY', 'HL70091', -1),
'RCP_2': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'QUANTITY_LIMITED_REQUEST', 'HL70126', -1),
'RCP_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESPONSE_MODALITY', 'HL70394', -1),
'RCP_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXECUTION_AND_DELIVERY_TIME', None, -1),
'RCP_5': ('leaf', None, 'ID', 'MODIFY_INDICATOR', 'HL70395', -1),
'RCP_6': ('sequence', DATATYPES_STRUCTS['SRT'], 'SRT', 'SORT_BY_FIELD', None, -1),
'RCP_7': ('leaf', None, 'ID', 'SEGMENT_GROUP_INCLUSION', None, -1),
'RDF_1': ('leaf', None, 'NM', 'NUMBER_OF_COLUMNS_PER_ROW', None, -1),
'RDF_2': ('sequence', DATATYPES_STRUCTS['RCD'], 'RCD', 'COLUMN_DESCRIPTION', 'HL70440', -1),
'RDT_1': ('leaf', None, 'varies', 'COLUMN_VALUE', None, -1),
'RF1_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REFERRAL_STATUS', 'HL70283', -1),
'RF1_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REFERRAL_PRIORITY', 'HL70280', -1),
'RF1_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REFERRAL_TYPE', 'HL70281', -1),
'RF1_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REFERRAL_DISPOSITION', 'HL70282', -1),
'RF1_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REFERRAL_CATEGORY', 'HL70284', -1),
'RF1_6': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'ORIGINATING_REFERRAL_IDENTIFIER', None, -1),
'RF1_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EFFECTIVE_DATE', None, -1),
'RF1_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EXPIRATION_DATE', None, -1),
'RF1_9': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'PROCESS_DATE', None, -1),
'RF1_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REFERRAL_REASON', 'HL70336', -1),
'RF1_11': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'EXTERNAL_REFERRAL_IDENTIFIER', None, -1),
'RGS_1': ('leaf', None, 'SI', 'SET_ID_RGS', None, -1),
'RGS_2': ('leaf', None, 'ID', 'SEGMENT_ACTION_CODE', 'HL70206', -1),
'RGS_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RESOURCE_GROUP_ID', None, -1),
'RMI_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RISK_MANAGEMENT_INCIDENT_CODE', 'HL70427', -1),
'RMI_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_INCIDENT', None, -1),
'RMI_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INCIDENT_TYPE_CODE', 'HL70428', -1),
'ROL_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'ROLE_INSTANCE_ID', None, -1),
'ROL_2': ('leaf', None, 'ID', 'ACTION_CODE', 'HL70287', -1),
'ROL_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ROLE_ROL', 'HL70443', -1),
'ROL_4': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ROLE_PERSON', None, -1),
'ROL_5': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ROLE_BEGIN_DATE_TIME', None, -1),
'ROL_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ROLE_END_DATE_TIME', None, -1),
'ROL_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ROLE_DURATION', None, -1),
'ROL_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ROLE_ACTION_REASON', None, -1),
'ROL_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROVIDER_TYPE', None, -1),
'ROL_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ORGANIZATION_UNIT_TYPE', 'HL70406', -1),
'ROL_11': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'OFFICE_HOME_ADDRESS_BIRTHPLACE', None, -1),
'ROL_12': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PHONE', None, -1),
'RQ1_1': ('leaf', None, 'ST', 'ANTICIPATED_PRICE', None, -1),
'RQ1_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MANUFACTURER_IDENTIFIER', 'HL70385', -1),
'RQ1_3': ('leaf', None, 'ST', 'MANUFACTURER_S_CATALOG', None, -1),
'RQ1_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'VENDOR_ID', None, -1),
'RQ1_5': ('leaf', None, 'ST', 'VENDOR_CATALOG', None, -1),
'RQ1_6': ('leaf', None, 'ID', 'TAXABLE', 'HL70136', -1),
'RQ1_7': ('leaf', None, 'ID', 'SUBSTITUTE_ALLOWED', 'HL70136', -1),
'RQD_1': ('leaf', None, 'SI', 'REQUISITION_LINE_NUMBER', None, -1),
'RQD_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ITEM_CODE_INTERNAL', None, -1),
'RQD_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ITEM_CODE_EXTERNAL', None, -1),
'RQD_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'HOSPITAL_ITEM_CODE', None, -1),
'RQD_5': ('leaf', None, 'NM', 'REQUISITION_QUANTITY', None, -1),
'RQD_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUISITION_UNIT_OF_MEASURE', None, -1),
'RQD_7': ('leaf', None, 'IS', 'DEPT_COST_CENTER', 'HL70319', -1),
'RQD_8': ('leaf', None, 'IS', 'ITEM_NATURAL_ACCOUNT_CODE', 'HL70320', -1),
'RQD_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DELIVER_TO_ID', None, -1),
'RQD_10': ('leaf', None, 'DT', 'DATE_NEEDED', None, -1),
'RXA_1': ('leaf', None, 'NM', 'GIVE_SUB_ID_COUNTER', None, -1),
'RXA_2': ('leaf', None, 'NM', 'ADMINISTRATION_SUB_ID_COUNTER', None, -1),
'RXA_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_START_OF_ADMINISTRATION', None, -1),
'RXA_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_END_OF_ADMINISTRATION', None, -1),
'RXA_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMINISTERED_CODE', 'HL70292', -1),
'RXA_6': ('leaf', None, 'NM', 'ADMINISTERED_AMOUNT', None, -1),
'RXA_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMINISTERED_UNITS', None, -1),
'RXA_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMINISTERED_DOSAGE_FORM', None, -1),
'RXA_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMINISTRATION_NOTES', None, -1),
'RXA_10': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ADMINISTERING_PROVIDER', None, -1),
'RXA_11': ('sequence', DATATYPES_STRUCTS['LA2'], 'LA2', 'ADMINISTERED_AT_LOCATION', None, -1),
'RXA_12': ('leaf', None, 'ST', 'ADMINISTERED_PER_TIME_UNIT', None, -1),
'RXA_13': ('leaf', None, 'NM', 'ADMINISTERED_STRENGTH', None, -1),
'RXA_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMINISTERED_STRENGTH_UNITS', None, -1),
'RXA_15': ('leaf', None, 'ST', 'SUBSTANCE_LOT_NUMBER', None, -1),
'RXA_16': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SUBSTANCE_EXPIRATION_DATE', None, -1),
'RXA_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_MANUFACTURER_NAME', 'HL70227', -1),
'RXA_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_TREATMENT_REFUSAL_REASON', None, -1),
'RXA_19': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INDICATION', None, -1),
'RXA_20': ('leaf', None, 'ID', 'COMPLETION_STATUS', 'HL70322', -1),
'RXA_21': ('leaf', None, 'ID', 'ACTION_CODE_RXA', 'HL70323', -1),
'RXA_22': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SYSTEM_ENTRY_DATE_TIME', None, -1),
'RXA_23': ('leaf', None, 'NM', 'ADMINISTERED_DRUG_STRENGTH_VOLUME', None, -1),
'RXA_24': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ADMINISTERED_DRUG_STRENGTH_VOLUME_UNITS', None, -1),
'RXA_25': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ADMINISTERED_BARCODE_IDENTIFIER', None, -1),
'RXA_26': ('leaf', None, 'ID', 'PHARMACY_ORDER_TYPE', 'HL70480', -1),
'RXC_1': ('leaf', None, 'ID', 'RX_COMPONENT_TYPE', 'HL70166', -1),
'RXC_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'COMPONENT_CODE', None, -1),
'RXC_3': ('leaf', None, 'NM', 'COMPONENT_AMOUNT', None, -1),
'RXC_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'COMPONENT_UNITS', None, -1),
'RXC_5': ('leaf', None, 'NM', 'COMPONENT_STRENGTH', None, -1),
'RXC_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'COMPONENT_STRENGTH_UNITS', None, -1),
'RXC_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUPPLEMENTARY_CODE', None, -1),
'RXC_8': ('leaf', None, 'NM', 'COMPONENT_DRUG_STRENGTH_VOLUME', None, -1),
'RXC_9': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'COMPONENT_DRUG_STRENGTH_VOLUME_UNITS', None, -1),
'RXD_1': ('leaf', None, 'NM', 'DISPENSE_SUB_ID_COUNTER', None, -1),
'RXD_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DISPENSE_GIVE_CODE', 'HL70292', -1),
'RXD_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_DISPENSED', None, -1),
'RXD_4': ('leaf', None, 'NM', 'ACTUAL_DISPENSE_AMOUNT', None, -1),
'RXD_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ACTUAL_DISPENSE_UNITS', None, -1),
'RXD_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ACTUAL_DOSAGE_FORM', None, -1),
'RXD_7': ('leaf', None, 'ST', 'PRESCRIPTION_NUMBER', None, -1),
'RXD_8': ('leaf', None, 'NM', 'NUMBER_OF_REFILLS_REMAINING', None, -1),
'RXD_9': ('leaf', None, 'ST', 'DISPENSE_NOTES', None, -1),
'RXD_10': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'DISPENSING_PROVIDER', None, -1),
'RXD_11': ('leaf', None, 'ID', 'SUBSTITUTION_STATUS', 'HL70167', -1),
'RXD_12': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'TOTAL_DAILY_DOSE', None, -1),
'RXD_13': ('sequence', DATATYPES_STRUCTS['LA2'], 'LA2', 'DISPENSE_TO_LOCATION', None, -1),
'RXD_14': ('leaf', None, 'ID', 'NEEDS_HUMAN_REVIEW', 'HL70136', -1),
'RXD_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'PHARMACY_TREATMENT_SUPPLIER_S_SPECIAL_DISPENSING_INSTRUCTIONS', None, -1),
'RXD_16': ('leaf', None, 'NM', 'ACTUAL_STRENGTH', None, -1),
'RXD_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ACTUAL_STRENGTH_UNIT', None, -1),
'RXD_18': ('leaf', None, 'ST', 'SUBSTANCE_LOT_NUMBER', None, -1),
'RXD_19': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SUBSTANCE_EXPIRATION_DATE', None, -1),
'RXD_20': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_MANUFACTURER_NAME', 'HL70227', -1),
'RXD_21': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INDICATION', None, -1),
'RXD_22': ('leaf', None, 'NM', 'DISPENSE_PACKAGE_SIZE', None, -1),
'RXD_23': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DISPENSE_PACKAGE_SIZE_UNIT', None, -1),
'RXD_24': ('leaf', None, 'ID', 'DISPENSE_PACKAGE_METHOD', 'HL70321', -1),
'RXD_25': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUPPLEMENTARY_CODE', None, -1),
'RXD_26': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INITIATING_LOCATION', None, -1),
'RXD_27': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PACKAGING_ASSEMBLY_LOCATION', None, -1),
'RXD_28': ('leaf', None, 'NM', 'ACTUAL_DRUG_STRENGTH_VOLUME', None, -1),
'RXD_29': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ACTUAL_DRUG_STRENGTH_VOLUME_UNITS', None, -1),
'RXD_30': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'DISPENSE_TO_PHARMACY', None, -1),
'RXD_31': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'DISPENSE_TO_PHARMACY_ADDRESS', None, -1),
'RXD_32': ('leaf', None, 'ID', 'PHARMACY_ORDER_TYPE', 'HL70480', -1),
'RXD_33': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'DISPENSE_TYPE', 'HL70484', -1),
'RXE_1': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'QUANTITY_TIMING', None, -1),
'RXE_2': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_CODE', 'HL70292', -1),
'RXE_3': ('leaf', None, 'NM', 'GIVE_AMOUNT_MINIMUM', None, -1),
'RXE_4': ('leaf', None, 'NM', 'GIVE_AMOUNT_MAXIMUM', None, -1),
'RXE_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_UNITS', None, -1),
'RXE_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_DOSAGE_FORM', None, -1),
'RXE_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROVIDER_S_ADMINISTRATION_INSTRUCTIONS', None, -1),
'RXE_8': ('sequence', DATATYPES_STRUCTS['LA1'], 'LA1', 'DELIVER_TO_LOCATION', None, -1),
'RXE_9': ('leaf', None, 'ID', 'SUBSTITUTION_STATUS', 'HL70167', -1),
'RXE_10': ('leaf', None, 'NM', 'DISPENSE_AMOUNT', None, -1),
'RXE_11': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DISPENSE_UNITS', None, -1),
'RXE_12': ('leaf', None, 'NM', 'NUMBER_OF_REFILLS', None, -1),
'RXE_13': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ORDERING_PROVIDER_S_DEA_NUMBER', None, -1),
'RXE_14': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PHARMACIST_TREATMENT_SUPPLIER_S_VERIFIER_ID', None, -1),
'RXE_15': ('leaf', None, 'ST', 'PRESCRIPTION_NUMBER', None, -1),
'RXE_16': ('leaf', None, 'NM', 'NUMBER_OF_REFILLS_REMAINING', None, -1),
'RXE_17': ('leaf', None, 'NM', 'NUMBER_OF_REFILLS_DOSES_DISPENSED', None, -1),
'RXE_18': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'D_T_OF_MOST_RECENT_REFILL_OR_DOSE_DISPENSED', None, -1),
'RXE_19': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'TOTAL_DAILY_DOSE', None, -1),
'RXE_20': ('leaf', None, 'ID', 'NEEDS_HUMAN_REVIEW', 'HL70136', -1),
'RXE_21': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'PHARMACY_TREATMENT_SUPPLIER_S_SPECIAL_DISPENSING_INSTRUCTIONS', None, -1),
'RXE_22': ('leaf', None, 'ST', 'GIVE_PER_TIME_UNIT', None, -1),
'RXE_23': ('leaf', None, 'ST', 'GIVE_RATE_AMOUNT', None, -1),
'RXE_24': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_RATE_UNITS', None, -1),
'RXE_25': ('leaf', None, 'NM', 'GIVE_STRENGTH', None, -1),
'RXE_26': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_STRENGTH_UNITS', None, -1),
'RXE_27': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_INDICATION', None, -1),
'RXE_28': ('leaf', None, 'NM', 'DISPENSE_PACKAGE_SIZE', None, -1),
'RXE_29': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DISPENSE_PACKAGE_SIZE_UNIT', None, -1),
'RXE_30': ('leaf', None, 'ID', 'DISPENSE_PACKAGE_METHOD', 'HL70321', -1),
'RXE_31': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUPPLEMENTARY_CODE', None, -1),
'RXE_32': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ORIGINAL_ORDER_DATE_TIME', None, -1),
'RXE_33': ('leaf', None, 'NM', 'GIVE_DRUG_STRENGTH_VOLUME', None, -1),
'RXE_34': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'GIVE_DRUG_STRENGTH_VOLUME_UNITS', None, -1),
'RXE_35': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONTROLLED_SUBSTANCE_SCHEDULE', 'HL70477', -1),
'RXE_36': ('leaf', None, 'ID', 'FORMULARY_STATUS', 'HL70478', -1),
'RXE_37': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'PHARMACEUTICAL_SUBSTANCE_ALTERNATIVE', None, -1),
'RXE_38': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'PHARMACY_OF_MOST_RECENT_FILL', None, -1),
'RXE_39': ('leaf', None, 'NM', 'INITIAL_DISPENSE_AMOUNT', None, -1),
'RXE_40': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'DISPENSING_PHARMACY', None, -1),
'RXE_41': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'DISPENSING_PHARMACY_ADDRESS', None, -1),
'RXE_42': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'DELIVER_TO_PATIENT_LOCATION', None, -1),
'RXE_43': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'DELIVER_TO_ADDRESS', None, -1),
'RXE_44': ('leaf', None, 'ID', 'PHARMACY_ORDER_TYPE', 'HL70480', -1),
'RXG_1': ('leaf', None, 'NM', 'GIVE_SUB_ID_COUNTER', None, -1),
'RXG_2': ('leaf', None, 'NM', 'DISPENSE_SUB_ID_COUNTER', None, -1),
'RXG_3': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'QUANTITY_TIMING', None, -1),
'RXG_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_CODE', 'HL70292', -1),
'RXG_5': ('leaf', None, 'NM', 'GIVE_AMOUNT_MINIMUM', None, -1),
'RXG_6': ('leaf', None, 'NM', 'GIVE_AMOUNT_MAXIMUM', None, -1),
'RXG_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_UNITS', None, -1),
'RXG_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_DOSAGE_FORM', None, -1),
'RXG_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMINISTRATION_NOTES', None, -1),
'RXG_10': ('leaf', None, 'ID', 'SUBSTITUTION_STATUS', 'HL70167', -1),
'RXG_11': ('sequence', DATATYPES_STRUCTS['LA2'], 'LA2', 'DISPENSE_TO_LOCATION', None, -1),
'RXG_12': ('leaf', None, 'ID', 'NEEDS_HUMAN_REVIEW', 'HL70136', -1),
'RXG_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE',
'PHARMACY_TREATMENT_SUPPLIER_S_SPECIAL_ADMINISTRATION_INSTRUCTIONS', None, -1),
'RXG_14': ('leaf', None, 'ST', 'GIVE_PER_TIME_UNIT', None, -1),
'RXG_15': ('leaf', None, 'ST', 'GIVE_RATE_AMOUNT', None, -1),
'RXG_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_RATE_UNITS', None, -1),
'RXG_17': ('leaf', None, 'NM', 'GIVE_STRENGTH', None, -1),
'RXG_18': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'GIVE_STRENGTH_UNITS', None, -1),
'RXG_19': ('leaf', None, 'ST', 'SUBSTANCE_LOT_NUMBER', None, -1),
'RXG_20': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SUBSTANCE_EXPIRATION_DATE', None, -1),
'RXG_21': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_MANUFACTURER_NAME', 'HL70227', -1),
'RXG_22': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INDICATION', None, -1),
'RXG_23': ('leaf', None, 'NM', 'GIVE_DRUG_STRENGTH_VOLUME', None, -1),
'RXG_24': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'GIVE_DRUG_STRENGTH_VOLUME_UNITS', None, -1),
'RXG_25': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'GIVE_BARCODE_IDENTIFIER', None, -1),
'RXG_26': ('leaf', None, 'ID', 'PHARMACY_ORDER_TYPE', 'HL70480', -1),
'RXO_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUESTED_GIVE_CODE', None, -1),
'RXO_2': ('leaf', None, 'NM', 'REQUESTED_GIVE_AMOUNT_MINIMUM', None, -1),
'RXO_3': ('leaf', None, 'NM', 'REQUESTED_GIVE_AMOUNT_MAXIMUM', None, -1),
'RXO_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUESTED_GIVE_UNITS', None, -1),
'RXO_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUESTED_DOSAGE_FORM', None, -1),
'RXO_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROVIDER_S_PHARMACY_TREATMENT_INSTRUCTIONS', None, -1),
'RXO_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROVIDER_S_ADMINISTRATION_INSTRUCTIONS', None, -1),
'RXO_8': ('sequence', DATATYPES_STRUCTS['LA1'], 'LA1', 'DELIVER_TO_LOCATION', None, -1),
'RXO_9': ('leaf', None, 'ID', 'ALLOW_SUBSTITUTIONS', 'HL70161', -1),
'RXO_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUESTED_DISPENSE_CODE', None, -1),
'RXO_11': ('leaf', None, 'NM', 'REQUESTED_DISPENSE_AMOUNT', None, -1),
'RXO_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUESTED_DISPENSE_UNITS', None, -1),
'RXO_13': ('leaf', None, 'NM', 'NUMBER_OF_REFILLS', None, -1),
'RXO_14': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ORDERING_PROVIDER_S_DEA_NUMBER', None, -1),
'RXO_15': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PHARMACIST_TREATMENT_SUPPLIER_S_VERIFIER_ID', None, -1),
'RXO_16': ('leaf', None, 'ID', 'NEEDS_HUMAN_REVIEW', 'HL70136', -1),
'RXO_17': ('leaf', None, 'ST', 'REQUESTED_GIVE_PER_TIME_UNIT', None, -1),
'RXO_18': ('leaf', None, 'NM', 'REQUESTED_GIVE_STRENGTH', None, -1),
'RXO_19': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUESTED_GIVE_STRENGTH_UNITS', None, -1),
'RXO_20': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'INDICATION', None, -1),
'RXO_21': ('leaf', None, 'ST', 'REQUESTED_GIVE_RATE_AMOUNT', None, -1),
'RXO_22': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'REQUESTED_GIVE_RATE_UNITS', None, -1),
'RXO_23': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'TOTAL_DAILY_DOSE', None, -1),
'RXO_24': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUPPLEMENTARY_CODE', None, -1),
'RXO_25': ('leaf', None, 'NM', 'REQUESTED_DRUG_STRENGTH_VOLUME', None, -1),
'RXO_26': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'REQUESTED_DRUG_STRENGTH_VOLUME_UNITS', None, -1),
'RXO_27': ('leaf', None, 'ID', 'PHARMACY_ORDER_TYPE', 'HL70480', -1),
'RXO_28': ('leaf', None, 'NM', 'DISPENSING_INTERVAL', None, -1),
'RXR_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ROUTE', 'HL70162', -1),
'RXR_2': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ADMINISTRATION_SITE', 'HL70163', -1),
'RXR_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ADMINISTRATION_DEVICE', 'HL70164', -1),
'RXR_4': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ADMINISTRATION_METHOD', 'HL70165', -1),
'RXR_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ROUTING_INSTRUCTION', None, -1),
'RXR_6': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ADMINISTRATION_SITE_MODIFIER', 'HL70495', -1),
'SAC_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'EXTERNAL_ACCESSION_IDENTIFIER', None, -1),
'SAC_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'ACCESSION_IDENTIFIER', None, -1),
'SAC_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'CONTAINER_IDENTIFIER', None, -1),
'SAC_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PRIMARY_PARENT_CONTAINER_IDENTIFIER', None, -1),
'SAC_5': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'EQUIPMENT_CONTAINER_IDENTIFIER', None, -1),
'SAC_6': ('sequence', DATATYPES_STRUCTS['SPS'], 'SPS', 'SPECIMEN_SOURCE', None, -1),
'SAC_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'REGISTRATION_DATE_TIME', None, -1),
'SAC_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTAINER_STATUS', 'HL70370', -1),
'SAC_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CARRIER_TYPE', 'HL70378', -1),
'SAC_10': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'CARRIER_IDENTIFIER', None, -1),
'SAC_11': ('sequence', DATATYPES_STRUCTS['NA'], 'NA', 'POSITION_IN_CARRIER', None, -1),
'SAC_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TRAY_TYPE_SAC', 'HL70379', -1),
'SAC_13': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'TRAY_IDENTIFIER', None, -1),
'SAC_14': ('sequence', DATATYPES_STRUCTS['NA'], 'NA', 'POSITION_IN_TRAY', None, -1),
'SAC_15': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LOCATION', None, -1),
'SAC_16': ('leaf', None, 'NM', 'CONTAINER_HEIGHT', None, -1),
'SAC_17': ('leaf', None, 'NM', 'CONTAINER_DIAMETER', None, -1),
'SAC_18': ('leaf', None, 'NM', 'BARRIER_DELTA', None, -1),
'SAC_19': ('leaf', None, 'NM', 'BOTTOM_DELTA', None, -1),
'SAC_20': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CONTAINER_HEIGHT_DIAMETER_DELTA_UNITS', None, -1),
'SAC_21': ('leaf', None, 'NM', 'CONTAINER_VOLUME', None, -1),
'SAC_22': ('leaf', None, 'NM', 'AVAILABLE_SPECIMEN_VOLUME', None, -1),
'SAC_23': ('leaf', None, 'NM', 'INITIAL_SPECIMEN_VOLUME', None, -1),
'SAC_24': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'VOLUME_UNITS', None, -1),
'SAC_25': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SEPARATOR_TYPE', 'HL70380', -1),
'SAC_26': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CAP_TYPE', 'HL70381', -1),
'SAC_27': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'ADDITIVE', 'HL70371', -1),
'SAC_28': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SPECIMEN_COMPONENT', None, -1),
'SAC_29': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'DILUTION_FACTOR', None, -1),
'SAC_30': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'TREATMENT', 'HL70373', -1),
'SAC_31': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'TEMPERATURE', None, -1),
'SAC_32': ('leaf', None, 'NM', 'HEMOLYSIS_INDEX', None, -1),
'SAC_33': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'HEMOLYSIS_INDEX_UNITS', None, -1),
'SAC_34': ('leaf', None, 'NM', 'LIPEMIA_INDEX', None, -1),
'SAC_35': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'LIPEMIA_INDEX_UNITS', None, -1),
'SAC_36': ('leaf', None, 'NM', 'ICTERUS_INDEX', None, -1),
'SAC_37': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ICTERUS_INDEX_UNITS', None, -1),
'SAC_38': ('leaf', None, 'NM', 'FIBRIN_INDEX', None, -1),
'SAC_39': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FIBRIN_INDEX_UNITS', None, -1),
'SAC_40': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SYSTEM_INDUCED_CONTAMINANTS', 'HL70374', -1),
'SAC_41': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DRUG_INTERFERENCE', 'HL70382', -1),
'SAC_42': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ARTIFICIAL_BLOOD', 'HL70375', -1),
'SAC_43': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIAL_HANDLING_CODE', 'HL70376', -1),
'SAC_44': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'OTHER_ENVIRONMENTAL_FACTORS', 'HL70377', -1),
'SCH_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_APPOINTMENT_ID', None, -1),
'SCH_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_APPOINTMENT_ID', None, -1),
'SCH_3': ('leaf', None, 'NM', 'OCCURRENCE_NUMBER', None, -1),
'SCH_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_GROUP_NUMBER', None, -1),
'SCH_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SCHEDULE_ID', None, -1),
'SCH_6': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EVENT_REASON', None, -1),
'SCH_7': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPOINTMENT_REASON', 'HL70276', -1),
'SCH_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPOINTMENT_TYPE', 'HL70277', -1),
'SCH_9': ('leaf', None, 'NM', 'APPOINTMENT_DURATION', None, -1),
'SCH_10': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPOINTMENT_DURATION_UNITS', None, -1),
'SCH_11': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'APPOINTMENT_TIMING_QUANTITY', None, -1),
'SCH_12': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PLACER_CONTACT_PERSON', None, -1),
'SCH_13': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PLACER_CONTACT_PHONE_NUMBER', None, -1),
'SCH_14': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'PLACER_CONTACT_ADDRESS', None, -1),
'SCH_15': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'PLACER_CONTACT_LOCATION', None, -1),
'SCH_16': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'FILLER_CONTACT_PERSON', None, -1),
'SCH_17': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'FILLER_CONTACT_PHONE_NUMBER', None, -1),
'SCH_18': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'FILLER_CONTACT_ADDRESS', None, -1),
'SCH_19': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'FILLER_CONTACT_LOCATION', None, -1),
'SCH_20': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ENTERED_BY_PERSON', None, -1),
'SCH_21': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'ENTERED_BY_PHONE_NUMBER', None, -1),
'SCH_22': ('sequence', DATATYPES_STRUCTS['PL'], 'PL', 'ENTERED_BY_LOCATION', None, -1),
'SCH_23': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PARENT_PLACER_APPOINTMENT_ID', None, -1),
'SCH_24': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PARENT_FILLER_APPOINTMENT_ID', None, -1),
'SCH_25': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'FILLER_STATUS_CODE', 'HL70278', -1),
'SCH_26': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_ORDER_NUMBER', None, -1),
'SCH_27': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_ORDER_NUMBER', None, -1),
'SFT_1': ('sequence', DATATYPES_STRUCTS['XON'], 'XON', 'SOFTWARE_VENDOR_ORGANIZATION', None, -1),
'SFT_2': ('leaf', None, 'ST', 'SOFTWARE_CERTIFIED_VERSION_OR_RELEASE_NUMBER', None, -1),
'SFT_3': ('leaf', None, 'ST', 'SOFTWARE_PRODUCT_NAME', None, -1),
'SFT_4': ('leaf', None, 'ST', 'SOFTWARE_BINARY_ID', None, -1),
'SFT_5': ('leaf', None, 'TX', 'SOFTWARE_PRODUCT_INFORMATION', None, -1),
'SFT_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SOFTWARE_INSTALL_DATE', None, -1),
'SID_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'APPLICATION_METHOD_IDENTIFIER', None, -1),
'SID_2': ('leaf', None, 'ST', 'SUBSTANCE_LOT_NUMBER', None, -1),
'SID_3': ('leaf', None, 'ST', 'SUBSTANCE_CONTAINER_IDENTIFIER', None, -1),
'SID_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SUBSTANCE_MANUFACTURER_IDENTIFIER', 'HL70385', -1),
'SPM_1': ('leaf', None, 'SI', 'SET_ID_SPM', None, -1),
'SPM_2': ('sequence', DATATYPES_STRUCTS['EIP'], 'EIP', 'SPECIMEN_ID', None, -1),
'SPM_3': ('sequence', DATATYPES_STRUCTS['EIP'], 'EIP', 'SPECIMEN_PARENT_IDS', None, -1),
'SPM_4': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_TYPE', 'HL70487', -1),
'SPM_5': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_TYPE_MODIFIER', 'HL70541', -1),
'SPM_6': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_ADDITIVES', 'HL70371', -1),
'SPM_7': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_COLLECTION_METHOD', 'HL70488', -1),
'SPM_8': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_SOURCE_SITE', None, -1),
'SPM_9': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_SOURCE_SITE_MODIFIER', 'HL70542', -1),
'SPM_10': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_COLLECTION_SITE', 'HL70543', -1),
'SPM_11': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_ROLE', 'HL70369', -1),
'SPM_12': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'SPECIMEN_COLLECTION_AMOUNT', None, -1),
'SPM_13': ('leaf', None, 'NM', 'GROUPED_SPECIMEN_COUNT', None, -1),
'SPM_14': ('leaf', None, 'ST', 'SPECIMEN_DESCRIPTION', None, -1),
'SPM_15': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_HANDLING_CODE', 'HL70376', -1),
'SPM_16': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_RISK_CODE', 'HL70489', -1),
'SPM_17': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'SPECIMEN_COLLECTION_DATE_TIME', None, -1),
'SPM_18': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SPECIMEN_RECEIVED_DATE_TIME', None, -1),
'SPM_19': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'SPECIMEN_EXPIRATION_DATE_TIME', None, -1),
'SPM_20': ('leaf', None, 'ID', 'SPECIMEN_AVAILABILITY', 'HL70136', -1),
'SPM_21': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_REJECT_REASON', 'HL70490', -1),
'SPM_22': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_QUALITY', 'HL70491', -1),
'SPM_23': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_APPROPRIATENESS', 'HL70492', -1),
'SPM_24': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_CONDITION', 'HL70493', -1),
'SPM_25': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'SPECIMEN_CURRENT_QUANTITY', None, -1),
'SPM_26': ('leaf', None, 'NM', 'NUMBER_OF_SPECIMEN_CONTAINERS', None, -1),
'SPM_27': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONTAINER_TYPE', None, -1),
'SPM_28': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'CONTAINER_CONDITION', 'HL70544', -1),
'SPM_29': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'SPECIMEN_CHILD_ROLE', 'HL70494', -1),
'SPR_1': ('leaf', None, 'ST', 'QUERY_TAG', None, -1),
'SPR_2': ('leaf', None, 'ID', 'QUERY_RESPONSE_FORMAT_CODE', 'HL70106', -1),
'SPR_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'STORED_PROCEDURE_NAME', None, -1),
'SPR_4': ('sequence', DATATYPES_STRUCTS['QIP'], 'QIP', 'INPUT_PARAMETER_LIST', None, -1),
'STF_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PRIMARY_KEY_VALUE_STF', 'HL79999', -1),
'STF_2': ('sequence', DATATYPES_STRUCTS['CX'], 'CX', 'STAFF_IDENTIFIER_LIST', None, -1),
'STF_3': ('sequence', DATATYPES_STRUCTS['XPN'], 'XPN', 'STAFF_NAME', None, -1),
'STF_4': ('leaf', None, 'IS', 'STAFF_TYPE', 'HL70182', -1),
'STF_5': ('leaf', None, 'IS', 'ADMINISTRATIVE_SEX', 'HL70001', -1),
'STF_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DATE_TIME_OF_BIRTH', None, -1),
'STF_7': ('leaf', None, 'ID', 'ACTIVE_INACTIVE_FLAG', 'HL70183', -1),
'STF_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'DEPARTMENT', 'HL70184', -1),
'STF_9': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'HOSPITAL_SERVICE_STF', 'HL70069', -1),
'STF_10': ('sequence', DATATYPES_STRUCTS['XTN'], 'XTN', 'PHONE', None, -1),
'STF_11': ('sequence', DATATYPES_STRUCTS['XAD'], 'XAD', 'OFFICE_HOME_ADDRESS_BIRTHPLACE', None, -1),
'STF_12': ('sequence', DATATYPES_STRUCTS['DIN'], 'DIN', 'INSTITUTION_ACTIVATION_DATE', 'HL70537', -1),
'STF_13': ('sequence', DATATYPES_STRUCTS['DIN'], 'DIN', 'INSTITUTION_INACTIVATION_DATE', 'HL70537', -1),
'STF_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'BACKUP_PERSON_ID', None, -1),
'STF_15': ('leaf', None, 'ST', 'E_MAIL_ADDRESS', None, -1),
'STF_16': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PREFERRED_METHOD_OF_CONTACT', 'HL70185', -1),
'STF_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'MARITAL_STATUS', 'HL70002', -1),
'STF_18': ('leaf', None, 'ST', 'JOB_TITLE', None, -1),
'STF_19': ('sequence', DATATYPES_STRUCTS['JCC'], 'JCC', 'JOB_CODE_CLASS', None, -1),
'STF_20': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'EMPLOYMENT_STATUS_CODE', 'HL70066', -1),
'STF_21': ('leaf', None, 'ID', 'ADDITIONAL_INSURED_ON_AUTO', 'HL70136', -1),
'STF_22': ('sequence', DATATYPES_STRUCTS['DLN'], 'DLN', 'DRIVER_S_LICENSE_NUMBER_STAFF', None, -1),
'STF_23': ('leaf', None, 'ID', 'COPY_AUTO_INS', 'HL70136', -1),
'STF_24': ('leaf', None, 'DT', 'AUTO_INS_EXPIRES', None, -1),
'STF_25': ('leaf', None, 'DT', 'DATE_LAST_DMV_REVIEW', None, -1),
'STF_26': ('leaf', None, 'DT', 'DATE_NEXT_DMV_REVIEW', None, -1),
'STF_27': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'RACE', 'HL70005', -1),
'STF_28': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ETHNIC_GROUP', 'HL70189', -1),
'STF_29': ('leaf', None, 'ID', 'RE_ACTIVATION_APPROVAL_INDICATOR', 'HL70136', -1),
'STF_30': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'CITIZENSHIP', 'HL70171', -1),
'STF_31': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DEATH_DATE_AND_TIME', None, -1),
'STF_32': ('leaf', None, 'ID', 'DEATH_INDICATOR', 'HL70136', -1),
'STF_33': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'INSTITUTION_RELATIONSHIP_TYPE_CODE', 'HL70538', -1),
'STF_34': ('sequence', DATATYPES_STRUCTS['DR'], 'DR', 'INSTITUTION_RELATIONSHIP_PERIOD', None, -1),
'STF_35': ('leaf', None, 'DT', 'EXPECTED_RETURN_DATE', None, -1),
'STF_36': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'COST_CENTER_CODE', 'HL70539', -1),
'STF_37': ('leaf', None, 'ID', 'GENERIC_CLASSIFICATION_INDICATOR', 'HL70136', -1),
'STF_38': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'INACTIVE_REASON_CODE', 'HL70540', -1),
'TCC_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNIVERSAL_SERVICE_IDENTIFIER', None, -1),
'TCC_2': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'TEST_APPLICATION_IDENTIFIER', None, -1),
'TCC_3': ('sequence', DATATYPES_STRUCTS['SPS'], 'SPS', 'SPECIMEN_SOURCE', None, -1),
'TCC_4': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'AUTO_DILUTION_FACTOR_DEFAULT', None, -1),
'TCC_5': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'RERUN_DILUTION_FACTOR_DEFAULT', None, -1),
'TCC_6': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'PRE_DILUTION_FACTOR_DEFAULT', None, -1),
'TCC_7': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'ENDOGENOUS_CONTENT_OF_PRE_DILUTION_DILUENT', None, -1),
'TCC_8': ('leaf', None, 'NM', 'INVENTORY_LIMITS_WARNING_LEVEL', None, -1),
'TCC_9': ('leaf', None, 'ID', 'AUTOMATIC_RERUN_ALLOWED', 'HL70136', -1),
'TCC_10': ('leaf', None, 'ID', 'AUTOMATIC_REPEAT_ALLOWED', 'HL70136', -1),
'TCC_11': ('leaf', None, 'ID', 'AUTOMATIC_REFLEX_ALLOWED', 'HL70136', -1),
'TCC_12': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'EQUIPMENT_DYNAMIC_RANGE', None, -1),
'TCC_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNITS', None, -1),
'TCC_14': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PROCESSING_TYPE', 'HL70388', -1),
'TCD_1': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'UNIVERSAL_SERVICE_IDENTIFIER', None, -1),
'TCD_2': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'AUTO_DILUTION_FACTOR', None, -1),
'TCD_3': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'RERUN_DILUTION_FACTOR', None, -1),
'TCD_4': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'PRE_DILUTION_FACTOR', None, -1),
'TCD_5': ('sequence', DATATYPES_STRUCTS['SN'], 'SN', 'ENDOGENOUS_CONTENT_OF_PRE_DILUTION_DILUENT', None, -1),
'TCD_6': ('leaf', None, 'ID', 'AUTOMATIC_REPEAT_ALLOWED', 'HL70136', -1),
'TCD_7': ('leaf', None, 'ID', 'REFLEX_ALLOWED', 'HL70136', -1),
'TCD_8': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'ANALYTE_REPEAT_STATUS', 'HL70389', -1),
'TQ1_1': ('leaf', None, 'SI', 'SET_ID_TQ1', None, -1),
'TQ1_2': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'QUANTITY', None, -1),
'TQ1_3': ('sequence', DATATYPES_STRUCTS['RPT'], 'RPT', 'REPEAT_PATTERN', 'HL70335', -1),
'TQ1_4': ('leaf', None, 'TM', 'EXPLICIT_TIME', None, -1),
'TQ1_5': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'RELATIVE_TIME_AND_UNITS', None, -1),
'TQ1_6': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'SERVICE_DURATION', None, -1),
'TQ1_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'START_DATE_TIME', None, -1),
'TQ1_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'END_DATE_TIME', None, -1),
'TQ1_9': ('sequence', DATATYPES_STRUCTS['CWE'], 'CWE', 'PRIORITY', 'HL70485', -1),
'TQ1_10': ('leaf', None, 'TX', 'CONDITION_TEXT', None, -1),
'TQ1_11': ('leaf', None, 'TX', 'TEXT_INSTRUCTION', None, -1),
'TQ1_12': ('leaf', None, 'ID', 'CONJUNCTION', 'HL70427', -1),
'TQ1_13': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'OCCURRENCE_DURATION', None, -1),
'TQ1_14': ('leaf', None, 'NM', 'TOTAL_OCCURRENCE_S', None, -1),
'TQ2_1': ('leaf', None, 'SI', 'SET_ID_TQ2', None, -1),
'TQ2_2': ('leaf', None, 'ID', 'SEQUENCE_RESULTS_FLAG', 'HL70503', -1),
'TQ2_3': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'RELATED_PLACER_NUMBER', None, -1),
'TQ2_4': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'RELATED_FILLER_NUMBER', None, -1),
'TQ2_5': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'RELATED_PLACER_GROUP_NUMBER', None, -1),
'TQ2_6': ('leaf', None, 'ID', 'SEQUENCE_CONDITION_CODE', 'HL70504', -1),
'TQ2_7': ('leaf', None, 'ID', 'CYCLIC_ENTRY_EXIT_INDICATOR', 'HL70505', -1),
'TQ2_8': ('sequence', DATATYPES_STRUCTS['CQ'], 'CQ', 'SEQUENCE_CONDITION_TIME_INTERVAL', None, -1),
'TQ2_9': ('leaf', None, 'NM', 'CYCLIC_GROUP_MAXIMUM_NUMBER_OF_REPEATS', None, -1),
'TQ2_10': ('leaf', None, 'ID', 'SPECIAL_SERVICE_REQUEST_RELATIONSHIP', 'HL70506', -1),
'TXA_1': ('leaf', None, 'SI', 'SET_ID_TXA', None, -1),
'TXA_2': ('leaf', None, 'IS', 'DOCUMENT_TYPE', 'HL70270', -1),
'TXA_3': ('leaf', None, 'ID', 'DOCUMENT_CONTENT_PRESENTATION', 'HL70191', -1),
'TXA_4': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ACTIVITY_DATE_TIME', None, -1),
'TXA_5': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'PRIMARY_ACTIVITY_PROVIDER_CODE_NAME', None, -1),
'TXA_6': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'ORIGINATION_DATE_TIME', None, -1),
'TXA_7': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'TRANSCRIPTION_DATE_TIME', None, -1),
'TXA_8': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'EDIT_DATE_TIME', None, -1),
'TXA_9': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ORIGINATOR_CODE_NAME', None, -1),
'TXA_10': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'ASSIGNED_DOCUMENT_AUTHENTICATOR', None, -1),
'TXA_11': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'TRANSCRIPTIONIST_CODE_NAME', None, -1),
'TXA_12': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'UNIQUE_DOCUMENT_NUMBER', None, -1),
'TXA_13': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PARENT_DOCUMENT_NUMBER', None, -1),
'TXA_14': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'PLACER_ORDER_NUMBER', None, -1),
'TXA_15': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'FILLER_ORDER_NUMBER', None, -1),
'TXA_16': ('leaf', None, 'ST', 'UNIQUE_DOCUMENT_FILE_NAME', None, -1),
'TXA_17': ('leaf', None, 'ID', 'DOCUMENT_COMPLETION_STATUS', 'HL70271', -1),
'TXA_18': ('leaf', None, 'ID', 'DOCUMENT_CONFIDENTIALITY_STATUS', 'HL70272', -1),
'TXA_19': ('leaf', None, 'ID', 'DOCUMENT_AVAILABILITY_STATUS', 'HL70273', -1),
'TXA_20': ('leaf', None, 'ID', 'DOCUMENT_STORAGE_STATUS', 'HL70275', -1),
'TXA_21': ('leaf', None, 'ST', 'DOCUMENT_CHANGE_REASON', None, -1),
'TXA_22': ('sequence', DATATYPES_STRUCTS['PPN'], 'PPN', 'AUTHENTICATION_PERSON_TIME_STAMP', None, -1),
'TXA_23': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'DISTRIBUTED_COPIES_CODE_AND_NAME_OF_RECIPIENTS', None, -1),
'UB1_1': ('leaf', None, 'SI', 'SET_ID_UB1', None, -1),
'UB1_2': ('leaf', None, 'NM', 'BLOOD_DEDUCTIBLE_43', None, -1),
'UB1_3': ('leaf', None, 'NM', 'BLOOD_FURNISHED_PINTS_OF_40', None, -1),
'UB1_4': ('leaf', None, 'NM', 'BLOOD_REPLACED_PINTS_41', None, -1),
'UB1_5': ('leaf', None, 'NM', 'BLOOD_NOT_REPLACED_PINTS_42', None, -1),
'UB1_6': ('leaf', None, 'NM', 'CO_INSURANCE_DAYS_25', None, -1),
'UB1_7': ('leaf', None, 'IS', 'CONDITION_CODE_35_39', 'HL70043', -1),
'UB1_8': ('leaf', None, 'NM', 'COVERED_DAYS_23', None, -1),
'UB1_9': ('leaf', None, 'NM', 'NON_COVERED_DAYS_24', None, -1),
'UB1_10': ('sequence', DATATYPES_STRUCTS['UVC'], 'UVC', 'VALUE_AMOUNT_CODE_46_49', None, -1),
'UB1_11': ('leaf', None, 'NM', 'NUMBER_OF_GRACE_DAYS_90', None, -1),
'UB1_12': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'SPECIAL_PROGRAM_INDICATOR_44', 'HL70348', -1),
'UB1_13': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'PSRO_UR_APPROVAL_INDICATOR_87', 'HL70349', -1),
'UB1_14': ('leaf', None, 'DT', 'PSRO_UR_APPROVED_STAY_FM_88', None, -1),
'UB1_15': ('leaf', None, 'DT', 'PSRO_UR_APPROVED_STAY_TO_89', None, -1),
'UB1_16': ('sequence', DATATYPES_STRUCTS['OCD'], 'OCD', 'OCCURRENCE_28_32', None, -1),
'UB1_17': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'OCCURRENCE_SPAN_33', 'HL70351', -1),
'UB1_18': ('leaf', None, 'DT', 'OCCUR_SPAN_START_DATE_33', None, -1),
'UB1_19': ('leaf', None, 'DT', 'OCCUR_SPAN_END_DATE_33', None, -1),
'UB1_20': ('leaf', None, 'ST', 'UB_82_LOCATOR_2', None, -1),
'UB1_21': ('leaf', None, 'ST', 'UB_82_LOCATOR_9', None, -1),
'UB1_22': ('leaf', None, 'ST', 'UB_82_LOCATOR_27', None, -1),
'UB1_23': ('leaf', None, 'ST', 'UB_82_LOCATOR_45', None, -1),
'UB2_1': ('leaf', None, 'SI', 'SET_ID_UB2', None, -1),
'UB2_2': ('leaf', None, 'ST', 'CO_INSURANCE_DAYS_9', None, -1),
'UB2_3': ('leaf', None, 'IS', 'CONDITION_CODE_24_30', 'HL70043', -1),
'UB2_4': ('leaf', None, 'ST', 'COVERED_DAYS_7', None, -1),
'UB2_5': ('leaf', None, 'ST', 'NON_COVERED_DAYS_8', None, -1),
'UB2_6': ('sequence', DATATYPES_STRUCTS['UVC'], 'UVC', 'VALUE_AMOUNT_CODE', None, -1),
'UB2_7': ('sequence', DATATYPES_STRUCTS['OCD'], 'OCD', 'OCCURRENCE_CODE_DATE_32_35', None, -1),
'UB2_8': ('sequence', DATATYPES_STRUCTS['OSP'], 'OSP', 'OCCURRENCE_SPAN_CODE_DATES_36', None, -1),
'UB2_9': ('leaf', None, 'ST', 'UB92_LOCATOR_2_STATE', None, -1),
'UB2_10': ('leaf', None, 'ST', 'UB92_LOCATOR_11_STATE', None, -1),
'UB2_11': ('leaf', None, 'ST', 'UB92_LOCATOR_31_NATIONAL', None, -1),
'UB2_12': ('leaf', None, 'ST', 'DOCUMENT_CONTROL_NUMBER', None, -1),
'UB2_13': ('leaf', None, 'ST', 'UB92_LOCATOR_49_NATIONAL', None, -1),
'UB2_14': ('leaf', None, 'ST', 'UB92_LOCATOR_56_STATE', None, -1),
'UB2_15': ('leaf', None, 'ST', 'UB92_LOCATOR_57_NATIONAL', None, -1),
'UB2_16': ('leaf', None, 'ST', 'UB92_LOCATOR_78_STATE', None, -1),
'UB2_17': ('leaf', None, 'NM', 'SPECIAL_VISIT_COUNT', None, -1),
'URD_1': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'R_U_DATE_TIME', None, -1),
'URD_2': ('leaf', None, 'ID', 'REPORT_PRIORITY', 'HL70109', -1),
'URD_3': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'R_U_WHO_SUBJECT_DEFINITION', None, -1),
'URD_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'R_U_WHAT_SUBJECT_DEFINITION', 'HL70048', -1),
'URD_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'R_U_WHAT_DEPARTMENT_CODE', None, -1),
'URD_6': ('leaf', None, 'ST', 'R_U_DISPLAY_PRINT_LOCATIONS', None, -1),
'URD_7': ('leaf', None, 'ID', 'R_U_RESULTS_LEVEL', 'HL70108', -1),
'URS_1': ('leaf', None, 'ST', 'R_U_WHERE_SUBJECT_DEFINITION', None, -1),
'URS_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'R_U_WHEN_DATA_START_DATE_TIME', None, -1),
'URS_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'R_U_WHEN_DATA_END_DATE_TIME', None, -1),
'URS_4': ('leaf', None, 'ST', 'R_U_WHAT_USER_QUALIFIER', None, -1),
'URS_5': ('leaf', None, 'ST', 'R_U_OTHER_RESULTS_SUBJECT_DEFINITION', None, -1),
'URS_6': ('leaf', None, 'ID', 'R_U_WHICH_DATE_TIME_QUALIFIER', 'HL70156', -1),
'URS_7': ('leaf', None, 'ID', 'R_U_WHICH_DATE_TIME_STATUS_QUALIFIER', 'HL70157', -1),
'URS_8': ('leaf', None, 'ID', 'R_U_DATE_TIME_SELECTION_QUALIFIER', 'HL70158', -1),
'URS_9': ('sequence', DATATYPES_STRUCTS['TQ'], 'TQ', 'R_U_QUANTITY_TIMING_QUALIFIER', None, -1),
'VAR_1': ('sequence', DATATYPES_STRUCTS['EI'], 'EI', 'VARIANCE_INSTANCE_ID', None, -1),
'VAR_2': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'DOCUMENTED_DATE_TIME', None, -1),
'VAR_3': ('sequence', DATATYPES_STRUCTS['TS'], 'TS', 'STATED_VARIANCE_DATE_TIME', None, -1),
'VAR_4': ('sequence', DATATYPES_STRUCTS['XCN'], 'XCN', 'VARIANCE_ORIGINATOR', None, -1),
'VAR_5': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'VARIANCE_CLASSIFICATION', None, -1),
'VAR_6': ('leaf', None, 'ST', 'VARIANCE_DESCRIPTION', None, -1),
'VTQ_1': ('leaf', None, 'ST', 'QUERY_TAG', None, -1),
'VTQ_2': ('leaf', None, 'ID', 'QUERY_RESPONSE_FORMAT_CODE', 'HL70106', -1),
'VTQ_3': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'VT_QUERY_NAME', None, -1),
'VTQ_4': ('sequence', DATATYPES_STRUCTS['CE'], 'CE', 'VIRTUAL_TABLE_NAME', None, -1),
'VTQ_5': ('sequence', DATATYPES_STRUCTS['QSC'], 'QSC', 'SELECTION_CRITERIA', None, -1),
}
|
import datetime, random, requests
import pytz
import serial
class SerialWorker():
    '''
    Dedicated serial worker "lil jon" who always keeps an eye on the serial port.

    Usage: construct, then call setup(serialID) to open the port and start a
    background reader thread.  writeLine() queues one outgoing command;
    getText()/clear() expose the raw receive buffer; completed "\n"-terminated
    lines are collected in self.lines by proocessBuffer().
    '''
    def __init__(self):
        super().__init__()
        self.new = False      # True while a queued command is waiting to be sent
        self.command = ''     # the queued outgoing command text
        self.buff = ''        # raw receive buffer (may end with a partial line)
        self.lines = []       # completed lines split out of the buffer
        self.die = False      # set True to ask the worker loop to exit
        print('lil jon was born but not setup')
        self.ready = False

    def setup(self, serialID):
        '''Open the serial port at 115200 baud and start the reader thread.'''
        import threading  # local import: only needed once setup() is called
        self.ser = serial.Serial(serialID, 115200)
        self.serialID = serialID
        # The original referenced self.thread without ever creating it, so
        # setup() crashed with AttributeError; build the worker thread here.
        self.thread = threading.Thread(target=self.run, daemon=True)
        self.thread.start()
        print('lil jon is setup' + self.serialID)
        self.ready = True

    def writeLine(self, command):
        '''Queue one command for transmission (ignored if one is already pending).'''
        if not self.new:
            self.command = command
            self.new = True

    def clear(self):
        '''Discard the accumulated receive buffer.'''
        self.buff = ''

    def getText(self):
        '''Return the raw receive buffer (including any partial trailing line).'''
        return self.buff

    def run(self):
        '''Worker loop: send queued commands, read incoming bytes, split lines.'''
        print('lil jon started working' + self.serialID)
        try:
            while True:
                if self.die:
                    print('lil jon just got killed :(((' + self.serialID)
                    return True
                if self.new:
                    self.ser.write(bytes(self.command, 'ascii'))
                    self.new = False
                else:
                    try:
                        # if incoming bytes are waiting in the serial input buffer
                        if self.ser.inWaiting() > 0:
                            # read the bytes and decode the binary array to ASCII
                            newContent = self.ser.read(self.ser.inWaiting()).decode('ascii')
                            self.buff += newContent
                            self.proocessBuffer()
                    except Exception:
                        print('lil jon oops' + self.serialID)
        except Exception as e:
            print('lil jon just died :(((' + self.serialID)
            print(str(e))

    def proocessBuffer(self):
        '''
        Split completed "\n"-terminated lines out of self.buff into self.lines,
        keeping any trailing partial line in self.buff.
        (Name keeps the original "proocess" typo so existing callers still work.)
        '''
        if "\n" in self.buff:
            rows = self.buff.split("\n")
            # Original used `"\n" is not self.buff[-1]` -- identity comparison
            # on strings is wrong; test the suffix with endswith instead.
            if not self.buff.endswith("\n"):
                self.buff = rows[-1]  # keep the incomplete tail for next read
                del rows[-1]
            else:
                self.buff = ''
            # Original loop body was the bare name `up` (a NameError); collect
            # the completed rows instead so callers can consume them.
            for row in rows:
                if row:
                    self.lines.append(row)
        else:
            return
if __name__ == "__main__":
    # Demo entry point: open the serial port and periodically POST randomized
    # telemetry to the local server.
    # NOTE(review): the original pasted the SerialWorker.run() body here
    # verbatim, referencing `self` and using `return` at module level -- a
    # SyntaxError.  Rebuilt to preserve the evident intent (open the port,
    # drain incoming bytes, send random telemetry); confirm against callers.
    import time
    ser = serial.Serial("/dev/ttypa", 115200)
    while True:
        # Drain any pending serial input so the OS buffer does not fill up.
        try:
            if ser.inWaiting() > 0:
                # read the waiting bytes and decode the binary array to ASCII
                incoming = ser.read(ser.inWaiting()).decode('ascii')
                print(incoming)
        except Exception as e:
            print('serial read failed: ' + str(e))
        data = {
            "tempInternal": random.randint(40, 100),
            "humInternal": random.randint(0, 100),
            "tempCab": random.randint(40, 100),
            "humCab": random.randint(0, 100),
            "batteryV": random.uniform(12, 16),
            "batteryIP": random.uniform(0, 50),
            "batteryIN": random.uniform(0, 50),
            "SoC": random.uniform(0, 100),
            "PVV": random.uniform(12, 21),
            "PVI": random.uniform(0, 8),
            "lightPWM": random.randint(0, 100),
            "bInverter": 0,
            "bUVLO": random.randint(0, 1),
            "bFridge": random.randint(0, 1),
            # timezone-aware timestamp (US/Pacific) for the generated sample
            "generatedTimestamp": datetime.datetime.now(pytz.timezone('US/Pacific'))
        }
        r = requests.post('http://localhost:8000/possumTrack/telemetry', data=data)
        print(r.content)
        time.sleep(1)  # throttle the loop; original busy-waited
|
import React from 'react'
import { Box } from '@chakra-ui/core'
import Image from './image'
export default ({logo, logoDescription}) => (
<Box mb="15px">
<Box
w="44px"
h="44px"
borderRadius="50%"
overflow="hidden"
display="inline-block"
verticalAlign="top"
>
<Image imgName={logo} width="100%" hight="100%" alt="logo"/>
</Box>
<Box
as="span"
display="inline-block"
verticaAlign="top"
pl="10px"
lineHeight="44px"
>
{logoDescription}
</Box>
</Box>
)
|
/*
* Эмулятор советских ПМК для iOS -- (c) Xen, 2014
*
* emu_rom.swift
* ПЗУ микроконтроллеров К745ИК13
*
*/
#import "inttypes.h"
// Overall layout of a K745IK13 microcontroller ROM image
struct ROM
{
    uint32_t micro [68]; // microinstructions
    uint32_t macro [256]; // macroinstructions
    uint8_t synchro[1152]; // synchronization commands
};
/* ROM image of the K145IK1302 chip. Generated mask-ROM data -- do not edit the
 * values by hand. */
struct ROM mcu1302_rom =
{
    .micro =  /* microinstructions */
    {
        0x0000000, 0x0800001, 0x0A00820, 0x0040020,
        0x0A03120, 0x0203081, 0x0A00181, 0x0803800,
        0x0818001, 0x0800400, 0x0A00089, 0x0A03C20,
        0x0800820, 0x0080020, 0x0800120, 0x1400020,
        0x0800081, 0x0210801, 0x0040000, 0x0058001,
        0x0808001, 0x0A03081, 0x0A01081, 0x0A01181,
        0x0040090, 0x0800401, 0x0A00081, 0x0040001,
        0x0800801, 0x1000000, 0x0800100, 0x1200801,
        0x0013C01, 0x0800008, 0x0A00088, 0x0010200,
        0x0800040, 0x0800280, 0x1801200, 0x1000208,
        0x0080001, 0x0A00082, 0x0A01008, 0x1000001,
        0x0A00808, 0x0900001, 0x8010004, 0x0080820,
        0x0800002, 0x0140002, 0x0008000, 0x0A00090,
        0x0A00220, 0x0801001, 0x1203200, 0x4800001,
        0x0011801, 0x1008001, 0x0A04020, 0x4800801,
        0x0840801, 0x0840020, 0x0013081, 0x0010801,
        0x0818180, 0x0800180, 0x0A00081, 0x0800001
    },
    .macro =  /* macroinstructions */
    {
        0x00204E4E, 0x00117360, 0x00114840, 0x01040240,
        0x00164040, 0x001B3240, 0x00064640, 0x015B4013,
        0x00D93130, 0x00001040, 0x01A52014, 0x00000000,
        0x00000000, 0x00000000, 0x00000000, 0x00C12040,
        0x00D0536D, 0x00517740, 0x00B43130, 0x00B22223,
        0x00C15340, 0x00FD2040, 0x002D1D1D, 0x0008403B,
        0x00092140, 0x00094061, 0x000A2140, 0x00082140,
        0x000D7076, 0x010D400D, 0x000A403B, 0x00056D40,
        0x00100259, 0x010B1340, 0x00242044, 0x010B7840,
        0x00064002, 0x01FF2008, 0x0008565A, 0x0126403F,
        0x016C400D, 0x00C12077, 0x00517740, 0x00517740,
        0x00083240, 0x010C400D, 0x01FF200A, 0x010B3568,
        0x00117B5A, 0x0021206D, 0x01222034, 0x01015C5B,
        0x01D03454, 0x00005E5D, 0x010E400D, 0x010E0044,
        0x00F44E40, 0x009A206D, 0x00F44E5A, 0x00000000,
        0x00000000, 0x00000000, 0x00000000, 0x00C11D1D,
        0x00063333, 0x010B403B, 0x01344043, 0x00096A6A,
        0x000A4443, 0x00792120, 0x01D32047, 0x00081E1E,
        0x01AF1140, 0x00AB1D1D, 0x0039324C, 0x000B324C,
        0x0008326D, 0x000D404C, 0x00854D40, 0x00134040,
        0x0009404C, 0x006D7770, 0x006D7240, 0x01001640,
        0x00A54C7E, 0x00F44E40, 0x01536900, 0x000A580E,
        0x003C5262, 0x0005716D, 0x013C4013, 0x00104070,
        0x00056F6D, 0x00A62070, 0x00106F40, 0x01056F40,
        0x001F3E3D, 0x0028595A, 0x001E2223, 0x00064B40,
        0x00524A40, 0x00692120, 0x001B4940, 0x00093240,
        0x011F0140, 0x00154840, 0x00062423, 0x00062423,
        0x01057340, 0x015E400D, 0x00095828, 0x00092223,
        0x00992F40, 0x00982F40, 0x00622040, 0x005D5820,
        0x00740F40, 0x00B81C20, 0x00D05373, 0x005B205C,
        0x006D2062, 0x0133200A, 0x010B7D62, 0x00A52120,
        0x01054072, 0x01494013, 0x01040540, 0x00217362,
        0x013D6A40, 0x00067840, 0x01AB6C6D, 0x01332014,
        0x000E7C6C, 0x00050B3F, 0x00C15340, 0x00950853,
        0x00E0417A, 0x00E04240, 0x00532120, 0x00365562,
        0x008F1E20, 0x013D1740, 0x004C2120, 0x0170406A,
        0x00C05340, 0x00061D1D, 0x00814545, 0x00063333,
        0x00061E1E, 0x00091E1E, 0x00900720, 0x01514078,
        0x00081D1D, 0x01622206, 0x001E4545, 0x00114060,
        0x000B2E40, 0x000F2D40, 0x010E1F40, 0x000D7677,
        0x00D33C40, 0x01D32032, 0x00116E60, 0x011D3440,
        0x00FF7440, 0x00073240, 0x001B430A, 0x01D32047,
        0x00113434, 0x001E6E40, 0x00D33C40, 0x00937540,
        0x00D01E20, 0x00043277, 0x00CA4020, 0x00107F54,
        0x00212068, 0x000B7840, 0x017C400C, 0x00056F6D,
        0x01470C40, 0x01716B62, 0x006B2120, 0x00332120,
        0x006D204C, 0x00E67362, 0x010D0940, 0x00062423,
        0x001A3A3A, 0x018F406F, 0x0151334C, 0x010D1716,
        0x01D35340, 0x00D24061, 0x00CA6554, 0x00104064,
        0x00512223, 0x00782120, 0x00263130, 0x001E3434,
        0x00193838, 0x00183939, 0x000D6654, 0x010D7A40,
        0x010E1740, 0x00057340, 0x00B86140, 0x00045263,
        0x00122773, 0x008F5373, 0x002E5150, 0x0151404C,
        0x001E3737, 0x00894E40, 0x001E3636, 0x006D563D,
        0x00E07A41, 0x00E12973, 0x00082640, 0x00062540,
        0x00D87967, 0x0005565A, 0x0005286C, 0x00762041,
        0x00952040, 0x008F1D1D, 0x01D35340, 0x008F2040,
        0x00CC4F4F, 0x00114060, 0x00054040, 0x001E3434,
        0x01047340, 0x011E3434, 0x00C62C2B, 0x00C53130,
        0x003E1D1D, 0x01041740, 0x001E3535, 0x00D35353,
        0x00DE4077, 0x00E24057, 0x00064E68, 0x01E53812,
        0x00D84067, 0x00064069, 0x000A402A, 0x00EF202A,
        0x01015C5B, 0x00090F40, 0x00005E5D, 0x010B3613,
        0x00144740, 0x01176806, 0x000A5A5A, 0x01D3200D
    },
    .synchro =  /* synchronization commands */
    {
        0x00, 0x00, 0x00, 0x10, 0x03, 0x1D, 0x00, 0x07, 0x1E,
        0x10, 0x03, 0x1C, 0x0B, 0x07, 0x0C, 0x1E, 0x00, 0x00,
        0x15, 0x18, 0x09, 0x16, 0x18, 0x09, 0x16, 0x18, 0x24,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x03, 0x0E, 0x1E, 0x33, 0x00, 0x00, 0x00, 0x00,
        0x23, 0x00, 0x00, 0x00, 0x2F, 0x00, 0x2C, 0x00, 0x01,
        0x11, 0x32, 0x00, 0x00, 0x00, 0x03, 0x00, 0x0E, 0x1A,
        0x0F, 0x0E, 0x0D, 0x19, 0x03, 0x2F, 0x0E, 0x0D, 0x08,
        0x1C, 0x0C, 0x0D, 0x01, 0x00, 0x00, 0x03, 0x24, 0x0F,
        0x1C, 0x0C, 0x2F, 0x09, 0x1E, 0x34, 0x0E, 0x1E, 0x0C,
        0x06, 0x0A, 0x0D, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0F,
        0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x26, 0x06,
        0x35, 0x34, 0x0D, 0x24, 0x1E, 0x1A, 0x09, 0x0C, 0x0F,
        0x3D, 0x00, 0x00, 0x1C, 0x03, 0x0E, 0x0A, 0x0F, 0x06,
        0x3D, 0x00, 0x0E, 0x3F, 0x03, 0x01, 0x00, 0x00, 0x0E,
        0x3F, 0x33, 0x0D, 0x01, 0x08, 0x00, 0x01, 0x08, 0x04,
        0x06, 0x03, 0x0E, 0x2B, 0x3A, 0x09, 0x12, 0x1E, 0x33,
        0x35, 0x03, 0x07, 0x0C, 0x1E, 0x1A, 0x00, 0x00, 0x00,
        0x35, 0x0C, 0x2F, 0x0E, 0x03, 0x01, 0x00, 0x00, 0x15,
        0x24, 0x1E, 0x1A, 0x23, 0x1D, 0x00, 0x00, 0x00, 0x00,
        0x09, 0x0C, 0x2F, 0x09, 0x03, 0x00, 0x24, 0x0C, 0x0F,
        0x3D, 0x09, 0x1E, 0x3F, 0x03, 0x07, 0x0B, 0x22, 0x03,
        0x07, 0x0B, 0x0D, 0x0C, 0x03, 0x0E, 0x1E, 0x3A, 0x2B,
        0x3C, 0x03, 0x00, 0x09, 0x34, 0x0E, 0x1E, 0x0C, 0x1E,
        0x2E, 0x01, 0x31, 0x2E, 0x01, 0x31, 0x00, 0x00, 0x00,
        0x2E, 0x30, 0x03, 0x2E, 0x30, 0x03, 0x00, 0x00, 0x00,
        0x2E, 0x2D, 0x00, 0x2E, 0x2D, 0x00, 0x00, 0x00, 0x00,
        0x3B, 0x04, 0x2F, 0x37, 0x12, 0x00, 0x00, 0x00, 0x00,
        0x14, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x01, 0x13, 0x00, 0x01, 0x13, 0x00, 0x01, 0x13, 0x04,
        0x2E, 0x00, 0x00, 0x2E, 0x00, 0x00, 0x2E, 0x00, 0x00,
        0x3D, 0x07, 0x10, 0x3F, 0x03, 0x00, 0x2C, 0x07, 0x1E,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
        0x00, 0x0F, 0x10, 0x03, 0x00, 0x1C, 0x03, 0x0F, 0x1D,
        0x03, 0x32, 0x00, 0x2B, 0x14, 0x00, 0x00, 0x08, 0x00,
        0x04, 0x14, 0x00, 0x00, 0x32, 0x00, 0x00, 0x32, 0x0C,
        0x0A, 0x32, 0x00, 0x00, 0x32, 0x00, 0x00, 0x32, 0x00,
        0x21, 0x15, 0x18, 0x21, 0x16, 0x18, 0x00, 0x17, 0x18,
        0x19, 0x1A, 0x18, 0x19, 0x16, 0x18, 0x09, 0x16, 0x18,
        0x2B, 0x15, 0x00, 0x00, 0x17, 0x00, 0x00, 0x17, 0x00,
        0x12, 0x1B, 0x0E, 0x0F, 0x1B, 0x0E, 0x23, 0x2B, 0x0A,
        0x2C, 0x18, 0x00, 0x2A, 0x18, 0x07, 0x0B, 0x03, 0x04,
        0x32, 0x14, 0x00, 0x32, 0x32, 0x11, 0x00, 0x08, 0x00,
        0x09, 0x0C, 0x15, 0x03, 0x00, 0x00, 0x06, 0x3C, 0x00,
        0x00, 0x2C, 0x00, 0x00, 0x2A, 0x00, 0x09, 0x16, 0x00,
        0x00, 0x00, 0x11, 0x00, 0x09, 0x16, 0x18, 0x09, 0x1E,
        0x00, 0x00, 0x07, 0x0A, 0x29, 0x3E, 0x33, 0x29, 0x00,
        0x0F, 0x0B, 0x0F, 0x10, 0x03, 0x08, 0x24, 0x03, 0x23,
        0x32, 0x01, 0x1D, 0x32, 0x08, 0x00, 0x32, 0x08, 0x32,
        0x32, 0x08, 0x23, 0x32, 0x08, 0x0F, 0x23, 0x23, 0x04,
        0x09, 0x1E, 0x0F, 0x00, 0x00, 0x14, 0x00, 0x00, 0x08,
        0x37, 0x00, 0x00, 0x37, 0x00, 0x00, 0x37, 0x00, 0x00,
        0x01, 0x31, 0x00, 0x01, 0x31, 0x00, 0x01, 0x31, 0x36,
        0x1A, 0x30, 0x0D, 0x00, 0x30, 0x0D, 0x00, 0x30, 0x0D,
        0x30, 0x03, 0x00, 0x30, 0x03, 0x00, 0x30, 0x03, 0x2B,
        0x2D, 0x00, 0x00, 0x2D, 0x00, 0x00, 0x2D, 0x00, 0x00,
        0x0A, 0x30, 0x03, 0x00, 0x30, 0x03, 0x00, 0x30, 0x03,
        0x00, 0x01, 0x31, 0x00, 0x01, 0x31, 0x00, 0x01, 0x31,
        0x00, 0x2D, 0x00, 0x00, 0x2D, 0x00, 0x00, 0x2D, 0x00,
        0x2C, 0x00, 0x00, 0x2A, 0x00, 0x00, 0x09, 0x18, 0x00,
        0x07, 0x1E, 0x0F, 0x01, 0x00, 0x08, 0x1C, 0x0A, 0x08,
        0x14, 0x00, 0x00, 0x32, 0x00, 0x00, 0x32, 0x2B, 0x00,
        0x32, 0x00, 0x00, 0x32, 0x27, 0x36, 0x08, 0x09, 0x0C,
        0x1E, 0x02, 0x1D, 0x0F, 0x0C, 0x0F, 0x26, 0x07, 0x22,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x1D, 0x23, 0x23, 0x09, 0x23, 0x0C, 0x03, 0x23, 0x23,
        0x02, 0x35, 0x03, 0x0F, 0x00, 0x00, 0x00, 0x04, 0x0C,
        0x01, 0x12, 0x00, 0x08, 0x00, 0x32, 0x0A, 0x00, 0x00,
        0x06, 0x18, 0x00, 0x17, 0x18, 0x00, 0x17, 0x18, 0x00,
        0x00, 0x01, 0x13, 0x00, 0x01, 0x13, 0x04, 0x01, 0x13,
        0x00, 0x00, 0x00, 0x09, 0x15, 0x18, 0x00, 0x35, 0x03,
        0x0E, 0x03, 0x09, 0x0C, 0x1B, 0x1E, 0x0F, 0x1B, 0x08,
        0x00, 0x00, 0x1C, 0x03, 0x1E, 0x15, 0x02, 0x0C, 0x00,
        0x07, 0x1E, 0x10, 0x0F, 0x09, 0x32, 0x1E, 0x0F, 0x08,
        0x09, 0x1E, 0x1A, 0x18, 0x1D, 0x17, 0x03, 0x0F, 0x3D,
        0x07, 0x0B, 0x1A, 0x1D, 0x28, 0x00, 0x0E, 0x28, 0x08,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x03, 0x00, 0x09,
        0x00, 0x04, 0x2B, 0x23, 0x04, 0x08, 0x08, 0x00, 0x08,
        0x0E, 0x03, 0x00, 0x2B, 0x2F, 0x0D, 0x12, 0x03, 0x04,
        0x01, 0x08, 0x00, 0x01, 0x08, 0x00, 0x01, 0x08, 0x04,
        0x0F, 0x1D, 0x2F, 0x0E, 0x03, 0x23, 0x07, 0x1E, 0x0D,
        0x0F, 0x12, 0x00, 0x23, 0x24, 0x1E, 0x23, 0x0F, 0x04,
        0x26, 0x12, 0x15, 0x03, 0x12, 0x04, 0x24, 0x2F, 0x0F,
        0x12, 0x04, 0x01, 0x0F, 0x07, 0x1E, 0x0F, 0x00, 0x01,
        0x0E, 0x0F, 0x20, 0x05, 0x00, 0x07, 0x12, 0x0E, 0x08,
        0x1E, 0x00, 0x10, 0x03, 0x0F, 0x04, 0x00, 0x00, 0x00,
        0x32, 0x00, 0x00, 0x32, 0x00, 0x00, 0x00, 0x03, 0x00,
        0x00, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x17, 0x0D,
        0x00, 0x00, 0x00, 0x0A, 0x1A, 0x18, 0x00, 0x17, 0x03,
        0x32, 0x09, 0x0F, 0x32, 0x07, 0x0C, 0x0C, 0x1A, 0x0F,
        0x14, 0x00, 0x00, 0x32, 0x00, 0x00, 0x32, 0x00, 0x00,
        0x0E, 0x1E, 0x15, 0x00, 0x00, 0x02, 0x00, 0x00, 0x02,
        0x00, 0x0E, 0x08, 0x0E, 0x1D, 0x23, 0x1E, 0x3A, 0x3A,
        0x1D, 0x04, 0x15, 0x00, 0x00, 0x3A, 0x00, 0x00, 0x3A,
        0x00, 0x00, 0x3A, 0x00, 0x0D, 0x0E, 0x03, 0x0F, 0x00,
        0x3B, 0x3C, 0x2F, 0x37, 0x3C, 0x01, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0x02, 0x24, 0x1E,
        0x00, 0x00, 0x00, 0x00, 0x07, 0x0B, 0x22, 0x03, 0x04,
        0x00, 0x00, 0x39, 0x04, 0x25, 0x08, 0x03, 0x07, 0x0F,
        0x12, 0x2C, 0x00, 0x2B, 0x2A, 0x26, 0x0D, 0x07, 0x0F,
        0x04, 0x0B, 0x08, 0x01, 0x10, 0x0D, 0x09, 0x00, 0x00,
        0x00, 0x01, 0x08, 0x04, 0x01, 0x08, 0x23, 0x01, 0x08,
        0x00, 0x00, 0x1B, 0x00, 0x00, 0x1B, 0x1F, 0x0E, 0x1B,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x1B, 0x00,
        0x00, 0x00, 0x01, 0x0F, 0x0D, 0x01, 0x09, 0x1E, 0x2B,
        0x00, 0x23, 0x1A, 0x07, 0x1E, 0x0C, 0x0F, 0x00, 0x00,
        0x1E, 0x12, 0x00, 0x00, 0x12, 0x00, 0x00, 0x12, 0x1A,
        0x1E, 0x00, 0x10, 0x0F, 0x24, 0x1E, 0x34, 0x1D, 0x00,
        0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x2F, 0x01,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x09, 0x15,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x28,
        0x00, 0x00, 0x2B, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x23,
        0x24, 0x0C, 0x1E, 0x0F, 0x00, 0x07, 0x03, 0x0F, 0x00,
        0x00, 0x00, 0x01, 0x0F, 0x07, 0x0B, 0x0F, 0x25, 0x0F,
        0x0F, 0x04, 0x00, 0x00, 0x00, 0x12, 0x09, 0x0C, 0x12,
        0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x09, 0x0C,
        0x03, 0x00, 0x00, 0x00, 0x04, 0x32, 0x24, 0x0F, 0x23,
        0x0E, 0x0D, 0x00, 0x00, 0x00, 0x00, 0x09, 0x1E, 0x1A,
        0x07, 0x0B, 0x0F, 0x07, 0x0C, 0x1E, 0x1A, 0x0F, 0x00,
        0x0E, 0x0D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x08,
        0x12, 0x00, 0x01, 0x0B, 0x00, 0x00, 0x00, 0x00, 0x09,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x0B,
        0x00, 0x00, 0x12, 0x00, 0x00, 0x12, 0x04, 0x0C, 0x12,
        0x32, 0x00, 0x00, 0x32, 0x00, 0x00, 0x08, 0x36, 0x00,
        0x02, 0x0D, 0x00, 0x01, 0x0F, 0x0D, 0x00, 0x0E, 0x1E,
        0x1E, 0x00, 0x10, 0x0F, 0x07, 0x0B, 0x34, 0x0F, 0x1D,
        0x1D, 0x04, 0x08, 0x36, 0x00, 0x08, 0x12, 0x00, 0x00,
        0x03, 0x1E, 0x0F, 0x26, 0x0A, 0x02, 0x26, 0x3E, 0x08
    }
};
/* ROM image of the K145IK1303 chip. Generated mask-ROM data -- do not edit the
 * values by hand. */
struct ROM mcu1303_rom =
{
    .micro =  /* microinstructions */
    {
        0x0000000, 0x0800001, 0x0040020, 0x1440090,
        0x0A00081, 0x1000000, 0x1400020, 0x0800008,
        0x0A03180, 0x1002200, 0x0800400, 0x1418001,
        0x0080020, 0x0841020, 0x0203100, 0x0203088,
        0x0A00820, 0x0800120, 0x08001C0, 0x0810081,
        0x0A00089, 0x0800401, 0x0A010A0, 0x0A01081,
        0x0818001, 0x1A00220, 0x0201100, 0x0203420,
        0x0008000, 0x0801020, 0x0201420, 0x0801190,
        0x0040000, 0x0080820, 0x0800002, 0x0140002,
        0x0800100, 0x0A03C20, 0x0A00808, 0x0A01008,
        0x0200540, 0x0601209, 0x0083100, 0x0A03081,
        0x8800004, 0x0058001, 0x1001280, 0x1008001,
        0x1200209, 0x4018001, 0x0040002, 0x1000001,
        0x0010200, 0x0800840, 0x0A01181, 0x4018801,
        0x0A10181, 0x0800801, 0x0040001, 0x0011190,
        0x0858001, 0x0040020, 0x3200209, 0x08000C0,
        0x4000020, 0x0600081, 0x1000000, 0x1000180
    },
    .macro =  /* macroinstructions */
    {
        0x00386050, 0x005B3F3E, 0x000F5970, 0x00152470,
        0x000C3D50, 0x0011312F, 0x005B4544, 0x00165050,
        0x000C3404, 0x005B3F3E, 0x00D40450, 0x00162424,
        0x000C4962, 0x01FB5250, 0x000D4924, 0x01BB2222,
        0x00155050, 0x010F5247, 0x00182525, 0x00080505,
        0x000E041E, 0x00123433, 0x007F6425, 0x007F0D25,
        0x01650950, 0x01176553, 0x007E2432, 0x00087150,
        0x007E2455, 0x00135076, 0x00085977, 0x005B4544,
        0x000C2E26, 0x00310D2E, 0x00100E35, 0x00316B47,
        0x01381250, 0x0011302E, 0x01385F50, 0x00050250,
        0x011C0101, 0x00195050, 0x00382C2C, 0x016F2222,
        0x013A2222, 0x002F6B56, 0x00093D6C, 0x00F04D50,
        0x000C1750, 0x00074A50, 0x01B45047, 0x003C2020,
        0x01AA2B6A, 0x00123432, 0x001D4933, 0x0113500C,
        0x00052556, 0x00087C50, 0x01130000, 0x00142B2B,
        0x004A1D50, 0x006E5756, 0x00496050, 0x00E57D58,
        0x011E5D22, 0x01F35F50, 0x00EA0505, 0x001C7A50,
        0x01080B50, 0x0054244B, 0x000C4050, 0x002A2121,
        0x00135C5C, 0x000A4650, 0x00152504, 0x009D2B60,
        0x00064350, 0x00192020, 0x00292C2C, 0x01235C50,
        0x006D3C3C, 0x0031017D, 0x00092D2D, 0x004E2D2D,
        0x01596A7E, 0x00E3396E, 0x006E3654, 0x016E6E47,
        0x00534950, 0x00EE2062, 0x0016226E, 0x00660525,
        0x00135C5C, 0x000A4241, 0x00383B3B, 0x000C7277,
        0x00360404, 0x00042020, 0x00100A2E, 0x00155050,
        0x00532404, 0x0004642B, 0x01843C47, 0x01A35047,
        0x01847250, 0x015C112F, 0x00080434, 0x00152F23,
        0x00080505, 0x00906047, 0x0113150C, 0x006D2224,
        0x00747250, 0x000C632B, 0x00AD672B, 0x000A612E,
        0x01B97463, 0x00417374, 0x00BD0658, 0x00EA2450,
        0x00087166, 0x01BD3950, 0x001A2E50, 0x00BD6047,
        0x00175079, 0x005E6035, 0x000A3847, 0x01067F47,
        0x008C5251, 0x0013612E, 0x0087602E, 0x005B3F3E,
        0x00DC2121, 0x00177374, 0x00182525, 0x00286050,
        0x00064F4E, 0x000C5251, 0x006E2926, 0x008F602F,
        0x008C502A, 0x00172928, 0x00814F4E, 0x003F534B,
        0x000F075B, 0x00082525, 0x01E85047, 0x00790505,
        0x00152F23, 0x0017506A, 0x00095047, 0x00082525,
        0x00E63A62, 0x00DA0B47, 0x01174150, 0x00182525,
        0x00090450, 0x01175B50, 0x00094850, 0x001B2F50,
        0x00806047, 0x000A3720, 0x0010382F, 0x002C0505,
        0x009B5021, 0x00160505, 0x01ED3A50, 0x00040505,
        0x00082525, 0x01080F50, 0x01B35047, 0x000D3D4C,
        0x00180404, 0x01C03A50, 0x00E20421, 0x00287B50,
        0x00097F26, 0x0013612E, 0x01B6112F, 0x00322425,
        0x01B81847, 0x00BA714B, 0x00182450, 0x00080505,
        0x00182525, 0x004F1D24, 0x00736F5C, 0x00A67569,
        0x00AD2726, 0x01BE5022, 0x000A5E04, 0x00173A62,
        0x00CB752E, 0x00B11E25, 0x00CB0953, 0x00085068,
        0x002B2020, 0x01984150, 0x00C77C04, 0x00DA0950,
        0x00160404, 0x00F56040, 0x00DE0450, 0x01CB1160,
        0x00CF4950, 0x000A4747, 0x001F210B, 0x00145050,
        0x01171050, 0x00052075, 0x001D3D37, 0x00365555,
        0x00130101, 0x01D57424, 0x00D66047, 0x01C47850,
        0x004D2C2C, 0x01174150, 0x00174847, 0x00C90350,
        0x000A2760, 0x0019502E, 0x00D72C2C, 0x01174850,
        0x006C224B, 0x000A495B, 0x00100E35, 0x00312104,
        0x01C00850, 0x00115A2F, 0x00EA0505, 0x00080574,
        0x00152F23, 0x005C6050, 0x01C94122, 0x01A42222,
        0x00DF2847, 0x00C9202E, 0x00A76047, 0x0117502F,
        0x002E2020, 0x01205048, 0x00F8606D, 0x002D604C,
        0x00443A62, 0x000D3D2E, 0x015C3950, 0x01625022,
        0x006E136E, 0x0031602E, 0x01085D1A, 0x010F6F50,
        0x0017506A, 0x00FB5020, 0x000A3C47, 0x00174D50
    },
    .synchro =  /* synchronization commands */
    {
        0x2C, 0x23, 0x00, 0x2C, 0x23, 0x00, 0x2C, 0x23, 0x30,
        0x31, 0x32, 0x00, 0x31, 0x32, 0x12, 0x31, 0x32, 0x30,
        0x00, 0x00, 0x00, 0x11, 0x23, 0x00, 0x1F, 0x06, 0x00,
        0x31, 0x00, 0x1C, 0x31, 0x00, 0x00, 0x31, 0x08, 0x1D,
        0x2C, 0x02, 0x0E, 0x2C, 0x02, 0x01, 0x2C, 0x02, 0x08,
        0x08, 0x3A, 0x00, 0x00, 0x3A, 0x01, 0x05, 0x3A, 0x11,
        0x18, 0x0A, 0x2B, 0x00, 0x01, 0x33, 0x02, 0x24, 0x25,
        0x37, 0x3A, 0x18, 0x31, 0x3A, 0x1F, 0x31, 0x3A, 0x3D,
        0x37, 0x02, 0x06, 0x31, 0x02, 0x12, 0x31, 0x10, 0x19,
        0x39, 0x02, 0x26, 0x33, 0x09, 0x08, 0x19, 0x19, 0x08,
        0x01, 0x14, 0x0C, 0x00, 0x00, 0x00, 0x1B, 0x06, 0x01,
        0x26, 0x00, 0x21, 0x12, 0x14, 0x24, 0x06, 0x12, 0x00,
        0x39, 0x00, 0x21, 0x08, 0x22, 0x00, 0x10, 0x14, 0x00,
        0x20, 0x00, 0x00, 0x39, 0x02, 0x00, 0x06, 0x25, 0x25,
        0x19, 0x02, 0x16, 0x09, 0x11, 0x19, 0x16, 0x11, 0x13,
        0x18, 0x08, 0x10, 0x18, 0x00, 0x01, 0x1F, 0x06, 0x12,
        0x1A, 0x12, 0x2E, 0x19, 0x02, 0x00, 0x33, 0x38, 0x00,
        0x0D, 0x06, 0x3B, 0x13, 0x0A, 0x02, 0x00, 0x27, 0x00,
        0x00, 0x00, 0x33, 0x13, 0x3C, 0x00, 0x11, 0x14, 0x04,
        0x11, 0x1D, 0x34, 0x13, 0x01, 0x00, 0x14, 0x27, 0x00,
        0x2C, 0x10, 0x21, 0x2C, 0x02, 0x33, 0x00, 0x00, 0x00,
        0x37, 0x12, 0x2A, 0x31, 0x02, 0x00, 0x12, 0x06, 0x09,
        0x37, 0x12, 0x2A, 0x31, 0x14, 0x0C, 0x00, 0x00, 0x00,
        0x39, 0x0D, 0x12, 0x10, 0x0F, 0x00, 0x00, 0x27, 0x03,
        0x37, 0x12, 0x0C, 0x31, 0x05, 0x00, 0x31, 0x00, 0x00,
        0x37, 0x20, 0x0A, 0x31, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x11, 0x13, 0x0E, 0x01, 0x0D, 0x11, 0x05, 0x25, 0x24,
        0x0A, 0x24, 0x0C, 0x08, 0x0D, 0x21, 0x00, 0x00, 0x00,
        0x37, 0x06, 0x3A, 0x31, 0x05, 0x02, 0x0A, 0x1D, 0x16,
        0x38, 0x14, 0x0C, 0x00, 0x08, 0x06, 0x20, 0x1B, 0x34,
        0x0E, 0x02, 0x06, 0x00, 0x02, 0x1F, 0x19, 0x20, 0x08,
        0x37, 0x10, 0x21, 0x31, 0x12, 0x0C, 0x00, 0x00, 0x00,
        0x01, 0x2D, 0x30, 0x01, 0x2D, 0x00, 0x01, 0x2D, 0x30,
        0x33, 0x34, 0x06, 0x01, 0x18, 0x00, 0x01, 0x18, 0x08,
        0x31, 0x20, 0x34, 0x31, 0x20, 0x05, 0x31, 0x20, 0x08,
        0x1F, 0x3A, 0x20, 0x14, 0x3A, 0x20, 0x0C, 0x00, 0x20,
        0x0A, 0x20, 0x06, 0x30, 0x1F, 0x0C, 0x00, 0x20, 0x00,
        0x35, 0x20, 0x05, 0x34, 0x14, 0x09, 0x30, 0x20, 0x11,
        0x08, 0x18, 0x18, 0x08, 0x18, 0x18, 0x08, 0x33, 0x20,
        0x04, 0x16, 0x06, 0x36, 0x06, 0x0C, 0x01, 0x03, 0x00,
        0x2F, 0x08, 0x18, 0x1C, 0x00, 0x18, 0x00, 0x20, 0x18,
        0x00, 0x18, 0x14, 0x35, 0x1D, 0x06, 0x14, 0x00, 0x3B,
        0x06, 0x20, 0x05, 0x34, 0x14, 0x09, 0x19, 0x00, 0x21,
        0x05, 0x3A, 0x3A, 0x06, 0x3A, 0x3A, 0x05, 0x3A, 0x3A,
        0x01, 0x23, 0x00, 0x01, 0x23, 0x00, 0x01, 0x23, 0x08,
        0x01, 0x32, 0x02, 0x01, 0x32, 0x02, 0x01, 0x32, 0x02,
        0x15, 0x04, 0x03, 0x15, 0x17, 0x03, 0x15, 0x17, 0x03,
        0x07, 0x2B, 0x03, 0x07, 0x17, 0x03, 0x07, 0x17, 0x03,
        0x04, 0x1E, 0x06, 0x1E, 0x3F, 0x0E, 0x09, 0x11, 0x13,
        0x0F, 0x29, 0x05, 0x09, 0x28, 0x09, 0x09, 0x09, 0x01,
        0x08, 0x0B, 0x0B, 0x1B, 0x0B, 0x0B, 0x1E, 0x0B, 0x00,
        0x08, 0x0B, 0x0B, 0x0E, 0x0B, 0x0B, 0x1A, 0x0B, 0x00,
        0x11, 0x1D, 0x06, 0x08, 0x10, 0x04, 0x02, 0x06, 0x2F,
        0x1F, 0x1C, 0x2F, 0x00, 0x1C, 0x1C, 0x09, 0x18, 0x11,
        0x0B, 0x0C, 0x0C, 0x0B, 0x02, 0x30, 0x00, 0x00, 0x00,
        0x25, 0x1C, 0x04, 0x01, 0x1C, 0x1D, 0x1D, 0x06, 0x08,
        0x01, 0x30, 0x21, 0x3F, 0x2E, 0x11, 0x19, 0x25, 0x01,
        0x16, 0x00, 0x00, 0x03, 0x0C, 0x0A, 0x19, 0x0A, 0x19,
        0x0E, 0x16, 0x1B, 0x11, 0x1D, 0x10, 0x3C, 0x3A, 0x05,
        0x20, 0x08, 0x10, 0x06, 0x22, 0x19, 0x02, 0x22, 0x18,
        0x06, 0x0C, 0x01, 0x10, 0x00, 0x00, 0x00, 0x11, 0x13,
        0x0A, 0x2B, 0x03, 0x0A, 0x17, 0x03, 0x0A, 0x17, 0x03,
        0x12, 0x14, 0x06, 0x12, 0x02, 0x00, 0x0A, 0x02, 0x00,
        0x0A, 0x24, 0x0C, 0x00, 0x0A, 0x21, 0x06, 0x20, 0x18,
        0x0A, 0x21, 0x21, 0x35, 0x02, 0x08, 0x10, 0x02, 0x05,
        0x00, 0x12, 0x0F, 0x11, 0x24, 0x21, 0x35, 0x02, 0x05,
        0x06, 0x25, 0x0C, 0x06, 0x02, 0x12, 0x14, 0x02, 0x18,
        0x12, 0x20, 0x14, 0x00, 0x00, 0x21, 0x18, 0x12, 0x0B,
        0x0A, 0x24, 0x06, 0x00, 0x20, 0x08, 0x25, 0x02, 0x00,
        0x24, 0x02, 0x35, 0x18, 0x12, 0x14, 0x34, 0x00, 0x18,
        0x12, 0x14, 0x0C, 0x00, 0x0A, 0x21, 0x35, 0x02, 0x00,
        0x00, 0x26, 0x03, 0x06, 0x27, 0x03, 0x06, 0x27, 0x03,
        0x26, 0x03, 0x00, 0x27, 0x03, 0x00, 0x27, 0x03, 0x00,
        0x11, 0x04, 0x03, 0x00, 0x36, 0x03, 0x00, 0x36, 0x03,
        0x06, 0x04, 0x03, 0x07, 0x17, 0x03, 0x07, 0x17, 0x03,
        0x0A, 0x20, 0x24, 0x25, 0x03, 0x06, 0x08, 0x02, 0x0B,
        0x12, 0x04, 0x16, 0x0A, 0x17, 0x03, 0x0A, 0x17, 0x03,
        0x07, 0x2B, 0x00, 0x07, 0x17, 0x00, 0x07, 0x17, 0x25,
        0x00, 0x07, 0x2B, 0x03, 0x07, 0x17, 0x03, 0x07, 0x17,
        0x03, 0x36, 0x03, 0x11, 0x24, 0x1D, 0x24, 0x03, 0x06,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x15, 0x04, 0x03, 0x15, 0x17, 0x03, 0x15, 0x17,
        0x03, 0x12, 0x1D, 0x1D, 0x14, 0x06, 0x12, 0x06, 0x00,
        0x1C, 0x1C, 0x00, 0x1C, 0x2F, 0x00, 0x06, 0x20, 0x20,
        0x00, 0x0B, 0x02, 0x00, 0x0B, 0x02, 0x00, 0x36, 0x00,
        0x01, 0x18, 0x18, 0x01, 0x18, 0x18, 0x01, 0x18, 0x18,
        0x00, 0x00, 0x00, 0x33, 0x00, 0x08, 0x18, 0x04, 0x28,
        0x1F, 0x0C, 0x08, 0x25, 0x06, 0x0E, 0x06, 0x00, 0x18,
        0x06, 0x0E, 0x00, 0x16, 0x16, 0x00, 0x1D, 0x20, 0x20,
        0x18, 0x07, 0x06, 0x35, 0x10, 0x34, 0x05, 0x09, 0x24,
        0x05, 0x09, 0x09, 0x09, 0x09, 0x01, 0x0D, 0x10, 0x09,
        0x08, 0x25, 0x33, 0x2E, 0x06, 0x1B, 0x06, 0x00, 0x13,
        0x1C, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x1C, 0x00, 0x38,
        0x1D, 0x3D, 0x05, 0x1D, 0x3D, 0x05, 0x1D, 0x3D, 0x05,
        0x1D, 0x20, 0x1D, 0x00, 0x18, 0x00, 0x33, 0x34, 0x06,
        0x39, 0x3C, 0x21, 0x01, 0x3C, 0x01, 0x06, 0x1F, 0x19,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
        0x0A, 0x17, 0x03, 0x11, 0x13, 0x14, 0x00, 0x05, 0x35,
        0x00, 0x05, 0x34, 0x00, 0x00, 0x34, 0x00, 0x00, 0x34,
        0x38, 0x04, 0x02, 0x33, 0x00, 0x11, 0x04, 0x00, 0x00,
        0x0C, 0x00, 0x26, 0x33, 0x09, 0x09, 0x20, 0x08, 0x18,
        0x0F, 0x36, 0x00, 0x00, 0x00, 0x00, 0x00, 0x27, 0x05,
        0x30, 0x09, 0x20, 0x20, 0x06, 0x20, 0x21, 0x00, 0x00,
        0x0A, 0x3A, 0x10, 0x2B, 0x18, 0x38, 0x38, 0x0E, 0x02,
        0x16, 0x0C, 0x35, 0x05, 0x00, 0x00, 0x19, 0x30, 0x00,
        0x08, 0x1C, 0x18, 0x00, 0x1C, 0x00, 0x00, 0x05, 0x3A,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x20,
        0x1F, 0x0C, 0x08, 0x25, 0x06, 0x08, 0x00, 0x00, 0x34,
        0x06, 0x1C, 0x05, 0x25, 0x1C, 0x25, 0x1F, 0x18, 0x34,
        0x33, 0x20, 0x26, 0x0B, 0x02, 0x00, 0x34, 0x09, 0x09,
        0x00, 0x06, 0x36, 0x00, 0x00, 0x11, 0x24, 0x0B, 0x34,
        0x20, 0x20, 0x00, 0x00, 0x39, 0x02, 0x08, 0x1D, 0x00,
        0x00, 0x08, 0x00, 0x3E, 0x00, 0x00, 0x37, 0x08, 0x1D,
        0x00, 0x00, 0x20, 0x00, 0x35, 0x20, 0x05, 0x34, 0x34,
        0x12, 0x14, 0x24, 0x34, 0x2E, 0x30, 0x1F, 0x06, 0x08,
        0x01, 0x05, 0x30, 0x04, 0x30, 0x2E, 0x06, 0x0E, 0x00,
        0x36, 0x00, 0x00, 0x00, 0x00, 0x00, 0x34, 0x34, 0x00,
        0x0A, 0x06, 0x1B, 0x1F, 0x00, 0x00, 0x25, 0x00, 0x3B,
        0x25, 0x10, 0x06, 0x00, 0x00, 0x0A, 0x10, 0x07, 0x03,
        0x0A, 0x10, 0x01, 0x00, 0x00, 0x00, 0x16, 0x19, 0x35,
        0x06, 0x12, 0x10, 0x19, 0x10, 0x00, 0x00, 0x00, 0x3A,
        0x11, 0x06, 0x09, 0x35, 0x16, 0x10, 0x3E, 0x13, 0x0D,
        0x24, 0x3D, 0x10, 0x0E, 0x12, 0x33, 0x03, 0x06, 0x30,
        0x00, 0x26, 0x00, 0x00, 0x27, 0x00, 0x00, 0x3B, 0x08,
        0x06, 0x0C, 0x0C, 0x20, 0x0A, 0x06, 0x11, 0x14, 0x00,
        0x18, 0x24, 0x06, 0x0A, 0x10, 0x18, 0x11, 0x24, 0x18,
        0x10, 0x25, 0x05, 0x06, 0x3C, 0x05, 0x06, 0x00, 0x00,
        0x06, 0x0C, 0x0C, 0x00, 0x00, 0x12, 0x24, 0x1D, 0x1D
    }
};
/* ROM image of the K145IK1306 chip. Generated mask-ROM data -- do not edit the
 * values by hand. */
struct ROM mcu1306_rom =
{
    .micro =  /* microinstructions */
    {
        0x0000000, 0x0800008, 0x0040020, 0x0800001,
        0x0800021, 0x0080020, 0x0A00028, 0x0040100,
        0x4000100, 0x0010100, 0x0A00101, 0x0201089,
        0x0213201, 0x0800004, 0x0800800, 0x0800820,
        0x0200088, 0x4810002, 0x0A00820, 0x0800400,
        0x0801000, 0x0100000, 0x8800004, 0x0008000,
        0x1400020, 0x0800005, 0x4000020, 0x0A00180,
        0x0100000, 0x4000001, 0x8241004, 0x0400000,
        0x0080001, 0x0040001, 0x0212801, 0x0200808,
        0x0800000, 0x0010020, 0x0A00808, 0x0040090,
        0x0A01008, 0x0800401, 0x0A00081, 0x0A01081,
        0x0803400, 0x0A01001, 0x0A11801, 0x0011001,
        0x0A10801, 0x0213801, 0x0098001, 0x0818001,
        0x0800420, 0x0880090, 0x0203C08, 0x0200809,
        0x0A00089, 0x0203090, 0x0840090, 0x0810002,
        0x0210801, 0x0210081, 0x0010000, 0x0200090,
        0x0210081, 0x0212801, 0x0A01020, 0x0A01020
    },
    .macro =  /* macroinstructions */
    {
        0x0070000, 0x0060040, 0x0076A2F, 0x00B4C00,
        0x0090000, 0x00B4D00, 0x0090000, 0x0055300,
        0x0090000, 0x00B5400, 0x0090000, 0x0054600,
        0x0061000, 0x00B4800, 0x0065657, 0x0057300,
        0x0090000, 0x0075655, 0x0071700, 0x0060040,
        0x0070000, 0x0070000, 0x0074444, 0x00C4545,
        0x0280058, 0x0682825, 0x08A0000, 0x0280059,
        0x0800058, 0x0800059, 0x04D5F5F, 0x0FB2F22,
        0x0FB2F21, 0x0F80000, 0x0FB2F20, 0x0940000,
        0x0B80059, 0x0B80058, 0x0830000, 0x03D4343,
        0x0075E5E, 0x0075B00, 0x0695900, 0x007002B,
        0x0070028, 0x0070003, 0x0070028, 0x0070052,
        0x0070015, 0x00C0037, 0x00F5C00, 0x0075C01,
        0x0075D5D, 0x007285F, 0x0DC585B, 0x00C005C,
        0x0680000, 0x0070A0A, 0x0075B59, 0x0070254,
        0x02A5F5F, 0x0075F5F, 0x00B0076, 0x0077700,
        0x00B0039, 0x0063A2A, 0x01B3B2A, 0x0682828,
        0x0680000, 0x0F05800, 0x00B003D, 0x04A0000,
        0x0053200, 0x0502800, 0x0054E00, 0x0560000,
        0x0530000, 0x00B0076, 0x0077700, 0x03E5F5F,
        0x0DC0058, 0x0050032, 0x0682828, 0x005002A,
        0x0682C2C, 0x0682828, 0x0050039, 0x0682828,
        0x0682C2C, 0x0CA0025, 0x0070013, 0x0070066,
        0x0070014, 0x0070066, 0x0070014, 0x0F6005F,
        0x00B3E00, 0x0065300, 0x00B4E00, 0x0065300,
        0x0063B58, 0x0052A00, 0x0070058, 0x0184343,
        0x0FC7576, 0x00A2828, 0x0052A00, 0x0065300,
        0x00C0000, 0x0180000, 0x0682F2F, 0x0053C00,
        0x0065300, 0x00C0000, 0x0182F2F, 0x0680000,
        0x007042E, 0x0051600, 0x07A0000, 0x0070447,
        0x00B164B, 0x0770000, 0x00C3119, 0x0180000,
        0x007005D, 0x0DC585F, 0x0830000, 0x0680000,
        0x0695E5E, 0x0830000, 0x0680000, 0x00A0009,
        0x00B0016, 0x00B0061, 0x0185A5A, 0x0075866,
        0x0F00900, 0x0840004, 0x0052F26, 0x068002F,
        0x0680027, 0x0056D00, 0x0180000, 0x0920000,
        0x0F00959, 0x0180000, 0x00A0000, 0x0B50015,
        0x0070011, 0x0070052, 0x0070066, 0x0070001,
        0x0070001, 0x0070066, 0x0070001, 0x0070066,
        0x0070001, 0x0070001, 0x0070066, 0x0070001,
        0x0070066, 0x0070002, 0x0070066, 0x0070001,
        0x0075D5D, 0x0070052, 0x0075D5D, 0x0075D5D,
        0x0590003, 0x00A5A00, 0x00B2A00, 0x01C7400,
        0x00B3F00, 0x0185E00, 0x00B7458, 0x0B2005F,
        0x0F00947, 0x0AE0000, 0x00B5E63, 0x0090000,
        0x0186B2C, 0x00C006E, 0x0180000, 0x0180001,
        0x0072828, 0x00B3000, 0x0680000, 0x00C3636,
        0x0C10000, 0x0F07259, 0x0A90000, 0x0C45F00,
        0x0073131, 0x0A95A5A, 0x0C45A5A, 0x0680000,
        0x00A0000, 0x0690059, 0x0CA2C00, 0x0DC5931,
        0x0DC596B, 0x08D0000, 0x00A5A5A, 0x007000E,
        0x0072E2E, 0x0074242, 0x0073334, 0x00B6265,
        0x0DB5E5E, 0x0070064, 0x007075F, 0x0075F51,
        0x00B1A03, 0x00F0051, 0x0D40068, 0x0075F5F,
        0x0070052, 0x0070065, 0x0CF0038, 0x0180067,
        0x00A4242, 0x005004E, 0x0070051, 0x0066000,
        0x0065300, 0x005004F, 0x0065300, 0x0064650,
        0x005004F, 0x0070050, 0x0070059, 0x0070052,
        0x01B353E, 0x005002A, 0x0070058, 0x007000E,
        0x0063B51, 0x005004E, 0x0075800, 0x0184343,
        0x00A4242, 0x0066000, 0x0063B00, 0x0070000,
        0x0075000, 0x0605259, 0x0837125, 0x0680000,
        0x0070023, 0x0070024, 0x0072F29, 0x0070041,
        0x1060040, 0x0074900, 0x0075F5F, 0x0094A4A
    },
    .synchro =  /* synchronization commands */
    {
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x2C, 0x2A, 0x27, 0x13, 0x2B, 0x27, 0x13, 0x2B, 0x27,
        0x34, 0x2A, 0x27, 0x13, 0x2B, 0x27, 0x13, 0x2B, 0x27,
        0x29, 0x2A, 0x35, 0x29, 0x2B, 0x35, 0x29, 0x2B, 0x35,
        0x29, 0x12, 0x35, 0x29, 0x3F, 0x35, 0x29, 0x3F, 0x35,
        0x2E, 0x00, 0x00, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x2A, 0x02, 0x00, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x03, 0x12, 0x05, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x0E, 0x02, 0x00, 0x24, 0x02, 0x00, 0x24, 0x02, 0x00,
        0x30, 0x1D, 0x05, 0x2F, 0x1D, 0x00, 0x00, 0x1D, 0x00,
        0x22, 0x00, 0x00, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x0C, 0x00, 0x00, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x25, 0x00,
        0x31, 0x00, 0x00, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x0F, 0x0F, 0x00, 0x00, 0x00, 0x0F, 0x0F, 0x0F,
        0x0E, 0x34, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x03, 0x18, 0x00, 0x00, 0x00,
        0x18, 0x25, 0x00, 0x03, 0x18, 0x00, 0x00, 0x00, 0x00,
        0x1B, 0x03, 0x39, 0x00, 0x00, 0x00, 0x14, 0x18, 0x00,
        0x36, 0x00, 0x00, 0x03, 0x0B, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x03, 0x18, 0x00, 0x00, 0x00, 0x00,
        0x37, 0x1E, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x00,
        0x01, 0x06, 0x07, 0x01, 0x06, 0x07, 0x01, 0x06, 0x07,
        0x34, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x3C, 0x00, 0x00, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x3E, 0x00, 0x00, 0x2D, 0x02, 0x00, 0x00, 0x00, 0x00,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x01, 0x25, 0x00, 0x01, 0x25, 0x00, 0x24, 0x02, 0x00,
        0x03, 0x04, 0x02, 0x03, 0x04, 0x02, 0x24, 0x02, 0x00,
        0x01, 0x06, 0x07, 0x01, 0x06, 0x07, 0x24, 0x02, 0x00,
        0x01, 0x04, 0x08, 0x01, 0x04, 0x08, 0x24, 0x00, 0x1A,
        0x03, 0x06, 0x09, 0x03, 0x06, 0x09, 0x24, 0x00, 0x02,
        0x03, 0x25, 0x00, 0x03, 0x25, 0x00, 0x24, 0x25, 0x00,
        0x03, 0x00, 0x38, 0x03, 0x00, 0x0B, 0x03, 0x25, 0x00,
        0x24, 0x25, 0x00, 0x24, 0x25, 0x0E, 0x05, 0x00, 0x00,
        0x03, 0x25, 0x00, 0x03, 0x25, 0x00, 0x03, 0x25, 0x00,
        0x00, 0x00, 0x19, 0x05, 0x00, 0x19, 0x05, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x12,
        0x00, 0x21, 0x00, 0x00, 0x21, 0x24, 0x25, 0x03, 0x25,
        0x00, 0x0D, 0x02, 0x00, 0x0D, 0x02, 0x00, 0x0D, 0x02,
        0x00, 0x17, 0x00, 0x00, 0x17, 0x24, 0x05, 0x00, 0x00,
        0x24, 0x00, 0x05, 0x24, 0x00, 0x05, 0x24, 0x00, 0x05,
        0x24, 0x25, 0x00, 0x24, 0x25, 0x00, 0x24, 0x25, 0x00,
        0x13, 0x0A, 0x00, 0x00, 0x03, 0x0B, 0x00, 0x28, 0x00,
        0x00, 0x03, 0x05, 0x00, 0x03, 0x05, 0x00, 0x03, 0x05,
        0x1B, 0x03, 0x00, 0x0B, 0x03, 0x0B, 0x00, 0x00, 0x00,
        0x2C, 0x02, 0x00, 0x24, 0x02, 0x00, 0x24, 0x02, 0x00,
        0x0E, 0x0F, 0x0F, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00,
        0x0E, 0x0F, 0x0F, 0x00, 0x00, 0x00, 0x0F, 0x0F, 0x00,
        0x00, 0x16, 0x00, 0x00, 0x16, 0x00, 0x00, 0x16, 0x00,
        0x00, 0x17, 0x00, 0x00, 0x17, 0x00, 0x00, 0x00, 0x21,
        0x00, 0x00, 0x17, 0x00, 0x00, 0x17, 0x24, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x12, 0x00,
        0x14, 0x0F, 0x0F, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00,
        0x24, 0x0F, 0x0F, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00,
        0x13, 0x0A, 0x00, 0x00, 0x03, 0x0B, 0x00, 0x00, 0x00,
        0x1B, 0x18, 0x0B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x13, 0x0F, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x2A, 0x00,
        0x1B, 0x03, 0x3D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x3B, 0x00, 0x00, 0x3B, 0x00, 0x12, 0x14, 0x00,
        0x0E, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x05, 0x24, 0x02,
        0x03, 0x00, 0x25, 0x03, 0x00, 0x25, 0x03, 0x00, 0x25,
        0x00, 0x00, 0x20, 0x00, 0x00, 0x20, 0x00, 0x00, 0x20,
        0x00, 0x11, 0x05, 0x00, 0x11, 0x05, 0x00, 0x11, 0x05,
        0x00, 0x11, 0x25, 0x00, 0x11, 0x25, 0x00, 0x11, 0x25,
        0x0E, 0x0F, 0x0F, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,
        0x03, 0x00, 0x00, 0x33, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x0E, 0x0F, 0x0F, 0x2A, 0x0F, 0x0F, 0x12, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x1B, 0x1C, 0x00, 0x00, 0x00, 0x00,
        0x03, 0x15, 0x00, 0x03, 0x15, 0x00, 0x03, 0x15, 0x00,
        0x1B, 0x02, 0x00, 0x00, 0x00, 0x00, 0x1B, 0x02, 0x00,
        0x00, 0x00, 0x00, 0x03, 0x00, 0x12, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x03, 0x12, 0x12, 0x12, 0x12, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x23, 0x02,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x23, 0x02, 0x00,
        0x26, 0x27, 0x00, 0x28, 0x27, 0x00, 0x28, 0x27, 0x00,
        0x00, 0x26, 0x27, 0x00, 0x28, 0x27, 0x00, 0x28, 0x27,
        0x29, 0x2A, 0x27, 0x29, 0x2B, 0x27, 0x29, 0x2B, 0x3A,
        0x0E, 0x12, 0x12, 0x12, 0x00, 0x00, 0x10, 0x00, 0x00,
        0x0E, 0x12, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,
        0x0E, 0x0F, 0x0F, 0x00, 0x00, 0x0F, 0x0F, 0x0F, 0x00,
        0x00, 0x00, 0x00, 0x25, 0x00, 0x0E, 0x0F, 0x0F, 0x0F,
        0x0E, 0x0F, 0x0F, 0x00, 0x00, 0x0F, 0x0F, 0x0F, 0x0F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x18, 0x00,
        0x24, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x1D, 0x00, 0x00, 0x1D, 0x00, 0x00, 0x1D, 0x00,
        0x1F, 0x1A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x16, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x16, 0x05, 0x00, 0x16, 0x05, 0x00, 0x16, 0x05,
        0x00, 0x16, 0x02, 0x00, 0x16, 0x02, 0x00, 0x16, 0x02,
        0x03, 0x21, 0x02, 0x03, 0x21, 0x02, 0x03, 0x21, 0x02,
        0x18, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x1B, 0x03, 0x0B, 0x00, 0x00, 0x00,
        0x03, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x1B, 0x03, 0x3D, 0x00, 0x00, 0x00,
        0x24, 0x18, 0x03, 0x18, 0x05, 0x03, 0x18, 0x05, 0x00,
        0x03, 0x00, 0x32, 0x03, 0x00, 0x32, 0x03, 0x00, 0x32,
        0x24, 0x33, 0x00, 0x00, 0x33, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x21, 0x00, 0x00, 0x21, 0x00, 0x00, 0x00,
        0x00, 0x2C, 0x2A, 0x27, 0x13, 0x2B, 0x27, 0x00, 0x00,
        0x03, 0x25, 0x00, 0x03, 0x25, 0x00, 0x13, 0x09, 0x00,
        0x00, 0x3B, 0x05, 0x00, 0x3B, 0x05, 0x00, 0x3B, 0x05,
        0x00, 0x0D, 0x05, 0x00, 0x0D, 0x05, 0x00, 0x0D, 0x05,
        0x13, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07,
        0x1B, 0x18, 0x0B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x17, 0x00, 0x00, 0x17, 0x0E, 0x05, 0x0D, 0x02,
        0x18, 0x00, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00,
        0x13, 0x09, 0x00, 0x00, 0x09, 0x00, 0x00, 0x09, 0x00,
        0x0E, 0x0F, 0x02, 0x24, 0x25, 0x00, 0x24, 0x25, 0x00,
        0x00, 0x00, 0x00, 0x29, 0x0F, 0x0F, 0x0F, 0x12, 0x00,
        0x00, 0x29, 0x12, 0x00, 0x29, 0x3F, 0x00, 0x13, 0x0F,
        0x00, 0x3D, 0x00, 0x00, 0x3D, 0x00, 0x00, 0x3D, 0x00,
        0x1B, 0x03, 0x00, 0x0B, 0x03, 0x0B, 0x13, 0x39, 0x24,
        0x0E, 0x02, 0x00, 0x24, 0x02, 0x00, 0x13, 0x07, 0x00,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
        0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F
    }
};
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from flash.pointcloud.detection import PointCloudObjectDetector
from tests.helpers.utils import _POINTCLOUD_TESTING
@pytest.mark.skipif(not _POINTCLOUD_TESTING, reason="pointcloud libraries aren't installed")
def test_backbones():
    """Verify the detector registers exactly the expected backbones."""
    expected = ["pointpillars", "pointpillars_kitti"]
    assert PointCloudObjectDetector.available_backbones() == expected
|
// Core HTTP module.
var http = require('http');
/**
 * Create an HTTP server that:
 *  1. listens on a specific host/port for client requests, and
 *  2. runs the handler for each incoming request.
 *
 * request  - the incoming client request (URL, headers, ...)
 * response - the outgoing response (status, headers, body)
 */
var fs = require('fs');
var mime = require('mime');
var path = require('path');
// Built-in module for parsing request URLs.
var url = require('url');
var server = http.createServer(function (request, response) {
    // Parse the raw URL; `true` also parses the query string into an object.
    var urlObj = url.parse(request.url, true);
    response.writeHead(200, {'Content-Type': 'text/html;charset=utf-8'});
    // pathname is the path portion of the URL (between host:port and "?").
    if (urlObj.pathname == '/apple') {
        response.end(urlObj.query.num + "袋苹果");
    } else {
        // Always finish the response; without this, non-/apple requests
        // hang until the client times out (connection leak).
        response.end();
    }
});
// Listen on localhost:8080 (valid ports: 0 - 65535).
// Inspect running node processes with: ps -ef | grep node
server.listen(8080, 'localhost');
|
// @flow
import * as React from 'react'
import {connect, type TypedState} from '../../util/container'
import Splash from './splash/container'
import Intro from './intro/container'
// Expose only the boot status from the app config slice.
const mapStateToProps = (state: TypedState, {navigateAppend}) => ({bootStatus: state.config.bootStatus})
// No actions are dispatched directly by this component.
const mapDispatchToProps = (dispatch: Dispatch, {navigateAppend}) => ({})
// Show the splash screen while booting (or after a boot failure);
// otherwise fall through to the intro screen.
const Switcher = ({bootStatus, navigateAppend}) => {
  switch (bootStatus) {
    case 'bootStatusLoading':
    case 'bootStatusFailure':
      return <Splash navigateAppend={navigateAppend} />
    default:
      return <Intro navigateAppend={navigateAppend} />
  }
}
export default connect(mapStateToProps, mapDispatchToProps)(Switcher)
|
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 3.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
import django_heroku
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from an environment variable before deploying.
SECRET_KEY = 'django-insecure-a9d779i49x+j872)d*^g$qgcl*ll$#)41)kx4u%jhs%+a$99r_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Named URL patterns users are redirected to after login / logout.
LOGIN_REDIRECT_URL = "home"
LOGOUT_REDIRECT_URL = "login"
# Application definition
INSTALLED_APPS = [
'jazzmin',
'corsheaders',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'user',
'favorited',
'centre',
]
# Middleware stack. SecurityMiddleware was listed twice in the original;
# duplicates run the middleware twice per request for no benefit.
# WhiteNoise is placed directly after SecurityMiddleware per its docs.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
ROOT_URLCONF = 'app.urls'
CORS_ALLOWED_ORIGINS = [
'http://localhost:3030',
'https://burkaysalescentre.herokuapp.com',
'http://burkaysalescentre.herokuapp.com'
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# NOTE(review): TEMPLATE_DIRS is a pre-Django-1.8 setting and is ignored by
# Django 3.2 — template directories are configured in TEMPLATES['DIRS'] above.
# Confirm nothing else reads these before removing.
SETTINGS_PATH = os.path.dirname(os.path.dirname(__file__))
TEMPLATE_DIRS = (
    os.path.join(SETTINGS_PATH, 'templates'),
)
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Django REST framework defaults.
REST_FRAMEWORK = {
    # Try JWT first, then fall back to session / basic auth (the latter two
    # keep the browsable API usable for logged-in users).
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework_simplejwt.authentication.JWTAuthentication',
        'rest_framework.authentication.SessionAuthentication',
        'rest_framework.authentication.BasicAuthentication',
    ),
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
    'PAGE_SIZE': 100
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
django_heroku.settings(locals())
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[7],{155:function(t,e,r){"use strict";function n(){return(n=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var r=arguments[e];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(t[n]=r[n])}return t}).apply(this,arguments)}r.d(e,"a",function(){return n})},156:function(t,e,r){"use strict";function n(t,e){if(null==t)return{};var r,n,a={},c=Object.keys(t);for(n=0;n<c.length;n++)r=c[n],e.indexOf(r)>=0||(a[r]=t[r]);return a}r.d(e,"a",function(){return n})},157:function(t,e,r){var n;!function(){"use strict";var r={}.hasOwnProperty;function a(){for(var t=[],e=0;e<arguments.length;e++){var n=arguments[e];if(n){var c=typeof n;if("string"===c||"number"===c)t.push(n);else if(Array.isArray(n)&&n.length){var o=a.apply(null,n);o&&t.push(o)}else if("object"===c)for(var i in n)r.call(n,i)&&n[i]&&t.push(i)}}return t.join(" ")}t.exports?(a.default=a,t.exports=a):void 0===(n=function(){return a}.apply(e,[]))||(t.exports=n)}()},158:function(t,e,r){"use strict";r.d(e,"b",function(){return o}),r.d(e,"a",function(){return u});r(172);var n,a=r(4),c=r.n(a);function o(t,e){return void 0===t&&(t=""),void 0===e&&(e=n),e?t.split(" ").map(function(t){return e[t]||t}).join(" "):t}var i={};function u(t,e){return function(r,n,a){var c;null!==r[n]&&void 0!==r[n]&&(i[c='"'+n+'" property of "'+a+'" has been deprecated.\n'+e]||("undefined"!=typeof console&&console.error(c),i[c]=!0));for(var o=arguments.length,u=new Array(o>3?o-3:0),f=3;f<o;f++)u[f-3]=arguments[f];return t.apply(void 0,[r,n,a].concat(u))}}c.a.oneOfType([c.a.string,c.a.func,function(t,e,r){if(!(t[e]instanceof Element))return new Error("Invalid prop `"+e+"` supplied to `"+r+"`. Expected prop to be an instance of Element. 
Validation failed.")},c.a.shape({current:c.a.any})]),"undefined"==typeof window||!window.document||window.document.createElement},159:function(t,e,r){"use strict";r.d(e,"c",function(){return a}),r.d(e,"e",function(){return c}),r.d(e,"a",function(){return o}),r.d(e,"b",function(){return i}),r.d(e,"d",function(){return u});var n=r(160),a=function(t){return Object(n.a)({tag:"svg",attr:{viewBox:"0 0 496 512"},child:[{tag:"path",attr:{d:"M165.9 397.4c0 2-2.3 3.6-5.2 3.6-3.3.3-5.6-1.3-5.6-3.6 0-2 2.3-3.6 5.2-3.6 3-.3 5.6 1.3 5.6 3.6zm-31.1-4.5c-.7 2 1.3 4.3 4.3 4.9 2.6 1 5.6 0 6.2-2s-1.3-4.3-4.3-5.2c-2.6-.7-5.5.3-6.2 2.3zm44.2-1.7c-2.9.7-4.9 2.6-4.6 4.9.3 2 2.9 3.3 5.9 2.6 2.9-.7 4.9-2.6 4.6-4.6-.3-1.9-3-3.2-5.9-2.9zM244.8 8C106.1 8 0 113.3 0 252c0 110.9 69.8 205.8 169.5 239.2 12.8 2.3 17.3-5.6 17.3-12.1 0-6.2-.3-40.4-.3-61.4 0 0-70 15-84.7-29.8 0 0-11.4-29.1-27.8-36.6 0 0-22.9-15.7 1.6-15.4 0 0 24.9 2 38.6 25.8 21.9 38.6 58.6 27.5 72.9 20.9 2.3-16 8.8-27.1 16-33.7-55.9-6.2-112.3-14.3-112.3-110.5 0-27.5 7.6-41.3 23.6-58.9-2.6-6.5-11.1-33.3 2.6-67.9 20.9-6.5 69 27 69 27 20-5.6 41.5-8.5 62.8-8.5s42.8 2.9 62.8 8.5c0 0 48.1-33.6 69-27 13.7 34.7 5.2 61.4 2.6 67.9 16 17.7 25.8 31.5 25.8 58.9 0 96.5-58.9 104.2-114.8 110.5 9.2 7.9 17 22.9 17 46.4 0 33.7-.3 75.4-.3 83.6 0 6.5 4.6 14.4 17.3 12.1C428.2 457.8 496 362.9 496 252 496 113.3 383.5 8 244.8 8zM97.2 352.9c-1.3 1-1 3.3.7 5.2 1.6 1.6 3.9 2.3 5.2 1 1.3-1 1-3.3-.7-5.2-1.6-1.6-3.9-2.3-5.2-1zm-10.8-8.1c-.7 1.3.3 2.9 2.3 3.9 1.6 1 3.6.7 4.3-.7.7-1.3-.3-2.9-2.3-3.9-2-.6-3.6-.3-4.3.7zm32.4 35.6c-1.6 1.3-1 4.3 1.3 6.2 2.3 2.3 5.2 2.6 6.5 1 1.3-1.3.7-4.3-1.3-6.2-2.2-2.3-5.2-2.6-6.5-1zm-11.4-14.7c-1.6 1-1.6 3.6 0 5.9 1.6 2.3 4.3 3.3 5.6 2.3 1.6-1.3 1.6-3.9 0-6.2-1.4-2.3-4-3.3-5.6-2z"}}]})(t)};a.displayName="FaGithub";var c=function(t){return Object(n.a)({tag:"svg",attr:{viewBox:"0 0 448 512"},child:[{tag:"path",attr:{d:"M416 32H31.9C14.3 32 0 46.5 0 64.3v383.4C0 465.5 14.3 480 31.9 480H416c17.6 0 32-14.5 
32-32.3V64.3c0-17.8-14.4-32.3-32-32.3zM135.4 416H69V202.2h66.5V416zm-33.2-243c-21.3 0-38.5-17.3-38.5-38.5S80.9 96 102.2 96c21.2 0 38.5 17.3 38.5 38.5 0 21.3-17.2 38.5-38.5 38.5zm282.1 243h-66.4V312c0-24.8-.5-56.7-34.5-56.7-34.6 0-39.9 27-39.9 54.9V416h-66.4V202.2h63.7v29.2h.9c8.9-16.8 30.6-34.5 62.9-34.5 67.2 0 79.7 44.3 79.7 101.9V416z"}}]})(t)};c.displayName="FaLinkedin";var o=function(t){return Object(n.a)({tag:"svg",attr:{viewBox:"0 0 576 512"},child:[{tag:"path",attr:{d:"M528 32H48C21.5 32 0 53.5 0 80v352c0 26.5 21.5 48 48 48h480c26.5 0 48-21.5 48-48V80c0-26.5-21.5-48-48-48zm-352 96c35.3 0 64 28.7 64 64s-28.7 64-64 64-64-28.7-64-64 28.7-64 64-64zm112 236.8c0 10.6-10 19.2-22.4 19.2H86.4C74 384 64 375.4 64 364.8v-19.2c0-31.8 30.1-57.6 67.2-57.6h5c12.3 5.1 25.7 8 39.8 8s27.6-2.9 39.8-8h5c37.1 0 67.2 25.8 67.2 57.6v19.2zM512 312c0 4.4-3.6 8-8 8H360c-4.4 0-8-3.6-8-8v-16c0-4.4 3.6-8 8-8h144c4.4 0 8 3.6 8 8v16zm0-64c0 4.4-3.6 8-8 8H360c-4.4 0-8-3.6-8-8v-16c0-4.4 3.6-8 8-8h144c4.4 0 8 3.6 8 8v16zm0-64c0 4.4-3.6 8-8 8H360c-4.4 0-8-3.6-8-8v-16c0-4.4 3.6-8 8-8h144c4.4 0 8 3.6 8 8v16z"}}]})(t)};o.displayName="FaAddressCard";var i=function(t){return Object(n.a)({tag:"svg",attr:{viewBox:"0 0 512 512"},child:[{tag:"path",attr:{d:"M502.3 190.8c3.9-3.1 9.7-.2 9.7 4.7V400c0 26.5-21.5 48-48 48H48c-26.5 0-48-21.5-48-48V195.6c0-5 5.7-7.8 9.7-4.7 22.4 17.4 52.1 39.5 154.1 113.6 21.1 15.4 56.7 47.8 92.2 47.6 35.7.3 72-32.8 92.3-47.6 102-74.1 131.6-96.3 154-113.7zM256 320c23.2.4 56.6-29.2 73.4-41.4 132.7-96.3 142.8-104.7 173.4-128.7 5.8-4.5 9.2-11.5 9.2-18.9v-19c0-26.5-21.5-48-48-48H48C21.5 64 0 85.5 0 112v19c0 7.4 3.4 14.3 9.2 18.9 30.6 23.9 40.7 32.4 173.4 128.7 16.8 12.2 50.2 41.8 73.4 41.4z"}}]})(t)};i.displayName="FaEnvelope";var u=function(t){return Object(n.a)({tag:"svg",attr:{viewBox:"0 0 496 512"},child:[{tag:"path",attr:{d:"M336.5 160C322 70.7 287.8 8 248 8s-74 62.7-88.5 152h177zM152 256c0 22.2 1.2 43.5 3.3 64h185.3c2.1-20.5 3.3-41.8 3.3-64s-1.2-43.5-3.3-64H155.3c-2.1 
20.5-3.3 41.8-3.3 64zm324.7-96c-28.6-67.9-86.5-120.4-158-141.6 24.4 33.8 41.2 84.7 50 141.6h108zM177.2 18.4C105.8 39.6 47.8 92.1 19.3 160h108c8.7-56.9 25.5-107.8 49.9-141.6zM487.4 192H372.7c2.1 21 3.3 42.5 3.3 64s-1.2 43-3.3 64h114.6c5.5-20.5 8.6-41.8 8.6-64s-3.1-43.5-8.5-64zM120 256c0-21.5 1.2-43 3.3-64H8.6C3.2 212.5 0 233.8 0 256s3.2 43.5 8.6 64h114.6c-2-21-3.2-42.5-3.2-64zm39.5 96c14.5 89.3 48.7 152 88.5 152s74-62.7 88.5-152h-177zm159.3 141.6c71.4-21.2 129.4-73.7 158-141.6h-108c-8.8 56.9-25.6 107.8-50 141.6zM19.3 352c28.6 67.9 86.5 120.4 158 141.6-24.4-33.8-41.2-84.7-50-141.6h-108z"}}]})(t)};u.displayName="FaGlobe"},160:function(t,e,r){"use strict";var n=r(0),a=r.n(n),c={color:void 0,size:void 0,className:void 0,style:void 0,attr:void 0},o=a.a.createContext&&a.a.createContext(c);r.d(e,"a",function(){return f});var i=function(){return(i=Object.assign||function(t){for(var e,r=1,n=arguments.length;r<n;r++)for(var a in e=arguments[r])Object.prototype.hasOwnProperty.call(e,a)&&(t[a]=e[a]);return t}).apply(this,arguments)},u=function(t,e){var r={};for(var n in t)Object.prototype.hasOwnProperty.call(t,n)&&e.indexOf(n)<0&&(r[n]=t[n]);if(null!=t&&"function"==typeof Object.getOwnPropertySymbols){var a=0;for(n=Object.getOwnPropertySymbols(t);a<n.length;a++)e.indexOf(n[a])<0&&(r[n[a]]=t[n[a]])}return r};function f(t){return function(e){return a.a.createElement(d,i({attr:i({},t.attr)},e),function t(e){return e&&e.map(function(e,r){return a.a.createElement(e.tag,i({key:r},e.attr),t(e.child))})}(t.child))}}function d(t){var e=function(e){var r,n=t.size||e.size||"1em";e.className&&(r=e.className),t.className&&(r=(r?r+" ":"")+t.className);var c=t.attr,o=u(t,["attr"]);return a.a.createElement("svg",i({stroke:"currentColor",fill:"currentColor",strokeWidth:"0"},e.attr,c,o,{className:r,style:i({color:t.color||e.color},e.style,t.style),height:n,width:n}),t.children)};return void 0!==o?a.a.createElement(o.Consumer,null,function(t){return 
e(t)}):e(c)}},172:function(t,e,r){(function(e){var r="[object AsyncFunction]",n="[object Function]",a="[object GeneratorFunction]",c="[object Null]",o="[object Proxy]",i="[object Undefined]",u="object"==typeof e&&e&&e.Object===Object&&e,f="object"==typeof self&&self&&self.Object===Object&&self,d=u||f||Function("return this")(),v=Object.prototype,s=v.hasOwnProperty,l=v.toString,p=d.Symbol,h=p?p.toStringTag:void 0;function b(t){return null==t?void 0===t?i:c:h&&h in Object(t)?function(t){var e=s.call(t,h),r=t[h];try{t[h]=void 0;var n=!0}catch(c){}var a=l.call(t);n&&(e?t[h]=r:delete t[h]);return a}(t):function(t){return l.call(t)}(t)}t.exports=function(t){if(!function(t){var e=typeof t;return null!=t&&("object"==e||"function"==e)}(t))return!1;var e=b(t);return e==n||e==a||e==r||e==o}}).call(this,r(73))},173:function(t,e){t.exports=function(t){var e=typeof t;return!!t&&("object"==e||"function"==e)}},174:function(t,e,r){"use strict";r.d(e,"a",function(){return a}),r.d(e,"b",function(){return c});var n=r(160),a=function(t){return Object(n.a)({tag:"svg",attr:{viewBox:"0 0 14 16"},child:[{tag:"path",attr:{fillRule:"evenodd",d:"M9.5 3L8 4.5 11.5 8 8 11.5 9.5 13 14 8 9.5 3zm-5 0L0 8l4.5 5L6 11.5 2.5 8 6 4.5 4.5 3z"}}]})(t)};a.displayName="GoCode";var c=function(t){return Object(n.a)({tag:"svg",attr:{viewBox:"0 0 16 16"},child:[{tag:"path",attr:{fillRule:"evenodd",d:"M15 2H1c-.55 0-1 .45-1 1v9c0 .55.45 1 1 1h5.34c-.25.61-.86 1.39-2.34 2h8c-1.48-.61-2.09-1.39-2.34-2H15c.55 0 1-.45 1-1V3c0-.55-.45-1-1-1zm0 9H1V3h14v8z"}}]})(t)};c.displayName="GoDeviceDesktop"},175:function(t,e,r){"use strict";var n=r(155),a=r(156),c=r(0),o=r.n(c),i=r(4),u=r.n(i),f=r(157),d=r.n(f),v=r(158),s={tag:u.a.oneOfType([u.a.func,u.a.string]),noGutters:u.a.bool,className:u.a.string,cssModule:u.a.object,form:u.a.bool},l=function(t){var 
e=t.className,r=t.cssModule,c=t.noGutters,i=t.tag,u=t.form,f=Object(a.a)(t,["className","cssModule","noGutters","tag","form"]),s=Object(v.b)(d()(e,c?"no-gutters":null,u?"form-row":"row"),r);return o.a.createElement(i,Object(n.a)({},f,{className:s}))};l.propTypes=s,l.defaultProps={tag:"div"},e.a=l},176:function(t,e,r){"use strict";var n=r(155),a=r(156),c=r(173),o=r.n(c),i=r(0),u=r.n(i),f=r(4),d=r.n(f),v=r(157),s=r.n(v),l=r(158),p=d.a.oneOfType([d.a.number,d.a.string]),h=d.a.oneOfType([d.a.bool,d.a.number,d.a.string,d.a.shape({size:d.a.oneOfType([d.a.bool,d.a.number,d.a.string]),push:Object(l.a)(p,'Please use the prop "order"'),pull:Object(l.a)(p,'Please use the prop "order"'),order:p,offset:p})]),b={tag:d.a.oneOfType([d.a.func,d.a.string]),xs:h,sm:h,md:h,lg:h,xl:h,className:d.a.string,cssModule:d.a.object,widths:d.a.array},y={tag:"div",widths:["xs","sm","md","lg","xl"]},g=function(t,e,r){return!0===r||""===r?t?"col":"col-"+e:"auto"===r?t?"col-auto":"col-"+e+"-auto":t?"col-"+r:"col-"+e+"-"+r},j=function(t){var e=t.className,r=t.cssModule,c=t.widths,i=t.tag,f=Object(a.a)(t,["className","cssModule","widths","tag"]),d=[];c.forEach(function(e,n){var a=t[e];if(delete f[e],a||""===a){var c=!n;if(o()(a)){var i,u=c?"-":"-"+e+"-",v=g(c,e,a.size);d.push(Object(l.b)(s()(((i={})[v]=a.size||""===a.size,i["order"+u+a.order]=a.order||0===a.order,i["offset"+u+a.offset]=a.offset||0===a.offset,i)),r))}else{var p=g(c,e,a);d.push(p)}}}),d.length||d.push("col");var v=Object(l.b)(s()(e,d),r);return u.a.createElement(i,Object(n.a)({},f,{className:v}))};j.propTypes=b,j.defaultProps=y,e.a=j}}]);
//# sourceMappingURL=7-39c49108b18fee9d9802.js.map
|
import numpy as np
import matplotlib.pyplot as plt
from xas.file_io import load_binned_df_from_file
import time as ttime
from xas.fitting import fit_gaussian_with_estimation, fit_gaussian, Nominal2ActualConverter
# from lmfit.models import GaussianModel, LinearModel
# _gmodel = GaussianModel() + LinearModel()
class VonHamosScan:
    """Container for a Von Hamos spectrometer scan read from a binned data file.

    Loads the binned scan table, pulls the matching Pilatus-100k image stack
    from the databroker, normalizes it by i0, and exposes ROI selection and
    image-integration helpers for building RIXS maps.
    """

    def __init__(self, db, fname):
        """Read the binned scan in ``fname`` and its image stack from ``db``.

        Parameters
        ----------
        db : databroker catalog used to look up the scan by uid.
        fname : path to the binned data file; its header must contain the uid.
        """
        df, header = load_binned_df_from_file(fname)
        # The uid lives on a "# uid <value>" line of the file header.
        uid = [i for i in header.split('\n# ') if 'uid' in i][0].split(' ')[-1]
        start = ttime.time()
        t = db[uid].table(fill=True)
        print(f'db read in {ttime.time() - start}')
        self.energy = df.energy.values
        self.i0 = df.i0.values
        self.iff = df.iff.values
        self.images = np.array([i.squeeze() for i in t.pil100k_image])
        # If the raw table was acquired with the opposite energy direction,
        # flip the image stack so it lines up with the binned energy axis.
        if not np.isclose(t.hhm_energy.values[0], self.energy[0], 1e-4):
            self.images = self.images[::-1, :, :]
        # Normalize each frame by the incident intensity.
        self.images = self.images / np.abs(self.i0)[:, None, None]
        self.muf = self.iff / self.i0
        self.total_image = np.sum(self.images, axis=0)
        # Default ROI: the full detector.
        self.y = 0
        self.x = 0
        self.dy, self.dx = self.total_image.shape
        self.energy_converter = None

    def set_roi(self, y, dy, x, dx):
        """Set the rectangular region of interest (origin and extents, pixels)."""
        self.y = y
        self.dy = dy
        self.x = x
        self.dx = dx

    def show_roi(self, fignum=1, vmin=None, vmax=None):
        """Display the summed image with the current ROI outlined in red."""
        if vmin is None: vmin = self.total_image.min()
        # Median of the unique pixel values is a robust default upper limit.
        if vmax is None: vmax = np.percentile(np.unique(self.total_image), 50)
        ysize, xsize = self.total_image.shape
        plt.figure(fignum)
        plt.clf()
        plt.imshow(self.total_image, vmin=vmin, vmax=vmax)
        plt.vlines([self.x, self.x + self.dx], 0, ysize, colors='r')
        plt.hlines([self.y, self.y + self.dy], 0, xsize, colors='r')
        plt.xlim(self.x - 10, self.x + self.dx + 10)
        plt.ylim(self.y - 10, self.y + self.dy + 10)

    def integrate_images(self):
        """Average the ROI rows of each frame into the RIXS map.

        Stores ``self.pixel`` (ROI pixel axis) and ``self.rixs`` with shape
        (n_roi_pixels, n_energies).
        """
        self.pixel = np.arange(self.x, self.x + self.dx + 1)
        self.rixs = np.mean(
            self.images[:, self.y : self.y + self.dy + 1, self.x : self.x + self.dx + 1],
            axis=1).T

    def append_calibration(self, calibration):
        """Adopt the pixel-to-energy converter from a calibration scan."""
        self.energy_converter = calibration.energy_converter

    @property
    def emission_energy(self):
        """Emission-energy axis for the ROI pixels.

        Raises
        ------
        Exception
            If no calibration has been appended yet.
        """
        # Fail explicitly instead of the original bare ``except:``, which
        # also masked genuine errors raised inside nom2act().
        if self.energy_converter is None:
            raise Exception('No energy converter')
        return self.energy_converter.nom2act(self.pixel)
class VonHamosCalibration(VonHamosScan):
    """Elastic-line calibration scan: maps detector pixels to emission energy."""

    def __init__(self, *args):
        super().__init__(*args)

    def calibrate(self):
        """Fit the elastic line in every column of the RIXS map and build a
        pixel -> energy converter from the fitted centers.

        Side effects: normalizes ``self.rixs`` columns to unit maximum, fills
        ``self.pixel_cen`` / ``self.pixel_fwhm``, plots the fits in figure 2,
        and sets ``self.energy_converter``.
        """
        # Normalize each energy column to its own maximum.
        self.rixs /= np.max(self.rixs, axis=0)[None, :]
        n_scans = self.rixs.shape[1]
        self.pixel_cen = np.zeros(n_scans)
        self.pixel_fwhm = np.zeros(n_scans)
        plt.figure(2)
        plt.clf()
        for i in range(n_scans):
            self.pixel_cen[i], self.pixel_fwhm[i], I_fit_raw = self._fit_elastic_line(self.pixel, self.rixs[:, i])
            # Stack the traces vertically (offset by -i) for visual inspection.
            plt.plot(self.pixel, self.rixs[:, i] - i, 'k-')
            plt.plot(self.pixel, I_fit_raw - i, 'r')
        # Quadratic pixel->energy mapping through the fitted line centers.
        self.energy_converter = Nominal2ActualConverter(self.pixel_cen, self.energy, n_poly=2)

    def _fit_elastic_line(self, x, y, threshold=5):
        """Fit a Gaussian to the elastic line in trace ``y`` over pixels ``x``.

        If the first fit is suspiciously broad (fwhm > threshold), subtract it
        and refit on the normalized residual to reject a broad background.
        Returns (center, fwhm, fitted_curve).
        """
        # x.min() + y.argmax() as a center guess assumes unit pixel spacing
        # — TODO confirm.
        cen, fwhm, _, _, y_fit = fit_gaussian(x, y, x.min() + y.argmax(), 1)
        if fwhm > threshold:
            y_new = y - y_fit
            y_new /= y_new.max()
            # pix_cen, fwhm, _, _, I_fit_raw = fit_gaussian_with_estimation(self.pixel, new_spectrum)
            cen, fwhm, _, _, y_fit = fit_gaussian(x, y_new, x.min() + y_new.argmax(), 1)
        return cen, fwhm, y_fit
#
#
# plt.vlines(self.pixel.min() + self.rixs[:, 10].argmax(), 0, 1)
#
|
from django.conf.urls import url
from . import constants, views # isort:skip
# Endpoints for the aliases app. Route names come from `constants` so that
# reverse() call sites stay in sync with these patterns.
urlpatterns = [
    url(
        r'^create-alias/$',
        views.create_alias_view,
        name=constants.CREATE_ALIAS_URL_NAME,
    ),
    # Category list (top-level browse view).
    url(
        r'^aliases/$',
        views.CategoryListView.as_view(),
        name=constants.CATEGORY_LIST_URL_NAME,
    ),
    # Aliases belonging to a single category.
    url(
        r'^aliases/category/(?P<category_pk>\d+)/$',
        views.AliasListView.as_view(),
        name=constants.LIST_ALIASES_URL_NAME,
    ),
    # Where a given alias is used.
    url(
        r'^aliases/(?P<pk>\d+)/usage/$',
        views.alias_usage_view,
        name=constants.USAGE_ALIAS_URL_NAME,
    ),
    url(
        r'^detach-alias/(?P<plugin_pk>\d+)/$',
        views.detach_alias_plugin_view,
        name=constants.DETACH_ALIAS_PLUGIN_URL_NAME,
    ),
    url(
        r'^delete-alias/(?P<pk>\d+)/$',
        views.delete_alias_view,
        name=constants.DELETE_ALIAS_URL_NAME,
    ),
    url(
        r'^set-alias-position/$',
        views.set_alias_position_view,
        name=constants.SET_ALIAS_POSITION_URL_NAME,
    ),
    # Select2 autocomplete endpoint for alias pickers.
    url(
        r'^select2/$',
        views.AliasSelect2View.as_view(),
        name=constants.SELECT2_ALIAS_URL_NAME,
    ),
]
|
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
// Mount the application at #root; StrictMode enables extra dev-only checks.
ReactDOM.render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
  document.getElementById('root')
);
// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
|
'use strict';
var _ = require('lodash');
var BN = require('../crypto/bn');
var BufferUtil = require('../util/buffer');
var BufferReader = require('../encoding/bufferreader');
var BufferWriter = require('../encoding/bufferwriter');
var Hash = require('../crypto/hash');
var JSUtil = require('../util/js');
var $ = require('../util/preconditions');
var GENESIS_BITS = 0x1d00ffff;
/**
 * Instantiate a BlockHeader from a Buffer, JSON object, or Object with
 * the properties of the BlockHeader
 *
 * @param {*} arg - A Buffer, JSON string, or Object
 * @returns {BlockHeader} - An instance of block header
 * @constructor
 */
var BlockHeader = function BlockHeader(arg) {
  // Support calling without `new`.
  if (!(this instanceof BlockHeader)) {
    return new BlockHeader(arg);
  }
  var info = BlockHeader._from(arg);
  this.version = info.version;
  this.prevHash = info.prevHash;
  this.merkleRoot = info.merkleRoot;
  this.witnessMerkleRoot = info.witnessMerkleRoot;
  this.time = info.time;
  // `timestamp` is kept as an alias of `time` for compatibility.
  this.timestamp = info.time;
  this.bits = info.bits;
  this.nonce = info.nonce;
  if (info.hash) {
    // When a hash was supplied, verify it matches the computed one.
    $.checkState(
      this.hash === info.hash,
      'Argument object hash property does not match block hash.'
    );
  }
  return this;
};
/**
 * Normalize a constructor argument into a plain header-data object.
 *
 * @param {*} arg - A Buffer, JSON string or Object
 * @returns {Object} - An object representing block header data
 * @throws {TypeError} - If the argument was not recognized
 * @private
 */
BlockHeader._from = function _from(arg) {
  var info = {};
  if (BufferUtil.isBuffer(arg)) {
    info = BlockHeader._fromBufferReader(BufferReader(arg));
  } else if (_.isObject(arg)) {
    info = BlockHeader._fromObject(arg);
  } else {
    throw new TypeError('Unrecognized argument for BlockHeader');
  }
  return info;
};
/**
 * Build a header-data object from a plain object, converting hex-string
 * hashes into internal little-endian buffers.
 *
 * @param {Object} data - A JSON string
 * @returns {Object} - An object representing block header data
 * @private
 */
BlockHeader._fromObject = function _fromObject(data) {
  $.checkArgument(data, 'data is required');
  var prevHash = data.prevHash;
  var merkleRoot = data.merkleRoot;
  var witnessMerkleRoot = data.witnessMerkleRoot;
  // Hex strings are in display (big-endian) order; internal buffers are
  // little-endian, hence the reverse.
  if (_.isString(data.prevHash)) {
    prevHash = BufferUtil.reverse(Buffer.from(data.prevHash, 'hex'));
  }
  if (_.isString(data.merkleRoot)) {
    merkleRoot = BufferUtil.reverse(Buffer.from(data.merkleRoot, 'hex'));
  }
  if (_.isString(data.witnessMerkleRoot)) {
    witnessMerkleRoot = BufferUtil.reverse(Buffer.from(data.witnessMerkleRoot, 'hex'));
  }
  var info = {
    hash: data.hash,
    version: data.version,
    prevHash: prevHash,
    merkleRoot: merkleRoot,
    witnessMerkleRoot: witnessMerkleRoot,
    time: data.time,
    // timestamp mirrors time (see constructor).
    timestamp: data.time,
    bits: data.bits,
    nonce: data.nonce
  };
  return info;
};
/**
 * @param {Object} obj - A plain JavaScript object
 * @returns {BlockHeader} - An instance of block header
 */
BlockHeader.fromObject = function fromObject(obj) {
  var info = BlockHeader._fromObject(obj);
  return new BlockHeader(info);
};
/**
 * Parse a header out of raw block data (skips the leading bytes before the
 * header — see Constants.START_OF_HEADER).
 *
 * @param {Binary} data - Raw block binary data or buffer
 * @returns {BlockHeader} - An instance of block header
 */
BlockHeader.fromRawBlock = function fromRawBlock(data) {
  if (!BufferUtil.isBuffer(data)) {
    data = Buffer.from(data, 'binary');
  }
  var br = BufferReader(data);
  br.pos = BlockHeader.Constants.START_OF_HEADER;
  var info = BlockHeader._fromBufferReader(br);
  return new BlockHeader(info);
};
/**
 * @param {Buffer} buf - A buffer of the block header
 * @returns {BlockHeader} - An instance of block header
 */
BlockHeader.fromBuffer = function fromBuffer(buf) {
  var info = BlockHeader._fromBufferReader(BufferReader(buf));
  return new BlockHeader(info);
};
/**
 * @param {string} str - A hex encoded buffer of the block header
 * @returns {BlockHeader} - An instance of block header
 */
BlockHeader.fromString = function fromString(str) {
  var buf = Buffer.from(str, 'hex');
  return BlockHeader.fromBuffer(buf);
};
/**
 * Read the fixed-layout header fields in serialization order.
 *
 * @param {BufferReader} br - A BufferReader of the block header
 * @returns {Object} - An object representing block header data
 * @private
 */
BlockHeader._fromBufferReader = function _fromBufferReader(br) {
  var info = {};
  info.version = br.readInt32LE();
  info.prevHash = br.read(32);
  info.merkleRoot = br.read(32);
  // NOTE(review): this chain serializes a witness merkle root inside the
  // header (unlike Bitcoin's 80-byte header) — confirm against the spec.
  info.witnessMerkleRoot = br.read(32);
  info.time = br.readUInt32LE();
  info.bits = br.readUInt32LE();
  info.nonce = br.readUInt32LE();
  return info;
};
/**
 * @param {BufferReader} br - A BufferReader of the block header
 * @returns {BlockHeader} - An instance of block header
 */
BlockHeader.fromBufferReader = function fromBufferReader(br) {
  var info = BlockHeader._fromBufferReader(br);
  return new BlockHeader(info);
};
/**
 * @returns {Object} - A plain object of the BlockHeader, with hashes as
 * big-endian hex strings (the inverse of _fromObject's conversion)
 */
BlockHeader.prototype.toObject = BlockHeader.prototype.toJSON = function toObject() {
  return {
    hash: this.hash,
    version: this.version,
    prevHash: BufferUtil.reverse(this.prevHash).toString('hex'),
    merkleRoot: BufferUtil.reverse(this.merkleRoot).toString('hex'),
    witnessMerkleRoot: BufferUtil.reverse(this.witnessMerkleRoot).toString('hex'),
    time: this.time,
    bits: this.bits,
    nonce: this.nonce
  };
};
/**
 * @returns {Buffer} - A Buffer of the BlockHeader
 */
BlockHeader.prototype.toBuffer = function toBuffer() {
  return this.toBufferWriter().concat();
};
/**
 * @returns {string} - A hex encoded string of the BlockHeader
 */
BlockHeader.prototype.toString = function toString() {
  return this.toBuffer().toString('hex');
};
/**
 * Serialize the fields in the same order _fromBufferReader reads them.
 *
 * @param {BufferWriter} bw - An existing instance BufferWriter (optional)
 * @returns {BufferWriter} - An instance of BufferWriter representation of the BlockHeader
 */
BlockHeader.prototype.toBufferWriter = function toBufferWriter(bw) {
  if (!bw) {
    bw = new BufferWriter();
  }
  bw.writeInt32LE(this.version);
  bw.write(this.prevHash);
  bw.write(this.merkleRoot);
  bw.write(this.witnessMerkleRoot);
  bw.writeUInt32LE(this.time);
  bw.writeUInt32LE(this.bits);
  bw.writeUInt32LE(this.nonce);
  return bw;
};
/**
 * Returns the target difficulty for this block
 *
 * Decodes the compact "bits" form: the low 24 bits are the mantissa and the
 * top byte is the exponent in bytes, i.e. target = mantissa * 2^(8*(exp-3)).
 * @param {Number} bits
 * @returns {BN} An instance of BN with the decoded difficulty bits
 */
BlockHeader.prototype.getTargetDifficulty = function getTargetDifficulty(bits) {
  bits = bits || this.bits;
  var target = new BN(bits & 0xffffff);
  var mov = 8 * ((bits >>> 24) - 3);
  // Multiply by 2 `mov` times, i.e. shift the mantissa left by `mov` bits.
  while (mov-- > 0) {
    target = target.mul(new BN(2));
  }
  return target;
};
/**
 * Difficulty relative to the genesis target, returned as a float with
 * 8 decimal places computed via integer (BN) arithmetic.
 * @link https://en.bitcoin.it/wiki/Difficulty
 * @return {Number}
 */
BlockHeader.prototype.getDifficulty = function getDifficulty() {
  // Scale by 10^8 so the integer division keeps 8 decimal digits.
  var difficulty1TargetBN = this.getTargetDifficulty(GENESIS_BITS).mul(new BN(Math.pow(10, 8)));
  var currentTargetBN = this.getTargetDifficulty();
  var difficultyString = difficulty1TargetBN.div(currentTargetBN).toString(10);
  // Re-insert the decimal point before the 8 scaled digits.
  var decimalPos = difficultyString.length - 8;
  difficultyString = difficultyString.slice(0, decimalPos) + '.' + difficultyString.slice(decimalPos);
  return parseFloat(difficultyString);
};
/**
 * @returns {Buffer} - The little endian hash buffer of the header
 */
BlockHeader.prototype._getHash = function hash() {
  var buf = this.toBuffer();
  // Double SHA-256 of the serialized header.
  return Hash.sha256sha256(buf);
};
var idProperty = {
  configurable: false,
  enumerable: true,
  /**
   * @returns {string} - The big endian hash buffer of the header
   */
  get: function() {
    // Computed lazily and cached on first access.
    if (!this._id) {
      this._id = BufferReader(this._getHash()).readReverse().toString('hex');
    }
    return this._id;
  },
  // Silently ignore assignment attempts (read-only property).
  set: _.noop
};
// `id` and `hash` are aliases for the same cached, read-only value.
Object.defineProperty(BlockHeader.prototype, 'id', idProperty);
Object.defineProperty(BlockHeader.prototype, 'hash', idProperty);
/**
 * @returns {Boolean} - If timestamp is not too far in the future
 */
BlockHeader.prototype.validTimestamp = function validTimestamp() {
  // Header time may lead the current clock by at most MAX_TIME_OFFSET seconds.
  var nowSeconds = Math.round(new Date().getTime() / 1000);
  return this.time <= nowSeconds + BlockHeader.Constants.MAX_TIME_OFFSET;
};
/**
 * @returns {Boolean} - If the proof-of-work hash satisfies the target difficulty
 */
BlockHeader.prototype.validProofOfWork = function validProofOfWork() {
  // The big-endian hex id, read as a number, must not exceed the target.
  var pow = new BN(this.id, 'hex');
  var target = this.getTargetDifficulty();
  if (pow.cmp(target) > 0) {
    return false;
  }
  return true;
};
/**
 * @returns {string} - A string formatted for the console
 */
BlockHeader.prototype.inspect = function inspect() {
  return '<BlockHeader ' + this.id + '>';
};
BlockHeader.Constants = {
  START_OF_HEADER: 8, // Start buffer position in raw block data
  MAX_TIME_OFFSET: 2 * 60 * 60, // The max a timestamp can be in the future
  LARGEST_HASH: new BN('10000000000000000000000000000000000000000000000000000000000000000', 'hex')
};
module.exports = BlockHeader;
|
import logging
from classy_vision.meters import ClassyMeter, register_meter
from classy_vision.generic.distributed_util import all_reduce_sum, gather_from_all
from vissl.config import AttrDict
from vissl.utils.env import get_machine_local_and_dist_rank
import numpy as np
import torch
import torch.nn.functional as F
from sklearn.metrics import roc_auc_score, roc_curve, precision_recall_curve, \
balanced_accuracy_score, \
recall_score, precision_score, confusion_matrix
@register_meter("auc_meter")
class AUCMeter(ClassyMeter):
"""
Add docuementation on what this meter does
Args:
add documentation about each meter parameter
"""
    def __init__(self, meters_config: AttrDict):
        """Initialize the meter from its config section.

        Args:
            meters_config: meter config; must provide ``num_classes``.
        """
        # implement what the init method should do like
        # setting variable to update etc.
        self.num_classes = meters_config.get("num_classes")
        # Running counters; tensors are allocated in reset().
        self._total_sample_count = None
        self._curr_sample_count = None
        # self._meters = ?
        self.reset()
@classmethod
def from_config(cls, meters_config: AttrDict):
"""
Get the AUCMeter instance from the user defined config
"""
return cls(meters_config)
@property
def name(self):
"""
Name of the meter
"""
return "auc_binary_meter"
    @property
    def value(self):
        """
        Value of the meter globally synced. mean AP and AP for each class is returned
        """
        # NOTE(review): the docstring mentions AP, but this returns AUC /
        # sensitivity / specificity / precision — confirm and update.
        def sigmoid(X):
            return 1 / (1 + np.exp(-X))
        _, distributed_rank = get_machine_local_and_dist_rank()
        logging.info(
            f"Rank: {distributed_rank} AUC meter: "
        )
        # in scikit-learn naming
        y_true_hotencoded = self._targets.detach().numpy() # [n_samples, n_classes]
        y_score_logits = self._scores.detach().numpy() # [n_samples, n_classes] in logits
        y_true = np.argmax(y_true_hotencoded, axis = 1) # [n_samples,]
        # NOTE(review): argmax collapses scores to hard class predictions, so
        # roc_auc_score below operates on labels, not probabilities; sigmoid
        # is monotonic and does not change the argmax. Confirm this is the
        # intended AUC definition.
        y_score = np.argmax(sigmoid(y_score_logits), axis = 1) # [n_samples,]
        # If you want some curves?
        # fpr, tpr, thresholds = roc_curve(y_true, y_score, pos_label=1)
        # precision, recall, thresholds = precision_recall_curve(y_true, y_score)
        # AUCR - coverage/risk curve for selective classification? see e.g. Y Geifman et al. 2017, 2019
        # "Selectivenet: A deep neural network with an integrated reject option"
        # "Selective classification for deep neural networks"
        auroc = roc_auc_score(y_true, y_score)
        # I assume that these internally compute confusion_matrix, so you could compute these directly
        # from the "tn, fp, fn, tp", maybe save some milliseconds, benchmark later?
        # balanced_accuracy = balanced_accuracy_score(y_true, y_score)
        # For binary labels, ravel() order is (tn, fp, fn, tp).
        tn, fp, fn, tp = confusion_matrix(y_true, y_score).ravel()
        # sensitivity = recall_score(y_true, y_score, pos_label = 1) # sensitivity, recall
        sensitivity = tp / (tp+fn)
        # specificity = recall_score(y_true, y_score, pos_label = 0) # specificity, selectivity or true negative rate, TNR
        specificity = tn / (tn+fp)
        # precision = precision_score(y_true, y_score)
        precision = tp / (tp + fp)
        return {"AUC": auroc,
                "Sensitivity": sensitivity,
                "Specificity": specificity,
                "precision": precision}
def gather_scores(self, scores: torch.Tensor):
    """
    Gather the score tensors from every distributed worker.

    Resulting shape is (batch_size * num_gpus) x embedding_dim. When torch
    distributed is unavailable or uninitialized, the input is returned as-is.
    """
    distributed = (
        torch.distributed.is_available() and torch.distributed.is_initialized()
    )
    if not distributed:
        return scores
    # gather all embeddings from every rank
    return gather_from_all(scores)
def gather_targets(self, targets: torch.Tensor):
    """
    Gather the target tensors from every distributed worker.

    Resulting shape is (batch_size * num_gpus) x embedding_dim. When torch
    distributed is unavailable or uninitialized, the input is returned as-is.
    """
    distributed = (
        torch.distributed.is_available() and torch.distributed.is_initialized()
    )
    if not distributed:
        return targets
    # gather all embeddings from every rank
    return gather_from_all(targets)
def sync_state(self):
    """
    Globally syncing the state of each meter across all the trainers.
    Should perform distributed communications like all_gather etc
    to correctly gather the global values to compute the metric
    """
    # Communications: order matters — counts must be reduced and buffers
    # gathered before the running total below is updated.
    self._curr_sample_count = all_reduce_sum(self._curr_sample_count)
    self._scores = self.gather_scores(self._scores)
    self._targets = self.gather_targets(self._targets)
    # Store results: fold this sync window's (now global) count into the total.
    self._total_sample_count += self._curr_sample_count
    # Reset values until next sync (buffers themselves are kept — only the
    # per-window counter is zeroed in place).
    self._curr_sample_count.zero_()
def reset(self):
    """Clear all accumulated meter state: score/target buffers and counters."""
    empty_scores = torch.zeros(0, self.num_classes, dtype=torch.float32)
    empty_targets = torch.zeros(0, self.num_classes, dtype=torch.int8)
    self._scores = empty_scores
    self._targets = empty_targets
    self._total_sample_count = torch.zeros(1)
    self._curr_sample_count = torch.zeros(1)
def __repr__(self):
    """Debug-friendly representation: the meter's name and current value."""
    summary = {"name": self.name, "value": self.value}
    return repr(summary)
def set_classy_state(self, state):
    """
    Restore the meter state from a checkpoint dict (as produced by
    get_classy_state) when a model run is resumed.

    Args:
        state: dict with keys "name", "num_classes", "scores", "targets",
            "total_sample_count" and "curr_sample_count".

    Raises:
        AssertionError: if the state's meter name or num_classes do not
            match this meter.
    """
    assert (
        self.name == state["name"]
    ), f"State name {state['name']} does not match meter name {self.name}"
    assert self.num_classes == state["num_classes"], (
        f"num_classes of state {state['num_classes']} "
        f"does not match object's num_classes {self.num_classes}"
    )
    # Start from a clean slate, then restore the checkpointed values.
    self.reset()
    self._total_sample_count = state["total_sample_count"].clone()
    self._curr_sample_count = state["curr_sample_count"].clone()
    # Clone for consistency with the counters above, so the meter never
    # aliases tensors owned by the checkpoint dict (the original assigned
    # these by reference while cloning the counters).
    self._scores = state["scores"].clone()
    self._targets = state["targets"].clone()
def get_classy_state(self):
    """
    Return a dict snapshot of everything needed to checkpoint this meter:
    identity (name, num_classes) plus all accumulated buffers and counters.
    """
    state = {}
    state["name"] = self.name
    state["num_classes"] = self.num_classes
    state["scores"] = self._scores
    state["targets"] = self._targets
    state["total_sample_count"] = self._total_sample_count
    state["curr_sample_count"] = self._curr_sample_count
    return state
def verify_target(self, target):
    """
    Verify that the target tensor contains only the values 0 and 1.

    Note: the original docstring claimed {-1, 0, 1}, but the assertion (and
    the binary metrics computed in `value`) only admit {0, 1}; that is the
    contract documented and enforced here.

    Raises:
        AssertionError: if any element of target is not 0 or 1.
    """
    # Element-wise "is 0 or is 1" mask; `|` is the idiomatic logical-or for
    # bool tensors (the original used `+`, which is equivalent for bools).
    is_binary = torch.eq(target, 0) | torch.eq(target, 1)
    assert torch.all(is_binary), "Target values should be either 0 OR 1"
def update(self, model_output: torch.Tensor, target: torch.Tensor):
    """
    Update the meter with one batch of outputs and targets.

    Args:
        model_output: logits, shape [batch, num_classes] (the batch is
            appended row-wise into the [*, num_classes] score buffer).
        target: integer class indices, one-hot encoded below via F.one_hot.
    """
    target = F.one_hot(target, num_classes=self.num_classes)
    self.validate(model_output, target)
    self.verify_target(target)
    # _curr_sample_count is the running total; after this it equals the new
    # required buffer length (old samples + this batch).
    self._curr_sample_count += model_output.shape[0]
    # scores as in logits I think
    curr_scores, curr_targets = self._scores, self._targets
    sample_count_so_far = curr_scores.shape[0]
    # Grow the buffers: allocate new tensors sized to the running count, copy
    # the previous contents back in, then append this batch at the end.
    # NOTE(review): this reallocates and copies every accumulated sample on
    # each update (quadratic over an epoch) — consider keeping a list of
    # batches and concatenating lazily in `value`.
    self._scores = torch.zeros(
        int(self._curr_sample_count[0]), self.num_classes, dtype=torch.float32
    )
    self._targets = torch.zeros(
        int(self._curr_sample_count[0]), self.num_classes, dtype=torch.int8
    )
    if sample_count_so_far > 0:
        self._scores[:sample_count_so_far, :] = curr_scores
        self._targets[:sample_count_so_far, :] = curr_targets
    self._scores[sample_count_so_far:, :] = model_output
    self._targets[sample_count_so_far:, :] = target
    # Drop references to the old buffers promptly.
    del curr_scores, curr_targets
def validate(self, model_output, target):
    """
    Validate that the input to the meter is well formed.

    NOTE(review): every shape assertion in the original body was commented
    out and the method ended in a dead ``a = 1`` statement, i.e. it is a
    no-op in practice. That no-op behavior is preserved here; the dead
    commented-out assertions and pasted debug dumps of example tensors were
    removed. Re-enable shape checks only after confirming ``update`` always
    passes a 2D one-hot target matching model_output's shape.
    """
    # Intentionally no checks — see the note in the docstring.
|
/**
 * Render a driving route on the #map-canvas element from the fixed origin
 * (10.7736594, 106.7004169) to the given destination coordinates.
 * @param {number} lat Destination latitude.
 * @param {number} lng Destination longitude.
 */
function MapRoute(lat, lng) {
    var origin = new google.maps.LatLng(10.7736594, 106.7004169);
    var destination = new google.maps.LatLng(lat, lng);
    var map = new google.maps.Map(document.getElementById('map-canvas'), {
        zoom: 7,
        center: origin
    });
    // Instantiate a directions service and a renderer bound to the map.
    var directionsService = new google.maps.DirectionsService();
    var directionsDisplay = new google.maps.DirectionsRenderer({
        map: map
    });
    // get route from origin to destination
    calculateAndDisplayRoute(directionsService, directionsDisplay, origin, destination);
}
/**
 * Request a driving route between two points and draw it on the renderer;
 * alerts the user if the Directions request fails.
 * @param {google.maps.DirectionsService} directionsService
 * @param {google.maps.DirectionsRenderer} directionsDisplay
 * @param {google.maps.LatLng} pointA Route origin.
 * @param {google.maps.LatLng} pointB Route destination.
 */
function calculateAndDisplayRoute(directionsService, directionsDisplay, pointA, pointB) {
    directionsService.route({
        origin: pointA,
        destination: pointB,
        travelMode: google.maps.TravelMode.DRIVING
    }, function (response, status) {
        // Strict equality: status is a string enum value, avoid coercing ==.
        if (status === google.maps.DirectionsStatus.OK) {
            directionsDisplay.setDirections(response);
        } else {
            window.alert('Directions request failed due to ' + status);
        }
    });
}
/**
 * Initialize the map centered on the business location and drop the
 * origin marker labelled "A".
 */
function initMap() {
    var origin = new google.maps.LatLng(10.7736594, 106.7004169);
    var map = new google.maps.Map(document.getElementById('map-canvas'), {
        center: origin,
        zoom: 16
    });
    // `var` added: the original assigned markerA without a declaration,
    // leaking it as an implicit global (and breaking in strict mode).
    var markerA = new google.maps.Marker({
        position: origin,
        title: "Eyluxlashes",
        label: "A",
        map: map
    });
}
|
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
from aiida.backends.testbase import AiidaTestCase
from aiida.orm.data.frozendict import FrozenDict
from aiida.orm.data.base import Int, Str
class TestFrozenDict(AiidaTestCase):
    """Unit tests for the FrozenDict data class."""

    def test_create(self):
        """An empty FrozenDict can be constructed without raising."""
        FrozenDict(dict={})

    def test_create_invalid(self):
        """Construction from unstored plain values must fail."""
        with self.assertRaises(AssertionError):
            FrozenDict(dict={'a': 5})

    def test_get_value(self):
        """Stored values are retrievable by key."""
        # `data` instead of `input`: the original shadowed the builtin.
        data = {'a': Int(5).store()}
        d = FrozenDict(dict=data)
        self.assertEqual(d['a'], data['a'])

    def test_iterate(self):
        """Iteration yields every stored (key, value) pair."""
        data = {'a': Int(5).store(), 'b': Str('testing').store()}
        d = FrozenDict(dict=data)
        for k, v in d.iteritems():
            self.assertEqual(data[k], v)

    def test_length(self):
        """len() matches the size of the source mapping."""
        data = {'a': Int(5).store(), 'b': Str('testing').store()}
        d = FrozenDict(dict=data)
        self.assertEqual(len(data), len(d))
|
# ==============================================================================
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
'''
attention -- standard attention model
'''
from __future__ import division
from cntk.ops.functions import Function
from ..blocks import _inject_name # helpers
from .. import *
# AttentionModel block
def AttentionModel(attention_dim, attention_span=None, attention_axis=None,
                   init=default_override_or(glorot_uniform()),
                   go_backwards=default_override_or(False),
                   enable_self_stabilization=default_override_or(True), name=''):
    '''
    AttentionModel(attention_dim, attention_span=None, attention_axis=None, init=glorot_uniform(), go_backwards=False, enable_self_stabilization=True, name='')

    Layer factory function to create a function object that implements an attention model
    as described in Bahdanau, et al., "Neural machine translation by jointly learning to align and translate."

    Args:
        attention_dim: dimension the encoder and decoder states are projected
            to before the tanh/softmax attention scoring.
        attention_span: fixed window length over the encoder sequence
            (required — see NotImplementedError below).
        attention_axis: static axis the attention window lives on (required).
        init: initializer for the three Dense projections.
        go_backwards: direction of the PastValueWindow over the encoder.
        enable_self_stabilization: whether to wrap projections in Stabilizer.
        name: optional name for the returned Function.

    Returns:
        A Function (h_enc, h_dec) -> attention-weighted encoder summary.
    '''
    init                      = get_default_override(AttentionModel, init=init)
    go_backwards              = get_default_override(AttentionModel, go_backwards=go_backwards)
    enable_self_stabilization = get_default_override(AttentionModel, enable_self_stabilization=enable_self_stabilization)

    # until CNTK can handle multiple nested dynamic loops, we require fixed windows and fake it
    if attention_span is None or attention_axis is None:
        raise NotImplementedError('AttentionModel currently requires a fixed attention_span and a static attention_axis to be specified')

    # model parameters
    with default_options(bias=False): # all the projections have no bias
        attn_proj_enc   = Stabilizer(enable_self_stabilization=enable_self_stabilization) >> Dense(attention_dim, init=init, input_rank=1) # projects input hidden state, keeping span axes intact
        attn_proj_dec   = Stabilizer(enable_self_stabilization=enable_self_stabilization) >> Dense(attention_dim, init=init, input_rank=1) # projects decoder hidden state, but keeping span and beam-search axes intact
        attn_proj_tanh  = Stabilizer(enable_self_stabilization=enable_self_stabilization) >> Dense(1, init=init, input_rank=1) # projects tanh output, keeping span and beam-search axes intact
    attn_final_stab = Stabilizer(enable_self_stabilization=enable_self_stabilization)

    # attention function
    @Function
    def attention(h_enc, h_dec):
        history_axis = h_dec # we use history_axis wherever we pass this only for the sake of passing its axis
        # TODO: pull this apart so that we can compute the encoder window only once and apply it to multiple decoders
        # --- encoder state window
        (h_enc, h_enc_valid) = PastValueWindow(attention_span, axis=attention_axis, go_backwards=go_backwards)(h_enc).outputs
        h_enc_proj = attn_proj_enc(h_enc)
        # window must be broadcast to every decoder time step
        h_enc_proj = sequence.broadcast_as(h_enc_proj, history_axis)
        h_enc_valid = sequence.broadcast_as(h_enc_valid, history_axis)
        # --- decoder state
        # project decoder hidden state
        h_dec_proj = attn_proj_dec(h_dec)
        # additive (Bahdanau-style) score: tanh of summed projections, then a
        # scalar projection per window position.
        tanh_out = tanh(h_dec_proj + h_enc_proj)  # (attention_span, attention_dim)
        u = attn_proj_tanh(tanh_out)              # (attention_span, 1)
        # (h_enc_valid - 1) is 0 for valid slots and -1 for padding, so padded
        # slots get a large negative logit before the softmax.
        u_masked = u + (h_enc_valid - 1) * 50     # logzero-out the unused elements for the softmax denominator  TODO: use a less arbitrary number than 50
        attention_weights = softmax(u_masked, axis=attention_axis) #, name='attention_weights')
        attention_weights = Label('attention_weights')(attention_weights)
        # now take weighted sum over the encoder state vectors
        # NOTE(review): the weighted sum is taken over the *projected* encoder
        # states (h_enc_proj), not the raw h_enc — confirm this is intended.
        h_att = reduce_sum(element_times(h_enc_proj, attention_weights), axis=attention_axis)
        h_att = attn_final_stab(h_att)
        return h_att

    return _inject_name(attention, name)
|
"""
Python Markdown
===============
Python Markdown converts Markdown to HTML and can be used as a library or
called from the command line.
## Basic usage as a module:
import markdown
md = Markdown()
html = md.convert(your_text_string)
## Basic use from the command line:
markdown source.txt > destination.html
Run "markdown --help" to see more options.
## Extensions
See <http://www.freewisdom.org/projects/python-markdown/> for more
information and instructions on how to extend the functionality of
Python Markdown. Read that before you try modifying this file.
## Authors and License
Started by [Manfred Stienstra](http://www.dwerg.net/). Continued and
maintained by [Yuri Takhteyev](http://www.freewisdom.org), [Waylan
Limberg](http://achinghead.com/) and [Artem Yunusov](http://blog.splyer.com).
Contact: markdown@freewisdom.org
Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
Copyright 200? Django Software Foundation (OrderedDict implementation)
Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
Copyright 2004 Manfred Stienstra (the original version)
License: BSD (see docs/LICENSE for details).
"""
# Package version, also exposed as a tuple for programmatic comparison.
version = "2.0.3"
version_info = (2,0,3, "Final")

import re
import codecs
import sys
import warnings
import logging
from logging import DEBUG, INFO, WARN, ERROR, CRITICAL

"""
CONSTANTS
=============================================================================
"""

"""
Constants you might want to modify
-----------------------------------------------------------------------------
"""

# default logging level for command-line use
COMMAND_LINE_LOGGING_LEVEL = CRITICAL
TAB_LENGTH = 4               # expand tabs to this many spaces
ENABLE_ATTRIBUTES = True     # @id = xyz -> <... id="xyz">
SMART_EMPHASIS = True        # this_or_that does not become this<i>or</i>that
DEFAULT_OUTPUT_FORMAT = 'xhtml1'     # xhtml or html4 output
HTML_REMOVED_TEXT = "[HTML_REMOVED]" # text used instead of HTML in safe mode

# Regex of tag names treated as block-level HTML.
# NOTE(review): this pattern is unanchored at the end and used with .match()
# in isBlockLevel(), so any tag whose *prefix* is a block-level name also
# matches (e.g. "paragraph" matches via "p") — confirm callers only pass
# real parsed tag names.
BLOCK_LEVEL_ELEMENTS = re.compile("p|div|h[1-6]|blockquote|pre|table|dl|ol|ul"
                                  "|script|noscript|form|fieldset|iframe|math"
                                  "|ins|del|hr|hr/|style|li|dt|dd|thead|tbody"
                                  "|tr|th|td")
DOC_TAG = "div"     # Element used to wrap document - later removed

# Placeholders: control characters that cannot appear in user text (they are
# stripped from the source in convert()) bracket stashed inline content.
STX = u'\u0002'  # Use STX ("Start of text") for start-of-placeholder
ETX = u'\u0003'  # Use ETX ("End of text") for end-of-placeholder
INLINE_PLACEHOLDER_PREFIX = STX+"klzzwxh:"
INLINE_PLACEHOLDER = INLINE_PLACEHOLDER_PREFIX + "%s" + ETX
AMP_SUBSTITUTE = STX+"amp"+ETX

"""
Constants you probably do not need to change
-----------------------------------------------------------------------------
"""

RTL_BIDI_RANGES = ( (u'\u0590', u'\u07FF'),
                    # Hebrew (0590-05FF), Arabic (0600-06FF),
                    # Syriac (0700-074F), Arabic supplement (0750-077F),
                    # Thaana (0780-07BF), Nko (07C0-07FF).
                    (u'\u2D30', u'\u2D7F'), # Tifinagh
                  )

"""
AUXILIARY GLOBAL FUNCTIONS
=============================================================================
"""
def message(level, text):
    """ A wrapper method for logging debug messages.

    If the 'MARKDOWN' logger has handlers, log through it; otherwise raise
    for serious levels (> WARN) and emit a MarkdownWarning for the rest.
    """
    logger = logging.getLogger('MARKDOWN')
    if logger.handlers:
        # The logger is configured
        logger.log(level, text)
        if level > WARN:
            # NOTE(review): exits with status 0 (success) even though the
            # message level indicates an error — confirm this is intended
            # for command-line use before changing it.
            sys.exit(0)
    elif level > WARN:
        # Python 2 raise syntax; this module targets Python 2 throughout.
        raise MarkdownException, text
    else:
        warnings.warn(text, MarkdownWarning)
def isBlockLevel(tag):
    """Check if the tag is a block level HTML tag.

    Returns a truthy match object or None.
    """
    # NOTE(review): re.match anchors only at the start and the pattern has no
    # end anchor, so a tag whose prefix is a block-level name also matches
    # (e.g. "paragraph" matches via "p") — confirm callers pass parsed tag
    # names only.
    return BLOCK_LEVEL_ELEMENTS.match(tag)
"""
MISC AUXILIARY CLASSES
=============================================================================
"""
class AtomicString(unicode):
    """A string that inline processing must leave untouched."""
    pass
class MarkdownException(Exception):
    """Exception raised for fatal Markdown processing errors."""
    pass
class MarkdownWarning(Warning):
    """Warning category emitted for non-fatal Markdown issues."""
    pass
"""
OVERALL DESIGN
=============================================================================
Markdown processing takes place in four steps:
1. A bunch of "preprocessors" munge the input text.
2. BlockParser() parses the high-level structural elements of the
pre-processed text into an ElementTree.
3. A bunch of "treeprocessors" are run against the ElementTree. One such
treeprocessor runs InlinePatterns against the ElementTree, detecting inline
markup.
4. Some post-processors are run against the text after the ElementTree has
been serialized into text.
5. The output is written to a string.
Those steps are put together by the Markdown() class.
"""
import preprocessors
import blockprocessors
import treeprocessors
import inlinepatterns
import postprocessors
import blockparser
import etree_loader
import odict
# Extensions should use "markdown.etree" instead of "etree" (or do `from
# markdown import etree`). Do not import it by yourself.
etree = etree_loader.importETree()
# Adds the ability to output html4
import html4
class Markdown:
    """Convert Markdown to HTML.

    Holds the pipeline of preprocessors, block processors, inline patterns,
    tree processors and postprocessors; ``convert`` runs text through them.
    Note: this module is Python 2 code (``unicode``, ``basestring``,
    ``except X, e`` syntax).
    """

    def __init__(self,
                 extensions=[],
                 extension_configs={},
                 safe_mode = False,
                 output_format=DEFAULT_OUTPUT_FORMAT):
        """
        Creates a new Markdown instance.

        Keyword arguments:

        * extensions: A list of extensions.
           If they are of type string, the module mdx_name.py will be loaded.
           If they are a subclass of markdown.Extension, they will be used
           as-is.
        * extension-configs: Configuration setting for extensions.
        * safe_mode: Disallow raw html. One of "remove", "replace" or "escape".
        * output_format: Format of output. Supported formats are:
            * "xhtml1": Outputs XHTML 1.x. Default.
            * "xhtml": Outputs latest supported version of XHTML (currently XHTML 1.1).
            * "html4": Outputs HTML 4
            * "html": Outputs latest supported version of HTML (currently HTML 4).
            Note that it is suggested that the more specific formats ("xhtml1"
            and "html4") be used as "xhtml" or "html" may change in the future
            if it makes sense at that time.
        """
        # NOTE(review): extensions=[] and extension_configs={} are mutable
        # default arguments; they are only read here, but confirm no caller
        # or extension mutates them.
        self.safeMode = safe_mode
        self.registeredExtensions = []
        self.docType = ""
        self.stripTopLevelTags = True

        # Preprocessors: run over the raw text lines before block parsing.
        self.preprocessors = odict.OrderedDict()
        self.preprocessors["html_block"] = \
                preprocessors.HtmlBlockPreprocessor(self)
        self.preprocessors["reference"] = \
                preprocessors.ReferencePreprocessor(self)
        # footnote preprocessor will be inserted with "<reference"

        # Block processors - ran by the parser
        self.parser = blockparser.BlockParser()
        self.parser.blockprocessors['empty'] = \
                blockprocessors.EmptyBlockProcessor(self.parser)
        self.parser.blockprocessors['indent'] = \
                blockprocessors.ListIndentProcessor(self.parser)
        self.parser.blockprocessors['code'] = \
                blockprocessors.CodeBlockProcessor(self.parser)
        self.parser.blockprocessors['hashheader'] = \
                blockprocessors.HashHeaderProcessor(self.parser)
        self.parser.blockprocessors['setextheader'] = \
                blockprocessors.SetextHeaderProcessor(self.parser)
        self.parser.blockprocessors['hr'] = \
                blockprocessors.HRProcessor(self.parser)
        self.parser.blockprocessors['olist'] = \
                blockprocessors.OListProcessor(self.parser)
        self.parser.blockprocessors['ulist'] = \
                blockprocessors.UListProcessor(self.parser)
        self.parser.blockprocessors['quote'] = \
                blockprocessors.BlockQuoteProcessor(self.parser)
        self.parser.blockprocessors['paragraph'] = \
                blockprocessors.ParagraphProcessor(self.parser)

        #self.prePatterns = []

        # Inline patterns - Run on the tree
        self.inlinePatterns = odict.OrderedDict()
        self.inlinePatterns["backtick"] = \
                inlinepatterns.BacktickPattern(inlinepatterns.BACKTICK_RE)
        self.inlinePatterns["escape"] = \
                inlinepatterns.SimpleTextPattern(inlinepatterns.ESCAPE_RE)
        self.inlinePatterns["reference"] = \
                inlinepatterns.ReferencePattern(inlinepatterns.REFERENCE_RE, self)
        self.inlinePatterns["link"] = \
                inlinepatterns.LinkPattern(inlinepatterns.LINK_RE, self)
        self.inlinePatterns["image_link"] = \
                inlinepatterns.ImagePattern(inlinepatterns.IMAGE_LINK_RE, self)
        self.inlinePatterns["image_reference"] = \
                inlinepatterns.ImageReferencePattern(inlinepatterns.IMAGE_REFERENCE_RE, self)
        self.inlinePatterns["autolink"] = \
                inlinepatterns.AutolinkPattern(inlinepatterns.AUTOLINK_RE, self)
        self.inlinePatterns["automail"] = \
                inlinepatterns.AutomailPattern(inlinepatterns.AUTOMAIL_RE, self)
        self.inlinePatterns["linebreak2"] = \
                inlinepatterns.SubstituteTagPattern(inlinepatterns.LINE_BREAK_2_RE, 'br')
        self.inlinePatterns["linebreak"] = \
                inlinepatterns.SubstituteTagPattern(inlinepatterns.LINE_BREAK_RE, 'br')
        self.inlinePatterns["html"] = \
                inlinepatterns.HtmlPattern(inlinepatterns.HTML_RE, self)
        self.inlinePatterns["entity"] = \
                inlinepatterns.HtmlPattern(inlinepatterns.ENTITY_RE, self)
        self.inlinePatterns["not_strong"] = \
                inlinepatterns.SimpleTextPattern(inlinepatterns.NOT_STRONG_RE)
        self.inlinePatterns["strong_em"] = \
                inlinepatterns.DoubleTagPattern(inlinepatterns.STRONG_EM_RE, 'strong,em')
        self.inlinePatterns["strong"] = \
                inlinepatterns.SimpleTagPattern(inlinepatterns.STRONG_RE, 'strong')
        self.inlinePatterns["emphasis"] = \
                inlinepatterns.SimpleTagPattern(inlinepatterns.EMPHASIS_RE, 'em')
        self.inlinePatterns["emphasis2"] = \
                inlinepatterns.SimpleTagPattern(inlinepatterns.EMPHASIS_2_RE, 'em')
        # The order of the handlers matters!!!

        # Tree processors - run once we have a basic parse.
        self.treeprocessors = odict.OrderedDict()
        self.treeprocessors["inline"] = treeprocessors.InlineProcessor(self)
        self.treeprocessors["prettify"] = \
                treeprocessors.PrettifyTreeprocessor(self)

        # Postprocessors - finishing touches.
        self.postprocessors = odict.OrderedDict()
        self.postprocessors["raw_html"] = \
                postprocessors.RawHtmlPostprocessor(self)
        self.postprocessors["amp_substitute"] = \
                postprocessors.AndSubstitutePostprocessor()
        # footnote postprocessor will be inserted with ">amp_substitute"

        # Map format keys to serializers
        self.output_formats = {
            'html'  : html4.to_html_string,
            'html4' : html4.to_html_string,
            'xhtml' : etree.tostring,
            'xhtml1': etree.tostring,
        }

        self.references = {}
        self.htmlStash = preprocessors.HtmlStash()
        self.registerExtensions(extensions = extensions,
                                configs = extension_configs)
        self.set_output_format(output_format)
        self.reset()

    def registerExtensions(self, extensions, configs):
        """
        Register extensions with this instance of Markdown.

        Keyword aurguments:

        * extensions: A list of extensions, which can either
           be strings or objects.  See the docstring on Markdown.
        * configs: A dictionary mapping module names to config options.
        """
        for ext in extensions:
            # Strings are loaded into Extension objects first (py2 basestring).
            if isinstance(ext, basestring):
                ext = load_extension(ext, configs.get(ext, []))
            if isinstance(ext, Extension):
                try:
                    ext.extendMarkdown(self, globals())
                except NotImplementedError, e:
                    message(ERROR, e)
            else:
                message(ERROR, 'Extension "%s.%s" must be of type: "markdown.Extension".' \
                    % (ext.__class__.__module__, ext.__class__.__name__))

    def registerExtension(self, extension):
        """ This gets called by the extension """
        self.registeredExtensions.append(extension)

    def reset(self):
        """
        Resets all state variables so that we can start with a new text.
        """
        self.htmlStash.reset()
        self.references.clear()

        # Extensions keep their own state; let each one reset itself too.
        for extension in self.registeredExtensions:
            extension.reset()

    def set_output_format(self, format):
        """ Set the output format for the class instance. """
        try:
            self.serializer = self.output_formats[format.lower()]
        except KeyError:
            message(CRITICAL, 'Invalid Output Format: "%s". Use one of %s.' \
                       % (format, self.output_formats.keys()))

    def convert(self, source):
        """
        Convert markdown to serialized XHTML or HTML.

        Keyword arguments:

        * source: Source text as a Unicode string.
        """
        # Fixup the source text
        if not source.strip():
            return u""  # a blank unicode string
        try:
            source = unicode(source)
        except UnicodeDecodeError:
            message(CRITICAL, 'UnicodeDecodeError: Markdown only accepts unicode or ascii input.')
            return u""

        # STX/ETX are reserved as placeholder delimiters, so strip any that
        # happen to be in the input; normalize line endings and whitespace.
        source = source.replace(STX, "").replace(ETX, "")
        source = source.replace("\r\n", "\n").replace("\r", "\n") + "\n\n"
        source = re.sub(r'\n\s+\n', '\n\n', source)
        source = source.expandtabs(TAB_LENGTH)

        # Split into lines and run the line preprocessors.
        self.lines = source.split("\n")
        for prep in self.preprocessors.values():
            self.lines = prep.run(self.lines)

        # Parse the high-level elements.
        root = self.parser.parseDocument(self.lines).getroot()

        # Run the tree-processors
        for treeprocessor in self.treeprocessors.values():
            newRoot = treeprocessor.run(root)
            if newRoot:
                root = newRoot

        # Serialize _properly_.  Strip top-level tags.
        output, length = codecs.utf_8_decode(self.serializer(root, encoding="utf-8"))
        if self.stripTopLevelTags:
            try:
                start = output.index('<%s>'%DOC_TAG)+len(DOC_TAG)+2
                end = output.rindex('</%s>'%DOC_TAG)
                output = output[start:end].strip()
            except ValueError:
                if output.strip().endswith('<%s />'%DOC_TAG):
                    # We have an empty document
                    output = ''
                else:
                    # We have a serious problem
                    message(CRITICAL, 'Failed to strip top level tags.')

        # Run the text post-processors
        for pp in self.postprocessors.values():
            output = pp.run(output)

        return output.strip()

    def convertFile(self, input=None, output=None, encoding=None):
        """Converts a markdown file and returns the HTML as a unicode string.

        Decodes the file using the provided encoding (defaults to utf-8),
        passes the file content to markdown, and outputs the html to either
        the provided stream or the file with provided name, using the same
        encoding as the source file.

        **Note:** This is the only place that decoding and encoding of unicode
        takes place in Python-Markdown.  (All other code is unicode-in /
        unicode-out.)

        Keyword arguments:

        * input: Name of source text file.
        * output: Name of output file. Writes to stdout if `None`.
        * encoding: Encoding of input and output files. Defaults to utf-8.
        """

        encoding = encoding or "utf-8"

        # Read the source
        input_file = codecs.open(input, mode="r", encoding=encoding)
        text = input_file.read()
        input_file.close()
        text = text.lstrip(u'\ufeff') # remove the byte-order mark

        # Convert
        html = self.convert(text)

        # Write to file or stdout
        if isinstance(output, (str, unicode)):
            output_file = codecs.open(output, "w", encoding=encoding)
            output_file.write(html)
            output_file.close()
        else:
            output.write(html.encode(encoding))
"""
Extensions
-----------------------------------------------------------------------------
"""
class Extension:
    """ Base class for extensions to subclass. """

    def __init__(self, configs=None):
        """Create an instance of an Extension.

        Keyword arguments:

        * configs: A dict of configuration settings used by an Extension,
          mapping each key to a ``[value, description]`` pair.
        """
        # The original used `configs={}` — a mutable default argument shared
        # across every Extension created without configs and mutated via
        # setConfig. Use None as the sentinel and build a fresh dict instead.
        if configs is None:
            configs = {}
        self.config = configs

    def getConfig(self, key):
        """ Return a setting for the given key or an empty string. """
        if key in self.config:
            return self.config[key][0]
        else:
            return ""

    def getConfigInfo(self):
        """ Return all config settings as a list of (key, description) tuples. """
        return [(key, self.config[key][1]) for key in self.config.keys()]

    def setConfig(self, key, value):
        """ Set a config setting for `key` with the given `value`. """
        self.config[key][0] = value

    def extendMarkdown(self, md, md_globals):
        """
        Add the various proccesors and patterns to the Markdown Instance.

        This method must be overriden by every extension.

        Keyword arguments:

        * md: The Markdown instance.
        * md_globals: Global variables in the markdown module namespace.
        """
        # Parenthesized raise (valid on py2 and py3, unlike the original
        # py2-only statement form); the original message also concatenated
        # to "extendMarkdownmethod." — the missing space is fixed here.
        raise NotImplementedError(
            'Extension "%s.%s" must define an "extendMarkdown" method.'
            % (self.__class__.__module__, self.__class__.__name__))
def load_extension(ext_name, configs = []):
    """Load extension by name, then return the module.

    The extension name may contain arguments as part of the string in the
    following format: "extname(key1=value1,key2=value2)"

    Returns the result of the module's makeExtension(), or None when the
    module cannot be imported (and, implicitly, when makeExtension fails).
    """
    # Parse extensions config params (ignore the order).  The mutable default
    # `configs=[]` is safe here: it is immediately copied into a fresh dict.
    configs = dict(configs)
    pos = ext_name.find("(") # find the first "("
    if pos > 0:
        ext_args = ext_name[pos+1:-1]
        ext_name = ext_name[:pos]
        pairs = [x.split("=") for x in ext_args.split(",")]
        configs.update([(x.strip(), y.strip()) for (x, y) in pairs])

    # Setup the module names
    ext_module = 'markdown.extensions'
    module_name_new_style = '.'.join([ext_module, ext_name])
    module_name_old_style = '_'.join(['mdx', ext_name])

    # Try loading the extention first from one place, then another
    try: # New style (markdown.extensons.<extension>)
        module = __import__(module_name_new_style, {}, {}, [ext_module])
    except ImportError:
        try: # Old style (mdx.<extension>)
            module = __import__(module_name_old_style)
        except ImportError:
            message(WARN, "Failed loading extension '%s' from '%s' or '%s'"
                % (ext_name, module_name_new_style, module_name_old_style))
            # Return None so we don't try to initiate none-existant extension
            return None

    # If the module is loaded successfully, we expect it to define a
    # function called makeExtension()
    try:
        return module.makeExtension(configs.items())
    except AttributeError:
        # NOTE(review): this also swallows AttributeErrors raised *inside*
        # makeExtension, and the function then falls through returning None.
        message(CRITICAL, "Failed to initiate extension '%s'" % ext_name)
def load_extensions(ext_names):
    """Load every extension named in ext_names, skipping any that fail to load."""
    candidates = (load_extension(name) for name in ext_names)
    # load_extension returns None on failure; keep only real extensions.
    return [ext for ext in candidates if ext]
"""
EXPORTED FUNCTIONS
=============================================================================
Those are the two functions we really mean to export: markdown() and
markdownFromFile().
"""
def markdown(text,
             extensions = [],
             safe_mode = False,
             output_format = DEFAULT_OUTPUT_FORMAT):
    """Convert a markdown string to HTML and return HTML as a unicode string.

    Shortcut for the most basic use of the `Markdown` class: builds an
    instance, loads the requested extensions, and runs the parser once.

    Keyword arguments:

    * text: Markdown formatted text as Unicode or ASCII string.
    * extensions: A list of extensions or extension names (may contain config args).
    * safe_mode: Disallow raw html. One of "remove", "replace" or "escape".
    * output_format: One of "xhtml1" (default), "xhtml", "html4" or "html".
      The specific formats ("xhtml1", "html4") are recommended, as "xhtml"
      and "html" may change meaning in the future.

    Returns: An HTML document as a string.
    """
    converter = Markdown(extensions=load_extensions(extensions),
                         safe_mode=safe_mode,
                         output_format=output_format)
    return converter.convert(text)
def markdownFromFile(input = None,
                     output = None,
                     extensions = [],
                     encoding = None,
                     safe_mode = False,
                     output_format = DEFAULT_OUTPUT_FORMAT):
    """Read markdown text from a file and write the rendered HTML to a file or stream."""
    converter = Markdown(extensions=load_extensions(extensions),
                         safe_mode=safe_mode,
                         output_format=output_format)
    converter.convertFile(input, output, encoding)
|
r"""
Subsets
The set of subsets of a finite set. The set can be given as a list or a Set
or else as an integer `n` which encodes the set `\{1,2,...,n\}`.
See :class:`Subsets` for more information and examples.
AUTHORS:
- Mike Hansen: initial version
- Florent Hivert (2009/02/06): doc improvements + new methods
"""
#*****************************************************************************
# Copyright (C) 2007 Mike Hansen <mhansen@gmail.com>,
# 2014 Vincent Delecroix <20100.delecroix@gmail.com>,
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
#*****************************************************************************
from __future__ import print_function, absolute_import
import six
from six.moves import range
import sage.misc.prandom as rnd
import itertools
from sage.categories.sets_cat import EmptySetError, Sets
from sage.categories.enumerated_sets import EnumeratedSets
from sage.categories.finite_enumerated_sets import FiniteEnumeratedSets
from sage.structure.parent import Parent
from sage.structure.element import Element
from sage.sets.set import Set, Set_object_enumerated
from sage.arith.all import binomial
from sage.rings.integer_ring import ZZ
from sage.rings.integer import Integer
from . import combination
ZZ_0 = ZZ.zero()
def Subsets(s, k=None, submultiset=False):
    r"""
    Return the combinatorial class of the subsets of the finite set
    ``s``. The set can be given as a list, Set or any iterable
    convertible to a set. Alternatively, a non-negative integer `n`
    can be provided in place of ``s``; in this case, the result is
    the combinatorial class of the subsets of the set
    `\{1,2,\dots,n\}` (i.e. of the Sage ``range(1,n+1)``).
    A second optional parameter ``k`` can be given. In this case,
    ``Subsets`` returns the combinatorial class of subsets of ``s``
    of size ``k``.
    .. WARNING::
        The subsets are returned as Sets. Do not assume that
        these Sets are ordered; they often are not!
        (E.g., ``Subsets(10).list()[619]`` returns
        ``{10, 4, 5, 6, 7}`` on my system.)
        See :class:`SubsetsSorted` for a similar class which
        returns the subsets as sorted tuples.
    Finally the option ``submultiset`` allows one to deal with sets with
    repeated elements, usually called multisets. The method then
    returns the class of all multisets in which every element is
    contained at most as often as it is contained in ``s``. These
    multisets are encoded as lists.
    EXAMPLES::
        sage: S = Subsets([1, 2, 3]); S
        Subsets of {1, 2, 3}
        sage: S.cardinality()
        8
        sage: S.first()
        {}
        sage: S.last()
        {1, 2, 3}
        sage: S.random_element() # random
        {2}
        sage: S.list()
        [{}, {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
    Here is the same example where the set is given as an integer::
        sage: S = Subsets(3)
        sage: S.list()
        [{}, {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
    We demonstrate various the effect of the various options::
        sage: S = Subsets(3, 2); S
        Subsets of {1, 2, 3} of size 2
        sage: S.list()
        [{1, 2}, {1, 3}, {2, 3}]
        sage: S = Subsets([1, 2, 2], submultiset=True); S
        SubMultiset of [1, 2, 2]
        sage: S.list()
        [[], [1], [2], [1, 2], [2, 2], [1, 2, 2]]
        sage: S = Subsets([1, 2, 2, 3], 3, submultiset=True); S
        SubMultiset of [1, 2, 2, 3] of size 3
        sage: S.list()
        [[1, 2, 2], [1, 2, 3], [2, 2, 3]]
        sage: S = Subsets(['a','b','a','b'], 2, submultiset=True); S.list()
        [['a', 'a'], ['a', 'b'], ['b', 'b']]
    And it is possible to play with subsets of subsets::
        sage: S = Subsets(3)
        sage: S2 = Subsets(S); S2
        Subsets of Subsets of {1, 2, 3}
        sage: S2.cardinality()
        256
        sage: it = iter(S2)
        sage: [next(it) for _ in range(8)]
        [{}, {{}}, {{1}}, {{2}}, {{3}}, {{1, 2}}, {{1, 3}}, {{2, 3}}]
        sage: S2.random_element() # random
        {{2}, {1, 2, 3}, {}}
        sage: [S2.unrank(k) for k in range(256)] == S2.list()
        True
        sage: S3 = Subsets(S2)
        sage: S3.cardinality()
        115792089237316195423570985008687907853269984665640564039457584007913129639936
        sage: S3.unrank(14123091480)
        {{{1, 3}, {1, 2, 3}, {2}, {1}},
         {{2}, {1, 2, 3}, {}, {1, 2}},
         {},
         {{2}, {1, 2, 3}, {}, {3}, {1, 2}},
         {{1, 2, 3}, {}, {1}}, {{2}, {2, 3}, {}, {1, 2}}}
        sage: T = Subsets(S2, 10)
        sage: T.cardinality()
        278826214642518400
        sage: T.unrank(1441231049)
        {{{3}, {1, 2}, {}, {2, 3}, {1}, {1, 3}, ..., {{2, 3}, {}}, {{}}}
    """
    # Normalize the size argument to a Sage Integer if it was supplied.
    if k is not None:
        k = Integer(k)
    # An integer n stands for the ground set {1, ..., n}.
    if isinstance(s, (int, Integer)):
        if s < 0:
            raise ValueError("s must be non-negative")
        from sage.sets.integer_range import IntegerRange
        s = IntegerRange(1, s + 1)
    # Dispatch on the two boolean axes: multiset or not, fixed size or not.
    if submultiset:
        return SubMultiset_s(s) if k is None else SubMultiset_sk(s, k)
    return Subsets_s(s) if k is None else Subsets_sk(s, k)
class Subsets_s(Parent):
    r"""
    Subsets of a given set.
    EXAMPLES::
        sage: S = Subsets(4); S
        Subsets of {1, 2, 3, 4}
        sage: S.cardinality()
        16
        sage: Subsets(4).list()
        [{}, {1}, {2}, {3}, {4},
         {1, 2}, {1, 3}, {1, 4}, {2, 3}, {2, 4}, {3, 4},
         {1, 2, 3}, {1, 2, 4}, {1, 3, 4}, {2, 3, 4},
         {1, 2, 3, 4}]
        sage: S = Subsets(Subsets(Subsets(GF(3)))); S
        Subsets of Subsets of Subsets of Finite Field of size 3
        sage: S.cardinality()
        115792089237316195423570985008687907853269984665640564039457584007913129639936
        sage: S.unrank(3149254230)
        {{{1, 2}, {0, 1, 2}, {0, 2}, {0, 1}},
         {{1, 2}, {}, {0, 2}, {1}, {0, 1, 2}, {2}},
         {{1, 2}, {0}}, {{1, 2}, {0, 1}, {0, 1, 2}, {1}},
         {{0, 2}, {1}}}
    """
    # TODO: Set_object_enumerated does not inherit from Element... so we set
    # directly element_class as Set_object_enumerated
    # (see also below the failed test in __init__)
    element_class = Set_object_enumerated
    def __init__(self, s):
        """
        TESTS::
            sage: s = Subsets(Set([1]))
            sage: e = s.first()
            sage: isinstance(e, s.element_class)
            True
        In the following "_test_elements" is temporarily disabled
        until :class:`sage.sets.set.Set_object_enumerated` objects
        pass the category tests::
            sage: S = Subsets([1,2,3])
            sage: TestSuite(S).run(skip=["_test_elements"])
            sage: S = sage.sets.set.Set_object_enumerated([1,2])
            sage: TestSuite(S).run() # todo: not implemented
        """
        Parent.__init__(self, category=EnumeratedSets().Finite())
        # Plain iterables (not already enumerated sets) are wrapped in a
        # FiniteEnumeratedSet; duplicates are removed first so that the
        # ground set really is a set.
        if s not in EnumeratedSets():
            from sage.misc.misc import uniq
            from sage.sets.finite_enumerated_set import FiniteEnumeratedSet
            s = list(s)
            us = uniq(s)
            if len(us) == len(s):
                s = FiniteEnumeratedSet(s)
            else:
                s = FiniteEnumeratedSet(us)
        self._s = s
    @property
    def _ls(self):
        r"""
        The list of elements of the underlying set.
        We try as much as possible to *not* use it.
        TESTS::
            sage: S = Subsets([1,2,3,4])
            sage: S._ls
            [1, 2, 3, 4]
        """
        return self._s.list()
    def underlying_set(self):
        r"""
        Return the set of elements.
        EXAMPLES::
            sage: Subsets(GF(13)).underlying_set()
            {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}
        """
        return self.element_class(self._s)
    def __eq__(self, other):
        r"""
        Equality test
        TESTS::
            sage: Subsets([0,1,2]) == Subsets([1,2,3])
            False
            sage: Subsets([0,1,2]) == Subsets([0,1,2])
            True
            sage: Subsets([0,1,2]) == Subsets([0,1,2],2)
            False
        """
        # Exact class comparison: a Subsets_sk with the same ground set is
        # deliberately considered different (see the third doctest).
        if self.__class__ != other.__class__:
            return False
        return self._s == other._s
    def __ne__(self, other):
        r"""
        Difference test
        TESTS::
            sage: Subsets([0,1,2]) != Subsets([1,2,3])
            True
            sage: Subsets([0,1,2]) != Subsets([0,1,2])
            False
            sage: Subsets([0,1,2]) != Subsets([0,1,2],2)
            True
        """
        return not self == other
    def __hash__(self):
        """
        Return the hash of ``self``.
        TESTS::
            sage: hash(Subsets([0,1,2])) == hash(Subsets([1,2,3]))
            False
            sage: hash(Subsets([0,1,2])) == hash(Subsets([0,1,2]))
            True
            sage: hash(Subsets([0,1,2])) == hash(Subsets([0,1,2],2))
            False
        """
        return hash(self._s)
    def _repr_(self):
        """
        TESTS::
            sage: repr(Subsets([1,2,3])) #indirect doctest
            'Subsets of {1, 2, 3}'
        """
        return "Subsets of {}".format(self._s)
    def __contains__(self, value):
        """
        TESTS::
            sage: S = Subsets([1,2,3])
            sage: Set([1,2]) in S
            True
            sage: Set([1,4]) in S
            False
            sage: Set([]) in S
            True
            sage: 2 in S
            False
        """
        # Only set-like values can be subsets; then membership reduces to
        # element-wise containment in the ground set.
        if value not in Sets():
            return False
        return all(v in self._s for v in value)
    def cardinality(self):
        r"""
        Return the number of subsets of the set ``s``.
        This is given by `2^{|s|}`.
        EXAMPLES::
            sage: Subsets(Set([1,2,3])).cardinality()
            8
            sage: Subsets([1,2,3,3]).cardinality()
            8
            sage: Subsets(3).cardinality()
            8
        """
        # 2^n computed as a bit shift on a Sage Integer.
        return Integer(1) << self._s.cardinality()
    __len__ = cardinality
    def first(self):
        """
        Returns the first subset of ``s``. Since we aren't restricted to
        subsets of a certain size, this is always the empty set.
        EXAMPLES::
            sage: Subsets([1,2,3]).first()
            {}
            sage: Subsets(3).first()
            {}
        """
        return self.element_class([])
    def last(self):
        """
        Return the last subset of ``s``. Since we aren't restricted to
        subsets of a certain size, this is always the set ``s`` itself.
        EXAMPLES::
            sage: Subsets([1,2,3]).last()
            {1, 2, 3}
            sage: Subsets(3).last()
            {1, 2, 3}
        """
        return self.element_class(self._s)
    def __iter__(self):
        """
        Iterate through the subsets of ``s``.
        EXAMPLES::
            sage: [sub for sub in Subsets(Set([1,2,3]))]
            [{}, {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
            sage: [sub for sub in Subsets(3)]
            [{}, {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
            sage: [sub for sub in Subsets([1,2,3,3])]
            [{}, {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
        """
        # Enumerate by increasing subset size, delegating each size k to the
        # fixed-size iterator; this fixes the rank order used by rank/unrank.
        k = ZZ_0
        while k <= self._s.cardinality():
            for ss in Subsets_sk(self._s, k)._fast_iterator():
                yield self.element_class(ss)
            k += 1
    def random_element(self):
        """
        Return a random element of the class of subsets of ``s`` (in other
        words, a random subset of ``s``).
        EXAMPLES::
            sage: Subsets(3).random_element() # random
            {2}
            sage: Subsets([4,5,6]).random_element() # random
            {5}
            sage: S = Subsets(Subsets(Subsets([0,1,2])))
            sage: S.cardinality()
            115792089237316195423570985008687907853269984665640564039457584007913129639936
            sage: s = S.random_element()
            sage: s # random
            {{{1, 2}, {2}, {0}, {1}}, {{1, 2}, {0, 1, 2}, {0, 2}, {0}, {0, 1}}, ..., {{1, 2}, {2}, {1}}, {{2}, {0, 2}, {}, {1}}}
            sage: s in S
            True
        """
        # A uniformly random rank gives a uniformly random subset.
        k = ZZ.random_element(0, self.cardinality())
        return self.unrank(k)
    def rank(self, sub):
        """
        Return the rank of ``sub`` as a subset of ``s``.
        EXAMPLES::
            sage: Subsets(3).rank([])
            0
            sage: Subsets(3).rank([1,2])
            4
            sage: Subsets(3).rank([1,2,3])
            7
            sage: Subsets(3).rank([2,3,4])
            Traceback (most recent call last):
            ...
            ValueError: {2, 3, 4} is not a subset of {1, 2, 3}
        """
        # Coerce list-like input to a Set, refusing repeated elements.
        if sub not in Sets():
            ssub = Set(sub)
            if len(sub) != len(ssub):
                raise ValueError("repeated elements in {}".format(sub))
            sub = ssub
        try:
            index_list = sorted(self._s.rank(x) for x in sub)
        except (ValueError,IndexError):
            raise ValueError("{} is not a subset of {}".format(
                Set(sub), self._s))
        # Rank = (number of subsets strictly smaller in size)
        #        + (rank of this combination among size-|sub| subsets).
        n = self._s.cardinality()
        r = sum(binomial(n,i) for i in range(len(index_list)))
        return r + combination.rank(index_list,n)
    def unrank(self, r):
        """
        Return the subset of ``s`` that has rank ``k``.
        EXAMPLES::
            sage: Subsets(3).unrank(0)
            {}
            sage: Subsets([2,4,5]).unrank(1)
            {2}
            sage: Subsets([1,2,3]).unrank(257)
            Traceback (most recent call last):
            ...
            IndexError: index out of range
        """
        r = Integer(r)
        if r >= self.cardinality() or r < 0:
            raise IndexError("index out of range")
        else:
            # Peel off whole size classes (binomial(n, k) subsets each)
            # until r falls inside the size-k class, then delegate to the
            # combination unranking.
            k = ZZ_0
            n = self._s.cardinality()
            bin = Integer(1)
            while r >= bin:
                r -= bin
                k += 1
                bin = binomial(n,k)
            return self.element_class([self._s.unrank(i) for i in combination.from_rank(r, n, k)])
    def __call__(self, el):
        r"""
        Workaround for returning non elements.
        See the extensive documentation in
        :meth:`sage.sets.finite_enumerated_set.FiniteEnumeratedSet.__call__`.
        TESTS::
            sage: Subsets(['a','b','c'])(['a','b']) # indirect doctest
            {'a', 'b'}
        """
        if not isinstance(el, Element):
            return self._element_constructor_(el)
        else:
            return Parent.__call__(self, el)
    def _element_constructor_(self,X):
        """
        TESTS::
            sage: S3 = Subsets(3); S3([1,2]) #indirect doctest
            {1, 2}
            sage: S3([0,1,2])
            Traceback (most recent call last):
            ...
            ValueError: {0, 1, 2} not in Subsets of {1, 2, 3}
        """
        e = self.element_class(X)
        if e not in self:
            raise ValueError("{} not in {}".format(e,self))
        return e
    def an_element(self):
        """
        Returns an example of subset.
        EXAMPLES::
            sage: Subsets(0).an_element()
            {}
            sage: Subsets(3).an_element()
            {1, 2}
            sage: Subsets([2,4,5]).an_element()
            {2, 4}
        """
        # Pick the middle rank so the example is neither first() nor last().
        return self.unrank(self.cardinality() // 2)
class Subsets_sk(Subsets_s):
    r"""
    Subsets of fixed size of a set.
    EXAMPLES::
        sage: S = Subsets([0,1,2,5,7], 3); S
        Subsets of {0, 1, 2, 5, 7} of size 3
        sage: S.cardinality()
        10
        sage: S.first(), S.last()
        ({0, 1, 2}, {2, 5, 7})
        sage: S.random_element() # random
        {0, 5, 7}
        sage: S([0,2,7])
        {0, 2, 7}
        sage: S([0,3,5])
        Traceback (most recent call last):
        ...
        ValueError: {0, 3, 5} not in Subsets of {0, 1, 2, 5, 7} of size 3
        sage: S([0])
        Traceback (most recent call last):
        ...
        ValueError: {0} not in Subsets of {0, 1, 2, 5, 7} of size 3
    """
    def __init__(self, s, k):
        """
        TESTS::
            sage: s = Subsets(Set([1]))
            sage: e = s.first()
            sage: isinstance(e, s.element_class)
            True
        In the following "_test_elements" is temporarily disabled
        until :class:`sage.sets.set.Set_object_enumerated` objects
        pass the category tests::
            sage: S = Subsets(3,2)
            sage: TestSuite(S).run(skip=["_test_elements"])
        """
        Subsets_s.__init__(self, s)
        # The fixed subset size; validated to be non-negative here once.
        self._k = Integer(k)
        if self._k < 0:
            raise ValueError("the integer k (={}) should be non-negative".format(k))
    def _repr_(self):
        """
        TESTS::
            sage: repr(Subsets(3,2)) #indirect doctest
            'Subsets of {1, 2, 3} of size 2'
        """
        return Subsets_s._repr_(self) + " of size {}".format(self._k)
    def __contains__(self, value):
        """
        TESTS::
            sage: S = Subsets([1,2,3], 2)
            sage: Set([1,2]) in S
            True
            sage: Set([1,4]) in S
            False
            sage: Set([]) in S
            False
        """
        # Same as the parent check, plus the size constraint.
        return len(value) == self._k and Subsets_s.__contains__(self,value)
    def __eq__(self, other):
        r"""
        Equality test
        TESTS::
            sage: Subsets(5,3) == Subsets(5,3)
            True
            sage: Subsets(4,2) == Subsets(5,2) or Subsets(4,2) == Subsets(4,3)
            False
        """
        if self.__class__ != other.__class__:
            return False
        return self._s == other._s and self._k == other._k
    def __ne__(self, other):
        r"""
        Difference test
        TESTS::
            sage: Subsets(5,3) != Subsets(5,3)
            False
            sage: Subsets(4,2) != Subsets(5,2) and Subsets(4,2) != Subsets(4,3)
            True
        """
        return not self == other
    def __hash__(self):
        """
        Return the hash of ``self``.
        TESTS::
            sage: hash(Subsets(5,3)) == hash(Subsets(5,3))
            True
            sage: hash(Subsets(4,2)) == hash(Subsets(5,2))
            False
        """
        return hash((self._s, self._k))
    def cardinality(self):
        """
        EXAMPLES::
            sage: Subsets(Set([1,2,3]), 2).cardinality()
            3
            sage: Subsets([1,2,3,3], 2).cardinality()
            3
            sage: Subsets([1,2,3], 1).cardinality()
            3
            sage: Subsets([1,2,3], 3).cardinality()
            1
            sage: Subsets([1,2,3], 0).cardinality()
            1
            sage: Subsets([1,2,3], 4).cardinality()
            0
            sage: Subsets(3,2).cardinality()
            3
            sage: Subsets(3,4).cardinality()
            0
        """
        # binomial(n, k), with the k > n case short-circuited to zero.
        if self._k > self._s.cardinality():
            return ZZ_0
        return binomial(self._s.cardinality(), self._k)
    __len__ = cardinality
    def first(self):
        """
        Returns the first subset of s of size k.
        EXAMPLES::
            sage: Subsets(Set([1,2,3]), 2).first()
            {1, 2}
            sage: Subsets([1,2,3,3], 2).first()
            {1, 2}
            sage: Subsets(3,2).first()
            {1, 2}
            sage: Subsets(3,4).first()
            Traceback (most recent call last):
            ...
            EmptySetError
        """
        if self._k < 0 or self._k > self._s.cardinality():
            raise EmptySetError
        else:
            # The first k elements of the ground set, in enumeration order.
            return self.element_class(list(itertools.islice(self._s, self._k)))
    def last(self):
        """
        Returns the last subset of s of size k.
        EXAMPLES::
            sage: Subsets(Set([1,2,3]), 2).last()
            {2, 3}
            sage: Subsets([1,2,3,3], 2).last()
            {2, 3}
            sage: Subsets(3,2).last()
            {2, 3}
            sage: Subsets(3,4).last()
            Traceback (most recent call last):
            ...
            EmptySetError
        """
        if self._k > self._s.cardinality():
            raise EmptySetError
        else:
            # The last k elements of the ground set.
            return self.element_class([i for i in itertools.islice(reversed(self._s),self._k)])
    def _fast_iterator(self):
        r"""
        Iterate through the subsets of size k if s.
        Beware that this function yield tuples and not sets. If you need sets
        use __iter__
        EXAMPLES::
            sage: list(Subsets(range(3), 2)._fast_iterator())
            [(0, 1), (0, 2), (1, 2)]
        """
        return itertools.combinations(self._s, self._k)
    def __iter__(self):
        """
        Iterates through the subsets of s of size k.
        EXAMPLES::
            sage: Subsets(Set([1,2,3]), 2).list()
            [{1, 2}, {1, 3}, {2, 3}]
            sage: Subsets([1,2,3,3], 2).list()
            [{1, 2}, {1, 3}, {2, 3}]
            sage: Subsets(3,2).list()
            [{1, 2}, {1, 3}, {2, 3}]
            sage: Subsets(3,3).list()
            [{1, 2, 3}]
        """
        # Wrap the tuple-yielding fast iterator into proper elements.
        for x in self._fast_iterator():
            yield self.element_class(x)
    def random_element(self):
        """
        Return a random element of the class of subsets of ``s`` of size
        ``k`` (in other words, a random subset of ``s`` of size ``k``).
        EXAMPLES::
            sage: Subsets(3, 2).random_element()
            {1, 2}
            sage: Subsets(3,4).random_element()
            Traceback (most recent call last):
            ...
            EmptySetError
        """
        lset = self._ls
        if self._k > len(lset):
            raise EmptySetError
        else:
            # rnd.sample draws k distinct elements without replacement.
            return self.element_class(rnd.sample(lset, self._k))
    def rank(self, sub):
        """
        Return the rank of ``sub`` as a subset of ``s`` of size ``k``.
        EXAMPLES::
            sage: Subsets(3,2).rank([1,2])
            0
            sage: Subsets([2,3,4],2).rank([3,4])
            2
            sage: Subsets([2,3,4],2).rank([2])
            Traceback (most recent call last):
            ...
            ValueError: {2} is not a subset of length 2 of {2, 3, 4}
            sage: Subsets([2,3,4],4).rank([2,3,4,5])
            Traceback (most recent call last):
            ...
            ValueError: {2, 3, 4, 5} is not a subset of length 4 of {2, 3, 4}
        """
        sub = Set(sub)
        n = self._s.cardinality()
        if self._k != sub.cardinality() or self._k > n:
            raise ValueError("{} is not a subset of length {} of {}".format(
                sub, self._k, self._s))
        try:
            index_list = sorted(self._s.rank(x) for x in sub)
        except ValueError:
            raise ValueError("{} is not a subset of length {} of {}".format(
                sub, self._k, self._s))
        # Unlike Subsets_s.rank, no size-class offset is needed here.
        return combination.rank(index_list, n)
    def unrank(self, r):
        """
        Return the subset of ``s`` of size ``k`` that has rank ``r``.
        EXAMPLES::
            sage: Subsets(3,2).unrank(0)
            {1, 2}
            sage: Subsets([2,4,5],2).unrank(0)
            {2, 4}
            sage: Subsets([1,2,8],3).unrank(42)
            Traceback (most recent call last):
            ...
            IndexError: index out of range
        """
        lset = self._ls
        n = len(lset)
        if self._k > n or r >= self.cardinality() or r < 0:
            raise IndexError("index out of range")
        else:
            return self.element_class([lset[i] for i in combination.from_rank(r, n, self._k)])
    def an_element(self):
        """
        Returns an example of subset.
        EXAMPLES::
            sage: Subsets(0,0).an_element()
            {}
            sage: Subsets(3,2).an_element()
            {1, 3}
            sage: Subsets([2,4,5],2).an_element()
            {2, 5}
        """
        return self.unrank(self.cardinality() // 2)
def dict_to_list(d):
    r"""
    Return a list whose elements are the elements of i of d repeated with
    multiplicity d[i].
    EXAMPLES::
        sage: from sage.combinat.subset import dict_to_list
        sage: dict_to_list({'a':1, 'b':3})
        ['a', 'b', 'b', 'b']
    """
    # Iterate the dict directly instead of going through six.iteritems:
    # ``d.items()`` behaves identically on Python 2 and 3 for this use,
    # removing an unnecessary layer of compatibility indirection.
    l = []
    for elt, mult in d.items():
        l.extend([elt] * mult)
    return l
def list_to_dict(l):
    r"""
    Return a dictionary whose keys are the elements of l and values are the
    multiplicity they appear in l.
    EXAMPLES::
        sage: from sage.combinat.subset import list_to_dict
        sage: list_to_dict(['a', 'b', 'b', 'b'])
        {'a': 1, 'b': 3}
    """
    # Count occurrences; ``get`` supplies 0 for elements not seen before.
    counts = {}
    for elt in l:
        counts[elt] = counts.get(elt, 0) + 1
    return counts
class SubMultiset_s(Parent):
    """
    The combinatorial class of the sub multisets of ``s``.
    EXAMPLES::
        sage: S = Subsets([1,2,2,3], submultiset=True)
        sage: S.cardinality()
        12
        sage: S.list()
        [[],
         [1],
         [2],
         [3],
         [1, 2],
         [1, 3],
         [2, 2],
         [2, 3],
         [1, 2, 2],
         [1, 2, 3],
         [2, 2, 3],
         [1, 2, 2, 3]]
        sage: S.first()
        []
        sage: S.last()
        [1, 2, 2, 3]
    """
    # TODO: list does not inherit from Element... so we set
    # directly element_class as list
    element_class = list
    def __init__(self, s):
        """
        Constructs the combinatorial class of the sub multisets of s.
        EXAMPLES::
            sage: S = Subsets([1,2,2,3], submultiset=True)
            sage: Subsets([1,2,3,3], submultiset=True).cardinality()
            12
            sage: TestSuite(S).run()
        """
        Parent.__init__(self, category=FiniteEnumeratedSets())
        # The multiset is stored internally as an element -> multiplicity
        # dict; a dict argument is taken as-is, any other iterable is counted.
        self._d = s
        if not isinstance(s, dict):
            self._d = list_to_dict(s)
    def _repr_(self):
        """
        TESTS::
            sage: S = Subsets([1, 2, 2, 3], submultiset=True); S #indirect doctest
            SubMultiset of [1, 2, 2, 3]
        """
        return "SubMultiset of {}".format(dict_to_list(self._d))
    def __eq__(self, other):
        r"""
        TESTS::
            sage: Subsets([1,2,2,3], submultiset=True) == Subsets([1,2,2,3], submultiset=True)
            True
            sage: Subsets([1,2,2,3], submultiset=True) == Subsets([1,2,3,3], submultiset=True)
            False
        """
        if self.__class__ != other.__class__:
            return False
        return self._d == other._d
    def __ne__(self, other):
        r"""
        TESTS::
            sage: Subsets([1,2,2,3], submultiset=True) != Subsets([1,2,2,3], submultiset=True)
            False
            sage: Subsets([1,2,2,3], submultiset=True) != Subsets([1,2,3,3], submultiset=True)
            True
        """
        return not self == other
    def __contains__(self, s):
        """
        TESTS::
            sage: S = Subsets([1,2,2,3], submultiset=True)
            sage: [] in S
            True
            sage: [1, 2, 2] in S
            True
            sage: all(i in S for i in S)
            True
            sage: [1, 2, 2, 2] in S
            False
            sage: [1, 3, 2, 2] in S
            True
            sage: [4] in S
            False
        """
        # Count the candidate's elements on the fly, failing as soon as a
        # multiplicity exceeds the one stored in self._d.
        dd = {}
        for elt in s:
            if elt in dd:
                dd[elt] += 1
                if dd[elt] > self._d[elt]:
                    return False
            elif elt not in self._d:
                return False
            else:
                dd[elt] = 1
        return True
    def cardinality(self):
        r"""
        Return the cardinality of self
        EXAMPLES::
            sage: S = Subsets([1,1,2,3],submultiset=True)
            sage: S.cardinality()
            12
            sage: len(S.list())
            12
            sage: S = Subsets([1,1,2,2,3],submultiset=True)
            sage: S.cardinality()
            18
            sage: len(S.list())
            18
            sage: S = Subsets([1,1,1,2,2,3],submultiset=True)
            sage: S.cardinality()
            24
            sage: len(S.list())
            24
        """
        # Each element with multiplicity m contributes a factor of (m + 1)
        # (it may appear 0, 1, ..., m times independently).
        from sage.all import prod
        return Integer(prod(k+1 for k in self._d.values()))
    def random_element(self):
        r"""
        Return a random element of self with uniform law
        EXAMPLES::
            sage: S = Subsets([1,1,2,3], submultiset=True)
            sage: S.random_element()
            [2]
        """
        # Choosing each element's count uniformly and independently gives a
        # uniform distribution over all submultisets.
        l = []
        for i in self._d:
            l.extend([i]*rnd.randint(0,self._d[i]))
        return l
    def generating_serie(self,variable='x'):
        r"""
        Return the serie (here a polynom) associated to the counting of the
        element of self weighted by the number of element they contain.
        EXAMPLES::
            sage: Subsets([1,1],submultiset=True).generating_serie()
            x^2 + x + 1
            sage: Subsets([1,1,2,3],submultiset=True).generating_serie()
            x^4 + 3*x^3 + 4*x^2 + 3*x + 1
            sage: Subsets([1,1,1,2,2,3,3,4],submultiset=True).generating_serie()
            x^8 + 4*x^7 + 9*x^6 + 14*x^5 + 16*x^4 + 14*x^3 + 9*x^2 + 4*x + 1
            sage: S = Subsets([1,1,1,2,2,3,3,4],submultiset=True)
            sage: S.cardinality()
            72
            sage: sum(S.generating_serie())
            72
        """
        # Product over elements of (1 + x + ... + x^m) where m is the
        # element's multiplicity.
        from sage.all import prod
        R = ZZ[variable]
        return prod(R([1]*(n+1)) for n in self._d.values())
    def __iter__(self):
        """
        Iterates through the subsets of ``self``. Note that each subset is
        represented by a list of its elements rather than a set since we can
        have multiplicities (no multiset data structure yet in sage).
        EXAMPLES::
            sage: S = Subsets([1,2,2,3], submultiset=True)
            sage: S.list()
            [[],
             [1],
             [2],
             [3],
             [1, 2],
             [1, 3],
             [2, 2],
             [2, 3],
             [1, 2, 2],
             [1, 2, 3],
             [2, 2, 3],
             [1, 2, 2, 3]]
        """
        # Enumerate by increasing size, delegating each size to the
        # fixed-size class.
        for k in range(sum(self._d.values())+1):
            for s in SubMultiset_sk(self._d, k):
                yield s
    def __call__(self, el):
        r"""
        Workaround for returning non elements.
        See the extensive documentation in
        :meth:`sage.sets.finite_enumerated_set.FiniteEnumeratedSet.__call__`.
        TESTS::
            sage: Subsets(['a','b','b','c'], submultiset=True)(['a','b']) # indirect doctest
            ['a', 'b']
        """
        if not isinstance(el, Element):
            return self._element_constructor_(el)
        else:
            return Parent.__call__(self, el)
    def _element_constructor_(self,X):
        """
        TESTS::
            sage: S = Subsets(['a','b','b','c'], submultiset=True)
            sage: S(['d'])
            Traceback (most recent call last):
            ...
            ValueError: ['d'] not in SubMultiset of ['a', 'c', 'b', 'b']
        """
        e = self.element_class(X)
        if e not in self:
            raise ValueError("{} not in {}".format(e,self))
        return e
class SubMultiset_sk(SubMultiset_s):
    """
    The combinatorial class of the subsets of size k of a multiset s. Note
    that each subset is represented by a list of the elements rather than a
    set since we can have multiplicities (no multiset data structure yet in
    sage).
    EXAMPLES::
        sage: S = Subsets([1,2,3,3],2,submultiset=True)
        sage: S._k
        2
        sage: S.cardinality()
        4
        sage: S.first()
        [1, 2]
        sage: S.last()
        [3, 3]
        sage: [sub for sub in S]
        [[1, 2], [1, 3], [2, 3], [3, 3]]
    """
    def __init__(self, s, k):
        """
        TESTS::
            sage: S = Subsets([1,2,3,3],2,submultiset=True)
            sage: [sub for sub in S]
            [[1, 2], [1, 3], [2, 3], [3, 3]]
            sage: TestSuite(S).run()
        """
        SubMultiset_s.__init__(self, s)
        # Flat list form of the multiset (used by random_element) and the
        # fixed submultiset size.
        self._l = dict_to_list(self._d)
        self._k = k
    def __eq__(self, other):
        r"""
        TESTS::
            sage: Subsets([1,2,2,3], submultiset=True) == Subsets([1,2,2,3], submultiset=True)
            True
            sage: Subsets([1,2,2,3], submultiset=True) == Subsets([1,2,3,3], submultiset=True)
            False
        """
        if self.__class__ != other.__class__:
            return False
        return self._d == other._d and self._k == other._k
    def generating_serie(self,variable='x'):
        r"""
        Return the serie (this case a polynom) associated to the counting of the
        element of self weighted by the number of element they contains
        EXAMPLES::
            sage: x = ZZ['x'].gen()
            sage: l = [1,1,1,1,2,2,3]
            sage: for k in range(len(l)):
            ....:     S = Subsets(l,k,submultiset=True)
            ....:     print(S.generating_serie('x') == S.cardinality()*x**k)
            True
            True
            True
            True
            True
            True
            True
        """
        # Extract the degree-k coefficient of the full generating series and
        # attach it to x^k in the requested variable.
        x = ZZ[variable].gen()
        P = SubMultiset_s.generating_serie(self)
        return P[self._k] * (x**self._k)
    def cardinality(self):
        r"""
        Return the cardinality of self
        EXAMPLES::
            sage: S = Subsets([1,2,2,3,3,3],4,submultiset=True)
            sage: S.cardinality()
            5
            sage: len(list(S))
            5
            sage: S = Subsets([1,2,2,3,3,3],3,submultiset=True)
            sage: S.cardinality()
            6
            sage: len(list(S))
            6
        """
        # Counted by exhausting the iterator rather than by formula.
        return Integer(sum(1 for _ in self))
    def _repr_(self):
        """
        TESTS::
            sage: S = Subsets([1, 2, 2, 3], 3, submultiset=True)
            sage: repr(S) #indirect doctest
            'SubMultiset of [1, 2, 2, 3] of size 3'
        """
        return "{} of size {}".format(SubMultiset_s._repr_(self), self._k)
    def __contains__(self, s):
        """
        TESTS::
            sage: S = Subsets([1,2,2,3], 2, submultiset=True)
            sage: [] in S
            False
            sage: [1, 2, 2] in S
            False
            sage: all(i in S for i in S)
            True
            sage: [2, 2] in S
            True
            sage: [1, 3] in S
            True
            sage: [4] in S
            False
            sage: [3, 3] in S
            False
        """
        return len(s) == self._k and SubMultiset_s.__contains__(self, s)
    def random_element(self):
        r"""
        Return a random submultiset of given length
        EXAMPLES::
            sage: Subsets(7,3).random_element()
            {1, 4, 7}
            sage: Subsets(7,5).random_element()
            {1, 3, 4, 5, 7}
        """
        # NOTE(review): the doctests above call Subsets(7,3) without
        # submultiset=True, which per the Subsets factory dispatches to
        # Subsets_sk, not to this method — they never exercise this code.
        return rnd.sample(self._l, self._k)
    def __iter__(self):
        """
        Iterates through the subsets of size ``self._k`` of the multiset
        ``self._s``. Note that each subset is represented by a list of the
        elements rather than a set since we can have multiplicities (no
        multiset data structure yet in sage).
        EXAMPLES::
            sage: S = Subsets([1,2,2,3],2, submultiset=True)
            sage: S.list()
            [[1, 2], [1, 3], [2, 2], [2, 3]]
        """
        # Each integer vector (c_1, ..., c_m) with sum k and c_i bounded by
        # the multiplicity of element i determines one submultiset.
        from sage.combinat.integer_vector import IntegerVectors
        elts = list(self._d)
        for iv in IntegerVectors(self._k, len(self._d), outer=self._d.values()):
            yield sum([[elts[i]] * iv[i] for i in range(len(iv))], [])
class SubsetsSorted(Subsets_s):
    """
    Lightweight class of all subsets of some set `S`, with each
    subset being encoded as a sorted tuple.
    Used to model indices of algebras given by subsets (so we don't
    have to explicitly build all `2^n` subsets in memory).
    For example, :class:`CliffordAlgebra`.
    """
    # Subsets are plain sorted tuples rather than Set objects.
    element_class = tuple
    def __contains__(self, value):
        """
        TESTS::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: Set([1,2]) in S
            True
            sage: Set([1,4]) in S
            False
            sage: Set([]) in S
            True
            sage: (0,2) in S
            True
            sage: 2 in S
            False
        """
        # Unlike the parent, raw lists/tuples are accepted here as well.
        if not isinstance(value, (list, tuple)) and value not in Sets():
            return False
        return all(v in self._s for v in value)
    def __iter__(self):
        """
        Iterate over ``self``.
        EXAMPLES::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: [s for s in S]
            [(), (0,), (1,), (2,), (0, 1), (0, 2), (1, 2), (0, 1, 2)]
        """
        # Same size-by-size enumeration as Subsets_s, but each subset is
        # emitted as a sorted tuple.
        k = ZZ_0
        while k <= self._s.cardinality():
            for ss in Subsets_sk(self._s, k)._fast_iterator():
                yield self.element_class(sorted(ss))
            k += 1
    def first(self):
        """
        Return the first element of ``self``.
        EXAMPLES::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: S.first()
            ()
        """
        return self.element_class([])
    def last(self):
        """
        Return the last element of ``self``.
        EXAMPLES::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: S.last()
            (0, 1, 2)
        """
        return tuple(sorted(self._s))
    def random_element(self):
        """
        Return a random element of ``self``.
        EXAMPLES::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: isinstance(S.random_element(), tuple)
            True
        """
        return tuple(sorted(Subsets_s.random_element(self)))
    def unrank(self, r):
        """
        Return the subset which has rank ``r``.
        EXAMPLES::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: S.unrank(4)
            (0, 1)
        """
        r = Integer(r)
        if r >= self.cardinality() or r < 0:
            raise IndexError("index out of range")
        # Strip off whole size classes until r indexes into the size-k class,
        # then unrank within that class (same scheme as Subsets_s.unrank).
        k = ZZ_0
        n = self._s.cardinality()
        binom = ZZ.one()
        while r >= binom:
            r -= binom
            k += 1
            binom = binomial(n,k)
        C = combination.from_rank(r, n, k)
        return self.element_class(sorted([self._s.unrank(i) for i in C]))
    def _an_element_(self):
        """
        Return an element of ``self``.
        EXAMPLES::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: S.an_element()
            (0, 1)
        """
        return self.element_class(sorted(Subsets_s._an_element_(self)))
    def _element_constructor_(self, x):
        """
        Construct an element of ``self``.
        EXAMPLES::
            sage: from sage.combinat.subset import SubsetsSorted
            sage: S = SubsetsSorted(range(3))
            sage: [s for s in S]
            [(), (0,), (1,), (2,), (0, 1), (0, 2), (1, 2), (0, 1, 2)]
        """
        # Duplicates are silently dropped via set() before sorting.
        return self.element_class(sorted(set(x)))
|
/*
*
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// This file is autogenerated from a template file. Please make
// modifications to
// `templates/src/objective-c/GRPCClient/private/version.h.template`
// instead. This file can be regenerated from the template by running
// `tools/buildgen/generate_projects.sh`.
// Version strings for the Objective-C gRPC client and the gRPC C core,
// kept in sync by the code generator above — do not edit by hand.
#define GRPC_OBJC_VERSION_STRING @"1.17.1"
#define GRPC_C_VERSION_STRING @"7.0.0"
|
"""Support for Russound multizone controllers using RIO Protocol."""
from __future__ import annotations
from russound_rio import Russound
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
# Bitmask of media-player features this platform implements (returned by
# RussoundZoneDevice.supported_features).
SUPPORT_RUSSOUND = (
    SUPPORT_VOLUME_MUTE
    | SUPPORT_VOLUME_SET
    | SUPPORT_TURN_ON
    | SUPPORT_TURN_OFF
    | SUPPORT_SELECT_SOURCE
)
# YAML configuration schema: host and name are required; the RIO TCP port
# defaults to 9621 when not configured.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_NAME): cv.string,
        vol.Optional(CONF_PORT, default=9621): cv.port,
    }
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Russound RIO platform."""
    # Open the RIO connection using the configured host/port.
    russ = Russound(hass.loop, config.get(CONF_HOST), config.get(CONF_PORT))
    await russ.connect()
    # Ask the controller for its sources and zones, then create one media
    # player entity per zone and subscribe to its updates.
    sources = await russ.enumerate_sources()
    valid_zones = await russ.enumerate_zones()
    entities = []
    for zone_id, zone_name in valid_zones:
        await russ.watch_zone(zone_id)
        entities.append(RussoundZoneDevice(russ, zone_id, zone_name, sources))
    @callback
    def on_stop(event):
        """Shutdown cleanly when hass stops."""
        hass.loop.create_task(russ.close())
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_stop)
    async_add_entities(entities)
class RussoundZoneDevice(MediaPlayerEntity):
    """Representation of a Russound Zone."""

    def __init__(self, russ, zone_id, name, sources):
        """Initialize the zone device."""
        super().__init__()
        self._russ = russ
        self._zone_id = zone_id
        self._name = name
        self._sources = sources

    def _zone_var(self, name, default=None):
        """Return a cached controller variable for this zone."""
        return self._russ.get_cached_zone_variable(self._zone_id, name, default)

    def _current_source_id(self):
        """Return the id of the currently selected source (0 when none)."""
        return int(self._zone_var("currentsource", 0))

    def _source_var(self, name, default=None):
        """Return a cached variable of the zone's current source."""
        source_id = self._current_source_id()
        if not source_id:
            return default
        return self._russ.get_cached_source_variable(source_id, name, default)

    def _source_na_var(self, name):
        """Will replace invalid values with None."""
        source_id = self._current_source_id()
        if not source_id:
            return None
        value = self._russ.get_cached_source_variable(source_id, name, None)
        # "------" is the controller's placeholder for "not available"
        return None if value in (None, "", "------") else value

    def _zone_callback_handler(self, zone_id, *args):
        # Only react to updates for our own zone.
        if zone_id != self._zone_id:
            return
        self.schedule_update_ha_state()

    def _source_callback_handler(self, source_id, *args):
        # Only react to updates for the source this zone is playing.
        if source_id != self._current_source_id():
            return
        self.schedule_update_ha_state()

    async def async_added_to_hass(self):
        """Register callback handlers."""
        self._russ.add_zone_callback(self._zone_callback_handler)
        self._russ.add_source_callback(self._source_callback_handler)

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the zone."""
        return self._zone_var("name", self._name)

    @property
    def state(self):
        """Return the state of the device."""
        status = self._zone_var("status", "OFF")
        return {"ON": STATE_ON, "OFF": STATE_OFF}.get(status)

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return SUPPORT_RUSSOUND

    @property
    def source(self):
        """Get the currently selected source."""
        return self._source_na_var("name")

    @property
    def source_list(self):
        """Return a list of available input sources."""
        return [source_name for _, source_name in self._sources]

    @property
    def media_content_type(self):
        """Content type of current playing media."""
        return MEDIA_TYPE_MUSIC

    @property
    def media_title(self):
        """Title of current playing media."""
        return self._source_na_var("songname")

    @property
    def media_artist(self):
        """Artist of current playing media, music track only."""
        return self._source_na_var("artistname")

    @property
    def media_album_name(self):
        """Album name of current playing media, music track only."""
        return self._source_na_var("albumname")

    @property
    def media_image_url(self):
        """Image url of current playing media."""
        return self._source_na_var("coverarturl")

    @property
    def volume_level(self):
        """Volume level of the media player (0..1).

        The controller reports volume on a 0..50 scale, so divide by 50
        to map it onto the required range.
        """
        return float(self._zone_var("volume", 0)) / 50.0

    async def async_turn_off(self):
        """Turn off the zone."""
        await self._russ.send_zone_event(self._zone_id, "ZoneOff")

    async def async_turn_on(self):
        """Turn on the zone."""
        await self._russ.send_zone_event(self._zone_id, "ZoneOn")

    async def async_set_volume_level(self, volume):
        """Set the volume level."""
        await self._russ.send_zone_event(
            self._zone_id, "KeyPress", "Volume", int(volume * 50.0)
        )

    async def async_select_source(self, source):
        """Select the source input for this zone."""
        wanted = source.lower()
        for source_id, source_name in self._sources:
            if source_name.lower() == wanted:
                await self._russ.send_zone_event(
                    self._zone_id, "SelectSource", source_id
                )
                return
|
from abc import ABC, abstractmethod
from typing import Callable, Optional
from bs4 import BeautifulSoup as BS
import json
from requests import Response
class ApiResponse(ABC):
    """Wrap an HTTP response with a pretty-printable body."""

    # The underlying HTTP response object.
    response: Response

    def __init__(self, response: Response) -> None:
        super().__init__()
        self.response = response

    @abstractmethod
    def prettify(self) -> str:
        """Return a human-readable rendering of the response body."""

    def status_code(self) -> int:
        """Return the HTTP status code of the wrapped response."""
        return self.response.status_code
class HtmlResponse(ApiResponse):
    """API response whose payload is an HTML document."""

    def prettify(self) -> str:
        """Return the HTML body re-indented by BeautifulSoup."""
        soup = BS(self.response.text, "html.parser")
        return soup.prettify()
class JsonResponse(ApiResponse):
    """API response whose payload is a JSON document."""

    def prettify(self) -> str:
        """Return the JSON body re-serialized with 4-space indentation."""
        payload = self.response.json()
        return json.dumps(payload, indent=4, sort_keys=False)
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2017, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.maxDiff = None

        filename = 'simple05.xlsx'

        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename

        self.ignore_files = []
        self.ignore_elements = {}

    def _populate_workbook(self, workbook):
        """Write the shared font-formatting fixture into *workbook* and close it.

        Both test methods below were previously identical copies of this
        body; keeping it in one place prevents the two from drifting apart.
        """
        worksheet = workbook.add_worksheet()

        worksheet.set_row(5, 18)
        worksheet.set_row(6, 18)

        # One format per row, covering each basic font property.
        cell_formats = [
            workbook.add_format({'bold': 1}),
            workbook.add_format({'italic': 1}),
            workbook.add_format({'bold': 1, 'italic': 1}),
            workbook.add_format({'underline': 1}),
            workbook.add_format({'font_strikeout': 1}),
            workbook.add_format({'font_script': 1}),
            workbook.add_format({'font_script': 2}),
        ]

        for row, cell_format in enumerate(cell_formats):
            worksheet.write_string(row, 0, 'Foo', cell_format)

        workbook.close()

    def test_create_file(self):
        """Test font formatting."""
        self._populate_workbook(Workbook(self.got_filename))

        self.assertExcelEqual()

    def test_create_file_in_memory(self):
        """Test font formatting."""
        self._populate_workbook(Workbook(self.got_filename, {'in_memory': True}))

        self.assertExcelEqual()
|
import sys,os,argparse,time
import numpy as np
import torch
import torch.nn as nn
from torch.utils.tensorboard import SummaryWriter
import utils
# Wall-clock start time; total elapsed time is reported at the end of the run.
tstart=time.time()
# Arguments
# Command-line interface: --experiment and --approach are required,
# everything else falls back to a sensible default.
parser=argparse.ArgumentParser(description='xxx')
parser.add_argument('--seed',type=int,default=0,help='(default=%(default)d)')
parser.add_argument('--experiment',default='',type=str,required=True,choices=['mnist2','pmnist','cifar','mixture'],help='(default=%(default)s)')
parser.add_argument('--approach',default='',type=str,required=True,choices=['random','sgd','sgd-frozen','lwf','lfl','ewc','imm-mean','progressive','pathnet',
                                                                            'imm-mode','sgd-restart',
                                                                            'joint','hat','hat-test'],help='(default=%(default)s)')
parser.add_argument('--output',default='',type=str,required=False,help='(default=%(default)s)')
parser.add_argument('--nepochs',default=200,type=int,required=False,help='(default=%(default)d)')
parser.add_argument('--lr',default=0.05,type=float,required=False,help='(default=%(default)f)')
parser.add_argument('--parameter',type=str,default='',help='(default=%(default)s)')
args=parser.parse_args()
# Default output path encodes experiment, approach and seed so runs don't clash.
if args.output=='':
    args.output='../res/'+args.experiment+'_'+args.approach+'_'+str(args.seed)+'.txt'
# Echo the full configuration for the run log.
print('='*100)
print('Arguments =')
for arg in vars(args):
    print('\t'+arg+':',getattr(args,arg))
print('='*100)
########################################################################################################################
# Seed
# Seed every RNG in play so runs are reproducible; a CUDA device is
# mandatory — the script exits immediately when none is available.
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if torch.cuda.is_available(): torch.cuda.manual_seed(args.seed)
else: print('[CUDA unavailable]'); sys.exit()
# Args -- Experiment
# Select the dataloader module that matches the requested benchmark.
if args.experiment=='mnist2':
    from dataloaders import mnist2 as dataloader
elif args.experiment=='pmnist':
    from dataloaders import pmnist as dataloader
elif args.experiment=='cifar':
    from dataloaders import cifar as dataloader
elif args.experiment=='mixture':
    from dataloaders import mixture as dataloader
# Args -- Approach
# Select the continual-learning approach module (each exposes an Appr class).
if args.approach=='random':
    from approaches import random as approach
elif args.approach=='sgd':
    from approaches import sgd as approach
elif args.approach=='sgd-restart':
    from approaches import sgd_restart as approach
elif args.approach=='sgd-frozen':
    from approaches import sgd_frozen as approach
elif args.approach=='lwf':
    from approaches import lwf as approach
elif args.approach=='lfl':
    from approaches import lfl as approach
elif args.approach=='ewc':
    from approaches import ewc as approach
elif args.approach=='imm-mean':
    from approaches import imm_mean as approach
elif args.approach=='imm-mode':
    from approaches import imm_mode as approach
elif args.approach=='progressive':
    from approaches import progressive as approach
elif args.approach=='pathnet':
    from approaches import pathnet as approach
elif args.approach=='hat-test':
    from approaches import hat_test as approach
elif args.approach=='hat':
    from approaches import hat as approach
elif args.approach=='joint':
    from approaches import joint as approach
# Args -- Network
# Select a backbone compatible with both the dataset and the approach:
# MLP variants for the MNIST-style experiments, AlexNet variants otherwise.
if args.experiment=='mnist2' or args.experiment=='pmnist':
    if args.approach=='hat' or args.approach=='hat-test':
        from networks import mlp_hat as network
    else:
        from networks import mlp as network
else:
    if args.approach=='lfl':
        from networks import alexnet_lfl as network
    elif args.approach=='hat':
        from networks import alexnet_hat as network
    elif args.approach=='progressive':
        from networks import alexnet_progressive as network
    elif args.approach=='pathnet':
        from networks import alexnet_pathnet as network
    elif args.approach=='hat-test':
        from networks import alexnet_hat_test as network
    else:
        from networks import alexnet as network
########################################################################################################################
# Load
print('Load data...')
# data[t] holds train/valid/test tensors per task; taskcla is a list of
# (task_id, num_classes) pairs; inputsize is the per-sample input shape.
data,taskcla,inputsize=dataloader.get(seed=args.seed)
print('Input size =',inputsize,'\nTask info =',taskcla)
# Inits
print('Inits...')
net = network.Net(inputsize,taskcla).cuda()
# net = nn.DataParallel(net)
utils.print_model_report(net)
# The approach wraps the network with its own training/eval loop and optimizer.
appr=approach.Appr(net,nepochs=args.nepochs,lr=args.lr,args=args)
print(appr.criterion)
utils.print_optimizer_config(appr.optimizer)
print('-'*100)
# tensorboard
# NOTE(review): hard-coded absolute log directory — consider making this
# configurable (it only exists on the original author's machine).
board = SummaryWriter('/home/user/Github/hat/logs')
# board.add_graph(net)
# Loop tasks
# acc[t,u] / lss[t,u]: test accuracy / loss on task u measured right after
# finishing training on task t (lower-triangular matrix tracks forgetting).
acc=np.zeros((len(taskcla),len(taskcla)),dtype=np.float32)
lss=np.zeros((len(taskcla),len(taskcla)),dtype=np.float32)
# Train one task at a time; the 'joint' approach instead accumulates all
# data seen so far and retrains on the union (an upper-bound baseline).
for t,ncla in taskcla:
    print('*'*100)
    print('Task {:2d} ({:s})'.format(t,data[t]['name']))
    print('*'*100)
    if args.approach == 'joint':
        # Get data. We do not put it to GPU
        if t==0:
            xtrain=data[t]['train']['x']
            ytrain=data[t]['train']['y']
            xvalid=data[t]['valid']['x']
            yvalid=data[t]['valid']['y']
            # Per-sample task labels, so the model knows which head to use.
            task_t=t*torch.ones(xtrain.size(0)).int()
            task_v=t*torch.ones(xvalid.size(0)).int()
            task=[task_t,task_v]
        else:
            # Concatenate the new task's data onto everything gathered so far.
            xtrain=torch.cat((xtrain,data[t]['train']['x']))
            ytrain=torch.cat((ytrain,data[t]['train']['y']))
            xvalid=torch.cat((xvalid,data[t]['valid']['x']))
            yvalid=torch.cat((yvalid,data[t]['valid']['y']))
            task_t=torch.cat((task_t,t*torch.ones(data[t]['train']['y'].size(0)).int()))
            task_v=torch.cat((task_v,t*torch.ones(data[t]['valid']['y'].size(0)).int()))
            task=[task_t,task_v]
    else:
        # Get data
        xtrain=data[t]['train']['x'].cuda()
        ytrain=data[t]['train']['y'].cuda()
        xvalid=data[t]['valid']['x'].cuda()
        yvalid=data[t]['valid']['y'].cuda()
        task=t
    # Train
    appr.train(task,xtrain,ytrain,xvalid,yvalid,board=board)
    print('-'*100)
    # Test
    # Evaluate on every task seen so far to measure catastrophic forgetting.
    for u in range(t+1):
        xtest=data[u]['test']['x'].cuda()
        ytest=data[u]['test']['y'].cuda()
        test_loss,test_acc=appr.eval(u,xtest,ytest)
        print('>>> Test on task {:2d} - {:15s}: loss={:.3f}, acc={:5.1f}% <<<'.format(u,data[u]['name'],test_loss,100*test_acc))
        acc[t,u]=test_acc
        lss[t,u]=test_loss
    # tensorboard
    # board.add_scalar('test loss', test_loss)
    # board.add_scalar('test accuracy', 100*test_acc)
    # Save
    # Accuracy matrix is re-saved after every task so partial runs keep results.
    print('Save at '+args.output)
    np.savetxt(args.output,acc,'%.4f')
# Done
# Print the full accuracy matrix: row t = accuracies after training task t.
print('*'*100)
print('Accuracies =')
for i in range(acc.shape[0]):
    print('\t',end='')
    for j in range(acc.shape[1]):
        print('{:5.1f}% '.format(100*acc[i,j]),end='')
    print()
print('*'*100)
print('Done!')
print('[Elapsed time = {:.1f} h]'.format((time.time()-tstart)/(60*60)))
# Some approaches collect extra per-epoch logs; persist them when present.
if hasattr(appr, 'logs'):
    if appr.logs is not None:
        #save task names
        from copy import deepcopy
        appr.logs['task_name'] = {}
        appr.logs['test_acc'] = {}
        appr.logs['test_loss'] = {}
        for t,ncla in taskcla:
            appr.logs['task_name'][t] = deepcopy(data[t]['name'])
            appr.logs['test_acc'][t] = deepcopy(acc[t,:])
            appr.logs['test_loss'][t] = deepcopy(lss[t,:])
        #pickle
        # Compressed pickle dump to the approach's own log path.
        import gzip
        import pickle
        with gzip.open(os.path.join(appr.logpath), 'wb') as output:
            pickle.dump(appr.logs, output, pickle.HIGHEST_PROTOCOL)
|
#
# number.py : Number-theoretic functions
#
# Part of the Python Cryptography Toolkit
#
# Written by Andrew M. Kuchling, Barry A. Warsaw, and others
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
#
import math
import sys
import struct
from Crypto import Random
from Crypto.Util.py3compat import *
# Backward compatibility
_fastmath = None
# New functions
from ._number_new import *
def size(N):
    """Returns the size of the number N in bits.

    Raises:
        ValueError: if N is negative.  (The previous shift-loop never
        terminated for negative inputs, because ``N >> bits`` converges
        to -1 instead of 0.)
    """
    if N < 0:
        raise ValueError("Size in bits only available for non-negative numbers")
    # int.bit_length() is the builtin equivalent of the old shift loop.
    return N.bit_length()
def getRandomInteger(N, randfunc=None):
    """Return a random number at most N bits long.
    If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used.
    .. deprecated:: 3.0
        This function is for internal use only and may be renamed or removed in
        the future. Use :func:`Crypto.Random.random.getrandbits` instead.
    """
    if randfunc is None:
        randfunc = Random.get_random_bytes
    # Draw the whole bytes first, then one partial byte for any leftover bits.
    data = randfunc(N >> 3)
    leftover = N % 8
    if leftover:
        top = ord(randfunc(1)) >> (8 - leftover)
        data = struct.pack('B', top) + data
    return bytes_to_long(data)
def getRandomRange(a, b, randfunc=None):
    """Return a random number *n* so that *a <= n < b*.
    If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used.
    .. deprecated:: 3.0
        This function is for internal use only and may be renamed or removed in
        the future. Use :func:`Crypto.Random.random.randrange` instead.
    """
    # Rejection sampling: draw offsets of just enough bits until one
    # falls inside [0, b-a-1], then shift it up by a.
    span = b - a - 1
    bits = size(span)
    while True:
        offset = getRandomInteger(bits, randfunc)
        if offset <= span:
            return a + offset
def getRandomNBitInteger(N, randfunc=None):
    """Return a random number with exactly N-bits,
    i.e. a random number between 2**(N-1) and (2**N)-1.
    If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used.
    .. deprecated:: 3.0
        This function is for internal use only and may be renamed or removed in
        the future.
    """
    # Draw N-1 random low bits, then force the top bit so the width is exact.
    result = getRandomInteger(N - 1, randfunc) | (1 << (N - 1))
    assert size(result) >= N
    return result
def GCD(x,y):
    """Greatest Common Divisor of :data:`x` and :data:`y`.

    Delegates to :func:`math.gcd`, which operates on the absolute values
    of its arguments — the same semantics as the previous hand-rolled
    Euclidean loop (including ``GCD(0, 0) == 0``).
    """
    return math.gcd(x, y)
def inverse(u, v):
    """The inverse of :data:`u` *mod* :data:`v`."""
    # Extended Euclidean algorithm: track remainders (r0, r1) and the
    # Bezout coefficient of u (s0, s1) until the remainder hits zero.
    r0, r1 = int(u), int(v)
    s0, s1 = 1, 0
    while r1 > 0:
        q = r0 // r1
        r0, r1 = r1, r0 - q * r1
        s0, s1 = s1, s0 - q * s1
    # Normalize the coefficient into [0, v).
    while s0 < 0:
        s0 += v
    return s0
# Given a number of bits to generate and a random generation function,
# find a prime number of the appropriate size.
def getPrime(N, randfunc=None):
    """Return a random N-bit prime number.
    If randfunc is omitted, then :meth:`Random.get_random_bytes` is used.
    """
    if randfunc is None:
        randfunc = Random.get_random_bytes
    # Start from a random odd N-bit number and step by 2 until a prime is hit.
    candidate = getRandomNBitInteger(N, randfunc) | 1
    while not isPrime(candidate, randfunc=randfunc):
        candidate += 2
    return candidate
def _rabinMillerTest(n, rounds, randfunc=None):
    """_rabinMillerTest(n:long, rounds:int, randfunc:callable):int
    Tests if n is prime.
    Returns 0 when n is definitely composite.
    Returns 1 when n is probably prime.
    Returns 2 when n is definitely prime.
    If randfunc is omitted, then Random.new().read is used.
    This function is for internal use only and may be renamed or removed in
    the future.
    """
    # check special cases (n==2, n even, n < 2)
    if n < 3 or (n & 1) == 0:
        return n == 2
    # n might be very large so it might be beneficial to precalculate n-1
    n_1 = n - 1
    # determine m and b so that 2**b * m = n - 1 and b maximal
    b = 0
    m = n_1
    while (m & 1) == 0:
        b += 1
        m >>= 1
    # bases already tried, so every round uses a fresh witness
    tested = []
    # we need to do at most n-2 rounds.
    for i in range (min (rounds, n-2)):
        # randomly choose a < n and make sure it hasn't been tested yet
        a = getRandomRange (2, n, randfunc)
        while a in tested:
            a = getRandomRange (2, n, randfunc)
        tested.append (a)
        # do the rabin-miller test
        z = pow (a, m, n) # (a**m) % n
        # a is a "strong liar" (or n is prime) when z is 1 or n-1
        if z == 1 or z == n_1:
            continue
        composite = 1
        # square z up to b-1 times; reaching n-1 clears the composite flag
        for r in range (b):
            z = (z * z) % n
            if z == 1:
                # found a non-trivial square root of 1 => n is composite
                return 0
            elif z == n_1:
                composite = 0
                break
        if composite:
            return 0
    # every witness passed: n is probably prime
    return 1
def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None):
    """
    Return a random strong *N*-bit prime number.
    In this context, *p* is a strong prime if *p-1* and *p+1* have at
    least one large prime factor.
    Args:
        N (integer): the exact length of the strong prime.
          It must be a multiple of 128 and > 512.
        e (integer): if provided, the returned prime (minus 1)
          will be coprime to *e* and thus suitable for RSA where
          *e* is the public exponent.
        false_positive_prob (float):
          The statistical probability for the result not to be actually a
          prime. It defaults to 10\ :sup:`-6`.
          Note that the real probability of a false-positive is far less. This is
          just the mathematically provable limit.
        randfunc (callable):
          A function that takes a parameter *N* and that returns
          a random byte string of such length.
          If omitted, :func:`Crypto.Random.get_random_bytes` is used.
    Return:
        The new strong prime.
    .. deprecated:: 3.0
        This function is for internal use only and may be renamed or removed in
        the future.
    """
    # This function was implemented following the
    # instructions found in the paper:
    #   "FAST GENERATION OF RANDOM, STRONG RSA PRIMES"
    #   by Robert D. Silverman
    #   RSA Laboratories
    #   May 17, 1997
    # which by the time of writing could be freely downloaded here:
    # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf
    if randfunc is None:
        randfunc = Random.get_random_bytes
    # Use the accelerator if available
    if _fastmath is not None:
        return _fastmath.getStrongPrime(int(N), int(e), false_positive_prob,
                                        randfunc)
    if (N < 512) or ((N % 128) != 0):
        raise ValueError ("bits must be multiple of 128 and > 512")
    # Rabin-Miller rounds needed so the 4^-rounds error bound stays below
    # false_positive_prob.
    rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4)))
    # calculate range for X
    #   lower_bound = sqrt(2) * 2^{511 + 128*x}
    #   upper_bound = 2^{512 + 128*x} - 1
    x = (N - 512) >> 7;
    # We need to approximate the sqrt(2) in the lower_bound by an integer
    # expression because floating point math overflows with these numbers
    lower_bound = (14142135623730950489 * (2 ** (511 + 128*x))) // 10000000000000000000
    upper_bound = (1 << (512 + 128*x)) - 1
    # Randomly choose X in calculated range
    X = getRandomRange (lower_bound, upper_bound, randfunc)
    # generate p1 and p2: the two ~101-bit primes that will divide p-1 and p+1
    p = [0, 0]
    for i in (0, 1):
        # randomly choose 101-bit y
        y = getRandomNBitInteger (101, randfunc)
        # initialize the field for sieving
        field = [0] * 5 * len (sieve_base)
        # sieve the field: mark offsets where y+j is divisible by a small prime
        for prime in sieve_base:
            offset = y % prime
            for j in range ((prime - offset) % prime, len (field), prime):
                field[j] = 1
        # look for suitable p[i] starting at y
        result = 0
        for j in range(len(field)):
            composite = field[j]
            # look for next candidate
            if composite:
                continue
            tmp = y + j
            result = _rabinMillerTest (tmp, rabin_miller_rounds)
            if result > 0:
                p[i] = tmp
                break
        if result == 0:
            raise RuntimeError ("Couln't find prime in field. "
                                "Developer: Increase field_size")
    # Calculate R
    #     R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1
    tmp1 = inverse (p[1], p[0]) * p[1]  # (p2^-1 mod p1)*p2
    tmp2 = inverse (p[0], p[1]) * p[0]  # (p1^-1 mod p2)*p1
    R = tmp1 - tmp2  # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1
    # search for final prime number starting by Y0
    #    Y0 = X + (R - X mod p1p2)
    # Stepping by p1*p2 keeps X ≡ 1 (mod p1) and X ≡ -1 (mod p2) throughout.
    increment = p[0] * p[1]
    X = X + (R - (X % increment))
    while 1:
        is_possible_prime = 1
        # first check candidate against sieve_base
        for prime in sieve_base:
            if (X % prime) == 0:
                is_possible_prime = 0
                break
        # if e is given make sure that e and X-1 are coprime
        # this is not necessarily a strong prime criterion but useful when
        # creating them for RSA where the p-1 and q-1 should be coprime to
        # the public exponent e
        if e and is_possible_prime:
            if e & 1:
                if GCD(e, X-1) != 1:
                    is_possible_prime = 0
            else:
                if GCD(e, (X-1) // 2) != 1:
                    is_possible_prime = 0
        # do some Rabin-Miller-Tests
        if is_possible_prime:
            result = _rabinMillerTest (X, rabin_miller_rounds)
            if result > 0:
                break
        X += increment
        # abort when X has more bits than requested
        # TODO: maybe we shouldn't abort but rather start over.
        if X >= 1 << N:
            raise RuntimeError ("Couln't find prime in field. "
                                "Developer: Increase field_size")
    return X
def isPrime(N, false_positive_prob=1e-6, randfunc=None):
    r"""Test if a number *N* is a prime.

    Args:
        false_positive_prob (float):
          The statistical probability for the result not to be actually a
          prime. It defaults to 10\ :sup:`-6`.
          Note that the real probability of a false-positive is far less.
          This is just the mathematically provable limit.
        randfunc (callable):
          A function that takes a parameter *N* and that returns
          a random byte string of such length.
          If omitted, :func:`Crypto.Random.get_random_bytes` is used.
    Return:
        ``True`` if the input is indeed prime.
    """
    if randfunc is None:
        randfunc = Random.get_random_bytes
    if _fastmath is not None:
        return _fastmath.isPrime(int(N), false_positive_prob, randfunc)
    # Numbers below 3 and even numbers: only 2 is prime among them.
    if N < 3 or N & 1 == 0:
        return N == 2
    # Trial division against the precomputed small primes first; this
    # rejects most composites without any Rabin-Miller round.
    # (Previously these two paths returned the ints 1/0 while the
    # docstring promised booleans — now consistently bool.)
    for p in sieve_base:
        if N == p:
            return True
        if N % p == 0:
            return False
    # Rounds needed so the 4^-rounds error bound stays below false_positive_prob.
    rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4)))
    return bool(_rabinMillerTest(N, rounds, randfunc))
# Improved conversion functions contributed by Barry Warsaw, after
# careful benchmarking
import struct
def long_to_bytes(n, blocksize=0):
    """Convert an integer to a big-endian byte string.

    In Python 3.2+ the native ``n.to_bytes(blocksize, 'big')`` does the
    same job.  When :data:`blocksize` is greater than zero, the result is
    left-padded with NUL bytes so its length is a multiple of blocksize;
    otherwise the minimal representation is returned (``b'\\x00'`` for 0).
    """
    n = int(n)
    pack = struct.pack
    # Collect 32-bit big-endian chunks from least to most significant.
    chunks = []
    while n > 0:
        chunks.append(pack('>I', n & 0xffffffff))
        n >>= 32
    raw = b''.join(reversed(chunks))
    # Minimal representation: drop leading zero bytes, but never return b''.
    stripped = raw.lstrip(b'\x00') or b'\x00'
    # Left-pad with zeros up to the next multiple of blocksize, if requested.
    if blocksize > 0 and len(stripped) % blocksize:
        stripped = (blocksize - len(stripped) % blocksize) * b'\x00' + stripped
    return stripped
def bytes_to_long(s):
    """Convert a byte string to a long integer (big endian).

    In Python 3.2+ the native ``int.from_bytes(s, 'big')`` does the same
    job.  This is (essentially) the inverse of :func:`long_to_bytes`.
    """
    # Up to Python 2.7.3, struct.unpack can't work with bytearrays
    if sys.version_info[0] < 3 and isinstance(s, bytearray):
        s = bytes(s)
    # Left-pad to a multiple of 4 so the data splits into 32-bit words.
    padded = b'\x00' * (-len(s) % 4) + s
    acc = 0
    for word in struct.unpack('>%dI' % (len(padded) // 4), padded):
        acc = (acc << 32) | word
    return acc
# For backwards compatibility...
import warnings
def long2str(n, blocksize=0):
    """Deprecated alias: use :func:`long_to_bytes` instead."""
    warnings.warn("long2str() has been replaced by long_to_bytes()")
    return long_to_bytes(n, blocksize)
def str2long(s):
    """Deprecated alias: use :func:`bytes_to_long` instead."""
    warnings.warn("str2long() has been replaced by bytes_to_long()")
    return bytes_to_long(s)
# The first 10000 primes used for checking primality.
# This should be enough to eliminate most of the odd
# numbers before needing to do a Rabin-Miller test at all.
sieve_base = (
2, 3, 5, 7, 11, 13, 17, 19, 23, 29,
31, 37, 41, 43, 47, 53, 59, 61, 67, 71,
73, 79, 83, 89, 97, 101, 103, 107, 109, 113,
127, 131, 137, 139, 149, 151, 157, 163, 167, 173,
179, 181, 191, 193, 197, 199, 211, 223, 227, 229,
233, 239, 241, 251, 257, 263, 269, 271, 277, 281,
283, 293, 307, 311, 313, 317, 331, 337, 347, 349,
353, 359, 367, 373, 379, 383, 389, 397, 401, 409,
419, 421, 431, 433, 439, 443, 449, 457, 461, 463,
467, 479, 487, 491, 499, 503, 509, 521, 523, 541,
547, 557, 563, 569, 571, 577, 587, 593, 599, 601,
607, 613, 617, 619, 631, 641, 643, 647, 653, 659,
661, 673, 677, 683, 691, 701, 709, 719, 727, 733,
739, 743, 751, 757, 761, 769, 773, 787, 797, 809,
811, 821, 823, 827, 829, 839, 853, 857, 859, 863,
877, 881, 883, 887, 907, 911, 919, 929, 937, 941,
947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013,
1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069,
1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151,
1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223,
1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291,
1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373,
1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451,
1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511,
1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583,
1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657,
1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733,
1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811,
1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889,
1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987,
1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053,
2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129,
2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213,
2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287,
2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357,
2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423,
2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531,
2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617,
2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687,
2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741,
2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819,
2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903,
2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999,
3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079,
3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181,
3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257,
3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331,
3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413,
3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511,
3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571,
3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643,
3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727,
3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821,
3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907,
3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989,
4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057,
4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139,
4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231,
4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297,
4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409,
4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493,
4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583,
4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657,
4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751,
4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831,
4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937,
4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003,
5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087,
5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179,
5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279,
5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387,
5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443,
5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521,
5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639,
5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693,
5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791,
5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857,
5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939,
5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053,
6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133,
6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221,
6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301,
6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367,
6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473,
6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571,
6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673,
6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761,
6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833,
6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917,
6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997,
7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103,
7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207,
7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297,
7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411,
7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499,
7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561,
7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643,
7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723,
7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829,
7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919,
7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017,
8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 8101, 8111,
8117, 8123, 8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219,
8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291,
8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387,
8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501,
8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597,
8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677,
8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741,
8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831,
8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929,
8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011,
9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109,
9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199,
9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283,
9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377,
9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439,
9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533,
9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631,
9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733,
9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811,
9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887,
9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007,
10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099,
10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177,
10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271,
10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343,
10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459,
10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567,
10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657,
10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739,
10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859,
10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949,
10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059,
11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149,
11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251,
11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329,
11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443,
11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527,
11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657,
11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777,
11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833,
11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933,
11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011,
12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109,
12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211,
12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289,
12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401,
12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487,
12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553,
12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641,
12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739,
12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829,
12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923,
12941, 12953, 12959, 12967, 12973, 12979, 12983, 13001, 13003, 13007,
13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109,
13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187,
13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309,
13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411,
13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499,
13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619,
13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697,
13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781,
13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879,
13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967,
13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081,
14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197,
14207, 14221, 14243, 14249, 14251, 14281, 14293, 14303, 14321, 14323,
14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419,
14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519,
14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593,
14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699,
14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 14759, 14767,
14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851,
14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947,
14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073,
15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149,
15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259,
15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319,
15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401,
15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497,
15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607,
15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679,
15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773,
15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881,
15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971,
15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069,
16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183,
16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267,
16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381,
16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481,
16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603,
16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691,
16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811,
16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903,
16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993,
17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093,
17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191,
17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317,
17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389,
17393, 17401, 17417, 17419, 17431, 17443, 17449, 17467, 17471, 17477,
17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573,
17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669,
17681, 17683, 17707, 17713, 17729, 17737, 17747, 17749, 17761, 17783,
17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891,
17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971,
17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059,
18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143,
18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233,
18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313,
18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427,
18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517,
18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637,
18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749,
18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899,
18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009,
19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 19121,
19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219,
19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319,
19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423,
19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477,
19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571,
19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699,
19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793,
19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891,
19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991,
19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071,
20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149,
20161, 20173, 20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261,
20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357,
20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443,
20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551,
20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693,
20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771,
20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897,
20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983,
21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067,
21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169,
21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277,
21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383,
21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491,
21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563,
21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647,
21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751,
21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841,
21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943,
21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039,
22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123,
22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229,
22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307,
22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441,
22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543,
22549, 22567, 22571, 22573, 22613, 22619, 22621, 22637, 22639, 22643,
22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727,
22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817,
22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943,
22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029,
23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099,
23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203,
23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321,
23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447,
23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561,
23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629,
23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743,
23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827,
23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909,
23911, 23917, 23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007,
24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091,
24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169,
24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281,
24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413,
24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517,
24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659,
24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767,
24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877,
24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977,
24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097,
25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183,
25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303,
25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391,
25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471,
25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603,
25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693,
25703, 25717, 25733, 25741, 25747, 25759, 25763, 25771, 25793, 25799,
25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913,
25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999,
26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111,
26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203,
26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297,
26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399,
26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497,
26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633,
26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711,
26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801,
26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891,
26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987,
26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077,
27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211,
27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329,
27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449,
27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551,
27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 27691,
27697, 27701, 27733, 27737, 27739, 27743, 27749, 27751, 27763, 27767,
27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827,
27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947,
27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051,
28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151,
28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283,
28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403,
28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499,
28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579,
28591, 28597, 28603, 28607, 28619, 28621, 28627, 28631, 28643, 28649,
28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729,
28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837,
28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933,
28949, 28961, 28979, 29009, 29017, 29021, 29023, 29027, 29033, 29059,
29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167,
29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251,
29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363,
29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443,
29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573,
29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671,
29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819,
29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921,
29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059,
30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137,
30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241,
30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341,
30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469,
30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559,
30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689,
30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803,
30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871,
30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983,
31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091,
31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183,
31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259,
31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357,
31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511,
31513, 31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601,
31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721,
31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817,
31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973,
31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063,
32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159,
32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257,
32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353,
32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423,
32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531,
32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609,
32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717,
32719, 32749, 32771, 32779, 32783, 32789, 32797, 32801, 32803, 32831,
32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939,
32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023,
33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113,
33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211,
33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343,
33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427,
33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533,
33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613,
33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713,
33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797,
33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893,
33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031,
34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 34147, 34157,
34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261,
34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 34337,
34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457,
34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537,
34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649,
34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739,
34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847,
34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961,
34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083,
35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159,
35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291,
35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401,
35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509,
35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593,
35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759,
35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863,
35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969,
35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061,
36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161,
36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277,
36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383,
36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497,
36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587,
36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691,
36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781,
36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877,
36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947,
36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057,
37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189,
37199, 37201, 37217, 37223, 37243, 37253, 37273, 37277, 37307, 37309,
37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397,
37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507,
37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573,
37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663,
37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813,
37831, 37847, 37853, 37861, 37871, 37879, 37889, 37897, 37907, 37951,
37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047,
38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183,
38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281,
38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371,
38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543,
38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639,
38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713,
38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821,
38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921,
38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041,
39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133,
39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227,
39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323,
39341, 39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419,
39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541,
39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667,
39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769,
39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857,
39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971,
39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087,
40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169,
40177, 40189, 40193, 40213, 40231, 40237, 40241, 40253, 40277, 40283,
40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433,
40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531,
40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639,
40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787,
40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867,
40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973,
40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081,
41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183,
41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257,
41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387,
41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507,
41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603,
41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669,
41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801,
41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897,
41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981,
41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073,
42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187,
42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283,
42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379,
42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457,
42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557,
42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677,
42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743,
42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841,
42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953,
42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 43049, 43051,
43063, 43067, 43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189,
43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319,
43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451,
43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579,
43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661,
43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783,
43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933,
43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017,
44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101,
44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201,
44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279,
44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449,
44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537,
44543, 44549, 44563, 44579, 44587, 44617, 44621, 44623, 44633, 44641,
44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741,
44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843,
44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953,
44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077,
45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181,
45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307,
45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403,
45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533,
45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641,
45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757,
45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853,
45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979,
45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099,
46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 46187, 46199,
46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309,
46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447,
46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549,
46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643,
46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747,
46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831,
46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993,
46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119,
47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221,
47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317,
47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419,
47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527,
47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629,
47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717,
47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819,
47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939,
47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049,
48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179,
48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299,
48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409,
48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497,
48523, 48527, 48533, 48539, 48541, 48563, 48571, 48589, 48593, 48611,
48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733,
48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817,
48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907,
48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033,
49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123,
49139, 49157, 49169, 49171, 49177, 49193, 49199, 49201, 49207, 49211,
49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339,
49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433,
49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537,
49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663,
49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757,
49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853,
49871, 49877, 49891, 49919, 49921, 49927, 49937, 49939, 49943, 49957,
49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069,
50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147,
50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273,
50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377,
50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503,
50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593,
50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753,
50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867,
50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971,
50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109,
51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203,
51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329,
51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421,
51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487,
51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593,
51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683,
51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803,
51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899,
51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009,
52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127,
52147, 52153, 52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237,
52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361,
52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501,
52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579,
52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709,
52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813,
52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919,
52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017,
53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117,
53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231,
53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327,
53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441,
53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593,
53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657,
53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783,
53791, 53813, 53819, 53831, 53849, 53857, 53861, 53881, 53887, 53891,
53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993,
54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121,
54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269,
54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367,
54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443,
54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541,
54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629,
54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751,
54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877,
54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983,
55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103,
55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217,
55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337,
55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457,
55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603,
55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673,
55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793,
55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849,
55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949,
55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087,
56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179,
56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299,
56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431,
56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503,
56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599,
56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711,
56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809,
56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909,
56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989,
56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097,
57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191,
57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283,
57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389,
57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529,
57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653,
57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737,
57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839,
57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947,
57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061,
58067, 58073, 58099, 58109, 58111, 58129, 58147, 58151, 58153, 58169,
58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237,
58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379,
58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453,
58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601,
58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711,
58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889,
58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967,
58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053,
59063, 59069, 59077, 59083, 59093, 59107, 59113, 59119, 59123, 59141,
59149, 59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233,
59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359,
59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443,
59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557,
59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659,
59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747,
59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887,
59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029,
60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127,
60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251,
60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353,
60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497,
60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623,
60631, 60637, 60647, 60649, 60659, 60661, 60679, 60689, 60703, 60719,
60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811,
60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919,
60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043,
61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169,
61211, 61223, 61231, 61253, 61261, 61283, 61291, 61297, 61331, 61333,
61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441,
61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543,
61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631,
61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717,
61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861,
61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981,
61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071,
62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189,
62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303,
62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459,
62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549,
62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653,
62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773,
62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903,
62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989,
63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127,
63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281,
63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367,
63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463,
63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559,
63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647,
63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719,
63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809,
63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929,
63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067,
64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189,
64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319,
64327, 64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453,
64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601,
64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693,
64709, 64717, 64747, 64763, 64781, 64783, 64793, 64811, 64817, 64849,
64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937,
64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063,
65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147,
65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267,
65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381,
65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497,
65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581,
65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677,
65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761,
65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867,
65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983,
65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103,
66107, 66109, 66137, 66161, 66169, 66173, 66179, 66191, 66221, 66239,
66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377,
66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499,
66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593,
66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721,
66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841,
66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943,
66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049,
67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153,
67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231,
67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369,
67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 67453,
67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547,
67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651,
67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763,
67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867,
67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961,
67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087,
68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213,
68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371,
68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489,
68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597,
68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713,
68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819,
68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917,
68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061,
69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191,
69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263,
69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403,
69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493,
69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691,
69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821,
69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929,
69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039,
70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139,
70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223,
70229, 70237, 70241, 70249, 70271, 70289, 70297, 70309, 70313, 70321,
70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451,
70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549,
70571, 70573, 70583, 70589, 70607, 70619, 70621, 70627, 70639, 70657,
70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793,
70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901,
70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981,
70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089,
71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209,
71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327,
71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389,
71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473,
71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593,
71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 71713,
71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843,
71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933,
71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031,
72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109,
72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229,
72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341,
72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481,
72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617,
72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701,
72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823,
72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923,
72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013,
73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127,
73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303,
73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417,
73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529,
73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613,
73637, 73643, 73651, 73673, 73679, 73681, 73693, 73699, 73709, 73721,
73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859,
73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973,
73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099,
74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197,
74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297,
74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411,
74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521,
74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611,
74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731,
74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843,
74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929,
74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079,
75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209,
75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307,
75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401,
75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533,
75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619,
75629, 75641, 75653, 75659, 75679, 75683, 75689, 75703, 75707, 75709,
75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821,
75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979,
75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081,
76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207,
76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303,
76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441,
76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541,
76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667,
76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781,
76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907,
76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023,
77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153,
77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261,
77263, 77267, 77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351,
77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477,
77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551,
77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641,
77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723,
77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839,
77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977,
77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101,
78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193,
78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311,
78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479,
78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571,
78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697,
78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803,
78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901,
78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063,
79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181,
79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283,
79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393,
79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531,
79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621,
79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699,
79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841,
79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 79907, 79939,
79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051,
80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173,
80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263,
80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369,
80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513,
80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629,
80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713,
80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809,
80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923,
80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023,
81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101,
81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 81223,
81233, 81239, 81281, 81283, 81293, 81299, 81307, 81331, 81343, 81349,
81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463,
81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569,
81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689,
81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799,
81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929,
81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009,
82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139,
82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219,
82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339,
82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469,
82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559,
82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651,
82657, 82699, 82721, 82723, 82727, 82729, 82757, 82759, 82763, 82781,
82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891,
82903, 82913, 82939, 82963, 82981, 82997, 83003, 83009, 83023, 83047,
83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177,
83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267,
83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399,
83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471,
83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609,
83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719,
83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869,
83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987,
84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127,
84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221,
84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319,
84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437,
84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521,
84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659,
84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761,
84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913,
84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027,
85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121,
85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237,
85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363,
85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469,
85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601,
85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691,
85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829,
85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933,
85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111,
86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197,
86201, 86209, 86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291,
86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371,
86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477,
86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587,
86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743,
86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861,
86869, 86923, 86927, 86929, 86939, 86951, 86959, 86969, 86981, 86993,
87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119,
87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223,
87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323,
87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473,
87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553,
87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641,
87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721,
87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853,
87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961,
87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079,
88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259,
88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411,
88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 88523, 88547,
88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667,
88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801,
88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873,
88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003,
89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087,
89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209,
89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317,
89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431,
89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527,
89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627,
89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759,
89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849,
89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963,
89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031,
90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149,
90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239,
90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373,
90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481,
90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617,
90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709,
90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847,
90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977,
90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121,
91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193,
91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303,
91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411,
91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529,
91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673,
91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807,
91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939,
91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033,
92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173,
92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243,
92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363,
92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431,
92459, 92461, 92467, 92479, 92489, 92503, 92507, 92551, 92557, 92567,
92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669,
92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753,
92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849,
92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951,
92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083,
93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179,
93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281,
93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383,
93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497,
93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607,
93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787,
93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911,
93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997,
94007, 94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111,
94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253,
94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349,
94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447,
94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559,
94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687,
94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793,
94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903,
94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021,
95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111,
95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231,
95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317,
95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441,
95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539,
95549, 95561, 95569, 95581, 95597, 95603, 95617, 95621, 95629, 95633,
95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773,
95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873,
95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971,
95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097,
96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223,
96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331,
96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461,
96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581,
96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731,
96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799,
96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931,
96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021,
97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169,
97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303,
97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429,
97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549,
97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649,
97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813,
97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883,
97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011,
98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 98143, 98179,
98207, 98213, 98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317,
98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411,
98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507,
98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639,
98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737,
98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873,
98887, 98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947,
98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053,
99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139,
99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259,
99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397,
99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529,
99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643,
99661, 99667, 99679, 99689, 99707, 99709, 99713, 99719, 99721, 99733,
99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839,
99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971,
99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109,
100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237,
100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361,
100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469,
100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549,
100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699,
100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823,
100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957,
100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089,
101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173,
101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281,
101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383,
101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501,
101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599,
101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719,
101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 101833,
101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929,
101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023,
102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107,
102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217,
102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317,
102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451,
102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551,
102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667,
102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811,
102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931,
102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079,
103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217,
103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387,
103391, 103393, 103399, 103409, 103421, 103423, 103451, 103457, 103471, 103483,
103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591,
103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703,
103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867,
103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981,
103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059,
104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173,
104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297,
104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399,
104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543,
104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659,
104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729,
)
|
"use strict";

// Global application instance shared by setup() and draw().
var app;

/**
 * Sketch entry point: create the App canvas and start loading resources.
 * draw() will spin on app.isloaded until loading completes.
 */
function setup() {
    app = new App("Curtains with holes 2", 540, 540, "svg");
    // loadResources: first parameter is a comma-separated list of image
    // names, second (optional) a comma-separated list of sound names.
    app.loadResources("mosterd.png");
}
/**
 * Per-frame render callback.
 *
 * Waits for resources to finish loading, lazily (re)builds the palette
 * from the loaded image, lazily creates the Project, then dispatches on
 * the current scene number.
 */
function draw() {
    if (!app.isloaded) {
        println("loading resources ...");
        return;
    }

    // Build the palette from the first loaded image the first time through
    // (or whenever it has been replaced by a differently-named palette).
    if (app.pal.name != "beautiful") {
        app.pal = new Palette(7, "beautiful");
        app.pal.fromImage(app.images[0], 5);
        app.pal.sortImgColors();
    }

    if (app.isnot(app.project)) {
        // First frame after loading: create the project and start at scene 0.
        app.project = new Project();
        app.scene = 0;
    } else if (app.scene === -1) {
        // Debug scene: show the palette swatches and the sampled image colors.
        background(app.pal.colors[1]);
        app.pal.show();
        app.pal.showImgColors();
        app.wait(50);
    } else if (app.scene === 0) {
        // Main scene: draw the project on a white background.
        background(255);
        app.project.draw();
    } else {
        // Any other scene number: idle.
        app.wait();
    }

    app.info.show();
    //function(skip, maxframes, show)
    //app.gifmaker.check(1, 160, false);
}
|
import { Cartesian2 } from '../../Source/Cesium.js';
import { Cartesian3 } from '../../Source/Cesium.js';
import { Cartesian4 } from '../../Source/Cesium.js';
import { Math as CesiumMath } from '../../Source/Cesium.js';
import { Matrix4 } from '../../Source/Cesium.js';
import { PerspectiveFrustum } from '../../Source/Cesium.js';
import createPackableSpecs from '../createPackableSpecs.js';
describe('Core/PerspectiveFrustum', function() {
// Shared fixtures, rebuilt before every spec:
//   frustum - a canonical frustum: near=1, far=2, square aspect ratio,
//             60-degree field of view.
//   planes  - the six culling-volume planes of that frustum, computed at
//             the origin looking down -Z with +Y up. Index order used by
//             the specs below: 0=left, 1=right, 2=bottom, 3=top, 4=near,
//             5=far.
var frustum, planes;
beforeEach(function() {
frustum = new PerspectiveFrustum();
frustum.near = 1.0;
frustum.far = 2.0;
frustum.aspectRatio = 1.0;
// 60 degrees.
frustum.fov = (Math.PI) / 3;
planes = frustum.computeCullingVolume(new Cartesian3(), Cartesian3.negate(Cartesian3.UNIT_Z, new Cartesian3()), Cartesian3.UNIT_Y).planes;
});
// Every option passed to the constructor must land on the corresponding
// property unchanged.
it('constructs', function() {
    var options = {
        fov : 1.0,
        aspectRatio : 2.0,
        near : 3.0,
        far : 4.0,
        xOffset : 5.0,
        yOffset : 6.0
    };
    var f = new PerspectiveFrustum(options);
    ['fov', 'aspectRatio', 'near', 'far', 'xOffset', 'yOffset'].forEach(function(name) {
        expect(f[name]).toEqual(options[name]);
    });
});

// With no options, the documented defaults apply: fov/aspectRatio are
// left undefined, near/far/offsets get their default values.
it('default constructs', function() {
    var f = new PerspectiveFrustum();
    expect(f.fov).toBeUndefined();
    expect(f.aspectRatio).toBeUndefined();
    expect(f.near).toEqual(1.0);
    expect(f.far).toEqual(500000000.0);
    expect(f.xOffset).toEqual(0.0);
    expect(f.yOffset).toEqual(0.0);
});
// Helper: assign `value` to frustum property `prop` and assert that
// reading projectionMatrix then throws a developer error.
function expectProjectionMatrixToThrow(prop, value) {
    frustum[prop] = value;
    expect(function() {
        return frustum.projectionMatrix;
    }).toThrowDeveloperError();
}

// fov must lie in (0, PI): both a negative value and a full turn throw.
it('out of range fov causes an exception', function() {
    expectProjectionMatrixToThrow('fov', -1.0);
    expectProjectionMatrixToThrow('fov', CesiumMath.TWO_PI);
});

it('negative aspect ratio throws an exception', function() {
    expectProjectionMatrixToThrow('aspectRatio', -1.0);
});

it('out of range near plane throws an exception', function() {
    expectProjectionMatrixToThrow('near', -1.0);
});

it('negative far plane throws an exception', function() {
    expectProjectionMatrixToThrow('far', -1.0);
});

// computeCullingVolume requires position, direction and up; dropping each
// trailing argument in turn must raise a developer error.
it('computeCullingVolume with no position throws an exception', function() {
    expect(function() {
        return frustum.computeCullingVolume();
    }).toThrowDeveloperError();
});

it('computeCullingVolume with no direction throws an exception', function() {
    expect(function() {
        return frustum.computeCullingVolume(new Cartesian3());
    }).toThrowDeveloperError();
});

it('computeCullingVolume with no up throws an exception', function() {
    expect(function() {
        return frustum.computeCullingVolume(new Cartesian3(), new Cartesian3());
    }).toThrowDeveloperError();
});
// Side planes of the 60-degree, square-aspect fixture frustum: unit
// normals with components sqrt(3)/2 and -1/2 (Cartesian4: normal xyz +
// distance w). Table entries: [spec name, plane index, expected plane].
var halfSqrt3 = Math.sqrt(3.0) / 2.0;
[
    ['get frustum left plane', 0, new Cartesian4(halfSqrt3, 0.0, -0.5, 0.0)],
    ['get frustum right plane', 1, new Cartesian4(-halfSqrt3, 0.0, -0.5, 0.0)],
    ['get frustum bottom plane', 2, new Cartesian4(0.0, halfSqrt3, -0.5, 0.0)],
    ['get frustum top plane', 3, new Cartesian4(0.0, -halfSqrt3, -0.5, 0.0)]
].forEach(function(testCase) {
    it(testCase[0], function() {
        expect(planes[testCase[1]]).toEqualEpsilon(testCase[2], CesiumMath.EPSILON14);
    });
});

// Near and far planes are compared exactly (no epsilon).
it('get frustum near plane', function() {
    expect(planes[4]).toEqual(new Cartesian4(0.0, 0.0, -1.0, -1.0));
});

it('get frustum far plane', function() {
    expect(planes[5]).toEqual(new Cartesian4(0.0, 0.0, 1.0, 2.0));
});

// For the 60-degree fixture the expected value is ~1.1547
// (numerically 2 * tan(fov / 2) — confirm against the implementation).
it('get sseDenominator', function() {
    expect(frustum.sseDenominator).toEqualEpsilon(1.1547, CesiumMath.EPSILON5);
});
// Projection specs: the frustum's matrices must match the Matrix4 factory
// functions built from the same parameters.
it('get perspective projection matrix', function() {
    var projectionMatrix = frustum.projectionMatrix;
    var expected = Matrix4.computePerspectiveFieldOfView(frustum.fovy, frustum.aspectRatio, frustum.near, frustum.far, new Matrix4());
    expect(projectionMatrix).toEqualEpsilon(expected, CesiumMath.EPSILON6);
});
it('get infinite perspective matrix', function() {
    // Rebuild the off-center bounds from fovy/aspectRatio the same way the
    // frustum does internally.
    var top = frustum.near * Math.tan(0.5 * frustum.fovy);
    var bottom = -top;
    var right = frustum.aspectRatio * top;
    var left = -right;
    var near = frustum.near;
    var expected = Matrix4.computeInfinitePerspectiveOffCenter(left, right, bottom, top, near, new Matrix4());
    expect(frustum.infiniteProjectionMatrix).toEqual(expected);
});
// getPixelDimensions argument validation: (width, height, distance,
// pixelRatio, result); width/height/pixelRatio must be defined and > 0.
it('get pixel dimensions throws without canvas height', function() {
    expect(function() {
        return frustum.getPixelDimensions(1.0, undefined, 1.0, 1.0, new Cartesian2());
    }).toThrowDeveloperError();
});
it('get pixel dimensions throws without canvas width', function() {
    expect(function() {
        return frustum.getPixelDimensions(undefined, 1.0, 1.0, 1.0, new Cartesian2());
    }).toThrowDeveloperError();
});
it('get pixel dimensions throws with canvas width less than or equal to zero', function() {
    expect(function() {
        return frustum.getPixelDimensions(0.0, 1.0, 1.0, 1.0, new Cartesian2());
    }).toThrowDeveloperError();
});
it('get pixel dimensions throws with canvas height less than or equal to zero', function() {
    expect(function() {
        return frustum.getPixelDimensions(1.0, 0.0, 1.0, 1.0, new Cartesian2());
    }).toThrowDeveloperError();
});
it('get pixel dimensions throws without pixel ratio', function() {
    expect(function() {
        return frustum.getPixelDimensions(1.0, 1.0, 1.0, undefined, new Cartesian2());
    }).toThrowDeveloperError();
});
it('get pixel dimensions throws with pixel ratio less than or equal to zero', function() {
    expect(function() {
        return frustum.getPixelDimensions(1.0, 1.0, 1.0, 0.0, new Cartesian2());
    }).toThrowDeveloperError();
});
// Results must agree with the internal off-center frustum's implementation.
it('get pixel dimensions', function() {
    var dimensions = new Cartesian2(1.0, 1.0);
    var pixelRatio = 1.0;
    var distance = 1.0;
    var pixelSize = frustum.getPixelDimensions(dimensions.x, dimensions.y, distance, pixelRatio, new Cartesian2());
    var expected = frustum._offCenterFrustum.getPixelDimensions(dimensions.x, dimensions.y, distance, pixelRatio, new Cartesian2());
    expect(pixelSize.x).toEqual(expected.x);
    expect(pixelSize.y).toEqual(expected.y);
});
it('get pixel dimensions with pixel ratio', function() {
    var dimensions = new Cartesian2(1.0, 1.0);
    var pixelRatio = 2.0;
    var distance = 1.0;
    var pixelSize = frustum.getPixelDimensions(dimensions.x, dimensions.y, distance, pixelRatio, new Cartesian2());
    var expected = frustum._offCenterFrustum.getPixelDimensions(dimensions.x, dimensions.y, distance, pixelRatio, new Cartesian2());
    expect(pixelSize.x).toEqual(expected.x);
    expect(pixelSize.y).toEqual(expected.y);
});
// Equality/cloning specs. frustum2 mirrors the shared fixture's parameters
// exactly; frustum3/frustum4 differ by small deltas to exercise the epsilon.
it('equals', function() {
    var frustum2 = new PerspectiveFrustum();
    frustum2.near = 1.0;
    frustum2.far = 2.0;
    frustum2.fov = (Math.PI) / 3.0;
    frustum2.aspectRatio = 1.0;
    expect(frustum.equals(frustum2)).toEqual(true);
});
it('equals epsilon', function() {
    var frustum2 = new PerspectiveFrustum();
    frustum2.near = 1.0;
    frustum2.far = 2.0;
    frustum2.fov = (Math.PI) / 3.0;
    frustum2.aspectRatio = 1.0;
    expect(frustum.equalsEpsilon(frustum2, CesiumMath.EPSILON7)).toEqual(true);
    // Deltas of 0.01 fit inside EPSILON1...
    var frustum3 = new PerspectiveFrustum();
    frustum3.near = 1.01;
    frustum3.far = 2.01;
    frustum3.fov = ((Math.PI) / 3.0) + 0.01;
    frustum3.aspectRatio = 1.01;
    expect(frustum.equalsEpsilon(frustum3, CesiumMath.EPSILON1)).toEqual(true);
    // ...but a 0.1 aspect-ratio delta must fail at EPSILON2.
    var frustum4 = new PerspectiveFrustum();
    frustum4.near = 1.0;
    frustum4.far = 2.0;
    frustum4.fov = (Math.PI) / 3.0;
    frustum4.aspectRatio = 1.1;
    expect(frustum.equalsEpsilon(frustum4, CesiumMath.EPSILON2)).toEqual(false);
});
it('equals undefined', function() {
    expect(frustum.equals()).toEqual(false);
});
it('throws with undefined frustum parameters', function() {
    // A default-constructed frustum has undefined fov/aspectRatio.
    var frustum = new PerspectiveFrustum();
    expect(function() {
        return frustum.infiniteProjectionMatrix;
    }).toThrowDeveloperError();
});
it('clone', function() {
    var frustum2 = frustum.clone();
    expect(frustum).toEqual(frustum2);
});
it('clone with result parameter', function() {
    var result = new PerspectiveFrustum();
    var frustum2 = frustum.clone(result);
    // clone(result) must reuse the supplied object, not allocate a new one.
    expect(frustum2).toBe(result);
    expect(frustum).toEqual(frustum2);
});
// Shared pack/unpack specs: the array is the expected packed representation
// [fov, aspectRatio, near, far, xOffset, yOffset].
createPackableSpecs(PerspectiveFrustum, new PerspectiveFrustum({
    fov : 1.0,
    aspectRatio : 2.0,
    near : 3.0,
    far : 4.0,
    xOffset : 5.0,
    yOffset : 6.0
}), [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]);
});
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the app: an ``InputRequest`` plus its related
    ``RequestHeader`` key/value rows (linked via ``headers``)."""

    # First migration of the app: nothing to depend on.
    dependencies = []

    operations = [
        migrations.CreateModel(
            name='InputRequest',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('origin', models.CharField(max_length=15, null=True, blank=True)),
                ('url', models.URLField(null=True, blank=True)),
                ('data', models.CharField(max_length=10000, null=True, blank=True)),
                (
                    'method',
                    models.CharField(
                        default=b'UNKNOWN',
                        max_length=10,
                        blank=True,
                        choices=[(b'UNKNOWN', b'UNKNOWN'), (b'GET', b'GET'), (b'POST', b'POST')],
                    ),
                ),
                ('date', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='RequestHeader',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('key', models.CharField(max_length=10000, null=True, blank=True)),
                ('value', models.CharField(max_length=10000, null=True, blank=True)),
                ('request', models.ForeignKey(related_name='headers', blank=True, to='jass.InputRequest', null=True)),
            ],
        ),
    ]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import BaseAPI
from Login import Logincontrol
import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
import json
import time
import json
class Account(object):
    """Zhihu profile scraper bound to the shared logged-in session.

    ``set_info`` relies on a module-level ``person`` Mongo collection being
    created by the caller (see the ``__main__`` block) before it is invoked.
    """

    def __init__(self):
        super(Account, self).__init__()
        # Shared requests session carrying the login cookies (set up by Login).
        self.session = BaseAPI.get_session()

    def get_current_userInfo(self):
        """Return the logged-in user's ga_vars dict plus their profile address.

        The profile address (``/people/<id>``) is scraped from the top-nav
        link on the settings page and stored under ``"user_address"``.
        """
        url = "https://www.zhihu.com/settings/profile"
        login_page = self.session.get(url, headers=BaseAPI.get_headers(), allow_redirects=False)
        html = login_page.text
        print(login_page)
        pattern = r'<a href="(.*?)" class="zu-top-nav-userinfo ">'
        result = re.findall(pattern, html)
        soup = BeautifulSoup(html, 'lxml')
        title = soup.find('script', {'data-name': 'ga_vars', 'class': 'json-inline'})
        user_info = json.loads(title.contents[0])
        user_info["user_address"] = result[0]
        return user_info

    @staticmethod
    def _span_text(soup, css_class):
        """Text of the first <span> with *css_class*, or the string 'None'.

        The literal string 'None' (not the None object) is preserved because
        the scraped dicts are stored as-is in Mongo.
        """
        node = soup.find('span', {'class': css_class})
        return 'None' if node is None else node.string

    def get_user_deatailinfo(self, user_address):
        """Scrape the public profile page at *user_address* and return a flat dict.

        *user_address* is a path like ``/people/<id>``; the leading 8
        characters (``/people/``) are stripped to obtain the uid.
        """
        user_url = 'https://www.zhihu.com' + user_address
        userID = user_address[8:]  # strip the '/people/' prefix
        user_page = self.session.get(user_url, headers=BaseAPI.get_headers(), allow_redirects=False)
        soup = BeautifulSoup(user_page.content, 'lxml')
        name = soup.find_all('span', {'class': 'name'})[1].string
        # Optional profile fields all share the "<...> item" span pattern.
        location = self._span_text(soup, 'location item')
        business = self._span_text(soup, 'business item')
        employment = self._span_text(soup, 'employment item')
        position = self._span_text(soup, 'position item')
        education = self._span_text(soup, 'education item')
        major = self._span_text(soup, 'education-extra item')
        # Gender is encoded as the css class of the checked radio input.
        gender_node = soup.find('input', {'checked': 'checked'})
        gender = 'None' if gender_node is None else gender_node['class'][0]
        temp = soup.find('img', {'alt': name})
        # Drop the size suffix from the avatar URL to get the full-size image
        # (assumes a fixed "<hash>_x.jpg"-style suffix — TODO confirm).
        avatar_url = temp['src'][0:-6] + temp['src'][-4:]
        agree = int(soup.find('span', {'class': 'zm-profile-header-user-agree'}).strong.string)
        thanks = int(soup.find('span', {'class': 'zm-profile-header-user-thanks'}).strong.string)
        infolist = soup.find_all('a', {'class': 'item'})
        asks = int(infolist[1].span.string)
        answers = int(infolist[2].span.string)
        posts = int(infolist[3].span.string)
        # collections/logs tabs may be missing; keep the original fallback of
        # an empty string, but catch only parsing-related errors instead of a
        # bare ``except:`` (which also swallowed KeyboardInterrupt).
        collections = ""
        try:
            collections = int(infolist[4].span.string)
        except (IndexError, AttributeError, TypeError, ValueError):
            print(infolist)
        logs = ""
        try:
            logs = int(infolist[5].span.string)
        except (IndexError, AttributeError, TypeError, ValueError):
            print(infolist)
        followees = int(infolist[-2].strong.string)
        followers = int(infolist[-1].strong.string)
        # Hoisted: the original called find_all twice for the same spans.
        gray_spans = soup.find_all('span', {'class': 'zg-gray-normal'})
        scantime = int(gray_spans[-1].strong.string)
        info = {
            'name': name,
            'uid': userID,
            'location': location,
            'business': business,
            'gender': gender,
            'employment': employment,
            'position': position,
            'education': education,
            'major': major,
            'avatar_url': avatar_url,  # fix: was computed but never stored
            'agree': agree,
            'thanks': thanks,
            'asks': asks,
            'answers': answers,
            'posts': posts,
            'collections': collections,
            'logs': logs,
            # fix: the followees count was computed but dropped; callers
            # replace this key with the address list before calling set_info().
            'followees': followees,
            'followers': followers,
            'scantime': scantime,
        }
        # fix: the original also ran ``user_info.update(info)`` here, mutating
        # a module-global that only exists when run as a script (NameError as
        # a library) and whose merge had no observable effect — removed.
        return info

    def get_followees(self, user_address):
        """Return profile addresses ('/people/<id>') of the user's followees
        (first page only — pagination is not handled)."""
        followees_url = "https://www.zhihu.com" + user_address + "/followees"
        user_page = self.session.get(followees_url, headers=BaseAPI.get_headers(), allow_redirects=False)
        soup = BeautifulSoup(user_page.content, "lxml")
        follist = soup.select('div[class*="zm-profile-card"]')
        return [card.a["href"] for card in follist]

    def set_info(self, user_infos):
        """Persist each user, then recursively crawl and persist their followees.

        Each entry of *user_infos* must carry a ``"followees"`` list of
        profile addresses. NOTE(review): there is still no visited set or
        depth limit, so a cycle in the follow graph recurses without bound.
        """
        # fix: without this guard the original recursed forever once a level
        # produced no new users (set_info([]) called itself unconditionally).
        if not user_infos:
            return
        array = []
        for user_info in user_infos:
            followee_addresses = user_info["followees"]
            # Upsert keyed on uid ('person' is the module-level collection).
            person.update({'uid': user_info['uid']}, user_info, True)
            for followees_address in followee_addresses:
                user_deatailinfo = self.get_user_deatailinfo(followees_address)
                user_deatailinfo["followees"] = self.get_followees(followees_address)
                array.append(user_deatailinfo)
                print(user_deatailinfo)
                person.update({'uid': user_deatailinfo['uid']}, user_deatailinfo, True)
                time.sleep(2)  # throttle to avoid tripping rate limits
        self.set_info(array)
if __name__ == '__main__':
    # Log in first so Account's shared session carries the auth cookies.
    lc = Logincontrol()
    lc.login()
    # NOTE(review): 'global' at module level is a no-op; the assignment below
    # creates the module global either way.
    global person
    # Mongo collection used as the upsert target by Account.set_info.
    person = MongoClient().rpv.person
    ac = Account()
    # Seed the crawl with the logged-in user's own profile.
    user_info = ac.get_current_userInfo()
    user_deatailinfo = ac.get_user_deatailinfo(user_info["user_address"])
    user_deatailinfo.update(user_info)
    user_deatailinfo["followees"] = ac.get_followees(user_info["user_address"])
    array = []
    array.append(user_deatailinfo)
    # Kick off the recursive followee crawl (runs until interrupted).
    ac.set_info(array)
    # person.update({'uid':user_address[8:]},{"$pushAll":{"followees":fol_userids}})
|
window.onload = () => {
    // Wire the save button, then populate the form from stored settings.
    document
        .getElementById('save-button')
        .addEventListener('click', () => storeSetting())
    checkSetting()
}
function storeSetting() {
    // Read one checkbox by id; language/update settings are stored inverted
    // ("Disabled" flags), the debug flag is stored as-is.
    const isChecked = (id) => document.getElementById(id).checked
    const setting = {
        EnglishDisabled: !isChecked('lang-english-checkbox'),
        KoreanDisabled: !isChecked('lang-korean-checkbox'),
        JapaneseDisabled: !isChecked('lang-japanese-checkbox'),
        ChineseDisabled: !isChecked('lang-chinese-checkbox'),
        debugModeEnabled: isChecked('debug-checkbox'),
        updateNotificationDisabled: !isChecked('update-noti-checkbox'),
    }
    console.log(setting)
    chrome.storage.sync.set(setting, () => {
        console.log('Stored', setting)
        // Flash "Saved!" on the button for two seconds as feedback.
        const saveButton = document.getElementById('save-button')
        saveButton.innerHTML = 'Saved!'
        setTimeout(() => {
            saveButton.innerHTML = 'Save'
        }, 2000)
    })
}
function checkSetting() {
    // Restore saved preferences into the form. Missing keys come back as
    // undefined, so each "Disabled" checkbox defaults to checked and the
    // debug checkbox defaults to unchecked — same as the original logic.
    const keys = [
        'EnglishDisabled',
        'KoreanDisabled',
        'JapaneseDisabled',
        'ChineseDisabled',
        'debugModeEnabled',
        'updateNotificationDisabled',
    ]
    chrome.storage.sync.get(keys, (result) => {
        const setChecked = (id, value) => {
            document.getElementById(id).checked = value
        }
        setChecked('lang-english-checkbox', !result.EnglishDisabled)
        setChecked('lang-korean-checkbox', !result.KoreanDisabled)
        setChecked('lang-japanese-checkbox', !result.JapaneseDisabled)
        setChecked('lang-chinese-checkbox', !result.ChineseDisabled)
        setChecked('debug-checkbox', result.debugModeEnabled)
        setChecked('update-noti-checkbox', !result.updateNotificationDisabled)
    })
}
|
#
# Licensed Materials - Property of IBM
#
# (c) Copyright IBM Corp. 2007-2008
#
import unittest, sys
import ibm_db
import config
from testfunctions import IbmDbTestFunctions
class IbmDbTestCase(unittest.TestCase):
    """Regression test 133: executing a prepared INSERT whose string
    parameters exceed the column lengths must report a truncation error,
    and the row must not end up in the table."""

    def test_133_ExecuteLongInputParams(self):
        # assert_expectf captures run_test_133's stdout and compares it
        # against the platform-specific expected-output blocks below
        # (__LUW_EXPECTED__ etc.), so every print in run_test_133 is part
        # of the test contract.
        obj = IbmDbTestFunctions()
        obj.assert_expectf(self.run_test_133)

    def run_test_133(self):
        conn = ibm_db.connect(config.database, config.user, config.password)
        if (not conn):
            print("Connection failed.")
            return 0

        # Autocommit off so the test can roll back any accidental insert.
        ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
        print("Starting test ...")
        res = ''
        sql = "INSERT INTO animals (id, breed, name, weight) VALUES (?, ?, ?, ?)"
        try:
            stmt = ibm_db.prepare(conn, sql)
            # Oversized breed/name values: execute is expected to raise with
            # a string-truncation SQLSTATE (22001), handled below.
            res = ibm_db.execute(stmt, (128, 'hacker of human and technological nature', 'Wez the ruler of all things PECL', 88.3))

            stmt = ibm_db.prepare(conn, "SELECT breed, name FROM animals WHERE id = ?")
            res = ibm_db.execute(stmt, (128,))
            row = ibm_db.fetch_assoc(stmt)
            for i in row:
                print(i)
            ibm_db.rollback(conn)
            print("Done")
        except:
            # Bare except is deliberate here: the expected-output blocks
            # assert the SQLSTATE/message printed for the driver exception.
            print("SQLSTATE: %s" % ibm_db.stmt_error(stmt))
            print("Message: %s" % ibm_db.stmt_errormsg(stmt))

            # Verify the failed INSERT left no row behind: the SELECT should
            # find nothing (SQLSTATE 02000 in the expected output).
            try:
                stmt = ibm_db.prepare(conn, "SELECT breed, name FROM animals WHERE id = ?")
                res = ibm_db.execute(stmt, (128,))
                row = ibm_db.fetch_assoc(stmt)
                if (row):
                    for i in row:
                        print(i)
                print(res)
                print("SQLSTATE: %s" % ibm_db.stmt_error(stmt))
                print("Message: %s" % ibm_db.stmt_errormsg(stmt))
            except:
                print("An Exception is not expected")
                print("SQLSTATE: %s" % ibm_db.stmt_error(stmt))
                print("Message: %s" % ibm_db.stmt_errormsg(stmt))

        ibm_db.rollback(conn)
        print("Done")
#__END__
#__LUW_EXPECTED__
#Starting test ...
#
#SQLSTATE: 22001
#Message: [IBM][CLI Driver] CLI0109E String data right truncation. SQLSTATE=22001 SQLCODE=-99999
#True
#SQLSTATE: 02000
#Message: [IBM][CLI Driver][DB2/%s] SQL0100W No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table. SQLSTATE=02000 SQLCODE=100
#Done
#__ZOS_EXPECTED__
#Starting test ...
#
#SQLSTATE: 22001
#Message: [IBM][CLI Driver] CLI0109E String data right truncation. SQLSTATE=22001 SQLCODE=-99999
#True
#SQLSTATE: 02000
#Message: [IBM][CLI Driver][DB2] SQL0100W No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table. SQLSTATE=02000 SQLCODE=100
#Done
#__SYSTEMI_EXPECTED__
#Starting test ...
#
#SQLSTATE: 22001
#Message: [IBM][CLI Driver] CLI0109E String data right truncation. SQLSTATE=22001 SQLCODE=-99999
#True
#SQLSTATE: 02000
#Message: [IBM][CLI Driver][AS] SQL0100W No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table. SQLSTATE=02000 SQLCODE=100
#Done
#__IDS_EXPECTED__
#Starting test ...
#
#SQLSTATE: 22001
#Message: [IBM][CLI Driver][IDS%s] Value exceeds string column length. SQLCODE=-1279
#True
#SQLSTATE: 02000
#Message: [IBM][CLI Driver][IDS%s] SQL0100W No row was found for FETCH, UPDATE or DELETE; or the result of a query is an empty table. SQLSTATE=02000 SQLCODE=100
#Done
|
# Wires the bot's middlewares at import time.
from .lib.middleware import I18nMiddleware, SpyMiddleware
from .config import LOCALES_DIR
from .bot import dp

# i18n middleware: gettext domain "bot", translations under LOCALES_DIR,
# Russian ("ru") as the fallback locale.
i18n = I18nMiddleware("bot", LOCALES_DIR, default="ru")
dp.middleware.setup(i18n)
dp.middleware.setup(SpyMiddleware())

# Translation alias imported by handlers as `_("text")`.
# NOTE(review): presumably `t` is the middleware's translate helper — confirm
# against I18nMiddleware's API.
_ = i18n.t
|
module.exports = (data) => {
return {
testMiddleware(req, res, next) {
console.log("testmiddleware")
next();
},
}
}
|
import struct
import zipfile
import numpy as np
import zengl
from PIL import Image
import assets
from window import Window
window = Window(1280, 720)
ctx = zengl.context()

# Texture set extracted from the bundled glTF zip: diffuse (sRGB),
# roughness (linear) and OpenGL-convention normal map.
pack = zipfile.ZipFile(assets.get('metal_plate_1k.gltf.zip'))
img1 = Image.open(pack.open('textures/metal_plate_diff_1k.jpg')).convert('RGBA')
img2 = Image.open(pack.open('textures/metal_plate_rough_1k.jpg')).convert('RGBA')
img3 = Image.open(pack.open('textures/metal_plate_nor_gl_1k.jpg')).convert('RGBA')

texture1 = ctx.image(img1.size, 'rgba8unorm-srgb', img1.tobytes())  # diffuse
texture2 = ctx.image(img2.size, 'rgba8unorm', img2.tobytes())  # roughness
texture3 = ctx.image(img3.size, 'rgba8unorm', img3.tobytes())  # normal map

# 4x multisampled color + depth render targets matching the window size.
image = ctx.image(window.size, 'rgba8unorm', samples=4)
depth = ctx.image(window.size, 'depth24plus', samples=4)
image.clear_value = (0.2, 0.2, 0.2, 1.0)

# A single quad in the XY plane; 11 floats per vertex matching the '3f 3f 2f 3f'
# layout below: position (3), normal (3), uv (2), tangent (3).
vertex_buffer = ctx.buffer(np.array([
    -1.0, -1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0,
    -1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0,
    1.0, -1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0,
    1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0,
], 'f4'))

# Two triangles covering the quad.
index_buffer = ctx.buffer(np.array([0, 2, 1, 1, 2, 3], 'i4'))

# Backing storage for the Common uniform block; the render loop packs
# 132 bytes into it, so the 144-byte size leaves harmless head-room.
uniform_buffer = ctx.buffer(size=144)
pipeline = ctx.pipeline(
vertex_shader='''
#version 330
layout (std140) uniform Common {
mat4 mvp;
vec3 eye_pos;
vec3 light_pos;
};
layout (location = 0) in vec3 in_vert;
layout (location = 1) in vec3 in_norm;
layout (location = 2) in vec2 in_text;
layout (location = 3) in vec3 in_tangent;
out vec3 v_vert;
out vec3 v_norm;
out vec2 v_text;
out vec3 v_tangent;
void main() {
gl_Position = mvp * vec4(in_vert, 1.0);
v_vert = in_vert;
v_norm = in_norm;
v_text = in_text;
v_tangent = in_tangent;
}
''',
fragment_shader='''
#version 330
layout (std140) uniform Common {
mat4 mvp;
vec3 eye_pos;
vec3 light_pos;
};
uniform sampler2D Texture1;
uniform sampler2D Texture2;
uniform sampler2D Texture3;
in vec3 v_vert;
in vec3 v_norm;
in vec2 v_text;
in vec3 v_tangent;
layout (location = 0) out vec4 out_color;
void main() {
vec3 bitangent = cross(v_tangent, v_norm);
mat3 btn = mat3(v_tangent, bitangent, v_norm);
vec3 texture_normal = texture(Texture3, v_text).rgb - 0.5;
vec3 normal = normalize(btn * texture_normal);
float shininess = 32.0;
vec3 light_dir = normalize(light_pos - v_vert);
vec3 eye_dir = normalize(eye_pos - v_vert);
vec3 halfway_dir = normalize(light_dir + eye_dir);
vec3 surface_normal = texture(Texture3, v_text).rgb;
float rought = texture(Texture2, v_text).r;
float spec = pow(max(dot(normal, halfway_dir), 0.0), shininess) * rought;
vec3 color = texture(Texture1, v_text).rgb + vec3(1.0, 1.0, 1.0) * spec;
out_color = vec4(pow(color, vec3(1.0 / 2.2)), 1.0);
}
''',
layout=[
{
'name': 'Common',
'binding': 0,
},
{
'name': 'Texture1',
'binding': 0,
},
{
'name': 'Texture2',
'binding': 1,
},
{
'name': 'Texture3',
'binding': 2,
},
],
resources=[
{
'type': 'uniform_buffer',
'binding': 0,
'buffer': uniform_buffer,
},
{
'type': 'sampler',
'binding': 0,
'image': texture1,
},
{
'type': 'sampler',
'binding': 1,
'image': texture2,
},
{
'type': 'sampler',
'binding': 2,
'image': texture3,
},
],
framebuffer=[image, depth],
topology='triangles',
cull_face='back',
index_buffer=index_buffer,
vertex_buffers=zengl.bind(vertex_buffer, '3f 3f 2f 3f', 0, 1, 2, 3),
vertex_count=index_buffer.size // 4,
)
while window.update():
    # Map the mouse position into [-0.5, 0.5] and drive both the camera and
    # the light from it.
    x, y = window.mouse[0] / window.size[0] - 0.5, window.mouse[1] / window.size[1] - 0.5
    eye_pos = (x * 2.0, y * 2.0, 3.0)
    light_pos = (x * 2.0, y * 2.0, 1.0)
    light_color = (1.0, 1.0, 1.0)
    object_color = (1.0, 0.5, 0.3)
    ambient = 0.1
    shininess = 64.0
    camera = zengl.camera(eye_pos, (0.0, 0.0, 0.0), (0.0, 1.0, 0.0), aspect=window.aspect, fov=45.0)
    # Pack layout: 64-byte mvp, then std140-style vec3s padded to 16 bytes
    # (3f4x) for eye_pos/light_pos/light_color, vec3 object_color, and two
    # floats — 132 bytes total, within the 144-byte buffer.
    # NOTE(review): the shaders' Common block only declares mvp/eye_pos/
    # light_pos, so light_color/object_color/ambient/shininess are written
    # but never read by the GPU — confirm intent.
    uniform_buffer.write(struct.pack(
        '=64s3f4x3f4x3f4x3fff', camera, *eye_pos, *light_pos, *light_color, *object_color, ambient, shininess,
    ))
    image.clear()
    depth.clear()
    pipeline.render()
    image.blit()
|
/*
* Copyright (C) 2009-2010 Christian Hergert <chris@dronelabs.com>
* Copyright © 2010 Codethink Limited
*
* This library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of the
* licence, or (at your option) any later version.
*
* This is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, see <http://www.gnu.org/licenses/>.
*
* Authors: Christian Hergert <chris@dronelabs.com>
* Thiago Santos <thiago.sousa.santos@collabora.co.uk>
* Emmanuele Bassi <ebassi@linux.intel.com>
* Ryan Lortie <desrt@desrt.ca>
*/
#ifndef __G_DATE_TIME_H__
#define __G_DATE_TIME_H__
#if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
#error "Only <glib.h> can be included directly."
#endif
#include <glib/gtimezone.h>
G_BEGIN_DECLS
/**
* G_TIME_SPAN_DAY:
*
* Evaluates to a time span of one day.
*
* Since: 2.26
*/
#define G_TIME_SPAN_DAY (G_GINT64_CONSTANT (86400000000))
/**
* G_TIME_SPAN_HOUR:
*
* Evaluates to a time span of one hour.
*
* Since: 2.26
*/
#define G_TIME_SPAN_HOUR (G_GINT64_CONSTANT (3600000000))
/**
* G_TIME_SPAN_MINUTE:
*
* Evaluates to a time span of one minute.
*
* Since: 2.26
*/
#define G_TIME_SPAN_MINUTE (G_GINT64_CONSTANT (60000000))
/**
* G_TIME_SPAN_SECOND:
*
* Evaluates to a time span of one second.
*
* Since: 2.26
*/
#define G_TIME_SPAN_SECOND (G_GINT64_CONSTANT (1000000))
/**
* G_TIME_SPAN_MILLISECOND:
*
* Evaluates to a time span of one millisecond.
*
* Since: 2.26
*/
#define G_TIME_SPAN_MILLISECOND (G_GINT64_CONSTANT (1000))
/**
* GTimeSpan:
*
* A value representing an interval of time, in microseconds.
*
* Since: 2.26
*/
typedef gint64 GTimeSpan;
/**
* GDateTime:
*
* `GDateTime` is an opaque structure whose members
* cannot be accessed directly.
*
* Since: 2.26
*/
typedef struct _GDateTime GDateTime;
GLIB_AVAILABLE_IN_ALL
void g_date_time_unref (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_ref (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_now (GTimeZone *tz);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_now_local (void);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_now_utc (void);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_from_unix_local (gint64 t);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_from_unix_utc (gint64 t);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_from_timeval_local (const GTimeVal *tv);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_from_timeval_utc (const GTimeVal *tv);
GLIB_AVAILABLE_IN_2_56
GDateTime * g_date_time_new_from_iso8601 (const gchar *text,
GTimeZone *default_tz);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new (GTimeZone *tz,
gint year,
gint month,
gint day,
gint hour,
gint minute,
gdouble seconds);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_local (gint year,
gint month,
gint day,
gint hour,
gint minute,
gdouble seconds);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_new_utc (gint year,
gint month,
gint day,
gint hour,
gint minute,
gdouble seconds);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add (GDateTime *datetime,
GTimeSpan timespan);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_years (GDateTime *datetime,
gint years);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_months (GDateTime *datetime,
gint months);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_weeks (GDateTime *datetime,
gint weeks);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_days (GDateTime *datetime,
gint days);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_hours (GDateTime *datetime,
gint hours);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_minutes (GDateTime *datetime,
gint minutes);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_seconds (GDateTime *datetime,
gdouble seconds);
GLIB_AVAILABLE_IN_ALL
G_GNUC_WARN_UNUSED_RESULT
GDateTime * g_date_time_add_full (GDateTime *datetime,
gint years,
gint months,
gint days,
gint hours,
gint minutes,
gdouble seconds);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_compare (gconstpointer dt1,
gconstpointer dt2);
GLIB_AVAILABLE_IN_ALL
GTimeSpan g_date_time_difference (GDateTime *end,
GDateTime *begin);
GLIB_AVAILABLE_IN_ALL
guint g_date_time_hash (gconstpointer datetime);
GLIB_AVAILABLE_IN_ALL
gboolean g_date_time_equal (gconstpointer dt1,
gconstpointer dt2);
GLIB_AVAILABLE_IN_ALL
void g_date_time_get_ymd (GDateTime *datetime,
gint *year,
gint *month,
gint *day);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_year (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_month (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_day_of_month (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_week_numbering_year (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_week_of_year (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_day_of_week (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_day_of_year (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_hour (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_minute (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_second (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint g_date_time_get_microsecond (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gdouble g_date_time_get_seconds (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gint64 g_date_time_to_unix (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gboolean g_date_time_to_timeval (GDateTime *datetime,
GTimeVal *tv);
GLIB_AVAILABLE_IN_ALL
GTimeSpan g_date_time_get_utc_offset (GDateTime *datetime);
GLIB_AVAILABLE_IN_2_58
GTimeZone * g_date_time_get_timezone (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
const gchar * g_date_time_get_timezone_abbreviation (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gboolean g_date_time_is_daylight_savings (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_to_timezone (GDateTime *datetime,
GTimeZone *tz);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_to_local (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
GDateTime * g_date_time_to_utc (GDateTime *datetime);
GLIB_AVAILABLE_IN_ALL
gchar * g_date_time_format (GDateTime *datetime,
const gchar *format) G_GNUC_MALLOC;
G_END_DECLS
#endif /* __G_DATE_TIME_H__ */
|
import warning from 'warning';
import debounce from 'lodash.debounce';
import mixin from '../../globals/js/misc/mixin';
import createComponent from '../../globals/js/mixins/create-component';
import initComponentBySearch from '../../globals/js/mixins/init-component-by-search';
import on from '../../globals/js/misc/on';
let didWarnAboutDeprecation = false;
class DetailPageHeader extends mixin(createComponent, initComponentBySearch) {
  /**
   * The Detail Page Header.
   * @extends CreateComponent
   * @extends InitComponentBySearch
   * @param {HTMLElement} element The element working as a page header.
   * @param {Object} [options] The component options.
   */
  constructor(element, options) {
    super(element, options);

    this.previousScrollY = 0;

    // Debounce scroll events so _handleScroll runs at most once per 25ms.
    // (Fix: the original comment claimed 50 while the code used 25.)
    const debouncedScroll = debounce(this._handleScroll.bind(this), 25);

    this.hScroll = on(this.element.ownerDocument.defaultView, 'scroll', debouncedScroll);

    if (__DEV__) {
      warning(
        didWarnAboutDeprecation,
        'Accessing the `detail-page-header` component from the ' +
          '`carbon-components` package is deprecated. Use the ' +
          '`carbon-addons-bluemix` package instead.'
      );
      didWarnAboutDeprecation = true;
    }
  }

  /**
   * Adds class to header based on users position on the page.
   */
  _handleScroll() {
    // Fix: the original if/else read `defaultView.pageYOffset` in BOTH
    // branches (the fallback branch was dead code), so it collapses to a
    // single read with identical behavior.
    const scrollPosition = this.element.ownerDocument.defaultView.pageYOffset;
    if (scrollPosition > 86) {
      // dataset values are coerced to strings ("true"/"false").
      this.element.dataset.headerActive = true;
      if (scrollPosition < this.previousScrollY) {
        // Scrolling up: expand the header again.
        this.element.classList.remove(this.options.scroll);
      } else {
        // Scrolling down: apply the collapsed-header class.
        this.element.classList.add(this.options.scroll);
      }
    } else {
      this.element.classList.remove(this.options.scroll);
      this.element.dataset.headerActive = false;
    }
    this.previousScrollY = scrollPosition;
  }

  /**
   * Cleans up stuffs specific to this widget.
   */
  release() {
    // Detach the window scroll listener before the base cleanup.
    this.hScroll.release();
    super.release();
  }

  /**
   * The map associating DOM element and detail page header instance.
   * @member DetailPageHeader.components
   * @type {WeakMap}
   */
  static components = new WeakMap();

  /**
   * The component options.
   * If `options` is specified in the constructor,
   * {@linkcode DetailPageHeader.create .create()}, or {@linkcode DetailPageHeader.init .init()},
   * properties in this object are overriden for the instance being created
   * and how {@linkcode DetailPageHeader.init .init()} works.
   * @member DetailPageHeader.options
   * @type {Object}
   * @property {string} selectorInit The CSS selector to find detail page headers.
   */
  static options = {
    selectorInit: '[data-detail-page-header]',
    scroll: 'bx--detail-page-header--scroll',
  };
}
export default DetailPageHeader;
|
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* parse.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: skavunen <marvin@42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/10/17 16:22:48 by skavunen #+# #+# */
/* Updated: 2017/10/27 13:58:31 by skavunen ### ########.fr */
/* */
/* ************************************************************************** */
#include "../includes/ft_lib.h"
/*
** Fill PROGRAM from the config lines in MAP, consuming one line per field
** via the shared cursor POS.
** NOTE: every `map[++(*pos)]` advances the cursor as a side effect, so the
** statement ORDER below must match the field order of the config file
** exactly — reordering any line silently mis-parses the config.
** A numprocs of 0 is rejected as a configuration error.
*/
t_jobs	*take_params(t_jobs *program, char **map, int *pos, char **env)
{
	program->cmd = take_cmd(map[++(*pos)]);
	program->argv = make_argv(program->cmd);
	if ((program->process = take_num(map[++(*pos)], "numprocs: ", 10)) == 0)
		config_error();
	program->umask = take_num(map[++(*pos)], "umask: ", 7);
	program->workingdir = take_string(map[++(*pos)], "workingdir: ", 12);
	program->autostart = take_autostart(map[++(*pos)]);
	program->autorestart = take_autorestart(map[++(*pos)]);
	program->exitcode = take_exit(map[++(*pos)], "exitcodes: ", 10);
	program->startretries = take_num(map[++(*pos)], "startretries: ", 14);
	program->starttime = take_num(map[++(*pos)], "starttime: ", 11);
	program->stopsignal = take_signal(map[++(*pos)]);
	program->stoptime = take_num(map[++(*pos)], "stoptime: ", 10);
	program->stdoutput = take_string(map[++(*pos)], "stdout: ", 8);
	program->stderror = take_string(map[++(*pos)], "stderr: ", 8);
	program->env = take_env(env, map, pos);
	return (program);
}
/*
** Parse one "name:" program block starting at map[*pos].
** The trailing ':' is stripped in place before the name is duplicated, and
** take_params() then consumes the field lines that follow.
*/
t_jobs	*take_one_program(char **env, char **map, int *pos)
{
	t_jobs	*jobs;
	int		size;

	if (!(jobs = (t_jobs*)ft_memalloc(sizeof(t_jobs))))
		exit(0);
	size = ft_strlen(map[(*pos)]);
	/*
	** Fix: an empty line made map[*pos][size - 1] read (and write) one byte
	** BEFORE the buffer; treat it as a config error like any malformed name.
	*/
	if (size == 0 || map[(*pos)][size - 1] != ':')
		config_error();
	map[(*pos)][size - 1] = '\0';
	jobs->name = ft_strdup(map[(*pos)]);
	jobs = take_params(jobs, map, pos, env);
	if (jobs->process > 1)
		dup_process(jobs);
	return (jobs);
}
/*
** Append a freshly parsed program to the tail of the doubly linked
** job list, creating the list when it is still empty.
*/
static void	ft_nodepushback(t_jobs **lst, char **map, int *pos, char **env)
{
	t_jobs	*tail;

	if (*lst == NULL)
	{
		*lst = take_one_program(env, map, pos);
		return ;
	}
	tail = *lst;
	while (tail->next != NULL)
		tail = tail->next;
	tail->next = take_one_program(env, map, pos);
	tail->next->prev = tail;
}
/*
** Read the whole config file from fd and build the job list.
** The file must begin with a literal "programs:" line followed by at
** least one program block; anything else aborts via config_error().
** Additional blocks are appended until the line array is exhausted.
** The raw line array is released before the (possibly grown) list is
** returned.
*/
t_jobs *parse_config(int fd, char **env, t_jobs *jobs)
{
	char	**config;
	int		i;

	config = take_file(fd);
	i = 0;
	if (!config[i] || ft_strcmp("programs:", config[i]) != 0)
		config_error();
	i++;
	if (!config[i])
		config_error();
	ft_nodepushback(&jobs, config, &i, env);
	while (config[i] != NULL)
		ft_nodepushback(&jobs, config, &i, env);
	ft_strdel_array(config);
	return (jobs);
}
/*
** Open the global config file (g_config) and parse it into a job list.
** On open failure a diagnostic is written to the global fd g_fd --
** access(R_OK) distinguishes "permission denied" from "file missing" --
** and NULL is returned instead of a list.  The fd is closed after a
** successful parse.
*/
t_jobs *load_jobs(char **env)
{
	int		fd;
	t_jobs	*jobs;

	fd = open(g_config, O_RDONLY);
	if (fd < 0)
	{
		if (access(g_config, R_OK) == -1)
			ft_putstr_fd("taskmaster: Permission Denied To Config File\n",
			g_fd);
		else
			ft_putstr_fd("taskmaster: No Config File\n", g_fd);
		return (NULL);
	}
	jobs = parse_config(fd, env, NULL);
	close(fd);
	return (jobs);
}
|
// ddlist.js
// 2017-12-08
// Copyright (c) 2017 Nimzozo
// Bootstrap every drop-down list on the page once it has fully loaded.
window.addEventListener("load", function () {
    "use strict";
    /**
     * Css classes and ids used by the drop-down widgets.
     */
    var css = {
        header: "drop-down-header",
        item: "drop-down-list__item",
        link: "drop-down-list__link",
        list: "drop-down-list",
        openHeader: "drop-down-header_open",
        openList: "drop-down-list_open",
        selectedLink: "drop-down-list__link_selected"
    };
    // Live HTMLCollections of every element participating in a drop-down.
    var headers = document.getElementsByClassName(css.header);
    var items = document.getElementsByClassName(css.item);
    var lists = document.getElementsByClassName(css.list);
    var openHeaders = document.getElementsByClassName(css.openHeader);
    var openLists = document.getElementsByClassName(css.openList);
    var selectedLinks = document.getElementsByClassName(css.selectedLink);
    // requestAnimationFrame with vendor-prefixed / setTimeout fallbacks.
    // NOTE(review): if requestAnimationFrame is entirely absent the first
    // call throws before any fallback is reached -- the || chain only
    // matters where the preceding function exists but returns a falsy id.
    // Confirm the target browsers if legacy support is actually needed.
    function raf(callback) {
        return window.requestAnimationFrame(callback) ||
            window.webkitRequestAnimationFrame(callback) ||
            window.mozRequestAnimationFrame(callback) ||
            window.setTimeout(callback, 1000 / 60);
    }
function DropDownList(header, list, isOpen) {
var dropDownList = {
animationSpeed: 4,
header: header,
itemsHeight: 0,
itemsMaxHeight: 20,
items: [],
list: list,
isOpen: isOpen
};
dropDownList.animate = function () {
if (dropDownList.isOpen) {
if (dropDownList.itemsHeight < 1) {
dropDownList.header.className = css.header;
dropDownList.list.className = css.list;
dropDownList.isOpen = false;
return;
}
dropDownList.itemsHeight -= dropDownList.animationSpeed;
} else {
if (dropDownList.itemsHeight === dropDownList.itemsMaxHeight) {
dropDownList.isOpen = true;
return;
}
dropDownList.itemsHeight += dropDownList.animationSpeed;
}
dropDownList.updateItems();
raf(dropDownList.animate);
};
dropDownList.create = function () {
var children = list.children;
Object.keys(children).forEach(function (key) {
var child = children[key];
if (child.tagName.toLowerCase() === "li") {
dropDownList.items.push(child);
}
});
if (dropDownList.isOpen) {
dropDownList.itemsHeight = dropDownList.itemsMaxHeight;
}
header.addEventListener("click", dropDownList.onHeaderClick);
};
dropDownList.onHeaderClick = function () {
if (dropDownList.isOpen) {
raf(dropDownList.animate);
} else {
raf(dropDownList.open);
}
};
dropDownList.open = function () {
dropDownList.updateItems();
dropDownList.header.className = css.openHeader;
dropDownList.list.className = css.openList;
raf(dropDownList.animate);
};
dropDownList.updateItems = function () {
dropDownList.items.forEach(function (item) {
item.style.height = dropDownList.itemsHeight + "px";
});
};
return dropDownList.create();
}
    // Instantiate a closed drop-down for each plain header.
    // NOTE(review): headers and lists are paired purely by collection
    // index -- assumes each header's list appears in the same document
    // order; Object.keys on a live HTMLCollection also relies on its
    // indexed own properties (works in current browsers).
    Object.keys(headers).forEach(function (key) {
        var dropDown = {};
        var header = headers[key];
        var list = lists[key];
        dropDown = new DropDownList(header, list, false);
    });
    // Instantiate drop-downs that start expanded, paired the same way.
    Object.keys(openHeaders).forEach(function (key) {
        var dropDown = {};
        var header = openHeaders[key];
        var list = openLists[key];
        dropDown = new DropDownList(header, list, true);
    });
    // Clicking a link moves the "selected" class to it: first clear the
    // class from every currently selected link (on the next frame), then
    // mark the clicked one.
    Object.keys(items).forEach(function (key) {
        var link = items[key].firstChild;
        link.addEventListener("click", function () {
            Object.keys(selectedLinks).forEach(function (index) {
                var selectedLink = selectedLinks[index];
                raf(function () {
                    selectedLink.className = css.link;
                });
            });
            link.className = css.selectedLink;
        });
    });
});
|
#! /usr/bin/env node
const program = require('commander');
const fs = require('fs')
const command_new = require('./commands/new');
const command_init = require('./commands/init');
// `new <title>` (alias `n`): scaffold a fresh project's boilerplate.
program
    .command('new [title]')
    .alias('n')
    .description("Create a new project and generate it's boilerplate")
    .action(function(title) {
        command_new(title);
    });
// `init`: interactive configuration of the project CLI.
program
    .command('init')
    .description('Configure the project cli')
    .action(function() {
        command_init();
    })
// Hand the raw argv to commander, which dispatches to the actions above.
program.parse(process.argv);
|
import logging
# Module-level logger named after this module so output can be filtered
# and configured per-module via the logging hierarchy.
logger = logging.getLogger(__name__)
# Historical direct aliases, superseded by the wrapper functions below
# (kept for reference):
#debug = logger.debug
#loginfo = logger.info
#logmsg = logger.info
def _getstr(args):
try:
return ' '.join(str(a) for a in args)
except:
return ' '.join(unicode(a) for a in args)
def loginfo(*args):
    """Log all positional arguments, space separated, at INFO level."""
    logger.info(_getstr(args))


# Backwards-compatible alias.
logmsg = loginfo
def debug(*args):
    """Log all positional arguments, space separated, at DEBUG level."""
    logger.debug(_getstr(args))


# Backwards-compatible alias.
logdebug = debug
|
////////////////////////////////////////////////////////////////////////////////
// Pac-Man clone
//
// Copyright (c) 2021 Kwena Mashamaite (kwena.mashamaite1@gmail.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
////////////////////////////////////////////////////////////////////////////////
#ifndef PACMAN_ACTORSTATEFSM_H
#define PACMAN_ACTORSTATEFSM_H
#include "IActorState.h"
#include <stack>
#include <memory>
namespace pm {
    /**
     * @brief Stack-based Finite State Machine (FSM) for IActorState
     *
     * Only the state at the top of the stack is active at any time:
     * pushed states become the active state, popping reveals (and,
     * per pop(), resumes) the state beneath.
     */
    class ActorStateFSM {
    public:
        /**
         * @brief Default constructor
         */
        ActorStateFSM();
        /**
         * @brief Start the FSM
         *
         * Note that when started, the FSM will enter the state at the top of
         * the stack if any. In addition, subsequent state pushes will be entered
         * immediately after the push
         *
         * @see push
         */
        void start();
        /**
         * @brief Add a state to the FSM
         * @param state The state to be added
         *
         * The state will become the current active state, after its added
         *
         * @see pop and start
         */
        void push(std::unique_ptr<IActorState> state);
        /**
         * @brief Remove the current active state and optionally push a new one
         * @param state State to be pushed
         *
         * The @a state argument is optional and must only be provided if you
         * intend to pop the current state and immediately push another one.
         * When popping and pushing individually (calling pop() and immediately
         * after calling push()), the previous scene (if any) will be resumed
         * after the pop operation and then paused again after the push operation.
         * The @a state argument intends to prevent this momentary resume and
         * pause action. The @a state will be pushed and entered without resuming
         * and pausing the previous state
         *
         * @warning If the active state calls this function, any attempt
         * to access it afterwards is undefined behavior. In addition,
         * this function must not be called in pm::IActorState::onExit
         *
         * @see push
         */
        void pop(std::unique_ptr<IActorState> state = nullptr);
        /**
         * @brief Get the current active state
         * @return The current active state or a nullptr if the FSM has no states
         */
        IActorState* top();
        /**
         * @brief Get the number of states in the FSM
         * @return The number of states in the FSM
         */
        std::size_t getCount() const;
        /**
         * @brief Remove all states from the FSM
         */
        void clear();
    private:
        std::stack<std::unique_ptr<IActorState>> states_; //!< States container
        bool isStarted_;      //!< A flag indicating whether or not the FSM has been started
        bool isExitingState_; //!< A flag indicating whether or not the FSM is currently popping a state
    };
}
#endif //PACMAN_ACTORSTATEFSM_H
|
import React, {useState, useEffect} from 'react'
import Layout from '../components/layout'
import Nav from '../components/nav'
import Banner from '../components/non-front-banner'
import AuthorImg from '../../static/author/main_author.jpg'
/*Slick*/
import Slider from "react-slick";
// Import css files
import "slick-carousel/slick/slick.css";
import "slick-carousel/slick/slick-theme.css";
import {Helmet} from "react-helmet"
/*
Author Images
*/
/*
 * About-the-Author page: banner, author photo with pull quote, and a
 * biography.  A photo grid / slick slider is present but commented out;
 * `settings` is kept so the slider works when re-enabled.
 */
const ATB = (props)=>{
    // true when the viewport is mobile-sized (<= 990px wide)
    const [mobState, __functState] = useState(false);
    useEffect(() => {
        // Keep mobState in sync with the viewport width.  The previous
        // version re-ran on every mobState change, adding a fresh
        // anonymous resize listener each time and never removing any of
        // them (listener leak).  Register once, clean up on unmount.
        const handleResize = () => __functState(window.innerWidth <= 990);
        handleResize(); // initialise from the current viewport width
        window.addEventListener("resize", handleResize);
        return () => window.removeEventListener("resize", handleResize);
    }, []);
    // Slick slider settings: one slide on mobile, three on desktop.
    var settings = {
        dots: true,
        infinite: true,
        speed: 500,
        slidesToShow: (mobState) ? 1 : 3,
        slidesToScroll: 1,
    };
    return (
        <>
        <Layout>
            <Helmet title="About the Author | Chip Weinert" />
            <Nav pathExt={props.path}/>
            <Banner
                spanFirst={`About The`}
                contextHeading={`Author`}/>
            <div className="container">
                <section className="body-author-contents columns">
                    <div className="heading-quote column">
                        <div className="author-image-container">
                            <img src={AuthorImg} alt="Chip Weinert" />
                        </div>
                        <div className="heading-quote">
                            <h4>
                            "Hey you big old snow leopard, come on over here, and let me buy you a drink. I’ve been looking for someone just your size and smarts.”
                            </h4>
                            <span className="ata-span-fx"></span>
                        </div>
                    </div>
                    <article className="article-section column" id="author">
                        <br/>
                        <br/>
                        <p>
                        Chip Weinert grew up in Wisconsin before getting his degree in Communication from Utah State University. After college, he moved to Miami and worked writing ad copy for mail order catalogs. He left Miami and headed back out west, this time landing in Hood River, Oregon after a couple years in Seattle. In Hood River he worked for the Hood River News before becoming Managing Editor/Associate Publisher of Wind Tracks magazine, a four-color glossy windsurfing publication, based out of the southern Oregon coast. When the magazine was sold, Chip stayed on the south coast where these days he spends his time windsurfing, surfing, kayaking, fishing, cycling and writing about it. In between the writing gigs, he’s worked in the non-profit industry as manager and development director. A widower, he lives with two adorable pound puppies – rescue dogs – that keep him on his toes.
                        </p>
                        <p>
                            <span className="author-name">
                                <span className="author-span-ft">Author | Writer </span>
                                Chip Weinert
                            </span>
                        </p>
                    </article>
                </section>
                {/* <div className="photo-grid">
                    <div className="row-photo">
                        <div className="photo-row">
                            <div className="photo-column">
                                <img src={Img6}/>
                            </div>
                            <div className="photo-column">
                                <img src={Img4}/>
                                <img src={Img2}/>
                                <img src={Img3}/>
                            </div>
                            <div className="photo-column">
                                <img src={Img5}/>
                                <img src={Img1}/>
                            </div>
                        </div>
                    </div>
                </div>
                <div id="slider-container">
                    <Slider {...settings} className="ata-slider">
                        <div className="author-img">
                            <img src={Img1}
                                alt="author image" />
                        </div>
                        <div className="author-img">
                            <img src={Img2}
                                alt="author image"/>
                        </div>
                        <div className="author-img">
                            <img src={Img3}
                                alt="author image" />
                        </div>
                        <div className="author-img">
                            <img src={Img4}
                                alt="author image" />
                        </div>
                        <div className="author-img">
                            <img src={Img5}
                                alt="author image" />
                        </div>
                        <div className="author-img">
                            <img src={Img6}
                                alt="author image" />
                        </div>
                    </Slider>
                </div> */}
            </div>
        </Layout>
        </>
    )
}
export default ATB;
|
// eslint-disable-next-line
import { default as component } from './Bool.vue'
import { compose } from '@cortezaproject/corteza-js'
import * as fieldTypes from './loader'
// NOTE(review): leftover debug output -- remove once loader wiring is
// confirmed.
console.log(fieldTypes)
// const namespace = ({
//   canCreateChart: true,
//   canCreateModule: true,
//   canCreatePage: true,
//   canDeleteNamespace: true,
//   canGrant: true,
//   canManageNamespace: true,
//   canUpdateNamespace: true,
//   createdAt: 'Fri Jul 30 2021 18:25:13 GMT+0300 (Eastern European Summer Time)',
//   deletedAt: undefined,
//   enabled: true,
//   // labels: Object (empty),
//   meta: {
//     iconID: '0',
//     logoID: '0',
//   },
//   name: 'CRM',
//   namespaceID: '242313184189546499',
//   slug: 'crm',
// })
// Props handed to the Bool field configurator story: empty namespace and
// module plus a Bool module field with a custom "true" label.
const props = {
  // compose.Namespace
  namespace: new compose.Namespace(),
  // compose.Module
  module: new compose.Module(),
  // compose.ModuleField
  field: new compose.ModuleFieldBool({
    options: { trueLabel: 'foo' },
  }),
  // options
}
// Story/registry entry describing where this configurator lives.
export default {
  name: 'Bool',
  group: ['ModuleFields', '/Configurator'],
  component,
  props,
  controls: [],
}
|
# --------------
# Code starts here
# Instructor lists per class
class_1 = ['Geoffrey Hinton','Andrew Ng','Sebastian Raschka','Yoshua Bengio']
class_2 = ['Hilary Mason','Carla Gentry','Corinna Cortes']
# Concatenate both lists
new_class = class_1 + class_2
# Append a new instructor.  Fixed the 'Perter Warden' typo so the name
# matches the 'Peter Warden' key used in the mathamatics dict below.
new_class.append('Peter Warden')
# Print updated list
print(new_class)
# Remove an instructor from the list
new_class.remove('Carla Gentry')
# Print the list
print(new_class)
# Marks obtained per subject
courses = {'Math':65, 'English':70, 'History':80, 'French':70, 'Science':60}
# All subject marks, materialised as a list so it prints cleanly
l1 = list(courses.values())
print(l1)
# Total marks across all subjects
total = sum(l1)
# Print the total
print(total)
# Percentage: each subject is out of 100, so the maximum total is
# 100 * number of subjects (previously hard-coded as 500)
percentage = (total / (100 * len(courses))) * 100
# Print the percentage
print(percentage)
# Maths marks per instructor.  Fixed the 'Yoshua Benjio' spelling for
# consistency with class_1 (does not affect the topper).
mathamatics = {'Geoffrey Hinton':78, 'Andrew Ng':95, 'Sebastian Raschka':65, 'Yoshua Bengio':50, 'Hilary Mason':70, 'Corinna Cortes':66, 'Peter Warden':75}
# Instructor with the highest maths mark
topper = max(mathamatics, key=mathamatics.get)
print(topper)
# Split the topper's name into first and last name
first_name = topper.split(" ")[0]
last_name = topper.split(" ")[-1]
# Certificate format: "<last> <first>"
full_name = last_name + ' ' + first_name
# print the full_name
print(full_name)
# print the name in upper case
certificate_name = full_name.upper()
print(certificate_name)
# Code ends here
|
from tkinter import *
# Root application window.
root = Tk()
def myclick():
    # Button callback: appends a fresh label to the window on every click.
    myLabel = Label(root,text= 'Look! I clicked a button ')
    myLabel.pack()
# A button with 50px padding on each axis that triggers myclick.
myButton = Button(root , text='Click Me' , padx=50 , pady= 50, command=myclick)
myButton.pack()
# Enter the Tk event loop (blocks until the window is closed).
root.mainloop()
|
// Run `fn` once the DOM is ready: immediately when parsing has already
// finished, otherwise on DOMContentLoaded.
function ready(fn) {
    if (document.readyState === 'loading') {
        document.addEventListener('DOMContentLoaded', fn);
    } else {
        fn();
    }
}
// Injects the shared UQ library web components (alerts, header, footers)
// into the page before Polymer has loaded, then registers a hook for
// when the components are upgraded.
function loadReusableComponents() {
    //insert elements, even before Polymer is loaded
    //first element of the original document
    var firstElement = document.body.children[0];
    //insert alerts before the header, unless the page already has one
    var alerts = document.querySelector('uqlibrary-alerts');
    if (!alerts) {
        //as a back up insert header if it's not defined already
        alerts = document.createElement('uqlibrary-alerts');
        document.body.insertBefore(alerts, firstElement);
    }
    // insert the header at the top of <body>, before its first child
    var header = document.createElement('uq-minimal-header');
    document.body.insertBefore(header, firstElement);
    // append the "connect" sub footer at the end of <body>
    var subFooter = document.createElement('uql-connect-footer');
    document.body.appendChild(subFooter);
    // append the main footer after it, last in <body>
    var footer = document.createElement('uq-minimal-footer');
    document.body.appendChild(footer);
    window.addEventListener('WebComponentsReady', function() {
        // when polymer is ready - configure elements
    });
}
// Kick everything off as soon as the DOM is usable.
ready(loadReusableComponents);
|
import os.path as osp
from unittest import TestCase
from pylinac.log_analyzer import VMAT, IMAGING, STATIC_IMRT, DYNAMIC_IMRT
from tests_basic.test_logs import IndividualLogBase
from tests_basic import TEST_BANK_DIR
class LogBankBase(IndividualLogBase):
    """Base for the log-bank tests: anchors all file paths at the machine-log bank."""
    dir_location = osp.join(TEST_BANK_DIR, 'Machine logs')
class One(LogBankBase, TestCase):
    """Static IMRT trajectory log: expected metrics for one subbeam."""
    file_path = ['Anonymous_4DC Treatment_A_TX_20120928131920.bin']
    treatment_type = STATIC_IMRT
    num_subbeams = 1
    mu_delivered = 209
    num_snapshots = 1098
    first_subbeam_data = {'gantry_angle': 185, 'collimator_angle': 180, 'jaw_y1': 10.5}
class Two(LogBankBase, TestCase):
    """Dynamic IMRT trajectory log with ten subbeams and many beam holds."""
    file_path = ['Anonymous_4DC Treatment_A1_TX_20120928132942.bin']
    treatment_type = DYNAMIC_IMRT
    num_subbeams = 10
    mu_delivered = 681
    num_beamholds = 142
    num_snapshots = 28268
    first_subbeam_data = {'gantry_angle': 340, 'collimator_angle': 180, 'jaw_y1': 10.8}
class DynalogArc(LogBankBase, TestCase):
    """VMAT Dynalog (version B) with expected RMS/gamma statistics."""
    file_path = ['Katy iX', 'A20120712122417_Anonymous.dlg']
    treatment_type = VMAT
    version = 'B'
    tolerance = 510
    average_gamma = 0.06
    mu_delivered = 25000
    num_snapshots = 1151
    average_rms = 0.16
    maximum_rms = 0.205
class Four(LogBankBase, TestCase):
    """Dynamic IMRT trajectory log (v3) from an HDMLC machine."""
    file_path = ['Chicago', 'T-Log HDMLC', 'anonymized_4DC Treatment_1.1_TX_20151015093202.bin']
    num_snapshots = 6356
    version = 3
    treatment_type = DYNAMIC_IMRT
    num_subbeams = 2
    num_axes = 16
    mu_delivered = 535
    num_beamholds = 2
    mlc_model = 3
    first_subbeam_data = {'gantry_angle': 178.9, 'jaw_x2': 5.2}
class CBCTSetup(LogBankBase, TestCase):
    """kV-CBCT setup log: imaging only, zero MU delivered."""
    file_path = ['Chicago', 'T-Log HDMLC', 'anonymized_4DC Treatment_KVCBCT_Setup_20151015090308.bin']
    num_snapshots = 1238
    version = 3
    treatment_type = IMAGING
    num_subbeams = 1
    num_axes = 16
    mu_delivered = 0
    num_beamholds = 0
    mlc_model = 3
    first_subbeam_data = {}
    def test_publish_pdf(self):
        # PDF publishing is skipped for this imaging log.
        pass
class KVSetup(LogBankBase, TestCase):
    """kV/kV setup-pair log: imaging only, zero MU delivered."""
    file_path = ['Chicago', 'T-Log HDMLC', 'anonymized_4DC Treatment_KVKV_SetupPair_20151015130741.bin']
    num_snapshots = 185
    version = 3
    treatment_type = IMAGING
    mlc_model = 3
    num_subbeams = 1
    num_axes = 16
    mu_delivered = 0
    num_beamholds = 0
    first_subbeam_data = {}
    def test_publish_pdf(self):
        # PDF publishing is skipped for this imaging log.
        pass
class DoubleExposure(LogBankBase, TestCase):
    """Planned double-exposure imaging log: two subbeams, 2 MU."""
    file_path = ['Chicago', 'T-Log HDMLC', 'anonymized_4DC Treatment_Planned_Double_Exposure_ADHOC_20151015140943.bin']
    num_snapshots = 750
    version = 3
    treatment_type = IMAGING
    mlc_model = 3
    average_gamma = 0
    num_subbeams = 2
    num_axes = 16
    mu_delivered = 2
    num_beamholds = 4
    first_subbeam_data = {'gantry_angle': 180, 'jaw_x2': 4}
    def test_publish_pdf(self):
        # PDF publishing is skipped for this imaging log.
        pass
class Five(LogBankBase, TestCase):
    """Dynamic IMRT trajectory log (v3) from a Millennium-120 machine."""
    file_path = ['Chicago', 'T-Log Mil120', 'anonymized_4DC Treatment_1.1_TX_20151015102651.bin']
    num_snapshots = 10728
    version = 3
    num_subbeams = 3
    treatment_type = DYNAMIC_IMRT
    num_axes = 16
    mu_delivered = 428
    num_beamholds = 3
    first_subbeam_data = {'gantry_angle': 176.7, 'jaw_x2': 8.2}
class OpenPort(LogBankBase, TestCase):
    """Planned open-port imaging log: a single 1 MU image field."""
    file_path = ['Chicago', 'T-Log Mil120', 'anonymized_4DC Treatment_Planned_Open_Port_Image_ADHOC_20151015131101.bin']
    num_snapshots = 72
    version = 3
    treatment_type = IMAGING
    num_subbeams = 1
    num_axes = 16
    mu_delivered = 1
    num_beamholds = 3
    first_subbeam_data = {'gantry_angle': 180, 'jaw_x2': 6}
    def test_publish_pdf(self):
        # PDF publishing is skipped for this imaging log.
        pass
class Six(LogBankBase, TestCase):
    """VMAT Dynalog (version B) from a second machine, with RMS/gamma limits."""
    file_path = ['Bay Area iX', 'A20121212123129_Anonymous.dlg']
    treatment_type = VMAT
    version = 'B'
    tolerance = 510
    num_snapshots = 1150
    average_rms = 0.11
    maximum_rms = 0.14
    num_subbeams = 1
    num_axes = 16
    mu_delivered = 25000
    average_gamma = 0.03
|
import _ from 'lodash'
import faker from 'faker'
import React from 'react'
import { Search, Grid, Header, Segment } from 'semantic-ui-react'
// Five fake product entries generated once at module load; this is the
// data set the search box filters against.
const source = _.times(5, () => ({
  title: faker.company.companyName(),
  description: faker.company.catchPhrase(),
  image: faker.internet.avatar(),
  price: faker.finance.amount(0, 100, 2, '$'),
}))
// Pristine search-box state; CLEAN_QUERY resets back to this object.
const initialState = {
  loading: false,
  results: [],
  value: '',
}
// Reducer for the search widget's { loading, results, value } state.
// Unknown action types are programming errors and therefore throw.
function exampleReducer(state, action) {
  if (action.type === 'CLEAN_QUERY') {
    return initialState
  }
  if (action.type === 'START_SEARCH') {
    return { ...state, loading: true, value: action.query }
  }
  if (action.type === 'FINISH_SEARCH') {
    return { ...state, loading: false, results: action.results }
  }
  if (action.type === 'UPDATE_SELECTION') {
    return { ...state, value: action.selection }
  }
  throw new Error()
}
// Search box demo: debounced client-side filtering of `source` by title,
// with the raw state and options rendered alongside for inspection.
function SearchExampleStandard() {
  const [state, dispatch] = React.useReducer(exampleReducer, initialState)
  const { loading, results, value } = state
  // Pending debounce timer id, kept across renders.
  const timeoutRef = React.useRef()
  const handleSearchChange = React.useCallback((e, data) => {
    // Restart the 300 ms debounce window on every keystroke.
    clearTimeout(timeoutRef.current)
    dispatch({ type: 'START_SEARCH', query: data.value })
    timeoutRef.current = setTimeout(() => {
      // An emptied input resets the whole widget.
      if (data.value.length === 0) {
        dispatch({ type: 'CLEAN_QUERY' })
        return
      }
      // Case-insensitive title match, with regex metacharacters escaped.
      const re = new RegExp(_.escapeRegExp(data.value), 'i')
      const isMatch = (result) => re.test(result.title)
      dispatch({
        type: 'FINISH_SEARCH',
        results: _.filter(source, isMatch),
      })
    }, 300)
  }, [])
  // Cancel any in-flight debounce timer on unmount.
  React.useEffect(() => {
    return () => {
      clearTimeout(timeoutRef.current)
    }
  }, [])
  return (
    <Grid>
      <Grid.Column width={6}>
        <Search
          loading={loading}
          onResultSelect={(e, data) =>
            dispatch({ type: 'UPDATE_SELECTION', selection: data.result.title })
          }
          onSearchChange={handleSearchChange}
          results={results}
          value={value}
        />
      </Grid.Column>
      <Grid.Column width={10}>
        <Segment>
          <Header>State</Header>
          <pre style={{ overflowX: 'auto' }}>
            {JSON.stringify({ loading, results, value }, null, 2)}
          </pre>
          <Header>Options</Header>
          <pre style={{ overflowX: 'auto' }}>
            {JSON.stringify(source, null, 2)}
          </pre>
        </Segment>
      </Grid.Column>
    </Grid>
  )
}
export default SearchExampleStandard
|
!function(e){function t(r){if(n[r])return n[r].exports;var a=n[r]={exports:{},id:r,loaded:!1};return e[r].call(a.exports,a,a.exports,t),a.loaded=!0,a.exports}var n={};return t.m=e,t.c=n,t.p="",t(0)}([function(e,t,n){e.exports=n(1)},function(e,t,n){"use strict";n(2);var r=n(7),a=n(163);r.render(r.createElement(a,null),document.getElementById("body"))},function(e,t,n){var r=n(3);"string"==typeof r&&(r=[[e.id,r,""]]);n(6)(r,{});r.locals&&(e.exports=r.locals)},function(e,t,n){t=e.exports=n(4)(),t.push([e.id,"@import url(//fonts.googleapis.com/css?family=Roboto);",""]),t.push([e.id,"@import url(//fonts.googleapis.com/css?family=Chau+Philomene+One);",""]),t.push([e.id,".on-boarding-section,.page-permanent-announcement{jusify-content:center;margin-right:auto;margin-left:auto}*{box-sizing:border-box}body{font-family:Roboto,sans-serif}.page-permanent-announcement,.the-button{font-family:'Chau Philomene One',sans-serif}.app{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.main-container::before{content:\"\";position:fixed;top:0;left:0;width:100vw;height:100vh;background-image:url("+n(5)+");background-repeat:no-repeat;background-position:center;webkit-background-size:cover;background-size:cover;z-index:-1}.header,.page-permanent-announcement{display:-webkit-flex;display:-ms-flexbox;width:100%}.header{color:grey;height:30px;-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap;padding-top:8px;padding-bottom:8px;display:flex;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;border-bottom:1px solid 
#e1e1e1;background:rgba(0,0,0,.7)}.page-permanent-announcement{display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;font-size:40px;color:#FFF;text-align:center;background:linear-gradient(to bottom,rgba(51,122,183,.85) 0,rgba(117,171,209,.75) 48%,rgba(122,175,211,.75) 52%,rgba(188,224,238,0) 100%);filter:progid:DXImageTransform.Microsoft.gradient( startColorstr='#d9337ab7', endColorstr='#00bce0ee', GradientType=0 );background-size:100vw 100% cover;padding-top:3%;padding-bottom:10%}.on-boarding-section,.panel{display:-webkit-flex;display:-ms-flexbox}.on-boarding-section{margin-top:10%;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-align-content:center;-ms-flex-line-pack:center;align-content:center}.the-button{color:#fff;margin:auto;font-size:28px;height:100px;width:400px;background:rgba(0,0,0,.5);border-radius:25px}.the-button:hover{background:rgba(0,0,0,.75);color:#fff}.mini-bus-pic{width:auto;height:100px}.departures{margin-top:-15%}.panel{margin:0 auto 45px;display:flex;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;background:rgba(255,255,255,.8)}.info-box,.panel-body{display:-webkit-flex;display:-ms-flexbox}.panel-body{display:flex;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.info-box{color:#144B73;padding:10px;font-size:16px;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.info-variable{font-size:14px;color:#000}.panel-footer{background:0 0;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-align-content:center;-ms-flex-line-pack:center;align-content:center;margin:auto;border:none;border-left:1px solid #d3d3d3;border-right:1px solid #d3d3d3}.glyphicon.spinning{text-align:center;color:#FFF;font-size:4em;animation:spin 1s infinite linear;-webkit-animation:spin2 1s infinite linear}@-webkit-keyframes spin{from{-webkit-transform:scale(1) 
rotate(0);transform:scale(1) rotate(0)}to{-webkit-transform:scale(1) rotate(360deg);transform:scale(1) rotate(360deg)}}@keyframes spin{from{-webkit-transform:scale(1) rotate(0);transform:scale(1) rotate(0)}to{-webkit-transform:scale(1) rotate(360deg);transform:scale(1) rotate(360deg)}}@-webkit-keyframes spin2{from{-webkit-transform:rotate(0)}to{-webkit-transform:rotate(360deg)}}@media (min-width:700px){.panel{max-width:700px}}@media (max-width:700px){.panel-footer img{height:50px}}@media (max-width:450px){.header{font-size:10px}.the-button{font-size:20px;width:250px;height:60px}.panel{-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}}@media (max-width:600px){.panel-footer{display:none}}",""])},function(e,t){e.exports=function(){var e=[];return e.toString=function(){for(var e=[],t=0;t<this.length;t++){var n=this[t];n[2]?e.push("@media "+n[2]+"{"+n[1]+"}"):e.push(n[1])}return e.join("")},e.i=function(t,n){"string"==typeof t&&(t=[[null,t,""]]);for(var r={},a=0;a<this.length;a++){var o=this[a][0];"number"==typeof o&&(r[o]=!0)}for(a=0;a<t.length;a++){var i=t[a];"number"==typeof i[0]&&r[i[0]]||(n&&!i[2]?i[2]=n:n&&(i[2]="("+i[2]+") and ("+n+")"),e.push(i))}},e}},function(e,t,n){e.exports=n.p+"fce0849b42e5ca7c8f9797ab15942682.jpg"},function(e,t,n){function r(e,t){for(var n=0;n<e.length;n++){var r=e[n],a=c[r.id];if(a){a.refs++;for(var o=0;o<a.parts.length;o++)a.parts[o](r.parts[o]);for(;o<r.parts.length;o++)a.parts.push(s(r.parts[o],t))}else{for(var i=[],o=0;o<r.parts.length;o++)i.push(s(r.parts[o],t));c[r.id]={id:r.id,refs:1,parts:i}}}}function a(e){for(var t=[],n={},r=0;r<e.length;r++){var a=e[r],o=a[0],i=a[1],s=a[2],u=a[3],d={css:i,media:s,sourceMap:u};n[o]?n[o].parts.push(d):t.push(n[o]={id:o,parts:[d]})}return t}function o(){var e=document.createElement("style"),t=m();return e.type="text/css",t.appendChild(e),e}function i(){var e=document.createElement("link"),t=m();return e.rel="stylesheet",t.appendChild(e),e}function s(e,t){var 
n,r,a;if(t.singleton){var s=f++;n=h||(h=o()),r=u.bind(null,n,s,!1),a=u.bind(null,n,s,!0)}else e.sourceMap&&"function"==typeof URL&&"function"==typeof URL.createObjectURL&&"function"==typeof URL.revokeObjectURL&&"function"==typeof Blob&&"function"==typeof btoa?(n=i(),r=l.bind(null,n),a=function(){n.parentNode.removeChild(n),n.href&&URL.revokeObjectURL(n.href)}):(n=o(),r=d.bind(null,n),a=function(){n.parentNode.removeChild(n)});return r(e),function(t){if(t){if(t.css===e.css&&t.media===e.media&&t.sourceMap===e.sourceMap)return;r(e=t)}else a()}}function u(e,t,n,r){var a=n?"":r.css;if(e.styleSheet)e.styleSheet.cssText=y(t,a);else{var o=document.createTextNode(a),i=e.childNodes;i[t]&&e.removeChild(i[t]),i.length?e.insertBefore(o,i[t]):e.appendChild(o)}}function d(e,t){var n=t.css,r=t.media;t.sourceMap;if(r&&e.setAttribute("media",r),e.styleSheet)e.styleSheet.cssText=n;else{for(;e.firstChild;)e.removeChild(e.firstChild);e.appendChild(document.createTextNode(n))}}function l(e,t){var n=t.css,r=(t.media,t.sourceMap);r&&(n+="\n/*# sourceMappingURL=data:application/json;base64,"+btoa(unescape(encodeURIComponent(JSON.stringify(r))))+" */");var a=new Blob([n],{type:"text/css"}),o=e.href;e.href=URL.createObjectURL(a),o&&URL.revokeObjectURL(o)}var c={},_=function(e){var t;return function(){return"undefined"==typeof t&&(t=e.apply(this,arguments)),t}},p=_(function(){return/msie [6-9]\b/.test(window.navigator.userAgent.toLowerCase())}),m=_(function(){return document.head||document.getElementsByTagName("head")[0]}),h=null,f=0;e.exports=function(e,t){t=t||{},"undefined"==typeof t.singleton&&(t.singleton=p());var n=a(e);return r(n,t),function(e){for(var o=[],i=0;i<n.length;i++){var s=n[i],u=c[s.id];u.refs--,o.push(u)}if(e){var d=a(e);r(d,t)}for(var i=0;i<o.length;i++){var u=o[i];if(0===u.refs){for(var l=0;l<u.parts.length;l++)u.parts[l]();delete c[u.id]}}}};var y=function(){var e=[];return function(t,n){return 
e[t]=n,e.filter(Boolean).join("\n")}}()},function(e,t,n){e.exports=n(8)},function(e,t,n){(function(t){"use strict";var r=n(10),a=n(14),o=n(28),i=n(43),s=n(18),u=n(23),d=n(17),l=n(38),c=n(46),_=n(48),p=n(97),m=n(25),h=n(73),f=n(34),y=n(128),M=n(35),v=n(160),g=n(19),L=n(117),D=n(162);p.inject();var Y=d.createElement,b=d.createFactory,k=d.cloneElement;"production"!==t.env.NODE_ENV&&(Y=l.createElement,b=l.createFactory,k=l.cloneElement);var T=f.measure("React","render",h.render),E={Children:{map:a.map,forEach:a.forEach,count:a.count,only:D},Component:o,DOM:c,PropTypes:y,initializeTouchEvents:function(e){r.useTouchEvents=e},createClass:i.createClass,createElement:Y,cloneElement:k,createFactory:b,createMixin:function(e){return e},constructAndRenderComponent:h.constructAndRenderComponent,constructAndRenderComponentByID:h.constructAndRenderComponentByID,findDOMNode:L,render:T,renderToString:v.renderToString,renderToStaticMarkup:v.renderToStaticMarkup,unmountComponentAtNode:h.unmountComponentAtNode,isValidElement:d.isValidElement,withContext:s.withContext,__spread:g};if("undefined"!=typeof __REACT_DEVTOOLS_GLOBAL_HOOK__&&"function"==typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.inject&&__REACT_DEVTOOLS_GLOBAL_HOOK__.inject({CurrentOwner:u,InstanceHandles:m,Mount:h,Reconciler:M,TextComponent:_}),"production"!==t.env.NODE_ENV){var w=n(57);if(w.canUseDOM&&window.top===window.self){navigator.userAgent.indexOf("Chrome")>-1&&"undefined"==typeof __REACT_DEVTOOLS_GLOBAL_HOOK__&&console.debug("Download the React DevTools for a better development experience: https://fb.me/react-devtools");for(var N=[Array.isArray,Array.prototype.every,Array.prototype.forEach,Array.prototype.indexOf,Array.prototype.map,Date.now,Function.prototype.bind,Object.keys,String.prototype.split,String.prototype.trim,Object.create,Object.freeze],S=0;S<N.length;S++)if(!N[S]){console.error("One or more ES5 shim/shams expected by React are not available: 
https://fb.me/react-warning-polyfills");break}}}E.version="0.13.3",e.exports=E}).call(t,n(9))},function(e,t){function n(){d=!1,i.length?u=i.concat(u):l=-1,u.length&&r()}function r(){if(!d){var e=setTimeout(n);d=!0;for(var t=u.length;t;){for(i=u,u=[];++l<t;)i&&i[l].run();l=-1,t=u.length}i=null,d=!1,clearTimeout(e)}}function a(e,t){this.fun=e,this.array=t}function o(){}var i,s=e.exports={},u=[],d=!1,l=-1;s.nextTick=function(e){var t=new Array(arguments.length-1);if(arguments.length>1)for(var n=1;n<arguments.length;n++)t[n-1]=arguments[n];u.push(new a(e,t)),1!==u.length||d||setTimeout(r,0)},a.prototype.run=function(){this.fun.apply(null,this.array)},s.title="browser",s.browser=!0,s.env={},s.argv=[],s.version="",s.versions={},s.on=o,s.addListener=o,s.once=o,s.off=o,s.removeListener=o,s.removeAllListeners=o,s.emit=o,s.binding=function(e){throw new Error("process.binding is not supported")},s.cwd=function(){return"/"},s.chdir=function(e){throw new Error("process.chdir is not supported")},s.umask=function(){return 0}},function(e,t,n){(function(t){"use strict";function r(e){return e===y.topMouseUp||e===y.topTouchEnd||e===y.topTouchCancel}function a(e){return e===y.topMouseMove||e===y.topTouchMove}function o(e){return e===y.topMouseDown||e===y.topTouchStart}function i(e,n){var r=e._dispatchListeners,a=e._dispatchIDs;if("production"!==t.env.NODE_ENV&&p(e),Array.isArray(r))for(var o=0;o<r.length&&!e.isPropagationStopped();o++)n(e,r[o],a[o]);else r&&n(e,r,a)}function s(e,t,n){e.currentTarget=f.Mount.getNode(n);var r=t(e,n);return e.currentTarget=null,r}function u(e,t){i(e,t),e._dispatchListeners=null,e._dispatchIDs=null}function d(e){var n=e._dispatchListeners,r=e._dispatchIDs;if("production"!==t.env.NODE_ENV&&p(e),Array.isArray(n)){for(var a=0;a<n.length&&!e.isPropagationStopped();a++)if(n[a](e,r[a]))return r[a]}else if(n&&n(e,r))return r;return null}function l(e){var t=d(e);return e._dispatchIDs=null,e._dispatchListeners=null,t}function 
c(e){"production"!==t.env.NODE_ENV&&p(e);var n=e._dispatchListeners,r=e._dispatchIDs;"production"!==t.env.NODE_ENV?h(!Array.isArray(n),"executeDirectDispatch(...): Invalid `event`."):h(!Array.isArray(n));var a=n?n(e,r):null;return e._dispatchListeners=null,e._dispatchIDs=null,a}function _(e){return!!e._dispatchListeners}var p,m=n(11),h=n(13),f={Mount:null,injectMount:function(e){f.Mount=e,"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?h(e&&e.getNode,"EventPluginUtils.injection.injectMount(...): Injected Mount module is missing getNode."):h(e&&e.getNode))}},y=m.topLevelTypes;"production"!==t.env.NODE_ENV&&(p=function(e){var n=e._dispatchListeners,r=e._dispatchIDs,a=Array.isArray(n),o=Array.isArray(r),i=o?r.length:r?1:0,s=a?n.length:n?1:0;"production"!==t.env.NODE_ENV?h(o===a&&i===s,"EventPluginUtils: Invalid `event`."):h(o===a&&i===s)});var M={isEndish:r,isMoveish:a,isStartish:o,executeDirectDispatch:c,executeDispatch:s,executeDispatchesInOrder:u,executeDispatchesInOrderStopAtTrue:l,hasDispatches:_,injection:f,useTouchEvents:!1};e.exports=M}).call(t,n(9))},function(e,t,n){"use strict";var r=n(12),a=r({bubbled:null,captured:null}),o=r({topBlur:null,topChange:null,topClick:null,topCompositionEnd:null,topCompositionStart:null,topCompositionUpdate:null,topContextMenu:null,topCopy:null,topCut:null,topDoubleClick:null,topDrag:null,topDragEnd:null,topDragEnter:null,topDragExit:null,topDragLeave:null,topDragOver:null,topDragStart:null,topDrop:null,topError:null,topFocus:null,topInput:null,topKeyDown:null,topKeyPress:null,topKeyUp:null,topLoad:null,topMouseDown:null,topMouseMove:null,topMouseOut:null,topMouseOver:null,topMouseUp:null,topPaste:null,topReset:null,topScroll:null,topSelectionChange:null,topSubmit:null,topTextInput:null,topTouchCancel:null,topTouchEnd:null,topTouchMove:null,topTouchStart:null,topWheel:null}),i={topLevelTypes:o,PropagationPhases:a};e.exports=i},function(e,t,n){(function(t){"use strict";var r=n(13),a=function(e){var 
n,a={};"production"!==t.env.NODE_ENV?r(e instanceof Object&&!Array.isArray(e),"keyMirror(...): Argument must be an object."):r(e instanceof Object&&!Array.isArray(e));for(n in e)e.hasOwnProperty(n)&&(a[n]=n);return a};e.exports=a}).call(t,n(9))},function(e,t,n){(function(t){"use strict";var n=function(e,n,r,a,o,i,s,u){if("production"!==t.env.NODE_ENV&&void 0===n)throw new Error("invariant requires an error message argument");if(!e){var d;if(void 0===n)d=new Error("Minified exception occurred; use the non-minified dev environment for the full error message and additional helpful warnings.");else{var l=[r,a,o,i,s,u],c=0;d=new Error("Invariant Violation: "+n.replace(/%s/g,function(){return l[c++]}))}throw d.framesToPop=1,d}};e.exports=n}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e,t){this.forEachFunction=e,this.forEachContext=t}function a(e,t,n,r){var a=e;a.forEachFunction.call(a.forEachContext,t,r)}function o(e,t,n){if(null==e)return e;var o=r.getPooled(t,n);p(e,a,o),r.release(o)}function i(e,t,n){this.mapResult=e,this.mapFunction=t,this.mapContext=n}function s(e,n,r,a){var o=e,i=o.mapResult,s=!i.hasOwnProperty(r);if("production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?m(s,"ReactChildren.map(...): Encountered two children with the same key, `%s`. 
Child keys must be unique; when two children share a key, only the first child will be used.",r):null),s){var u=o.mapFunction.call(o.mapContext,n,a);i[r]=u}}function u(e,t,n){if(null==e)return e;var r={},a=i.getPooled(r,t,n);return p(e,s,a),i.release(a),_.create(r)}function d(e,t,n,r){return null}function l(e,t){return p(e,d,null)}var c=n(15),_=n(16),p=n(24),m=n(21),h=c.twoArgumentPooler,f=c.threeArgumentPooler;c.addPoolingTo(r,h),c.addPoolingTo(i,f);var y={forEach:o,map:u,count:l};e.exports=y}).call(t,n(9))},function(e,t,n){(function(t){"use strict";var r=n(13),a=function(e){var t=this;if(t.instancePool.length){var n=t.instancePool.pop();return t.call(n,e),n}return new t(e)},o=function(e,t){var n=this;if(n.instancePool.length){var r=n.instancePool.pop();return n.call(r,e,t),r}return new n(e,t)},i=function(e,t,n){var r=this;if(r.instancePool.length){var a=r.instancePool.pop();return r.call(a,e,t,n),a}return new r(e,t,n)},s=function(e,t,n,r,a){var o=this;if(o.instancePool.length){var i=o.instancePool.pop();return o.call(i,e,t,n,r,a),i}return new o(e,t,n,r,a)},u=function(e){var n=this;"production"!==t.env.NODE_ENV?r(e instanceof n,"Trying to release an instance into a pool of a different type."):r(e instanceof n),e.destructor&&e.destructor(),n.instancePool.length<n.poolSize&&n.instancePool.push(e)},d=10,l=a,c=function(e,t){var n=e;return n.instancePool=[],n.getPooled=t||l,n.poolSize||(n.poolSize=d),n.release=u,n},_={addPoolingTo:c,oneArgumentPooler:a,twoArgumentPooler:o,threeArgumentPooler:i,fiveArgumentPooler:s};e.exports=_}).call(t,n(9))},function(e,t,n){(function(t){"use strict";var r=n(17),a=n(21);if("production"!==t.env.NODE_ENV){var o="_reactFragment",i="_reactDidWarn",s=!1;try{var u=function(){return 1};Object.defineProperty({},o,{enumerable:!1,value:!0}),Object.defineProperty({},"key",{enumerable:!0,get:u}),s=!0}catch(d){}var l=function(e,n){Object.defineProperty(e,n,{enumerable:!0,get:function(){return"production"!==t.env.NODE_ENV?a(this[i],"A ReactFragment 
is an opaque type. Accessing any of its properties is deprecated. Pass it to one of the React.Children helpers."):null,this[i]=!0,this[o][n]},set:function(e){"production"!==t.env.NODE_ENV?a(this[i],"A ReactFragment is an immutable opaque type. Mutating its properties is deprecated."):null,this[i]=!0,this[o][n]=e}})},c={},_=function(e){var t="";for(var n in e)t+=n+":"+typeof e[n]+",";var r=!!c[t];return c[t]=!0,r}}var p={create:function(e){if("production"!==t.env.NODE_ENV){if("object"!=typeof e||!e||Array.isArray(e))return"production"!==t.env.NODE_ENV?a(!1,"React.addons.createFragment only accepts a single object.",e):null,e;if(r.isValidElement(e))return"production"!==t.env.NODE_ENV?a(!1,"React.addons.createFragment does not accept a ReactElement without a wrapper object."):null,e;if(s){var n={};Object.defineProperty(n,o,{enumerable:!1,value:e}),Object.defineProperty(n,i,{writable:!0,enumerable:!1,value:!1});for(var u in e)l(n,u);return Object.preventExtensions(n),n}}return e},extract:function(e){return"production"!==t.env.NODE_ENV&&s?e[o]?e[o]:("production"!==t.env.NODE_ENV?a(_(e),"Any use of a keyed object should be wrapped in React.addons.createFragment(object) before being passed as a child."):null,e):e},extractIfFragment:function(e){if("production"!==t.env.NODE_ENV&&s){if(e[o])return e[o];for(var n in e)if(e.hasOwnProperty(n)&&r.isValidElement(e[n]))return p.extract(e)}return e}};e.exports=p}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e,n){Object.defineProperty(e,n,{configurable:!1,enumerable:!0,get:function(){return this._store?this._store[n]:null},set:function(e){"production"!==t.env.NODE_ENV?u(!1,"Don't set the %s property of the React element. 
Instead, specify the correct value when initially creating the element.",n):null,this._store[n]=e}})}function a(e){try{var t={props:!0};for(var n in t)r(e,n);l=!0}catch(a){}}var o=n(18),i=n(23),s=n(19),u=n(21),d={key:!0,ref:!0},l=!1,c=function(e,n,r,a,o,i){if(this.type=e,this.key=n,this.ref=r,this._owner=a,this._context=o,"production"!==t.env.NODE_ENV){this._store={props:i,originalProps:s({},i)};try{Object.defineProperty(this._store,"validated",{configurable:!1,enumerable:!1,writable:!0})}catch(u){}if(this._store.validated=!1,l)return void Object.freeze(this)}this.props=i};c.prototype={_isReactElement:!0},"production"!==t.env.NODE_ENV&&a(c.prototype),c.createElement=function(e,t,n){var r,a={},s=null,u=null;if(null!=t){u=void 0===t.ref?null:t.ref,s=void 0===t.key?null:""+t.key;for(r in t)t.hasOwnProperty(r)&&!d.hasOwnProperty(r)&&(a[r]=t[r])}var l=arguments.length-2;if(1===l)a.children=n;else if(l>1){for(var _=Array(l),p=0;l>p;p++)_[p]=arguments[p+2];a.children=_}if(e&&e.defaultProps){var m=e.defaultProps;for(r in m)"undefined"==typeof a[r]&&(a[r]=m[r])}return new c(e,s,u,i.current,o.current,a)},c.createFactory=function(e){var t=c.createElement.bind(null,e);return t.type=e,t},c.cloneAndReplaceProps=function(e,n){var r=new c(e.type,e.key,e.ref,e._owner,e._context,n);return"production"!==t.env.NODE_ENV&&(r._store.validated=e._store.validated),r},c.cloneElement=function(e,t,n){var r,a=s({},e.props),o=e.key,u=e.ref,l=e._owner;if(null!=t){void 0!==t.ref&&(u=t.ref,l=i.current),void 0!==t.key&&(o=""+t.key);for(r in t)t.hasOwnProperty(r)&&!d.hasOwnProperty(r)&&(a[r]=t[r])}var _=arguments.length-2;if(1===_)a.children=n;else if(_>1){for(var p=Array(_),m=0;_>m;m++)p[m]=arguments[m+2];a.children=p}return new c(e.type,o,u,l,e._context,a)},c.isValidElement=function(e){var t=!(!e||!e._isReactElement);return t},e.exports=c}).call(t,n(9))},function(e,t,n){(function(t){"use strict";var 
r=n(19),a=n(20),o=n(21),i=!1,s={current:a,withContext:function(e,n){"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?o(i,"withContext is deprecated and will be removed in a future version. Use a wrapper component with getChildContext instead."):null,i=!0);var a,u=s.current;s.current=r({},u,e);try{a=n()}finally{s.current=u}return a}};e.exports=s}).call(t,n(9))},function(e,t){"use strict";function n(e,t){if(null==e)throw new TypeError("Object.assign target cannot be null or undefined");for(var n=Object(e),r=Object.prototype.hasOwnProperty,a=1;a<arguments.length;a++){var o=arguments[a];if(null!=o){var i=Object(o);for(var s in i)r.call(i,s)&&(n[s]=i[s])}}return n}e.exports=n},function(e,t,n){(function(t){"use strict";var n={};"production"!==t.env.NODE_ENV&&Object.freeze(n),e.exports=n}).call(t,n(9))},function(e,t,n){(function(t){"use strict";var r=n(22),a=r;"production"!==t.env.NODE_ENV&&(a=function(e,t){for(var n=[],r=2,a=arguments.length;a>r;r++)n.push(arguments[r]);if(void 0===t)throw new Error("`warning(condition, format, ...args)` requires a warning message argument");if(t.length<10||/^[s\W]*$/.test(t))throw new Error("The warning format should be able to uniquely identify this warning. 
Please, use a more descriptive format than: "+t);if(0!==t.indexOf("Failed Composite propType: ")&&!e){var o=0,i="Warning: "+t.replace(/%s/g,function(){return n[o++]});console.warn(i);try{throw new Error(i)}catch(s){}}}),e.exports=a}).call(t,n(9))},function(e,t){function n(e){return function(){return e}}function r(){}r.thatReturns=n,r.thatReturnsFalse=n(!1),r.thatReturnsTrue=n(!0),r.thatReturnsNull=n(null),r.thatReturnsThis=function(){return this},r.thatReturnsArgument=function(e){return e},e.exports=r},function(e,t){"use strict";var n={current:null};e.exports=n},function(e,t,n){(function(t){"use strict";function r(e){return y[e]}function a(e,t){return e&&null!=e.key?i(e.key):t.toString(36)}function o(e){return(""+e).replace(M,r)}function i(e){return"$"+o(e)}function s(e,n,r,o,u){var c=typeof e;if(("undefined"===c||"boolean"===c)&&(e=null),null===e||"string"===c||"number"===c||d.isValidElement(e))return o(u,e,""===n?h+a(e,0):n,r),1;var y,M,g,L=0;if(Array.isArray(e))for(var D=0;D<e.length;D++)y=e[D],M=(""!==n?n+f:h)+a(y,D),g=r+L,L+=s(y,M,g,o,u);else{var Y=_(e);if(Y){var b,k=Y.call(e);if(Y!==e.entries)for(var T=0;!(b=k.next()).done;)y=b.value,M=(""!==n?n+f:h)+a(y,T++),g=r+L,L+=s(y,M,g,o,u);else for("production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?m(v,"Using Maps as children is not yet fully supported. It is an experimental feature that might be removed. 
Convert it to a sequence / iterable of keyed ReactElements instead."):null,v=!0);!(b=k.next()).done;){var E=b.value;E&&(y=E[1],M=(""!==n?n+f:h)+i(E[0])+f+a(y,0),g=r+L,L+=s(y,M,g,o,u))}}else if("object"===c){"production"!==t.env.NODE_ENV?p(1!==e.nodeType,"traverseAllChildren(...): Encountered an invalid child; DOM elements are not valid children of React components."):p(1!==e.nodeType);var w=l.extract(e);for(var N in w)w.hasOwnProperty(N)&&(y=w[N],M=(""!==n?n+f:h)+i(N)+f+a(y,0),g=r+L,L+=s(y,M,g,o,u))}}return L}function u(e,t,n){return null==e?0:s(e,"",0,t,n)}var d=n(17),l=n(16),c=n(25),_=n(27),p=n(13),m=n(21),h=c.SEPARATOR,f=":",y={"=":"=0",".":"=1",":":"=2"},M=/[=.:]/g,v=!1;e.exports=u}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e){return p+e.toString(36)}function a(e,t){return e.charAt(t)===p||t===e.length}function o(e){return""===e||e.charAt(0)===p&&e.charAt(e.length-1)!==p}function i(e,t){return 0===t.indexOf(e)&&a(t,e.length)}function s(e){return e?e.substr(0,e.lastIndexOf(p)):""}function u(e,n){if("production"!==t.env.NODE_ENV?_(o(e)&&o(n),"getNextDescendantID(%s, %s): Received an invalid React DOM ID.",e,n):_(o(e)&&o(n)),"production"!==t.env.NODE_ENV?_(i(e,n),"getNextDescendantID(...): React has made an invalid assumption about the DOM hierarchy. 
Expected `%s` to be an ancestor of `%s`.",e,n):_(i(e,n)),e===n)return e;var r,s=e.length+m;for(r=s;r<n.length&&!a(n,r);r++);return n.substr(0,r)}function d(e,n){var r=Math.min(e.length,n.length);if(0===r)return"";for(var i=0,s=0;r>=s;s++)if(a(e,s)&&a(n,s))i=s;else if(e.charAt(s)!==n.charAt(s))break;var u=e.substr(0,i);return"production"!==t.env.NODE_ENV?_(o(u),"getFirstCommonAncestorID(%s, %s): Expected a valid React DOM ID: %s",e,n,u):_(o(u)),u}function l(e,n,r,a,o,d){e=e||"",n=n||"","production"!==t.env.NODE_ENV?_(e!==n,"traverseParentPath(...): Cannot traverse from and to the same ID, `%s`.",e):_(e!==n);var l=i(n,e);"production"!==t.env.NODE_ENV?_(l||i(e,n),"traverseParentPath(%s, %s, ...): Cannot traverse from two IDs that do not have a parent path.",e,n):_(l||i(e,n));for(var c=0,p=l?s:u,m=e;;m=p(m,n)){var f;if(o&&m===e||d&&m===n||(f=r(m,l,a)),f===!1||m===n)break;"production"!==t.env.NODE_ENV?_(c++<h,"traverseParentPath(%s, %s, ...): Detected an infinite loop while traversing the React DOM ID tree. 
This may be due to malformed IDs: %s",e,n):_(c++<h)}}var c=n(26),_=n(13),p=".",m=p.length,h=100,f={createReactRootID:function(){return r(c.createReactRootIndex())},createReactID:function(e,t){return e+t},getReactRootIDFromNodeID:function(e){if(e&&e.charAt(0)===p&&e.length>1){var t=e.indexOf(p,1);return t>-1?e.substr(0,t):e}return null},traverseEnterLeave:function(e,t,n,r,a){var o=d(e,t);o!==e&&l(e,o,n,r,!1,!0),o!==t&&l(o,t,n,a,!0,!1)},traverseTwoPhase:function(e,t,n){e&&(l("",e,t,n,!0,!1),l(e,"",t,n,!1,!0))},traverseAncestors:function(e,t,n){l("",e,t,n,!0,!1)},_getFirstCommonAncestorID:d,_getNextDescendantID:u,isAncestorIDOf:i,SEPARATOR:p};e.exports=f}).call(t,n(9))},function(e,t){"use strict";var n={injectCreateReactRootIndex:function(e){r.createReactRootIndex=e}},r={createReactRootIndex:null,injection:n};e.exports=r},function(e,t){"use strict";function n(e){var t=e&&(r&&e[r]||e[a]);return"function"==typeof t?t:void 0}var r="function"==typeof Symbol&&Symbol.iterator,a="@@iterator";e.exports=n},function(e,t,n){(function(t){"use strict";function r(e,t){this.props=e,this.context=t}var a=n(29),o=n(13),i=n(21);if(r.prototype.setState=function(e,n){"production"!==t.env.NODE_ENV?o("object"==typeof e||"function"==typeof e||null==e,"setState(...): takes an object of state variables to update or a function which returns an object of state variables."):o("object"==typeof e||"function"==typeof e||null==e),"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?i(null!=e,"setState(...): You passed an undefined or null state object; instead, use forceUpdate()."):null),a.enqueueSetState(this,e),n&&a.enqueueCallback(this,n)},r.prototype.forceUpdate=function(e){a.enqueueForceUpdate(this),e&&a.enqueueCallback(this,e)},"production"!==t.env.NODE_ENV){var s={getDOMNode:["getDOMNode","Use React.findDOMNode(component) instead."],isMounted:["isMounted","Instead, make sure to clean up subscriptions and pending requests in componentWillUnmount to prevent memory 
leaks."],replaceProps:["replaceProps","Instead, call React.render again at the top level."],replaceState:["replaceState","Refactor your code to use setState instead (see https://github.com/facebook/react/issues/3236)."],setProps:["setProps","Instead, call React.render again at the top level."]},u=function(e,n){try{Object.defineProperty(r.prototype,e,{get:function(){"production"!==t.env.NODE_ENV?i(!1,"%s(...) is deprecated in plain JavaScript React classes. %s",n[0],n[1]):null}})}catch(a){}};for(var d in s)s.hasOwnProperty(d)&&u(d,s[d])}e.exports=r}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e){e!==o.currentlyMountingInstance&&d.enqueueUpdate(e)}function a(e,n){"production"!==t.env.NODE_ENV?c(null==i.current,"%s(...): Cannot update during an existing state transition (such as within `render`). Render methods should be a pure function of props and state.",n):c(null==i.current);var r=u.get(e);return r?r===o.currentlyUnmountingInstance?null:r:("production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?_(!n,"%s(...): Can only update a mounted or mounting component. This usually means you called %s() on an unmounted component. 
This is a no-op.",n,n):null),null)}var o=n(30),i=n(23),s=n(17),u=n(31),d=n(32),l=n(19),c=n(13),_=n(21),p={enqueueCallback:function(e,n){"production"!==t.env.NODE_ENV?c("function"==typeof n,"enqueueCallback(...): You called `setProps`, `replaceProps`, `setState`, `replaceState`, or `forceUpdate` with a callback that isn't callable."):c("function"==typeof n);var i=a(e);return i&&i!==o.currentlyMountingInstance?(i._pendingCallbacks?i._pendingCallbacks.push(n):i._pendingCallbacks=[n],void r(i)):null},enqueueCallbackInternal:function(e,n){"production"!==t.env.NODE_ENV?c("function"==typeof n,"enqueueCallback(...): You called `setProps`, `replaceProps`, `setState`, `replaceState`, or `forceUpdate` with a callback that isn't callable."):c("function"==typeof n),e._pendingCallbacks?e._pendingCallbacks.push(n):e._pendingCallbacks=[n],r(e)},enqueueForceUpdate:function(e){var t=a(e,"forceUpdate");t&&(t._pendingForceUpdate=!0,r(t))},enqueueReplaceState:function(e,t){var n=a(e,"replaceState");n&&(n._pendingStateQueue=[t],n._pendingReplaceState=!0,r(n))},enqueueSetState:function(e,t){var n=a(e,"setState");if(n){var o=n._pendingStateQueue||(n._pendingStateQueue=[]);o.push(t),r(n)}},enqueueSetProps:function(e,n){var o=a(e,"setProps");if(o){"production"!==t.env.NODE_ENV?c(o._isTopLevel,"setProps(...): You called `setProps` on a component with a parent. This is an anti-pattern since props will get reactively updated when rendered. Instead, change the owner's `render` method to pass the correct value as props to the component where it is created."):c(o._isTopLevel);var i=o._pendingElement||o._currentElement,u=l({},i.props,n);o._pendingElement=s.cloneAndReplaceProps(i,u),r(o)}},enqueueReplaceProps:function(e,n){var o=a(e,"replaceProps");if(o){"production"!==t.env.NODE_ENV?c(o._isTopLevel,"replaceProps(...): You called `replaceProps` on a component with a parent. This is an anti-pattern since props will get reactively updated when rendered. 
Instead, change the owner's `render` method to pass the correct value as props to the component where it is created."):c(o._isTopLevel);var i=o._pendingElement||o._currentElement;o._pendingElement=s.cloneAndReplaceProps(i,n),r(o)}},enqueueElementInternal:function(e,t){e._pendingElement=t,r(e)}};e.exports=p}).call(t,n(9))},function(e,t){"use strict";var n={currentlyMountingInstance:null,currentlyUnmountingInstance:null};e.exports=n},function(e,t){"use strict";var n={remove:function(e){e._reactInternalInstance=void 0},get:function(e){return e._reactInternalInstance},has:function(e){return void 0!==e._reactInternalInstance},set:function(e,t){e._reactInternalInstance=t}};e.exports=n},function(e,t,n){(function(t){"use strict";function r(){"production"!==t.env.NODE_ENV?y(w.ReactReconcileTransaction&&D,"ReactUpdates: must inject a reconcile transaction class and batching strategy"):y(w.ReactReconcileTransaction&&D)}function a(){this.reinitializeTransaction(),this.dirtyComponentsLength=null,this.callbackQueue=l.getPooled(),this.reconcileTransaction=w.ReactReconcileTransaction.getPooled()}function o(e,t,n,a,o){r(),D.batchedUpdates(e,t,n,a,o)}function i(e,t){return e._mountOrder-t._mountOrder}function s(e){var n=e.dirtyComponentsLength;"production"!==t.env.NODE_ENV?y(n===v.length,"Expected flush transaction's stored dirty-components length (%s) to match dirty-components array length (%s).",n,v.length):y(n===v.length),v.sort(i);for(var r=0;n>r;r++){var a=v[r],o=a._pendingCallbacks;
if(a._pendingCallbacks=null,m.performUpdateIfNecessary(a,e.reconcileTransaction),o)for(var s=0;s<o.length;s++)e.callbackQueue.enqueue(o[s],a.getPublicInstance())}}function u(e){return r(),"production"!==t.env.NODE_ENV?M(null==_.current,"enqueueUpdate(): Render methods should be a pure function of props and state; triggering nested component updates from render is not allowed. If necessary, trigger nested updates in componentDidUpdate."):null,D.isBatchingUpdates?void v.push(e):void D.batchedUpdates(u,e)}function d(e,n){"production"!==t.env.NODE_ENV?y(D.isBatchingUpdates,"ReactUpdates.asap: Can't enqueue an asap callback in a context whereupdates are not being batched."):y(D.isBatchingUpdates),g.enqueue(e,n),L=!0}var l=n(33),c=n(15),_=n(23),p=n(34),m=n(35),h=n(42),f=n(19),y=n(13),M=n(21),v=[],g=l.getPooled(),L=!1,D=null,Y={initialize:function(){this.dirtyComponentsLength=v.length},close:function(){this.dirtyComponentsLength!==v.length?(v.splice(0,this.dirtyComponentsLength),T()):v.length=0}},b={initialize:function(){this.callbackQueue.reset()},close:function(){this.callbackQueue.notifyAll()}},k=[Y,b];f(a.prototype,h.Mixin,{getTransactionWrappers:function(){return k},destructor:function(){this.dirtyComponentsLength=null,l.release(this.callbackQueue),this.callbackQueue=null,w.ReactReconcileTransaction.release(this.reconcileTransaction),this.reconcileTransaction=null},perform:function(e,t,n){return h.Mixin.perform.call(this,this.reconcileTransaction.perform,this.reconcileTransaction,e,t,n)}}),c.addPoolingTo(a);var T=function(){for(;v.length||L;){if(v.length){var e=a.getPooled();e.perform(s,null,e),a.release(e)}if(L){L=!1;var t=g;g=l.getPooled(),t.notifyAll(),l.release(t)}}};T=p.measure("ReactUpdates","flushBatchedUpdates",T);var E={injectReconcileTransaction:function(e){"production"!==t.env.NODE_ENV?y(e,"ReactUpdates: must provide a reconcile transaction 
class"):y(e),w.ReactReconcileTransaction=e},injectBatchingStrategy:function(e){"production"!==t.env.NODE_ENV?y(e,"ReactUpdates: must provide a batching strategy"):y(e),"production"!==t.env.NODE_ENV?y("function"==typeof e.batchedUpdates,"ReactUpdates: must provide a batchedUpdates() function"):y("function"==typeof e.batchedUpdates),"production"!==t.env.NODE_ENV?y("boolean"==typeof e.isBatchingUpdates,"ReactUpdates: must provide an isBatchingUpdates boolean attribute"):y("boolean"==typeof e.isBatchingUpdates),D=e}},w={ReactReconcileTransaction:null,batchedUpdates:o,enqueueUpdate:u,flushBatchedUpdates:T,injection:E,asap:d};e.exports=w}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(){this._callbacks=null,this._contexts=null}var a=n(15),o=n(19),i=n(13);o(r.prototype,{enqueue:function(e,t){this._callbacks=this._callbacks||[],this._contexts=this._contexts||[],this._callbacks.push(e),this._contexts.push(t)},notifyAll:function(){var e=this._callbacks,n=this._contexts;if(e){"production"!==t.env.NODE_ENV?i(e.length===n.length,"Mismatched list of contexts in callback queue"):i(e.length===n.length),this._callbacks=null,this._contexts=null;for(var r=0,a=e.length;a>r;r++)e[r].call(n[r]);e.length=0,n.length=0}},reset:function(){this._callbacks=null,this._contexts=null},destructor:function(){this.reset()}}),a.addPoolingTo(r),e.exports=r}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function n(e,t,n){return n}var r={enableMeasure:!1,storedMeasure:n,measureMethods:function(e,n,a){if("production"!==t.env.NODE_ENV)for(var o in a)a.hasOwnProperty(o)&&(e[o]=r.measure(n,a[o],e[o]))},measure:function(e,n,a){if("production"!==t.env.NODE_ENV){var o=null,i=function(){return r.enableMeasure?(o||(o=r.storedMeasure(e,n,a)),o.apply(this,arguments)):a.apply(this,arguments)};return i.displayName=e+"_"+n,i}return a},injection:{injectMeasure:function(e){r.storedMeasure=e}}};e.exports=r}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function 
r(){a.attachRefs(this,this._currentElement)}var a=n(36),o=n(38),i={mountComponent:function(e,n,a,i){var s=e.mountComponent(n,a,i);return"production"!==t.env.NODE_ENV&&o.checkAndWarnForMutatedProps(e._currentElement),a.getReactMountReady().enqueue(r,e),s},unmountComponent:function(e){a.detachRefs(e,e._currentElement),e.unmountComponent()},receiveComponent:function(e,n,i,s){var u=e._currentElement;if(n!==u||null==n._owner){"production"!==t.env.NODE_ENV&&o.checkAndWarnForMutatedProps(n);var d=a.shouldUpdateRefs(u,n);d&&a.detachRefs(e,u),e.receiveComponent(n,i,s),d&&i.getReactMountReady().enqueue(r,e)}},performUpdateIfNecessary:function(e,t){e.performUpdateIfNecessary(t)}};e.exports=i}).call(t,n(9))},function(e,t,n){"use strict";function r(e,t,n){"function"==typeof e?e(t.getPublicInstance()):o.addComponentAsRefTo(t,e,n)}function a(e,t,n){"function"==typeof e?e(null):o.removeComponentAsRefFrom(t,e,n)}var o=n(37),i={};i.attachRefs=function(e,t){var n=t.ref;null!=n&&r(n,e,t._owner)},i.shouldUpdateRefs=function(e,t){return t._owner!==e._owner||t.ref!==e.ref},i.detachRefs=function(e,t){var n=t.ref;null!=n&&a(n,e,t._owner)},e.exports=i},function(e,t,n){(function(t){"use strict";var r=n(13),a={isValidOwner:function(e){return!(!e||"function"!=typeof e.attachRef||"function"!=typeof e.detachRef)},addComponentAsRefTo:function(e,n,o){"production"!==t.env.NODE_ENV?r(a.isValidOwner(o),"addComponentAsRefTo(...): Only a ReactOwner can have refs. This usually means that you're trying to add a ref to a component that doesn't have an owner (that is, was not created inside of another component's `render` method). Try rendering this component inside of a new top-level component which will hold the ref."):r(a.isValidOwner(o)),o.attachRef(n,e)},removeComponentAsRefFrom:function(e,n,o){"production"!==t.env.NODE_ENV?r(a.isValidOwner(o),"removeComponentAsRefFrom(...): Only a ReactOwner can have refs. 
This usually means that you're trying to remove a ref to a component that doesn't have an owner (that is, was not created inside of another component's `render` method). Try rendering this component inside of a new top-level component which will hold the ref."):r(a.isValidOwner(o)),o.getPublicInstance().refs[n]===e.getPublicInstance()&&o.detachRef(n)}};e.exports=a}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(){if(v.current){var e=v.current.getName();if(e)return" Check the render method of `"+e+"`."}return""}function a(e){var t=e&&e.getPublicInstance();if(t){var n=t.constructor;if(n)return n.displayName||n.name||void 0}}function o(){var e=v.current;return e&&a(e)||void 0}function i(e,t){e._store.validated||null!=e.key||(e._store.validated=!0,u('Each child in an array or iterator should have a unique "key" prop.',e,t))}function s(e,t,n){T.test(e)&&u("Child objects should have non-numeric keys so ordering is preserved.",t,n)}function u(e,n,r){var i=o(),s="string"==typeof r?r:r.displayName||r.name,u=i||s,d=b[e]||(b[e]={});if(!d.hasOwnProperty(u)){d[u]=!0;var l=i?" Check the render method of "+i+".":s?" 
Check the React.render call using <"+s+">.":"",c="";if(n&&n._owner&&n._owner!==v.current){var _=a(n._owner);c=" It was passed a child from "+_+"."}"production"!==t.env.NODE_ENV?Y(!1,e+"%s%s See https://fb.me/react-warning-keys for more information.",l,c):null}}function d(e,t){if(Array.isArray(e))for(var n=0;n<e.length;n++){var r=e[n];h.isValidElement(r)&&i(r,t)}else if(h.isValidElement(e))e._store.validated=!0;else if(e){var a=L(e);if(a){if(a!==e.entries)for(var o,u=a.call(e);!(o=u.next()).done;)h.isValidElement(o.value)&&i(o.value,t)}else if("object"==typeof e){var d=f.extractIfFragment(e);for(var l in d)d.hasOwnProperty(l)&&s(l,d[l],t)}}}function l(e,n,a,o){for(var i in n)if(n.hasOwnProperty(i)){var s;try{"production"!==t.env.NODE_ENV?D("function"==typeof n[i],"%s: %s type `%s` is invalid; it must be a function, usually from React.PropTypes.",e||"React class",M[o],i):D("function"==typeof n[i]),s=n[i](a,i,e,o)}catch(u){s=u}if(s instanceof Error&&!(s.message in k)){k[s.message]=!0;var d=r(this);"production"!==t.env.NODE_ENV?Y(!1,"Failed propType: %s%s",s.message,d):null}}}function c(e,n){var r=n.type,a="string"==typeof r?r:r.displayName,o=n._owner?n._owner.getPublicInstance().constructor.displayName:null,i=e+"|"+a+"|"+o;if(!E.hasOwnProperty(i)){E[i]=!0;var s="";a&&(s=" <"+a+" />");var u="";o&&(u=" The element was created by "+o+"."),"production"!==t.env.NODE_ENV?Y(!1,"Don't set .props.%s of the React component%s. 
Instead, specify the correct value when initially creating the element or use React.cloneElement to make a new element with updated props.%s",e,s,u):null}}function _(e,t){return e!==e?t!==t:0===e&&0===t?1/e===1/t:e===t}function p(e){if(e._store){var t=e._store.originalProps,n=e.props;for(var r in n)n.hasOwnProperty(r)&&(t.hasOwnProperty(r)&&_(t[r],n[r])||(c(r,e),t[r]=n[r]))}}function m(e){if(null!=e.type){var n=g.getComponentClassForElement(e),r=n.displayName||n.name;n.propTypes&&l(r,n.propTypes,e.props,y.prop),"function"==typeof n.getDefaultProps&&("production"!==t.env.NODE_ENV?Y(n.getDefaultProps.isReactClassApproved,"getDefaultProps is only used on classic React.createClass definitions. Use a static property named `defaultProps` instead."):null)}}var h=n(17),f=n(16),y=n(39),M=n(40),v=n(23),g=n(41),L=n(27),D=n(13),Y=n(21),b={},k={},T=/^\d+$/,E={},w={checkAndWarnForMutatedProps:p,createElement:function(e,n,r){"production"!==t.env.NODE_ENV?Y(null!=e,"React.createElement: type should not be null or undefined. It should be a string (for DOM elements) or a ReactClass (for composite components)."):null;var a=h.createElement.apply(this,arguments);if(null==a)return a;for(var o=2;o<arguments.length;o++)d(arguments[o],e);return m(a),a},createFactory:function(e){var n=w.createElement.bind(null,e);if(n.type=e,"production"!==t.env.NODE_ENV)try{Object.defineProperty(n,"type",{enumerable:!1,get:function(){return"production"!==t.env.NODE_ENV?Y(!1,"Factory.type is deprecated. 
Access the class directly before passing it to createFactory."):null,Object.defineProperty(this,"type",{value:e}),e}})}catch(r){}return n},cloneElement:function(e,t,n){for(var r=h.cloneElement.apply(this,arguments),a=2;a<arguments.length;a++)d(arguments[a],r.type);return m(r),r}};e.exports=w}).call(t,n(9))},function(e,t,n){"use strict";var r=n(12),a=r({prop:null,context:null,childContext:null});e.exports=a},function(e,t,n){(function(t){"use strict";var n={};"production"!==t.env.NODE_ENV&&(n={prop:"prop",context:"context",childContext:"child context"}),e.exports=n}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e){if("function"==typeof e.type)return e.type;var t=e.type,n=c[t];return null==n&&(c[t]=n=d(t)),n}function a(e){return"production"!==t.env.NODE_ENV?u(l,"There is no registered component for the tag %s",e.type):u(l),new l(e.type,e.props)}function o(e){return new _(e)}function i(e){return e instanceof _}var s=n(19),u=n(13),d=null,l=null,c={},_=null,p={injectGenericComponentClass:function(e){l=e},injectTextComponentClass:function(e){_=e},injectComponentClasses:function(e){s(c,e)},injectAutoWrapper:function(e){d=e}},m={getComponentClassForElement:r,createInternalComponent:a,createInstanceForText:o,isTextComponent:i,injection:p};e.exports=m}).call(t,n(9))},function(e,t,n){(function(t){"use strict";var r=n(13),a={reinitializeTransaction:function(){this.transactionWrappers=this.getTransactionWrappers(),this.wrapperInitData?this.wrapperInitData.length=0:this.wrapperInitData=[],this._isInTransaction=!1},_isInTransaction:!1,getTransactionWrappers:null,isInTransaction:function(){return!!this._isInTransaction},perform:function(e,n,a,o,i,s,u,d){"production"!==t.env.NODE_ENV?r(!this.isInTransaction(),"Transaction.perform(...): Cannot initialize a transaction when there is already an outstanding transaction."):r(!this.isInTransaction());var 
l,c;try{this._isInTransaction=!0,l=!0,this.initializeAll(0),c=e.call(n,a,o,i,s,u,d),l=!1}finally{try{if(l)try{this.closeAll(0)}catch(_){}else this.closeAll(0)}finally{this._isInTransaction=!1}}return c},initializeAll:function(e){for(var t=this.transactionWrappers,n=e;n<t.length;n++){var r=t[n];try{this.wrapperInitData[n]=o.OBSERVED_ERROR,this.wrapperInitData[n]=r.initialize?r.initialize.call(this):null}finally{if(this.wrapperInitData[n]===o.OBSERVED_ERROR)try{this.initializeAll(n+1)}catch(a){}}}},closeAll:function(e){"production"!==t.env.NODE_ENV?r(this.isInTransaction(),"Transaction.closeAll(): Cannot close transaction when none are open."):r(this.isInTransaction());for(var n=this.transactionWrappers,a=e;a<n.length;a++){var i,s=n[a],u=this.wrapperInitData[a];try{i=!0,u!==o.OBSERVED_ERROR&&s.close&&s.close.call(this,u),i=!1}finally{if(i)try{this.closeAll(a+1)}catch(d){}}}this.wrapperInitData.length=0}},o={Mixin:a,OBSERVED_ERROR:{}};e.exports=o}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e,n,r){for(var a in n)n.hasOwnProperty(a)&&("production"!==t.env.NODE_ENV?k("function"==typeof n[a],"%s: %s type `%s` is invalid; it must be a function, usually from React.PropTypes.",e.displayName||"ReactClass",v[r],a):null)}function a(e,n){var r=N.hasOwnProperty(n)?N[n]:null;C.hasOwnProperty(n)&&("production"!==t.env.NODE_ENV?D(r===E.OVERRIDE_BASE,"ReactClassInterface: You are attempting to override `%s` from your class specification. Ensure that your method names do not overlap with React methods.",n):D(r===E.OVERRIDE_BASE)),e.hasOwnProperty(n)&&("production"!==t.env.NODE_ENV?D(r===E.DEFINE_MANY||r===E.DEFINE_MANY_MERGED,"ReactClassInterface: You are attempting to define `%s` on your component more than once. This conflict may be due to a mixin.",n):D(r===E.DEFINE_MANY||r===E.DEFINE_MANY_MERGED))}function o(e,n){if(n){"production"!==t.env.NODE_ENV?D("function"!=typeof n,"ReactClass: You're attempting to use a component class as a mixin. 
Instead, just use a regular object."):D("function"!=typeof n),"production"!==t.env.NODE_ENV?D(!m.isValidElement(n),"ReactClass: You're attempting to use a component as a mixin. Instead, just use a regular object."):D(!m.isValidElement(n));var r=e.prototype;n.hasOwnProperty(T)&&S.mixins(e,n.mixins);for(var o in n)if(n.hasOwnProperty(o)&&o!==T){var i=n[o];if(a(r,o),S.hasOwnProperty(o))S[o](e,i);else{var s=N.hasOwnProperty(o),l=r.hasOwnProperty(o),c=i&&i.__reactDontBind,_="function"==typeof i,p=_&&!s&&!l&&!c;if(p)r.__reactAutoBindMap||(r.__reactAutoBindMap={}),r.__reactAutoBindMap[o]=i,r[o]=i;else if(l){var h=N[o];"production"!==t.env.NODE_ENV?D(s&&(h===E.DEFINE_MANY_MERGED||h===E.DEFINE_MANY),"ReactClass: Unexpected spec policy %s for key %s when mixing in component specs.",h,o):D(s&&(h===E.DEFINE_MANY_MERGED||h===E.DEFINE_MANY)),h===E.DEFINE_MANY_MERGED?r[o]=u(r[o],i):h===E.DEFINE_MANY&&(r[o]=d(r[o],i))}else r[o]=i,"production"!==t.env.NODE_ENV&&"function"==typeof i&&n.displayName&&(r[o].displayName=n.displayName+"_"+o)}}}}function i(e,n){if(n)for(var r in n){var a=n[r];if(n.hasOwnProperty(r)){var o=r in S;"production"!==t.env.NODE_ENV?D(!o,'ReactClass: You are attempting to define a reserved property, `%s`, that shouldn\'t be on the "statics" key. Define it as an instance property instead; it will still be accessible on the constructor.',r):D(!o);var i=r in e;"production"!==t.env.NODE_ENV?D(!i,"ReactClass: You are attempting to define `%s` on your component more than once. This conflict may be due to a mixin.",r):D(!i),e[r]=a}}}function s(e,n){"production"!==t.env.NODE_ENV?D(e&&n&&"object"==typeof e&&"object"==typeof n,"mergeIntoWithNoDuplicateKeys(): Cannot merge non-objects."):D(e&&n&&"object"==typeof e&&"object"==typeof n);for(var r in n)n.hasOwnProperty(r)&&("production"!==t.env.NODE_ENV?D(void 0===e[r],"mergeIntoWithNoDuplicateKeys(): Tried to merge two objects with the same key: `%s`. 
This conflict may be due to a mixin; in particular, this may be caused by two getInitialState() or getDefaultProps() methods returning objects with clashing keys.",r):D(void 0===e[r]),e[r]=n[r]);return e}function u(e,t){return function(){var n=e.apply(this,arguments),r=t.apply(this,arguments);if(null==n)return r;if(null==r)return n;var a={};return s(a,n),s(a,r),a}}function d(e,t){return function(){e.apply(this,arguments),t.apply(this,arguments)}}function l(e,n){var r=n.bind(e);if("production"!==t.env.NODE_ENV){r.__reactBoundContext=e,r.__reactBoundMethod=n,r.__reactBoundArguments=null;var a=e.constructor.displayName,o=r.bind;r.bind=function(i){for(var s=[],u=1,d=arguments.length;d>u;u++)s.push(arguments[u]);if(i!==e&&null!==i)"production"!==t.env.NODE_ENV?k(!1,"bind(): React component methods may only be bound to the component instance. See %s",a):null;else if(!s.length)return"production"!==t.env.NODE_ENV?k(!1,"bind(): You are binding a component method to the component. React does this for you automatically in a high-performance way, so you can safely remove this call. 
See %s",a):null,r;var l=o.apply(r,arguments);return l.__reactBoundContext=e,l.__reactBoundMethod=n,l.__reactBoundArguments=s,l}}return r}function c(e){for(var t in e.__reactAutoBindMap)if(e.__reactAutoBindMap.hasOwnProperty(t)){var n=e.__reactAutoBindMap[t];e[t]=l(e,h.guard(n,e.constructor.displayName+"."+t))}}var _=n(28),p=n(23),m=n(17),h=n(44),f=n(31),y=n(30),M=n(39),v=n(40),g=n(29),L=n(19),D=n(13),Y=n(12),b=n(45),k=n(21),T=b({mixins:null}),E=Y({DEFINE_ONCE:null,DEFINE_MANY:null,OVERRIDE_BASE:null,DEFINE_MANY_MERGED:null}),w=[],N={mixins:E.DEFINE_MANY,statics:E.DEFINE_MANY,propTypes:E.DEFINE_MANY,contextTypes:E.DEFINE_MANY,childContextTypes:E.DEFINE_MANY,getDefaultProps:E.DEFINE_MANY_MERGED,getInitialState:E.DEFINE_MANY_MERGED,getChildContext:E.DEFINE_MANY_MERGED,render:E.DEFINE_ONCE,componentWillMount:E.DEFINE_MANY,componentDidMount:E.DEFINE_MANY,componentWillReceiveProps:E.DEFINE_MANY,shouldComponentUpdate:E.DEFINE_ONCE,componentWillUpdate:E.DEFINE_MANY,componentDidUpdate:E.DEFINE_MANY,componentWillUnmount:E.DEFINE_MANY,updateComponent:E.OVERRIDE_BASE},S={displayName:function(e,t){e.displayName=t},mixins:function(e,t){if(t)for(var n=0;n<t.length;n++)o(e,t[n])},childContextTypes:function(e,n){"production"!==t.env.NODE_ENV&&r(e,n,M.childContext),e.childContextTypes=L({},e.childContextTypes,n)},contextTypes:function(e,n){"production"!==t.env.NODE_ENV&&r(e,n,M.context),e.contextTypes=L({},e.contextTypes,n)},getDefaultProps:function(e,t){e.getDefaultProps?e.getDefaultProps=u(e.getDefaultProps,t):e.getDefaultProps=t},propTypes:function(e,n){"production"!==t.env.NODE_ENV&&r(e,n,M.prop),e.propTypes=L({},e.propTypes,n)},statics:function(e,t){i(e,t)}},x={enumerable:!1,get:function(){var e=this.displayName||this.name||"Component";return"production"!==t.env.NODE_ENV?k(!1,"%s.type is deprecated. 
Use %s directly to access the class.",e,e):null,Object.defineProperty(this,"type",{value:this}),this}},C={replaceState:function(e,t){g.enqueueReplaceState(this,e),t&&g.enqueueCallback(this,t)},isMounted:function(){if("production"!==t.env.NODE_ENV){var e=p.current;null!==e&&("production"!==t.env.NODE_ENV?k(e._warnedAboutRefsInRender,"%s is accessing isMounted inside its render() function. render() should be a pure function of props and state. It should never access something that requires stale data from the previous render, such as refs. Move this logic to componentDidMount and componentDidUpdate instead.",e.getName()||"A component"):null,e._warnedAboutRefsInRender=!0)}var n=f.get(this);return n&&n!==y.currentlyMountingInstance},setProps:function(e,t){g.enqueueSetProps(this,e),t&&g.enqueueCallback(this,t)},replaceProps:function(e,t){g.enqueueReplaceProps(this,e),t&&g.enqueueCallback(this,t)}},O=function(){};L(O.prototype,_.prototype,C);var P={createClass:function(e){var n=function(e,r){"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?k(this instanceof n,"Something is calling a React component directly. Use a factory or JSX instead. 
See: https://fb.me/react-legacyfactory"):null),this.__reactAutoBindMap&&c(this),this.props=e,this.context=r,this.state=null;var a=this.getInitialState?this.getInitialState():null;"production"!==t.env.NODE_ENV&&"undefined"==typeof a&&this.getInitialState._isMockFunction&&(a=null),"production"!==t.env.NODE_ENV?D("object"==typeof a&&!Array.isArray(a),"%s.getInitialState(): must return an object or null",n.displayName||"ReactCompositeComponent"):D("object"==typeof a&&!Array.isArray(a)),this.state=a};n.prototype=new O,n.prototype.constructor=n,w.forEach(o.bind(null,n)),o(n,e),n.getDefaultProps&&(n.defaultProps=n.getDefaultProps()),"production"!==t.env.NODE_ENV&&(n.getDefaultProps&&(n.getDefaultProps.isReactClassApproved={}),n.prototype.getInitialState&&(n.prototype.getInitialState.isReactClassApproved={})),"production"!==t.env.NODE_ENV?D(n.prototype.render,"createClass(...): Class specification must implement a `render` method."):D(n.prototype.render),"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?k(!n.prototype.componentShouldUpdate,"%s has a method called componentShouldUpdate(). Did you mean shouldComponentUpdate()? 
The name is phrased as a question because the function is expected to return a value.",e.displayName||"A component"):null);for(var r in N)n.prototype[r]||(n.prototype[r]=null);if(n.type=n,"production"!==t.env.NODE_ENV)try{Object.defineProperty(n,"type",x)}catch(a){}return n},injection:{injectMixin:function(e){w.push(e)}}};e.exports=P}).call(t,n(9))},function(e,t){"use strict";var n={guard:function(e,t){return e}};e.exports=n},function(e,t){var n=function(e){var t;for(t in e)if(e.hasOwnProperty(t))return t;return null};e.exports=n},function(e,t,n){(function(t){"use strict";function r(e){return"production"!==t.env.NODE_ENV?o.createFactory(e):a.createFactory(e)}var a=n(17),o=n(38),i=n(47),s=i({a:"a",abbr:"abbr",address:"address",area:"area",article:"article",aside:"aside",audio:"audio",b:"b",base:"base",bdi:"bdi",bdo:"bdo",big:"big",blockquote:"blockquote",body:"body",br:"br",button:"button",canvas:"canvas",caption:"caption",cite:"cite",code:"code",col:"col",colgroup:"colgroup",data:"data",datalist:"datalist",dd:"dd",del:"del",details:"details",dfn:"dfn",dialog:"dialog",div:"div",dl:"dl",dt:"dt",em:"em",embed:"embed",fieldset:"fieldset",figcaption:"figcaption",figure:"figure",footer:"footer",form:"form",h1:"h1",h2:"h2",h3:"h3",h4:"h4",h5:"h5",h6:"h6",head:"head",header:"header",hr:"hr",html:"html",i:"i",iframe:"iframe",img:"img",input:"input",ins:"ins",kbd:"kbd",keygen:"keygen",label:"label",legend:"legend",li:"li",link:"link",main:"main",map:"map",mark:"mark",menu:"menu",menuitem:"menuitem",meta:"meta",meter:"meter",nav:"nav",noscript:"noscript",object:"object",ol:"ol",optgroup:"optgroup",option:"option",output:"output",p:"p",param:"param",picture:"picture",pre:"pre",progress:"progress",q:"q",rp:"rp",rt:"rt",ruby:"ruby",s:"s",samp:"samp",script:"script",section:"section",select:"select",small:"small",source:"source",span:"span",strong:"strong",style:"style",sub:"sub",summary:"summary",sup:"sup",table:"table",tbody:"tbody",td:"td",textarea:"textarea",tfoot:"tfoot",th:"
th",thead:"thead",time:"time",title:"title",tr:"tr",track:"track",u:"u",ul:"ul","var":"var",video:"video",wbr:"wbr",circle:"circle",clipPath:"clipPath",defs:"defs",ellipse:"ellipse",g:"g",line:"line",linearGradient:"linearGradient",mask:"mask",path:"path",pattern:"pattern",polygon:"polygon",polyline:"polyline",radialGradient:"radialGradient",rect:"rect",stop:"stop",svg:"svg",text:"text",tspan:"tspan"},r);e.exports=s}).call(t,n(9))},function(e,t){"use strict";function n(e,t,n){if(!e)return null;var a={};for(var o in e)r.call(e,o)&&(a[o]=t.call(n,e[o],o,e));return a}var r=Object.prototype.hasOwnProperty;e.exports=n},function(e,t,n){"use strict";var r=n(49),a=n(53),o=n(93),i=n(19),s=n(52),u=function(e){};i(u.prototype,{construct:function(e){this._currentElement=e,this._stringText=""+e,this._rootNodeID=null,this._mountIndex=0},mountComponent:function(e,t,n){this._rootNodeID=e;var a=s(this._stringText);return t.renderToStaticMarkup?a:"<span "+r.createMarkupForID(e)+">"+a+"</span>"},receiveComponent:function(e,t){if(e!==this._currentElement){this._currentElement=e;var n=""+e;n!==this._stringText&&(this._stringText=n,o.BackendIDOperations.updateTextContentByID(this._rootNodeID,n))}},unmountComponent:function(){a.unmountIDFromEnvironment(this._rootNodeID)}}),e.exports=u},function(e,t,n){(function(t){"use strict";function r(e,t){return null==t||a.hasBooleanValue[e]&&!t||a.hasNumericValue[e]&&isNaN(t)||a.hasPositiveNumericValue[e]&&1>t||a.hasOverloadedBooleanValue[e]&&t===!1}var a=n(50),o=n(51),i=n(21);if("production"!==t.env.NODE_ENV)var s={children:!0,dangerouslySetInnerHTML:!0,key:!0,ref:!0},u={},d=function(e){if(!(s.hasOwnProperty(e)&&s[e]||u.hasOwnProperty(e)&&u[e])){u[e]=!0;var n=e.toLowerCase(),r=a.isCustomAttribute(n)?n:a.getPossibleStandardName.hasOwnProperty(n)?a.getPossibleStandardName[n]:null;"production"!==t.env.NODE_ENV?i(null==r,"Unknown DOM property %s. 
Did you mean %s?",e,r):null}};var l={createMarkupForID:function(e){return a.ID_ATTRIBUTE_NAME+"="+o(e)},createMarkupForProperty:function(e,n){if(a.isStandardName.hasOwnProperty(e)&&a.isStandardName[e]){if(r(e,n))return"";var i=a.getAttributeName[e];return a.hasBooleanValue[e]||a.hasOverloadedBooleanValue[e]&&n===!0?i:i+"="+o(n)}return a.isCustomAttribute(e)?null==n?"":e+"="+o(n):("production"!==t.env.NODE_ENV&&d(e),null)},setValueForProperty:function(e,n,o){if(a.isStandardName.hasOwnProperty(n)&&a.isStandardName[n]){var i=a.getMutationMethod[n];if(i)i(e,o);else if(r(n,o))this.deleteValueForProperty(e,n);else if(a.mustUseAttribute[n])e.setAttribute(a.getAttributeName[n],""+o);else{var s=a.getPropertyName[n];a.hasSideEffects[n]&&""+e[s]==""+o||(e[s]=o)}}else a.isCustomAttribute(n)?null==o?e.removeAttribute(n):e.setAttribute(n,""+o):"production"!==t.env.NODE_ENV&&d(n)},deleteValueForProperty:function(e,n){if(a.isStandardName.hasOwnProperty(n)&&a.isStandardName[n]){var r=a.getMutationMethod[n];if(r)r(e,void 0);else if(a.mustUseAttribute[n])e.removeAttribute(a.getAttributeName[n]);else{var o=a.getPropertyName[n],i=a.getDefaultValueForProperty(e.nodeName,o);a.hasSideEffects[n]&&""+e[o]===i||(e[o]=i)}}else a.isCustomAttribute(n)?e.removeAttribute(n):"production"!==t.env.NODE_ENV&&d(n)}};e.exports=l}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e,t){return(e&t)===t}var a=n(13),o={MUST_USE_ATTRIBUTE:1,MUST_USE_PROPERTY:2,HAS_SIDE_EFFECTS:4,HAS_BOOLEAN_VALUE:8,HAS_NUMERIC_VALUE:16,HAS_POSITIVE_NUMERIC_VALUE:48,HAS_OVERLOADED_BOOLEAN_VALUE:64,injectDOMPropertyConfig:function(e){var n=e.Properties||{},i=e.DOMAttributeNames||{},u=e.DOMPropertyNames||{},d=e.DOMMutationMethods||{};e.isCustomAttribute&&s._isCustomAttributeFunctions.push(e.isCustomAttribute);for(var l in n){"production"!==t.env.NODE_ENV?a(!s.isStandardName.hasOwnProperty(l),"injectDOMPropertyConfig(...): You're trying to inject DOM property '%s' which has already been injected. 
You may be accidentally injecting the same DOM property config twice, or you may be injecting two configs that have conflicting property names.",l):a(!s.isStandardName.hasOwnProperty(l)),s.isStandardName[l]=!0;var c=l.toLowerCase();if(s.getPossibleStandardName[c]=l,i.hasOwnProperty(l)){var _=i[l];s.getPossibleStandardName[_]=l,s.getAttributeName[l]=_}else s.getAttributeName[l]=c;s.getPropertyName[l]=u.hasOwnProperty(l)?u[l]:l,d.hasOwnProperty(l)?s.getMutationMethod[l]=d[l]:s.getMutationMethod[l]=null;var p=n[l];s.mustUseAttribute[l]=r(p,o.MUST_USE_ATTRIBUTE),s.mustUseProperty[l]=r(p,o.MUST_USE_PROPERTY),s.hasSideEffects[l]=r(p,o.HAS_SIDE_EFFECTS),s.hasBooleanValue[l]=r(p,o.HAS_BOOLEAN_VALUE),s.hasNumericValue[l]=r(p,o.HAS_NUMERIC_VALUE),s.hasPositiveNumericValue[l]=r(p,o.HAS_POSITIVE_NUMERIC_VALUE),s.hasOverloadedBooleanValue[l]=r(p,o.HAS_OVERLOADED_BOOLEAN_VALUE),"production"!==t.env.NODE_ENV?a(!s.mustUseAttribute[l]||!s.mustUseProperty[l],"DOMProperty: Cannot require using both attribute and property: %s",l):a(!s.mustUseAttribute[l]||!s.mustUseProperty[l]),"production"!==t.env.NODE_ENV?a(s.mustUseProperty[l]||!s.hasSideEffects[l],"DOMProperty: Properties that have side effects must use property: %s",l):a(s.mustUseProperty[l]||!s.hasSideEffects[l]),"production"!==t.env.NODE_ENV?a(!!s.hasBooleanValue[l]+!!s.hasNumericValue[l]+!!s.hasOverloadedBooleanValue[l]<=1,"DOMProperty: Value can be one of boolean, overloaded boolean, or numeric value, but not a combination: %s",l):a(!!s.hasBooleanValue[l]+!!s.hasNumericValue[l]+!!s.hasOverloadedBooleanValue[l]<=1)}}},i={},s={ID_ATTRIBUTE_NAME:"data-reactid",isStandardName:{},getPossibleStandardName:{},getAttributeName:{},getPropertyName:{},getMutationMethod:{},mustUseAttribute:{},mustUseProperty:{},hasSideEffects:{},hasBooleanValue:{},hasNumericValue:{},hasPositiveNumericValue:{},hasOverloadedBooleanValue:{},_isCustomAttributeFunctions:[],isCustomAttribute:function(e){for(var 
t=0;t<s._isCustomAttributeFunctions.length;t++){var n=s._isCustomAttributeFunctions[t];if(n(e))return!0}return!1},getDefaultValueForProperty:function(e,t){var n,r=i[e];return r||(i[e]=r={}),t in r||(n=document.createElement(e),r[t]=n[t]),r[t]},injection:o};e.exports=s}).call(t,n(9))},function(e,t,n){"use strict";function r(e){return'"'+a(e)+'"'}var a=n(52);e.exports=r},function(e,t){"use strict";function n(e){return a[e]}function r(e){return(""+e).replace(o,n)}var a={"&":"&",">":">","<":"<",'"':""","'":"'"},o=/[&><"']/g;e.exports=r},function(e,t,n){"use strict";var r=n(54),a=n(73),o={processChildrenUpdates:r.dangerouslyProcessChildrenUpdates,replaceNodeWithMarkupByID:r.dangerouslyReplaceNodeWithMarkupByID,unmountIDFromEnvironment:function(e){a.purgeID(e)}};e.exports=o},function(e,t,n){(function(t){"use strict";var r=n(55),a=n(64),o=n(49),i=n(73),s=n(34),u=n(13),d=n(72),l={dangerouslySetInnerHTML:"`dangerouslySetInnerHTML` must be set using `updateInnerHTMLByID()`.",style:"`style` must be set using `updateStylesByID()`."},c={updatePropertyByID:function(e,n,r){var a=i.getNode(e);"production"!==t.env.NODE_ENV?u(!l.hasOwnProperty(n),"updatePropertyByID(...): %s",l[n]):u(!l.hasOwnProperty(n)),null!=r?o.setValueForProperty(a,n,r):o.deleteValueForProperty(a,n)},deletePropertyByID:function(e,n,r){var a=i.getNode(e);"production"!==t.env.NODE_ENV?u(!l.hasOwnProperty(n),"updatePropertyByID(...): %s",l[n]):u(!l.hasOwnProperty(n)),o.deleteValueForProperty(a,n,r)},updateStylesByID:function(e,t){var n=i.getNode(e);r.setValueForStyles(n,t)},updateInnerHTMLByID:function(e,t){var n=i.getNode(e);d(n,t)},updateTextContentByID:function(e,t){var n=i.getNode(e);a.updateTextContent(n,t)},dangerouslyReplaceNodeWithMarkupByID:function(e,t){var n=i.getNode(e);a.dangerouslyReplaceNodeWithMarkup(n,t)},dangerouslyProcessChildrenUpdates:function(e,t){for(var 
/* (continuation of webpack module 54 -- ReactDOMIDOperations) */
n=0;n<e.length;n++)e[n].parentNode=i.getNode(e[n].parentID);a.processUpdates(e,t)}};s.measureMethods(c,"ReactDOMIDOperations",{updatePropertyByID:"updatePropertyByID",deletePropertyByID:"deletePropertyByID",updateStylesByID:"updateStylesByID",updateInnerHTMLByID:"updateInnerHTMLByID",updateTextContentByID:"updateTextContentByID",dangerouslyReplaceNodeWithMarkupByID:"dangerouslyReplaceNodeWithMarkupByID",dangerouslyProcessChildrenUpdates:"dangerouslyProcessChildrenUpdates"}),e.exports=c}).call(t,n(9))},
/* webpack module 55 -- CSSPropertyOperations: serializes style objects to CSS markup and applies them to DOM nodes; dev-only warnings for bad style property names/values. */
function(e,t,n){(function(t){"use strict";var r=n(56),a=n(57),o=n(58),i=n(60),s=n(61),u=n(63),d=n(21),l=u(function(e){return s(e)}),c="cssFloat";if(a.canUseDOM&&void 0===document.documentElement.style.cssFloat&&(c="styleFloat"),"production"!==t.env.NODE_ENV)var _=/^(?:webkit|moz|o)[A-Z]/,p=/;\s*$/,m={},h={},f=function(e){m.hasOwnProperty(e)&&m[e]||(m[e]=!0,"production"!==t.env.NODE_ENV?d(!1,"Unsupported style property %s. Did you mean %s?",e,o(e)):null)},y=function(e){m.hasOwnProperty(e)&&m[e]||(m[e]=!0,"production"!==t.env.NODE_ENV?d(!1,"Unsupported vendor-prefixed style property %s. Did you mean %s?",e,e.charAt(0).toUpperCase()+e.slice(1)):null)},M=function(e,n){h.hasOwnProperty(n)&&h[n]||(h[n]=!0,"production"!==t.env.NODE_ENV?d(!1,'Style property values shouldn\'t contain a semicolon. Try "%s: %s" instead.',e,n.replace(p,"")):null)},v=function(e,t){e.indexOf("-")>-1?f(e):_.test(e)?y(e):p.test(t)&&M(e,t)};var g={createMarkupForStyles:function(e){
var n="";for(var r in e)if(e.hasOwnProperty(r)){var a=e[r];"production"!==t.env.NODE_ENV&&v(r,a),null!=a&&(n+=l(r)+":",n+=i(r,a)+";")}return n||null},setValueForStyles:function(e,n){var a=e.style;for(var o in n)if(n.hasOwnProperty(o)){"production"!==t.env.NODE_ENV&&v(o,n[o]);var s=i(o,n[o]);if("float"===o&&(o=c),s)a[o]=s;else{var u=r.shorthandPropertyExpansions[o];if(u)for(var d in u)a[d]="";else a[o]=""}}}};e.exports=g}).call(t,n(9))},
/* webpack module 56 -- CSSProperty: tables of unitless-number CSS properties (with vendor-prefixed variants) and shorthand property expansions. */
function(e,t){"use strict";function n(e,t){return e+t.charAt(0).toUpperCase()+t.substring(1)}var r={boxFlex:!0,boxFlexGroup:!0,columnCount:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,strokeDashoffset:!0,strokeOpacity:!0,strokeWidth:!0},a=["Webkit","ms","Moz","O"];Object.keys(r).forEach(function(e){a.forEach(function(t){r[n(t,e)]=r[e]})});var o={background:{backgroundImage:!0,backgroundPosition:!0,backgroundRepeat:!0,backgroundColor:!0},border:{borderWidth:!0,borderStyle:!0,borderColor:!0},borderBottom:{borderBottomWidth:!0,borderBottomStyle:!0,borderBottomColor:!0},borderLeft:{borderLeftWidth:!0,borderLeftStyle:!0,borderLeftColor:!0},borderRight:{borderRightWidth:!0,borderRightStyle:!0,borderRightColor:!0},borderTop:{borderTopWidth:!0,borderTopStyle:!0,borderTopColor:!0},font:{fontStyle:!0,fontVariant:!0,fontWeight:!0,fontSize:!0,lineHeight:!0,fontFamily:!0}},i={isUnitlessNumber:r,shorthandPropertyExpansions:o};e.exports=i},
/* webpack module 57 -- ExecutionEnvironment: capability flags (canUseDOM, workers, event listeners, viewport). */
function(e,t){"use strict";var n=!("undefined"==typeof window||!window.document||!window.document.createElement),r={canUseDOM:n,canUseWorkers:"undefined"!=typeof Worker,canUseEventListeners:n&&!(!window.addEventListener&&!window.attachEvent),canUseViewport:n&&!!window.screen,isInWorker:!n};e.exports=r},
/* webpack module 58 -- camelizeStyleName: camelizes a hyphenated CSS name, special-casing the -ms- prefix. */
function(e,t,n){"use strict";function r(e){return a(e.replace(o,"ms-"))}var a=n(59),o=/^-ms-/;e.exports=r},
/* webpack module 59 -- camelize: hyphenated-string to camelCase (continues on the next line). */
function(e,t){function n(e){return e.replace(r,function(e,t){return 
t.toUpperCase()})}var r=/-(.)/g;e.exports=n},
/* webpack module 60 -- dangerousStyleValue: stringifies a style value, appending "px" unless the property is unitless (per module 56). */
function(e,t,n){"use strict";function r(e,t){var n=null==t||"boolean"==typeof t||""===t;if(n)return"";var r=isNaN(t);return r||0===t||o.hasOwnProperty(e)&&o[e]?""+t:("string"==typeof t&&(t=t.trim()),t+"px")}var a=n(56),o=a.isUnitlessNumber;e.exports=r},
/* webpack module 61 -- hyphenateStyleName: hyphenates a camelCased CSS name, special-casing the ms- prefix. */
function(e,t,n){"use strict";function r(e){return a(e).replace(o,"-ms-")}var a=n(62),o=/^ms-/;e.exports=r},
/* webpack module 62 -- hyphenate: camelCase to hyphen-case. */
function(e,t){function n(e){return e.replace(r,"-$1").toLowerCase()}var r=/([A-Z])/g;e.exports=n},
/* webpack module 63 -- memoizeStringOnly: memoizer for single-string-argument functions. */
function(e,t){"use strict";function n(e){var t={};return function(n){return t.hasOwnProperty(n)||(t[n]=e.call(this,n)),t[n]}}e.exports=n},
/* webpack module 64 -- DOMChildrenOperations: applies batched child updates (insert/move/remove/text) to real DOM nodes; continues on the next line (an error-message string literal spans the line break -- do not re-wrap). */
function(e,t,n){(function(t){"use strict";function r(e,t,n){e.insertBefore(t,e.childNodes[n]||null)}var a=n(65),o=n(70),i=n(71),s=n(13),u={dangerouslyReplaceNodeWithMarkup:a.dangerouslyReplaceNodeWithMarkup,updateTextContent:i,processUpdates:function(e,n){for(var u,d=null,l=null,c=0;c<e.length;c++)if(u=e[c],u.type===o.MOVE_EXISTING||u.type===o.REMOVE_NODE){var _=u.fromIndex,p=u.parentNode.childNodes[_],m=u.parentID;"production"!==t.env.NODE_ENV?s(p,"processUpdates(): Unable to find child %s of element. This probably means the DOM was unexpectedly mutated (e.g., by the browser), usually due to forgetting a <tbody> when using tables, nesting tags like <form>, <p>, or <a>, or using non-SVG elements in an <svg> parent. 
Try inspecting the child nodes of the element with React ID `%s`.",_,m):s(p),d=d||{},d[m]=d[m]||[],d[m][_]=p,l=l||[],l.push(p)}var h=a.dangerouslyRenderMarkup(n);if(l)for(var f=0;f<l.length;f++)l[f].parentNode.removeChild(l[f]);for(var y=0;y<e.length;y++)switch(u=e[y],u.type){case o.INSERT_MARKUP:r(u.parentNode,h[u.markupIndex],u.toIndex);break;case o.MOVE_EXISTING:r(u.parentNode,d[u.parentID][u.fromIndex],u.toIndex);break;case o.TEXT_CONTENT:i(u.parentNode,u.textContent);break;case o.REMOVE_NODE:}}};e.exports=u}).call(t,n(9))},
/* webpack module 65 -- Danger: renders raw markup strings into DOM nodes (dangerouslyRenderMarkup / dangerouslyReplaceNodeWithMarkup), tagging markup with data-danger-index to map results back; continues on the next line (a string literal spans the line break -- do not re-wrap). */
function(e,t,n){(function(t){"use strict";function r(e){return e.substring(1,e.indexOf(" "))}var a=n(57),o=n(66),i=n(22),s=n(69),u=n(13),d=/^(<[^ \/>]+)/,l="data-danger-index",c={dangerouslyRenderMarkup:function(e){"production"!==t.env.NODE_ENV?u(a.canUseDOM,"dangerouslyRenderMarkup(...): Cannot render markup in a worker thread. Make sure `window` and `document` are available globally before requiring React when unit testing or use React.renderToString for server rendering."):u(a.canUseDOM);for(var n,c={},_=0;_<e.length;_++)"production"!==t.env.NODE_ENV?u(e[_],"dangerouslyRenderMarkup(...): Missing markup."):u(e[_]),n=r(e[_]),n=s(n)?n:"*",c[n]=c[n]||[],c[n][_]=e[_];var p=[],m=0;for(n in c)if(c.hasOwnProperty(n)){var h,f=c[n];for(h in f)if(f.hasOwnProperty(h)){var y=f[h];f[h]=y.replace(d,"$1 "+l+'="'+h+'" ')}for(var M=o(f.join(""),i),v=0;v<M.length;++v){var g=M[v];g.hasAttribute&&g.hasAttribute(l)?(h=+g.getAttribute(l),g.removeAttribute(l),"production"!==t.env.NODE_ENV?u(!p.hasOwnProperty(h),"Danger: Assigning to an already-occupied result index."):u(!p.hasOwnProperty(h)),p[h]=g,m+=1):"production"!==t.env.NODE_ENV&&console.error("Danger: Discarding unexpected node:",g)}}return"production"!==t.env.NODE_ENV?u(m===p.length,"Danger: Did not assign to every index of resultList."):u(m===p.length),"production"!==t.env.NODE_ENV?u(p.length===e.length,"Danger: Expected markup to render %s nodes, but rendered 
%s.",e.length,p.length):u(p.length===e.length),p},dangerouslyReplaceNodeWithMarkup:function(e,n){"production"!==t.env.NODE_ENV?u(a.canUseDOM,"dangerouslyReplaceNodeWithMarkup(...): Cannot render markup in a worker thread. Make sure `window` and `document` are available globally before requiring React when unit testing or use React.renderToString for server rendering."):u(a.canUseDOM),"production"!==t.env.NODE_ENV?u(n,"dangerouslyReplaceNodeWithMarkup(...): Missing markup."):u(n),"production"!==t.env.NODE_ENV?u("html"!==e.tagName.toLowerCase(),"dangerouslyReplaceNodeWithMarkup(...): Cannot replace markup of the <html> node. This is because browser quirks make this unreliable and/or slow. If you want to render to the root you must use server rendering. See React.renderToString()."):u("html"!==e.tagName.toLowerCase());var r=o(n,i)[0];e.parentNode.replaceChild(r,e)}};e.exports=c}).call(t,n(9))},
/* webpack module 66 -- createNodesFromMarkup: parses a markup string into DOM nodes via a dummy <div>, using getMarkupWrap (module 69) for tags that need a wrapper; handles <script> nodes through a caller-supplied callback. */
function(e,t,n){(function(t){function r(e){var t=e.match(l);return t&&t[1].toLowerCase()}function a(e,n){var a=d;"production"!==t.env.NODE_ENV?u(!!d,"createNodesFromMarkup dummy not initialized"):u(!!d);var o=r(e),l=o&&s(o);if(l){a.innerHTML=l[1]+e+l[2];for(var c=l[0];c--;)a=a.lastChild}else a.innerHTML=e;var _=a.getElementsByTagName("script");_.length&&("production"!==t.env.NODE_ENV?u(n,"createNodesFromMarkup(...): Unexpected <script> element rendered."):u(n),i(_).forEach(n));for(var p=i(a.childNodes);a.lastChild;)a.removeChild(a.lastChild);return p}var o=n(57),i=n(67),s=n(69),u=n(13),d=o.canUseDOM?document.createElement("div"):null,l=/^\s*<(\w+)/;e.exports=a}).call(t,n(9))},
/* webpack module 67 -- createArrayFromMixed: converts array-like objects to real arrays, passing everything else through as a single-element array. */
function(e,t,n){function r(e){return!!e&&("object"==typeof e||"function"==typeof e)&&"length"in e&&!("setInterval"in e)&&"number"!=typeof e.nodeType&&(Array.isArray(e)||"callee"in e||"item"in e)}function a(e){return r(e)?Array.isArray(e)?e.slice():o(e):[e]}var o=n(68);e.exports=a},
/* webpack module 68 -- toArray: copies an array-like object into a new Array, with dev-mode invariants on its shape (continues on the next line). */
function(e,t,n){(function(t){function r(e){var 
n=e.length;if("production"!==t.env.NODE_ENV?a(!Array.isArray(e)&&("object"==typeof e||"function"==typeof e),"toArray: Array-like object expected"):a(!Array.isArray(e)&&("object"==typeof e||"function"==typeof e)),"production"!==t.env.NODE_ENV?a("number"==typeof n,"toArray: Object needs a length property"):a("number"==typeof n),"production"!==t.env.NODE_ENV?a(0===n||n-1 in e,"toArray: Object should have keys for indices"):a(0===n||n-1 in e),e.hasOwnProperty)try{return Array.prototype.slice.call(e)}catch(r){}for(var o=Array(n),i=0;n>i;i++)o[i]=e[i];return o}var a=n(13);e.exports=r}).call(t,n(9))},
/* webpack module 69 -- getMarkupWrap: returns the [depth, prefix, suffix] wrapper needed to set innerHTML for tags that browsers refuse to parse bare (td, tr, option, SVG elements, ...), feature-detecting per tag. */
function(e,t,n){(function(t){function r(e){return"production"!==t.env.NODE_ENV?o(!!i,"Markup wrapping node not initialized"):o(!!i),_.hasOwnProperty(e)||(e="*"),s.hasOwnProperty(e)||("*"===e?i.innerHTML="<link />":i.innerHTML="<"+e+"></"+e+">",s[e]=!i.firstChild),s[e]?_[e]:null}var a=n(57),o=n(13),i=a.canUseDOM?document.createElement("div"):null,s={circle:!0,clipPath:!0,defs:!0,ellipse:!0,g:!0,line:!0,linearGradient:!0,path:!0,polygon:!0,polyline:!0,radialGradient:!0,rect:!0,stop:!0,text:!0},u=[1,'<select multiple="true">',"</select>"],d=[1,"<table>","</table>"],l=[3,"<table><tbody><tr>","</tr></tbody></table>"],c=[1,"<svg>","</svg>"],_={"*":[1,"?<div>","</div>"],area:[1,"<map>","</map>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],legend:[1,"<fieldset>","</fieldset>"],param:[1,"<object>","</object>"],tr:[2,"<table><tbody>","</tbody></table>"],optgroup:u,option:u,caption:d,colgroup:d,tbody:d,tfoot:d,thead:d,td:l,th:l,circle:c,clipPath:c,defs:c,ellipse:c,g:c,line:c,linearGradient:c,path:c,polygon:c,polyline:c,radialGradient:c,rect:c,stop:c,text:c};e.exports=r}).call(t,n(9))},
/* webpack module 70 -- ReactMultiChildUpdateTypes: keyMirror of child-update operation types. */
function(e,t,n){"use strict";var r=n(12),a=r({INSERT_MARKUP:null,MOVE_EXISTING:null,REMOVE_NODE:null,TEXT_CONTENT:null});e.exports=a},
/* webpack module 71 -- setTextContent: sets a node's text, falling back to escaped innerHTML where textContent is unsupported (continues on the next line). */
function(e,t,n){"use strict";var r=n(57),a=n(52),o=n(72),i=function(e,t){e.textContent=t};r.canUseDOM&&("textContent"in 
/* (continuation of webpack module 71 -- setTextContent) */
document.documentElement||(i=function(e,t){o(e,a(t))})),e.exports=i},
/* webpack module 72 -- setInnerHTML: cross-browser innerHTML setter, with an MSApp unsafe-local-function path and an IE leading-whitespace/"\ufeff" workaround. */
function(e,t,n){"use strict";var r=n(57),a=/^[ \r\n\t\f]/,o=/<(!--|link|noscript|meta|script|style)[ \r\n\t\f\/>]/,i=function(e,t){e.innerHTML=t};if("undefined"!=typeof MSApp&&MSApp.execUnsafeLocalFunction&&(i=function(e,t){MSApp.execUnsafeLocalFunction(function(){e.innerHTML=t})}),r.canUseDOM){var s=document.createElement("div");s.innerHTML=" ",""===s.innerHTML&&(i=function(e,t){if(e.parentNode&&e.parentNode.replaceChild(e,e),a.test(t)||"<"===t[0]&&o.test(t)){e.innerHTML="\ufeff"+t;var n=e.firstChild;1===n.data.length?e.removeChild(n):n.deleteData(0,1)}else e.innerHTML=t})}e.exports=i},
/* webpack module 73 -- ReactMount: maps React IDs to DOM nodes and containers, and implements render/unmountComponentAtNode, server-markup reuse via checksums, and component-root lookup. Spans the following lines; several invariant-message string literals span the line breaks -- do not re-wrap. */
function(e,t,n){(function(t){"use strict";function r(e,t){for(var n=Math.min(e.length,t.length),r=0;n>r;r++)if(e.charAt(r)!==t.charAt(r))return r;return e.length===t.length?-1:n}function a(e){var t=x(e);return t&&G.getID(t)}function o(e){var n=i(e);if(n)if(A.hasOwnProperty(n)){var r=A[n];r!==e&&("production"!==t.env.NODE_ENV?O(!l(r,n),"ReactMount: Two valid but unequal nodes with the same `%s`: %s",R,n):O(!l(r,n)),A[n]=e)}else A[n]=e;return n}function i(e){return e&&e.getAttribute&&e.getAttribute(R)||""}function s(e,t){var n=i(e);n!==t&&delete A[n],e.setAttribute(R,t),A[t]=e}function u(e){return A.hasOwnProperty(e)&&l(A[e],e)||(A[e]=G.findReactNodeByID(e)),A[e]}function d(e){var t=Y.get(e)._rootNodeID;return L.isNullComponentID(t)?null:(A.hasOwnProperty(t)&&l(A[t],t)||(A[t]=G.findReactNodeByID(t)),A[t])}function l(e,n){if(e){"production"!==t.env.NODE_ENV?O(i(e)===n,"ReactMount: Unexpected modification of `%s`",R):O(i(e)===n);var r=G.findReactContainerForID(n);if(r&&S(r,e))return!0}return!1}function c(e){delete A[e]}function _(e){var t=A[e];return t&&l(t,e)?void(J=t):!1}function p(e){J=null,D.traverseAncestors(e,_);var t=J;return J=null,t}function m(e,t,n,r,a){var o=T.mountComponent(e,t,r,N);e._isTopLevel=!0,G._mountImageIntoNode(o,n,a)}function h(e,t,n,r){var 
/* module dependencies (n(50)=DOMProperty supplies the React ID attribute name) */
a=w.ReactReconcileTransaction.getPooled();a.perform(m,null,e,t,n,a,r),w.ReactReconcileTransaction.release(a)}var f=n(50),y=n(74),M=n(23),v=n(17),g=n(38),L=n(82),D=n(25),Y=n(31),b=n(83),k=n(34),T=n(35),E=n(29),w=n(32),N=n(20),S=n(85),x=n(88),C=n(89),O=n(13),P=n(72),j=n(92),H=n(21),I=D.SEPARATOR,R=f.ID_ATTRIBUTE_NAME,A={},V=1,F=9,U={},W={};if("production"!==t.env.NODE_ENV)var z={};var B=[],J=null,G={_instancesByReactRootID:U,scrollMonitor:function(e,t){t()},_updateRootComponent:function(e,n,r,o){return"production"!==t.env.NODE_ENV&&g.checkAndWarnForMutatedProps(n),G.scrollMonitor(r,function(){E.enqueueElementInternal(e,n),o&&E.enqueueCallbackInternal(e,o)}),"production"!==t.env.NODE_ENV&&(z[a(r)]=x(r)),e},_registerComponent:function(e,n){"production"!==t.env.NODE_ENV?O(n&&(n.nodeType===V||n.nodeType===F),"_registerComponent(...): Target container is not a DOM element."):O(n&&(n.nodeType===V||n.nodeType===F)),y.ensureScrollValueMonitoring();var r=G.registerContainer(n);return U[r]=e,r},_renderNewRootComponent:function(e,n,r){"production"!==t.env.NODE_ENV?H(null==M.current,"_renderNewRootComponent(): Render methods should be a pure function of props and state; triggering nested component updates from render is not allowed. If necessary, trigger nested updates in componentDidUpdate."):null;var a=C(e,null),o=G._registerComponent(a,n);return w.batchedUpdates(h,a,o,n,r),"production"!==t.env.NODE_ENV&&(z[o]=x(n)),a},render:function(e,n,r){"production"!==t.env.NODE_ENV?O(v.isValidElement(e),"React.render(): Invalid component element.%s","string"==typeof e?" Instead of passing an element string, make sure to instantiate it by passing it to React.createElement.":"function"==typeof e?" Instead of passing a component class, make sure to instantiate it by passing it to React.createElement.":null!=e&&void 0!==e.props?" 
This may be caused by unintentionally loading two independent copies of React.":""):O(v.isValidElement(e));var o=U[a(n)];if(o){var i=o._currentElement;if(j(i,e))return G._updateRootComponent(o,e,n,r).getPublicInstance();G.unmountComponentAtNode(n)}var s=x(n),u=s&&G.isRenderedByReact(s);if("production"!==t.env.NODE_ENV&&(!u||s.nextSibling))for(var d=s;d;){if(G.isRenderedByReact(d)){"production"!==t.env.NODE_ENV?H(!1,"render(): Target node has markup rendered by React, but there are unrelated nodes as well. This is most commonly caused by white-space inserted around server-rendered markup."):null;break}d=d.nextSibling}var l=u&&!o,c=G._renderNewRootComponent(e,n,l).getPublicInstance();return r&&r.call(c),c},constructAndRenderComponent:function(e,t,n){var r=v.createElement(e,t);return G.render(r,n)},constructAndRenderComponentByID:function(e,n,r){var a=document.getElementById(r);return"production"!==t.env.NODE_ENV?O(a,'Tried to get element with id of "%s" but it is not present on the page.',r):O(a),G.constructAndRenderComponent(e,n,a)},registerContainer:function(e){var t=a(e);return t&&(t=D.getReactRootIDFromNodeID(t)),t||(t=D.createReactRootID()),W[t]=e,t},unmountComponentAtNode:function(e){"production"!==t.env.NODE_ENV?H(null==M.current,"unmountComponentAtNode(): Render methods should be a pure function of props and state; triggering nested component updates from render is not allowed. 
If necessary, trigger nested updates in componentDidUpdate."):null,"production"!==t.env.NODE_ENV?O(e&&(e.nodeType===V||e.nodeType===F),"unmountComponentAtNode(...): Target container is not a DOM element."):O(e&&(e.nodeType===V||e.nodeType===F));var n=a(e),r=U[n];return r?(G.unmountComponentFromNode(r,e),delete U[n],delete W[n],"production"!==t.env.NODE_ENV&&delete z[n],!0):!1},unmountComponentFromNode:function(e,t){for(T.unmountComponent(e),t.nodeType===F&&(t=t.documentElement);t.lastChild;)t.removeChild(t.lastChild)},findReactContainerForID:function(e){var n=D.getReactRootIDFromNodeID(e),r=W[n];if("production"!==t.env.NODE_ENV){var a=z[n];if(a&&a.parentNode!==r){"production"!==t.env.NODE_ENV?O(i(a)===n,"ReactMount: Root element ID differed from reactRootID."):O(i(a)===n);var o=r.firstChild;o&&n===i(o)?z[n]=o:"production"!==t.env.NODE_ENV?H(!1,"ReactMount: Root element has been removed from its original container. New container:",a.parentNode):null}}return r},findReactNodeByID:function(e){var t=G.findReactContainerForID(e);return G.findComponentRoot(t,e)},isRenderedByReact:function(e){if(1!==e.nodeType)return!1;var t=G.getID(e);return t?t.charAt(0)===I:!1},getFirstReactDOM:function(e){for(var t=e;t&&t.parentNode!==t;){if(G.isRenderedByReact(t))return t;t=t.parentNode}return null},findComponentRoot:function(e,n){var r=B,a=0,o=p(n)||e;for(r[0]=o.firstChild,r.length=1;a<r.length;){for(var i,s=r[a++];s;){var u=G.getID(s);u?n===u?i=s:D.isAncestorIDOf(u,n)&&(r.length=a=0,r.push(s.firstChild)):r.push(s.firstChild),s=s.nextSibling}if(i)return r.length=0,i}r.length=0,"production"!==t.env.NODE_ENV?O(!1,"findComponentRoot(..., %s): Unable to find element. This probably means the DOM was unexpectedly mutated (e.g., by the browser), usually due to forgetting a <tbody> when using tables, nesting tags like <form>, <p>, or <a>, or using non-SVG elements in an <svg> parent. 
Try inspecting the child nodes of the element with React ID `%s`.",n,G.getID(e)):O(!1)},_mountImageIntoNode:function(e,n,a){if("production"!==t.env.NODE_ENV?O(n&&(n.nodeType===V||n.nodeType===F),"mountComponentIntoNode(...): Target container is not valid."):O(n&&(n.nodeType===V||n.nodeType===F)),a){var o=x(n);if(b.canReuseMarkup(e,o))return;var i=o.getAttribute(b.CHECKSUM_ATTR_NAME);o.removeAttribute(b.CHECKSUM_ATTR_NAME);var s=o.outerHTML;o.setAttribute(b.CHECKSUM_ATTR_NAME,i);var u=r(e,s),d=" (client) "+e.substring(u-20,u+20)+"\n (server) "+s.substring(u-20,u+20);"production"!==t.env.NODE_ENV?O(n.nodeType!==F,"You're trying to render a component to the document using server rendering but the checksum was invalid. This usually means you rendered a different component type or props on the client from the one on the server, or your render() methods are impure. React cannot handle this case due to cross-browser quirks by rendering at the document root. You should look for environment dependent code in your components and ensure the props are the same client and server side:\n%s",d):O(n.nodeType!==F),"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?H(!1,"React attempted to reuse markup in a container but the checksum was invalid. This generally means that you are using server rendering and the markup generated on the server was not what the client was expecting. React injected new markup to compensate which works but you have lost many of the benefits of server rendering. Instead, figure out why the markup being generated is different on the client or server:\n%s",d):null)}"production"!==t.env.NODE_ENV?O(n.nodeType!==F,"You're trying to render a component to the document but you didn't use server rendering. We can't do this without using server rendering due to cross-browser quirks. 
See React.renderToString() for server rendering."):O(n.nodeType!==F),P(n,e)},getReactRootID:a,getID:o,setID:s,getNode:u,getNodeFromInstance:d,purgeID:c};k.measureMethods(G,"ReactMount",{_renderNewRootComponent:"_renderNewRootComponent",_mountImageIntoNode:"_mountImageIntoNode"}),e.exports=G}).call(t,n(9))},
/* webpack module 74 -- ReactBrowserEventEmitter: per-document listener bookkeeping, topLevelType -> DOM event-name table, and listenTo with per-event trapping strategies (continues on the next line). */
function(e,t,n){"use strict";function r(e){return Object.prototype.hasOwnProperty.call(e,h)||(e[h]=p++,c[e[h]]={}),c[e[h]]}var a=n(11),o=n(75),i=n(76),s=n(79),u=n(80),d=n(19),l=n(81),c={},_=!1,p=0,m={topBlur:"blur",topChange:"change",topClick:"click",topCompositionEnd:"compositionend",topCompositionStart:"compositionstart",topCompositionUpdate:"compositionupdate",topContextMenu:"contextmenu",topCopy:"copy",topCut:"cut",topDoubleClick:"dblclick",topDrag:"drag",topDragEnd:"dragend",topDragEnter:"dragenter",topDragExit:"dragexit",topDragLeave:"dragleave",topDragOver:"dragover",topDragStart:"dragstart",topDrop:"drop",topFocus:"focus",topInput:"input",topKeyDown:"keydown",topKeyPress:"keypress",topKeyUp:"keyup",topMouseDown:"mousedown",topMouseMove:"mousemove",topMouseOut:"mouseout",topMouseOver:"mouseover",topMouseUp:"mouseup",topPaste:"paste",topScroll:"scroll",topSelectionChange:"selectionchange",topTextInput:"textInput",topTouchCancel:"touchcancel",topTouchEnd:"touchend",topTouchMove:"touchmove",topTouchStart:"touchstart",topWheel:"wheel"},h="_reactListenersID"+String(Math.random()).slice(2),f=d({},s,{ReactEventListener:null,injection:{injectReactEventListener:function(e){e.setHandleTopLevel(f.handleTopLevel),f.ReactEventListener=e}},setEnabled:function(e){f.ReactEventListener&&f.ReactEventListener.setEnabled(e)},isEnabled:function(){return!(!f.ReactEventListener||!f.ReactEventListener.isEnabled())},listenTo:function(e,t){for(var n=t,o=r(n),s=i.registrationNameDependencies[e],u=a.topLevelTypes,d=0,c=s.length;c>d;d++){var 
/* (continuation of webpack module 74 -- ReactBrowserEventEmitter.listenTo: wheel/scroll/focus-blur capture-vs-bubble fallbacks) */
_=s[d];o.hasOwnProperty(_)&&o[_]||(_===u.topWheel?l("wheel")?f.ReactEventListener.trapBubbledEvent(u.topWheel,"wheel",n):l("mousewheel")?f.ReactEventListener.trapBubbledEvent(u.topWheel,"mousewheel",n):f.ReactEventListener.trapBubbledEvent(u.topWheel,"DOMMouseScroll",n):_===u.topScroll?l("scroll",!0)?f.ReactEventListener.trapCapturedEvent(u.topScroll,"scroll",n):f.ReactEventListener.trapBubbledEvent(u.topScroll,"scroll",f.ReactEventListener.WINDOW_HANDLE):_===u.topFocus||_===u.topBlur?(l("focus",!0)?(f.ReactEventListener.trapCapturedEvent(u.topFocus,"focus",n),f.ReactEventListener.trapCapturedEvent(u.topBlur,"blur",n)):l("focusin")&&(f.ReactEventListener.trapBubbledEvent(u.topFocus,"focusin",n),f.ReactEventListener.trapBubbledEvent(u.topBlur,"focusout",n)),o[u.topBlur]=!0,o[u.topFocus]=!0):m.hasOwnProperty(_)&&f.ReactEventListener.trapBubbledEvent(_,m[_],n),o[_]=!0)}},trapBubbledEvent:function(e,t,n){return f.ReactEventListener.trapBubbledEvent(e,t,n)},trapCapturedEvent:function(e,t,n){return f.ReactEventListener.trapCapturedEvent(e,t,n)},ensureScrollValueMonitoring:function(){if(!_){var e=u.refreshScrollValues;f.ReactEventListener.monitorScrollValue(e),_=!0}},eventNameDispatchConfigs:o.eventNameDispatchConfigs,registrationNameModules:o.registrationNameModules,putListener:o.putListener,getListener:o.getListener,deleteListener:o.deleteListener,deleteAllListeners:o.deleteAllListeners});e.exports=f},
/* webpack module 75 -- EventPluginHub: listener bank keyed by registration name and React ID; extracts, enqueues and dispatches synthetic events through the registered plugins. Continues on the next line (a string literal spans a later line break -- do not re-wrap). */
function(e,t,n){(function(t){"use strict";function r(){var e=_&&_.traverseTwoPhase&&_.traverseEnterLeave;"production"!==t.env.NODE_ENV?u(e,"InstanceHandle not injected before use!"):u(e)}var a=n(76),o=n(10),i=n(77),s=n(78),u=n(13),d={},l=null,c=function(e){if(e){var 
t=o.executeDispatch,n=a.getPluginModuleForEvent(e);n&&n.executeDispatch&&(t=n.executeDispatch),o.executeDispatchesInOrder(e,t),e.isPersistent()||e.constructor.release(e)}},_=null,p={injection:{injectMount:o.injection.injectMount,injectInstanceHandle:function(e){_=e,"production"!==t.env.NODE_ENV&&r()},getInstanceHandle:function(){return"production"!==t.env.NODE_ENV&&r(),_},injectEventPluginOrder:a.injectEventPluginOrder,injectEventPluginsByName:a.injectEventPluginsByName},eventNameDispatchConfigs:a.eventNameDispatchConfigs,registrationNameModules:a.registrationNameModules,putListener:function(e,n,r){"production"!==t.env.NODE_ENV?u(!r||"function"==typeof r,"Expected %s listener to be a function, instead got type %s",n,typeof r):u(!r||"function"==typeof r);var a=d[n]||(d[n]={});a[e]=r},getListener:function(e,t){var n=d[t];return n&&n[e]},deleteListener:function(e,t){var n=d[t];n&&delete n[e]},deleteAllListeners:function(e){for(var t in d)delete d[t][e]},extractEvents:function(e,t,n,r){for(var o,s=a.plugins,u=0,d=s.length;d>u;u++){var l=s[u];if(l){var c=l.extractEvents(e,t,n,r);c&&(o=i(o,c))}}return o},enqueueEvents:function(e){e&&(l=i(l,e))},processEventQueue:function(){var e=l;l=null,s(e,c),"production"!==t.env.NODE_ENV?u(!l,"processEventQueue(): Additional events were enqueued while processing an event queue. 
Support for this has not yet been implemented."):u(!l)},__purge:function(){d={}},__getListenerBank:function(){return d}};e.exports=p}).call(t,n(9))},
/* webpack module 76 -- EventPluginRegistry: maintains plugin ordering, publishes dispatch configs and registration names; continues on the next line (a string literal spans the line break -- do not re-wrap). */
function(e,t,n){(function(t){"use strict";function r(){if(s)for(var e in u){var n=u[e],r=s.indexOf(e);if("production"!==t.env.NODE_ENV?i(r>-1,"EventPluginRegistry: Cannot inject event plugins that do not exist in the plugin ordering, `%s`.",e):i(r>-1),!d.plugins[r]){"production"!==t.env.NODE_ENV?i(n.extractEvents,"EventPluginRegistry: Event plugins must implement an `extractEvents` method, but `%s` does not.",e):i(n.extractEvents),d.plugins[r]=n;var o=n.eventTypes;for(var l in o)"production"!==t.env.NODE_ENV?i(a(o[l],n,l),"EventPluginRegistry: Failed to publish event `%s` for plugin `%s`.",l,e):i(a(o[l],n,l))}}}function a(e,n,r){"production"!==t.env.NODE_ENV?i(!d.eventNameDispatchConfigs.hasOwnProperty(r),"EventPluginHub: More than one plugin attempted to publish the same event name, `%s`.",r):i(!d.eventNameDispatchConfigs.hasOwnProperty(r)),d.eventNameDispatchConfigs[r]=e;var a=e.phasedRegistrationNames;if(a){for(var s in a)if(a.hasOwnProperty(s)){var u=a[s];o(u,n,r)}return!0}return e.registrationName?(o(e.registrationName,n,r),!0):!1}function o(e,n,r){"production"!==t.env.NODE_ENV?i(!d.registrationNameModules[e],"EventPluginHub: More than one plugin attempted to publish the same registration name, `%s`.",e):i(!d.registrationNameModules[e]),d.registrationNameModules[e]=n,d.registrationNameDependencies[e]=n.eventTypes[r].dependencies}var i=n(13),s=null,u={},d={plugins:[],eventNameDispatchConfigs:{},registrationNameModules:{},registrationNameDependencies:{},injectEventPluginOrder:function(e){"production"!==t.env.NODE_ENV?i(!s,"EventPluginRegistry: Cannot inject event plugin ordering more than once. 
You are likely trying to load more than one copy of React."):i(!s),s=Array.prototype.slice.call(e),r()},injectEventPluginsByName:function(e){var n=!1;for(var a in e)if(e.hasOwnProperty(a)){var o=e[a];u.hasOwnProperty(a)&&u[a]===o||("production"!==t.env.NODE_ENV?i(!u[a],"EventPluginRegistry: Cannot inject two different event plugins using the same name, `%s`.",a):i(!u[a]),u[a]=o,n=!0)}n&&r()},getPluginModuleForEvent:function(e){var t=e.dispatchConfig;if(t.registrationName)return d.registrationNameModules[t.registrationName]||null;for(var n in t.phasedRegistrationNames)if(t.phasedRegistrationNames.hasOwnProperty(n)){var r=d.registrationNameModules[t.phasedRegistrationNames[n]];if(r)return r}return null},_resetEventPlugins:function(){s=null;for(var e in u)u.hasOwnProperty(e)&&delete u[e];d.plugins.length=0;var t=d.eventNameDispatchConfigs;for(var n in t)t.hasOwnProperty(n)&&delete t[n];var r=d.registrationNameModules;for(var a in r)r.hasOwnProperty(a)&&delete r[a]}};e.exports=d}).call(t,n(9))},
/* webpack module 77 -- accumulateInto: appends items/arrays into an accumulator, reusing arrays where possible. */
function(e,t,n){(function(t){"use strict";function r(e,n){if("production"!==t.env.NODE_ENV?a(null!=n,"accumulateInto(...): Accumulated items must not be null or undefined."):a(null!=n),null==e)return n;var r=Array.isArray(e),o=Array.isArray(n);return r&&o?(e.push.apply(e,n),e):r?(e.push(n),e):o?[e].concat(n):[e,n]}var a=n(13);e.exports=r}).call(t,n(9))},
/* webpack module 78 -- forEachAccumulated: iterates an accumulation (single item or array). */
function(e,t){"use strict";var n=function(e,t,n){Array.isArray(e)?e.forEach(t,n):e&&t.call(n,e)};e.exports=n},
/* webpack module 79 -- ReactEventEmitterMixin: extracts events via EventPluginHub (module 75) and runs the queue. */
function(e,t,n){"use strict";function r(e){a.enqueueEvents(e),a.processEventQueue()}var a=n(75),o={handleTopLevel:function(e,t,n,o){var i=a.extractEvents(e,t,n,o);r(i)}};e.exports=o},
/* webpack module 80 -- ViewportMetrics: cached scroll offsets refreshed by the event listener. */
function(e,t){"use strict";var n={currentScrollLeft:0,currentScrollTop:0,refreshScrollValues:function(e){n.currentScrollLeft=e.x,n.currentScrollTop=e.y}};e.exports=n},
/* webpack module 81 -- isEventSupported (jsdoc and body continue on the following lines). */
function(e,t,n){"use strict";/**
 * Checks if an event is supported in the current execution environment.
 *
 * NOTE: This will not work correctly for non-generic events such as `change`,
 * `reset`, `load`, `error`, and `select`.
 *
 * Borrows from Modernizr.
 *
 * @param {string} eventNameSuffix Event name, e.g. "click".
 * @param {?boolean} capture Check if the capture phase is supported.
 * @return {boolean} True if the event is supported.
 * @internal
 * @license Modernizr 3.0.0pre (Custom Build) | MIT
 *
 * (webpack module 81 -- isEventSupported: `"on" + name in document` probe, with a
 * setAttribute fallback and a hasFeature special case for `wheel`.)
 */
function r(e,t){if(!o.canUseDOM||t&&!("addEventListener"in document))return!1;var n="on"+e,r=n in document;if(!r){var i=document.createElement("div");i.setAttribute(n,"return;"),r="function"==typeof i[n]}return!r&&a&&"wheel"===e&&(r=document.implementation.hasFeature("Events.wheel","3.0")),r}var a,o=n(57);o.canUseDOM&&(a=document.implementation&&document.implementation.hasFeature&&document.implementation.hasFeature("","")!==!0),e.exports=r},
/* webpack module 82 -- ReactEmptyComponent: placeholder component rendered for null/false, tracked by root-node ID. */
function(e,t,n){(function(t){"use strict";function r(e){l[e]=!0}function a(e){delete l[e]}function o(e){return!!l[e]}var i,s=n(17),u=n(31),d=n(13),l={},c={injectEmptyComponent:function(e){i=s.createFactory(e)}},_=function(){};_.prototype.componentDidMount=function(){var e=u.get(this);e&&r(e._rootNodeID)},_.prototype.componentWillUnmount=function(){var e=u.get(this);e&&a(e._rootNodeID)},_.prototype.render=function(){return"production"!==t.env.NODE_ENV?d(i,"Trying to return null from a render, but no null placeholder component was injected."):d(i),i()};var p=s.createElement(_),m={emptyElement:p,injection:c,isNullComponentID:o};e.exports=m}).call(t,n(9))},
/* webpack module 83 -- ReactMarkupChecksum: stamps markup with a data-react-checksum (adler32, module 84) and validates reuse. */
function(e,t,n){"use strict";var r=n(84),a={CHECKSUM_ATTR_NAME:"data-react-checksum",addChecksumToMarkup:function(e){var t=r(e);return e.replace(">"," "+a.CHECKSUM_ATTR_NAME+'="'+t+'">')},canReuseMarkup:function(e,t){var n=t.getAttribute(a.CHECKSUM_ATTR_NAME);n=n&&parseInt(n,10);var o=r(e);return o===n}};e.exports=a},
/* webpack module 84 -- adler32: Adler-32 checksum over a string (modulus 65521). */
function(e,t){"use strict";function n(e){for(var t=1,n=0,a=0;a<e.length;a++)t=(t+e.charCodeAt(a))%r,n=(n+t)%r;return t|n<<16}var r=65521;e.exports=n},
/* webpack module 85 -- containsNode: DOM ancestry test via contains/compareDocumentPosition, recursing out of text nodes. */
function(e,t,n){function r(e,t){return e&&t?e===t?!0:a(e)?!1:a(t)?r(e,t.parentNode):e.contains?e.contains(t):e.compareDocumentPosition?!!(16&e.compareDocumentPosition(t)):!1:!1}var a=n(86);e.exports=r},
/* webpack module 86 -- isTextNode. */
function(e,t,n){function r(e){return a(e)&&3==e.nodeType}var a=n(87);e.exports=r},
/* webpack module 87 -- isNode: duck-typed DOM Node test (continues on the next line). */
function(e,t){function n(e){return!(!e||!("function"==typeof Node?e instanceof Node:"object"==typeof e&&"number"==typeof 
e.nodeType&&"string"==typeof e.nodeName))}e.exports=n},
/* webpack module 88 -- getReactRootElementInContainer: documentElement for documents, firstChild otherwise. */
function(e,t){"use strict";function n(e){return e?e.nodeType===r?e.documentElement:e.firstChild:null}var r=9;e.exports=n},
/* webpack module 89 -- instantiateReactComponent: turns a ReactElement/text node into an internal component instance, with dev-mode shape checks. */
function(e,t,n){(function(t){"use strict";function r(e){return"function"==typeof e&&"undefined"!=typeof e.prototype&&"function"==typeof e.prototype.mountComponent&&"function"==typeof e.prototype.receiveComponent}function a(e,n){var a;if((null===e||e===!1)&&(e=i.emptyElement),"object"==typeof e){var o=e;"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?l(o&&("function"==typeof o.type||"string"==typeof o.type),"Only functions or strings can be mounted as React components."):null),a=n===o.type&&"string"==typeof o.type?s.createInternalComponent(o):r(o.type)?new o.type(o):new c}else"string"==typeof e||"number"==typeof e?a=s.createInstanceForText(e):"production"!==t.env.NODE_ENV?d(!1,"Encountered invalid React node of type %s",typeof e):d(!1);return"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?l("function"==typeof a.construct&&"function"==typeof a.mountComponent&&"function"==typeof a.receiveComponent&&"function"==typeof a.unmountComponent,"Only React Components can be mounted."):null),a.construct(e),a._mountIndex=0,a._mountImage=null,"production"!==t.env.NODE_ENV&&(a._isOwnerNecessary=!1,a._warnedAboutRefsInRender=!1),"production"!==t.env.NODE_ENV&&Object.preventExtensions&&Object.preventExtensions(a),a}var o=n(90),i=n(82),s=n(41),u=n(19),d=n(13),l=n(21),c=function(){};u(c.prototype,o.Mixin,{_instantiateReactComponent:a}),e.exports=a}).call(t,n(9))},
/* webpack module 90 (incomplete at chunk edge) -- builds a " Check the render method of `X`." hint from an element's owner; continues beyond this chunk. */
function(e,t,n){(function(t){"use strict";function r(e){var t=e._currentElement._owner||null;if(t){var n=t.getName();if(n)return" Check the render method of `"+n+"`."}return""}var 
a=n(91),o=n(18),i=n(23),s=n(17),u=n(38),d=n(31),l=n(30),c=n(41),_=n(34),p=n(39),m=n(40),h=n(35),f=n(32),y=n(19),M=n(20),v=n(13),g=n(92),L=n(21),D=1,Y={construct:function(e){this._currentElement=e,this._rootNodeID=null,this._instance=null,this._pendingElement=null,this._pendingStateQueue=null,this._pendingReplaceState=!1,this._pendingForceUpdate=!1,this._renderedComponent=null,this._context=null,this._mountOrder=0,this._isTopLevel=!1,this._pendingCallbacks=null},mountComponent:function(e,n,r){this._context=r,this._mountOrder=D++,this._rootNodeID=e;var a=this._processProps(this._currentElement.props),o=this._processContext(this._currentElement._context),i=c.getComponentClassForElement(this._currentElement),s=new i(a,o);"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?L(null!=s.render,"%s(...): No `render` method found on the returned component instance: you may have forgotten to define `render` in your component or you may have accidentally tried to render an element whose type is a function that isn't a React component.",i.displayName||i.name||"Component"):null),s.props=a,s.context=o,s.refs=M,this._instance=s,d.set(s,this),"production"!==t.env.NODE_ENV&&this._warnIfContextsDiffer(this._currentElement._context,r),"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?L(!s.getInitialState||s.getInitialState.isReactClassApproved,"getInitialState was defined on %s, a plain JavaScript class. This is only supported for classes created using React.createClass. Did you mean to define a state property instead?",this.getName()||"a component"):null,"production"!==t.env.NODE_ENV?L(!s.getDefaultProps||s.getDefaultProps.isReactClassApproved,"getDefaultProps was defined on %s, a plain JavaScript class. This is only supported for classes created using React.createClass. Use a static property to define defaultProps instead.",this.getName()||"a component"):null,"production"!==t.env.NODE_ENV?L(!s.propTypes,"propTypes was defined as an instance property on %s. 
Use a static property to define propTypes instead.",this.getName()||"a component"):null,"production"!==t.env.NODE_ENV?L(!s.contextTypes,"contextTypes was defined as an instance property on %s. Use a static property to define contextTypes instead.",this.getName()||"a component"):null,"production"!==t.env.NODE_ENV?L("function"!=typeof s.componentShouldUpdate,"%s has a method called componentShouldUpdate(). Did you mean shouldComponentUpdate()? The name is phrased as a question because the function is expected to return a value.",this.getName()||"A component"):null);var u=s.state;void 0===u&&(s.state=u=null),"production"!==t.env.NODE_ENV?v("object"==typeof u&&!Array.isArray(u),"%s.state: must be set to an object or null",this.getName()||"ReactCompositeComponent"):v("object"==typeof u&&!Array.isArray(u)),this._pendingStateQueue=null,this._pendingReplaceState=!1,this._pendingForceUpdate=!1;var _,p,m=l.currentlyMountingInstance;l.currentlyMountingInstance=this;try{s.componentWillMount&&(s.componentWillMount(),this._pendingStateQueue&&(s.state=this._processPendingState(s.props,s.context))),_=this._getValidatedChildContext(r),p=this._renderValidatedComponent(_)}finally{l.currentlyMountingInstance=m}this._renderedComponent=this._instantiateReactComponent(p,this._currentElement.type);var f=h.mountComponent(this._renderedComponent,e,n,this._mergeChildContext(r,_));return s.componentDidMount&&n.getReactMountReady().enqueue(s.componentDidMount,s),f},unmountComponent:function(){var e=this._instance;if(e.componentWillUnmount){var t=l.currentlyUnmountingInstance;l.currentlyUnmountingInstance=this;try{e.componentWillUnmount()}finally{l.currentlyUnmountingInstance=t}}h.unmountComponent(this._renderedComponent),this._renderedComponent=null,this._pendingStateQueue=null,this._pendingReplaceState=!1,this._pendingForceUpdate=!1,this._pendingCallbacks=null,this._pendingElement=null,this._context=null,this._rootNodeID=null,d.remove(e)},_setPropsInternal:function(e,t){var 
n=this._pendingElement||this._currentElement;this._pendingElement=s.cloneAndReplaceProps(n,y({},n.props,e)),f.enqueueUpdate(this,t)},_maskContext:function(e){var t=null;if("string"==typeof this._currentElement.type)return M;var n=this._currentElement.type.contextTypes;if(!n)return M;t={};for(var r in n)t[r]=e[r];return t},_processContext:function(e){var n=this._maskContext(e);if("production"!==t.env.NODE_ENV){var r=c.getComponentClassForElement(this._currentElement);r.contextTypes&&this._checkPropTypes(r.contextTypes,n,p.context)}return n},_getValidatedChildContext:function(e){var n=this._instance,r=n.getChildContext&&n.getChildContext();if(r){"production"!==t.env.NODE_ENV?v("object"==typeof n.constructor.childContextTypes,"%s.getChildContext(): childContextTypes must be defined in order to use getChildContext().",this.getName()||"ReactCompositeComponent"):v("object"==typeof n.constructor.childContextTypes),"production"!==t.env.NODE_ENV&&this._checkPropTypes(n.constructor.childContextTypes,r,p.childContext);for(var a in r)"production"!==t.env.NODE_ENV?v(a in n.constructor.childContextTypes,'%s.getChildContext(): key "%s" is not defined in childContextTypes.',this.getName()||"ReactCompositeComponent",a):v(a in n.constructor.childContextTypes);return r}return null},_mergeChildContext:function(e,t){return t?y({},e,t):e},_processProps:function(e){if("production"!==t.env.NODE_ENV){var n=c.getComponentClassForElement(this._currentElement);n.propTypes&&this._checkPropTypes(n.propTypes,e,p.prop)}return e},_checkPropTypes:function(e,n,a){var o=this.getName();for(var i in e)if(e.hasOwnProperty(i)){var s;try{"production"!==t.env.NODE_ENV?v("function"==typeof e[i],"%s: %s type `%s` is invalid; it must be a function, usually from React.PropTypes.",o||"React class",m[a],i):v("function"==typeof e[i]),s=e[i](n,i,o,a)}catch(u){s=u}if(s instanceof Error){var d=r(this);a===p.prop?"production"!==t.env.NODE_ENV?L(!1,"Failed Composite propType: 
%s%s",s.message,d):null:"production"!==t.env.NODE_ENV?L(!1,"Failed Context Types: %s%s",s.message,d):null}}},receiveComponent:function(e,t,n){var r=this._currentElement,a=this._context;this._pendingElement=null,this.updateComponent(t,r,e,a,n)},performUpdateIfNecessary:function(e){null!=this._pendingElement&&h.receiveComponent(this,this._pendingElement||this._currentElement,e,this._context),(null!==this._pendingStateQueue||this._pendingForceUpdate)&&("production"!==t.env.NODE_ENV&&u.checkAndWarnForMutatedProps(this._currentElement),this.updateComponent(e,this._currentElement,this._currentElement,this._context,this._context))},_warnIfContextsDiffer:function(e,n){e=this._maskContext(e),n=this._maskContext(n);for(var r=Object.keys(n).sort(),a=this.getName()||"ReactCompositeComponent",o=0;o<r.length;o++){var i=r[o];"production"!==t.env.NODE_ENV?L(e[i]===n[i],"owner-based and parent-based contexts differ (values: `%s` vs `%s`) for key (%s) while mounting %s (see: http://fb.me/react-context-by-parent)",e[i],n[i],i,a):null}},updateComponent:function(e,n,r,a,o){var i=this._instance,s=i.context,u=i.props;n!==r&&(s=this._processContext(r._context),u=this._processProps(r.props),"production"!==t.env.NODE_ENV&&null!=o&&this._warnIfContextsDiffer(r._context,o),i.componentWillReceiveProps&&i.componentWillReceiveProps(u,s));var d=this._processPendingState(u,s),l=this._pendingForceUpdate||!i.shouldComponentUpdate||i.shouldComponentUpdate(u,d,s);"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?L("undefined"!=typeof l,"%s.shouldComponentUpdate(): Returned undefined instead of a boolean value. 
Make sure to return true or false.",this.getName()||"ReactCompositeComponent"):null),l?(this._pendingForceUpdate=!1,this._performComponentUpdate(r,u,d,s,e,o)):(this._currentElement=r,this._context=o,i.props=u,i.state=d,i.context=s)},_processPendingState:function(e,t){var n=this._instance,r=this._pendingStateQueue,a=this._pendingReplaceState;if(this._pendingReplaceState=!1,this._pendingStateQueue=null,!r)return n.state;if(a&&1===r.length)return r[0];for(var o=y({},a?r[0]:n.state),i=a?1:0;i<r.length;i++){var s=r[i];y(o,"function"==typeof s?s.call(n,o,e,t):s)}return o},_performComponentUpdate:function(e,t,n,r,a,o){var i=this._instance,s=i.props,u=i.state,d=i.context;i.componentWillUpdate&&i.componentWillUpdate(t,n,r),this._currentElement=e,this._context=o,i.props=t,i.state=n,i.context=r,this._updateRenderedComponent(a,o),i.componentDidUpdate&&a.getReactMountReady().enqueue(i.componentDidUpdate.bind(i,s,u,d),i)},_updateRenderedComponent:function(e,t){var n=this._renderedComponent,r=n._currentElement,a=this._getValidatedChildContext(),o=this._renderValidatedComponent(a);if(g(r,o))h.receiveComponent(n,o,e,this._mergeChildContext(t,a));else{var i=this._rootNodeID,s=n._rootNodeID;h.unmountComponent(n),this._renderedComponent=this._instantiateReactComponent(o,this._currentElement.type);var u=h.mountComponent(this._renderedComponent,i,e,this._mergeChildContext(t,a));this._replaceNodeWithMarkupByID(s,u)}},_replaceNodeWithMarkupByID:function(e,t){a.replaceNodeWithMarkupByID(e,t)},_renderValidatedComponentWithoutOwnerOrContext:function(){var e=this._instance,n=e.render();return"production"!==t.env.NODE_ENV&&"undefined"==typeof n&&e.render._isMockFunction&&(n=null),n},_renderValidatedComponent:function(e){var 
n,r=o.current;o.current=this._mergeChildContext(this._currentElement._context,e),i.current=this;try{n=this._renderValidatedComponentWithoutOwnerOrContext()}finally{o.current=r,i.current=null}return"production"!==t.env.NODE_ENV?v(null===n||n===!1||s.isValidElement(n),"%s.render(): A valid ReactComponent must be returned. You may have returned undefined, an array or some other invalid object.",this.getName()||"ReactCompositeComponent"):v(null===n||n===!1||s.isValidElement(n)),n},attachRef:function(e,t){var n=this.getPublicInstance(),r=n.refs===M?n.refs={}:n.refs;r[e]=t.getPublicInstance()},detachRef:function(e){var t=this.getPublicInstance().refs;delete t[e]},getName:function(){var e=this._currentElement.type,t=this._instance&&this._instance.constructor;return e.displayName||t&&t.displayName||e.name||t&&t.name||null},getPublicInstance:function(){return this._instance},_instantiateReactComponent:null};_.measureMethods(Y,"ReactCompositeComponent",{mountComponent:"mountComponent",updateComponent:"updateComponent",_renderValidatedComponent:"_renderValidatedComponent"});var b={Mixin:Y};e.exports=b}).call(t,n(9))},function(e,t,n){(function(t){"use strict";var r=n(13),a=!1,o={unmountIDFromEnvironment:null,replaceNodeWithMarkupByID:null,processChildrenUpdates:null,injection:{injectEnvironment:function(e){"production"!==t.env.NODE_ENV?r(!a,"ReactCompositeComponent: injectEnvironment() can only be called once."):r(!a),o.unmountIDFromEnvironment=e.unmountIDFromEnvironment,o.replaceNodeWithMarkupByID=e.replaceNodeWithMarkupByID,o.processChildrenUpdates=e.processChildrenUpdates,a=!0}}};e.exports=o}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e,n){if(null!=e&&null!=n){var r=typeof e,o=typeof n;if("string"===r||"number"===r)return"string"===o||"number"===o;if("object"===o&&e.type===n.type&&e.key===n.key){var 
i=e._owner===n._owner,s=null,u=null,d=null;return"production"!==t.env.NODE_ENV&&(i||(null!=e._owner&&null!=e._owner.getPublicInstance()&&null!=e._owner.getPublicInstance().constructor&&(s=e._owner.getPublicInstance().constructor.displayName),null!=n._owner&&null!=n._owner.getPublicInstance()&&null!=n._owner.getPublicInstance().constructor&&(u=n._owner.getPublicInstance().constructor.displayName),null!=n.type&&null!=n.type.displayName&&(d=n.type.displayName),null!=n.type&&"string"==typeof n.type&&(d=n.type),("string"!=typeof n.type||"input"===n.type||"textarea"===n.type)&&(null!=e._owner&&e._owner._isOwnerNecessary===!1||null!=n._owner&&n._owner._isOwnerNecessary===!1)&&(null!=e._owner&&(e._owner._isOwnerNecessary=!0),null!=n._owner&&(n._owner._isOwnerNecessary=!0),"production"!==t.env.NODE_ENV?a(!1,"<%s /> is being rendered by both %s and %s using the same key (%s) in the same place. Currently, this means that they don't preserve state. This behavior should be very rare so we're considering deprecating it. Please contact the React team and explain your use case so that we can take that into consideration.",d||"Unknown Component",s||"[Unknown]",u||"[Unknown]",e.key):null))),i}}return!1}var a=n(21);e.exports=r}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e){e&&(null!=e.dangerouslySetInnerHTML&&("production"!==t.env.NODE_ENV?y(null==e.children,"Can only set one of `children` or `props.dangerouslySetInnerHTML`."):y(null==e.children),"production"!==t.env.NODE_ENV?y("object"==typeof e.dangerouslySetInnerHTML&&"__html"in e.dangerouslySetInnerHTML,"`props.dangerouslySetInnerHTML` must be in the form `{__html: ...}`. Please visit https://fb.me/react-invariant-dangerously-set-inner-html for more information."):y("object"==typeof e.dangerouslySetInnerHTML&&"__html"in e.dangerouslySetInnerHTML)),"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?g(null==e.innerHTML,"Directly setting property `innerHTML` is not permitted. 
For more information, lookup documentation on `dangerouslySetInnerHTML`."):null,"production"!==t.env.NODE_ENV?g(!e.contentEditable||null==e.children,"A component is `contentEditable` and contains `children` managed by React. It is now your responsibility to guarantee that none of those nodes are unexpectedly modified or duplicated. This is probably not intentional."):null),"production"!==t.env.NODE_ENV?y(null==e.style||"object"==typeof e.style,"The `style` prop expects a mapping from style properties to values, not a string. For example, style={{marginRight: spacing + 'em'}} when using JSX."):y(null==e.style||"object"==typeof e.style))}function a(e,n,r,a){"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?g("onScroll"!==n||M("scroll",!0),"This browser doesn't support the `onScroll` event"):null);var o=_.findReactContainerForID(e);if(o){var i=o.nodeType===T?o.ownerDocument:o;D(n,i)}a.getPutListenerQueue().enqueuePutListener(e,n,r)}function o(e){x.call(S,e)||("production"!==t.env.NODE_ENV?y(N.test(e),"Invalid tag: %s",e):y(N.test(e)),S[e]=!0)}function i(e){o(e),this._tag=e,this._renderedChildren=null,this._previousStyleCopy=null,this._rootNodeID=null}var s=n(55),u=n(50),d=n(49),l=n(74),c=n(53),_=n(73),p=n(94),m=n(34),h=n(19),f=n(52),y=n(13),M=n(81),v=n(45),g=n(21),L=l.deleteListener,D=l.listenTo,Y=l.registrationNameModules,b={string:!0,number:!0},k=v({style:null}),T=1,E=null,w={area:!0,base:!0,br:!0,col:!0,embed:!0,hr:!0,img:!0,input:!0,keygen:!0,link:!0,meta:!0,param:!0,source:!0,track:!0,wbr:!0},N=/^[a-zA-Z][a-zA-Z:_\.\-\d]*$/,S={},x={}.hasOwnProperty;i.displayName="ReactDOMComponent",i.Mixin={construct:function(e){this._currentElement=e},mountComponent:function(e,t,n){this._rootNodeID=e,r(this._currentElement.props);var a=w[this._tag]?"":"</"+this._tag+">";return this._createOpenTagMarkupAndPutListeners(t)+this._createContentMarkup(t,n)+a},_createOpenTagMarkupAndPutListeners:function(e){var t=this._currentElement.props,n="<"+this._tag;for(var r in 
t)if(t.hasOwnProperty(r)){var o=t[r];if(null!=o)if(Y.hasOwnProperty(r))a(this._rootNodeID,r,o,e);else{r===k&&(o&&(o=this._previousStyleCopy=h({},t.style)),o=s.createMarkupForStyles(o));var i=d.createMarkupForProperty(r,o);i&&(n+=" "+i)}}if(e.renderToStaticMarkup)return n+">";var u=d.createMarkupForID(this._rootNodeID);return n+" "+u+">"},_createContentMarkup:function(e,t){var n="";("listing"===this._tag||"pre"===this._tag||"textarea"===this._tag)&&(n="\n");var r=this._currentElement.props,a=r.dangerouslySetInnerHTML;if(null!=a){if(null!=a.__html)return n+a.__html}else{var o=b[typeof r.children]?r.children:null,i=null!=o?null:r.children;if(null!=o)return n+f(o);if(null!=i){var s=this.mountChildren(i,e,t);return n+s.join("")}}return n},receiveComponent:function(e,t,n){var r=this._currentElement;this._currentElement=e,this.updateComponent(t,r,e,n)},updateComponent:function(e,t,n,a){r(this._currentElement.props),this._updateDOMProperties(t.props,e),this._updateDOMChildren(t.props,e,a)},_updateDOMProperties:function(e,t){var n,r,o,i=this._currentElement.props;for(n in e)if(!i.hasOwnProperty(n)&&e.hasOwnProperty(n))if(n===k){var s=this._previousStyleCopy;for(r in s)s.hasOwnProperty(r)&&(o=o||{},o[r]="");this._previousStyleCopy=null}else Y.hasOwnProperty(n)?L(this._rootNodeID,n):(u.isStandardName[n]||u.isCustomAttribute(n))&&E.deletePropertyByID(this._rootNodeID,n);for(n in i){var d=i[n],l=n===k?this._previousStyleCopy:e[n];if(i.hasOwnProperty(n)&&d!==l)if(n===k)if(d?d=this._previousStyleCopy=h({},d):this._previousStyleCopy=null,l){for(r in l)!l.hasOwnProperty(r)||d&&d.hasOwnProperty(r)||(o=o||{},o[r]="");for(r in d)d.hasOwnProperty(r)&&l[r]!==d[r]&&(o=o||{},o[r]=d[r])}else o=d;else Y.hasOwnProperty(n)?a(this._rootNodeID,n,d,t):(u.isStandardName[n]||u.isCustomAttribute(n))&&E.updatePropertyByID(this._rootNodeID,n,d)}o&&E.updateStylesByID(this._rootNodeID,o)},_updateDOMChildren:function(e,t,n){var r=this._currentElement.props,a=b[typeof 
e.children]?e.children:null,o=b[typeof r.children]?r.children:null,i=e.dangerouslySetInnerHTML&&e.dangerouslySetInnerHTML.__html,s=r.dangerouslySetInnerHTML&&r.dangerouslySetInnerHTML.__html,u=null!=a?null:e.children,d=null!=o?null:r.children,l=null!=a||null!=i,c=null!=o||null!=s;null!=u&&null==d?this.updateChildren(null,t,n):l&&!c&&this.updateTextContent(""),null!=o?a!==o&&this.updateTextContent(""+o):null!=s?i!==s&&E.updateInnerHTMLByID(this._rootNodeID,s):null!=d&&this.updateChildren(d,t,n)},unmountComponent:function(){this.unmountChildren(),l.deleteAllListeners(this._rootNodeID),c.unmountIDFromEnvironment(this._rootNodeID),this._rootNodeID=null}},m.measureMethods(i,"ReactDOMComponent",{mountComponent:"mountComponent",updateComponent:"updateComponent"}),h(i.prototype,i.Mixin,p.Mixin),i.injection={injectIDOperations:function(e){i.BackendIDOperations=E=e}},e.exports=i}).call(t,n(9))},function(e,t,n){"use strict";function r(e,t,n){m.push({parentID:e,parentNode:null,type:l.INSERT_MARKUP,markupIndex:h.push(t)-1,textContent:null,fromIndex:null,toIndex:n})}function a(e,t,n){m.push({parentID:e,parentNode:null,type:l.MOVE_EXISTING,markupIndex:null,textContent:null,fromIndex:t,toIndex:n})}function o(e,t){m.push({parentID:e,parentNode:null,type:l.REMOVE_NODE,markupIndex:null,textContent:null,fromIndex:t,toIndex:null})}function i(e,t){m.push({parentID:e,parentNode:null,type:l.TEXT_CONTENT,markupIndex:null,textContent:t,fromIndex:null,toIndex:null})}function s(){m.length&&(d.processChildrenUpdates(m,h),u())}function u(){m.length=0,h.length=0}var d=n(91),l=n(70),c=n(35),_=n(95),p=0,m=[],h=[],f={Mixin:{mountChildren:function(e,t,n){var r=_.instantiateChildren(e,t,n);this._renderedChildren=r;var a=[],o=0;for(var i in r)if(r.hasOwnProperty(i)){var s=r[i],u=this._rootNodeID+i,d=c.mountComponent(s,u,t,n);s._mountIndex=o,a.push(d),o++}return a},updateTextContent:function(e){p++;var t=!0;try{var n=this._renderedChildren;_.unmountChildren(n);for(var r in 
n)n.hasOwnProperty(r)&&this._unmountChildByName(n[r],r);this.setTextContent(e),t=!1}finally{p--,p||(t?u():s())}},updateChildren:function(e,t,n){p++;var r=!0;try{this._updateChildren(e,t,n),r=!1}finally{p--,p||(r?u():s())}},_updateChildren:function(e,t,n){var r=this._renderedChildren,a=_.updateChildren(r,e,t,n);if(this._renderedChildren=a,a||r){var o,i=0,s=0;for(o in a)if(a.hasOwnProperty(o)){var u=r&&r[o],d=a[o];u===d?(this.moveChild(u,s,i),i=Math.max(u._mountIndex,i),u._mountIndex=s):(u&&(i=Math.max(u._mountIndex,i),this._unmountChildByName(u,o)),this._mountChildByNameAtIndex(d,o,s,t,n)),s++}for(o in r)!r.hasOwnProperty(o)||a&&a.hasOwnProperty(o)||this._unmountChildByName(r[o],o)}},unmountChildren:function(){var e=this._renderedChildren;_.unmountChildren(e),this._renderedChildren=null},moveChild:function(e,t,n){e._mountIndex<n&&a(this._rootNodeID,e._mountIndex,t)},createChild:function(e,t){r(this._rootNodeID,t,e._mountIndex)},removeChild:function(e){o(this._rootNodeID,e._mountIndex)},setTextContent:function(e){i(this._rootNodeID,e)},_mountChildByNameAtIndex:function(e,t,n,r,a){var o=this._rootNodeID+t,i=c.mountComponent(e,o,r,a);e._mountIndex=n,this.createChild(e,i)},_unmountChildByName:function(e,t){this.removeChild(e),e._mountIndex=null}}};e.exports=f},function(e,t,n){"use strict";var r=n(35),a=n(96),o=n(89),i=n(92),s={instantiateChildren:function(e,t,n){var r=a(e);for(var i in r)if(r.hasOwnProperty(i)){var s=r[i],u=o(s,null);r[i]=u}return r},updateChildren:function(e,t,n,s){var u=a(t);if(!u&&!e)return null;var d;for(d in u)if(u.hasOwnProperty(d)){var l=e&&e[d],c=l&&l._currentElement,_=u[d];if(i(c,_))r.receiveComponent(l,_,n,s),u[d]=l;else{l&&r.unmountComponent(l,d);var p=o(_,null);u[d]=p}}for(d in e)!e.hasOwnProperty(d)||u&&u.hasOwnProperty(d)||r.unmountComponent(e[d]);return u},unmountChildren:function(e){for(var t in e){var n=e[t];r.unmountComponent(n)}}};e.exports=s},function(e,t,n){(function(t){"use strict";function r(e,n,r){var 
a=e,o=!a.hasOwnProperty(r);"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?i(o,"flattenChildren(...): Encountered two children with the same key, `%s`. Child keys must be unique; when two children share a key, only the first child will be used.",r):null),o&&null!=n&&(a[r]=n)}function a(e){if(null==e)return e;var t={};return o(e,r,t),t}var o=n(24),i=n(21);e.exports=a}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e){return m.createClass({tagName:e.toUpperCase(),render:function(){return new w(e,null,null,null,null,this.props)}})}function a(){if(S.EventEmitter.injectReactEventListener(N),S.EventPluginHub.injectEventPluginOrder(u),S.EventPluginHub.injectInstanceHandle(x),S.EventPluginHub.injectMount(C),S.EventPluginHub.injectEventPluginsByName({SimpleEventPlugin:H,EnterLeaveEventPlugin:d,ChangeEventPlugin:i,MobileSafariClickEventPlugin:_,SelectEventPlugin:P,BeforeInputEventPlugin:o}),S.NativeComponent.injectGenericComponentClass(y),S.NativeComponent.injectTextComponentClass(E),S.NativeComponent.injectAutoWrapper(r),S.Class.injectMixin(p),S.NativeComponent.injectComponentClasses({button:M,form:v,iframe:D,img:g,input:Y,option:b,select:k,textarea:T,html:R("html"),head:R("head"),body:R("body")}),S.DOMProperty.injectDOMPropertyConfig(c),S.DOMProperty.injectDOMPropertyConfig(I),S.EmptyComponent.injectEmptyComponent("noscript"),S.Updates.injectReconcileTransaction(O),S.Updates.injectBatchingStrategy(f),S.RootIndex.injectCreateReactRootIndex(l.canUseDOM?s.createReactRootIndex:j.createReactRootIndex),S.Component.injectEnvironment(h),S.DOMComponent.injectIDOperations(L),"production"!==t.env.NODE_ENV){var e=l.canUseDOM&&window.location.href||"";if(/[?&]react_perf\b/.test(e)){var a=n(156);a.start()}}}var 
o=n(98),i=n(106),s=n(108),u=n(109),d=n(110),l=n(57),c=n(114),_=n(115),p=n(116),m=n(43),h=n(53),f=n(118),y=n(93),M=n(119),v=n(122),g=n(124),L=n(54),D=n(125),Y=n(126),b=n(129),k=n(130),T=n(131),E=n(48),w=n(17),N=n(132),S=n(135),x=n(25),C=n(73),O=n(136),P=n(142),j=n(144),H=n(145),I=n(154),R=n(155);e.exports={inject:a}}).call(t,n(9))},function(e,t,n){"use strict";function r(){var e=window.opera;return"object"==typeof e&&"function"==typeof e.version&&parseInt(e.version(),10)<=12}function a(e){return(e.ctrlKey||e.altKey||e.metaKey)&&!(e.ctrlKey&&e.altKey)}function o(e){switch(e){case w.topCompositionStart:return N.compositionStart;case w.topCompositionEnd:return N.compositionEnd;case w.topCompositionUpdate:return N.compositionUpdate}}function i(e,t){return e===w.topKeyDown&&t.keyCode===L}function s(e,t){switch(e){case w.topKeyUp:return-1!==g.indexOf(t.keyCode);case w.topKeyDown:return t.keyCode!==L;case w.topKeyPress:case w.topMouseDown:case w.topBlur:return!0;default:return!1}}function u(e){var t=e.detail;return"object"==typeof t&&"data"in t?t.data:null}function d(e,t,n,r){var a,d;if(D?a=o(e):x?s(e,r)&&(a=N.compositionEnd):i(e,r)&&(a=N.compositionStart),!a)return null;k&&(x||a!==N.compositionStart?a===N.compositionEnd&&x&&(d=x.getData()):x=f.getPooled(t));var l=y.getPooled(a,n,r);if(d)l.data=d;else{var c=u(r);null!==c&&(l.data=c)}return m.accumulateTwoPhaseDispatches(l),l}function l(e,t){switch(e){case w.topCompositionEnd:return u(t);case w.topKeyPress:var n=t.which;return n!==T?null:(S=!0,E);case w.topTextInput:var r=t.data;return r===E&&S?null:r;default:return null}}function c(e,t){if(x){if(e===w.topCompositionEnd||s(e,t)){var n=x.getData();return f.release(x),x=null,n}return null}switch(e){case w.topPaste:return null;case w.topKeyPress:return t.which&&!a(t)?String.fromCharCode(t.which):null;case w.topCompositionEnd:return k?null:t.data;default:return null}}function _(e,t,n,r){var a;if(a=b?l(e,r):c(e,r),!a)return null;var o=M.getPooled(N.beforeInput,n,r);return 
o.data=a,m.accumulateTwoPhaseDispatches(o),o}var p=n(11),m=n(99),h=n(57),f=n(100),y=n(102),M=n(105),v=n(45),g=[9,13,27,32],L=229,D=h.canUseDOM&&"CompositionEvent"in window,Y=null;h.canUseDOM&&"documentMode"in document&&(Y=document.documentMode);var b=h.canUseDOM&&"TextEvent"in window&&!Y&&!r(),k=h.canUseDOM&&(!D||Y&&Y>8&&11>=Y),T=32,E=String.fromCharCode(T),w=p.topLevelTypes,N={beforeInput:{phasedRegistrationNames:{bubbled:v({onBeforeInput:null}),captured:v({onBeforeInputCapture:null})},dependencies:[w.topCompositionEnd,w.topKeyPress,w.topTextInput,w.topPaste]},compositionEnd:{phasedRegistrationNames:{bubbled:v({onCompositionEnd:null}),captured:v({onCompositionEndCapture:null})},dependencies:[w.topBlur,w.topCompositionEnd,w.topKeyDown,w.topKeyPress,w.topKeyUp,w.topMouseDown]},compositionStart:{phasedRegistrationNames:{bubbled:v({onCompositionStart:null}),captured:v({onCompositionStartCapture:null})},dependencies:[w.topBlur,w.topCompositionStart,w.topKeyDown,w.topKeyPress,w.topKeyUp,w.topMouseDown]},compositionUpdate:{phasedRegistrationNames:{bubbled:v({onCompositionUpdate:null}),captured:v({onCompositionUpdateCapture:null})},dependencies:[w.topBlur,w.topCompositionUpdate,w.topKeyDown,w.topKeyPress,w.topKeyUp,w.topMouseDown]}},S=!1,x=null,C={eventTypes:N,extractEvents:function(e,t,n,r){return[d(e,t,n,r),_(e,t,n,r)]}};e.exports=C},function(e,t,n){(function(t){"use strict";function r(e,t,n){var r=t.dispatchConfig.phasedRegistrationNames[n];return f(e,r)}function a(e,n,a){if("production"!==t.env.NODE_ENV&&!e)throw new Error("Dispatching id must not be null");var o=n?h.bubbled:h.captured,i=r(e,a,o);i&&(a._dispatchListeners=p(a._dispatchListeners,i),a._dispatchIDs=p(a._dispatchIDs,e))}function o(e){e&&e.dispatchConfig.phasedRegistrationNames&&_.injection.getInstanceHandle().traverseTwoPhase(e.dispatchMarker,a,e)}function i(e,t,n){if(n&&n.dispatchConfig.registrationName){var 
r=n.dispatchConfig.registrationName,a=f(e,r);a&&(n._dispatchListeners=p(n._dispatchListeners,a),n._dispatchIDs=p(n._dispatchIDs,e))}}function s(e){e&&e.dispatchConfig.registrationName&&i(e.dispatchMarker,null,e)}function u(e){m(e,o)}function d(e,t,n,r){_.injection.getInstanceHandle().traverseEnterLeave(n,r,i,e,t)}function l(e){m(e,s)}var c=n(11),_=n(75),p=n(77),m=n(78),h=c.PropagationPhases,f=_.getListener,y={accumulateTwoPhaseDispatches:u,accumulateDirectDispatches:l,accumulateEnterLeaveDispatches:d};e.exports=y}).call(t,n(9))},function(e,t,n){"use strict";function r(e){this._root=e,this._startText=this.getText(),this._fallbackText=null}var a=n(15),o=n(19),i=n(101);o(r.prototype,{getText:function(){return"value"in this._root?this._root.value:this._root[i()]},getData:function(){if(this._fallbackText)return this._fallbackText;var e,t,n=this._startText,r=n.length,a=this.getText(),o=a.length;for(e=0;r>e&&n[e]===a[e];e++);var i=r-e;for(t=1;i>=t&&n[r-t]===a[o-t];t++);var s=t>1?1-t:void 0;return this._fallbackText=a.slice(e,s),this._fallbackText}}),a.addPoolingTo(r),e.exports=r},function(e,t,n){"use strict";function r(){return!o&&a.canUseDOM&&(o="textContent"in document.documentElement?"textContent":"innerText"),o}var a=n(57),o=null;e.exports=r},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(103),o={data:null};a.augmentClass(r,o),e.exports=r},function(e,t,n){"use strict";function r(e,t,n){this.dispatchConfig=e,this.dispatchMarker=t,this.nativeEvent=n;var r=this.constructor.Interface;for(var a in r)if(r.hasOwnProperty(a)){var o=r[a];o?this[a]=o(n):this[a]=n[a]}var s=null!=n.defaultPrevented?n.defaultPrevented:n.returnValue===!1;
s?this.isDefaultPrevented=i.thatReturnsTrue:this.isDefaultPrevented=i.thatReturnsFalse,this.isPropagationStopped=i.thatReturnsFalse}var a=n(15),o=n(19),i=n(22),s=n(104),u={type:null,target:s,currentTarget:i.thatReturnsNull,eventPhase:null,bubbles:null,cancelable:null,timeStamp:function(e){return e.timeStamp||Date.now()},defaultPrevented:null,isTrusted:null};o(r.prototype,{preventDefault:function(){this.defaultPrevented=!0;var e=this.nativeEvent;e.preventDefault?e.preventDefault():e.returnValue=!1,this.isDefaultPrevented=i.thatReturnsTrue},stopPropagation:function(){var e=this.nativeEvent;e.stopPropagation?e.stopPropagation():e.cancelBubble=!0,this.isPropagationStopped=i.thatReturnsTrue},persist:function(){this.isPersistent=i.thatReturnsTrue},isPersistent:i.thatReturnsFalse,destructor:function(){var e=this.constructor.Interface;for(var t in e)this[t]=null;this.dispatchConfig=null,this.dispatchMarker=null,this.nativeEvent=null}}),r.Interface=u,r.augmentClass=function(e,t){var n=this,r=Object.create(n.prototype);o(r,e.prototype),e.prototype=r,e.prototype.constructor=e,e.Interface=o({},n.Interface,t),e.augmentClass=n.augmentClass,a.addPoolingTo(e,a.threeArgumentPooler)},a.addPoolingTo(r,a.threeArgumentPooler),e.exports=r},function(e,t){"use strict";function n(e){var t=e.target||e.srcElement||window;return 3===t.nodeType?t.parentNode:t}e.exports=n},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(103),o={data:null};a.augmentClass(r,o),e.exports=r},function(e,t,n){"use strict";function r(e){return"SELECT"===e.nodeName||"INPUT"===e.nodeName&&"file"===e.type}function a(e){var t=Y.getPooled(w.change,S,e);g.accumulateTwoPhaseDispatches(t),D.batchedUpdates(o,t)}function o(e){v.enqueueEvents(e),v.processEventQueue()}function i(e,t){N=e,S=t,N.attachEvent("onchange",a)}function s(){N&&(N.detachEvent("onchange",a),N=null,S=null)}function u(e,t,n){return e===E.topChange?n:void 0}function d(e,t,n){e===E.topFocus?(s(),i(t,n)):e===E.topBlur&&s()}function 
l(e,t){N=e,S=t,x=e.value,C=Object.getOwnPropertyDescriptor(e.constructor.prototype,"value"),Object.defineProperty(N,"value",j),N.attachEvent("onpropertychange",_)}function c(){N&&(delete N.value,N.detachEvent("onpropertychange",_),N=null,S=null,x=null,C=null)}function _(e){if("value"===e.propertyName){var t=e.srcElement.value;t!==x&&(x=t,a(e))}}function p(e,t,n){return e===E.topInput?n:void 0}function m(e,t,n){e===E.topFocus?(c(),l(t,n)):e===E.topBlur&&c()}function h(e,t,n){return e!==E.topSelectionChange&&e!==E.topKeyUp&&e!==E.topKeyDown||!N||N.value===x?void 0:(x=N.value,S)}function f(e){return"INPUT"===e.nodeName&&("checkbox"===e.type||"radio"===e.type)}function y(e,t,n){return e===E.topClick?n:void 0}var M=n(11),v=n(75),g=n(99),L=n(57),D=n(32),Y=n(103),b=n(81),k=n(107),T=n(45),E=M.topLevelTypes,w={change:{phasedRegistrationNames:{bubbled:T({onChange:null}),captured:T({onChangeCapture:null})},dependencies:[E.topBlur,E.topChange,E.topClick,E.topFocus,E.topInput,E.topKeyDown,E.topKeyUp,E.topSelectionChange]}},N=null,S=null,x=null,C=null,O=!1;L.canUseDOM&&(O=b("change")&&(!("documentMode"in document)||document.documentMode>8));var P=!1;L.canUseDOM&&(P=b("input")&&(!("documentMode"in document)||document.documentMode>9));var j={get:function(){return C.get.call(this)},set:function(e){x=""+e,C.set.call(this,e)}},H={eventTypes:w,extractEvents:function(e,t,n,a){var o,i;if(r(t)?O?o=u:i=d:k(t)?P?o=p:(o=h,i=m):f(t)&&(o=y),o){var s=o(e,t,n);if(s){var l=Y.getPooled(w.change,s,a);return g.accumulateTwoPhaseDispatches(l),l}}i&&i(e,t,n)}};e.exports=H},function(e,t){"use strict";function n(e){return e&&("INPUT"===e.nodeName&&r[e.type]||"TEXTAREA"===e.nodeName)}var r={color:!0,date:!0,datetime:!0,"datetime-local":!0,email:!0,month:!0,number:!0,password:!0,range:!0,search:!0,tel:!0,text:!0,time:!0,url:!0,week:!0};e.exports=n},function(e,t){"use strict";var n=0,r={createReactRootIndex:function(){return n++}};e.exports=r},function(e,t,n){"use strict";var 
r=n(45),a=[r({ResponderEventPlugin:null}),r({SimpleEventPlugin:null}),r({TapEventPlugin:null}),r({EnterLeaveEventPlugin:null}),r({ChangeEventPlugin:null}),r({SelectEventPlugin:null}),r({BeforeInputEventPlugin:null}),r({AnalyticsEventPlugin:null}),r({MobileSafariClickEventPlugin:null})];e.exports=a},function(e,t,n){"use strict";var r=n(11),a=n(99),o=n(111),i=n(73),s=n(45),u=r.topLevelTypes,d=i.getFirstReactDOM,l={mouseEnter:{registrationName:s({onMouseEnter:null}),dependencies:[u.topMouseOut,u.topMouseOver]},mouseLeave:{registrationName:s({onMouseLeave:null}),dependencies:[u.topMouseOut,u.topMouseOver]}},c=[null,null],_={eventTypes:l,extractEvents:function(e,t,n,r){if(e===u.topMouseOver&&(r.relatedTarget||r.fromElement))return null;if(e!==u.topMouseOut&&e!==u.topMouseOver)return null;var s;if(t.window===t)s=t;else{var _=t.ownerDocument;s=_?_.defaultView||_.parentWindow:window}var p,m;if(e===u.topMouseOut?(p=t,m=d(r.relatedTarget||r.toElement)||s):(p=s,m=t),p===m)return null;var h=p?i.getID(p):"",f=m?i.getID(m):"",y=o.getPooled(l.mouseLeave,h,r);y.type="mouseleave",y.target=p,y.relatedTarget=m;var M=o.getPooled(l.mouseEnter,f,r);return M.type="mouseenter",M.target=m,M.relatedTarget=p,a.accumulateEnterLeaveDispatches(y,M,h,f),c[0]=y,c[1]=M,c}};e.exports=_},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(112),o=n(80),i=n(113),s={screenX:null,screenY:null,clientX:null,clientY:null,ctrlKey:null,shiftKey:null,altKey:null,metaKey:null,getModifierState:i,button:function(e){var t=e.button;return"which"in e?t:2===t?2:4===t?1:0},buttons:null,relatedTarget:function(e){return e.relatedTarget||(e.fromElement===e.srcElement?e.toElement:e.fromElement)},pageX:function(e){return"pageX"in e?e.pageX:e.clientX+o.currentScrollLeft},pageY:function(e){return"pageY"in e?e.pageY:e.clientY+o.currentScrollTop}};a.augmentClass(r,s),e.exports=r},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(103),o=n(104),i={view:function(e){if(e.view)return 
e.view;var t=o(e);if(null!=t&&t.window===t)return t;var n=t.ownerDocument;return n?n.defaultView||n.parentWindow:window},detail:function(e){return e.detail||0}};a.augmentClass(r,i),e.exports=r},function(e,t){"use strict";function n(e){var t=this,n=t.nativeEvent;if(n.getModifierState)return n.getModifierState(e);var r=a[e];return r?!!n[r]:!1}function r(e){return n}var a={Alt:"altKey",Control:"ctrlKey",Meta:"metaKey",Shift:"shiftKey"};e.exports=r},function(e,t,n){"use strict";var r,a=n(50),o=n(57),i=a.injection.MUST_USE_ATTRIBUTE,s=a.injection.MUST_USE_PROPERTY,u=a.injection.HAS_BOOLEAN_VALUE,d=a.injection.HAS_SIDE_EFFECTS,l=a.injection.HAS_NUMERIC_VALUE,c=a.injection.HAS_POSITIVE_NUMERIC_VALUE,_=a.injection.HAS_OVERLOADED_BOOLEAN_VALUE;if(o.canUseDOM){var p=document.implementation;r=p&&p.hasFeature&&p.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure","1.1")}var m={isCustomAttribute:RegExp.prototype.test.bind(/^(data|aria)-[a-z_][a-z\d_.\-]*$/),Properties:{accept:null,acceptCharset:null,accessKey:null,action:null,allowFullScreen:i|u,allowTransparency:i,alt:null,async:u,autoComplete:null,autoPlay:u,cellPadding:null,cellSpacing:null,charSet:i,checked:s|u,classID:i,className:r?i:s,cols:i|c,colSpan:null,content:null,contentEditable:null,contextMenu:i,controls:s|u,coords:null,crossOrigin:null,data:null,dateTime:i,defer:u,dir:null,disabled:i|u,download:_,draggable:null,encType:null,form:i,formAction:i,formEncType:i,formMethod:i,formNoValidate:u,formTarget:i,frameBorder:i,headers:null,height:i,hidden:i|u,high:null,href:null,hrefLang:null,htmlFor:null,httpEquiv:null,icon:null,id:s,label:null,lang:null,list:i,loop:s|u,low:null,manifest:i,marginHeight:null,marginWidth:null,max:null,maxLength:i,media:i,mediaGroup:null,method:null,min:null,multiple:s|u,muted:s|u,name:null,noValidate:u,open:u,optimum:null,pattern:null,placeholder:null,poster:null,preload:null,radioGroup:null,readOnly:s|u,rel:null,required:u,role:i,rows:i|c,rowSpan:null,sandbox:null,scope:null,scoped:u
,scrolling:null,seamless:i|u,selected:s|u,shape:null,size:i|c,sizes:i,span:c,spellCheck:null,src:null,srcDoc:s,srcSet:i,start:l,step:null,style:null,tabIndex:null,target:null,title:null,type:null,useMap:null,value:s|d,width:i,wmode:i,autoCapitalize:null,autoCorrect:null,itemProp:i,itemScope:i|u,itemType:i,itemID:i,itemRef:i,property:null,unselectable:i},DOMAttributeNames:{acceptCharset:"accept-charset",className:"class",htmlFor:"for",httpEquiv:"http-equiv"},DOMPropertyNames:{autoCapitalize:"autocapitalize",autoComplete:"autocomplete",autoCorrect:"autocorrect",autoFocus:"autofocus",autoPlay:"autoplay",encType:"encoding",hrefLang:"hreflang",radioGroup:"radiogroup",spellCheck:"spellcheck",srcDoc:"srcdoc",srcSet:"srcset"}};e.exports=m},function(e,t,n){"use strict";var r=n(11),a=n(22),o=r.topLevelTypes,i={eventTypes:null,extractEvents:function(e,t,n,r){if(e===o.topTouchStart){var i=r.target;i&&!i.onclick&&(i.onclick=a)}}};e.exports=i},function(e,t,n){"use strict";var r=n(117),a={getDOMNode:function(){return r(this)}};e.exports=a},function(e,t,n){(function(t){"use strict";function r(e){if("production"!==t.env.NODE_ENV){var n=a.current;null!==n&&("production"!==t.env.NODE_ENV?d(n._warnedAboutRefsInRender,"%s is accessing getDOMNode or findDOMNode inside its render(). render() should be a pure function of props and state. It should never access something that requires stale data from the previous render, such as refs. 
Move this logic to componentDidMount and componentDidUpdate instead.",n.getName()||"A component"):null,n._warnedAboutRefsInRender=!0)}return null==e?null:u(e)?e:o.has(e)?i.getNodeFromInstance(e):("production"!==t.env.NODE_ENV?s(null==e.render||"function"!=typeof e.render,"Component (with keys: %s) contains `render` method but is not mounted in the DOM",Object.keys(e)):s(null==e.render||"function"!=typeof e.render),void("production"!==t.env.NODE_ENV?s(!1,"Element appears to be neither ReactComponent nor DOMNode (keys: %s)",Object.keys(e)):s(!1)))}var a=n(23),o=n(31),i=n(73),s=n(13),u=n(87),d=n(21);e.exports=r}).call(t,n(9))},function(e,t,n){"use strict";function r(){this.reinitializeTransaction()}var a=n(32),o=n(42),i=n(19),s=n(22),u={initialize:s,close:function(){_.isBatchingUpdates=!1}},d={initialize:s,close:a.flushBatchedUpdates.bind(a)},l=[d,u];i(r.prototype,o.Mixin,{getTransactionWrappers:function(){return l}});var c=new r,_={isBatchingUpdates:!1,batchedUpdates:function(e,t,n,r,a){var o=_.isBatchingUpdates;_.isBatchingUpdates=!0,o?e(t,n,r,a):c.perform(e,null,t,n,r,a)}};e.exports=_},function(e,t,n){"use strict";var r=n(120),a=n(116),o=n(43),i=n(17),s=n(12),u=i.createFactory("button"),d=s({onClick:!0,onDoubleClick:!0,onMouseDown:!0,onMouseMove:!0,onMouseUp:!0,onClickCapture:!0,onDoubleClickCapture:!0,onMouseDownCapture:!0,onMouseMoveCapture:!0,onMouseUpCapture:!0}),l=o.createClass({displayName:"ReactDOMButton",tagName:"BUTTON",mixins:[r,a],render:function(){var e={};for(var t in this.props)!this.props.hasOwnProperty(t)||this.props.disabled&&d[t]||(e[t]=this.props[t]);return u(e,this.props.children)}});e.exports=l},function(e,t,n){"use strict";var r=n(121),a={componentDidMount:function(){this.props.autoFocus&&r(this.getDOMNode())}};e.exports=a},function(e,t){"use strict";function n(e){try{e.focus()}catch(t){}}e.exports=n},function(e,t,n){"use strict";var 
r=n(11),a=n(123),o=n(116),i=n(43),s=n(17),u=s.createFactory("form"),d=i.createClass({displayName:"ReactDOMForm",tagName:"FORM",mixins:[o,a],render:function(){return u(this.props)},componentDidMount:function(){this.trapBubbledEvent(r.topLevelTypes.topReset,"reset"),this.trapBubbledEvent(r.topLevelTypes.topSubmit,"submit")}});e.exports=d},function(e,t,n){(function(t){"use strict";function r(e){e.remove()}var a=n(74),o=n(77),i=n(78),s=n(13),u={trapBubbledEvent:function(e,n){"production"!==t.env.NODE_ENV?s(this.isMounted(),"Must be mounted to trap events"):s(this.isMounted());var r=this.getDOMNode();"production"!==t.env.NODE_ENV?s(r,"LocalEventTrapMixin.trapBubbledEvent(...): Requires node to be rendered."):s(r);var i=a.trapBubbledEvent(e,n,r);this._localEventListeners=o(this._localEventListeners,i)},componentWillUnmount:function(){this._localEventListeners&&i(this._localEventListeners,r)}};e.exports=u}).call(t,n(9))},function(e,t,n){"use strict";var r=n(11),a=n(123),o=n(116),i=n(43),s=n(17),u=s.createFactory("img"),d=i.createClass({displayName:"ReactDOMImg",tagName:"IMG",mixins:[o,a],render:function(){return u(this.props)},componentDidMount:function(){this.trapBubbledEvent(r.topLevelTypes.topLoad,"load"),this.trapBubbledEvent(r.topLevelTypes.topError,"error")}});e.exports=d},function(e,t,n){"use strict";var r=n(11),a=n(123),o=n(116),i=n(43),s=n(17),u=s.createFactory("iframe"),d=i.createClass({displayName:"ReactDOMIframe",tagName:"IFRAME",mixins:[o,a],render:function(){return u(this.props)},componentDidMount:function(){this.trapBubbledEvent(r.topLevelTypes.topLoad,"load")}});e.exports=d},function(e,t,n){(function(t){"use strict";function r(){this.isMounted()&&this.forceUpdate()}var a=n(120),o=n(49),i=n(127),s=n(116),u=n(43),d=n(17),l=n(73),c=n(32),_=n(19),p=n(13),m=d.createFactory("input"),h={},f=u.createClass({displayName:"ReactDOMInput",tagName:"INPUT",mixins:[a,i.Mixin,s],getInitialState:function(){var 
e=this.props.defaultValue;return{initialChecked:this.props.defaultChecked||!1,initialValue:null!=e?e:null}},render:function(){var e=_({},this.props);e.defaultChecked=null,e.defaultValue=null;var t=i.getValue(this);e.value=null!=t?t:this.state.initialValue;var n=i.getChecked(this);return e.checked=null!=n?n:this.state.initialChecked,e.onChange=this._handleChange,m(e,this.props.children)},componentDidMount:function(){var e=l.getID(this.getDOMNode());h[e]=this},componentWillUnmount:function(){var e=this.getDOMNode(),t=l.getID(e);delete h[t]},componentDidUpdate:function(e,t,n){var r=this.getDOMNode();null!=this.props.checked&&o.setValueForProperty(r,"checked",this.props.checked||!1);var a=i.getValue(this);null!=a&&o.setValueForProperty(r,"value",""+a)},_handleChange:function(e){var n,a=i.getOnChange(this);a&&(n=a.call(this,e)),c.asap(r,this);var o=this.props.name;if("radio"===this.props.type&&null!=o){for(var s=this.getDOMNode(),u=s;u.parentNode;)u=u.parentNode;for(var d=u.querySelectorAll("input[name="+JSON.stringify(""+o)+'][type="radio"]'),_=0,m=d.length;m>_;_++){var f=d[_];if(f!==s&&f.form===s.form){var y=l.getID(f);"production"!==t.env.NODE_ENV?p(y,"ReactDOMInput: Mixing React and non-React radio inputs with the same `name` is not supported."):p(y);var M=h[y];"production"!==t.env.NODE_ENV?p(M,"ReactDOMInput: Unknown radio button ID %s.",y):p(M),c.asap(r,M)}}}return n}});e.exports=f}).call(t,n(9))},function(e,t,n){(function(t){"use strict";function r(e){"production"!==t.env.NODE_ENV?d(null==e.props.checkedLink||null==e.props.valueLink,"Cannot provide a checkedLink and a valueLink. If you want to use checkedLink, you probably don't want to use valueLink and vice versa."):d(null==e.props.checkedLink||null==e.props.valueLink)}function a(e){r(e),"production"!==t.env.NODE_ENV?d(null==e.props.value&&null==e.props.onChange,"Cannot provide a valueLink and a value or onChange event. 
If you want to use value or onChange, you probably don't want to use valueLink."):d(null==e.props.value&&null==e.props.onChange)}function o(e){r(e),"production"!==t.env.NODE_ENV?d(null==e.props.checked&&null==e.props.onChange,"Cannot provide a checkedLink and a checked property or onChange event. If you want to use checked or onChange, you probably don't want to use checkedLink"):d(null==e.props.checked&&null==e.props.onChange)}function i(e){this.props.valueLink.requestChange(e.target.value)}function s(e){this.props.checkedLink.requestChange(e.target.checked)}var u=n(128),d=n(13),l={button:!0,checkbox:!0,image:!0,hidden:!0,radio:!0,reset:!0,submit:!0},c={Mixin:{propTypes:{value:function(e,t,n){return!e[t]||l[e.type]||e.onChange||e.readOnly||e.disabled?null:new Error("You provided a `value` prop to a form field without an `onChange` handler. This will render a read-only field. If the field should be mutable use `defaultValue`. Otherwise, set either `onChange` or `readOnly`.")},checked:function(e,t,n){return!e[t]||e.onChange||e.readOnly||e.disabled?null:new Error("You provided a `checked` prop to a form field without an `onChange` handler. This will render a read-only field. If the field should be mutable use `defaultChecked`. 
Otherwise, set either `onChange` or `readOnly`.")},onChange:u.func}},getValue:function(e){return e.props.valueLink?(a(e),e.props.valueLink.value):e.props.value},getChecked:function(e){return e.props.checkedLink?(o(e),e.props.checkedLink.value):e.props.checked},getOnChange:function(e){return e.props.valueLink?(a(e),i):e.props.checkedLink?(o(e),s):e.props.onChange}};e.exports=c}).call(t,n(9))},function(e,t,n){"use strict";function r(e){function t(t,n,r,a,o){if(a=a||L,null==n[r]){var i=v[o];return t?new Error("Required "+i+" `"+r+"` was not specified in "+("`"+a+"`.")):null}return e(n,r,a,o)}var n=t.bind(null,!1);return n.isRequired=t.bind(null,!0),n}function a(e){function t(t,n,r,a){var o=t[n],i=h(o);if(i!==e){var s=v[a],u=f(o);return new Error("Invalid "+s+" `"+n+"` of type `"+u+"` "+("supplied to `"+r+"`, expected `"+e+"`."))}return null}return r(t)}function o(){return r(g.thatReturns(null))}function i(e){function t(t,n,r,a){var o=t[n];if(!Array.isArray(o)){var i=v[a],s=h(o);return new Error("Invalid "+i+" `"+n+"` of type "+("`"+s+"` supplied to `"+r+"`, expected an array."))}for(var u=0;u<o.length;u++){var d=e(o,u,r,a);if(d instanceof Error)return d}return null}return r(t)}function s(){function e(e,t,n,r){if(!y.isValidElement(e[t])){var a=v[r];return new Error("Invalid "+a+" `"+t+"` supplied to "+("`"+n+"`, expected a ReactElement."))}return null}return r(e)}function u(e){function t(t,n,r,a){if(!(t[n]instanceof e)){var o=v[a],i=e.name||L;return new Error("Invalid "+o+" `"+n+"` supplied to "+("`"+r+"`, expected instance of `"+i+"`."))}return null}return r(t)}function d(e){function t(t,n,r,a){for(var o=t[n],i=0;i<e.length;i++)if(o===e[i])return null;var s=v[a],u=JSON.stringify(e);return new Error("Invalid "+s+" `"+n+"` of value `"+o+"` "+("supplied to `"+r+"`, expected one of "+u+"."))}return r(t)}function l(e){function t(t,n,r,a){var o=t[n],i=h(o);if("object"!==i){var s=v[a];return new Error("Invalid "+s+" `"+n+"` of type "+("`"+i+"` supplied to `"+r+"`, expected 
an object."))}for(var u in o)if(o.hasOwnProperty(u)){var d=e(o,u,r,a);if(d instanceof Error)return d}return null}return r(t)}function c(e){function t(t,n,r,a){for(var o=0;o<e.length;o++){var i=e[o];if(null==i(t,n,r,a))return null}var s=v[a];return new Error("Invalid "+s+" `"+n+"` supplied to "+("`"+r+"`."))}return r(t)}function _(){function e(e,t,n,r){if(!m(e[t])){var a=v[r];return new Error("Invalid "+a+" `"+t+"` supplied to "+("`"+n+"`, expected a ReactNode."))}return null}return r(e)}function p(e){function t(t,n,r,a){var o=t[n],i=h(o);if("object"!==i){var s=v[a];return new Error("Invalid "+s+" `"+n+"` of type `"+i+"` "+("supplied to `"+r+"`, expected `object`."))}for(var u in e){var d=e[u];if(d){var l=d(o,u,r,a);if(l)return l}}return null}return r(t)}function m(e){switch(typeof e){case"number":case"string":case"undefined":return!0;case"boolean":return!e;case"object":if(Array.isArray(e))return e.every(m);if(null===e||y.isValidElement(e))return!0;e=M.extractIfFragment(e);for(var t in e)if(!m(e[t]))return!1;return!0;default:return!1}}function h(e){var t=typeof e;return Array.isArray(e)?"array":e instanceof RegExp?"object":t}function f(e){var t=h(e);if("object"===t){if(e instanceof Date)return"date";if(e instanceof RegExp)return"regexp"}return t}var y=n(17),M=n(16),v=n(40),g=n(22),L="<<anonymous>>",D=s(),Y=_(),b={array:a("array"),bool:a("boolean"),func:a("function"),number:a("number"),object:a("object"),string:a("string"),any:o(),arrayOf:i,element:D,instanceOf:u,node:Y,objectOf:l,oneOf:d,oneOfType:c,shape:p};e.exports=b},function(e,t,n){(function(t){"use strict";var r=n(116),a=n(43),o=n(17),i=n(21),s=o.createFactory("option"),u=a.createClass({displayName:"ReactDOMOption",tagName:"OPTION",mixins:[r],componentWillMount:function(){"production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?i(null==this.props.selected,"Use the `defaultValue` or `value` props on <select> instead of setting `selected` on <option>."):null)},render:function(){return 
s(this.props,this.props.children)}});e.exports=u}).call(t,n(9))},function(e,t,n){"use strict";function r(){if(this._pendingUpdate){this._pendingUpdate=!1;var e=s.getValue(this);null!=e&&this.isMounted()&&o(this,e)}}function a(e,t,n){if(null==e[t])return null;if(e.multiple){if(!Array.isArray(e[t]))return new Error("The `"+t+"` prop supplied to <select> must be an array if `multiple` is true.")}else if(Array.isArray(e[t]))return new Error("The `"+t+"` prop supplied to <select> must be a scalar value if `multiple` is false.")}function o(e,t){var n,r,a,o=e.getDOMNode().options;if(e.props.multiple){for(n={},r=0,a=t.length;a>r;r++)n[""+t[r]]=!0;for(r=0,a=o.length;a>r;r++){var i=n.hasOwnProperty(o[r].value);o[r].selected!==i&&(o[r].selected=i)}}else{for(n=""+t,r=0,a=o.length;a>r;r++)if(o[r].value===n)return void(o[r].selected=!0);o.length&&(o[0].selected=!0)}}var i=n(120),s=n(127),u=n(116),d=n(43),l=n(17),c=n(32),_=n(19),p=l.createFactory("select"),m=d.createClass({displayName:"ReactDOMSelect",tagName:"SELECT",mixins:[i,s.Mixin,u],propTypes:{defaultValue:a,value:a},render:function(){var e=_({},this.props);return e.onChange=this._handleChange,e.value=null,p(e,this.props.children)},componentWillMount:function(){this._pendingUpdate=!1},componentDidMount:function(){var e=s.getValue(this);null!=e?o(this,e):null!=this.props.defaultValue&&o(this,this.props.defaultValue)},componentDidUpdate:function(e){var t=s.getValue(this);null!=t?(this._pendingUpdate=!1,o(this,t)):!e.multiple!=!this.props.multiple&&(null!=this.props.defaultValue?o(this,this.props.defaultValue):o(this,this.props.multiple?[]:""))},_handleChange:function(e){var t,n=s.getOnChange(this);return n&&(t=n.call(this,e)),this._pendingUpdate=!0,c.asap(r,this),t}});e.exports=m},function(e,t,n){(function(t){"use strict";function r(){this.isMounted()&&this.forceUpdate()}var 
a=n(120),o=n(49),i=n(127),s=n(116),u=n(43),d=n(17),l=n(32),c=n(19),_=n(13),p=n(21),m=d.createFactory("textarea"),h=u.createClass({displayName:"ReactDOMTextarea",tagName:"TEXTAREA",mixins:[a,i.Mixin,s],getInitialState:function(){var e=this.props.defaultValue,n=this.props.children;null!=n&&("production"!==t.env.NODE_ENV&&("production"!==t.env.NODE_ENV?p(!1,"Use the `defaultValue` or `value` props instead of setting children on <textarea>."):null),"production"!==t.env.NODE_ENV?_(null==e,"If you supply `defaultValue` on a <textarea>, do not pass children."):_(null==e),Array.isArray(n)&&("production"!==t.env.NODE_ENV?_(n.length<=1,"<textarea> can only have at most one child."):_(n.length<=1),n=n[0]),e=""+n),null==e&&(e="");var r=i.getValue(this);return{initialValue:""+(null!=r?r:e)}},render:function(){var e=c({},this.props);return"production"!==t.env.NODE_ENV?_(null==e.dangerouslySetInnerHTML,"`dangerouslySetInnerHTML` does not make sense on <textarea>."):_(null==e.dangerouslySetInnerHTML),e.defaultValue=null,e.value=null,e.onChange=this._handleChange,m(e,this.state.initialValue)},componentDidUpdate:function(e,t,n){var r=i.getValue(this);if(null!=r){var a=this.getDOMNode();o.setValueForProperty(a,"value",""+r)}},_handleChange:function(e){var t,n=i.getOnChange(this);return n&&(t=n.call(this,e)),l.asap(r,this),t}});e.exports=h}).call(t,n(9))},function(e,t,n){"use strict";function r(e){var t=c.getID(e),n=l.getReactRootIDFromNodeID(t),r=c.findReactContainerForID(n),a=c.getFirstReactDOM(r);return a}function a(e,t){this.topLevelType=e,this.nativeEvent=t,this.ancestors=[]}function o(e){for(var t=c.getFirstReactDOM(m(e.nativeEvent))||window,n=t;n;)e.ancestors.push(n),n=r(n);for(var a=0,o=e.ancestors.length;o>a;a++){t=e.ancestors[a];var i=c.getID(t)||"";f._handleTopLevel(e.topLevelType,t,i,e.nativeEvent)}}function i(e){var t=h(window);e(t)}var 
s=n(133),u=n(57),d=n(15),l=n(25),c=n(73),_=n(32),p=n(19),m=n(104),h=n(134);p(a.prototype,{destructor:function(){this.topLevelType=null,this.nativeEvent=null,this.ancestors.length=0}}),d.addPoolingTo(a,d.twoArgumentPooler);var f={_enabled:!0,_handleTopLevel:null,WINDOW_HANDLE:u.canUseDOM?window:null,setHandleTopLevel:function(e){f._handleTopLevel=e},setEnabled:function(e){f._enabled=!!e},isEnabled:function(){return f._enabled},trapBubbledEvent:function(e,t,n){var r=n;return r?s.listen(r,t,f.dispatchEvent.bind(null,e)):null},trapCapturedEvent:function(e,t,n){var r=n;return r?s.capture(r,t,f.dispatchEvent.bind(null,e)):null},monitorScrollValue:function(e){var t=i.bind(null,e);s.listen(window,"scroll",t)},dispatchEvent:function(e,t){if(f._enabled){var n=a.getPooled(e,t);try{_.batchedUpdates(o,n)}finally{a.release(n)}}}};e.exports=f},function(e,t,n){(function(t){var r=n(22),a={listen:function(e,t,n){return e.addEventListener?(e.addEventListener(t,n,!1),{remove:function(){e.removeEventListener(t,n,!1)}}):e.attachEvent?(e.attachEvent("on"+t,n),{remove:function(){e.detachEvent("on"+t,n)}}):void 0},capture:function(e,n,a){return e.addEventListener?(e.addEventListener(n,a,!0),{remove:function(){e.removeEventListener(n,a,!0)}}):("production"!==t.env.NODE_ENV&&console.error("Attempted to listen to events during the capture phase on a browser that does not support the capture phase. 
Your application will not receive some events."),{remove:r})},registerDefault:function(){}};e.exports=a}).call(t,n(9))},function(e,t){"use strict";function n(e){return e===window?{x:window.pageXOffset||document.documentElement.scrollLeft,y:window.pageYOffset||document.documentElement.scrollTop}:{x:e.scrollLeft,y:e.scrollTop}}e.exports=n},function(e,t,n){"use strict";var r=n(50),a=n(75),o=n(91),i=n(43),s=n(82),u=n(74),d=n(41),l=n(93),c=n(34),_=n(26),p=n(32),m={Component:o.injection,Class:i.injection,DOMComponent:l.injection,DOMProperty:r.injection,EmptyComponent:s.injection,EventPluginHub:a.injection,EventEmitter:u.injection,NativeComponent:d.injection,Perf:c.injection,RootIndex:_.injection,Updates:p.injection};e.exports=m},function(e,t,n){"use strict";function r(){this.reinitializeTransaction(),this.renderToStaticMarkup=!1,this.reactMountReady=a.getPooled(null),this.putListenerQueue=u.getPooled()}var a=n(33),o=n(15),i=n(74),s=n(137),u=n(141),d=n(42),l=n(19),c={initialize:s.getSelectionInformation,close:s.restoreSelection},_={initialize:function(){var e=i.isEnabled();return i.setEnabled(!1),e},close:function(e){i.setEnabled(e)}},p={initialize:function(){this.reactMountReady.reset()},close:function(){this.reactMountReady.notifyAll()}},m={initialize:function(){this.putListenerQueue.reset()},close:function(){this.putListenerQueue.putListeners()}},h=[m,c,_,p],f={getTransactionWrappers:function(){return h},getReactMountReady:function(){return this.reactMountReady},getPutListenerQueue:function(){return this.putListenerQueue},destructor:function(){a.release(this.reactMountReady),this.reactMountReady=null,u.release(this.putListenerQueue),this.putListenerQueue=null}};l(r.prototype,d.Mixin,f),o.addPoolingTo(r),e.exports=r},function(e,t,n){"use strict";function r(e){return o(document.documentElement,e)}var a=n(138),o=n(85),i=n(121),s=n(140),u={hasSelectionCapabilities:function(e){return 
e&&("INPUT"===e.nodeName&&"text"===e.type||"TEXTAREA"===e.nodeName||"true"===e.contentEditable)},getSelectionInformation:function(){var e=s();return{focusedElem:e,selectionRange:u.hasSelectionCapabilities(e)?u.getSelection(e):null}},restoreSelection:function(e){var t=s(),n=e.focusedElem,a=e.selectionRange;t!==n&&r(n)&&(u.hasSelectionCapabilities(n)&&u.setSelection(n,a),i(n))},getSelection:function(e){var t;if("selectionStart"in e)t={start:e.selectionStart,end:e.selectionEnd};else if(document.selection&&"INPUT"===e.nodeName){var n=document.selection.createRange();n.parentElement()===e&&(t={start:-n.moveStart("character",-e.value.length),end:-n.moveEnd("character",-e.value.length)})}else t=a.getOffsets(e);return t||{start:0,end:0}},setSelection:function(e,t){var n=t.start,r=t.end;if("undefined"==typeof r&&(r=n),"selectionStart"in e)e.selectionStart=n,e.selectionEnd=Math.min(r,e.value.length);else if(document.selection&&"INPUT"===e.nodeName){var o=e.createTextRange();o.collapse(!0),o.moveStart("character",n),o.moveEnd("character",r-n),o.select()}else a.setOffsets(e,t)}};e.exports=u},function(e,t,n){"use strict";function r(e,t,n,r){return e===n&&t===r}function a(e){var t=document.selection,n=t.createRange(),r=n.text.length,a=n.duplicate();a.moveToElementText(e),a.setEndPoint("EndToStart",n);var o=a.text.length,i=o+r;return{start:o,end:i}}function o(e){var t=window.getSelection&&window.getSelection();if(!t||0===t.rangeCount)return null;var n=t.anchorNode,a=t.anchorOffset,o=t.focusNode,i=t.focusOffset,s=t.getRangeAt(0),u=r(t.anchorNode,t.anchorOffset,t.focusNode,t.focusOffset),d=u?0:s.toString().length,l=s.cloneRange();l.selectNodeContents(e),l.setEnd(s.startContainer,s.startOffset);var c=r(l.startContainer,l.startOffset,l.endContainer,l.endOffset),_=c?0:l.toString().length,p=_+d,m=document.createRange();m.setStart(n,a),m.setEnd(o,i);var h=m.collapsed;return{start:h?p:_,end:h?_:p}}function i(e,t){var n,r,a=document.selection.createRange().duplicate();"undefined"==typeof 
t.end?(n=t.start,r=n):t.start>t.end?(n=t.end,r=t.start):(n=t.start,r=t.end),a.moveToElementText(e),a.moveStart("character",n),a.setEndPoint("EndToStart",a),a.moveEnd("character",r-n),a.select()}function s(e,t){if(window.getSelection){var n=window.getSelection(),r=e[l()].length,a=Math.min(t.start,r),o="undefined"==typeof t.end?a:Math.min(t.end,r);if(!n.extend&&a>o){var i=o;o=a,a=i}var s=d(e,a),u=d(e,o);if(s&&u){var c=document.createRange();c.setStart(s.node,s.offset),n.removeAllRanges(),a>o?(n.addRange(c),n.extend(u.node,u.offset)):(c.setEnd(u.node,u.offset),n.addRange(c))}}}var u=n(57),d=n(139),l=n(101),c=u.canUseDOM&&"selection"in document&&!("getSelection"in window),_={getOffsets:c?a:o,setOffsets:c?i:s};e.exports=_},function(e,t){"use strict";function n(e){for(;e&&e.firstChild;)e=e.firstChild;return e}function r(e){for(;e;){if(e.nextSibling)return e.nextSibling;e=e.parentNode}}function a(e,t){for(var a=n(e),o=0,i=0;a;){if(3===a.nodeType){if(i=o+a.textContent.length,t>=o&&i>=t)return{node:a,offset:t-o};o=i}a=n(r(a))}}e.exports=a},function(e,t){function n(){try{return document.activeElement||document.body}catch(e){return document.body}}e.exports=n},function(e,t,n){"use strict";function r(){this.listenersToPut=[]}var a=n(15),o=n(74),i=n(19);i(r.prototype,{enqueuePutListener:function(e,t,n){this.listenersToPut.push({rootNodeID:e,propKey:t,propValue:n})},putListeners:function(){for(var e=0;e<this.listenersToPut.length;e++){var t=this.listenersToPut[e];o.putListener(t.rootNodeID,t.propKey,t.propValue)}},reset:function(){this.listenersToPut.length=0},destructor:function(){this.reset()}}),a.addPoolingTo(r),e.exports=r},function(e,t,n){"use strict";function r(e){if("selectionStart"in e&&s.hasSelectionCapabilities(e))return{start:e.selectionStart,end:e.selectionEnd};if(window.getSelection){var t=window.getSelection();return{anchorNode:t.anchorNode,anchorOffset:t.anchorOffset,focusNode:t.focusNode,focusOffset:t.focusOffset}}if(document.selection){var 
n=document.selection.createRange();return{parentElement:n.parentElement(),text:n.text,top:n.boundingTop,left:n.boundingLeft}}}function a(e){if(M||null==h||h!==d())return null;var t=r(h);if(!y||!_(y,t)){y=t;var n=u.getPooled(m.select,f,e);return n.type="select",n.target=h,i.accumulateTwoPhaseDispatches(n),n}}var o=n(11),i=n(99),s=n(137),u=n(103),d=n(140),l=n(107),c=n(45),_=n(143),p=o.topLevelTypes,m={select:{phasedRegistrationNames:{bubbled:c({onSelect:null}),captured:c({onSelectCapture:null})},dependencies:[p.topBlur,p.topContextMenu,p.topFocus,p.topKeyDown,p.topMouseDown,p.topMouseUp,p.topSelectionChange]}},h=null,f=null,y=null,M=!1,v={eventTypes:m,extractEvents:function(e,t,n,r){
switch(e){case p.topFocus:(l(t)||"true"===t.contentEditable)&&(h=t,f=n,y=null);break;case p.topBlur:h=null,f=null,y=null;break;case p.topMouseDown:M=!0;break;case p.topContextMenu:case p.topMouseUp:return M=!1,a(r);case p.topSelectionChange:case p.topKeyDown:case p.topKeyUp:return a(r)}}};e.exports=v},function(e,t){"use strict";function n(e,t){if(e===t)return!0;var n;for(n in e)if(e.hasOwnProperty(n)&&(!t.hasOwnProperty(n)||e[n]!==t[n]))return!1;for(n in t)if(t.hasOwnProperty(n)&&!e.hasOwnProperty(n))return!1;return!0}e.exports=n},function(e,t){"use strict";var n=Math.pow(2,53),r={createReactRootIndex:function(){return Math.ceil(Math.random()*n)}};e.exports=r},function(e,t,n){(function(t){"use strict";var r=n(11),a=n(10),o=n(99),i=n(146),s=n(103),u=n(147),d=n(148),l=n(111),c=n(151),_=n(152),p=n(112),m=n(153),h=n(149),f=n(13),y=n(45),M=n(21),v=r.topLevelTypes,g={blur:{phasedRegistrationNames:{bubbled:y({onBlur:!0}),captured:y({onBlurCapture:!0})}},click:{phasedRegistrationNames:{bubbled:y({onClick:!0}),captured:y({onClickCapture:!0})}},contextMenu:{phasedRegistrationNames:{bubbled:y({onContextMenu:!0}),captured:y({onContextMenuCapture:!0})}},copy:{phasedRegistrationNames:{bubbled:y({onCopy:!0}),captured:y({onCopyCapture:!0})}},cut:{phasedRegistrationNames:{bubbled:y({onCut:!0}),captured:y({onCutCapture:!0})}},doubleClick:{phasedRegistrationNames:{bubbled:y({onDoubleClick:!0}),captured:y({onDoubleClickCapture:!0})}},drag:{phasedRegistrationNames:{bubbled:y({onDrag:!0}),captured:y({onDragCapture:!0})}},dragEnd:{phasedRegistrationNames:{bubbled:y({onDragEnd:!0}),captured:y({onDragEndCapture:!0})}},dragEnter:{phasedRegistrationNames:{bubbled:y({onDragEnter:!0}),captured:y({onDragEnterCapture:!0})}},dragExit:{phasedRegistrationNames:{bubbled:y({onDragExit:!0}),captured:y({onDragExitCapture:!0})}},dragLeave:{phasedRegistrationNames:{bubbled:y({onDragLeave:!0}),captured:y({onDragLeaveCapture:!0})}},dragOver:{phasedRegistrationNames:{bubbled:y({onDragOver:!0}),captured:y({on
DragOverCapture:!0})}},dragStart:{phasedRegistrationNames:{bubbled:y({onDragStart:!0}),captured:y({onDragStartCapture:!0})}},drop:{phasedRegistrationNames:{bubbled:y({onDrop:!0}),captured:y({onDropCapture:!0})}},focus:{phasedRegistrationNames:{bubbled:y({onFocus:!0}),captured:y({onFocusCapture:!0})}},input:{phasedRegistrationNames:{bubbled:y({onInput:!0}),captured:y({onInputCapture:!0})}},keyDown:{phasedRegistrationNames:{bubbled:y({onKeyDown:!0}),captured:y({onKeyDownCapture:!0})}},keyPress:{phasedRegistrationNames:{bubbled:y({onKeyPress:!0}),captured:y({onKeyPressCapture:!0})}},keyUp:{phasedRegistrationNames:{bubbled:y({onKeyUp:!0}),captured:y({onKeyUpCapture:!0})}},load:{phasedRegistrationNames:{bubbled:y({onLoad:!0}),captured:y({onLoadCapture:!0})}},error:{phasedRegistrationNames:{bubbled:y({onError:!0}),captured:y({onErrorCapture:!0})}},mouseDown:{phasedRegistrationNames:{bubbled:y({onMouseDown:!0}),captured:y({onMouseDownCapture:!0})}},mouseMove:{phasedRegistrationNames:{bubbled:y({onMouseMove:!0}),captured:y({onMouseMoveCapture:!0})}},mouseOut:{phasedRegistrationNames:{bubbled:y({onMouseOut:!0}),captured:y({onMouseOutCapture:!0})}},mouseOver:{phasedRegistrationNames:{bubbled:y({onMouseOver:!0}),captured:y({onMouseOverCapture:!0})}},mouseUp:{phasedRegistrationNames:{bubbled:y({onMouseUp:!0}),captured:y({onMouseUpCapture:!0})}},paste:{phasedRegistrationNames:{bubbled:y({onPaste:!0}),captured:y({onPasteCapture:!0})}},reset:{phasedRegistrationNames:{bubbled:y({onReset:!0}),captured:y({onResetCapture:!0})}},scroll:{phasedRegistrationNames:{bubbled:y({onScroll:!0}),captured:y({onScrollCapture:!0})}},submit:{phasedRegistrationNames:{bubbled:y({onSubmit:!0}),captured:y({onSubmitCapture:!0})}},touchCancel:{phasedRegistrationNames:{bubbled:y({onTouchCancel:!0}),captured:y({onTouchCancelCapture:!0})}},touchEnd:{phasedRegistrationNames:{bubbled:y({onTouchEnd:!0}),captured:y({onTouchEndCapture:!0})}},touchMove:{phasedRegistrationNames:{bubbled:y({onTouchMove:!0}),captured
:y({onTouchMoveCapture:!0})}},touchStart:{phasedRegistrationNames:{bubbled:y({onTouchStart:!0}),captured:y({onTouchStartCapture:!0})}},wheel:{phasedRegistrationNames:{bubbled:y({onWheel:!0}),captured:y({onWheelCapture:!0})}}},L={topBlur:g.blur,topClick:g.click,topContextMenu:g.contextMenu,topCopy:g.copy,topCut:g.cut,topDoubleClick:g.doubleClick,topDrag:g.drag,topDragEnd:g.dragEnd,topDragEnter:g.dragEnter,topDragExit:g.dragExit,topDragLeave:g.dragLeave,topDragOver:g.dragOver,topDragStart:g.dragStart,topDrop:g.drop,topError:g.error,topFocus:g.focus,topInput:g.input,topKeyDown:g.keyDown,topKeyPress:g.keyPress,topKeyUp:g.keyUp,topLoad:g.load,topMouseDown:g.mouseDown,topMouseMove:g.mouseMove,topMouseOut:g.mouseOut,topMouseOver:g.mouseOver,topMouseUp:g.mouseUp,topPaste:g.paste,topReset:g.reset,topScroll:g.scroll,topSubmit:g.submit,topTouchCancel:g.touchCancel,topTouchEnd:g.touchEnd,topTouchMove:g.touchMove,topTouchStart:g.touchStart,topWheel:g.wheel};for(var D in L)L[D].dependencies=[D];var Y={eventTypes:g,executeDispatch:function(e,n,r){var o=a.executeDispatch(e,n,r);"production"!==t.env.NODE_ENV?M("boolean"!=typeof o,"Returning `false` from an event handler is deprecated and will be ignored in a future release. 
Instead, manually call e.stopPropagation() or e.preventDefault(), as appropriate."):null,o===!1&&(e.stopPropagation(),e.preventDefault())},extractEvents:function(e,n,r,a){var y=L[e];if(!y)return null;var M;switch(e){case v.topInput:case v.topLoad:case v.topError:case v.topReset:case v.topSubmit:M=s;break;case v.topKeyPress:if(0===h(a))return null;case v.topKeyDown:case v.topKeyUp:M=d;break;case v.topBlur:case v.topFocus:M=u;break;case v.topClick:if(2===a.button)return null;case v.topContextMenu:case v.topDoubleClick:case v.topMouseDown:case v.topMouseMove:case v.topMouseOut:case v.topMouseOver:case v.topMouseUp:M=l;break;case v.topDrag:case v.topDragEnd:case v.topDragEnter:case v.topDragExit:case v.topDragLeave:case v.topDragOver:case v.topDragStart:case v.topDrop:M=c;break;case v.topTouchCancel:case v.topTouchEnd:case v.topTouchMove:case v.topTouchStart:M=_;break;case v.topScroll:M=p;break;case v.topWheel:M=m;break;case v.topCopy:case v.topCut:case v.topPaste:M=i}"production"!==t.env.NODE_ENV?f(M,"SimpleEventPlugin: Unhandled event type, `%s`.",e):f(M);var g=M.getPooled(y,r,a);return o.accumulateTwoPhaseDispatches(g),g}};e.exports=Y}).call(t,n(9))},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(103),o={clipboardData:function(e){return"clipboardData"in e?e.clipboardData:window.clipboardData}};a.augmentClass(r,o),e.exports=r},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(112),o={relatedTarget:null};a.augmentClass(r,o),e.exports=r},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var 
a=n(112),o=n(149),i=n(150),s=n(113),u={key:i,location:null,ctrlKey:null,shiftKey:null,altKey:null,metaKey:null,repeat:null,locale:null,getModifierState:s,charCode:function(e){return"keypress"===e.type?o(e):0},keyCode:function(e){return"keydown"===e.type||"keyup"===e.type?e.keyCode:0},which:function(e){return"keypress"===e.type?o(e):"keydown"===e.type||"keyup"===e.type?e.keyCode:0}};a.augmentClass(r,u),e.exports=r},function(e,t){"use strict";function n(e){var t,n=e.keyCode;return"charCode"in e?(t=e.charCode,0===t&&13===n&&(t=13)):t=n,t>=32||13===t?t:0}e.exports=n},function(e,t,n){"use strict";function r(e){if(e.key){var t=o[e.key]||e.key;if("Unidentified"!==t)return t}if("keypress"===e.type){var n=a(e);return 13===n?"Enter":String.fromCharCode(n)}return"keydown"===e.type||"keyup"===e.type?i[e.keyCode]||"Unidentified":""}var a=n(149),o={Esc:"Escape",Spacebar:" ",Left:"ArrowLeft",Up:"ArrowUp",Right:"ArrowRight",Down:"ArrowDown",Del:"Delete",Win:"OS",Menu:"ContextMenu",Apps:"ContextMenu",Scroll:"ScrollLock",MozPrintableKey:"Unidentified"},i={8:"Backspace",9:"Tab",12:"Clear",13:"Enter",16:"Shift",17:"Control",18:"Alt",19:"Pause",20:"CapsLock",27:"Escape",32:" ",33:"PageUp",34:"PageDown",35:"End",36:"Home",37:"ArrowLeft",38:"ArrowUp",39:"ArrowRight",40:"ArrowDown",45:"Insert",46:"Delete",112:"F1",113:"F2",114:"F3",115:"F4",116:"F5",117:"F6",118:"F7",119:"F8",120:"F9",121:"F10",122:"F11",123:"F12",144:"NumLock",145:"ScrollLock",224:"Meta"};e.exports=r},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(111),o={dataTransfer:null};a.augmentClass(r,o),e.exports=r},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(112),o=n(113),i={touches:null,targetTouches:null,changedTouches:null,altKey:null,metaKey:null,ctrlKey:null,shiftKey:null,getModifierState:o};a.augmentClass(r,i),e.exports=r},function(e,t,n){"use strict";function r(e,t,n){a.call(this,e,t,n)}var a=n(111),o={deltaX:function(e){return"deltaX"in e?e.deltaX:"wheelDeltaX"in 
e?-e.wheelDeltaX:0},deltaY:function(e){return"deltaY"in e?e.deltaY:"wheelDeltaY"in e?-e.wheelDeltaY:"wheelDelta"in e?-e.wheelDelta:0},deltaZ:null,deltaMode:null};a.augmentClass(r,o),e.exports=r},function(e,t,n){"use strict";var r=n(50),a=r.injection.MUST_USE_ATTRIBUTE,o={Properties:{clipPath:a,cx:a,cy:a,d:a,dx:a,dy:a,fill:a,fillOpacity:a,fontFamily:a,fontSize:a,fx:a,fy:a,gradientTransform:a,gradientUnits:a,markerEnd:a,markerMid:a,markerStart:a,offset:a,opacity:a,patternContentUnits:a,patternUnits:a,points:a,preserveAspectRatio:a,r:a,rx:a,ry:a,spreadMethod:a,stopColor:a,stopOpacity:a,stroke:a,strokeDasharray:a,strokeLinecap:a,strokeOpacity:a,strokeWidth:a,textAnchor:a,transform:a,version:a,viewBox:a,x1:a,x2:a,x:a,y1:a,y2:a,y:a},DOMAttributeNames:{clipPath:"clip-path",fillOpacity:"fill-opacity",fontFamily:"font-family",fontSize:"font-size",gradientTransform:"gradientTransform",gradientUnits:"gradientUnits",markerEnd:"marker-end",markerMid:"marker-mid",markerStart:"marker-start",patternContentUnits:"patternContentUnits",patternUnits:"patternUnits",preserveAspectRatio:"preserveAspectRatio",spreadMethod:"spreadMethod",stopColor:"stop-color",stopOpacity:"stop-opacity",strokeDasharray:"stroke-dasharray",strokeLinecap:"stroke-linecap",strokeOpacity:"stroke-opacity",strokeWidth:"stroke-width",textAnchor:"text-anchor",viewBox:"viewBox"}};e.exports=o},function(e,t,n){(function(t){"use strict";function r(e){var n=o.createFactory(e),r=a.createClass({tagName:e.toUpperCase(),displayName:"ReactFullPageComponent"+e,componentWillUnmount:function(){"production"!==t.env.NODE_ENV?i(!1,"%s tried to unmount. Because of cross-browser quirks it is impossible to unmount some top-level components (eg <html>, <head>, and <body>) reliably and efficiently. 
To fix this, have a single top-level component that never unmounts render these elements.",this.constructor.displayName):i(!1)},render:function(){return n(this.props)}});return r}var a=n(43),o=n(17),i=n(13);e.exports=r}).call(t,n(9))},function(e,t,n){"use strict";function r(e){return Math.floor(100*e)/100}function a(e,t,n){e[t]=(e[t]||0)+n}var o=n(50),i=n(157),s=n(73),u=n(34),d=n(158),l={_allMeasurements:[],_mountStack:[0],_injected:!1,start:function(){l._injected||u.injection.injectMeasure(l.measure),l._allMeasurements.length=0,u.enableMeasure=!0},stop:function(){u.enableMeasure=!1},getLastMeasurements:function(){return l._allMeasurements},printExclusive:function(e){e=e||l._allMeasurements;var t=i.getExclusiveSummary(e);console.table(t.map(function(e){return{"Component class name":e.componentName,"Total inclusive time (ms)":r(e.inclusive),"Exclusive mount time (ms)":r(e.exclusive),"Exclusive render time (ms)":r(e.render),"Mount time per instance (ms)":r(e.exclusive/e.count),"Render time per instance (ms)":r(e.render/e.count),Instances:e.count}}))},printInclusive:function(e){e=e||l._allMeasurements;var t=i.getInclusiveSummary(e);console.table(t.map(function(e){return{"Owner > component":e.componentName,"Inclusive time (ms)":r(e.time),Instances:e.count}})),console.log("Total time:",i.getTotalTime(e).toFixed(2)+" ms")},getMeasurementsSummaryMap:function(e){var t=i.getInclusiveSummary(e,!0);return t.map(function(e){return{"Owner > component":e.componentName,"Wasted time (ms)":e.time,Instances:e.count}})},printWasted:function(e){e=e||l._allMeasurements,console.table(l.getMeasurementsSummaryMap(e)),console.log("Total time:",i.getTotalTime(e).toFixed(2)+" ms")},printDOM:function(e){e=e||l._allMeasurements;var t=i.getDOMSummary(e);console.table(t.map(function(e){var t={};return t[o.ID_ATTRIBUTE_NAME]=e.id,t.type=e.type,t.args=JSON.stringify(e.args),t})),console.log("Total time:",i.getTotalTime(e).toFixed(2)+" ms")},_recordWrite:function(e,t,n,r){var 
a=l._allMeasurements[l._allMeasurements.length-1].writes;a[e]=a[e]||[],a[e].push({type:t,time:n,args:r})},measure:function(e,t,n){return function(){for(var r=[],o=0,i=arguments.length;i>o;o++)r.push(arguments[o]);var u,c,_;if("_renderNewRootComponent"===t||"flushBatchedUpdates"===t)return l._allMeasurements.push({exclusive:{},inclusive:{},render:{},counts:{},writes:{},displayNames:{},totalTime:0}),_=d(),c=n.apply(this,r),l._allMeasurements[l._allMeasurements.length-1].totalTime=d()-_,c;if("_mountImageIntoNode"===t||"ReactDOMIDOperations"===e){if(_=d(),c=n.apply(this,r),u=d()-_,"_mountImageIntoNode"===t){var p=s.getID(r[1]);l._recordWrite(p,t,u,r[0])}else"dangerouslyProcessChildrenUpdates"===t?r[0].forEach(function(e){var t={};null!==e.fromIndex&&(t.fromIndex=e.fromIndex),null!==e.toIndex&&(t.toIndex=e.toIndex),null!==e.textContent&&(t.textContent=e.textContent),null!==e.markupIndex&&(t.markup=r[1][e.markupIndex]),l._recordWrite(e.parentID,e.type,u,t)}):l._recordWrite(r[0],t,u,Array.prototype.slice.call(r,1));return c}if("ReactCompositeComponent"!==e||"mountComponent"!==t&&"updateComponent"!==t&&"_renderValidatedComponent"!==t)return n.apply(this,r);if("string"==typeof this._currentElement.type)return n.apply(this,r);var m="mountComponent"===t?r[0]:this._rootNodeID,h="_renderValidatedComponent"===t,f="mountComponent"===t,y=l._mountStack,M=l._allMeasurements[l._allMeasurements.length-1];if(h?a(M.counts,m,1):f&&y.push(0),_=d(),c=n.apply(this,r),u=d()-_,h)a(M.render,m,u);else if(f){var v=y.pop();y[y.length-1]+=u,a(M.exclusive,m,u-v),a(M.inclusive,m,u)}else a(M.inclusive,m,u);return M.displayNames[m]={current:this.getName(),owner:this._currentElement._owner?this._currentElement._owner.getName():"<root>"},c}}};e.exports=l},function(e,t,n){function r(e){for(var t=0,n=0;n<e.length;n++){var r=e[n];t+=r.totalTime}return t}function a(e){for(var t=[],n=0;n<e.length;n++){var r,a=e[n];for(r in 
a.writes)a.writes[r].forEach(function(e){t.push({id:r,type:l[e.type]||e.type,args:e.args})})}return t}function o(e){for(var t,n={},r=0;r<e.length;r++){var a=e[r],o=u({},a.exclusive,a.inclusive);for(var i in o)t=a.displayNames[i].current,n[t]=n[t]||{componentName:t,inclusive:0,exclusive:0,render:0,count:0},a.render[i]&&(n[t].render+=a.render[i]),a.exclusive[i]&&(n[t].exclusive+=a.exclusive[i]),a.inclusive[i]&&(n[t].inclusive+=a.inclusive[i]),a.counts[i]&&(n[t].count+=a.counts[i])}var s=[];for(t in n)n[t].exclusive>=d&&s.push(n[t]);return s.sort(function(e,t){return t.exclusive-e.exclusive}),s}function i(e,t){for(var n,r={},a=0;a<e.length;a++){var o,i=e[a],l=u({},i.exclusive,i.inclusive);t&&(o=s(i));for(var c in l)if(!t||o[c]){var _=i.displayNames[c];n=_.owner+" > "+_.current,r[n]=r[n]||{componentName:n,time:0,count:0},i.inclusive[c]&&(r[n].time+=i.inclusive[c]),i.counts[c]&&(r[n].count+=i.counts[c])}}var p=[];for(n in r)r[n].time>=d&&p.push(r[n]);return p.sort(function(e,t){return t.time-e.time}),p}function s(e){var t={},n=Object.keys(e.writes),r=u({},e.exclusive,e.inclusive);for(var a in r){for(var o=!1,i=0;i<n.length;i++)if(0===n[i].indexOf(a)){o=!0;break}!o&&e.counts[a]>0&&(t[a]=!0)}return t}var u=n(19),d=1.2,l={_mountImageIntoNode:"set innerHTML",INSERT_MARKUP:"set innerHTML",MOVE_EXISTING:"move",REMOVE_NODE:"remove",TEXT_CONTENT:"set textContent",updatePropertyByID:"update attribute",deletePropertyByID:"delete attribute",updateStylesByID:"update styles",updateInnerHTMLByID:"set innerHTML",dangerouslyReplaceNodeWithMarkupByID:"replace"},c={getExclusiveSummary:o,getInclusiveSummary:i,getDOMSummary:a,getTotalTime:r};e.exports=c},function(e,t,n){var r=n(159);r&&r.now||(r=Date);var a=r.now.bind(r);e.exports=a},function(e,t,n){"use strict";var r,a=n(57);a.canUseDOM&&(r=window.performance||window.msPerformance||window.webkitPerformance),e.exports=r||{}},function(e,t,n){(function(t){"use strict";function 
r(e){"production"!==t.env.NODE_ENV?c(o.isValidElement(e),"renderToString(): You must pass a valid ReactElement."):c(o.isValidElement(e));var n;try{var r=i.createReactRootID();return n=u.getPooled(!1),n.perform(function(){var t=l(e,null),a=t.mountComponent(r,n,d);return s.addChecksumToMarkup(a)},null)}finally{u.release(n)}}function a(e){"production"!==t.env.NODE_ENV?c(o.isValidElement(e),"renderToStaticMarkup(): You must pass a valid ReactElement."):c(o.isValidElement(e));var n;try{var r=i.createReactRootID();return n=u.getPooled(!0),n.perform(function(){var t=l(e,null);return t.mountComponent(r,n,d)},null)}finally{u.release(n)}}var o=n(17),i=n(25),s=n(83),u=n(161),d=n(20),l=n(89),c=n(13);e.exports={renderToString:r,renderToStaticMarkup:a}}).call(t,n(9))},function(e,t,n){"use strict";function r(e){this.reinitializeTransaction(),this.renderToStaticMarkup=e,this.reactMountReady=o.getPooled(null),this.putListenerQueue=i.getPooled()}var a=n(15),o=n(33),i=n(141),s=n(42),u=n(19),d=n(22),l={initialize:function(){this.reactMountReady.reset()},close:d},c={initialize:function(){this.putListenerQueue.reset()},close:d},_=[c,l],p={getTransactionWrappers:function(){return _},getReactMountReady:function(){return this.reactMountReady},getPutListenerQueue:function(){return this.putListenerQueue},destructor:function(){o.release(this.reactMountReady),this.reactMountReady=null,i.release(this.putListenerQueue),this.putListenerQueue=null}};u(r.prototype,s.Mixin,p),a.addPoolingTo(r),e.exports=r},function(e,t,n){(function(t){"use strict";function r(e){return"production"!==t.env.NODE_ENV?o(a.isValidElement(e),"onlyChild must be passed a children with exactly one child."):o(a.isValidElement(e)),e}var a=n(17),o=n(13);e.exports=r}).call(t,n(9))},function(e,t,n){"use strict";var r=n(7),a=n(164),o=n(165),i=n(255),s=n(256);e.exports=r.createClass({displayName:"App",render:function(){return 
r.createElement("div",{className:"main-container"},r.createElement(a,null),r.createElement(o,null),r.createElement(i,{url:s}))}})},function(e,t,n){"use strict";var r=n(7);e.exports=r.createClass({displayName:"Header",render:function(){return r.createElement("div",{className:"header"},"In fulfillment of Busbud's front-end-challenge-b --- By Nathaniel Kitzke")}})},function(e,t,n){"use strict";var r=n(7),a=n(166),o="https://napi.busbud.com/x-departures/dr5reg/f25dvk/2016-01-14",i=n(254),s=function(e,t,n){for(var r=0;r<e.length;r++)if(e[r][t]===n)return r;return-1};e.exports=r.createClass({displayName:"Content",getInitialState:function(){return{travelData:{departures:[],locations:[],operators:[]},showButton:!0,showLoading:!1}},requestTravelData:function(e){var t=this,n=new XMLHttpRequest,r=e===o?o+"?adult=1¤cy=usd":e+"&adult=1¤cy=usd";n.open("get",r,!0),n.setRequestHeader("Accept","application/vnd.busbud+json; version=2; profile=https://schema.busbud.com/v2/"),n.onload=function(){var r=JSON.parse(n.responseText),a=e===o?r:t.state.travelData;return a.complete?(t.setState({travelData:a}),void t.setState({showLoading:!1})):(a.operators=a.operators.concat(r.operators),a.departures=a.departures.concat(r.departures),a.complete=r.complete,t.setState({travelData:a}),console.log(a),void t.requestTravelData(o+"/poll?index="+a.departures.length))},n.send()},onButtonClick:function(){this.setState({showButton:!1}),this.setState({showLoading:!0}),this.requestTravelData(o)},render:function(){return r.createElement("div",{className:"content-container"},r.createElement("div",{className:"page-permanent-announcement"},"Hey New York! 
Busbud wants to get you to Igloofest."),r.createElement("div",{className:"on-boarding-section"},this.state.showButton?r.createElement("input",{className:"the-button btn-default",ref:"refreshButton",type:"button",onClick:this.onButtonClick,value:"Click here to find a bus!"}):null,this.state.showLoading?r.createElement("span",{className:"glyphicon glyphicon-refresh spinning"}):null),r.createElement("div",{className:"departures"},this.state.travelData.departures.map(function(e,t){return[r.createElement("div",{key:t,className:"panel panel-primary departure-info-panel"},r.createElement("div",{className:"panel-heading"},r.createElement("h3",{className:"panel-title"},"Bus ",t+1)),r.createElement("div",{className:"panel-body"},r.createElement("div",{className:"flex-row"},r.createElement("div",{className:"info-box"},"Departure Time: ",r.createElement("span",{className:"info-variable"},a(e.departure_time,a.ISO_8601).format("ddd, MMM Do YYYY, h:mm a"))),r.createElement("div",{className:"info-box"},"Arrival Time: ",r.createElement("span",{className:"info-variable"},a(e.arrival_time,a.ISO_8601).format("ddd, MMM Do YYYY, h:mm a")))),r.createElement("div",{className:"flex-row"},r.createElement("div",{className:"info-box"},"Location: ",r.createElement("span",{className:"info-variable"},this.state.travelData.locations[s(this.state.travelData.locations,"id",e.origin_location_id)].name)),r.createElement("div",{className:"info-box"},"Price: ",r.createElement("span",{className:"info-variable"},"$"+e.prices.total/100+" per adult")))),r.createElement("div",{className:"panel-footer"},r.createElement("img",{className:"mini-bus-pic",src:i})))]},this)))}})},function(e,t,n){(function(e){//! moment.js
//! version : 2.10.6
//! authors : Tim Wood, Iskren Chernev, Moment.js contributors
//! license : MIT
//! momentjs.com
!function(t,n){e.exports=n()}(this,function(){"use strict";function t(){return Hn.apply(null,arguments)}function r(e){Hn=e}function a(e){return"[object Array]"===Object.prototype.toString.call(e)}function o(e){return e instanceof Date||"[object Date]"===Object.prototype.toString.call(e)}function i(e,t){var n,r=[];for(n=0;n<e.length;++n)r.push(t(e[n],n));return r}function s(e,t){return Object.prototype.hasOwnProperty.call(e,t)}function u(e,t){for(var n in t)s(t,n)&&(e[n]=t[n]);return s(t,"toString")&&(e.toString=t.toString),s(t,"valueOf")&&(e.valueOf=t.valueOf),e}function d(e,t,n,r){return Se(e,t,n,r,!0).utc()}function l(){return{empty:!1,unusedTokens:[],unusedInput:[],overflow:-2,charsLeftOver:0,nullInput:!1,invalidMonth:null,invalidFormat:!1,userInvalidated:!1,iso:!1}}function c(e){return null==e._pf&&(e._pf=l()),e._pf}function _(e){if(null==e._isValid){var t=c(e);e._isValid=!(isNaN(e._d.getTime())||!(t.overflow<0)||t.empty||t.invalidMonth||t.invalidWeekday||t.nullInput||t.invalidFormat||t.userInvalidated),e._strict&&(e._isValid=e._isValid&&0===t.charsLeftOver&&0===t.unusedTokens.length&&void 0===t.bigHour)}return e._isValid}function p(e){var t=d(NaN);return null!=e?u(c(t),e):c(t).userInvalidated=!0,t}function m(e,t){var n,r,a;if("undefined"!=typeof t._isAMomentObject&&(e._isAMomentObject=t._isAMomentObject),"undefined"!=typeof t._i&&(e._i=t._i),"undefined"!=typeof t._f&&(e._f=t._f),"undefined"!=typeof t._l&&(e._l=t._l),"undefined"!=typeof t._strict&&(e._strict=t._strict),"undefined"!=typeof t._tzm&&(e._tzm=t._tzm),"undefined"!=typeof t._isUTC&&(e._isUTC=t._isUTC),"undefined"!=typeof t._offset&&(e._offset=t._offset),"undefined"!=typeof t._pf&&(e._pf=c(t)),"undefined"!=typeof t._locale&&(e._locale=t._locale),Rn.length>0)for(n in Rn)r=Rn[n],a=t[r],"undefined"!=typeof a&&(e[r]=a);return e}function h(e){m(this,e),this._d=new Date(null!=e._d?e._d.getTime():NaN),An===!1&&(An=!0,t.updateOffset(this),An=!1)}function f(e){return e instanceof 
h||null!=e&&null!=e._isAMomentObject}function y(e){return 0>e?Math.ceil(e):Math.floor(e)}function M(e){var t=+e,n=0;return 0!==t&&isFinite(t)&&(n=y(t)),n}function v(e,t,n){var r,a=Math.min(e.length,t.length),o=Math.abs(e.length-t.length),i=0;for(r=0;a>r;r++)(n&&e[r]!==t[r]||!n&&M(e[r])!==M(t[r]))&&i++;return i+o}function g(){}function L(e){return e?e.toLowerCase().replace("_","-"):e}function D(e){for(var t,n,r,a,o=0;o<e.length;){for(a=L(e[o]).split("-"),t=a.length,n=L(e[o+1]),n=n?n.split("-"):null;t>0;){if(r=Y(a.slice(0,t).join("-")))return r;if(n&&n.length>=t&&v(a,n,!0)>=t-1)break;t--}o++}return null}function Y(t){var r=null;if(!Vn[t]&&"undefined"!=typeof e&&e&&e.exports)try{r=In._abbr,n(168)("./"+t),b(r)}catch(a){}return Vn[t]}function b(e,t){var n;return e&&(n="undefined"==typeof t?T(e):k(e,t),n&&(In=n)),In._abbr}function k(e,t){return null!==t?(t.abbr=e,Vn[e]=Vn[e]||new g,Vn[e].set(t),b(e),Vn[e]):(delete Vn[e],null)}function T(e){var t;if(e&&e._locale&&e._locale._abbr&&(e=e._locale._abbr),!e)return In;if(!a(e)){if(t=Y(e))return t;e=[e]}return D(e)}function E(e,t){var n=e.toLowerCase();Fn[n]=Fn[n+"s"]=Fn[t]=e}function w(e){return"string"==typeof e?Fn[e]||Fn[e.toLowerCase()]:void 0}function N(e){var t,n,r={};for(n in e)s(e,n)&&(t=w(n),t&&(r[t]=e[n]));return r}function S(e,n){return function(r){return null!=r?(C(this,e,r),t.updateOffset(this,n),this):x(this,e)}}function x(e,t){return e._d["get"+(e._isUTC?"UTC":"")+t]()}function C(e,t,n){return e._d["set"+(e._isUTC?"UTC":"")+t](n)}function O(e,t){var n;if("object"==typeof e)for(n in e)this.set(n,e[n]);else if(e=w(e),"function"==typeof this[e])return this[e](t);return this}function P(e,t,n){var r=""+Math.abs(e),a=t-r.length,o=e>=0;return(o?n?"+":"":"-")+Math.pow(10,Math.max(0,a)).toString().substr(1)+r}function j(e,t,n,r){var a=r;"string"==typeof r&&(a=function(){return this[r]()}),e&&(Bn[e]=a),t&&(Bn[t[0]]=function(){return P(a.apply(this,arguments),t[1],t[2])}),n&&(Bn[n]=function(){return 
this.localeData().ordinal(a.apply(this,arguments),e)})}function H(e){return e.match(/\[[\s\S]/)?e.replace(/^\[|\]$/g,""):e.replace(/\\/g,"")}function I(e){var t,n,r=e.match(Un);for(t=0,n=r.length;n>t;t++)Bn[r[t]]?r[t]=Bn[r[t]]:r[t]=H(r[t]);return function(a){var o="";for(t=0;n>t;t++)o+=r[t]instanceof Function?r[t].call(a,e):r[t];return o}}function R(e,t){return e.isValid()?(t=A(t,e.localeData()),zn[t]=zn[t]||I(t),zn[t](e)):e.localeData().invalidDate()}function A(e,t){function n(e){return t.longDateFormat(e)||e}var r=5;for(Wn.lastIndex=0;r>=0&&Wn.test(e);)e=e.replace(Wn,n),Wn.lastIndex=0,r-=1;return e}function V(e){return"function"==typeof e&&"[object Function]"===Object.prototype.toString.call(e)}function F(e,t,n){ir[e]=V(t)?t:function(e){return e&&n?n:t}}function U(e,t){return s(ir,e)?ir[e](t._strict,t._locale):new RegExp(W(e))}function W(e){return e.replace("\\","").replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g,function(e,t,n,r,a){return t||n||r||a}).replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&")}function z(e,t){var n,r=t;for("string"==typeof e&&(e=[e]),"number"==typeof t&&(r=function(e,n){n[t]=M(e)}),n=0;n<e.length;n++)sr[e[n]]=r}function B(e,t){z(e,function(e,n,r,a){r._w=r._w||{},t(e,r._w,r,a)})}function J(e,t,n){null!=t&&s(sr,e)&&sr[e](t,n._a,n,e)}function G(e,t){return new Date(Date.UTC(e,t+1,0)).getUTCDate()}function q(e){return this._months[e.month()]}function K(e){return this._monthsShort[e.month()]}function Q(e,t,n){var r,a,o;for(this._monthsParse||(this._monthsParse=[],this._longMonthsParse=[],this._shortMonthsParse=[]),r=0;12>r;r++){if(a=d([2e3,r]),n&&!this._longMonthsParse[r]&&(this._longMonthsParse[r]=new RegExp("^"+this.months(a,"").replace(".","")+"$","i"),this._shortMonthsParse[r]=new RegExp("^"+this.monthsShort(a,"").replace(".","")+"$","i")),n||this._monthsParse[r]||(o="^"+this.months(a,"")+"|^"+this.monthsShort(a,""),this._monthsParse[r]=new RegExp(o.replace(".",""),"i")),n&&"MMMM"===t&&this._longMonthsParse[r].test(e))return 
r;if(n&&"MMM"===t&&this._shortMonthsParse[r].test(e))return r;if(!n&&this._monthsParse[r].test(e))return r}}function X(e,t){var n;return"string"==typeof t&&(t=e.localeData().monthsParse(t),"number"!=typeof t)?e:(n=Math.min(e.date(),G(e.year(),t)),e._d["set"+(e._isUTC?"UTC":"")+"Month"](t,n),e)}function $(e){return null!=e?(X(this,e),t.updateOffset(this,!0),this):x(this,"Month")}function Z(){return G(this.year(),this.month())}function ee(e){var t,n=e._a;return n&&-2===c(e).overflow&&(t=n[dr]<0||n[dr]>11?dr:n[lr]<1||n[lr]>G(n[ur],n[dr])?lr:n[cr]<0||n[cr]>24||24===n[cr]&&(0!==n[_r]||0!==n[pr]||0!==n[mr])?cr:n[_r]<0||n[_r]>59?_r:n[pr]<0||n[pr]>59?pr:n[mr]<0||n[mr]>999?mr:-1,c(e)._overflowDayOfYear&&(ur>t||t>lr)&&(t=lr),c(e).overflow=t),e}function te(e){t.suppressDeprecationWarnings===!1&&"undefined"!=typeof console&&console.warn&&console.warn("Deprecation warning: "+e)}function ne(e,t){var n=!0;return u(function(){return n&&(te(e+"\n"+(new Error).stack),n=!1),t.apply(this,arguments)},t)}function re(e,t){yr[e]||(te(t),yr[e]=!0)}function ae(e){var t,n,r=e._i,a=Mr.exec(r);if(a){for(c(e).iso=!0,t=0,n=vr.length;n>t;t++)if(vr[t][1].exec(r)){e._f=vr[t][0];break}for(t=0,n=gr.length;n>t;t++)if(gr[t][1].exec(r)){e._f+=(a[6]||" ")+gr[t][0];break}r.match(rr)&&(e._f+="Z"),Ye(e)}else e._isValid=!1}function oe(e){var n=Lr.exec(e._i);return null!==n?void(e._d=new Date(+n[1])):(ae(e),void(e._isValid===!1&&(delete e._isValid,t.createFromInputFallback(e))))}function ie(e,t,n,r,a,o,i){var s=new Date(e,t,n,r,a,o,i);return 1970>e&&s.setFullYear(e),s}function se(e){var t=new Date(Date.UTC.apply(null,arguments));return 1970>e&&t.setUTCFullYear(e),t}function ue(e){return de(e)?366:365}function de(e){return e%4===0&&e%100!==0||e%400===0}function le(){return de(this.year())}function ce(e,t,n){var r,a=n-t,o=n-e.day();return o>a&&(o-=7),a-7>o&&(o+=7),r=xe(e).add(o,"d"),{week:Math.ceil(r.dayOfYear()/7),year:r.year()}}function _e(e){return ce(e,this._week.dow,this._week.doy).week}function 
pe(){return this._week.dow}function me(){return this._week.doy}function he(e){var t=this.localeData().week(this);return null==e?t:this.add(7*(e-t),"d")}function fe(e){var t=ce(this,1,4).week;return null==e?t:this.add(7*(e-t),"d")}function ye(e,t,n,r,a){var o,i=6+a-r,s=se(e,0,1+i),u=s.getUTCDay();return a>u&&(u+=7),n=null!=n?1*n:a,o=1+i+7*(t-1)-u+n,{year:o>0?e:e-1,dayOfYear:o>0?o:ue(e-1)+o}}function Me(e){var t=Math.round((this.clone().startOf("day")-this.clone().startOf("year"))/864e5)+1;return null==e?t:this.add(e-t,"d")}function ve(e,t,n){return null!=e?e:null!=t?t:n}function ge(e){var t=new Date;return e._useUTC?[t.getUTCFullYear(),t.getUTCMonth(),t.getUTCDate()]:[t.getFullYear(),t.getMonth(),t.getDate()]}function Le(e){var t,n,r,a,o=[];if(!e._d){for(r=ge(e),e._w&&null==e._a[lr]&&null==e._a[dr]&&De(e),e._dayOfYear&&(a=ve(e._a[ur],r[ur]),e._dayOfYear>ue(a)&&(c(e)._overflowDayOfYear=!0),n=se(a,0,e._dayOfYear),e._a[dr]=n.getUTCMonth(),e._a[lr]=n.getUTCDate()),t=0;3>t&&null==e._a[t];++t)e._a[t]=o[t]=r[t];for(;7>t;t++)e._a[t]=o[t]=null==e._a[t]?2===t?1:0:e._a[t];24===e._a[cr]&&0===e._a[_r]&&0===e._a[pr]&&0===e._a[mr]&&(e._nextDay=!0,e._a[cr]=0),e._d=(e._useUTC?se:ie).apply(null,o),null!=e._tzm&&e._d.setUTCMinutes(e._d.getUTCMinutes()-e._tzm),e._nextDay&&(e._a[cr]=24)}}function De(e){var t,n,r,a,o,i,s;t=e._w,null!=t.GG||null!=t.W||null!=t.E?(o=1,i=4,n=ve(t.GG,e._a[ur],ce(xe(),1,4).year),r=ve(t.W,1),a=ve(t.E,1)):(o=e._locale._week.dow,i=e._locale._week.doy,n=ve(t.gg,e._a[ur],ce(xe(),o,i).year),r=ve(t.w,1),null!=t.d?(a=t.d,o>a&&++r):a=null!=t.e?t.e+o:o),s=ye(n,r,a,i,o),e._a[ur]=s.year,e._dayOfYear=s.dayOfYear}function Ye(e){if(e._f===t.ISO_8601)return void ae(e);e._a=[],c(e).empty=!0;var 
n,r,a,o,i,s=""+e._i,u=s.length,d=0;for(a=A(e._f,e._locale).match(Un)||[],n=0;n<a.length;n++)o=a[n],r=(s.match(U(o,e))||[])[0],r&&(i=s.substr(0,s.indexOf(r)),i.length>0&&c(e).unusedInput.push(i),s=s.slice(s.indexOf(r)+r.length),d+=r.length),Bn[o]?(r?c(e).empty=!1:c(e).unusedTokens.push(o),J(o,r,e)):e._strict&&!r&&c(e).unusedTokens.push(o);c(e).charsLeftOver=u-d,s.length>0&&c(e).unusedInput.push(s),c(e).bigHour===!0&&e._a[cr]<=12&&e._a[cr]>0&&(c(e).bigHour=void 0),e._a[cr]=be(e._locale,e._a[cr],e._meridiem),Le(e),ee(e)}function be(e,t,n){var r;return null==n?t:null!=e.meridiemHour?e.meridiemHour(t,n):null!=e.isPM?(r=e.isPM(n),r&&12>t&&(t+=12),r||12!==t||(t=0),t):t}function ke(e){var t,n,r,a,o;if(0===e._f.length)return c(e).invalidFormat=!0,void(e._d=new Date(NaN));for(a=0;a<e._f.length;a++)o=0,t=m({},e),null!=e._useUTC&&(t._useUTC=e._useUTC),t._f=e._f[a],Ye(t),_(t)&&(o+=c(t).charsLeftOver,o+=10*c(t).unusedTokens.length,c(t).score=o,(null==r||r>o)&&(r=o,n=t));u(e,n||t)}function Te(e){if(!e._d){var t=N(e._i);e._a=[t.year,t.month,t.day||t.date,t.hour,t.minute,t.second,t.millisecond],Le(e)}}function Ee(e){var t=new h(ee(we(e)));return t._nextDay&&(t.add(1,"d"),t._nextDay=void 0),t}function we(e){var t=e._i,n=e._f;return e._locale=e._locale||T(e._l),null===t||void 0===n&&""===t?p({nullInput:!0}):("string"==typeof t&&(e._i=t=e._locale.preparse(t)),f(t)?new h(ee(t)):(a(n)?ke(e):n?Ye(e):o(t)?e._d=t:Ne(e),e))}function Ne(e){var n=e._i;void 0===n?e._d=new Date:o(n)?e._d=new Date(+n):"string"==typeof n?oe(e):a(n)?(e._a=i(n.slice(0),function(e){return parseInt(e,10)}),Le(e)):"object"==typeof n?Te(e):"number"==typeof n?e._d=new Date(n):t.createFromInputFallback(e)}function Se(e,t,n,r,a){var o={};return"boolean"==typeof n&&(r=n,n=void 0),o._isAMomentObject=!0,o._useUTC=o._isUTC=a,o._l=n,o._i=e,o._f=t,o._strict=r,Ee(o)}function xe(e,t,n,r){return Se(e,t,n,r,!1)}function Ce(e,t){var n,r;if(1===t.length&&a(t[0])&&(t=t[0]),!t.length)return 
xe();for(n=t[0],r=1;r<t.length;++r)(!t[r].isValid()||t[r][e](n))&&(n=t[r]);return n}function Oe(){var e=[].slice.call(arguments,0);return Ce("isBefore",e)}function Pe(){var e=[].slice.call(arguments,0);return Ce("isAfter",e)}function je(e){var t=N(e),n=t.year||0,r=t.quarter||0,a=t.month||0,o=t.week||0,i=t.day||0,s=t.hour||0,u=t.minute||0,d=t.second||0,l=t.millisecond||0;this._milliseconds=+l+1e3*d+6e4*u+36e5*s,this._days=+i+7*o,this._months=+a+3*r+12*n,this._data={},this._locale=T(),this._bubble()}function He(e){return e instanceof je}function Ie(e,t){j(e,0,0,function(){var e=this.utcOffset(),n="+";return 0>e&&(e=-e,n="-"),n+P(~~(e/60),2)+t+P(~~e%60,2)})}function Re(e){var t=(e||"").match(rr)||[],n=t[t.length-1]||[],r=(n+"").match(Tr)||["-",0,0],a=+(60*r[1])+M(r[2]);return"+"===r[0]?a:-a}function Ae(e,n){var r,a;return n._isUTC?(r=n.clone(),a=(f(e)||o(e)?+e:+xe(e))-+r,r._d.setTime(+r._d+a),t.updateOffset(r,!1),r):xe(e).local()}function Ve(e){return 15*-Math.round(e._d.getTimezoneOffset()/15)}function Fe(e,n){var r,a=this._offset||0;return null!=e?("string"==typeof e&&(e=Re(e)),Math.abs(e)<16&&(e=60*e),!this._isUTC&&n&&(r=Ve(this)),this._offset=e,this._isUTC=!0,null!=r&&this.add(r,"m"),a!==e&&(!n||this._changeInProgress?rt(this,$e(e-a,"m"),1,!1):this._changeInProgress||(this._changeInProgress=!0,t.updateOffset(this,!0),this._changeInProgress=null)),this):this._isUTC?a:Ve(this)}function Ue(e,t){return null!=e?("string"!=typeof e&&(e=-e),this.utcOffset(e,t),this):-this.utcOffset()}function We(e){return this.utcOffset(0,e)}function ze(e){return this._isUTC&&(this.utcOffset(0,e),this._isUTC=!1,e&&this.subtract(Ve(this),"m")),this}function Be(){return this._tzm?this.utcOffset(this._tzm):"string"==typeof this._i&&this.utcOffset(Re(this._i)),this}function Je(e){return e=e?xe(e).utcOffset():0,(this.utcOffset()-e)%60===0}function Ge(){return this.utcOffset()>this.clone().month(0).utcOffset()||this.utcOffset()>this.clone().month(5).utcOffset()}function 
qe(){if("undefined"!=typeof this._isDSTShifted)return this._isDSTShifted;var e={};if(m(e,this),e=we(e),e._a){var t=e._isUTC?d(e._a):xe(e._a);this._isDSTShifted=this.isValid()&&v(e._a,t.toArray())>0}else this._isDSTShifted=!1;return this._isDSTShifted}function Ke(){return!this._isUTC}function Qe(){return this._isUTC}function Xe(){return this._isUTC&&0===this._offset}function $e(e,t){var n,r,a,o=e,i=null;return He(e)?o={ms:e._milliseconds,d:e._days,M:e._months}:"number"==typeof e?(o={},t?o[t]=e:o.milliseconds=e):(i=Er.exec(e))?(n="-"===i[1]?-1:1,o={y:0,d:M(i[lr])*n,h:M(i[cr])*n,m:M(i[_r])*n,s:M(i[pr])*n,ms:M(i[mr])*n}):(i=wr.exec(e))?(n="-"===i[1]?-1:1,o={y:Ze(i[2],n),M:Ze(i[3],n),d:Ze(i[4],n),h:Ze(i[5],n),m:Ze(i[6],n),s:Ze(i[7],n),w:Ze(i[8],n)}):null==o?o={}:"object"==typeof o&&("from"in o||"to"in o)&&(a=tt(xe(o.from),xe(o.to)),o={},o.ms=a.milliseconds,o.M=a.months),r=new je(o),He(e)&&s(e,"_locale")&&(r._locale=e._locale),r}function Ze(e,t){var n=e&&parseFloat(e.replace(",","."));return(isNaN(n)?0:n)*t}function et(e,t){var n={milliseconds:0,months:0};return n.months=t.month()-e.month()+12*(t.year()-e.year()),e.clone().add(n.months,"M").isAfter(t)&&--n.months,n.milliseconds=+t-+e.clone().add(n.months,"M"),n}function tt(e,t){var n;return t=Ae(t,e),e.isBefore(t)?n=et(e,t):(n=et(t,e),n.milliseconds=-n.milliseconds,n.months=-n.months),n}function nt(e,t){return function(n,r){var a,o;return null===r||isNaN(+r)||(re(t,"moment()."+t+"(period, number) is deprecated. 
Please use moment()."+t+"(number, period)."),o=n,n=r,r=o),n="string"==typeof n?+n:n,a=$e(n,r),rt(this,a,e),this}}function rt(e,n,r,a){var o=n._milliseconds,i=n._days,s=n._months;a=null==a?!0:a,o&&e._d.setTime(+e._d+o*r),i&&C(e,"Date",x(e,"Date")+i*r),s&&X(e,x(e,"Month")+s*r),a&&t.updateOffset(e,i||s)}function at(e,t){var n=e||xe(),r=Ae(n,this).startOf("day"),a=this.diff(r,"days",!0),o=-6>a?"sameElse":-1>a?"lastWeek":0>a?"lastDay":1>a?"sameDay":2>a?"nextDay":7>a?"nextWeek":"sameElse";return this.format(t&&t[o]||this.localeData().calendar(o,this,xe(n)))}function ot(){return new h(this)}function it(e,t){var n;return t=w("undefined"!=typeof t?t:"millisecond"),"millisecond"===t?(e=f(e)?e:xe(e),+this>+e):(n=f(e)?+e:+xe(e),n<+this.clone().startOf(t))}function st(e,t){var n;return t=w("undefined"!=typeof t?t:"millisecond"),"millisecond"===t?(e=f(e)?e:xe(e),+e>+this):(n=f(e)?+e:+xe(e),+this.clone().endOf(t)<n)}function ut(e,t,n){return this.isAfter(e,n)&&this.isBefore(t,n)}function dt(e,t){var n;return t=w(t||"millisecond"),"millisecond"===t?(e=f(e)?e:xe(e),+this===+e):(n=+xe(e),+this.clone().startOf(t)<=n&&n<=+this.clone().endOf(t))}function lt(e,t,n){var r,a,o=Ae(e,this),i=6e4*(o.utcOffset()-this.utcOffset());return t=w(t),"year"===t||"month"===t||"quarter"===t?(a=ct(this,o),"quarter"===t?a/=3:"year"===t&&(a/=12)):(r=this-o,a="second"===t?r/1e3:"minute"===t?r/6e4:"hour"===t?r/36e5:"day"===t?(r-i)/864e5:"week"===t?(r-i)/6048e5:r),n?a:y(a)}function ct(e,t){var n,r,a=12*(t.year()-e.year())+(t.month()-e.month()),o=e.clone().add(a,"months");return 0>t-o?(n=e.clone().add(a-1,"months"),r=(t-o)/(o-n)):(n=e.clone().add(a+1,"months"),r=(t-o)/(n-o)),-(a+r)}function _t(){return this.clone().locale("en").format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ")}function pt(){var e=this.clone().utc();return 0<e.year()&&e.year()<=9999?"function"==typeof Date.prototype.toISOString?this.toDate().toISOString():R(e,"YYYY-MM-DD[T]HH:mm:ss.SSS[Z]"):R(e,"YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]")}function mt(e){var 
n=R(this,e||t.defaultFormat);return this.localeData().postformat(n)}function ht(e,t){return this.isValid()?$e({to:this,from:e}).locale(this.locale()).humanize(!t):this.localeData().invalidDate()}function ft(e){return this.from(xe(),e)}function yt(e,t){return this.isValid()?$e({from:this,to:e}).locale(this.locale()).humanize(!t):this.localeData().invalidDate()}function Mt(e){return this.to(xe(),e)}function vt(e){var t;return void 0===e?this._locale._abbr:(t=T(e),null!=t&&(this._locale=t),this)}function gt(){return this._locale}function Lt(e){switch(e=w(e)){case"year":this.month(0);case"quarter":case"month":this.date(1);case"week":case"isoWeek":case"day":this.hours(0);case"hour":this.minutes(0);case"minute":this.seconds(0);case"second":this.milliseconds(0)}return"week"===e&&this.weekday(0),"isoWeek"===e&&this.isoWeekday(1),"quarter"===e&&this.month(3*Math.floor(this.month()/3)),this}function Dt(e){return e=w(e),void 0===e||"millisecond"===e?this:this.startOf(e).add(1,"isoWeek"===e?"week":e).subtract(1,"ms")}function Yt(){return+this._d-6e4*(this._offset||0)}function bt(){return Math.floor(+this/1e3)}function kt(){return this._offset?new Date(+this):this._d}function Tt(){var e=this;return[e.year(),e.month(),e.date(),e.hour(),e.minute(),e.second(),e.millisecond()]}function Et(){var e=this;return{years:e.year(),months:e.month(),date:e.date(),hours:e.hours(),minutes:e.minutes(),seconds:e.seconds(),milliseconds:e.milliseconds()}}function wt(){return _(this)}function Nt(){return u({},c(this))}function St(){return c(this).overflow}function xt(e,t){j(0,[e,e.length],0,t)}function Ct(e,t,n){return ce(xe([e,11,31+t-n]),t,n).week}function Ot(e){var t=ce(this,this.localeData()._week.dow,this.localeData()._week.doy).year;return null==e?t:this.add(e-t,"y")}function Pt(e){var t=ce(this,1,4).year;return null==e?t:this.add(e-t,"y")}function jt(){return Ct(this.year(),1,4)}function Ht(){var e=this.localeData()._week;return Ct(this.year(),e.dow,e.doy)}function It(e){return 
null==e?Math.ceil((this.month()+1)/3):this.month(3*(e-1)+this.month()%3)}function Rt(e,t){return"string"!=typeof e?e:isNaN(e)?(e=t.weekdaysParse(e),"number"==typeof e?e:null):parseInt(e,10)}function At(e){return this._weekdays[e.day()]}function Vt(e){return this._weekdaysShort[e.day()]}function Ft(e){return this._weekdaysMin[e.day()]}function Ut(e){var t,n,r;for(this._weekdaysParse=this._weekdaysParse||[],t=0;7>t;t++)if(this._weekdaysParse[t]||(n=xe([2e3,1]).day(t),r="^"+this.weekdays(n,"")+"|^"+this.weekdaysShort(n,"")+"|^"+this.weekdaysMin(n,""),this._weekdaysParse[t]=new RegExp(r.replace(".",""),"i")),this._weekdaysParse[t].test(e))return t}function Wt(e){var t=this._isUTC?this._d.getUTCDay():this._d.getDay();return null!=e?(e=Rt(e,this.localeData()),this.add(e-t,"d")):t}function zt(e){var t=(this.day()+7-this.localeData()._week.dow)%7;return null==e?t:this.add(e-t,"d")}function Bt(e){return null==e?this.day()||7:this.day(this.day()%7?e:e-7)}function Jt(e,t){j(e,0,0,function(){return this.localeData().meridiem(this.hours(),this.minutes(),t)})}function Gt(e,t){return t._meridiemParse}function qt(e){return"p"===(e+"").toLowerCase().charAt(0)}function Kt(e,t,n){return e>11?n?"pm":"PM":n?"am":"AM"}function Qt(e,t){t[mr]=M(1e3*("0."+e))}function Xt(){return this._isUTC?"UTC":""}function $t(){return this._isUTC?"Coordinated Universal Time":""}function Zt(e){return xe(1e3*e)}function en(){return xe.apply(null,arguments).parseZone()}function tn(e,t,n){var r=this._calendar[e];return"function"==typeof r?r.call(t,n):r}function nn(e){var t=this._longDateFormat[e],n=this._longDateFormat[e.toUpperCase()];return t||!n?t:(this._longDateFormat[e]=n.replace(/MMMM|MM|DD|dddd/g,function(e){return e.slice(1)}),this._longDateFormat[e])}function rn(){return this._invalidDate}function an(e){return this._ordinal.replace("%d",e)}function on(e){return e}function sn(e,t,n,r){var a=this._relativeTime[n];return"function"==typeof a?a(e,t,n,r):a.replace(/%d/i,e)}function un(e,t){var 
n=this._relativeTime[e>0?"future":"past"];return"function"==typeof n?n(t):n.replace(/%s/i,t)}function dn(e){var t,n;for(n in e)t=e[n],"function"==typeof t?this[n]=t:this["_"+n]=t;this._ordinalParseLenient=new RegExp(this._ordinalParse.source+"|"+/\d{1,2}/.source)}function ln(e,t,n,r){var a=T(),o=d().set(r,t);return a[n](o,e)}function cn(e,t,n,r,a){if("number"==typeof e&&(t=e,e=void 0),e=e||"",null!=t)return ln(e,t,n,a);var o,i=[];for(o=0;r>o;o++)i[o]=ln(e,o,n,a);return i}function _n(e,t){return cn(e,t,"months",12,"month")}function pn(e,t){return cn(e,t,"monthsShort",12,"month")}function mn(e,t){return cn(e,t,"weekdays",7,"day")}function hn(e,t){return cn(e,t,"weekdaysShort",7,"day")}function fn(e,t){return cn(e,t,"weekdaysMin",7,"day")}function yn(){var e=this._data;return this._milliseconds=Xr(this._milliseconds),this._days=Xr(this._days),this._months=Xr(this._months),e.milliseconds=Xr(e.milliseconds),e.seconds=Xr(e.seconds),e.minutes=Xr(e.minutes),e.hours=Xr(e.hours),e.months=Xr(e.months),e.years=Xr(e.years),this}function Mn(e,t,n,r){var a=$e(t,n);return e._milliseconds+=r*a._milliseconds,e._days+=r*a._days,e._months+=r*a._months,e._bubble()}function vn(e,t){return Mn(this,e,t,1)}function gn(e,t){return Mn(this,e,t,-1)}function Ln(e){return 0>e?Math.floor(e):Math.ceil(e)}function Dn(){var e,t,n,r,a,o=this._milliseconds,i=this._days,s=this._months,u=this._data;return o>=0&&i>=0&&s>=0||0>=o&&0>=i&&0>=s||(o+=864e5*Ln(bn(s)+i),i=0,s=0),u.milliseconds=o%1e3,e=y(o/1e3),u.seconds=e%60,t=y(e/60),u.minutes=t%60,n=y(t/60),u.hours=n%24,i+=y(n/24),a=y(Yn(i)),s+=a,i-=Ln(bn(a)),r=y(s/12),s%=12,u.days=i,u.months=s,u.years=r,this}function Yn(e){return 4800*e/146097}function bn(e){return 146097*e/4800}function kn(e){var t,n,r=this._milliseconds;if(e=w(e),"month"===e||"year"===e)return t=this._days+r/864e5,n=this._months+Yn(t),"month"===e?n:n/12;switch(t=this._days+Math.round(bn(this._months)),e){case"week":return t/7+r/6048e5;case"day":return t+r/864e5;case"hour":return 
24*t+r/36e5;case"minute":return 1440*t+r/6e4;case"second":return 86400*t+r/1e3;case"millisecond":return Math.floor(864e5*t)+r;default:throw new Error("Unknown unit "+e)}}function Tn(){return this._milliseconds+864e5*this._days+this._months%12*2592e6+31536e6*M(this._months/12)}function En(e){return function(){return this.as(e)}}function wn(e){return e=w(e),this[e+"s"]()}function Nn(e){return function(){return this._data[e]}}function Sn(){return y(this.days()/7)}function xn(e,t,n,r,a){return a.relativeTime(t||1,!!n,e,r)}function Cn(e,t,n){var r=$e(e).abs(),a=pa(r.as("s")),o=pa(r.as("m")),i=pa(r.as("h")),s=pa(r.as("d")),u=pa(r.as("M")),d=pa(r.as("y")),l=a<ma.s&&["s",a]||1===o&&["m"]||o<ma.m&&["mm",o]||1===i&&["h"]||i<ma.h&&["hh",i]||1===s&&["d"]||s<ma.d&&["dd",s]||1===u&&["M"]||u<ma.M&&["MM",u]||1===d&&["y"]||["yy",d];return l[2]=t,l[3]=+e>0,l[4]=n,xn.apply(null,l)}function On(e,t){return void 0===ma[e]?!1:void 0===t?ma[e]:(ma[e]=t,!0)}function Pn(e){var t=this.localeData(),n=Cn(this,!e,t);return e&&(n=t.pastFuture(+this,n)),t.postformat(n)}function jn(){var e,t,n,r=ha(this._milliseconds)/1e3,a=ha(this._days),o=ha(this._months);e=y(r/60),t=y(e/60),r%=60,e%=60,n=y(o/12),o%=12;var i=n,s=o,u=a,d=t,l=e,c=r,_=this.asSeconds();return _?(0>_?"-":"")+"P"+(i?i+"Y":"")+(s?s+"M":"")+(u?u+"D":"")+(d||l||c?"T":"")+(d?d+"H":"")+(l?l+"M":"")+(c?c+"S":""):"P0D"}var 
Hn,In,Rn=t.momentProperties=[],An=!1,Vn={},Fn={},Un=/(\[[^\[]*\])|(\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Q|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|S{1,9}|x|X|zz?|ZZ?|.)/g,Wn=/(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g,zn={},Bn={},Jn=/\d/,Gn=/\d\d/,qn=/\d{3}/,Kn=/\d{4}/,Qn=/[+-]?\d{6}/,Xn=/\d\d?/,$n=/\d{1,3}/,Zn=/\d{1,4}/,er=/[+-]?\d{1,6}/,tr=/\d+/,nr=/[+-]?\d+/,rr=/Z|[+-]\d\d:?\d\d/gi,ar=/[+-]?\d+(\.\d{1,3})?/,or=/[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i,ir={},sr={},ur=0,dr=1,lr=2,cr=3,_r=4,pr=5,mr=6;j("M",["MM",2],"Mo",function(){return this.month()+1}),j("MMM",0,0,function(e){return this.localeData().monthsShort(this,e)}),j("MMMM",0,0,function(e){return this.localeData().months(this,e)}),E("month","M"),F("M",Xn),F("MM",Xn,Gn),F("MMM",or),F("MMMM",or),z(["M","MM"],function(e,t){t[dr]=M(e)-1}),z(["MMM","MMMM"],function(e,t,n,r){var a=n._locale.monthsParse(e,r,n._strict);null!=a?t[dr]=a:c(n).invalidMonth=e});var hr="January_February_March_April_May_June_July_August_September_October_November_December".split("_"),fr="Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),yr={};t.suppressDeprecationWarnings=!1;var Mr=/^\s*(?:[+-]\d{6}|\d{4})-(?:(\d\d-\d\d)|(W\d\d$)|(W\d\d-\d)|(\d\d\d))((T| )(\d\d(:\d\d(:\d\d(\.\d+)?)?)?)?([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/,vr=[["YYYYYY-MM-DD",/[+-]\d{6}-\d{2}-\d{2}/],["YYYY-MM-DD",/\d{4}-\d{2}-\d{2}/],["GGGG-[W]WW-E",/\d{4}-W\d{2}-\d/],["GGGG-[W]WW",/\d{4}-W\d{2}/],["YYYY-DDD",/\d{4}-\d{3}/]],gr=[["HH:mm:ss.SSSS",/(T| )\d\d:\d\d:\d\d\.\d+/],["HH:mm:ss",/(T| )\d\d:\d\d:\d\d/],["HH:mm",/(T| )\d\d:\d\d/],["HH",/(T| )\d\d/]],Lr=/^\/?Date\((\-?\d+)/i;t.createFromInputFallback=ne("moment construction falls back to js Date. This is discouraged and will be removed in upcoming major release. Please refer to https://github.com/moment/moment/issues/1407 for more info.",function(e){e._d=new Date(e._i+(e._useUTC?" 
UTC":""))}),j(0,["YY",2],0,function(){return this.year()%100}),j(0,["YYYY",4],0,"year"),j(0,["YYYYY",5],0,"year"),j(0,["YYYYYY",6,!0],0,"year"),E("year","y"),F("Y",nr),F("YY",Xn,Gn),F("YYYY",Zn,Kn),F("YYYYY",er,Qn),F("YYYYYY",er,Qn),z(["YYYYY","YYYYYY"],ur),z("YYYY",function(e,n){n[ur]=2===e.length?t.parseTwoDigitYear(e):M(e)}),z("YY",function(e,n){n[ur]=t.parseTwoDigitYear(e)}),t.parseTwoDigitYear=function(e){return M(e)+(M(e)>68?1900:2e3)};var Dr=S("FullYear",!1);j("w",["ww",2],"wo","week"),j("W",["WW",2],"Wo","isoWeek"),E("week","w"),E("isoWeek","W"),F("w",Xn),F("ww",Xn,Gn),F("W",Xn),F("WW",Xn,Gn),B(["w","ww","W","WW"],function(e,t,n,r){t[r.substr(0,1)]=M(e)});var Yr={dow:0,doy:6};j("DDD",["DDDD",3],"DDDo","dayOfYear"),E("dayOfYear","DDD"),F("DDD",$n),F("DDDD",qn),z(["DDD","DDDD"],function(e,t,n){n._dayOfYear=M(e)}),t.ISO_8601=function(){};var br=ne("moment().min is deprecated, use moment.min instead. https://github.com/moment/moment/issues/1548",function(){var e=xe.apply(null,arguments);return this>e?this:e}),kr=ne("moment().max is deprecated, use moment.max instead. https://github.com/moment/moment/issues/1548",function(){var e=xe.apply(null,arguments);return e>this?this:e});Ie("Z",":"),Ie("ZZ",""),F("Z",rr),F("ZZ",rr),z(["Z","ZZ"],function(e,t,n){n._useUTC=!0,n._tzm=Re(e)});var Tr=/([\+\-]|\d\d)/gi;t.updateOffset=function(){};var Er=/(\-)?(?:(\d*)\.)?(\d+)\:(\d+)(?:\:(\d+)\.?(\d{3})?)?/,wr=/^(-)?P(?:(?:([0-9,.]*)Y)?(?:([0-9,.]*)M)?(?:([0-9,.]*)D)?(?:T(?:([0-9,.]*)H)?(?:([0-9,.]*)M)?(?:([0-9,.]*)S)?)?|([0-9,.]*)W)$/;$e.fn=je.prototype;var Nr=nt(1,"add"),Sr=nt(-1,"subtract");t.defaultFormat="YYYY-MM-DDTHH:mm:ssZ";var xr=ne("moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. 
Use moment().locale() to change languages.",function(e){return void 0===e?this.localeData():this.locale(e)});j(0,["gg",2],0,function(){return this.weekYear()%100}),j(0,["GG",2],0,function(){return this.isoWeekYear()%100}),xt("gggg","weekYear"),xt("ggggg","weekYear"),xt("GGGG","isoWeekYear"),xt("GGGGG","isoWeekYear"),E("weekYear","gg"),E("isoWeekYear","GG"),F("G",nr),F("g",nr),F("GG",Xn,Gn),F("gg",Xn,Gn),F("GGGG",Zn,Kn),F("gggg",Zn,Kn),F("GGGGG",er,Qn),F("ggggg",er,Qn),B(["gggg","ggggg","GGGG","GGGGG"],function(e,t,n,r){t[r.substr(0,2)]=M(e)}),B(["gg","GG"],function(e,n,r,a){n[a]=t.parseTwoDigitYear(e)}),j("Q",0,0,"quarter"),E("quarter","Q"),F("Q",Jn),z("Q",function(e,t){t[dr]=3*(M(e)-1)}),j("D",["DD",2],"Do","date"),E("date","D"),F("D",Xn),F("DD",Xn,Gn),F("Do",function(e,t){return e?t._ordinalParse:t._ordinalParseLenient}),z(["D","DD"],lr),z("Do",function(e,t){t[lr]=M(e.match(Xn)[0],10)});var Cr=S("Date",!0);j("d",0,"do","day"),j("dd",0,0,function(e){return this.localeData().weekdaysMin(this,e)}),j("ddd",0,0,function(e){return this.localeData().weekdaysShort(this,e)}),j("dddd",0,0,function(e){return this.localeData().weekdays(this,e)}),j("e",0,0,"weekday"),j("E",0,0,"isoWeekday"),E("day","d"),E("weekday","e"),E("isoWeekday","E"),F("d",Xn),F("e",Xn),F("E",Xn),F("dd",or),F("ddd",or),F("dddd",or),B(["dd","ddd","dddd"],function(e,t,n){var r=n._locale.weekdaysParse(e);null!=r?t.d=r:c(n).invalidWeekday=e}),B(["d","e","E"],function(e,t,n,r){t[r]=M(e)});var Or="Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),Pr="Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),jr="Su_Mo_Tu_We_Th_Fr_Sa".split("_");j("H",["HH",2],0,"hour"),j("h",["hh",2],0,function(){return this.hours()%12||12}),Jt("a",!0),Jt("A",!1),E("hour","h"),F("a",Gt),F("A",Gt),F("H",Xn),F("h",Xn),F("HH",Xn,Gn),F("hh",Xn,Gn),z(["H","HH"],cr),z(["a","A"],function(e,t,n){n._isPm=n._locale.isPM(e),n._meridiem=e}),z(["h","hh"],function(e,t,n){t[cr]=M(e),c(n).bigHour=!0});var 
Hr=/[ap]\.?m?\.?/i,Ir=S("Hours",!0);j("m",["mm",2],0,"minute"),E("minute","m"),F("m",Xn),F("mm",Xn,Gn),z(["m","mm"],_r);var Rr=S("Minutes",!1);j("s",["ss",2],0,"second"),E("second","s"),F("s",Xn),F("ss",Xn,Gn),z(["s","ss"],pr);var Ar=S("Seconds",!1);j("S",0,0,function(){return~~(this.millisecond()/100)}),j(0,["SS",2],0,function(){return~~(this.millisecond()/10)}),j(0,["SSS",3],0,"millisecond"),j(0,["SSSS",4],0,function(){return 10*this.millisecond()}),j(0,["SSSSS",5],0,function(){return 100*this.millisecond()}),j(0,["SSSSSS",6],0,function(){return 1e3*this.millisecond()}),j(0,["SSSSSSS",7],0,function(){return 1e4*this.millisecond()}),j(0,["SSSSSSSS",8],0,function(){return 1e5*this.millisecond()}),j(0,["SSSSSSSSS",9],0,function(){return 1e6*this.millisecond()}),E("millisecond","ms"),F("S",$n,Jn),F("SS",$n,Gn),F("SSS",$n,qn);var Vr;for(Vr="SSSS";Vr.length<=9;Vr+="S")F(Vr,tr);for(Vr="S";Vr.length<=9;Vr+="S")z(Vr,Qt);var Fr=S("Milliseconds",!1);j("z",0,0,"zoneAbbr"),j("zz",0,0,"zoneName");var Ur=h.prototype;Ur.add=Nr,Ur.calendar=at,Ur.clone=ot,Ur.diff=lt,Ur.endOf=Dt,Ur.format=mt,Ur.from=ht,Ur.fromNow=ft,Ur.to=yt,Ur.toNow=Mt,Ur.get=O,Ur.invalidAt=St,Ur.isAfter=it,Ur.isBefore=st,Ur.isBetween=ut,Ur.isSame=dt,Ur.isValid=wt,Ur.lang=xr,Ur.locale=vt,Ur.localeData=gt,Ur.max=kr,Ur.min=br,Ur.parsingFlags=Nt,Ur.set=O,Ur.startOf=Lt,Ur.subtract=Sr,Ur.toArray=Tt,Ur.toObject=Et,Ur.toDate=kt,Ur.toISOString=pt,Ur.toJSON=pt,Ur.toString=_t,Ur.unix=bt,Ur.valueOf=Yt,Ur.year=Dr,Ur.isLeapYear=le,Ur.weekYear=Ot,Ur.isoWeekYear=Pt,Ur.quarter=Ur.quarters=It,Ur.month=$,Ur.daysInMonth=Z,Ur.week=Ur.weeks=he,Ur.isoWeek=Ur.isoWeeks=fe,Ur.weeksInYear=Ht,Ur.isoWeeksInYear=jt,Ur.date=Cr,Ur.day=Ur.days=Wt,Ur.weekday=zt,Ur.isoWeekday=Bt,Ur.dayOfYear=Me,Ur.hour=Ur.hours=Ir,Ur.minute=Ur.minutes=Rr,Ur.second=Ur.seconds=Ar,Ur.millisecond=Ur.milliseconds=Fr,Ur.utcOffset=Fe,Ur.utc=We,Ur.local=ze,Ur.parseZone=Be,Ur.hasAlignedHourOffset=Je,Ur.isDST=Ge,
Ur.isDSTShifted=qe,Ur.isLocal=Ke,Ur.isUtcOffset=Qe,Ur.isUtc=Xe,Ur.isUTC=Xe,Ur.zoneAbbr=Xt,Ur.zoneName=$t,Ur.dates=ne("dates accessor is deprecated. Use date instead.",Cr),Ur.months=ne("months accessor is deprecated. Use month instead",$),Ur.years=ne("years accessor is deprecated. Use year instead",Dr),Ur.zone=ne("moment().zone is deprecated, use moment().utcOffset instead. https://github.com/moment/moment/issues/1779",Ue);var Wr=Ur,zr={sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},Br={LTS:"h:mm:ss A",LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY h:mm A",LLLL:"dddd, MMMM D, YYYY h:mm A"},Jr="Invalid date",Gr="%d",qr=/\d{1,2}/,Kr={future:"in %s",past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},Qr=g.prototype;Qr._calendar=zr,Qr.calendar=tn,Qr._longDateFormat=Br,Qr.longDateFormat=nn,Qr._invalidDate=Jr,Qr.invalidDate=rn,Qr._ordinal=Gr,Qr.ordinal=an,Qr._ordinalParse=qr,Qr.preparse=on,Qr.postformat=on,Qr._relativeTime=Kr,Qr.relativeTime=sn,Qr.pastFuture=un,Qr.set=dn,Qr.months=q,Qr._months=hr,Qr.monthsShort=K,Qr._monthsShort=fr,Qr.monthsParse=Q,Qr.week=_e,Qr._week=Yr,Qr.firstDayOfYear=me,Qr.firstDayOfWeek=pe,Qr.weekdays=At,Qr._weekdays=Or,Qr.weekdaysMin=Ft,Qr._weekdaysMin=jr,Qr.weekdaysShort=Vt,Qr._weekdaysShort=Pr,Qr.weekdaysParse=Ut,Qr.isPM=qt,Qr._meridiemParse=Hr,Qr.meridiem=Kt,b("en",{ordinalParse:/\d{1,2}(th|st|nd|rd)/,ordinal:function(e){var t=e%10,n=1===M(e%100/10)?"th":1===t?"st":2===t?"nd":3===t?"rd":"th";return e+n}}),t.lang=ne("moment.lang is deprecated. Use moment.locale instead.",b),t.langData=ne("moment.langData is deprecated. 
Use moment.localeData instead.",T);var Xr=Math.abs,$r=En("ms"),Zr=En("s"),ea=En("m"),ta=En("h"),na=En("d"),ra=En("w"),aa=En("M"),oa=En("y"),ia=Nn("milliseconds"),sa=Nn("seconds"),ua=Nn("minutes"),da=Nn("hours"),la=Nn("days"),ca=Nn("months"),_a=Nn("years"),pa=Math.round,ma={s:45,m:45,h:22,d:26,M:11},ha=Math.abs,fa=je.prototype;fa.abs=yn,fa.add=vn,fa.subtract=gn,fa.as=kn,fa.asMilliseconds=$r,fa.asSeconds=Zr,fa.asMinutes=ea,fa.asHours=ta,fa.asDays=na,fa.asWeeks=ra,fa.asMonths=aa,fa.asYears=oa,fa.valueOf=Tn,fa._bubble=Dn,fa.get=wn,fa.milliseconds=ia,fa.seconds=sa,fa.minutes=ua,fa.hours=da,fa.days=la,fa.weeks=Sn,fa.months=ca,fa.years=_a,fa.humanize=Pn,fa.toISOString=jn,fa.toString=jn,fa.toJSON=jn,fa.locale=vt,fa.localeData=gt,fa.toIsoString=ne("toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)",jn),fa.lang=xr,j("X",0,0,"unix"),j("x",0,0,"valueOf"),F("x",nr),F("X",ar),z("X",function(e,t,n){n._d=new Date(1e3*parseFloat(e,10))}),z("x",function(e,t,n){n._d=new Date(M(e))}),t.version="2.10.6",r(xe),t.fn=Wr,t.min=Oe,t.max=Pe,t.utc=d,t.unix=Zt,t.months=_n,t.isDate=o,t.locale=b,t.invalid=p,t.duration=$e,t.isMoment=f,t.weekdays=mn,t.parseZone=en,t.localeData=T,t.isDuration=He,t.monthsShort=pn,t.weekdaysMin=fn,t.defineLocale=k,t.weekdaysShort=hn,t.normalizeUnits=w,t.relativeTimeThreshold=On;var ya=t;return ya})}).call(t,n(167)(e))},function(e,t){e.exports=function(e){return e.webpackPolyfill||(e.deprecate=function(){},e.paths=[],e.children=[],e.webpackPolyfill=1),e}},function(e,t,n){function r(e){return n(a(e))}function a(e){return o[e]||function(){throw new Error("Cannot find module '"+e+"'.")}()}var 
/* Webpack "context" module (id 168) for moment locale bundles.
   The map below resolves every locale request string (both "./xx" and
   "./xx.js" spellings) to its numeric webpack module id; r(path) requires
   the locale module, r.keys() lists all resolvable paths, r.resolve maps
   path -> id, and unknown paths throw "Cannot find module" (see a(e) above). */
o={"./af":169,"./af.js":169,"./ar":170,"./ar-ma":171,"./ar-ma.js":171,"./ar-sa":172,"./ar-sa.js":172,"./ar-tn":173,"./ar-tn.js":173,"./ar.js":170,"./az":174,"./az.js":174,"./be":175,"./be.js":175,"./bg":176,"./bg.js":176,"./bn":177,"./bn.js":177,"./bo":178,"./bo.js":178,"./br":179,"./br.js":179,"./bs":180,"./bs.js":180,"./ca":181,"./ca.js":181,"./cs":182,"./cs.js":182,"./cv":183,"./cv.js":183,"./cy":184,"./cy.js":184,"./da":185,"./da.js":185,"./de":186,"./de-at":187,"./de-at.js":187,"./de.js":186,"./el":188,"./el.js":188,"./en-au":189,"./en-au.js":189,"./en-ca":190,"./en-ca.js":190,"./en-gb":191,"./en-gb.js":191,"./eo":192,"./eo.js":192,"./es":193,"./es.js":193,"./et":194,"./et.js":194,"./eu":195,"./eu.js":195,"./fa":196,"./fa.js":196,"./fi":197,"./fi.js":197,"./fo":198,"./fo.js":198,"./fr":199,"./fr-ca":200,"./fr-ca.js":200,"./fr.js":199,"./fy":201,"./fy.js":201,"./gl":202,"./gl.js":202,"./he":203,"./he.js":203,"./hi":204,"./hi.js":204,"./hr":205,"./hr.js":205,"./hu":206,"./hu.js":206,"./hy-am":207,"./hy-am.js":207,"./id":208,"./id.js":208,"./is":209,"./is.js":209,"./it":210,"./it.js":210,"./ja":211,"./ja.js":211,"./jv":212,"./jv.js":212,"./ka":213,"./ka.js":213,"./km":214,"./km.js":214,"./ko":215,"./ko.js":215,"./lb":216,"./lb.js":216,"./lt":217,"./lt.js":217,"./lv":218,"./lv.js":218,"./me":219,"./me.js":219,"./mk":220,"./mk.js":220,"./ml":221,"./ml.js":221,"./mr":222,"./mr.js":222,"./ms":223,"./ms-my":224,"./ms-my.js":224,"./ms.js":223,"./my":225,"./my.js":225,"./nb":226,"./nb.js":226,"./ne":227,"./ne.js":227,"./nl":228,"./nl.js":228,"./nn":229,"./nn.js":229,"./pl":230,"./pl.js":230,"./pt":231,"./pt-br":232,"./pt-br.js":232,"./pt.js":231,"./ro":233,"./ro.js":233,"./ru":234,"./ru.js":234,"./si":235,"./si.js":235,"./sk":236,"./sk.js":236,"./sl":237,"./sl.js":237,"./sq":238,"./sq.js":238,"./sr":239,"./sr-cyrl":240,"./sr-cyrl.js":240,"./sr.js":239,"./sv":241,"./sv.js":241,"./ta":242,"./ta.js":242,"./th":243,"./th.js":243,"./tl-ph":244,"./tl-ph.js":244,"./tr":245,"./t
r.js":245,"./tzl":246,"./tzl.js":246,"./tzm":247,"./tzm-latn":248,"./tzm-latn.js":248,"./tzm.js":247,"./uk":249,"./uk.js":249,"./uz":250,"./uz.js":250,"./vi":251,"./vi.js":251,"./zh-cn":252,"./zh-cn.js":252,"./zh-tw":253,"./zh-tw.js":253};r.keys=function(){return Object.keys(o)},r.resolve=a,e.exports=r,r.id=168},function(e,t,n){
//! moment.js locale configuration
//! locale : afrikaans (af)
//! author : Werner Mollentze : https://github.com/wernerm
/* Afrikaans (af) locale: registers via e.defineLocale("af", ...) with
   month/weekday name tables, vm/nm meridiem (nm = PM), DD/MM/YYYY long
   date formats, "ste"/"de" ordinal suffixes, week starting Monday (dow:1). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("af",{months:"Januarie_Februarie_Maart_April_Mei_Junie_Julie_Augustus_September_Oktober_November_Desember".split("_"),monthsShort:"Jan_Feb_Mar_Apr_Mei_Jun_Jul_Aug_Sep_Okt_Nov_Des".split("_"),weekdays:"Sondag_Maandag_Dinsdag_Woensdag_Donderdag_Vrydag_Saterdag".split("_"),weekdaysShort:"Son_Maa_Din_Woe_Don_Vry_Sat".split("_"),weekdaysMin:"So_Ma_Di_Wo_Do_Vr_Sa".split("_"),meridiemParse:/vm|nm/i,isPM:function(e){return/^nm$/i.test(e)},meridiem:function(e,t,n){return 12>e?n?"vm":"VM":n?"nm":"NM"},longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[Vandag om] LT",nextDay:"[Môre om] LT",nextWeek:"dddd [om] LT",lastDay:"[Gister om] LT",lastWeek:"[Laas] dddd [om] LT",sameElse:"L"},relativeTime:{future:"oor %s",past:"%s gelede",s:"'n paar sekondes",m:"'n minuut",mm:"%d minute",h:"'n uur",hh:"%d ure",d:"'n dag",dd:"%d dae",M:"'n maand",MM:"%d maande",y:"'n jaar",yy:"%d jaar"},ordinalParse:/\d{1,2}(ste|de)/,ordinal:function(e){return e+(1===e||8===e||e>=20?"ste":"de")},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Arabic (ar)
//! author : Abdel Said : https://github.com/abdelsaid
//! Changes in months, weekdays : Ahmed Elkhatib
//! Native plural forms : forabi https://github.com/forabi
/* Arabic (ar) locale. t/n map between Western and Arabic-Indic digits;
   r(e) picks one of 6 Arabic plural categories (0,1,2,few,many,other);
   a holds the per-unit plural word tables and o(unit) builds the
   relativeTime formatter from them (the 2-case splits on the withoutSuffix
   flag). preparse strips U+200F RTL marks and converts digits/comma in;
   postformat converts them back out. Week starts Saturday (dow:6). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"١",2:"٢",3:"٣",4:"٤",5:"٥",6:"٦",7:"٧",8:"٨",9:"٩",0:"٠"},n={"١":"1","٢":"2","٣":"3","٤":"4","٥":"5","٦":"6","٧":"7","٨":"8","٩":"9","٠":"0"},r=function(e){return 0===e?0:1===e?1:2===e?2:e%100>=3&&10>=e%100?3:e%100>=11?4:5},a={s:["أقل من ثانية","ثانية واحدة",["ثانيتان","ثانيتين"],"%d ثوان","%d ثانية","%d ثانية"],m:["أقل من دقيقة","دقيقة واحدة",["دقيقتان","دقيقتين"],"%d دقائق","%d دقيقة","%d دقيقة"],h:["أقل من ساعة","ساعة واحدة",["ساعتان","ساعتين"],"%d ساعات","%d ساعة","%d ساعة"],d:["أقل من يوم","يوم واحد",["يومان","يومين"],"%d أيام","%d يومًا","%d يوم"],M:["أقل من شهر","شهر واحد",["شهران","شهرين"],"%d أشهر","%d شهرا","%d شهر"],y:["أقل من عام","عام واحد",["عامان","عامين"],"%d أعوام","%d عامًا","%d عام"]},o=function(e){return function(t,n,o,i){var s=r(t),u=a[e][r(t)];return 2===s&&(u=u[n?0:1]),u.replace(/%d/i,t)}},i=["كانون الثاني يناير","شباط فبراير","آذار مارس","نيسان أبريل","أيار مايو","حزيران يونيو","تموز يوليو","آب أغسطس","أيلول سبتمبر","تشرين الأول أكتوبر","تشرين الثاني نوفمبر","كانون الأول ديسمبر"],s=e.defineLocale("ar",{months:i,monthsShort:i,weekdays:"الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت".split("_"),weekdaysShort:"أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت".split("_"),weekdaysMin:"ح_ن_ث_ر_خ_ج_س".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"D/M/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},meridiemParse:/ص|م/,isPM:function(e){return"م"===e},meridiem:function(e,t,n){return 12>e?"ص":"م"},calendar:{sameDay:"[اليوم عند الساعة] LT",nextDay:"[غدًا عند الساعة] LT",nextWeek:"dddd [عند الساعة] LT",lastDay:"[أمس عند الساعة] LT",lastWeek:"dddd [عند الساعة] LT",sameElse:"L"},relativeTime:{future:"بعد %s",past:"منذ %s",s:o("s"),m:o("m"),mm:o("m"),h:o("h"),hh:o("h"),d:o("d"),dd:o("d"),M:o("M"),MM:o("M"),y:o("y"),yy:o("y")},preparse:function(e){return e.replace(/\u200f/g,"").replace(/[١٢٣٤٥٦٧٨٩٠]/g,function(e){return 
n[e]}).replace(/،/g,",")},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]}).replace(/,/g,"،")},week:{dow:6,doy:12}});return s})},function(e,t,n){
//! moment.js locale configuration
//! locale : Moroccan Arabic (ar-ma)
//! author : ElFadili Yassine : https://github.com/ElFadiliY
//! author : Abdel Said : https://github.com/abdelsaid
/* Moroccan Arabic (ar-ma): Western month names transliterated to Arabic,
   no digit conversion (unlike ar/ar-sa), week starts Saturday (dow:6). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ar-ma",{months:"يناير_فبراير_مارس_أبريل_ماي_يونيو_يوليوز_غشت_شتنبر_أكتوبر_نونبر_دجنبر".split("_"),monthsShort:"يناير_فبراير_مارس_أبريل_ماي_يونيو_يوليوز_غشت_شتنبر_أكتوبر_نونبر_دجنبر".split("_"),weekdays:"الأحد_الإتنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت".split("_"),weekdaysShort:"احد_اتنين_ثلاثاء_اربعاء_خميس_جمعة_سبت".split("_"),weekdaysMin:"ح_ن_ث_ر_خ_ج_س".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[اليوم على الساعة] LT",nextDay:"[غدا على الساعة] LT",nextWeek:"dddd [على الساعة] LT",lastDay:"[أمس على الساعة] LT",lastWeek:"dddd [على الساعة] LT",sameElse:"L"},relativeTime:{future:"في %s",past:"منذ %s",s:"ثوان",m:"دقيقة",mm:"%d دقائق",h:"ساعة",hh:"%d ساعات",d:"يوم",dd:"%d أيام",M:"شهر",MM:"%d أشهر",y:"سنة",yy:"%d سنوات"},week:{dow:6,doy:12}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Arabic Saudi Arabia (ar-sa)
//! author : Suhail Alkowaileet : https://github.com/xsoh
/* Arabic (Saudi Arabia, ar-sa): t/n convert between Western and
   Arabic-Indic digits in postformat/preparse; simple ص/م meridiem;
   week starts Saturday (dow:6). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"١",2:"٢",3:"٣",4:"٤",5:"٥",6:"٦",7:"٧",8:"٨",9:"٩",0:"٠"},n={"١":"1","٢":"2","٣":"3","٤":"4","٥":"5","٦":"6","٧":"7","٨":"8","٩":"9","٠":"0"},r=e.defineLocale("ar-sa",{months:"يناير_فبراير_مارس_أبريل_مايو_يونيو_يوليو_أغسطس_سبتمبر_أكتوبر_نوفمبر_ديسمبر".split("_"),monthsShort:"يناير_فبراير_مارس_أبريل_مايو_يونيو_يوليو_أغسطس_سبتمبر_أكتوبر_نوفمبر_ديسمبر".split("_"),weekdays:"الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت".split("_"),weekdaysShort:"أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت".split("_"),weekdaysMin:"ح_ن_ث_ر_خ_ج_س".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},meridiemParse:/ص|م/,isPM:function(e){return"م"===e},meridiem:function(e,t,n){return 12>e?"ص":"م"},calendar:{sameDay:"[اليوم على الساعة] LT",nextDay:"[غدا على الساعة] LT",nextWeek:"dddd [على الساعة] LT",lastDay:"[أمس على الساعة] LT",lastWeek:"dddd [على الساعة] LT",sameElse:"L"},relativeTime:{future:"في %s",past:"منذ %s",s:"ثوان",m:"دقيقة",mm:"%d دقائق",h:"ساعة",hh:"%d ساعات",d:"يوم",dd:"%d أيام",M:"شهر",MM:"%d أشهر",y:"سنة",yy:"%d سنوات"},preparse:function(e){return e.replace(/[١٢٣٤٥٦٧٨٩٠]/g,function(e){return n[e]}).replace(/،/g,",")},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]}).replace(/,/g,"،")},week:{dow:6,doy:12}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : Tunisian Arabic (ar-tn)
/* Tunisian Arabic (ar-tn): French-derived month names in Arabic script,
   no digit conversion; week starts Monday (dow:1) unlike ar/ar-ma/ar-sa. */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ar-tn",{months:"جانفي_فيفري_مارس_أفريل_ماي_جوان_جويلية_أوت_سبتمبر_أكتوبر_نوفمبر_ديسمبر".split("_"),monthsShort:"جانفي_فيفري_مارس_أفريل_ماي_جوان_جويلية_أوت_سبتمبر_أكتوبر_نوفمبر_ديسمبر".split("_"),weekdays:"الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت".split("_"),weekdaysShort:"أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت".split("_"),weekdaysMin:"ح_ن_ث_ر_خ_ج_س".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[اليوم على الساعة] LT",nextDay:"[غدا على الساعة] LT",nextWeek:"dddd [على الساعة] LT",lastDay:"[أمس على الساعة] LT",lastWeek:"dddd [على الساعة] LT",sameElse:"L"},relativeTime:{future:"في %s",past:"منذ %s",s:"ثوان",m:"دقيقة",mm:"%d دقائق",h:"ساعة",hh:"%d ساعات",d:"يوم",dd:"%d أيام",M:"شهر",MM:"%d أشهر",y:"سنة",yy:"%d سنوات"},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : azerbaijani (az)
//! author : topchiyev : https://github.com/topchiyev
/* Azerbaijani (az): table t maps number stems (ones digit, tens, 100) to
   vowel-harmonized ordinal suffixes; ordinal() tries ones digit, then the
   tens remainder, then 100. Four-way day-part meridiem
   (gecə/səhər/gündüz/axşam); week starts Monday (dow:1, doy:7). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"-inci",5:"-inci",8:"-inci",70:"-inci",80:"-inci",2:"-nci",7:"-nci",20:"-nci",50:"-nci",3:"-üncü",4:"-üncü",100:"-üncü",6:"-ncı",9:"-uncu",10:"-uncu",30:"-uncu",60:"-ıncı",90:"-ıncı"},n=e.defineLocale("az",{months:"yanvar_fevral_mart_aprel_may_iyun_iyul_avqust_sentyabr_oktyabr_noyabr_dekabr".split("_"),monthsShort:"yan_fev_mar_apr_may_iyn_iyl_avq_sen_okt_noy_dek".split("_"),weekdays:"Bazar_Bazar ertəsi_Çərşənbə axşamı_Çərşənbə_Cümə axşamı_Cümə_Şənbə".split("_"),weekdaysShort:"Baz_BzE_ÇAx_Çər_CAx_Cüm_Şən".split("_"),weekdaysMin:"Bz_BE_ÇA_Çə_CA_Cü_Şə".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[bugün saat] LT",nextDay:"[sabah saat] LT",nextWeek:"[gələn həftə] dddd [saat] LT",lastDay:"[dünən] LT",lastWeek:"[keçən həftə] dddd [saat] LT",sameElse:"L"},relativeTime:{future:"%s sonra",past:"%s əvvəl",s:"birneçə saniyyə",m:"bir dəqiqə",mm:"%d dəqiqə",h:"bir saat",hh:"%d saat",d:"bir gün",dd:"%d gün",M:"bir ay",MM:"%d ay",y:"bir il",yy:"%d il"},meridiemParse:/gecə|səhər|gündüz|axşam/,isPM:function(e){return/^(gündüz|axşam)$/.test(e)},meridiem:function(e,t,n){return 4>e?"gecə":12>e?"səhər":17>e?"gündüz":"axşam"},ordinalParse:/\d{1,2}-(ıncı|inci|nci|üncü|ncı|uncu)/,ordinal:function(e){if(0===e)return e+"-ıncı";var n=e%10,r=e%100-n,a=e>=100?100:null;return e+(t[n]||t[r]||t[a])},week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : belarusian (be)
//! author : Dmitry Demidov : https://github.com/demidov91
//! author : Praleska : http://praleska.pro/
//! author : Menelion Elensúle : https://github.com/Oire
/* Belarusian (be). Helper t(forms, count) selects the Slavic plural form
   (one/few/many) from an underscore-joined triple; n(...) builds the
   relativeTime strings, special-casing m/h without a count. months/weekdays
   are functions choosing nominative vs accusative by inspecting the format
   string with a regex. Day-part meridiem ночы/раніцы/дня/вечара; ordinals
   vary by token (M/d/w vs D). Week starts Monday (dow:1, doy:7). */
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t){var n=e.split("_");return t%10===1&&t%100!==11?n[0]:t%10>=2&&4>=t%10&&(10>t%100||t%100>=20)?n[1]:n[2]}function n(e,n,r){var a={mm:n?"хвіліна_хвіліны_хвілін":"хвіліну_хвіліны_хвілін",hh:n?"гадзіна_гадзіны_гадзін":"гадзіну_гадзіны_гадзін",dd:"дзень_дні_дзён",MM:"месяц_месяцы_месяцаў",yy:"год_гады_гадоў"};return"m"===r?n?"хвіліна":"хвіліну":"h"===r?n?"гадзіна":"гадзіну":e+" "+t(a[r],+e)}function r(e,t){var n={nominative:"студзень_люты_сакавік_красавік_травень_чэрвень_ліпень_жнівень_верасень_кастрычнік_лістапад_снежань".split("_"),accusative:"студзеня_лютага_сакавіка_красавіка_траўня_чэрвеня_ліпеня_жніўня_верасня_кастрычніка_лістапада_снежня".split("_")},r=/D[oD]?(\[[^\[\]]*\]|\s+)+MMMM?/.test(t)?"accusative":"nominative";return n[r][e.month()]}function a(e,t){var n={nominative:"нядзеля_панядзелак_аўторак_серада_чацвер_пятніца_субота".split("_"),accusative:"нядзелю_панядзелак_аўторак_сераду_чацвер_пятніцу_суботу".split("_")},r=/\[ ?[Вв] ?(?:мінулую|наступную)? 
?\] ?dddd/.test(t)?"accusative":"nominative";return n[r][e.day()]}var o=e.defineLocale("be",{months:r,monthsShort:"студ_лют_сак_крас_трав_чэрв_ліп_жнів_вер_каст_ліст_снеж".split("_"),weekdays:a,weekdaysShort:"нд_пн_ат_ср_чц_пт_сб".split("_"),weekdaysMin:"нд_пн_ат_ср_чц_пт_сб".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY г.",LLL:"D MMMM YYYY г., HH:mm",LLLL:"dddd, D MMMM YYYY г., HH:mm"},calendar:{sameDay:"[Сёння ў] LT",nextDay:"[Заўтра ў] LT",lastDay:"[Учора ў] LT",nextWeek:function(){return"[У] dddd [ў] LT"},lastWeek:function(){switch(this.day()){case 0:case 3:case 5:case 6:return"[У мінулую] dddd [ў] LT";case 1:case 2:case 4:return"[У мінулы] dddd [ў] LT"}},sameElse:"L"},relativeTime:{future:"праз %s",past:"%s таму",s:"некалькі секунд",m:n,mm:n,h:n,hh:n,d:"дзень",dd:n,M:"месяц",MM:n,y:"год",yy:n},meridiemParse:/ночы|раніцы|дня|вечара/,isPM:function(e){return/^(дня|вечара)$/.test(e)},meridiem:function(e,t,n){return 4>e?"ночы":12>e?"раніцы":17>e?"дня":"вечара"},ordinalParse:/\d{1,2}-(і|ы|га)/,ordinal:function(e,t){switch(t){case"M":case"d":case"DDD":case"w":case"W":return e%10!==2&&e%10!==3||e%100===12||e%100===13?e+"-ы":e+"-і";case"D":return e+"-га";default:return e}},week:{dow:1,doy:7}});return o})},function(e,t,n){
//! moment.js locale configuration
//! locale : bulgarian (bg)
//! author : Krasen Borisov : https://github.com/kraz
/* Bulgarian (bg): lastWeek switches grammatical gender of "изминал" by
   weekday; ordinal() picks -ев/-ен/-ти/-ви/-ри/-ми by value and last
   digit(s). 24h H:mm formats; week starts Monday (dow:1, doy:7). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("bg",{months:"януари_февруари_март_април_май_юни_юли_август_септември_октомври_ноември_декември".split("_"),monthsShort:"янр_фев_мар_апр_май_юни_юли_авг_сеп_окт_ное_дек".split("_"),weekdays:"неделя_понеделник_вторник_сряда_четвъртък_петък_събота".split("_"),weekdaysShort:"нед_пон_вто_сря_чет_пет_съб".split("_"),weekdaysMin:"нд_пн_вт_ср_чт_пт_сб".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"D.MM.YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY H:mm",LLLL:"dddd, D MMMM YYYY H:mm"},calendar:{sameDay:"[Днес в] LT",nextDay:"[Утре в] LT",nextWeek:"dddd [в] LT",lastDay:"[Вчера в] LT",lastWeek:function(){switch(this.day()){case 0:case 3:case 6:return"[В изминалата] dddd [в] LT";case 1:case 2:case 4:case 5:return"[В изминалия] dddd [в] LT"}},sameElse:"L"},relativeTime:{future:"след %s",past:"преди %s",s:"няколко секунди",m:"минута",mm:"%d минути",h:"час",hh:"%d часа",d:"ден",dd:"%d дни",M:"месец",MM:"%d месеца",y:"година",yy:"%d години"},ordinalParse:/\d{1,2}-(ев|ен|ти|ви|ри|ми)/,ordinal:function(e){var t=e%10,n=e%100;return 0===e?e+"-ев":0===n?e+"-ен":n>10&&20>n?e+"-ти":1===t?e+"-ви":2===t?e+"-ри":7===t||8===t?e+"-ми":e+"-ти"},week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Bengali (bn)
//! author : Kaushik Gandhi : https://github.com/kaushikgandhi
/* Bengali (bn): t/n convert between Western and Bengali digits in
   postformat/preparse; five day-part meridiems (রাত/সকাল/দুপুর/বিকেল/রাত —
   the duplicate alternative in meridiemParse is redundant but harmless);
   12h formats with "সময়" suffix; week starts Sunday (dow:0, doy:6). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"১",2:"২",3:"৩",4:"৪",5:"৫",6:"৬",7:"৭",8:"৮",9:"৯",0:"০"},n={"১":"1","২":"2","৩":"3","৪":"4","৫":"5","৬":"6","৭":"7","৮":"8","৯":"9","০":"0"},r=e.defineLocale("bn",{months:"জানুয়ারী_ফেবুয়ারী_মার্চ_এপ্রিল_মে_জুন_জুলাই_অগাস্ট_সেপ্টেম্বর_অক্টোবর_নভেম্বর_ডিসেম্বর".split("_"),monthsShort:"জানু_ফেব_মার্চ_এপর_মে_জুন_জুল_অগ_সেপ্ট_অক্টো_নভ_ডিসেম্".split("_"),weekdays:"রবিবার_সোমবার_মঙ্গলবার_বুধবার_বৃহস্পত্তিবার_শুক্রুবার_শনিবার".split("_"),weekdaysShort:"রবি_সোম_মঙ্গল_বুধ_বৃহস্পত্তি_শুক্রু_শনি".split("_"),weekdaysMin:"রব_সম_মঙ্গ_বু_ব্রিহ_শু_শনি".split("_"),longDateFormat:{LT:"A h:mm সময়",LTS:"A h:mm:ss সময়",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY, A h:mm সময়",LLLL:"dddd, D MMMM YYYY, A h:mm সময়"},calendar:{sameDay:"[আজ] LT",nextDay:"[আগামীকাল] LT",nextWeek:"dddd, LT",lastDay:"[গতকাল] LT",lastWeek:"[গত] dddd, LT",sameElse:"L"},relativeTime:{future:"%s পরে",past:"%s আগে",s:"কএক সেকেন্ড",m:"এক মিনিট",mm:"%d মিনিট",h:"এক ঘন্টা",hh:"%d ঘন্টা",d:"এক দিন",dd:"%d দিন",M:"এক মাস",MM:"%d মাস",y:"এক বছর",yy:"%d বছর"},preparse:function(e){return e.replace(/[১২৩৪৫৬৭৮৯০]/g,function(e){return n[e]})},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]})},meridiemParse:/রাত|সকাল|দুপুর|বিকেল|রাত/,isPM:function(e){return/^(দুপুর|বিকেল|রাত)$/.test(e)},meridiem:function(e,t,n){return 4>e?"রাত":10>e?"সকাল":17>e?"দুপুর":20>e?"বিকেল":"রাত"},week:{dow:0,doy:6}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : tibetan (bo)
//! author : Thupten N. Chakrishar : https://github.com/vajradog
/* Tibetan (bo): t/n convert between Western and Tibetan digits in
   postformat/preparse; four day-part meridiems plus night
   (མཚན་མོ/ཞོགས་ཀས/ཉིན་གུང/དགོང་དག); 12h A h:mm formats; week starts
   Sunday (dow:0, doy:6). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"༡",2:"༢",3:"༣",4:"༤",5:"༥",6:"༦",7:"༧",8:"༨",9:"༩",0:"༠"},n={"༡":"1","༢":"2","༣":"3","༤":"4","༥":"5","༦":"6","༧":"7","༨":"8","༩":"9","༠":"0"},r=e.defineLocale("bo",{months:"ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ".split("_"),monthsShort:"ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ".split("_"),weekdays:"གཟའ་ཉི་མ་_གཟའ་ཟླ་བ་_གཟའ་མིག་དམར་_གཟའ་ལྷག་པ་_གཟའ་ཕུར་བུ_གཟའ་པ་སངས་_གཟའ་སྤེན་པ་".split("_"),weekdaysShort:"ཉི་མ་_ཟླ་བ་_མིག་དམར་_ལྷག་པ་_ཕུར་བུ_པ་སངས་_སྤེན་པ་".split("_"),weekdaysMin:"ཉི་མ་_ཟླ་བ་_མིག་དམར་_ལྷག་པ་_ཕུར་བུ_པ་སངས་_སྤེན་པ་".split("_"),longDateFormat:{LT:"A h:mm",LTS:"A h:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY, A h:mm",LLLL:"dddd, D MMMM YYYY, A h:mm"},calendar:{sameDay:"[དི་རིང] LT",nextDay:"[སང་ཉིན] LT",nextWeek:"[བདུན་ཕྲག་རྗེས་མ], LT",lastDay:"[ཁ་སང] LT",lastWeek:"[བདུན་ཕྲག་མཐའ་མ] dddd, LT",sameElse:"L"},relativeTime:{future:"%s ལ་",past:"%s སྔན་ལ",s:"ལམ་སང",m:"སྐར་མ་གཅིག",mm:"%d སྐར་མ",h:"ཆུ་ཚོད་གཅིག",hh:"%d ཆུ་ཚོད",d:"ཉིན་གཅིག",dd:"%d ཉིན་",M:"ཟླ་བ་གཅིག",MM:"%d ཟླ་བ",y:"ལོ་གཅིག",yy:"%d ལོ"},preparse:function(e){return e.replace(/[༡༢༣༤༥༦༧༨༩༠]/g,function(e){return n[e]})},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]})},meridiemParse:/མཚན་མོ|ཞོགས་ཀས|ཉིན་གུང|དགོང་དག|མཚན་མོ/,isPM:function(e){return/^(ཉིན་གུང|དགོང་དག|མཚན་མོ)$/.test(e)},meridiem:function(e,t,n){return 4>e?"མཚན་མོ":10>e?"ཞོགས་ཀས":17>e?"ཉིན་གུང":20>e?"དགོང་དག":"མཚན་མོ"},week:{dow:0,doy:6}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : breton (br)
//! author : Jean-Baptiste Le Duigou : https://github.com/jbleduigou
/* Breton (br). Helpers implement Breton consonant mutation: o(word)
   softens the initial consonant (m->v, b->v, d->z), a(word, count)
   applies it only when count is 2; t(...) formats mm/MM/dd relative
   times through that mutation; n(years)/r(n) pick "bloaz" vs mutated
   "vloaz" from the last nonzero digit. Ordinals añ/vet; dow:1, doy:4. */
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n){var r={mm:"munutenn",MM:"miz",dd:"devezh"};return e+" "+a(r[n],e)}function n(e){switch(r(e)){case 1:case 3:case 4:case 5:case 9:return e+" bloaz";default:return e+" vloaz"}}function r(e){return e>9?r(e%10):e}function a(e,t){return 2===t?o(e):e}function o(e){var t={m:"v",b:"v",d:"z"};return void 0===t[e.charAt(0)]?e:t[e.charAt(0)]+e.substring(1)}var i=e.defineLocale("br",{months:"Genver_C'hwevrer_Meurzh_Ebrel_Mae_Mezheven_Gouere_Eost_Gwengolo_Here_Du_Kerzu".split("_"),monthsShort:"Gen_C'hwe_Meu_Ebr_Mae_Eve_Gou_Eos_Gwe_Her_Du_Ker".split("_"),weekdays:"Sul_Lun_Meurzh_Merc'her_Yaou_Gwener_Sadorn".split("_"),weekdaysShort:"Sul_Lun_Meu_Mer_Yao_Gwe_Sad".split("_"),weekdaysMin:"Su_Lu_Me_Mer_Ya_Gw_Sa".split("_"),longDateFormat:{LT:"h[e]mm A",LTS:"h[e]mm:ss A",L:"DD/MM/YYYY",LL:"D [a viz] MMMM YYYY",LLL:"D [a viz] MMMM YYYY h[e]mm A",LLLL:"dddd, D [a viz] MMMM YYYY h[e]mm A"},calendar:{sameDay:"[Hiziv da] LT",nextDay:"[Warc'hoazh da] LT",nextWeek:"dddd [da] LT",lastDay:"[Dec'h da] LT",lastWeek:"dddd [paset da] LT",sameElse:"L"},relativeTime:{future:"a-benn %s",past:"%s 'zo",s:"un nebeud segondennoù",m:"ur vunutenn",mm:t,h:"un eur",hh:"%d eur",d:"un devezh",dd:t,M:"ur miz",MM:t,y:"ur bloaz",yy:n},ordinalParse:/\d{1,2}(añ|vet)/,ordinal:function(e){var t=1===e?"añ":"vet";return e+t},week:{dow:1,doy:4}});return i})},function(e,t,n){
//! moment.js locale configuration
//! locale : bosnian (bs)
//! author : Nedim Cholich : https://github.com/frontyard
//! based on (hr) translation by Bojan Marković
/* Bosnian (bs). Helper t(count, withoutSuffix, unit) implements Slavic
   pluralization: counts 2-4 take the paucal form, 1 the singular, else
   plural; m/h also split on withoutSuffix. nextWeek/lastWeek pick
   gendered phrases per weekday. Ordinals "%d."; dow:1, doy:7. */
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n){var r=e+" ";switch(n){case"m":return t?"jedna minuta":"jedne minute";case"mm":return r+=1===e?"minuta":2===e||3===e||4===e?"minute":"minuta";case"h":return t?"jedan sat":"jednog sata";case"hh":return r+=1===e?"sat":2===e||3===e||4===e?"sata":"sati";case"dd":return r+=1===e?"dan":"dana";case"MM":return r+=1===e?"mjesec":2===e||3===e||4===e?"mjeseca":"mjeseci";case"yy":return r+=1===e?"godina":2===e||3===e||4===e?"godine":"godina"}}var n=e.defineLocale("bs",{months:"januar_februar_mart_april_maj_juni_juli_august_septembar_oktobar_novembar_decembar".split("_"),monthsShort:"jan._feb._mar._apr._maj._jun._jul._aug._sep._okt._nov._dec.".split("_"),weekdays:"nedjelja_ponedjeljak_utorak_srijeda_četvrtak_petak_subota".split("_"),weekdaysShort:"ned._pon._uto._sri._čet._pet._sub.".split("_"),weekdaysMin:"ne_po_ut_sr_če_pe_su".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD. MM. YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd, D. MMMM YYYY H:mm"},calendar:{sameDay:"[danas u] LT",nextDay:"[sutra u] LT",nextWeek:function(){switch(this.day()){case 0:return"[u] [nedjelju] [u] LT";case 3:return"[u] [srijedu] [u] LT";case 6:return"[u] [subotu] [u] LT";case 1:case 2:case 4:case 5:return"[u] dddd [u] LT"}},lastDay:"[jučer u] LT",lastWeek:function(){switch(this.day()){case 0:case 3:return"[prošlu] dddd [u] LT";case 6:return"[prošle] [subote] [u] LT";case 1:case 2:case 4:case 5:return"[prošli] dddd [u] LT"}},sameElse:"L"},relativeTime:{future:"za %s",past:"prije %s",s:"par sekundi",m:t,mm:t,h:t,hh:t,d:"dan",dd:t,M:"mjesec",MM:t,y:"godinu",yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : catalan (ca)
//! author : Juan G. Hurtado : https://github.com/juanghurtado
/* Catalan (ca): calendar entries are functions choosing "a les"/"a la"
   by whether the hour is 1. Note LTS is "LT:ss" — moment's recursive
   format expansion resolves the embedded LT token, so this yields
   H:mm:ss (intentional, not a typo). Ordinals r/n/r/t/è, with "a" for
   week tokens. Week starts Monday (dow:1, doy:4). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ca",{months:"gener_febrer_març_abril_maig_juny_juliol_agost_setembre_octubre_novembre_desembre".split("_"),monthsShort:"gen._febr._mar._abr._mai._jun._jul._ag._set._oct._nov._des.".split("_"),weekdays:"diumenge_dilluns_dimarts_dimecres_dijous_divendres_dissabte".split("_"),weekdaysShort:"dg._dl._dt._dc._dj._dv._ds.".split("_"),weekdaysMin:"Dg_Dl_Dt_Dc_Dj_Dv_Ds".split("_"),longDateFormat:{LT:"H:mm",LTS:"LT:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY H:mm",LLLL:"dddd D MMMM YYYY H:mm"},calendar:{sameDay:function(){return"[avui a "+(1!==this.hours()?"les":"la")+"] LT"},nextDay:function(){return"[demà a "+(1!==this.hours()?"les":"la")+"] LT"},nextWeek:function(){return"dddd [a "+(1!==this.hours()?"les":"la")+"] LT"},lastDay:function(){return"[ahir a "+(1!==this.hours()?"les":"la")+"] LT"},lastWeek:function(){return"[el] dddd [passat a "+(1!==this.hours()?"les":"la")+"] LT"},sameElse:"L"},relativeTime:{future:"en %s",past:"fa %s",s:"uns segons",m:"un minut",mm:"%d minuts",h:"una hora",hh:"%d hores",d:"un dia",dd:"%d dies",M:"un mes",MM:"%d mesos",y:"un any",yy:"%d anys"},ordinalParse:/\d{1,2}(r|n|t|è|a)/,ordinal:function(e,t){var n=1===e?"r":2===e?"n":3===e?"r":4===e?"t":"è";return("w"===t||"W"===t)&&(n="a"),e+n},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : czech (cs)
//! author : petrbela : https://github.com/petrbela
/* Czech (cs). t(n) tests for the Czech paucal range (2-4, excluding
   teens); n(count, withoutSuffix, unit, isFuture) produces all
   relativeTime strings across nominative/accusative/instrumental forms.
   monthsParse is prebuilt as regexes matching full or short month names.
   nextWeek/lastWeek choose prepositions per weekday; dow:1, doy:4. */
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e){return e>1&&5>e&&1!==~~(e/10)}function n(e,n,r,a){var o=e+" ";switch(r){case"s":return n||a?"pár sekund":"pár sekundami";case"m":return n?"minuta":a?"minutu":"minutou";case"mm":return n||a?o+(t(e)?"minuty":"minut"):o+"minutami";case"h":return n?"hodina":a?"hodinu":"hodinou";case"hh":return n||a?o+(t(e)?"hodiny":"hodin"):o+"hodinami";case"d":return n||a?"den":"dnem";case"dd":return n||a?o+(t(e)?"dny":"dní"):o+"dny";case"M":return n||a?"měsíc":"měsícem";case"MM":return n||a?o+(t(e)?"měsíce":"měsíců"):o+"měsíci";case"y":return n||a?"rok":"rokem";case"yy":return n||a?o+(t(e)?"roky":"let"):o+"lety"}}var r="leden_únor_březen_duben_květen_červen_červenec_srpen_září_říjen_listopad_prosinec".split("_"),a="led_úno_bře_dub_kvě_čvn_čvc_srp_zář_říj_lis_pro".split("_"),o=e.defineLocale("cs",{months:r,monthsShort:a,monthsParse:function(e,t){var n,r=[];for(n=0;12>n;n++)r[n]=new RegExp("^"+e[n]+"$|^"+t[n]+"$","i");return r}(r,a),weekdays:"neděle_pondělí_úterý_středa_čtvrtek_pátek_sobota".split("_"),weekdaysShort:"ne_po_út_st_čt_pá_so".split("_"),weekdaysMin:"ne_po_út_st_čt_pá_so".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD.MM.YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd D. 
MMMM YYYY H:mm"},calendar:{sameDay:"[dnes v] LT",nextDay:"[zítra v] LT",nextWeek:function(){switch(this.day()){case 0:return"[v neděli v] LT";case 1:case 2:return"[v] dddd [v] LT";case 3:return"[ve středu v] LT";case 4:return"[ve čtvrtek v] LT";case 5:return"[v pátek v] LT";case 6:return"[v sobotu v] LT"}},lastDay:"[včera v] LT",lastWeek:function(){switch(this.day()){case 0:return"[minulou neděli v] LT";case 1:case 2:return"[minulé] dddd [v] LT";case 3:return"[minulou středu v] LT";case 4:case 5:return"[minulý] dddd [v] LT";case 6:return"[minulou sobotu v] LT"}},sameElse:"L"},relativeTime:{future:"za %s",past:"před %s",s:n,m:n,mm:n,h:n,hh:n,d:n,dd:n,M:n,MM:n,y:n,yy:n},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return o})},function(e,t,n){
//! moment.js locale configuration
//! locale : chuvash (cv)
//! author : Anatoly Mironov : https://github.com/mirontoli
/* Chuvash (cv): relativeTime.future is a function choosing the suffix
   (-рен/-тан/-ран) by the trailing unit word of the formatted string;
   "-мӗш" ordinals; DD-MM-YYYY short date; week starts Monday
   (dow:1, doy:7). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("cv",{months:"кӑрлач_нарӑс_пуш_ака_май_ҫӗртме_утӑ_ҫурла_авӑн_юпа_чӳк_раштав".split("_"),monthsShort:"кӑр_нар_пуш_ака_май_ҫӗр_утӑ_ҫур_авн_юпа_чӳк_раш".split("_"),weekdays:"вырсарникун_тунтикун_ытларикун_юнкун_кӗҫнерникун_эрнекун_шӑматкун".split("_"),weekdaysShort:"выр_тун_ытл_юн_кӗҫ_эрн_шӑм".split("_"),weekdaysMin:"вр_тн_ыт_юн_кҫ_эр_шм".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD-MM-YYYY",LL:"YYYY [ҫулхи] MMMM [уйӑхӗн] D[-мӗшӗ]",LLL:"YYYY [ҫулхи] MMMM [уйӑхӗн] D[-мӗшӗ], HH:mm",LLLL:"dddd, YYYY [ҫулхи] MMMM [уйӑхӗн] D[-мӗшӗ], HH:mm"},calendar:{sameDay:"[Паян] LT [сехетре]",nextDay:"[Ыран] LT [сехетре]",lastDay:"[Ӗнер] LT [сехетре]",nextWeek:"[Ҫитес] dddd LT [сехетре]",lastWeek:"[Иртнӗ] dddd LT [сехетре]",sameElse:"L"},relativeTime:{future:function(e){var t=/сехет$/i.exec(e)?"рен":/ҫул$/i.exec(e)?"тан":"ран";return e+t},past:"%s каялла",s:"пӗр-ик ҫеккунт",m:"пӗр минут",mm:"%d минут",h:"пӗр сехет",hh:"%d сехет",d:"пӗр кун",dd:"%d кун",M:"пӗр уйӑх",MM:"%d уйӑх",y:"пӗр ҫул",yy:"%d ҫул"},ordinalParse:/\d{1,2}-мӗш/,ordinal:"%d-мӗш",week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Welsh (cy)
//! author : Robert Allen
/* Welsh (cy): ordinal() looks up suffixes 1-20 in table r, then uses
   "fed" for 40/50/60/80/100 and "ain" for other values above 20.
   Standard DD/MM/YYYY formats; week starts Monday (dow:1, doy:4). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("cy",{months:"Ionawr_Chwefror_Mawrth_Ebrill_Mai_Mehefin_Gorffennaf_Awst_Medi_Hydref_Tachwedd_Rhagfyr".split("_"),monthsShort:"Ion_Chwe_Maw_Ebr_Mai_Meh_Gor_Aws_Med_Hyd_Tach_Rhag".split("_"),weekdays:"Dydd Sul_Dydd Llun_Dydd Mawrth_Dydd Mercher_Dydd Iau_Dydd Gwener_Dydd Sadwrn".split("_"),weekdaysShort:"Sul_Llun_Maw_Mer_Iau_Gwe_Sad".split("_"),weekdaysMin:"Su_Ll_Ma_Me_Ia_Gw_Sa".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[Heddiw am] LT",nextDay:"[Yfory am] LT",nextWeek:"dddd [am] LT",lastDay:"[Ddoe am] LT",lastWeek:"dddd [diwethaf am] LT",sameElse:"L"},relativeTime:{future:"mewn %s",past:"%s yn ôl",s:"ychydig eiliadau",m:"munud",mm:"%d munud",h:"awr",hh:"%d awr",d:"diwrnod",dd:"%d diwrnod",M:"mis",MM:"%d mis",y:"blwyddyn",yy:"%d flynedd"},ordinalParse:/\d{1,2}(fed|ain|af|il|ydd|ed|eg)/,ordinal:function(e){var t=e,n="",r=["","af","il","ydd","ydd","ed","ed","ed","fed","fed","fed","eg","fed","eg","eg","fed","eg","eg","fed","eg","fed"];return t>20?n=40===t||50===t||60===t||80===t||100===t?"fed":"ain":t>0&&(n=r[t]),e+n},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : danish (da)
//! author : Ulrik Nielsen : https://github.com/mrbase
/* Danish (da): plain string tables throughout (no plural-form helpers);
   "%d." ordinals; 24h HH:mm formats; week starts Monday (dow:1, doy:4). */
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("da",{months:"januar_februar_marts_april_maj_juni_juli_august_september_oktober_november_december".split("_"),monthsShort:"jan_feb_mar_apr_maj_jun_jul_aug_sep_okt_nov_dec".split("_"),weekdays:"søndag_mandag_tirsdag_onsdag_torsdag_fredag_lørdag".split("_"),weekdaysShort:"søn_man_tir_ons_tor_fre_lør".split("_"),weekdaysMin:"sø_ma_ti_on_to_fr_lø".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY HH:mm",LLLL:"dddd [d.] D. MMMM YYYY HH:mm"},calendar:{sameDay:"[I dag kl.] LT",nextDay:"[I morgen kl.] LT",nextWeek:"dddd [kl.] LT",lastDay:"[I går kl.] LT",lastWeek:"[sidste] dddd [kl] LT",sameElse:"L"},relativeTime:{future:"om %s",past:"%s siden",s:"få sekunder",m:"et minut",mm:"%d minutter",h:"en time",hh:"%d timer",d:"en dag",dd:"%d dage",M:"en måned",MM:"%d måneder",y:"et år",yy:"%d år"},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : german (de)
//! author : lluchs : https://github.com/lluchs
//! author: Menelion Elensúle: https://github.com/Oire
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){var a={m:["eine Minute","einer Minute"],h:["eine Stunde","einer Stunde"],d:["ein Tag","einem Tag"],dd:[e+" Tage",e+" Tagen"],M:["ein Monat","einem Monat"],MM:[e+" Monate",e+" Monaten"],y:["ein Jahr","einem Jahr"],yy:[e+" Jahre",e+" Jahren"]};return t?a[n][0]:a[n][1]}var n=e.defineLocale("de",{months:"Januar_Februar_März_April_Mai_Juni_Juli_August_September_Oktober_November_Dezember".split("_"),monthsShort:"Jan._Febr._Mrz._Apr._Mai_Jun._Jul._Aug._Sept._Okt._Nov._Dez.".split("_"),weekdays:"Sonntag_Montag_Dienstag_Mittwoch_Donnerstag_Freitag_Samstag".split("_"),weekdaysShort:"So._Mo._Di._Mi._Do._Fr._Sa.".split("_"),weekdaysMin:"So_Mo_Di_Mi_Do_Fr_Sa".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY HH:mm",LLLL:"dddd, D. MMMM YYYY HH:mm"},calendar:{sameDay:"[Heute um] LT [Uhr]",sameElse:"L",nextDay:"[Morgen um] LT [Uhr]",nextWeek:"dddd [um] LT [Uhr]",lastDay:"[Gestern um] LT [Uhr]",lastWeek:"[letzten] dddd [um] LT [Uhr]"},relativeTime:{future:"in %s",past:"vor %s",s:"ein paar Sekunden",m:t,mm:"%d Minuten",h:t,hh:"%d Stunden",d:t,dd:t,M:t,MM:t,y:t,yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : austrian german (de-at)
//! author : lluchs : https://github.com/lluchs
//! author: Menelion Elensúle: https://github.com/Oire
//! author : Martin Groller : https://github.com/MadMG
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){var a={m:["eine Minute","einer Minute"],h:["eine Stunde","einer Stunde"],d:["ein Tag","einem Tag"],dd:[e+" Tage",e+" Tagen"],M:["ein Monat","einem Monat"],MM:[e+" Monate",e+" Monaten"],y:["ein Jahr","einem Jahr"],yy:[e+" Jahre",e+" Jahren"]};return t?a[n][0]:a[n][1]}var n=e.defineLocale("de-at",{months:"Jänner_Februar_März_April_Mai_Juni_Juli_August_September_Oktober_November_Dezember".split("_"),monthsShort:"Jän._Febr._Mrz._Apr._Mai_Jun._Jul._Aug._Sept._Okt._Nov._Dez.".split("_"),weekdays:"Sonntag_Montag_Dienstag_Mittwoch_Donnerstag_Freitag_Samstag".split("_"),weekdaysShort:"So._Mo._Di._Mi._Do._Fr._Sa.".split("_"),weekdaysMin:"So_Mo_Di_Mi_Do_Fr_Sa".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY HH:mm",LLLL:"dddd, D. MMMM YYYY HH:mm"},calendar:{sameDay:"[Heute um] LT [Uhr]",sameElse:"L",nextDay:"[Morgen um] LT [Uhr]",nextWeek:"dddd [um] LT [Uhr]",lastDay:"[Gestern um] LT [Uhr]",lastWeek:"[letzten] dddd [um] LT [Uhr]"},relativeTime:{future:"in %s",past:"vor %s",s:"ein paar Sekunden",m:t,mm:"%d Minuten",h:t,hh:"%d Stunden",d:t,dd:t,M:t,MM:t,y:t,yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : modern greek (el)
//! author : Aggelos Karalias : https://github.com/mehiel
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("el",{monthsNominativeEl:"Ιανουάριος_Φεβρουάριος_Μάρτιος_Απρίλιος_Μάιος_Ιούνιος_Ιούλιος_Αύγουστος_Σεπτέμβριος_Οκτώβριος_Νοέμβριος_Δεκέμβριος".split("_"),monthsGenitiveEl:"Ιανουαρίου_Φεβρουαρίου_Μαρτίου_Απριλίου_Μαΐου_Ιουνίου_Ιουλίου_Αυγούστου_Σεπτεμβρίου_Οκτωβρίου_Νοεμβρίου_Δεκεμβρίου".split("_"),months:function(e,t){return/D/.test(t.substring(0,t.indexOf("MMMM")))?this._monthsGenitiveEl[e.month()]:this._monthsNominativeEl[e.month()]},monthsShort:"Ιαν_Φεβ_Μαρ_Απρ_Μαϊ_Ιουν_Ιουλ_Αυγ_Σεπ_Οκτ_Νοε_Δεκ".split("_"),weekdays:"Κυριακή_Δευτέρα_Τρίτη_Τετάρτη_Πέμπτη_Παρασκευή_Σάββατο".split("_"),weekdaysShort:"Κυρ_Δευ_Τρι_Τετ_Πεμ_Παρ_Σαβ".split("_"),weekdaysMin:"Κυ_Δε_Τρ_Τε_Πε_Πα_Σα".split("_"),meridiem:function(e,t,n){return e>11?n?"μμ":"ΜΜ":n?"πμ":"ΠΜ"},isPM:function(e){return"μ"===(e+"").toLowerCase()[0]},meridiemParse:/[ΠΜ]\.?Μ?\.?/i,longDateFormat:{LT:"h:mm A",LTS:"h:mm:ss A",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY h:mm A",LLLL:"dddd, D MMMM YYYY h:mm A"},calendarEl:{sameDay:"[Σήμερα {}] LT",nextDay:"[Αύριο {}] LT",nextWeek:"dddd [{}] LT",lastDay:"[Χθες {}] LT",lastWeek:function(){switch(this.day()){case 6:return"[το προηγούμενο] dddd [{}] LT";default:return"[την προηγούμενη] dddd [{}] LT"}},sameElse:"L"},calendar:function(e,t){var n=this._calendarEl[e],r=t&&t.hours();return"function"==typeof n&&(n=n.apply(t)),n.replace("{}",r%12===1?"στη":"στις")},relativeTime:{future:"σε %s",past:"%s πριν",s:"λίγα δευτερόλεπτα",m:"ένα λεπτό",mm:"%d λεπτά",h:"μία ώρα",hh:"%d ώρες",d:"μία μέρα",dd:"%d μέρες",M:"ένας μήνας",MM:"%d μήνες",y:"ένας χρόνος",yy:"%d χρόνια"},ordinalParse:/\d{1,2}η/,ordinal:"%dη",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : australian english (en-au)
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("en-au",{months:"January_February_March_April_May_June_July_August_September_October_November_December".split("_"),monthsShort:"Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),weekdays:"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),weekdaysShort:"Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),weekdaysMin:"Su_Mo_Tu_We_Th_Fr_Sa".split("_"),longDateFormat:{LT:"h:mm A",LTS:"h:mm:ss A",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY h:mm A",LLLL:"dddd, D MMMM YYYY h:mm A"},calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},relativeTime:{future:"in %s",past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},ordinalParse:/\d{1,2}(st|nd|rd|th)/,ordinal:function(e){var t=e%10,n=1===~~(e%100/10)?"th":1===t?"st":2===t?"nd":3===t?"rd":"th";return e+n},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : canadian english (en-ca)
//! author : Jonathan Abourbih : https://github.com/jonbca
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("en-ca",{months:"January_February_March_April_May_June_July_August_September_October_November_December".split("_"),monthsShort:"Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),weekdays:"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),weekdaysShort:"Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),weekdaysMin:"Su_Mo_Tu_We_Th_Fr_Sa".split("_"),longDateFormat:{LT:"h:mm A",LTS:"h:mm:ss A",L:"YYYY-MM-DD",LL:"D MMMM, YYYY",LLL:"D MMMM, YYYY h:mm A",LLLL:"dddd, D MMMM, YYYY h:mm A"},calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},relativeTime:{future:"in %s",past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},ordinalParse:/\d{1,2}(st|nd|rd|th)/,ordinal:function(e){var t=e%10,n=1===~~(e%100/10)?"th":1===t?"st":2===t?"nd":3===t?"rd":"th";return e+n}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : great britain english (en-gb)
//! author : Chris Gedrim : https://github.com/chrisgedrim
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("en-gb",{months:"January_February_March_April_May_June_July_August_September_October_November_December".split("_"),monthsShort:"Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),weekdays:"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),weekdaysShort:"Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),weekdaysMin:"Su_Mo_Tu_We_Th_Fr_Sa".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},relativeTime:{future:"in %s",past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},ordinalParse:/\d{1,2}(st|nd|rd|th)/,ordinal:function(e){var t=e%10,n=1===~~(e%100/10)?"th":1===t?"st":2===t?"nd":3===t?"rd":"th";return e+n},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : esperanto (eo)
//! author : Colin Dean : https://github.com/colindean
//! komento: Mi estas malcerta se mi korekte traktis akuzativojn en tiu traduko.
//! Se ne, bonvolu korekti kaj avizi min por ke mi povas lerni!
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("eo",{months:"januaro_februaro_marto_aprilo_majo_junio_julio_aŭgusto_septembro_oktobro_novembro_decembro".split("_"),monthsShort:"jan_feb_mar_apr_maj_jun_jul_aŭg_sep_okt_nov_dec".split("_"),weekdays:"Dimanĉo_Lundo_Mardo_Merkredo_Ĵaŭdo_Vendredo_Sabato".split("_"),weekdaysShort:"Dim_Lun_Mard_Merk_Ĵaŭ_Ven_Sab".split("_"),weekdaysMin:"Di_Lu_Ma_Me_Ĵa_Ve_Sa".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"YYYY-MM-DD",LL:"D[-an de] MMMM, YYYY",LLL:"D[-an de] MMMM, YYYY HH:mm",LLLL:"dddd, [la] D[-an de] MMMM, YYYY HH:mm"},meridiemParse:/[ap]\.t\.m/i,isPM:function(e){return"p"===e.charAt(0).toLowerCase()},meridiem:function(e,t,n){return e>11?n?"p.t.m.":"P.T.M.":n?"a.t.m.":"A.T.M."},calendar:{sameDay:"[Hodiaŭ je] LT",nextDay:"[Morgaŭ je] LT",nextWeek:"dddd [je] LT",lastDay:"[Hieraŭ je] LT",lastWeek:"[pasinta] dddd [je] LT",sameElse:"L"},relativeTime:{future:"je %s",past:"antaŭ %s",s:"sekundoj",m:"minuto",mm:"%d minutoj",h:"horo",hh:"%d horoj",d:"tago",dd:"%d tagoj",M:"monato",MM:"%d monatoj",y:"jaro",yy:"%d jaroj"},ordinalParse:/\d{1,2}a/,ordinal:"%da",week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : spanish (es)
//! author : Julio Napurí : https://github.com/julionc
!function(e,t){t(n(166))}(this,function(e){"use strict";var t="Ene._Feb._Mar._Abr._May._Jun._Jul._Ago._Sep._Oct._Nov._Dic.".split("_"),n="Ene_Feb_Mar_Abr_May_Jun_Jul_Ago_Sep_Oct_Nov_Dic".split("_"),r=e.defineLocale("es",{months:"Enero_Febrero_Marzo_Abril_Mayo_Junio_Julio_Agosto_Septiembre_Octubre_Noviembre_Diciembre".split("_"),monthsShort:function(e,r){return/-MMM-/.test(r)?n[e.month()]:t[e.month()]},weekdays:"Domingo_Lunes_Martes_Miércoles_Jueves_Viernes_Sábado".split("_"),weekdaysShort:"Dom._Lun._Mar._Mié._Jue._Vie._Sáb.".split("_"),weekdaysMin:"Do_Lu_Ma_Mi_Ju_Vi_Sá".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD/MM/YYYY",LL:"D [de] MMMM [de] YYYY",LLL:"D [de] MMMM [de] YYYY H:mm",LLLL:"dddd, D [de] MMMM [de] YYYY H:mm"},calendar:{sameDay:function(){return"[hoy a la"+(1!==this.hours()?"s":"")+"] LT"},nextDay:function(){return"[mañana a la"+(1!==this.hours()?"s":"")+"] LT"},nextWeek:function(){return"dddd [a la"+(1!==this.hours()?"s":"")+"] LT"},lastDay:function(){return"[ayer a la"+(1!==this.hours()?"s":"")+"] LT"},lastWeek:function(){return"[el] dddd [pasado a la"+(1!==this.hours()?"s":"")+"] LT"},sameElse:"L"},relativeTime:{future:"en %s",past:"hace %s",s:"unos segundos",m:"un minuto",mm:"%d minutos",h:"una hora",hh:"%d horas",d:"un día",dd:"%d días",M:"un mes",MM:"%d meses",y:"un año",yy:"%d años"},ordinalParse:/\d{1,2}º/,ordinal:"%dº",week:{dow:1,doy:4}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : estonian (et)
//! author : Henry Kehlmann : https://github.com/madhenry
//! improvements : Illimar Tambek : https://github.com/ragulka
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){var a={s:["mõne sekundi","mõni sekund","paar sekundit"],m:["ühe minuti","üks minut"],mm:[e+" minuti",e+" minutit"],h:["ühe tunni","tund aega","üks tund"],hh:[e+" tunni",e+" tundi"],d:["ühe päeva","üks päev"],M:["kuu aja","kuu aega","üks kuu"],MM:[e+" kuu",e+" kuud"],y:["ühe aasta","aasta","üks aasta"],yy:[e+" aasta",e+" aastat"]};return t?a[n][2]?a[n][2]:a[n][1]:r?a[n][0]:a[n][1]}var n=e.defineLocale("et",{months:"jaanuar_veebruar_märts_aprill_mai_juuni_juuli_august_september_oktoober_november_detsember".split("_"),monthsShort:"jaan_veebr_märts_apr_mai_juuni_juuli_aug_sept_okt_nov_dets".split("_"),weekdays:"pühapäev_esmaspäev_teisipäev_kolmapäev_neljapäev_reede_laupäev".split("_"),weekdaysShort:"P_E_T_K_N_R_L".split("_"),weekdaysMin:"P_E_T_K_N_R_L".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD.MM.YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd, D. MMMM YYYY H:mm"},calendar:{sameDay:"[Täna,] LT",nextDay:"[Homme,] LT",nextWeek:"[Järgmine] dddd LT",lastDay:"[Eile,] LT",lastWeek:"[Eelmine] dddd LT",sameElse:"L"},relativeTime:{future:"%s pärast",past:"%s tagasi",s:t,m:t,mm:t,h:t,hh:t,d:t,dd:"%d päeva",M:t,MM:t,y:t,yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : euskara (eu)
//! author : Eneko Illarramendi : https://github.com/eillarra
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("eu",{months:"urtarrila_otsaila_martxoa_apirila_maiatza_ekaina_uztaila_abuztua_iraila_urria_azaroa_abendua".split("_"),monthsShort:"urt._ots._mar._api._mai._eka._uzt._abu._ira._urr._aza._abe.".split("_"),weekdays:"igandea_astelehena_asteartea_asteazkena_osteguna_ostirala_larunbata".split("_"),weekdaysShort:"ig._al._ar._az._og._ol._lr.".split("_"),weekdaysMin:"ig_al_ar_az_og_ol_lr".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"YYYY-MM-DD",LL:"YYYY[ko] MMMM[ren] D[a]",LLL:"YYYY[ko] MMMM[ren] D[a] HH:mm",LLLL:"dddd, YYYY[ko] MMMM[ren] D[a] HH:mm",l:"YYYY-M-D",ll:"YYYY[ko] MMM D[a]",lll:"YYYY[ko] MMM D[a] HH:mm",llll:"ddd, YYYY[ko] MMM D[a] HH:mm"},calendar:{sameDay:"[gaur] LT[etan]",nextDay:"[bihar] LT[etan]",nextWeek:"dddd LT[etan]",lastDay:"[atzo] LT[etan]",lastWeek:"[aurreko] dddd LT[etan]",sameElse:"L"},relativeTime:{future:"%s barru",past:"duela %s",s:"segundo batzuk",m:"minutu bat",mm:"%d minutu",h:"ordu bat",hh:"%d ordu",d:"egun bat",dd:"%d egun",M:"hilabete bat",MM:"%d hilabete",y:"urte bat",yy:"%d urte"},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Persian (fa)
//! author : Ebrahim Byagowi : https://github.com/ebraminio
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"۱",2:"۲",3:"۳",4:"۴",5:"۵",6:"۶",7:"۷",8:"۸",9:"۹",0:"۰"},n={"۱":"1","۲":"2","۳":"3","۴":"4","۵":"5","۶":"6","۷":"7","۸":"8","۹":"9","۰":"0"},r=e.defineLocale("fa",{months:"ژانویه_فوریه_مارس_آوریل_مه_ژوئن_ژوئیه_اوت_سپتامبر_اکتبر_نوامبر_دسامبر".split("_"),monthsShort:"ژانویه_فوریه_مارس_آوریل_مه_ژوئن_ژوئیه_اوت_سپتامبر_اکتبر_نوامبر_دسامبر".split("_"),weekdays:"یکشنبه_دوشنبه_سهشنبه_چهارشنبه_پنجشنبه_جمعه_شنبه".split("_"),weekdaysShort:"یکشنبه_دوشنبه_سهشنبه_چهارشنبه_پنجشنبه_جمعه_شنبه".split("_"),weekdaysMin:"ی_د_س_چ_پ_ج_ش".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},meridiemParse:/قبل از ظهر|بعد از ظهر/,isPM:function(e){return/بعد از ظهر/.test(e)},meridiem:function(e,t,n){return 12>e?"قبل از ظهر":"بعد از ظهر"},calendar:{sameDay:"[امروز ساعت] LT",nextDay:"[فردا ساعت] LT",nextWeek:"dddd [ساعت] LT",lastDay:"[دیروز ساعت] LT",lastWeek:"dddd [پیش] [ساعت] LT",sameElse:"L"},relativeTime:{future:"در %s",past:"%s پیش",s:"چندین ثانیه",m:"یک دقیقه",mm:"%d دقیقه",h:"یک ساعت",hh:"%d ساعت",d:"یک روز",dd:"%d روز",M:"یک ماه",MM:"%d ماه",y:"یک سال",yy:"%d سال"},preparse:function(e){return e.replace(/[۰-۹]/g,function(e){return n[e]}).replace(/،/g,",")},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]}).replace(/,/g,"،")},ordinalParse:/\d{1,2}م/,ordinal:"%dم",week:{dow:6,doy:12}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : finnish (fi)
//! author : Tarmo Aidantausta : https://github.com/bleadof
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,r,a){var o="";switch(r){case"s":return a?"muutaman sekunnin":"muutama sekunti";case"m":return a?"minuutin":"minuutti";case"mm":o=a?"minuutin":"minuuttia";break;case"h":return a?"tunnin":"tunti";case"hh":o=a?"tunnin":"tuntia";break;case"d":return a?"päivän":"päivä";case"dd":o=a?"päivän":"päivää";break;case"M":return a?"kuukauden":"kuukausi";case"MM":o=a?"kuukauden":"kuukautta";break;case"y":return a?"vuoden":"vuosi";case"yy":o=a?"vuoden":"vuotta"}return o=n(e,a)+" "+o}function n(e,t){return 10>e?t?a[e]:r[e]:e}var r="nolla yksi kaksi kolme neljä viisi kuusi seitsemän kahdeksan yhdeksän".split(" "),a=["nolla","yhden","kahden","kolmen","neljän","viiden","kuuden",r[7],r[8],r[9]],o=e.defineLocale("fi",{months:"tammikuu_helmikuu_maaliskuu_huhtikuu_toukokuu_kesäkuu_heinäkuu_elokuu_syyskuu_lokakuu_marraskuu_joulukuu".split("_"),monthsShort:"tammi_helmi_maalis_huhti_touko_kesä_heinä_elo_syys_loka_marras_joulu".split("_"),weekdays:"sunnuntai_maanantai_tiistai_keskiviikko_torstai_perjantai_lauantai".split("_"),weekdaysShort:"su_ma_ti_ke_to_pe_la".split("_"),weekdaysMin:"su_ma_ti_ke_to_pe_la".split("_"),longDateFormat:{LT:"HH.mm",LTS:"HH.mm.ss",L:"DD.MM.YYYY",LL:"Do MMMM[ta] YYYY",LLL:"Do MMMM[ta] YYYY, [klo] HH.mm",LLLL:"dddd, Do MMMM[ta] YYYY, [klo] HH.mm",l:"D.M.YYYY",ll:"Do MMM YYYY",lll:"Do MMM YYYY, [klo] HH.mm",llll:"ddd, Do MMM YYYY, [klo] HH.mm"},calendar:{sameDay:"[tänään] [klo] LT",nextDay:"[huomenna] [klo] LT",nextWeek:"dddd [klo] LT",lastDay:"[eilen] [klo] LT",lastWeek:"[viime] dddd[na] [klo] LT",sameElse:"L"},relativeTime:{future:"%s päästä",past:"%s sitten",s:t,m:t,mm:t,h:t,hh:t,d:t,dd:t,M:t,MM:t,y:t,yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return o})},function(e,t,n){
//! moment.js locale configuration
//! locale : faroese (fo)
//! author : Ragnar Johannesen : https://github.com/ragnar123
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("fo",{months:"januar_februar_mars_apríl_mai_juni_juli_august_september_oktober_november_desember".split("_"),monthsShort:"jan_feb_mar_apr_mai_jun_jul_aug_sep_okt_nov_des".split("_"),weekdays:"sunnudagur_mánadagur_týsdagur_mikudagur_hósdagur_fríggjadagur_leygardagur".split("_"),weekdaysShort:"sun_mán_týs_mik_hós_frí_ley".split("_"),weekdaysMin:"su_má_tý_mi_hó_fr_le".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D. MMMM, YYYY HH:mm"},calendar:{sameDay:"[Í dag kl.] LT",nextDay:"[Í morgin kl.] LT",nextWeek:"dddd [kl.] LT",lastDay:"[Í gjár kl.] LT",lastWeek:"[síðstu] dddd [kl] LT",sameElse:"L"},relativeTime:{future:"um %s",past:"%s síðani",s:"fá sekund",m:"ein minutt",mm:"%d minuttir",h:"ein tími",hh:"%d tímar",d:"ein dagur",dd:"%d dagar",M:"ein mánaði",MM:"%d mánaðir",y:"eitt ár",yy:"%d ár"},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : french (fr)
//! author : John Fischer : https://github.com/jfroffice
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("fr",{months:"janvier_février_mars_avril_mai_juin_juillet_août_septembre_octobre_novembre_décembre".split("_"),monthsShort:"janv._févr._mars_avr._mai_juin_juil._août_sept._oct._nov._déc.".split("_"),weekdays:"dimanche_lundi_mardi_mercredi_jeudi_vendredi_samedi".split("_"),weekdaysShort:"dim._lun._mar._mer._jeu._ven._sam.".split("_"),weekdaysMin:"Di_Lu_Ma_Me_Je_Ve_Sa".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[Aujourd'hui à] LT",nextDay:"[Demain à] LT",nextWeek:"dddd [à] LT",lastDay:"[Hier à] LT",lastWeek:"dddd [dernier à] LT",sameElse:"L"},relativeTime:{future:"dans %s",past:"il y a %s",s:"quelques secondes",m:"une minute",mm:"%d minutes",h:"une heure",hh:"%d heures",d:"un jour",dd:"%d jours",M:"un mois",MM:"%d mois",y:"un an",yy:"%d ans"},ordinalParse:/\d{1,2}(er|)/,ordinal:function(e){return e+(1===e?"er":"")},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : canadian french (fr-ca)
//! author : Jonathan Abourbih : https://github.com/jonbca
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("fr-ca",{months:"janvier_février_mars_avril_mai_juin_juillet_août_septembre_octobre_novembre_décembre".split("_"),monthsShort:"janv._févr._mars_avr._mai_juin_juil._août_sept._oct._nov._déc.".split("_"),weekdays:"dimanche_lundi_mardi_mercredi_jeudi_vendredi_samedi".split("_"),weekdaysShort:"dim._lun._mar._mer._jeu._ven._sam.".split("_"),weekdaysMin:"Di_Lu_Ma_Me_Je_Ve_Sa".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"YYYY-MM-DD",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[Aujourd'hui à] LT",nextDay:"[Demain à] LT",nextWeek:"dddd [à] LT",lastDay:"[Hier à] LT",lastWeek:"dddd [dernier à] LT",sameElse:"L"},relativeTime:{future:"dans %s",past:"il y a %s",s:"quelques secondes",m:"une minute",mm:"%d minutes",h:"une heure",hh:"%d heures",d:"un jour",dd:"%d jours",M:"un mois",MM:"%d mois",y:"un an",yy:"%d ans"},ordinalParse:/\d{1,2}(er|e)/,ordinal:function(e){return e+(1===e?"er":"e")}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : frisian (fy)
//! author : Robin van der Vliet : https://github.com/robin0van0der0v
!function(e,t){t(n(166))}(this,function(e){"use strict";var t="jan._feb._mrt._apr._mai_jun._jul._aug._sep._okt._nov._des.".split("_"),n="jan_feb_mrt_apr_mai_jun_jul_aug_sep_okt_nov_des".split("_"),r=e.defineLocale("fy",{months:"jannewaris_febrewaris_maart_april_maaie_juny_july_augustus_septimber_oktober_novimber_desimber".split("_"),monthsShort:function(e,r){return/-MMM-/.test(r)?n[e.month()]:t[e.month()]},weekdays:"snein_moandei_tiisdei_woansdei_tongersdei_freed_sneon".split("_"),weekdaysShort:"si._mo._ti._wo._to._fr._so.".split("_"),weekdaysMin:"Si_Mo_Ti_Wo_To_Fr_So".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD-MM-YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[hjoed om] LT",nextDay:"[moarn om] LT",nextWeek:"dddd [om] LT",lastDay:"[juster om] LT",lastWeek:"[ôfrûne] dddd [om] LT",sameElse:"L"},relativeTime:{future:"oer %s",past:"%s lyn",s:"in pear sekonden",m:"ien minút",mm:"%d minuten",h:"ien oere",hh:"%d oeren",d:"ien dei",dd:"%d dagen",M:"ien moanne",MM:"%d moannen",y:"ien jier",yy:"%d jierren"},ordinalParse:/\d{1,2}(ste|de)/,ordinal:function(e){return e+(1===e||8===e||e>=20?"ste":"de")},week:{dow:1,doy:4}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : galician (gl)
//! author : Juan G. Hurtado : https://github.com/juanghurtado
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("gl",{months:"Xaneiro_Febreiro_Marzo_Abril_Maio_Xuño_Xullo_Agosto_Setembro_Outubro_Novembro_Decembro".split("_"),monthsShort:"Xan._Feb._Mar._Abr._Mai._Xuñ._Xul._Ago._Set._Out._Nov._Dec.".split("_"),weekdays:"Domingo_Luns_Martes_Mércores_Xoves_Venres_Sábado".split("_"),weekdaysShort:"Dom._Lun._Mar._Mér._Xov._Ven._Sáb.".split("_"),weekdaysMin:"Do_Lu_Ma_Mé_Xo_Ve_Sá".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY H:mm",LLLL:"dddd D MMMM YYYY H:mm"},calendar:{sameDay:function(){return"[hoxe "+(1!==this.hours()?"ás":"á")+"] LT"},nextDay:function(){return"[mañá "+(1!==this.hours()?"ás":"á")+"] LT"},nextWeek:function(){return"dddd ["+(1!==this.hours()?"ás":"a")+"] LT"},lastDay:function(){return"[onte "+(1!==this.hours()?"á":"a")+"] LT"},lastWeek:function(){return"[o] dddd [pasado "+(1!==this.hours()?"ás":"a")+"] LT"},sameElse:"L"},relativeTime:{future:function(e){return"uns segundos"===e?"nuns segundos":"en "+e},past:"hai %s",s:"uns segundos",m:"un minuto",mm:"%d minutos",h:"unha hora",hh:"%d horas",d:"un día",dd:"%d días",M:"un mes",MM:"%d meses",y:"un ano",yy:"%d anos"},ordinalParse:/\d{1,2}º/,ordinal:"%dº",week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Hebrew (he)
//! author : Tomer Cohen : https://github.com/tomer
//! author : Moshe Simantov : https://github.com/DevelopmentIL
//! author : Tal Ater : https://github.com/TalAter
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("he",{months:"ינואר_פברואר_מרץ_אפריל_מאי_יוני_יולי_אוגוסט_ספטמבר_אוקטובר_נובמבר_דצמבר".split("_"),monthsShort:"ינו׳_פבר׳_מרץ_אפר׳_מאי_יוני_יולי_אוג׳_ספט׳_אוק׳_נוב׳_דצמ׳".split("_"),weekdays:"ראשון_שני_שלישי_רביעי_חמישי_שישי_שבת".split("_"),weekdaysShort:"א׳_ב׳_ג׳_ד׳_ה׳_ו׳_ש׳".split("_"),weekdaysMin:"א_ב_ג_ד_ה_ו_ש".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D [ב]MMMM YYYY",LLL:"D [ב]MMMM YYYY HH:mm",LLLL:"dddd, D [ב]MMMM YYYY HH:mm",l:"D/M/YYYY",ll:"D MMM YYYY",lll:"D MMM YYYY HH:mm",llll:"ddd, D MMM YYYY HH:mm"},calendar:{sameDay:"[היום ב־]LT",nextDay:"[מחר ב־]LT",nextWeek:"dddd [בשעה] LT",lastDay:"[אתמול ב־]LT",lastWeek:"[ביום] dddd [האחרון בשעה] LT",sameElse:"L"},relativeTime:{future:"בעוד %s",past:"לפני %s",s:"מספר שניות",m:"דקה",mm:"%d דקות",h:"שעה",hh:function(e){return 2===e?"שעתיים":e+" שעות"},d:"יום",dd:function(e){return 2===e?"יומיים":e+" ימים"},M:"חודש",MM:function(e){return 2===e?"חודשיים":e+" חודשים"},y:"שנה",yy:function(e){return 2===e?"שנתיים":e%10===0&&10!==e?e+" שנה":e+" שנים"}}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : hindi (hi)
//! author : Mayank Singhal : https://github.com/mayanksinghal
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"१",2:"२",3:"३",4:"४",5:"५",6:"६",7:"७",8:"८",9:"९",0:"०"},n={"१":"1","२":"2","३":"3","४":"4","५":"5","६":"6","७":"7","८":"8","९":"9","०":"0"},r=e.defineLocale("hi",{months:"जनवरी_फ़रवरी_मार्च_अप्रैल_मई_जून_जुलाई_अगस्त_सितम्बर_अक्टूबर_नवम्बर_दिसम्बर".split("_"),monthsShort:"जन._फ़र._मार्च_अप्रै._मई_जून_जुल._अग._सित._अक्टू._नव._दिस.".split("_"),weekdays:"रविवार_सोमवार_मंगलवार_बुधवार_गुरूवार_शुक्रवार_शनिवार".split("_"),weekdaysShort:"रवि_सोम_मंगल_बुध_गुरू_शुक्र_शनि".split("_"),weekdaysMin:"र_सो_मं_बु_गु_शु_श".split("_"),longDateFormat:{LT:"A h:mm बजे",LTS:"A h:mm:ss बजे",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY, A h:mm बजे",LLLL:"dddd, D MMMM YYYY, A h:mm बजे"},calendar:{sameDay:"[आज] LT",nextDay:"[कल] LT",nextWeek:"dddd, LT",lastDay:"[कल] LT",lastWeek:"[पिछले] dddd, LT",sameElse:"L"},relativeTime:{future:"%s में",past:"%s पहले",s:"कुछ ही क्षण",m:"एक मिनट",mm:"%d मिनट",h:"एक घंटा",hh:"%d घंटे",d:"एक दिन",dd:"%d दिन",M:"एक महीने",MM:"%d महीने",y:"एक वर्ष",yy:"%d वर्ष"},preparse:function(e){return e.replace(/[१२३४५६७८९०]/g,function(e){return n[e]})},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]})},meridiemParse:/रात|सुबह|दोपहर|शाम/,meridiemHour:function(e,t){return 12===e&&(e=0),"रात"===t?4>e?e:e+12:"सुबह"===t?e:"दोपहर"===t?e>=10?e:e+12:"शाम"===t?e+12:void 0},meridiem:function(e,t,n){return 4>e?"रात":10>e?"सुबह":17>e?"दोपहर":20>e?"शाम":"रात"},week:{dow:0,doy:6}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : hrvatski (hr)
//! author : Bojan Marković : https://github.com/bmarkovic
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n){var r=e+" ";switch(n){case"m":return t?"jedna minuta":"jedne minute";case"mm":return r+=1===e?"minuta":2===e||3===e||4===e?"minute":"minuta";case"h":return t?"jedan sat":"jednog sata";case"hh":return r+=1===e?"sat":2===e||3===e||4===e?"sata":"sati";case"dd":return r+=1===e?"dan":"dana";case"MM":return r+=1===e?"mjesec":2===e||3===e||4===e?"mjeseca":"mjeseci";case"yy":return r+=1===e?"godina":2===e||3===e||4===e?"godine":"godina"}}var n=e.defineLocale("hr",{months:"siječanj_veljača_ožujak_travanj_svibanj_lipanj_srpanj_kolovoz_rujan_listopad_studeni_prosinac".split("_"),monthsShort:"sij._velj._ožu._tra._svi._lip._srp._kol._ruj._lis._stu._pro.".split("_"),weekdays:"nedjelja_ponedjeljak_utorak_srijeda_četvrtak_petak_subota".split("_"),weekdaysShort:"ned._pon._uto._sri._čet._pet._sub.".split("_"),weekdaysMin:"ne_po_ut_sr_če_pe_su".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD. MM. YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd, D. MMMM YYYY H:mm"},calendar:{sameDay:"[danas u] LT",nextDay:"[sutra u] LT",nextWeek:function(){switch(this.day()){case 0:return"[u] [nedjelju] [u] LT";case 3:return"[u] [srijedu] [u] LT";case 6:return"[u] [subotu] [u] LT";case 1:case 2:case 4:case 5:return"[u] dddd [u] LT"}},lastDay:"[jučer u] LT",lastWeek:function(){switch(this.day()){case 0:case 3:return"[prošlu] dddd [u] LT";case 6:return"[prošle] [subote] [u] LT";case 1:case 2:case 4:case 5:return"[prošli] dddd [u] LT"}},sameElse:"L"},relativeTime:{future:"za %s",past:"prije %s",s:"par sekundi",m:t,mm:t,h:t,hh:t,d:"dan",dd:t,M:"mjesec",MM:t,y:"godinu",yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : hungarian (hu)
//! author : Adam Brunner : https://github.com/adambrunner
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){var a=e;switch(n){case"s":return r||t?"néhány másodperc":"néhány másodperce";case"m":return"egy"+(r||t?" perc":" perce");case"mm":return a+(r||t?" perc":" perce");case"h":return"egy"+(r||t?" óra":" órája");case"hh":return a+(r||t?" óra":" órája");case"d":return"egy"+(r||t?" nap":" napja");case"dd":return a+(r||t?" nap":" napja");case"M":return"egy"+(r||t?" hónap":" hónapja");case"MM":return a+(r||t?" hónap":" hónapja");case"y":return"egy"+(r||t?" év":" éve");case"yy":return a+(r||t?" év":" éve")}return""}function n(e){return(e?"":"[múlt] ")+"["+r[this.day()]+"] LT[-kor]"}var r="vasárnap hétfőn kedden szerdán csütörtökön pénteken szombaton".split(" "),a=e.defineLocale("hu",{months:"január_február_március_április_május_június_július_augusztus_szeptember_október_november_december".split("_"),monthsShort:"jan_feb_márc_ápr_máj_jún_júl_aug_szept_okt_nov_dec".split("_"),weekdays:"vasárnap_hétfő_kedd_szerda_csütörtök_péntek_szombat".split("_"),weekdaysShort:"vas_hét_kedd_sze_csüt_pén_szo".split("_"),weekdaysMin:"v_h_k_sze_cs_p_szo".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"YYYY.MM.DD.",LL:"YYYY. MMMM D.",LLL:"YYYY. MMMM D. H:mm",LLLL:"YYYY. MMMM D., dddd H:mm"},meridiemParse:/de|du/i,isPM:function(e){return"u"===e.charAt(1).toLowerCase()},meridiem:function(e,t,n){return 12>e?n===!0?"de":"DE":n===!0?"du":"DU"},calendar:{sameDay:"[ma] LT[-kor]",nextDay:"[holnap] LT[-kor]",nextWeek:function(){return n.call(this,!0)},lastDay:"[tegnap] LT[-kor]",lastWeek:function(){return n.call(this,!1)},sameElse:"L"},relativeTime:{future:"%s múlva",past:"%s",s:t,m:t,mm:t,h:t,hh:t,d:t,dd:t,M:t,MM:t,y:t,yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return a})},function(e,t,n){
//! moment.js locale configuration
//! locale : Armenian (hy-am)
//! author : Armendarabyan : https://github.com/armendarabyan
// Armenian (hy-am) locale module. months is a function choosing nominative vs accusative
// forms by testing the format string; weekdays/monthsShort are lookup helpers.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t){var n={nominative:"հունվար_փետրվար_մարտ_ապրիլ_մայիս_հունիս_հուլիս_օգոստոս_սեպտեմբեր_հոկտեմբեր_նոյեմբեր_դեկտեմբեր".split("_"),accusative:"հունվարի_փետրվարի_մարտի_ապրիլի_մայիսի_հունիսի_հուլիսի_օգոստոսի_սեպտեմբերի_հոկտեմբերի_նոյեմբերի_դեկտեմբերի".split("_")},r=/D[oD]?(\[[^\[\]]*\]|\s+)+MMMM?/.test(t)?"accusative":"nominative";return n[r][e.month()]}function n(e,t){var n="հնվ_փտր_մրտ_ապր_մյս_հնս_հլս_օգս_սպտ_հկտ_նմբ_դկտ".split("_");return n[e.month()]}function r(e,t){var n="կիրակի_երկուշաբթի_երեքշաբթի_չորեքշաբթի_հինգշաբթի_ուրբաթ_շաբաթ".split("_");return n[e.day()]}var a=e.defineLocale("hy-am",{months:t,monthsShort:n,weekdays:r,weekdaysShort:"կրկ_երկ_երք_չրք_հնգ_ուրբ_շբթ".split("_"),weekdaysMin:"կրկ_երկ_երք_չրք_հնգ_ուրբ_շբթ".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY թ.",LLL:"D MMMM YYYY թ., HH:mm",LLLL:"dddd, D MMMM YYYY թ., HH:mm"},calendar:{sameDay:"[այսօր] LT",nextDay:"[վաղը] LT",lastDay:"[երեկ] LT",nextWeek:function(){return"dddd [օրը ժամը] LT"},lastWeek:function(){return"[անցած] dddd [օրը ժամը] LT"},sameElse:"L"},relativeTime:{future:"%s հետո",past:"%s առաջ",s:"մի քանի վայրկյան",m:"րոպե",mm:"%d րոպե",h:"ժամ",hh:"%d ժամ",d:"օր",dd:"%d օր",M:"ամիս",MM:"%d ամիս",y:"տարի",yy:"%d տարի"},meridiemParse:/գիշերվա|առավոտվա|ցերեկվա|երեկոյան/,isPM:function(e){return/^(ցերեկվա|երեկոյան)$/.test(e)},meridiem:function(e){return 4>e?"գիշերվա":12>e?"առավոտվա":17>e?"ցերեկվա":"երեկոյան"},ordinalParse:/\d{1,2}|\d{1,2}-(ին|րդ)/,ordinal:function(e,t){switch(t){case"DDD":case"w":case"W":case"DDDo":return 1===e?e+"-ին":e+"-րդ";default:return e}},week:{dow:1,doy:7}});return a})},function(e,t,n){
//! moment.js locale configuration
//! locale : Bahasa Indonesia (id)
//! author : Mohammad Satrio Utomo : https://github.com/tyok
//! reference: http://id.wikisource.org/wiki/Pedoman_Umum_Ejaan_Bahasa_Indonesia_yang_Disempurnakan
// Indonesian (id) locale module. meridiemHour maps the four day-period words
// (pagi/siang/sore/malam) back to a 24h hour when parsing.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("id",{months:"Januari_Februari_Maret_April_Mei_Juni_Juli_Agustus_September_Oktober_November_Desember".split("_"),monthsShort:"Jan_Feb_Mar_Apr_Mei_Jun_Jul_Ags_Sep_Okt_Nov_Des".split("_"),weekdays:"Minggu_Senin_Selasa_Rabu_Kamis_Jumat_Sabtu".split("_"),weekdaysShort:"Min_Sen_Sel_Rab_Kam_Jum_Sab".split("_"),weekdaysMin:"Mg_Sn_Sl_Rb_Km_Jm_Sb".split("_"),longDateFormat:{LT:"HH.mm",LTS:"HH.mm.ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY [pukul] HH.mm",LLLL:"dddd, D MMMM YYYY [pukul] HH.mm"},meridiemParse:/pagi|siang|sore|malam/,meridiemHour:function(e,t){return 12===e&&(e=0),"pagi"===t?e:"siang"===t?e>=11?e:e+12:"sore"===t||"malam"===t?e+12:void 0},meridiem:function(e,t,n){return 11>e?"pagi":15>e?"siang":19>e?"sore":"malam"},calendar:{sameDay:"[Hari ini pukul] LT",nextDay:"[Besok pukul] LT",nextWeek:"dddd [pukul] LT",lastDay:"[Kemarin pukul] LT",lastWeek:"dddd [lalu pukul] LT",sameElse:"L"},relativeTime:{future:"dalam %s",past:"%s yang lalu",s:"beberapa detik",m:"semenit",mm:"%d menit",h:"sejam",hh:"%d jam",d:"sehari",dd:"%d hari",M:"sebulan",MM:"%d bulan",y:"setahun",yy:"%d tahun"},week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : icelandic (is)
//! author : Hinrik Örn Sigurðsson : https://github.com/hinrik
// Icelandic (is) locale module. t(n) tests whether n takes plural form
// (true unless n ends in 1 but not 11); n() assembles case-aware unit phrases.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e){return e%100===11?!0:e%10===1?!1:!0}function n(e,n,r,a){var o=e+" ";switch(r){case"s":return n||a?"nokkrar sekúndur":"nokkrum sekúndum";case"m":return n?"mínúta":"mínútu";case"mm":return t(e)?o+(n||a?"mínútur":"mínútum"):n?o+"mínúta":o+"mínútu";case"hh":return t(e)?o+(n||a?"klukkustundir":"klukkustundum"):o+"klukkustund";case"d":return n?"dagur":a?"dag":"degi";case"dd":return t(e)?n?o+"dagar":o+(a?"daga":"dögum"):n?o+"dagur":o+(a?"dag":"degi");case"M":return n?"mánuður":a?"mánuð":"mánuði";case"MM":return t(e)?n?o+"mánuðir":o+(a?"mánuði":"mánuðum"):n?o+"mánuður":o+(a?"mánuð":"mánuði");case"y":return n||a?"ár":"ári";case"yy":return t(e)?o+(n||a?"ár":"árum"):o+(n||a?"ár":"ári")}}var r=e.defineLocale("is",{months:"janúar_febrúar_mars_apríl_maí_júní_júlí_ágúst_september_október_nóvember_desember".split("_"),monthsShort:"jan_feb_mar_apr_maí_jún_júl_ágú_sep_okt_nóv_des".split("_"),weekdays:"sunnudagur_mánudagur_þriðjudagur_miðvikudagur_fimmtudagur_föstudagur_laugardagur".split("_"),weekdaysShort:"sun_mán_þri_mið_fim_fös_lau".split("_"),weekdaysMin:"Su_Má_Þr_Mi_Fi_Fö_La".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD/MM/YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY [kl.] H:mm",LLLL:"dddd, D. MMMM YYYY [kl.] H:mm"},calendar:{sameDay:"[í dag kl.] LT",nextDay:"[á morgun kl.] LT",nextWeek:"dddd [kl.] LT",lastDay:"[í gær kl.] LT",lastWeek:"[síðasta] dddd [kl.] LT",sameElse:"L"},relativeTime:{future:"eftir %s",past:"fyrir %s síðan",s:n,m:n,mm:n,h:"klukkustund",hh:n,d:n,dd:n,M:n,MM:n,y:n,yy:n},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : italian (it)
//! author : Lorenzo : https://github.com/aliem
//! author: Mattia Larentis: https://github.com/nostalgiaz
// Italian (it) locale module. The future() function picks "tra" vs "in"
// depending on whether the phrase starts with a digit.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("it",{months:"gennaio_febbraio_marzo_aprile_maggio_giugno_luglio_agosto_settembre_ottobre_novembre_dicembre".split("_"),monthsShort:"gen_feb_mar_apr_mag_giu_lug_ago_set_ott_nov_dic".split("_"),weekdays:"Domenica_Lunedì_Martedì_Mercoledì_Giovedì_Venerdì_Sabato".split("_"),weekdaysShort:"Dom_Lun_Mar_Mer_Gio_Ven_Sab".split("_"),weekdaysMin:"D_L_Ma_Me_G_V_S".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[Oggi alle] LT",nextDay:"[Domani alle] LT",nextWeek:"dddd [alle] LT",lastDay:"[Ieri alle] LT",lastWeek:function(){switch(this.day()){case 0:return"[la scorsa] dddd [alle] LT";default:return"[lo scorso] dddd [alle] LT"}},sameElse:"L"},relativeTime:{future:function(e){return(/^[0-9].+$/.test(e)?"tra":"in")+" "+e},past:"%s fa",s:"alcuni secondi",m:"un minuto",mm:"%d minuti",h:"un'ora",hh:"%d ore",d:"un giorno",dd:"%d giorni",M:"un mese",MM:"%d mesi",y:"un anno",yy:"%d anni"},ordinalParse:/\d{1,2}º/,ordinal:"%dº",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : japanese (ja)
//! author : LI Long : https://github.com/baryon
// Japanese (ja) locale module. Uses 午前/午後 meridiem markers; no week settings.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ja",{months:"1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月".split("_"),monthsShort:"1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月".split("_"),weekdays:"日曜日_月曜日_火曜日_水曜日_木曜日_金曜日_土曜日".split("_"),weekdaysShort:"日_月_火_水_木_金_土".split("_"),weekdaysMin:"日_月_火_水_木_金_土".split("_"),longDateFormat:{LT:"Ah時m分",LTS:"Ah時m分s秒",L:"YYYY/MM/DD",LL:"YYYY年M月D日",LLL:"YYYY年M月D日Ah時m分",LLLL:"YYYY年M月D日Ah時m分 dddd"},meridiemParse:/午前|午後/i,isPM:function(e){return"午後"===e},meridiem:function(e,t,n){return 12>e?"午前":"午後"},calendar:{sameDay:"[今日] LT",nextDay:"[明日] LT",nextWeek:"[来週]dddd LT",lastDay:"[昨日] LT",lastWeek:"[前週]dddd LT",sameElse:"L"},relativeTime:{future:"%s後",past:"%s前",s:"数秒",m:"1分",mm:"%d分",h:"1時間",hh:"%d時間",d:"1日",dd:"%d日",M:"1ヶ月",MM:"%dヶ月",y:"1年",yy:"%d年"}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Boso Jowo (jv)
//! author : Rony Lantip : https://github.com/lantip
//! reference: http://jv.wikipedia.org/wiki/Basa_Jawa
// Javanese (jv) locale module. meridiemHour maps enjing/siyang/sonten/ndalu
// day-period words to 24h hours when parsing.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("jv",{months:"Januari_Februari_Maret_April_Mei_Juni_Juli_Agustus_September_Oktober_Nopember_Desember".split("_"),monthsShort:"Jan_Feb_Mar_Apr_Mei_Jun_Jul_Ags_Sep_Okt_Nop_Des".split("_"),weekdays:"Minggu_Senen_Seloso_Rebu_Kemis_Jemuwah_Septu".split("_"),weekdaysShort:"Min_Sen_Sel_Reb_Kem_Jem_Sep".split("_"),weekdaysMin:"Mg_Sn_Sl_Rb_Km_Jm_Sp".split("_"),longDateFormat:{LT:"HH.mm",LTS:"HH.mm.ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY [pukul] HH.mm",LLLL:"dddd, D MMMM YYYY [pukul] HH.mm"},meridiemParse:/enjing|siyang|sonten|ndalu/,meridiemHour:function(e,t){return 12===e&&(e=0),"enjing"===t?e:"siyang"===t?e>=11?e:e+12:"sonten"===t||"ndalu"===t?e+12:void 0},meridiem:function(e,t,n){return 11>e?"enjing":15>e?"siyang":19>e?"sonten":"ndalu"},calendar:{sameDay:"[Dinten puniko pukul] LT",nextDay:"[Mbenjang pukul] LT",nextWeek:"dddd [pukul] LT",lastDay:"[Kala wingi pukul] LT",lastWeek:"dddd [kepengker pukul] LT",sameElse:"L"},relativeTime:{future:"wonten ing %s",past:"%s ingkang kepengker",s:"sawetawis detik",m:"setunggal menit",mm:"%d menit",h:"setunggal jam",hh:"%d jam",d:"sedinten",dd:"%d dinten",M:"sewulan",MM:"%d wulan",y:"setaun",yy:"%d taun"},week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Georgian (ka)
//! author : Irakli Janiashvili : https://github.com/irakli-janiashvili
// Georgian (ka) locale module. months/weekdays choose nominative vs accusative forms
// by regex-testing the format string; relativeTime future/past apply suffix rewrites.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t){var n={nominative:"იანვარი_თებერვალი_მარტი_აპრილი_მაისი_ივნისი_ივლისი_აგვისტო_სექტემბერი_ოქტომბერი_ნოემბერი_დეკემბერი".split("_"),accusative:"იანვარს_თებერვალს_მარტს_აპრილის_მაისს_ივნისს_ივლისს_აგვისტს_სექტემბერს_ოქტომბერს_ნოემბერს_დეკემბერს".split("_")},r=/D[oD] *MMMM?/.test(t)?"accusative":"nominative";return n[r][e.month()]}function n(e,t){var n={nominative:"კვირა_ორშაბათი_სამშაბათი_ოთხშაბათი_ხუთშაბათი_პარასკევი_შაბათი".split("_"),accusative:"კვირას_ორშაბათს_სამშაბათს_ოთხშაბათს_ხუთშაბათს_პარასკევს_შაბათს".split("_")},r=/(წინა|შემდეგ)/.test(t)?"accusative":"nominative";return n[r][e.day()]}var r=e.defineLocale("ka",{months:t,monthsShort:"იან_თებ_მარ_აპრ_მაი_ივნ_ივლ_აგვ_სექ_ოქტ_ნოე_დეკ".split("_"),weekdays:n,weekdaysShort:"კვი_ორშ_სამ_ოთხ_ხუთ_პარ_შაბ".split("_"),weekdaysMin:"კვ_ორ_სა_ოთ_ხუ_პა_შა".split("_"),longDateFormat:{LT:"h:mm A",LTS:"h:mm:ss A",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY h:mm A",LLLL:"dddd, D MMMM YYYY h:mm A"},calendar:{sameDay:"[დღეს] LT[-ზე]",nextDay:"[ხვალ] LT[-ზე]",lastDay:"[გუშინ] LT[-ზე]",nextWeek:"[შემდეგ] dddd LT[-ზე]",lastWeek:"[წინა] dddd LT-ზე",sameElse:"L"},relativeTime:{future:function(e){return/(წამი|წუთი|საათი|წელი)/.test(e)?e.replace(/ი$/,"ში"):e+"ში"},past:function(e){return/(წამი|წუთი|საათი|დღე|თვე)/.test(e)?e.replace(/(ი|ე)$/,"ის წინ"):/წელი/.test(e)?e.replace(/წელი$/,"წლის წინ"):void 0},s:"რამდენიმე წამი",m:"წუთი",mm:"%d წუთი",h:"საათი",hh:"%d საათი",d:"დღე",dd:"%d დღე",M:"თვე",MM:"%d თვე",y:"წელი",yy:"%d წელი"},ordinalParse:/0|1-ლი|მე-\d{1,2}|\d{1,2}-ე/,ordinal:function(e){return 0===e?e:1===e?e+"-ლი":20>e||100>=e&&e%20===0||e%100===0?"მე-"+e:e+"-ე"},week:{dow:1,doy:7}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : khmer (km)
//! author : Kruy Vanna : https://github.com/kruyvanna
// Khmer (km) locale module. Plain data tables only — no helper functions.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("km",{months:"មករា_កុម្ភៈ_មិនា_មេសា_ឧសភា_មិថុនា_កក្កដា_សីហា_កញ្ញា_តុលា_វិច្ឆិកា_ធ្នូ".split("_"),monthsShort:"មករា_កុម្ភៈ_មិនា_មេសា_ឧសភា_មិថុនា_កក្កដា_សីហា_កញ្ញា_តុលា_វិច្ឆិកា_ធ្នូ".split("_"),weekdays:"អាទិត្យ_ច័ន្ទ_អង្គារ_ពុធ_ព្រហស្បតិ៍_សុក្រ_សៅរ៍".split("_"),weekdaysShort:"អាទិត្យ_ច័ន្ទ_អង្គារ_ពុធ_ព្រហស្បតិ៍_សុក្រ_សៅរ៍".split("_"),weekdaysMin:"អាទិត្យ_ច័ន្ទ_អង្គារ_ពុធ_ព្រហស្បតិ៍_សុក្រ_សៅរ៍".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[ថ្ងៃនៈ ម៉ោង] LT",nextDay:"[ស្អែក ម៉ោង] LT",nextWeek:"dddd [ម៉ោង] LT",lastDay:"[ម្សិលមិញ ម៉ោង] LT",lastWeek:"dddd [សប្តាហ៍មុន] [ម៉ោង] LT",sameElse:"L"},relativeTime:{future:"%sទៀត",past:"%sមុន",s:"ប៉ុន្មានវិនាទី",m:"មួយនាទី",mm:"%d នាទី",h:"មួយម៉ោង",hh:"%d ម៉ោង",d:"មួយថ្ងៃ",dd:"%d ថ្ងៃ",M:"មួយខែ",MM:"%d ខែ",y:"មួយឆ្នាំ",yy:"%d ឆ្នាំ"},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : korean (ko)
//!
//! authors
//!
//! - Kyungwook, Park : https://github.com/kyungw00k
//! - Jeeeyul Lee <jeeeyul@gmail.com>
// Korean (ko) locale module. Uses 오전/오후 meridiem markers and the %d일 ordinal.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ko",{months:"1월_2월_3월_4월_5월_6월_7월_8월_9월_10월_11월_12월".split("_"),monthsShort:"1월_2월_3월_4월_5월_6월_7월_8월_9월_10월_11월_12월".split("_"),weekdays:"일요일_월요일_화요일_수요일_목요일_금요일_토요일".split("_"),weekdaysShort:"일_월_화_수_목_금_토".split("_"),weekdaysMin:"일_월_화_수_목_금_토".split("_"),longDateFormat:{LT:"A h시 m분",LTS:"A h시 m분 s초",L:"YYYY.MM.DD",LL:"YYYY년 MMMM D일",LLL:"YYYY년 MMMM D일 A h시 m분",LLLL:"YYYY년 MMMM D일 dddd A h시 m분"},calendar:{sameDay:"오늘 LT",nextDay:"내일 LT",nextWeek:"dddd LT",lastDay:"어제 LT",lastWeek:"지난주 dddd LT",sameElse:"L"},relativeTime:{future:"%s 후",past:"%s 전",s:"몇초",ss:"%d초",m:"일분",mm:"%d분",h:"한시간",hh:"%d시간",d:"하루",dd:"%d일",M:"한달",MM:"%d달",y:"일년",yy:"%d년"},ordinalParse:/\d{1,2}일/,ordinal:"%d일",meridiemParse:/오전|오후/,isPM:function(e){return"오후"===e},meridiem:function(e,t,n){return 12>e?"오전":"오후"}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Luxembourgish (lb)
//! author : mweimerskirch : https://github.com/mweimerskirch, David Raison : https://github.com/kwisatz
// Luxembourgish (lb) locale module. a(n) implements the Eifeler rule test
// (decides "a"/"an" and "viru"/"virun" before a number); n()/r() build future/past phrases.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){var a={m:["eng Minutt","enger Minutt"],h:["eng Stonn","enger Stonn"],d:["een Dag","engem Dag"],M:["ee Mount","engem Mount"],y:["ee Joer","engem Joer"]};return t?a[n][0]:a[n][1]}function n(e){var t=e.substr(0,e.indexOf(" "));return a(t)?"a "+e:"an "+e}function r(e){var t=e.substr(0,e.indexOf(" "));return a(t)?"viru "+e:"virun "+e}function a(e){if(e=parseInt(e,10),isNaN(e))return!1;if(0>e)return!0;if(10>e)return e>=4&&7>=e?!0:!1;if(100>e){var t=e%10,n=e/10;return a(0===t?n:t)}if(1e4>e){for(;e>=10;)e/=10;return a(e)}return e/=1e3,a(e)}var o=e.defineLocale("lb",{months:"Januar_Februar_Mäerz_Abrëll_Mee_Juni_Juli_August_September_Oktober_November_Dezember".split("_"),monthsShort:"Jan._Febr._Mrz._Abr._Mee_Jun._Jul._Aug._Sept._Okt._Nov._Dez.".split("_"),weekdays:"Sonndeg_Méindeg_Dënschdeg_Mëttwoch_Donneschdeg_Freideg_Samschdeg".split("_"),weekdaysShort:"So._Mé._Dë._Më._Do._Fr._Sa.".split("_"),weekdaysMin:"So_Mé_Dë_Më_Do_Fr_Sa".split("_"),longDateFormat:{LT:"H:mm [Auer]",LTS:"H:mm:ss [Auer]",L:"DD.MM.YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm [Auer]",LLLL:"dddd, D. MMMM YYYY H:mm [Auer]"},calendar:{sameDay:"[Haut um] LT",sameElse:"L",nextDay:"[Muer um] LT",nextWeek:"dddd [um] LT",lastDay:"[Gëschter um] LT",lastWeek:function(){switch(this.day()){case 2:case 4:return"[Leschten] dddd [um] LT";default:return"[Leschte] dddd [um] LT"}}},relativeTime:{future:n,past:r,s:"e puer Sekonnen",m:t,mm:"%d Minutten",h:t,hh:"%d Stonnen",d:t,dd:"%d Deeg",M:t,MM:"%d Méint",y:t,yy:"%d Joer"},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return o})},function(e,t,n){
//! moment.js locale configuration
//! locale : Lithuanian (lt)
//! author : Mindaugas Mozūras : https://github.com/mmozuras
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){return t?"kelios sekundės":r?"kelių sekundžių":"kelias sekundes"}function n(e,t){var n={nominative:"sausis_vasaris_kovas_balandis_gegužė_birželis_liepa_rugpjūtis_rugsėjis_spalis_lapkritis_gruodis".split("_"),accusative:"sausio_vasario_kovo_balandžio_gegužės_birželio_liepos_rugpjūčio_rugsėjo_spalio_lapkričio_gruodžio".split("_")},r=/D[oD]?(\[[^\[\]]*\]|\s+)+MMMM?/.test(t)?"accusative":"nominative";return n[r][e.month()]}function r(e,t,n,r){return t?o(n)[0]:r?o(n)[1]:o(n)[2]}function a(e){return e%10===0||e>10&&20>e}function o(e){return u[e].split("_")}function i(e,t,n,i){var s=e+" ";return 1===e?s+r(e,t,n[0],i):t?s+(a(e)?o(n)[1]:o(n)[0]):i?s+o(n)[1]:s+(a(e)?o(n)[1]:o(n)[2])}function s(e,t){var n=-1===t.indexOf("dddd HH:mm"),r=d[e.day()];return n?r:r.substring(0,r.length-2)+"į"}var u={m:"minutė_minutės_minutę",mm:"minutės_minučių_minutes",h:"valanda_valandos_valandą",hh:"valandos_valandų_valandas",d:"diena_dienos_dieną",dd:"dienos_dienų_dienas",M:"mėnuo_mėnesio_mėnesį",MM:"mėnesiai_mėnesių_mėnesius",y:"metai_metų_metus",yy:"metai_metų_metus"},d="sekmadienis_pirmadienis_antradienis_trečiadienis_ketvirtadienis_penktadienis_šeštadienis".split("_"),l=e.defineLocale("lt",{months:n,monthsShort:"sau_vas_kov_bal_geg_bir_lie_rgp_rgs_spa_lap_grd".split("_"),weekdays:s,weekdaysShort:"Sek_Pir_Ant_Tre_Ket_Pen_Šeš".split("_"),weekdaysMin:"S_P_A_T_K_Pn_Š".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"YYYY-MM-DD",LL:"YYYY [m.] MMMM D [d.]",LLL:"YYYY [m.] MMMM D [d.], HH:mm [val.]",LLLL:"YYYY [m.] MMMM D [d.], dddd, HH:mm [val.]",l:"YYYY-MM-DD",ll:"YYYY [m.] MMMM D [d.]",lll:"YYYY [m.] MMMM D [d.], HH:mm [val.]",llll:"YYYY [m.] 
MMMM D [d.], ddd, HH:mm [val.]"},calendar:{sameDay:"[Šiandien] LT",nextDay:"[Rytoj] LT",nextWeek:"dddd LT",lastDay:"[Vakar] LT",lastWeek:"[Praėjusį] dddd LT",sameElse:"L"},relativeTime:{future:"po %s",past:"prieš %s",s:t,m:r,mm:i,h:r,hh:i,d:r,dd:i,M:r,MM:i,y:r,yy:i},ordinalParse:/\d{1,2}-oji/,ordinal:function(e){return e+"-oji"},week:{dow:1,doy:4}});return l})},function(e,t,n){
//! moment.js locale configuration
//! locale : latvian (lv)
//! author : Kristaps Karlsons : https://github.com/skakri
//! author : Jānis Elmeris : https://github.com/JanisE
// Latvian (lv) locale module. t() picks one of four declined forms from table o
// based on count and future/past context; n()/r() wrap it with/without the number.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n){return n?t%10===1&&11!==t?e[2]:e[3]:t%10===1&&11!==t?e[0]:e[1]}function n(e,n,r){return e+" "+t(o[r],e,n)}function r(e,n,r){return t(o[r],e,n)}function a(e,t){return t?"dažas sekundes":"dažām sekundēm"}var o={m:"minūtes_minūtēm_minūte_minūtes".split("_"),mm:"minūtes_minūtēm_minūte_minūtes".split("_"),h:"stundas_stundām_stunda_stundas".split("_"),hh:"stundas_stundām_stunda_stundas".split("_"),d:"dienas_dienām_diena_dienas".split("_"),dd:"dienas_dienām_diena_dienas".split("_"),M:"mēneša_mēnešiem_mēnesis_mēneši".split("_"),MM:"mēneša_mēnešiem_mēnesis_mēneši".split("_"),y:"gada_gadiem_gads_gadi".split("_"),yy:"gada_gadiem_gads_gadi".split("_")},i=e.defineLocale("lv",{months:"janvāris_februāris_marts_aprīlis_maijs_jūnijs_jūlijs_augusts_septembris_oktobris_novembris_decembris".split("_"),monthsShort:"jan_feb_mar_apr_mai_jūn_jūl_aug_sep_okt_nov_dec".split("_"),weekdays:"svētdiena_pirmdiena_otrdiena_trešdiena_ceturtdiena_piektdiena_sestdiena".split("_"),weekdaysShort:"Sv_P_O_T_C_Pk_S".split("_"),weekdaysMin:"Sv_P_O_T_C_Pk_S".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY.",LL:"YYYY. [gada] D. MMMM",LLL:"YYYY. [gada] D. MMMM, HH:mm",LLLL:"YYYY. [gada] D. MMMM, dddd, HH:mm"},calendar:{sameDay:"[Šodien pulksten] LT",nextDay:"[Rīt pulksten] LT",nextWeek:"dddd [pulksten] LT",lastDay:"[Vakar pulksten] LT",lastWeek:"[Pagājušā] dddd [pulksten] LT",sameElse:"L"},relativeTime:{future:"pēc %s",past:"pirms %s",s:a,m:r,mm:n,h:r,hh:n,d:r,dd:n,M:r,MM:n,y:r,yy:n},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return i})},function(e,t,n){
//! moment.js locale configuration
//! locale : Montenegrin (me)
//! author : Miodrag Nikač <miodrag@restartit.me> : https://github.com/miodragnikac
// Montenegrin (me) locale module. translate() applies Slavic grammatical-case selection
// (1 / 2-4 / other) from the `words` table for relative-time units.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={words:{m:["jedan minut","jednog minuta"],mm:["minut","minuta","minuta"],h:["jedan sat","jednog sata"],hh:["sat","sata","sati"],dd:["dan","dana","dana"],MM:["mjesec","mjeseca","mjeseci"],yy:["godina","godine","godina"]},correctGrammaticalCase:function(e,t){return 1===e?t[0]:e>=2&&4>=e?t[1]:t[2]},translate:function(e,n,r){var a=t.words[r];return 1===r.length?n?a[0]:a[1]:e+" "+t.correctGrammaticalCase(e,a)}},n=e.defineLocale("me",{months:["januar","februar","mart","april","maj","jun","jul","avgust","septembar","oktobar","novembar","decembar"],monthsShort:["jan.","feb.","mar.","apr.","maj","jun","jul","avg.","sep.","okt.","nov.","dec."],weekdays:["nedjelja","ponedjeljak","utorak","srijeda","četvrtak","petak","subota"],weekdaysShort:["ned.","pon.","uto.","sri.","čet.","pet.","sub."],weekdaysMin:["ne","po","ut","sr","če","pe","su"],longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD. MM. YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd, D. MMMM YYYY H:mm"},calendar:{sameDay:"[danas u] LT",nextDay:"[sjutra u] LT",nextWeek:function(){switch(this.day()){case 0:return"[u] [nedjelju] [u] LT";case 3:return"[u] [srijedu] [u] LT";case 6:return"[u] [subotu] [u] LT";case 1:case 2:case 4:case 5:return"[u] dddd [u] LT"}},lastDay:"[juče u] LT",lastWeek:function(){var e=["[prošle] [nedjelje] [u] LT","[prošlog] [ponedjeljka] [u] LT","[prošlog] [utorka] [u] LT","[prošle] [srijede] [u] LT","[prošlog] [četvrtka] [u] LT","[prošlog] [petka] [u] LT","[prošle] [subote] [u] LT"];return e[this.day()]},sameElse:"L"},relativeTime:{future:"za %s",past:"prije %s",s:"nekoliko sekundi",m:t.translate,mm:t.translate,h:t.translate,hh:t.translate,d:"dan",dd:t.translate,M:"mjesec",MM:t.translate,y:"godinu",yy:t.translate},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : macedonian (mk)
//! author : Borislav Mickov : https://github.com/B0k0
// Macedonian (mk) locale module. ordinal() picks the Cyrillic suffix (-ев/-ен/-ти/-ви/-ри/-ми)
// from the number's last digit(s).
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("mk",{months:"јануари_февруари_март_април_мај_јуни_јули_август_септември_октомври_ноември_декември".split("_"),monthsShort:"јан_фев_мар_апр_мај_јун_јул_авг_сеп_окт_ное_дек".split("_"),weekdays:"недела_понеделник_вторник_среда_четврток_петок_сабота".split("_"),weekdaysShort:"нед_пон_вто_сре_чет_пет_саб".split("_"),weekdaysMin:"нe_пo_вт_ср_че_пе_сa".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"D.MM.YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY H:mm",LLLL:"dddd, D MMMM YYYY H:mm"},calendar:{sameDay:"[Денес во] LT",nextDay:"[Утре во] LT",nextWeek:"dddd [во] LT",lastDay:"[Вчера во] LT",lastWeek:function(){switch(this.day()){case 0:case 3:case 6:return"[Во изминатата] dddd [во] LT";case 1:case 2:case 4:case 5:return"[Во изминатиот] dddd [во] LT"}},sameElse:"L"},relativeTime:{future:"после %s",past:"пред %s",s:"неколку секунди",m:"минута",mm:"%d минути",h:"час",hh:"%d часа",d:"ден",dd:"%d дена",M:"месец",MM:"%d месеци",y:"година",yy:"%d години"},ordinalParse:/\d{1,2}-(ев|ен|ти|ви|ри|ми)/,ordinal:function(e){var t=e%10,n=e%100;return 0===e?e+"-ев":0===n?e+"-ен":n>10&&20>n?e+"-ти":1===t?e+"-ви":2===t?e+"-ри":7===t||8===t?e+"-ми":e+"-ти"},week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : malayalam (ml)
//! author : Floyd Pink : https://github.com/floydpink
// Malayalam (ml) locale module. meridiem() splits the day into five named periods.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ml",{months:"ജനുവരി_ഫെബ്രുവരി_മാർച്ച്_ഏപ്രിൽ_മേയ്_ജൂൺ_ജൂലൈ_ഓഗസ്റ്റ്_സെപ്റ്റംബർ_ഒക്ടോബർ_നവംബർ_ഡിസംബർ".split("_"),monthsShort:"ജനു._ഫെബ്രു._മാർ._ഏപ്രി._മേയ്_ജൂൺ_ജൂലൈ._ഓഗ._സെപ്റ്റ._ഒക്ടോ._നവം._ഡിസം.".split("_"),weekdays:"ഞായറാഴ്ച_തിങ്കളാഴ്ച_ചൊവ്വാഴ്ച_ബുധനാഴ്ച_വ്യാഴാഴ്ച_വെള്ളിയാഴ്ച_ശനിയാഴ്ച".split("_"),weekdaysShort:"ഞായർ_തിങ്കൾ_ചൊവ്വ_ബുധൻ_വ്യാഴം_വെള്ളി_ശനി".split("_"),weekdaysMin:"ഞാ_തി_ചൊ_ബു_വ്യാ_വെ_ശ".split("_"),longDateFormat:{LT:"A h:mm -നു",LTS:"A h:mm:ss -നു",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY, A h:mm -നു",LLLL:"dddd, D MMMM YYYY, A h:mm -നു"},calendar:{sameDay:"[ഇന്ന്] LT",nextDay:"[നാളെ] LT",nextWeek:"dddd, LT",lastDay:"[ഇന്നലെ] LT",lastWeek:"[കഴിഞ്ഞ] dddd, LT",sameElse:"L"},relativeTime:{future:"%s കഴിഞ്ഞ്",past:"%s മുൻപ്",s:"അൽപ നിമിഷങ്ങൾ",m:"ഒരു മിനിറ്റ്",mm:"%d മിനിറ്റ്",h:"ഒരു മണിക്കൂർ",hh:"%d മണിക്കൂർ",d:"ഒരു ദിവസം",dd:"%d ദിവസം",M:"ഒരു മാസം",MM:"%d മാസം",y:"ഒരു വർഷം",yy:"%d വർഷം"},meridiemParse:/രാത്രി|രാവിലെ|ഉച്ച കഴിഞ്ഞ്|വൈകുന്നേരം|രാത്രി/i,isPM:function(e){return/^(ഉച്ച കഴിഞ്ഞ്|വൈകുന്നേരം|രാത്രി)$/.test(e)},meridiem:function(e,t,n){return 4>e?"രാത്രി":12>e?"രാവിലെ":17>e?"ഉച്ച കഴിഞ്ഞ്":20>e?"വൈകുന്നേരം":"രാത്രി"}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Marathi (mr)
//! author : Harshad Kale : https://github.com/kalehv
// Marathi (mr) locale module. Tables t/n map between ASCII and Devanagari digits for
// preparse/postformat; meridiemHour maps the four day-period words to 24h hours.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"१",2:"२",3:"३",4:"४",5:"५",6:"६",7:"७",8:"८",9:"९",0:"०"},n={"१":"1","२":"2","३":"3","४":"4","५":"5","६":"6","७":"7","८":"8","९":"9","०":"0"},r=e.defineLocale("mr",{months:"जानेवारी_फेब्रुवारी_मार्च_एप्रिल_मे_जून_जुलै_ऑगस्ट_सप्टेंबर_ऑक्टोबर_नोव्हेंबर_डिसेंबर".split("_"),monthsShort:"जाने._फेब्रु._मार्च._एप्रि._मे._जून._जुलै._ऑग._सप्टें._ऑक्टो._नोव्हें._डिसें.".split("_"),weekdays:"रविवार_सोमवार_मंगळवार_बुधवार_गुरूवार_शुक्रवार_शनिवार".split("_"),weekdaysShort:"रवि_सोम_मंगळ_बुध_गुरू_शुक्र_शनि".split("_"),weekdaysMin:"र_सो_मं_बु_गु_शु_श".split("_"),longDateFormat:{LT:"A h:mm वाजता",LTS:"A h:mm:ss वाजता",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY, A h:mm वाजता",LLLL:"dddd, D MMMM YYYY, A h:mm वाजता"},calendar:{sameDay:"[आज] LT",nextDay:"[उद्या] LT",nextWeek:"dddd, LT",lastDay:"[काल] LT",lastWeek:"[मागील] dddd, LT",sameElse:"L"},relativeTime:{future:"%s नंतर",past:"%s पूर्वी",s:"सेकंद",m:"एक मिनिट",mm:"%d मिनिटे",h:"एक तास",hh:"%d तास",d:"एक दिवस",dd:"%d दिवस",M:"एक महिना",MM:"%d महिने",y:"एक वर्ष",yy:"%d वर्षे"},preparse:function(e){return e.replace(/[१२३४५६७८९०]/g,function(e){return n[e]})},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]})},meridiemParse:/रात्री|सकाळी|दुपारी|सायंकाळी/,meridiemHour:function(e,t){return 12===e&&(e=0),"रात्री"===t?4>e?e:e+12:"सकाळी"===t?e:"दुपारी"===t?e>=10?e:e+12:"सायंकाळी"===t?e+12:void 0},meridiem:function(e,t,n){return 4>e?"रात्री":10>e?"सकाळी":17>e?"दुपारी":20>e?"सायंकाळी":"रात्री"},week:{dow:0,doy:6}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : Bahasa Malaysia (ms-MY)
//! author : Weldan Jamili : https://github.com/weldan
// Malay (ms) locale module. meridiemHour maps pagi/tengahari/petang/malam to 24h hours.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ms",{months:"Januari_Februari_Mac_April_Mei_Jun_Julai_Ogos_September_Oktober_November_Disember".split("_"),monthsShort:"Jan_Feb_Mac_Apr_Mei_Jun_Jul_Ogs_Sep_Okt_Nov_Dis".split("_"),weekdays:"Ahad_Isnin_Selasa_Rabu_Khamis_Jumaat_Sabtu".split("_"),weekdaysShort:"Ahd_Isn_Sel_Rab_Kha_Jum_Sab".split("_"),weekdaysMin:"Ah_Is_Sl_Rb_Km_Jm_Sb".split("_"),longDateFormat:{LT:"HH.mm",LTS:"HH.mm.ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY [pukul] HH.mm",LLLL:"dddd, D MMMM YYYY [pukul] HH.mm"},meridiemParse:/pagi|tengahari|petang|malam/,meridiemHour:function(e,t){return 12===e&&(e=0),"pagi"===t?e:"tengahari"===t?e>=11?e:e+12:"petang"===t||"malam"===t?e+12:void 0},meridiem:function(e,t,n){return 11>e?"pagi":15>e?"tengahari":19>e?"petang":"malam"},calendar:{sameDay:"[Hari ini pukul] LT",nextDay:"[Esok pukul] LT",nextWeek:"dddd [pukul] LT",lastDay:"[Kelmarin pukul] LT",lastWeek:"dddd [lepas pukul] LT",sameElse:"L"},relativeTime:{future:"dalam %s",past:"%s yang lepas",s:"beberapa saat",m:"seminit",mm:"%d minit",h:"sejam",hh:"%d jam",d:"sehari",dd:"%d hari",M:"sebulan",MM:"%d bulan",y:"setahun",yy:"%d tahun"},week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Bahasa Malaysia (ms-MY)
//! author : Weldan Jamili : https://github.com/weldan
// Malay (ms-my) locale module — same data as the "ms" module above, registered
// under the region-qualified id for backward compatibility.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ms-my",{months:"Januari_Februari_Mac_April_Mei_Jun_Julai_Ogos_September_Oktober_November_Disember".split("_"),monthsShort:"Jan_Feb_Mac_Apr_Mei_Jun_Jul_Ogs_Sep_Okt_Nov_Dis".split("_"),weekdays:"Ahad_Isnin_Selasa_Rabu_Khamis_Jumaat_Sabtu".split("_"),weekdaysShort:"Ahd_Isn_Sel_Rab_Kha_Jum_Sab".split("_"),weekdaysMin:"Ah_Is_Sl_Rb_Km_Jm_Sb".split("_"),longDateFormat:{LT:"HH.mm",LTS:"HH.mm.ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY [pukul] HH.mm",LLLL:"dddd, D MMMM YYYY [pukul] HH.mm"},meridiemParse:/pagi|tengahari|petang|malam/,meridiemHour:function(e,t){return 12===e&&(e=0),"pagi"===t?e:"tengahari"===t?e>=11?e:e+12:"petang"===t||"malam"===t?e+12:void 0},meridiem:function(e,t,n){return 11>e?"pagi":15>e?"tengahari":19>e?"petang":"malam"},calendar:{sameDay:"[Hari ini pukul] LT",nextDay:"[Esok pukul] LT",nextWeek:"dddd [pukul] LT",lastDay:"[Kelmarin pukul] LT",lastWeek:"dddd [lepas pukul] LT",sameElse:"L"},relativeTime:{future:"dalam %s",past:"%s yang lepas",s:"beberapa saat",m:"seminit",mm:"%d minit",h:"sejam",hh:"%d jam",d:"sehari",dd:"%d hari",M:"sebulan",MM:"%d bulan",y:"setahun",yy:"%d tahun"},week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Burmese (my)
//! author : Squar team, mysquar.com
// Burmese (my) locale module. Tables t/n map between ASCII and Myanmar digits
// for preparse/postformat.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"၁",2:"၂",3:"၃",4:"၄",5:"၅",6:"၆",7:"၇",8:"၈",9:"၉",0:"၀"},n={"၁":"1","၂":"2","၃":"3","၄":"4","၅":"5","၆":"6","၇":"7","၈":"8","၉":"9","၀":"0"},r=e.defineLocale("my",{months:"ဇန်နဝါရီ_ဖေဖော်ဝါရီ_မတ်_ဧပြီ_မေ_ဇွန်_ဇူလိုင်_သြဂုတ်_စက်တင်ဘာ_အောက်တိုဘာ_နိုဝင်ဘာ_ဒီဇင်ဘာ".split("_"),monthsShort:"ဇန်_ဖေ_မတ်_ပြီ_မေ_ဇွန်_လိုင်_သြ_စက်_အောက်_နို_ဒီ".split("_"),weekdays:"တနင်္ဂနွေ_တနင်္လာ_အင်္ဂါ_ဗုဒ္ဓဟူး_ကြာသပတေး_သောကြာ_စနေ".split("_"),weekdaysShort:"နွေ_လာ_ဂါ_ဟူး_ကြာ_သော_နေ".split("_"),weekdaysMin:"နွေ_လာ_ဂါ_ဟူး_ကြာ_သော_နေ".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[ယနေ.] LT [မှာ]",nextDay:"[မနက်ဖြန်] LT [မှာ]",nextWeek:"dddd LT [မှာ]",lastDay:"[မနေ.က] LT [မှာ]",lastWeek:"[ပြီးခဲ့သော] dddd LT [မှာ]",sameElse:"L"},relativeTime:{future:"လာမည့် %s မှာ",past:"လွန်ခဲ့သော %s က",s:"စက္ကန်.အနည်းငယ်",m:"တစ်မိနစ်",mm:"%d မိနစ်",h:"တစ်နာရီ",hh:"%d နာရီ",d:"တစ်ရက်",dd:"%d ရက်",M:"တစ်လ",MM:"%d လ",y:"တစ်နှစ်",yy:"%d နှစ်"},preparse:function(e){return e.replace(/[၁၂၃၄၅၆၇၈၉၀]/g,function(e){return n[e]})},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]})},week:{dow:1,doy:4}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : norwegian bokmål (nb)
//! authors : Espen Hovlandsdal : https://github.com/rexxars
//! Sigurd Gartmann : https://github.com/sigurdga
// Norwegian Bokmål (nb) locale module. Plain data tables only — no helper functions.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("nb",{months:"januar_februar_mars_april_mai_juni_juli_august_september_oktober_november_desember".split("_"),monthsShort:"jan_feb_mar_apr_mai_jun_jul_aug_sep_okt_nov_des".split("_"),weekdays:"søndag_mandag_tirsdag_onsdag_torsdag_fredag_lørdag".split("_"),weekdaysShort:"søn_man_tirs_ons_tors_fre_lør".split("_"),weekdaysMin:"sø_ma_ti_on_to_fr_lø".split("_"),longDateFormat:{LT:"H.mm",LTS:"H.mm.ss",L:"DD.MM.YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY [kl.] H.mm",LLLL:"dddd D. MMMM YYYY [kl.] H.mm"},calendar:{sameDay:"[i dag kl.] LT",nextDay:"[i morgen kl.] LT",nextWeek:"dddd [kl.] LT",lastDay:"[i går kl.] LT",lastWeek:"[forrige] dddd [kl.] LT",sameElse:"L"},relativeTime:{future:"om %s",past:"for %s siden",s:"noen sekunder",m:"ett minutt",mm:"%d minutter",h:"en time",hh:"%d timer",d:"en dag",dd:"%d dager",M:"en måned",MM:"%d måneder",y:"ett år",yy:"%d år"},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : nepali/nepalese
//! author : suvash : https://github.com/suvash
// Nepali (ne) locale module. Tables t/n map between ASCII and Devanagari digits for
// preparse/postformat; meridiemHour maps five day-period words to 24h hours.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"१",2:"२",3:"३",4:"४",5:"५",6:"६",7:"७",8:"८",9:"९",0:"०"},n={"१":"1","२":"2","३":"3","४":"4","५":"5","६":"6","७":"7","८":"8","९":"9","०":"0"},r=e.defineLocale("ne",{months:"जनवरी_फेब्रुवरी_मार्च_अप्रिल_मई_जुन_जुलाई_अगष्ट_सेप्टेम्बर_अक्टोबर_नोभेम्बर_डिसेम्बर".split("_"),monthsShort:"जन._फेब्रु._मार्च_अप्रि._मई_जुन_जुलाई._अग._सेप्ट._अक्टो._नोभे._डिसे.".split("_"),weekdays:"आइतबार_सोमबार_मङ्गलबार_बुधबार_बिहिबार_शुक्रबार_शनिबार".split("_"),weekdaysShort:"आइत._सोम._मङ्गल._बुध._बिहि._शुक्र._शनि.".split("_"),weekdaysMin:"आइ._सो._मङ्_बु._बि._शु._श.".split("_"),longDateFormat:{LT:"Aको h:mm बजे",LTS:"Aको h:mm:ss बजे",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY, Aको h:mm बजे",LLLL:"dddd, D MMMM YYYY, Aको h:mm बजे"},preparse:function(e){return e.replace(/[१२३४५६७८९०]/g,function(e){return n[e]})},postformat:function(e){return e.replace(/\d/g,function(e){return t[e]})},meridiemParse:/राती|बिहान|दिउँसो|बेलुका|साँझ|राती/,meridiemHour:function(e,t){return 12===e&&(e=0),"राती"===t?3>e?e:e+12:"बिहान"===t?e:"दिउँसो"===t?e>=10?e:e+12:"बेलुका"===t||"साँझ"===t?e+12:void 0},meridiem:function(e,t,n){return 3>e?"राती":10>e?"बिहान":15>e?"दिउँसो":18>e?"बेलुका":20>e?"साँझ":"राती"},calendar:{sameDay:"[आज] LT",nextDay:"[भोली] LT",nextWeek:"[आउँदो] dddd[,] LT",lastDay:"[हिजो] LT",lastWeek:"[गएको] dddd[,] LT",sameElse:"L"},relativeTime:{future:"%sमा",past:"%s अगाडी",s:"केही समय",m:"एक मिनेट",mm:"%d मिनेट",h:"एक घण्टा",hh:"%d घण्टा",d:"एक दिन",dd:"%d दिन",M:"एक महिना",MM:"%d महिना",y:"एक बर्ष",yy:"%d बर्ष"},week:{dow:1,doy:7}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : dutch (nl)
//! author : Joris Röling : https://github.com/jjupiter
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t="jan._feb._mrt._apr._mei_jun._jul._aug._sep._okt._nov._dec.".split("_"),n="jan_feb_mrt_apr_mei_jun_jul_aug_sep_okt_nov_dec".split("_"),r=e.defineLocale("nl",{months:"januari_februari_maart_april_mei_juni_juli_augustus_september_oktober_november_december".split("_"),monthsShort:function(e,r){return/-MMM-/.test(r)?n[e.month()]:t[e.month()]},weekdays:"zondag_maandag_dinsdag_woensdag_donderdag_vrijdag_zaterdag".split("_"),weekdaysShort:"zo._ma._di._wo._do._vr._za.".split("_"),weekdaysMin:"Zo_Ma_Di_Wo_Do_Vr_Za".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD-MM-YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[vandaag om] LT",nextDay:"[morgen om] LT",nextWeek:"dddd [om] LT",lastDay:"[gisteren om] LT",lastWeek:"[afgelopen] dddd [om] LT",sameElse:"L"},relativeTime:{future:"over %s",past:"%s geleden",s:"een paar seconden",m:"één minuut",mm:"%d minuten",h:"één uur",hh:"%d uur",d:"één dag",dd:"%d dagen",M:"één maand",MM:"%d maanden",y:"één jaar",yy:"%d jaar"},ordinalParse:/\d{1,2}(ste|de)/,ordinal:function(e){return e+(1===e||8===e||e>=20?"ste":"de")},week:{dow:1,doy:4}});return r})},function(e,t,n){
//! moment.js locale configuration
//! locale : norwegian nynorsk (nn)
//! author : https://github.com/mechuwind
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("nn",{months:"januar_februar_mars_april_mai_juni_juli_august_september_oktober_november_desember".split("_"),monthsShort:"jan_feb_mar_apr_mai_jun_jul_aug_sep_okt_nov_des".split("_"),weekdays:"sundag_måndag_tysdag_onsdag_torsdag_fredag_laurdag".split("_"),weekdaysShort:"sun_mån_tys_ons_tor_fre_lau".split("_"),weekdaysMin:"su_må_ty_on_to_fr_lø".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[I dag klokka] LT",nextDay:"[I morgon klokka] LT",nextWeek:"dddd [klokka] LT",lastDay:"[I går klokka] LT",lastWeek:"[Føregåande] dddd [klokka] LT",sameElse:"L"},relativeTime:{future:"om %s",past:"for %s sidan",s:"nokre sekund",m:"eit minutt",mm:"%d minutt",h:"ein time",hh:"%d timar",d:"ein dag",dd:"%d dagar",M:"ein månad",MM:"%d månader",y:"eit år",yy:"%d år"},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : polish (pl)
//! author : Rafal Hirsz : https://github.com/evoL
// NOTE(review): minified webpack module generated from moment.js locale source (t/n implement Polish plural rules) — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e){return 5>e%10&&e%10>1&&~~(e/10)%10!==1}function n(e,n,r){var a=e+" ";switch(r){case"m":return n?"minuta":"minutę";case"mm":return a+(t(e)?"minuty":"minut");case"h":return n?"godzina":"godzinę";case"hh":return a+(t(e)?"godziny":"godzin");case"MM":return a+(t(e)?"miesiące":"miesięcy");case"yy":return a+(t(e)?"lata":"lat")}}var r="styczeń_luty_marzec_kwiecień_maj_czerwiec_lipiec_sierpień_wrzesień_październik_listopad_grudzień".split("_"),a="stycznia_lutego_marca_kwietnia_maja_czerwca_lipca_sierpnia_września_października_listopada_grudnia".split("_"),o=e.defineLocale("pl",{months:function(e,t){return""===t?"("+a[e.month()]+"|"+r[e.month()]+")":/D MMMM/.test(t)?a[e.month()]:r[e.month()]},monthsShort:"sty_lut_mar_kwi_maj_cze_lip_sie_wrz_paź_lis_gru".split("_"),weekdays:"niedziela_poniedziałek_wtorek_środa_czwartek_piątek_sobota".split("_"),weekdaysShort:"nie_pon_wt_śr_czw_pt_sb".split("_"),weekdaysMin:"N_Pn_Wt_Śr_Cz_Pt_So".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[Dziś o] LT",nextDay:"[Jutro o] LT",nextWeek:"[W] dddd [o] LT",lastDay:"[Wczoraj o] LT",lastWeek:function(){switch(this.day()){case 0:return"[W zeszłą niedzielę o] LT";case 3:return"[W zeszłą środę o] LT";case 6:return"[W zeszłą sobotę o] LT";default:return"[W zeszły] dddd [o] LT"}},sameElse:"L"},relativeTime:{future:"za %s",past:"%s temu",s:"kilka sekund",m:n,mm:n,h:n,hh:n,d:"1 dzień",dd:"%d dni",M:"miesiąc",MM:n,y:"rok",yy:n},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return o})},function(e,t,n){
//! moment.js locale configuration
//! locale : portuguese (pt)
//! author : Jefferson : https://github.com/jalex79
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("pt",{months:"Janeiro_Fevereiro_Março_Abril_Maio_Junho_Julho_Agosto_Setembro_Outubro_Novembro_Dezembro".split("_"),monthsShort:"Jan_Fev_Mar_Abr_Mai_Jun_Jul_Ago_Set_Out_Nov_Dez".split("_"),weekdays:"Domingo_Segunda-Feira_Terça-Feira_Quarta-Feira_Quinta-Feira_Sexta-Feira_Sábado".split("_"),weekdaysShort:"Dom_Seg_Ter_Qua_Qui_Sex_Sáb".split("_"),weekdaysMin:"Dom_2ª_3ª_4ª_5ª_6ª_Sáb".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D [de] MMMM [de] YYYY",LLL:"D [de] MMMM [de] YYYY HH:mm",LLLL:"dddd, D [de] MMMM [de] YYYY HH:mm"},calendar:{sameDay:"[Hoje às] LT",nextDay:"[Amanhã às] LT",nextWeek:"dddd [às] LT",lastDay:"[Ontem às] LT",lastWeek:function(){return 0===this.day()||6===this.day()?"[Último] dddd [às] LT":"[Última] dddd [às] LT"},sameElse:"L"},relativeTime:{future:"em %s",past:"há %s",s:"segundos",m:"um minuto",mm:"%d minutos",h:"uma hora",hh:"%d horas",d:"um dia",dd:"%d dias",M:"um mês",MM:"%d meses",y:"um ano",yy:"%d anos"},ordinalParse:/\d{1,2}º/,ordinal:"%dº",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : brazilian portuguese (pt-br)
//! author : Caio Ribeiro Pereira : https://github.com/caio-ribeiro-pereira
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("pt-br",{months:"Janeiro_Fevereiro_Março_Abril_Maio_Junho_Julho_Agosto_Setembro_Outubro_Novembro_Dezembro".split("_"),monthsShort:"Jan_Fev_Mar_Abr_Mai_Jun_Jul_Ago_Set_Out_Nov_Dez".split("_"),weekdays:"Domingo_Segunda-Feira_Terça-Feira_Quarta-Feira_Quinta-Feira_Sexta-Feira_Sábado".split("_"),weekdaysShort:"Dom_Seg_Ter_Qua_Qui_Sex_Sáb".split("_"),weekdaysMin:"Dom_2ª_3ª_4ª_5ª_6ª_Sáb".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D [de] MMMM [de] YYYY",LLL:"D [de] MMMM [de] YYYY [às] HH:mm",LLLL:"dddd, D [de] MMMM [de] YYYY [às] HH:mm"},calendar:{sameDay:"[Hoje às] LT",nextDay:"[Amanhã às] LT",nextWeek:"dddd [às] LT",lastDay:"[Ontem às] LT",lastWeek:function(){return 0===this.day()||6===this.day()?"[Último] dddd [às] LT":"[Última] dddd [às] LT"},sameElse:"L"},relativeTime:{future:"em %s",past:"%s atrás",s:"poucos segundos",m:"um minuto",mm:"%d minutos",h:"uma hora",hh:"%d horas",d:"um dia",dd:"%d dias",M:"um mês",MM:"%d meses",y:"um ano",yy:"%d anos"},ordinalParse:/\d{1,2}º/,ordinal:"%dº"});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : romanian (ro)
//! author : Vlad Gurdiga : https://github.com/gurdiga
//! author : Valentin Agachi : https://github.com/avaly
// NOTE(review): minified webpack module generated from moment.js locale source (t implements Romanian "de" plural rule) — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n){var r={mm:"minute",hh:"ore",dd:"zile",MM:"luni",yy:"ani"},a=" ";return(e%100>=20||e>=100&&e%100===0)&&(a=" de "),e+a+r[n]}var n=e.defineLocale("ro",{months:"ianuarie_februarie_martie_aprilie_mai_iunie_iulie_august_septembrie_octombrie_noiembrie_decembrie".split("_"),monthsShort:"ian._febr._mart._apr._mai_iun._iul._aug._sept._oct._nov._dec.".split("_"),weekdays:"duminică_luni_marți_miercuri_joi_vineri_sâmbătă".split("_"),weekdaysShort:"Dum_Lun_Mar_Mie_Joi_Vin_Sâm".split("_"),weekdaysMin:"Du_Lu_Ma_Mi_Jo_Vi_Sâ".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY H:mm",LLLL:"dddd, D MMMM YYYY H:mm"},calendar:{sameDay:"[azi la] LT",nextDay:"[mâine la] LT",nextWeek:"dddd [la] LT",lastDay:"[ieri la] LT",lastWeek:"[fosta] dddd [la] LT",sameElse:"L"},relativeTime:{future:"peste %s",past:"%s în urmă",s:"câteva secunde",m:"un minut",mm:t,h:"o oră",hh:t,d:"o zi",dd:t,M:"o lună",MM:t,y:"un an",yy:t},week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : russian (ru)
//! author : Viktorminator : https://github.com/Viktorminator
//! Author : Menelion Elensúle : https://github.com/Oire
// NOTE(review): minified webpack module generated from moment.js locale source; the line break inside the weekday-case regex below is present in the original file and is preserved byte-identically — do not hand-edit.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t){var n=e.split("_");return t%10===1&&t%100!==11?n[0]:t%10>=2&&4>=t%10&&(10>t%100||t%100>=20)?n[1]:n[2]}function n(e,n,r){var a={mm:n?"минута_минуты_минут":"минуту_минуты_минут",hh:"час_часа_часов",dd:"день_дня_дней",MM:"месяц_месяца_месяцев",yy:"год_года_лет"};return"m"===r?n?"минута":"минуту":e+" "+t(a[r],+e)}function r(e,t){var n={nominative:"январь_февраль_март_апрель_май_июнь_июль_август_сентябрь_октябрь_ноябрь_декабрь".split("_"),accusative:"января_февраля_марта_апреля_мая_июня_июля_августа_сентября_октября_ноября_декабря".split("_")},r=/D[oD]?(\[[^\[\]]*\]|\s+)+MMMM?/.test(t)?"accusative":"nominative";return n[r][e.month()]}function a(e,t){var n={nominative:"янв_фев_март_апр_май_июнь_июль_авг_сен_окт_ноя_дек".split("_"),accusative:"янв_фев_мар_апр_мая_июня_июля_авг_сен_окт_ноя_дек".split("_")},r=/D[oD]?(\[[^\[\]]*\]|\s+)+MMMM?/.test(t)?"accusative":"nominative";return n[r][e.month()]}function o(e,t){var n={nominative:"воскресенье_понедельник_вторник_среда_четверг_пятница_суббота".split("_"),accusative:"воскресенье_понедельник_вторник_среду_четверг_пятницу_субботу".split("_")},r=/\[ ?[Вв] ?(?:прошлую|следующую|эту)? 
?\] ?dddd/.test(t)?"accusative":"nominative";return n[r][e.day()]}var i=e.defineLocale("ru",{months:r,monthsShort:a,weekdays:o,weekdaysShort:"вс_пн_вт_ср_чт_пт_сб".split("_"),weekdaysMin:"вс_пн_вт_ср_чт_пт_сб".split("_"),monthsParse:[/^янв/i,/^фев/i,/^мар/i,/^апр/i,/^ма[й|я]/i,/^июн/i,/^июл/i,/^авг/i,/^сен/i,/^окт/i,/^ноя/i,/^дек/i],longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY г.",LLL:"D MMMM YYYY г., HH:mm",LLLL:"dddd, D MMMM YYYY г., HH:mm"},calendar:{sameDay:"[Сегодня в] LT",nextDay:"[Завтра в] LT",lastDay:"[Вчера в] LT",nextWeek:function(){return 2===this.day()?"[Во] dddd [в] LT":"[В] dddd [в] LT"},lastWeek:function(e){if(e.week()===this.week())return 2===this.day()?"[Во] dddd [в] LT":"[В] dddd [в] LT";switch(this.day()){case 0:return"[В прошлое] dddd [в] LT";case 1:case 2:case 4:return"[В прошлый] dddd [в] LT";case 3:case 5:case 6:return"[В прошлую] dddd [в] LT"}},sameElse:"L"},relativeTime:{future:"через %s",past:"%s назад",s:"несколько секунд",m:n,mm:n,h:"час",hh:n,d:"день",dd:n,M:"месяц",MM:n,y:"год",yy:n},meridiemParse:/ночи|утра|дня|вечера/i,isPM:function(e){return/^(дня|вечера)$/.test(e)},meridiem:function(e,t,n){return 4>e?"ночи":12>e?"утра":17>e?"дня":"вечера"},ordinalParse:/\d{1,2}-(й|го|я)/,ordinal:function(e,t){switch(t){case"M":case"d":case"DDD":return e+"-й";case"D":return e+"-го";case"w":case"W":return e+"-я";default:return e}},week:{dow:1,doy:7}});return i})},function(e,t,n){
//! moment.js locale configuration
//! locale : Sinhalese (si)
//! author : Sampath Sitinamaluwa : https://github.com/sampathsris
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("si",{months:"ජනවාරි_පෙබරවාරි_මාර්තු_අප්රේල්_මැයි_ජූනි_ජූලි_අගෝස්තු_සැප්තැම්බර්_ඔක්තෝබර්_නොවැම්බර්_දෙසැම්බර්".split("_"),monthsShort:"ජන_පෙබ_මාර්_අප්_මැයි_ජූනි_ජූලි_අගෝ_සැප්_ඔක්_නොවැ_දෙසැ".split("_"),weekdays:"ඉරිදා_සඳුදා_අඟහරුවාදා_බදාදා_බ්රහස්පතින්දා_සිකුරාදා_සෙනසුරාදා".split("_"),weekdaysShort:"ඉරි_සඳු_අඟ_බදා_බ්රහ_සිකු_සෙන".split("_"),weekdaysMin:"ඉ_ස_අ_බ_බ්ර_සි_සෙ".split("_"),longDateFormat:{LT:"a h:mm",LTS:"a h:mm:ss",L:"YYYY/MM/DD",LL:"YYYY MMMM D",LLL:"YYYY MMMM D, a h:mm",LLLL:"YYYY MMMM D [වැනි] dddd, a h:mm:ss"},calendar:{sameDay:"[අද] LT[ට]",nextDay:"[හෙට] LT[ට]",nextWeek:"dddd LT[ට]",lastDay:"[ඊයේ] LT[ට]",lastWeek:"[පසුගිය] dddd LT[ට]",sameElse:"L"},relativeTime:{future:"%sකින්",past:"%sකට පෙර",s:"තත්පර කිහිපය",m:"මිනිත්තුව",mm:"මිනිත්තු %d",h:"පැය",hh:"පැය %d",d:"දිනය",dd:"දින %d",M:"මාසය",MM:"මාස %d",y:"වසර",yy:"වසර %d"},ordinalParse:/\d{1,2} වැනි/,ordinal:function(e){return e+" වැනි"},meridiem:function(e,t,n){return e>11?n?"ප.ව.":"පස් වරු":n?"පෙ.ව.":"පෙර වරු"}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : slovak (sk)
//! author : Martin Minka : https://github.com/k2s
//! based on work of petrbela : https://github.com/petrbela
// NOTE(review): minified webpack module generated from moment.js locale source; the line break inside the LLLL format string below is present in the original file and is preserved byte-identically — do not hand-edit.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e){return e>1&&5>e}function n(e,n,r,a){var o=e+" ";switch(r){case"s":return n||a?"pár sekúnd":"pár sekundami";case"m":return n?"minúta":a?"minútu":"minútou";case"mm":return n||a?o+(t(e)?"minúty":"minút"):o+"minútami";case"h":return n?"hodina":a?"hodinu":"hodinou";case"hh":return n||a?o+(t(e)?"hodiny":"hodín"):o+"hodinami";case"d":return n||a?"deň":"dňom";case"dd":return n||a?o+(t(e)?"dni":"dní"):o+"dňami";case"M":return n||a?"mesiac":"mesiacom";case"MM":return n||a?o+(t(e)?"mesiace":"mesiacov"):o+"mesiacmi";case"y":return n||a?"rok":"rokom";case"yy":return n||a?o+(t(e)?"roky":"rokov"):o+"rokmi"}}var r="január_február_marec_apríl_máj_jún_júl_august_september_október_november_december".split("_"),a="jan_feb_mar_apr_máj_jún_júl_aug_sep_okt_nov_dec".split("_"),o=e.defineLocale("sk",{months:r,monthsShort:a,monthsParse:function(e,t){var n,r=[];for(n=0;12>n;n++)r[n]=new RegExp("^"+e[n]+"$|^"+t[n]+"$","i");return r}(r,a),weekdays:"nedeľa_pondelok_utorok_streda_štvrtok_piatok_sobota".split("_"),weekdaysShort:"ne_po_ut_st_št_pi_so".split("_"),weekdaysMin:"ne_po_ut_st_št_pi_so".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD.MM.YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd D. 
MMMM YYYY H:mm"},calendar:{sameDay:"[dnes o] LT",nextDay:"[zajtra o] LT",nextWeek:function(){switch(this.day()){case 0:return"[v nedeľu o] LT";case 1:case 2:return"[v] dddd [o] LT";case 3:return"[v stredu o] LT";case 4:return"[vo štvrtok o] LT";case 5:return"[v piatok o] LT";case 6:return"[v sobotu o] LT"}},lastDay:"[včera o] LT",lastWeek:function(){switch(this.day()){case 0:return"[minulú nedeľu o] LT";case 1:case 2:return"[minulý] dddd [o] LT";case 3:return"[minulú stredu o] LT";case 4:case 5:return"[minulý] dddd [o] LT";case 6:return"[minulú sobotu o] LT"}},sameElse:"L"},relativeTime:{future:"za %s",past:"pred %s",s:n,m:n,mm:n,h:n,hh:n,d:n,dd:n,M:n,MM:n,y:n,yy:n},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return o})},function(e,t,n){
//! moment.js locale configuration
//! locale : slovenian (sl)
//! author : Robert Sedovšek : https://github.com/sedovsek
// NOTE(review): minified webpack module generated from moment.js locale source; the line break inside the LLLL format string below is present in the original file and is preserved byte-identically — do not hand-edit.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){var a=e+" ";switch(n){case"s":return t||r?"nekaj sekund":"nekaj sekundami";case"m":return t?"ena minuta":"eno minuto";case"mm":return a+=1===e?t?"minuta":"minuto":2===e?t||r?"minuti":"minutama":5>e?t||r?"minute":"minutami":t||r?"minut":"minutami";case"h":return t?"ena ura":"eno uro";case"hh":return a+=1===e?t?"ura":"uro":2===e?t||r?"uri":"urama":5>e?t||r?"ure":"urami":t||r?"ur":"urami";case"d":return t||r?"en dan":"enim dnem";case"dd":return a+=1===e?t||r?"dan":"dnem":2===e?t||r?"dni":"dnevoma":t||r?"dni":"dnevi";case"M":return t||r?"en mesec":"enim mesecem";case"MM":return a+=1===e?t||r?"mesec":"mesecem":2===e?t||r?"meseca":"mesecema":5>e?t||r?"mesece":"meseci":t||r?"mesecev":"meseci";case"y":return t||r?"eno leto":"enim letom";case"yy":return a+=1===e?t||r?"leto":"letom":2===e?t||r?"leti":"letoma":5>e?t||r?"leta":"leti":t||r?"let":"leti"}}var n=e.defineLocale("sl",{months:"januar_februar_marec_april_maj_junij_julij_avgust_september_oktober_november_december".split("_"),monthsShort:"jan._feb._mar._apr._maj._jun._jul._avg._sep._okt._nov._dec.".split("_"),weekdays:"nedelja_ponedeljek_torek_sreda_četrtek_petek_sobota".split("_"),weekdaysShort:"ned._pon._tor._sre._čet._pet._sob.".split("_"),weekdaysMin:"ne_po_to_sr_če_pe_so".split("_"),longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD. MM. YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd, D. 
MMMM YYYY H:mm"},calendar:{sameDay:"[danes ob] LT",nextDay:"[jutri ob] LT",nextWeek:function(){switch(this.day()){case 0:return"[v] [nedeljo] [ob] LT";case 3:return"[v] [sredo] [ob] LT";case 6:return"[v] [soboto] [ob] LT";case 1:case 2:case 4:case 5:return"[v] dddd [ob] LT"}},lastDay:"[včeraj ob] LT",lastWeek:function(){switch(this.day()){case 0:return"[prejšnjo] [nedeljo] [ob] LT";case 3:return"[prejšnjo] [sredo] [ob] LT";case 6:return"[prejšnjo] [soboto] [ob] LT";case 1:case 2:case 4:case 5:return"[prejšnji] dddd [ob] LT"}},sameElse:"L"},relativeTime:{future:"čez %s",past:"pred %s",s:t,m:t,mm:t,h:t,hh:t,d:t,dd:t,M:t,MM:t,y:t,yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : Albanian (sq)
//! author : Flakërim Ismani : https://github.com/flakerimi
//! author: Menelion Elensúle: https://github.com/Oire (tests)
//! author : Oerd Cukalla : https://github.com/oerd (fixes)
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("sq",{months:"Janar_Shkurt_Mars_Prill_Maj_Qershor_Korrik_Gusht_Shtator_Tetor_Nëntor_Dhjetor".split("_"),monthsShort:"Jan_Shk_Mar_Pri_Maj_Qer_Kor_Gus_Sht_Tet_Nën_Dhj".split("_"),weekdays:"E Diel_E Hënë_E Martë_E Mërkurë_E Enjte_E Premte_E Shtunë".split("_"),weekdaysShort:"Die_Hën_Mar_Mër_Enj_Pre_Sht".split("_"),weekdaysMin:"D_H_Ma_Më_E_P_Sh".split("_"),meridiemParse:/PD|MD/,isPM:function(e){return"M"===e.charAt(0)},meridiem:function(e,t,n){return 12>e?"PD":"MD"},longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[Sot në] LT",nextDay:"[Nesër në] LT",nextWeek:"dddd [në] LT",lastDay:"[Dje në] LT",lastWeek:"dddd [e kaluar në] LT",sameElse:"L"},relativeTime:{future:"në %s",past:"%s më parë",s:"disa sekonda",m:"një minutë",mm:"%d minuta",h:"një orë",hh:"%d orë",d:"një ditë",dd:"%d ditë",M:"një muaj",MM:"%d muaj",y:"një vit",yy:"%d vite"},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Serbian-latin (sr)
//! author : Milan Janačković<milanjanackovic@gmail.com> : https://github.com/milan-j
// NOTE(review): minified webpack module generated from moment.js locale source (t.translate handles Serbian grammatical cases) — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={words:{m:["jedan minut","jedne minute"],mm:["minut","minute","minuta"],h:["jedan sat","jednog sata"],hh:["sat","sata","sati"],dd:["dan","dana","dana"],MM:["mesec","meseca","meseci"],yy:["godina","godine","godina"]},correctGrammaticalCase:function(e,t){return 1===e?t[0]:e>=2&&4>=e?t[1]:t[2]},translate:function(e,n,r){var a=t.words[r];return 1===r.length?n?a[0]:a[1]:e+" "+t.correctGrammaticalCase(e,a)}},n=e.defineLocale("sr",{months:["januar","februar","mart","april","maj","jun","jul","avgust","septembar","oktobar","novembar","decembar"],monthsShort:["jan.","feb.","mar.","apr.","maj","jun","jul","avg.","sep.","okt.","nov.","dec."],weekdays:["nedelja","ponedeljak","utorak","sreda","četvrtak","petak","subota"],weekdaysShort:["ned.","pon.","uto.","sre.","čet.","pet.","sub."],weekdaysMin:["ne","po","ut","sr","če","pe","su"],longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD. MM. YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd, D. MMMM YYYY H:mm"},calendar:{sameDay:"[danas u] LT",nextDay:"[sutra u] LT",nextWeek:function(){switch(this.day()){case 0:return"[u] [nedelju] [u] LT";case 3:return"[u] [sredu] [u] LT";case 6:return"[u] [subotu] [u] LT";case 1:case 2:case 4:case 5:return"[u] dddd [u] LT"}},lastDay:"[juče u] LT",lastWeek:function(){var e=["[prošle] [nedelje] [u] LT","[prošlog] [ponedeljka] [u] LT","[prošlog] [utorka] [u] LT","[prošle] [srede] [u] LT","[prošlog] [četvrtka] [u] LT","[prošlog] [petka] [u] LT","[prošle] [subote] [u] LT"];return e[this.day()]},sameElse:"L"},relativeTime:{future:"za %s",past:"pre %s",s:"nekoliko sekundi",m:t.translate,mm:t.translate,h:t.translate,hh:t.translate,d:"dan",dd:t.translate,M:"mesec",MM:t.translate,y:"godinu",yy:t.translate},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : Serbian-cyrillic (sr-cyrl)
//! author : Milan Janačković<milanjanackovic@gmail.com> : https://github.com/milan-j
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={words:{m:["један минут","једне минуте"],mm:["минут","минуте","минута"],h:["један сат","једног сата"],hh:["сат","сата","сати"],dd:["дан","дана","дана"],MM:["месец","месеца","месеци"],yy:["година","године","година"]},correctGrammaticalCase:function(e,t){return 1===e?t[0]:e>=2&&4>=e?t[1]:t[2]},translate:function(e,n,r){var a=t.words[r];return 1===r.length?n?a[0]:a[1]:e+" "+t.correctGrammaticalCase(e,a)}},n=e.defineLocale("sr-cyrl",{months:["јануар","фебруар","март","април","мај","јун","јул","август","септембар","октобар","новембар","децембар"],monthsShort:["јан.","феб.","мар.","апр.","мај","јун","јул","авг.","сеп.","окт.","нов.","дец."],weekdays:["недеља","понедељак","уторак","среда","четвртак","петак","субота"],weekdaysShort:["нед.","пон.","уто.","сре.","чет.","пет.","суб."],weekdaysMin:["не","по","ут","ср","че","пе","су"],longDateFormat:{LT:"H:mm",LTS:"H:mm:ss",L:"DD. MM. YYYY",LL:"D. MMMM YYYY",LLL:"D. MMMM YYYY H:mm",LLLL:"dddd, D. MMMM YYYY H:mm"},calendar:{sameDay:"[данас у] LT",nextDay:"[сутра у] LT",nextWeek:function(){switch(this.day()){case 0:return"[у] [недељу] [у] LT";case 3:return"[у] [среду] [у] LT";case 6:return"[у] [суботу] [у] LT";case 1:case 2:case 4:case 5:return"[у] dddd [у] LT"}},lastDay:"[јуче у] LT",lastWeek:function(){var e=["[прошле] [недеље] [у] LT","[прошлог] [понедељка] [у] LT","[прошлог] [уторка] [у] LT","[прошле] [среде] [у] LT","[прошлог] [четвртка] [у] LT","[прошлог] [петка] [у] LT","[прошле] [суботе] [у] LT"];return e[this.day()]},sameElse:"L"},relativeTime:{future:"за %s",past:"пре %s",s:"неколико секунди",m:t.translate,mm:t.translate,h:t.translate,hh:t.translate,d:"дан",dd:t.translate,M:"месец",MM:t.translate,y:"годину",yy:t.translate},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : swedish (sv)
//! author : Jens Alm : https://github.com/ulmus
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("sv",{months:"januari_februari_mars_april_maj_juni_juli_augusti_september_oktober_november_december".split("_"),monthsShort:"jan_feb_mar_apr_maj_jun_jul_aug_sep_okt_nov_dec".split("_"),weekdays:"söndag_måndag_tisdag_onsdag_torsdag_fredag_lördag".split("_"),weekdaysShort:"sön_mån_tis_ons_tor_fre_lör".split("_"),weekdaysMin:"sö_må_ti_on_to_fr_lö".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"YYYY-MM-DD",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[Idag] LT",nextDay:"[Imorgon] LT",lastDay:"[Igår] LT",nextWeek:"[På] dddd LT",lastWeek:"[I] dddd[s] LT",sameElse:"L"},relativeTime:{future:"om %s",past:"för %s sedan",s:"några sekunder",m:"en minut",mm:"%d minuter",h:"en timme",hh:"%d timmar",d:"en dag",dd:"%d dagar",M:"en månad",MM:"%d månader",y:"ett år",yy:"%d år"},ordinalParse:/\d{1,2}(e|a)/,ordinal:function(e){var t=e%10,n=1===~~(e%100/10)?"e":1===t?"a":2===t?"a":"e";return e+n},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : tamil (ta)
//! author : Arjunkumar Krishnamoorthy : https://github.com/tk120404
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("ta",{months:"ஜனவரி_பிப்ரவரி_மார்ச்_ஏப்ரல்_மே_ஜூன்_ஜூலை_ஆகஸ்ட்_செப்டெம்பர்_அக்டோபர்_நவம்பர்_டிசம்பர்".split("_"),monthsShort:"ஜனவரி_பிப்ரவரி_மார்ச்_ஏப்ரல்_மே_ஜூன்_ஜூலை_ஆகஸ்ட்_செப்டெம்பர்_அக்டோபர்_நவம்பர்_டிசம்பர்".split("_"),weekdays:"ஞாயிற்றுக்கிழமை_திங்கட்கிழமை_செவ்வாய்கிழமை_புதன்கிழமை_வியாழக்கிழமை_வெள்ளிக்கிழமை_சனிக்கிழமை".split("_"),weekdaysShort:"ஞாயிறு_திங்கள்_செவ்வாய்_புதன்_வியாழன்_வெள்ளி_சனி".split("_"),weekdaysMin:"ஞா_தி_செ_பு_வி_வெ_ச".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY, HH:mm",LLLL:"dddd, D MMMM YYYY, HH:mm"},calendar:{sameDay:"[இன்று] LT",nextDay:"[நாளை] LT",nextWeek:"dddd, LT",lastDay:"[நேற்று] LT",lastWeek:"[கடந்த வாரம்] dddd, LT",sameElse:"L"},relativeTime:{future:"%s இல்",past:"%s முன்",s:"ஒரு சில விநாடிகள்",m:"ஒரு நிமிடம்",mm:"%d நிமிடங்கள்",h:"ஒரு மணி நேரம்",hh:"%d மணி நேரம்",d:"ஒரு நாள்",dd:"%d நாட்கள்",M:"ஒரு மாதம்",MM:"%d மாதங்கள்",y:"ஒரு வருடம்",yy:"%d ஆண்டுகள்"},ordinalParse:/\d{1,2}வது/,ordinal:function(e){return e+"வது"},meridiemParse:/யாமம்|வைகறை|காலை|நண்பகல்|எற்பாடு|மாலை/,meridiem:function(e,t,n){return 2>e?" யாமம்":6>e?" வைகறை":10>e?" காலை":14>e?" நண்பகல்":18>e?" எற்பாடு":22>e?" மாலை":" யாமம்"},meridiemHour:function(e,t){return 12===e&&(e=0),"யாமம்"===t?2>e?e:e+12:"வைகறை"===t||"காலை"===t?e:"நண்பகல்"===t&&e>=10?e:e+12},week:{dow:0,doy:6}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : thai (th)
//! author : Kridsada Thanabulpong : https://github.com/sirn
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("th",{months:"มกราคม_กุมภาพันธ์_มีนาคม_เมษายน_พฤษภาคม_มิถุนายน_กรกฎาคม_สิงหาคม_กันยายน_ตุลาคม_พฤศจิกายน_ธันวาคม".split("_"),monthsShort:"มกรา_กุมภา_มีนา_เมษา_พฤษภา_มิถุนา_กรกฎา_สิงหา_กันยา_ตุลา_พฤศจิกา_ธันวา".split("_"),weekdays:"อาทิตย์_จันทร์_อังคาร_พุธ_พฤหัสบดี_ศุกร์_เสาร์".split("_"),weekdaysShort:"อาทิตย์_จันทร์_อังคาร_พุธ_พฤหัส_ศุกร์_เสาร์".split("_"),weekdaysMin:"อา._จ._อ._พ._พฤ._ศ._ส.".split("_"),longDateFormat:{LT:"H นาฬิกา m นาที",LTS:"H นาฬิกา m นาที s วินาที",L:"YYYY/MM/DD",LL:"D MMMM YYYY",LLL:"D MMMM YYYY เวลา H นาฬิกา m นาที",LLLL:"วันddddที่ D MMMM YYYY เวลา H นาฬิกา m นาที"},meridiemParse:/ก่อนเที่ยง|หลังเที่ยง/,isPM:function(e){return"หลังเที่ยง"===e},meridiem:function(e,t,n){return 12>e?"ก่อนเที่ยง":"หลังเที่ยง"},calendar:{sameDay:"[วันนี้ เวลา] LT",nextDay:"[พรุ่งนี้ เวลา] LT",nextWeek:"dddd[หน้า เวลา] LT",lastDay:"[เมื่อวานนี้ เวลา] LT",lastWeek:"[วัน]dddd[ที่แล้ว เวลา] LT",sameElse:"L"},relativeTime:{future:"อีก %s",past:"%sที่แล้ว",s:"ไม่กี่วินาที",m:"1 นาที",mm:"%d นาที",h:"1 ชั่วโมง",hh:"%d ชั่วโมง",d:"1 วัน",dd:"%d วัน",M:"1 เดือน",MM:"%d เดือน",y:"1 ปี",yy:"%d ปี"}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Tagalog/Filipino (tl-ph)
//! author : Dan Hagman
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("tl-ph",{months:"Enero_Pebrero_Marso_Abril_Mayo_Hunyo_Hulyo_Agosto_Setyembre_Oktubre_Nobyembre_Disyembre".split("_"),monthsShort:"Ene_Peb_Mar_Abr_May_Hun_Hul_Ago_Set_Okt_Nob_Dis".split("_"),weekdays:"Linggo_Lunes_Martes_Miyerkules_Huwebes_Biyernes_Sabado".split("_"),weekdaysShort:"Lin_Lun_Mar_Miy_Huw_Biy_Sab".split("_"),weekdaysMin:"Li_Lu_Ma_Mi_Hu_Bi_Sab".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"MM/D/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY HH:mm",LLLL:"dddd, MMMM DD, YYYY HH:mm"},calendar:{sameDay:"[Ngayon sa] LT",nextDay:"[Bukas sa] LT",nextWeek:"dddd [sa] LT",lastDay:"[Kahapon sa] LT",lastWeek:"dddd [huling linggo] LT",sameElse:"L"},relativeTime:{future:"sa loob ng %s",past:"%s ang nakalipas",s:"ilang segundo",m:"isang minuto",mm:"%d minuto",h:"isang oras",hh:"%d oras",d:"isang araw",dd:"%d araw",M:"isang buwan",MM:"%d buwan",y:"isang taon",yy:"%d taon"},ordinalParse:/\d{1,2}/,ordinal:function(e){return e},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : turkish (tr)
//! authors : Erhan Gundogan : https://github.com/erhangundogan,
//! Burak Yiğit Kaya: https://github.com/BYK
// NOTE(review): minified webpack module generated from moment.js locale source (t maps numbers to Turkish ordinal suffixes) — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t={1:"'inci",5:"'inci",8:"'inci",70:"'inci",80:"'inci",2:"'nci",7:"'nci",20:"'nci",50:"'nci",3:"'üncü",4:"'üncü",100:"'üncü",6:"'ncı",9:"'uncu",10:"'uncu",30:"'uncu",60:"'ıncı",90:"'ıncı"},n=e.defineLocale("tr",{months:"Ocak_Şubat_Mart_Nisan_Mayıs_Haziran_Temmuz_Ağustos_Eylül_Ekim_Kasım_Aralık".split("_"),monthsShort:"Oca_Şub_Mar_Nis_May_Haz_Tem_Ağu_Eyl_Eki_Kas_Ara".split("_"),weekdays:"Pazar_Pazartesi_Salı_Çarşamba_Perşembe_Cuma_Cumartesi".split("_"),weekdaysShort:"Paz_Pts_Sal_Çar_Per_Cum_Cts".split("_"),weekdaysMin:"Pz_Pt_Sa_Ça_Pe_Cu_Ct".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd, D MMMM YYYY HH:mm"},calendar:{sameDay:"[bugün saat] LT",nextDay:"[yarın saat] LT",nextWeek:"[haftaya] dddd [saat] LT",lastDay:"[dün] LT",lastWeek:"[geçen hafta] dddd [saat] LT",sameElse:"L"},relativeTime:{future:"%s sonra",past:"%s önce",s:"birkaç saniye",m:"bir dakika",mm:"%d dakika",h:"bir saat",hh:"%d saat",d:"bir gün",dd:"%d gün",M:"bir ay",MM:"%d ay",y:"bir yıl",yy:"%d yıl"},ordinalParse:/\d{1,2}'(inci|nci|üncü|ncı|uncu|ıncı)/,ordinal:function(e){if(0===e)return e+"'ıncı";var n=e%10,r=e%100-n,a=e>=100?100:null;return e+(t[n]||t[r]||t[a])},week:{dow:1,doy:7}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : talossan (tzl)
//! author : Robin van der Vliet : https://github.com/robin0van0der0v with the help of Iustì Canun
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t,n,r){var a={s:["viensas secunds","'iensas secunds"],m:["'n míut","'iens míut"],mm:[e+" míuts"," "+e+" míuts"],h:["'n þora","'iensa þora"],hh:[e+" þoras"," "+e+" þoras"],d:["'n ziua","'iensa ziua"],dd:[e+" ziuas"," "+e+" ziuas"],M:["'n mes","'iens mes"],MM:[e+" mesen"," "+e+" mesen"],y:["'n ar","'iens ar"],yy:[e+" ars"," "+e+" ars"]};return r?a[n][0]:t?a[n][0]:a[n][1].trim()}var n=e.defineLocale("tzl",{months:"Januar_Fevraglh_Març_Avrïu_Mai_Gün_Julia_Guscht_Setemvar_Listopäts_Noemvar_Zecemvar".split("_"),monthsShort:"Jan_Fev_Mar_Avr_Mai_Gün_Jul_Gus_Set_Lis_Noe_Zec".split("_"),weekdays:"Súladi_Lúneçi_Maitzi_Márcuri_Xhúadi_Viénerçi_Sáturi".split("_"),weekdaysShort:"Súl_Lún_Mai_Már_Xhú_Vié_Sát".split("_"),weekdaysMin:"Sú_Lú_Ma_Má_Xh_Vi_Sá".split("_"),longDateFormat:{LT:"HH.mm",LTS:"LT.ss",L:"DD.MM.YYYY",LL:"D. MMMM [dallas] YYYY",LLL:"D. MMMM [dallas] YYYY LT",LLLL:"dddd, [li] D. MMMM [dallas] YYYY LT"},meridiem:function(e,t,n){return e>11?n?"d'o":"D'O":n?"d'a":"D'A"},calendar:{sameDay:"[oxhi à] LT",nextDay:"[demà à] LT",nextWeek:"dddd [à] LT",lastDay:"[ieiri à] LT",lastWeek:"[sür el] dddd [lasteu à] LT",sameElse:"L"},relativeTime:{future:"osprei %s",past:"ja%s",s:t,m:t,mm:t,h:t,hh:t,d:t,dd:t,M:t,MM:t,y:t,yy:t},ordinalParse:/\d{1,2}\./,ordinal:"%d.",week:{dow:1,doy:4}});return n})},function(e,t,n){
//! moment.js locale configuration
//! locale : Morocco Central Atlas Tamaziɣt (tzm)
//! author : Abdel Said : https://github.com/abdelsaid
// NOTE(review): minified webpack module generated from moment.js locale source (Tifinagh script) — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("tzm",{months:"ⵉⵏⵏⴰⵢⵔ_ⴱⵕⴰⵢⵕ_ⵎⴰⵕⵚ_ⵉⴱⵔⵉⵔ_ⵎⴰⵢⵢⵓ_ⵢⵓⵏⵢⵓ_ⵢⵓⵍⵢⵓⵣ_ⵖⵓⵛⵜ_ⵛⵓⵜⴰⵏⴱⵉⵔ_ⴽⵟⵓⴱⵕ_ⵏⵓⵡⴰⵏⴱⵉⵔ_ⴷⵓⵊⵏⴱⵉⵔ".split("_"),monthsShort:"ⵉⵏⵏⴰⵢⵔ_ⴱⵕⴰⵢⵕ_ⵎⴰⵕⵚ_ⵉⴱⵔⵉⵔ_ⵎⴰⵢⵢⵓ_ⵢⵓⵏⵢⵓ_ⵢⵓⵍⵢⵓⵣ_ⵖⵓⵛⵜ_ⵛⵓⵜⴰⵏⴱⵉⵔ_ⴽⵟⵓⴱⵕ_ⵏⵓⵡⴰⵏⴱⵉⵔ_ⴷⵓⵊⵏⴱⵉⵔ".split("_"),weekdays:"ⴰⵙⴰⵎⴰⵙ_ⴰⵢⵏⴰⵙ_ⴰⵙⵉⵏⴰⵙ_ⴰⴽⵔⴰⵙ_ⴰⴽⵡⴰⵙ_ⴰⵙⵉⵎⵡⴰⵙ_ⴰⵙⵉⴹⵢⴰⵙ".split("_"),weekdaysShort:"ⴰⵙⴰⵎⴰⵙ_ⴰⵢⵏⴰⵙ_ⴰⵙⵉⵏⴰⵙ_ⴰⴽⵔⴰⵙ_ⴰⴽⵡⴰⵙ_ⴰⵙⵉⵎⵡⴰⵙ_ⴰⵙⵉⴹⵢⴰⵙ".split("_"),weekdaysMin:"ⴰⵙⴰⵎⴰⵙ_ⴰⵢⵏⴰⵙ_ⴰⵙⵉⵏⴰⵙ_ⴰⴽⵔⴰⵙ_ⴰⴽⵡⴰⵙ_ⴰⵙⵉⵎⵡⴰⵙ_ⴰⵙⵉⴹⵢⴰⵙ".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[ⴰⵙⴷⵅ ⴴ] LT",nextDay:"[ⴰⵙⴽⴰ ⴴ] LT",nextWeek:"dddd [ⴴ] LT",lastDay:"[ⴰⵚⴰⵏⵜ ⴴ] LT",lastWeek:"dddd [ⴴ] LT",sameElse:"L"},relativeTime:{future:"ⴷⴰⴷⵅ ⵙ ⵢⴰⵏ %s",past:"ⵢⴰⵏ %s",s:"ⵉⵎⵉⴽ",m:"ⵎⵉⵏⵓⴺ",mm:"%d ⵎⵉⵏⵓⴺ",h:"ⵙⴰⵄⴰ",hh:"%d ⵜⴰⵙⵙⴰⵄⵉⵏ",d:"ⴰⵙⵙ",dd:"%d oⵙⵙⴰⵏ",M:"ⴰⵢoⵓⵔ",MM:"%d ⵉⵢⵢⵉⵔⵏ",y:"ⴰⵙⴳⴰⵙ",yy:"%d ⵉⵙⴳⴰⵙⵏ"},week:{dow:6,doy:12}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : Morocco Central Atlas Tamaziɣt in Latin (tzm-latn)
//! author : Abdel Said : https://github.com/abdelsaid
// NOTE(review): minified webpack module generated from moment.js locale source — do not hand-edit; code left byte-identical.
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("tzm-latn",{months:"innayr_brˤayrˤ_marˤsˤ_ibrir_mayyw_ywnyw_ywlywz_ɣwšt_šwtanbir_ktˤwbrˤ_nwwanbir_dwjnbir".split("_"),monthsShort:"innayr_brˤayrˤ_marˤsˤ_ibrir_mayyw_ywnyw_ywlywz_ɣwšt_šwtanbir_ktˤwbrˤ_nwwanbir_dwjnbir".split("_"),weekdays:"asamas_aynas_asinas_akras_akwas_asimwas_asiḍyas".split("_"),weekdaysShort:"asamas_aynas_asinas_akras_akwas_asimwas_asiḍyas".split("_"),weekdaysMin:"asamas_aynas_asinas_akras_akwas_asimwas_asiḍyas".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[asdkh g] LT",nextDay:"[aska g] LT",nextWeek:"dddd [g] LT",lastDay:"[assant g] LT",lastWeek:"dddd [g] LT",sameElse:"L"},relativeTime:{future:"dadkh s yan %s",past:"yan %s",s:"imik",m:"minuḍ",mm:"%d minuḍ",h:"saɛa",hh:"%d tassaɛin",d:"ass",dd:"%d ossan",M:"ayowr",MM:"%d iyyirn",y:"asgas",yy:"%d isgasn"},week:{dow:6,doy:12}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : ukrainian (uk)
//! author : zemlanin : https://github.com/zemlanin
//! Author : Menelion Elensúle : https://github.com/Oire
!function(e,t){t(n(166))}(this,function(e){"use strict";function t(e,t){var n=e.split("_");return t%10===1&&t%100!==11?n[0]:t%10>=2&&4>=t%10&&(10>t%100||t%100>=20)?n[1]:n[2]}function n(e,n,r){var a={mm:"хвилина_хвилини_хвилин",hh:"година_години_годин",dd:"день_дні_днів",MM:"місяць_місяці_місяців",yy:"рік_роки_років"};return"m"===r?n?"хвилина":"хвилину":"h"===r?n?"година":"годину":e+" "+t(a[r],+e)}function r(e,t){var n={nominative:"січень_лютий_березень_квітень_травень_червень_липень_серпень_вересень_жовтень_листопад_грудень".split("_"),accusative:"січня_лютого_березня_квітня_травня_червня_липня_серпня_вересня_жовтня_листопада_грудня".split("_")},r=/D[oD]? *MMMM?/.test(t)?"accusative":"nominative";return n[r][e.month()]}function a(e,t){var n={nominative:"неділя_понеділок_вівторок_середа_четвер_п’ятниця_субота".split("_"),accusative:"неділю_понеділок_вівторок_середу_четвер_п’ятницю_суботу".split("_"),genitive:"неділі_понеділка_вівторка_середи_четверга_п’ятниці_суботи".split("_")},r=/(\[[ВвУу]\]) ?dddd/.test(t)?"accusative":/\[?(?:минулої|наступної)? 
?\] ?dddd/.test(t)?"genitive":"nominative";return n[r][e.day()]}function o(e){return function(){return e+"о"+(11===this.hours()?"б":"")+"] LT"}}var i=e.defineLocale("uk",{months:r,monthsShort:"січ_лют_бер_квіт_трав_черв_лип_серп_вер_жовт_лист_груд".split("_"),weekdays:a,weekdaysShort:"нд_пн_вт_ср_чт_пт_сб".split("_"),weekdaysMin:"нд_пн_вт_ср_чт_пт_сб".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD.MM.YYYY",LL:"D MMMM YYYY р.",LLL:"D MMMM YYYY р., HH:mm",LLLL:"dddd, D MMMM YYYY р., HH:mm"},calendar:{sameDay:o("[Сьогодні "),nextDay:o("[Завтра "),lastDay:o("[Вчора "),nextWeek:o("[У] dddd ["),lastWeek:function(){switch(this.day()){case 0:case 3:case 5:case 6:return o("[Минулої] dddd [").call(this);case 1:case 2:case 4:return o("[Минулого] dddd [").call(this)}},sameElse:"L"},relativeTime:{future:"за %s",past:"%s тому",s:"декілька секунд",m:n,mm:n,h:"годину",hh:n,d:"день",dd:n,M:"місяць",MM:n,y:"рік",yy:n},meridiemParse:/ночі|ранку|дня|вечора/,isPM:function(e){return/^(дня|вечора)$/.test(e)},meridiem:function(e,t,n){return 4>e?"ночі":12>e?"ранку":17>e?"дня":"вечора"},ordinalParse:/\d{1,2}-(й|го)/,ordinal:function(e,t){switch(t){case"M":case"d":case"DDD":case"w":case"W":return e+"-й";case"D":return e+"-го";default:return e}},week:{dow:1,doy:7}});return i})},function(e,t,n){
//! moment.js locale configuration
//! locale : uzbek (uz)
//! author : Sardor Muminov : https://github.com/muminoff
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("uz",{months:"январь_февраль_март_апрель_май_июнь_июль_август_сентябрь_октябрь_ноябрь_декабрь".split("_"),monthsShort:"янв_фев_мар_апр_май_июн_июл_авг_сен_окт_ноя_дек".split("_"),weekdays:"Якшанба_Душанба_Сешанба_Чоршанба_Пайшанба_Жума_Шанба".split("_"),weekdaysShort:"Якш_Душ_Сеш_Чор_Пай_Жум_Шан".split("_"),weekdaysMin:"Як_Ду_Се_Чо_Па_Жу_Ша".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"D MMMM YYYY, dddd HH:mm"},calendar:{sameDay:"[Бугун соат] LT [да]",nextDay:"[Эртага] LT [да]",nextWeek:"dddd [куни соат] LT [да]",lastDay:"[Кеча соат] LT [да]",lastWeek:"[Утган] dddd [куни соат] LT [да]",sameElse:"L"},relativeTime:{future:"Якин %s ичида",past:"Бир неча %s олдин",s:"фурсат",m:"бир дакика",mm:"%d дакика",h:"бир соат",hh:"%d соат",d:"бир кун",dd:"%d кун",M:"бир ой",MM:"%d ой",y:"бир йил",yy:"%d йил"},week:{dow:1,doy:7}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : vietnamese (vi)
//! author : Bang Nguyen : https://github.com/bangnk
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("vi",{months:"tháng 1_tháng 2_tháng 3_tháng 4_tháng 5_tháng 6_tháng 7_tháng 8_tháng 9_tháng 10_tháng 11_tháng 12".split("_"),monthsShort:"Th01_Th02_Th03_Th04_Th05_Th06_Th07_Th08_Th09_Th10_Th11_Th12".split("_"),weekdays:"chủ nhật_thứ hai_thứ ba_thứ tư_thứ năm_thứ sáu_thứ bảy".split("_"),weekdaysShort:"CN_T2_T3_T4_T5_T6_T7".split("_"),weekdaysMin:"CN_T2_T3_T4_T5_T6_T7".split("_"),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM [năm] YYYY",LLL:"D MMMM [năm] YYYY HH:mm",LLLL:"dddd, D MMMM [năm] YYYY HH:mm",l:"DD/M/YYYY",ll:"D MMM YYYY",lll:"D MMM YYYY HH:mm",llll:"ddd, D MMM YYYY HH:mm"},calendar:{sameDay:"[Hôm nay lúc] LT",nextDay:"[Ngày mai lúc] LT",nextWeek:"dddd [tuần tới lúc] LT",lastDay:"[Hôm qua lúc] LT",lastWeek:"dddd [tuần rồi lúc] LT",sameElse:"L"},relativeTime:{future:"%s tới",past:"%s trước",s:"vài giây",m:"một phút",mm:"%d phút",h:"một giờ",hh:"%d giờ",d:"một ngày",dd:"%d ngày",M:"một tháng",MM:"%d tháng",y:"một năm",yy:"%d năm"},ordinalParse:/\d{1,2}/,ordinal:function(e){return e},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : chinese (zh-cn)
//! author : suupic : https://github.com/suupic
//! author : Zeno Zeng : https://github.com/zenozeng
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("zh-cn",{months:"一月_二月_三月_四月_五月_六月_七月_八月_九月_十月_十一月_十二月".split("_"),monthsShort:"1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月".split("_"),weekdays:"星期日_星期一_星期二_星期三_星期四_星期五_星期六".split("_"),weekdaysShort:"周日_周一_周二_周三_周四_周五_周六".split("_"),weekdaysMin:"日_一_二_三_四_五_六".split("_"),longDateFormat:{LT:"Ah点mm分",LTS:"Ah点m分s秒",L:"YYYY-MM-DD",LL:"YYYY年MMMD日",LLL:"YYYY年MMMD日Ah点mm分",LLLL:"YYYY年MMMD日ddddAh点mm分",l:"YYYY-MM-DD",ll:"YYYY年MMMD日",lll:"YYYY年MMMD日Ah点mm分",llll:"YYYY年MMMD日ddddAh点mm分"},meridiemParse:/凌晨|早上|上午|中午|下午|晚上/,meridiemHour:function(e,t){return 12===e&&(e=0),"凌晨"===t||"早上"===t||"上午"===t?e:"下午"===t||"晚上"===t?e+12:e>=11?e:e+12},meridiem:function(e,t,n){var r=100*e+t;return 600>r?"凌晨":900>r?"早上":1130>r?"上午":1230>r?"中午":1800>r?"下午":"晚上"},calendar:{sameDay:function(){return 0===this.minutes()?"[今天]Ah[点整]":"[今天]LT"},nextDay:function(){return 0===this.minutes()?"[明天]Ah[点整]":"[明天]LT"},lastDay:function(){return 0===this.minutes()?"[昨天]Ah[点整]":"[昨天]LT"},nextWeek:function(){var t,n;return t=e().startOf("week"),n=this.unix()-t.unix()>=604800?"[下]":"[本]",0===this.minutes()?n+"dddAh点整":n+"dddAh点mm"},lastWeek:function(){var t,n;return t=e().startOf("week"),n=this.unix()<t.unix()?"[上]":"[本]",0===this.minutes()?n+"dddAh点整":n+"dddAh点mm"},sameElse:"LL"},ordinalParse:/\d{1,2}(日|月|周)/,ordinal:function(e,t){switch(t){case"d":case"D":case"DDD":return e+"日";case"M":return e+"月";case"w":case"W":return e+"周";default:return e}},relativeTime:{future:"%s内",past:"%s前",s:"几秒",m:"1 分钟",mm:"%d 分钟",h:"1 小时",hh:"%d 小时",d:"1 天",dd:"%d 天",M:"1 个月",MM:"%d 个月",y:"1 年",yy:"%d 年"},week:{dow:1,doy:4}});return t})},function(e,t,n){
//! moment.js locale configuration
//! locale : traditional chinese (zh-tw)
//! author : Ben : https://github.com/ben-lin
!function(e,t){t(n(166))}(this,function(e){"use strict";var t=e.defineLocale("zh-tw",{months:"一月_二月_三月_四月_五月_六月_七月_八月_九月_十月_十一月_十二月".split("_"),monthsShort:"1月_2月_3月_4月_5月_6月_7月_8月_9月_10月_11月_12月".split("_"),weekdays:"星期日_星期一_星期二_星期三_星期四_星期五_星期六".split("_"),weekdaysShort:"週日_週一_週二_週三_週四_週五_週六".split("_"),weekdaysMin:"日_一_二_三_四_五_六".split("_"),longDateFormat:{LT:"Ah點mm分",LTS:"Ah點m分s秒",L:"YYYY年MMMD日",LL:"YYYY年MMMD日",LLL:"YYYY年MMMD日Ah點mm分",LLLL:"YYYY年MMMD日ddddAh點mm分",l:"YYYY年MMMD日",ll:"YYYY年MMMD日",lll:"YYYY年MMMD日Ah點mm分",llll:"YYYY年MMMD日ddddAh點mm分"},meridiemParse:/早上|上午|中午|下午|晚上/,meridiemHour:function(e,t){return 12===e&&(e=0),"早上"===t||"上午"===t?e:"中午"===t?e>=11?e:e+12:"下午"===t||"晚上"===t?e+12:void 0},meridiem:function(e,t,n){var r=100*e+t;return 900>r?"早上":1130>r?"上午":1230>r?"中午":1800>r?"下午":"晚上"},calendar:{sameDay:"[今天]LT",nextDay:"[明天]LT",nextWeek:"[下]ddddLT",lastDay:"[昨天]LT",lastWeek:"[上]ddddLT",sameElse:"L"},ordinalParse:/\d{1,2}(日|月|週)/,ordinal:function(e,t){switch(t){case"d":case"D":case"DDD":return e+"日";case"M":return e+"月";case"w":case"W":return e+"週";default:return e}},relativeTime:{future:"%s內",past:"%s前",s:"幾秒",m:"一分鐘",mm:"%d分鐘",h:"一小時",hh:"%d小時",d:"一天",dd:"%d天",M:"一個月",MM:"%d個月",y:"一年",yy:"%d年"}});return t})},function(e,t,n){e.exports=n.p+"eb5b7cbecc413d009de3b08e876c561d.png"},function(e,t,n){"use strict";function r(e,t,n){var r=document.createElement("img");r.onload=function(){var e=document.createElement("canvas");e.width=r.width,e.height=r.height;var a=e.getContext("2d");a.clearRect(0,0,r.width,r.height),a.drawImage(r,0,0);var o=r.height-9,i=r.width-7-1,s=16,u=16,d=2;a.fillStyle="#F03D25",a.strokeStyle="#F03D25",a.lineWidth=1,a.beginPath(),a.moveTo(i+d,o),a.quadraticCurveTo(i,o,i,o+d),a.lineTo(i,s-d),a.quadraticCurveTo(i,s,i+d,s),a.lineTo(u-d,s),a.quadraticCurveTo(u,s,u,s-d),a.lineTo(u,o+d),a.quadraticCurveTo(u,o,u-d,o),a.closePath(),a.fill(),a.font="bold 10px 
arial",a.fillStyle="#FFF",a.textAlign="right",a.textBaseline="top",a.fillText(t,15,6),n(null,a.canvas.toDataURL())},r.src=e}var a,o=n(7),i=o.createClass({displayName:"Favicon",getDefaultProps:function(){return{alertCount:null,animated:!0,animationDelay:500}},getInitialState:function(){return{animationIndex:0,animationLoop:null,animationRunning:!1}},statics:{mountedInstances:[],getActiveInstance:function(){return i.mountedInstances[i.mountedInstances.length-1]},draw:function(){if("undefined"!=typeof document){if("undefined"==typeof a){var e=document.getElementsByTagName("head")[0];a=document.createElement("link"),a.type="image/x-icon",a.rel="icon";for(var t=e.getElementsByTagName("link"),n=t.length;--n>=0;/\bicon\b/i.test(t[n].getAttribute("rel"))&&e.removeChild(t[n]));e.appendChild(a)}var o,s=i.getActiveInstance();o=s.props.url instanceof Array?s.props.url[s.state.animationIndex]:s.props.url,s.props.alertCount?r(o,s.props.alertCount,function(e,t){a.href=t}):a.href=o}},update:function(){if("undefined"!=typeof document){var e=i.getActiveInstance(),t=e.props.url instanceof Array&&e.props.animated,n=null;if(clearInterval(e.state.animationLoop),t){var r=function(){var t=(e.state.animationIndex+1)%e.props.url.length;i.draw(),e.setState({animationIndex:t})};n=setInterval(r,e.props.animationDelay),r()}else i.draw();e.setState({animationLoop:n})}}},componentWillMount:function(){i.mountedInstances.push(this),i.update()},componentDidUpdate:function(e){(e.url!==this.props.url||e.animated!==this.props.animated||e.alertCount!==this.props.alertCount)&&i.update()},render:function(){return null}});e.exports=i},function(e,t,n){e.exports=n.p+"db575f67b2a47099e51fd909d12e3392.ico"}]);
//# sourceMappingURL=bundle.36c698ce42050a6f7fed.js.map
|
/*
* Copyright (c) 2019 Antmicro Ltd
*
* Copyright (c) 2019 Intel Corporation
*
* SPDX-License-Identifier: Apache-2.0
*/
#ifndef ZEPHYR_INCLUDE_NET_SOCKS_H_
#define ZEPHYR_INCLUDE_NET_SOCKS_H_
#include <zephyr/net/socket.h>
/**@brief Connects to destination through a SOCKS5 proxy server.
*
* @param[in] ctx Network context.
* @param[in] dest Address of the destination server.
* @param[in] dest_len Address length of the destination server.
*
* @retval 0 on success, or a negative error code on failure.
*/
#if defined(CONFIG_SOCKS)
int net_socks5_connect(struct net_context *ctx,
		       const struct sockaddr *dest,
		       socklen_t dest_len);
#else
/* Stub used when CONFIG_SOCKS is disabled so callers still compile.
 *
 * `static` is required here: a plain `inline` function in a header has
 * external linkage under C99, so any translation unit where the compiler
 * declines to inline the call produces an "undefined reference" link error.
 * `static inline` gives each TU its own internal definition.
 */
static inline int net_socks5_connect(struct net_context *ctx,
				     const struct sockaddr *dest,
				     socklen_t dest_len)
{
	ARG_UNUSED(ctx);
	ARG_UNUSED(dest);
	ARG_UNUSED(dest_len);
	return -ENOTSUP;
}
#endif
#endif /* ZEPHYR_INCLUDE_NET_SOCKS_H_ */
|
'use strict';
const utils = require('./utils');
const webpack = require('webpack');
const config = require('../config');
const merge = require('webpack-merge');
const path = require('path');
const baseWebpackConfig = require('./webpack.base.conf');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const FriendlyErrorsPlugin = require('friendly-errors-webpack-plugin');
const portfinder = require('portfinder');
const HOST = process.env.HOST;
const PORT = process.env.PORT && Number(process.env.PORT);
// Options for webpack-dev-server; kept separate from the merged config so the
// dev-only serving concerns are easy to scan in one place.
const devServerOptions = {
  clientLogLevel: 'warning',
  // SPA routing: rewrite every path to index.html under the public path.
  historyApiFallback: {
    rewrites: [
      {
        from: /.*/,
        to: path.posix.join(config.dev.assetsPublicPath, 'index.html')
      }
    ]
  },
  hot: true,
  contentBase: false, // since we use CopyWebpackPlugin.
  compress: true,
  host: HOST || config.dev.host,
  port: PORT || config.dev.port,
  open: config.dev.autoOpenBrowser,
  overlay: config.dev.errorOverlay
    ? { warnings: false, errors: true }
    : false,
  publicPath: config.dev.assetsPublicPath,
  proxy: config.dev.proxyTable,
  quiet: true, // necessary for FriendlyErrorsPlugin
  watchOptions: {
    poll: config.dev.poll
  }
};

// Development build: the shared base config plus dev-server, HMR and
// dev-only plugins.
const devWebpackConfig = merge(baseWebpackConfig, {
  mode: 'development',
  module: {
    rules: utils.styleLoaders({
      sourceMap: config.dev.cssSourceMap,
      usePostCSS: true
    })
  },
  // cheap-module-eval-source-map is faster for development
  devtool: config.dev.devtool,
  // these devServer options should be customized in /config/index.js
  devServer: devServerOptions,
  optimization: {
    namedModules: true, // NamedModulesPlugin()
    noEmitOnErrors: true // NoEmitOnErrorsPlugin
  },
  plugins: [
    new webpack.DefinePlugin({
      'process.env': require('../config/dev.env')
    }),
    new webpack.HotModuleReplacementPlugin(),
    // https://github.com/ampedandwired/html-webpack-plugin
    new HtmlWebpackPlugin({
      filename: 'index.html',
      template: 'index.html',
      inject: true
    }),
    // copy custom static assets
    new CopyWebpackPlugin([
      {
        from: path.resolve(__dirname, '../static'),
        to: config.dev.assetsSubDirectory,
        ignore: ['.*']
      }
    ])
  ]
});
module.exports = new Promise((resolve, reject) => {
portfinder.basePort = process.env.PORT || config.dev.port;
portfinder.getPort((err, port) => {
if (err) {
reject(err);
} else {
// publish the new Port, necessary for e2e tests
process.env.PORT = port;
// add port to devServer config
devWebpackConfig.devServer.port = port;
// Add FriendlyErrorsPlugin
devWebpackConfig.plugins.push(
new FriendlyErrorsPlugin({
compilationSuccessInfo: {
messages: [
`Your application is running here: http://${
devWebpackConfig.devServer.host
}:${port}`
]
},
onErrors: config.dev.notifyOnErrors
? utils.createNotifierCallback()
: undefined
})
);
resolve(devWebpackConfig);
}
});
});
|
from django.urls import path
from django.views.generic import TemplateView, RedirectView
from . import views
# URL routes for the "main" app.  Static SEO/verification files are served
# directly from templates; the favicon redirects into the static tree.
urlpatterns = [
    path('', views.index, name='main'),
    path('contacts', views.contacts, name='contacts'),
    # Yandex.Webmaster site-ownership verification page.
    path('yandex_2076966e8ed47beb.html', TemplateView.as_view(template_name='main/yandex_2076966e8ed47beb.html')),
    path('sitemap.xml', TemplateView.as_view(template_name='main/sitemap.xml')),
    path('robots.txt', TemplateView.as_view(template_name='main/robots.txt')),
    # NOTE(review): the redirect target is relative ('static/...'), so it only
    # resolves to /static/... when the request comes from the site root —
    # confirm whether '/static/images/favicon.ico' was intended.
    path('favicon.ico', RedirectView.as_view(url='static/images/favicon.ico', permanent=True))
]
|
import datetime
from functools import partial
import json
import logging
import os
import platform
import re
import shutil
import sys
import tempfile
from dateutil.relativedelta import relativedelta
from gql import Client, gql
from gql.client import RetryError
from gql.transport.requests import RequestsHTTPTransport
import requests
import six
from six.moves import urllib
import wandb
from wandb import __version__, env, util
from wandb.apis.internal import Api as InternalApi
from wandb.apis.normalize import normalize_exceptions
from wandb.data_types import WBValue
from wandb.errors.term import termlog
from wandb.old.summary import HTTPSummary
import yaml
# Pick the native SDK package on Python >= 3.6, else the py27-converted one.
PY3 = sys.version_info.major == 3 and sys.version_info.minor >= 6
if PY3:
    from wandb.sdk.lib import retry
else:
    from wandb.sdk_py27.lib import retry
# TODO: consolidate dynamic imports
PY3 = sys.version_info.major == 3 and sys.version_info.minor >= 6
if PY3:
    from wandb.sdk.interface import artifacts
else:
    from wandb.sdk_py27.interface import artifacts
logger = logging.getLogger(__name__)
# Only retry requests for 20 seconds in the public api
RETRY_TIMEDELTA = datetime.timedelta(seconds=20)
# Config keys written by wandb itself rather than by user code.
WANDB_INTERNAL_KEYS = {"_wandb", "wandb_version"}
# --- GraphQL fragments shared by the paginator/query classes below. ---
# Each fragment enumerates the server-side fields fetched for one object type.

# Core metadata for a wandb project.
PROJECT_FRAGMENT = """fragment ProjectFragment on Project {
id
name
entityName
createdAt
isBenchmark
}"""
# Full run record: identity, grouping, lifecycle timestamps, config and
# metrics blobs, plus the owning user.
RUN_FRAGMENT = """fragment RunFragment on Run {
id
tags
name
displayName
sweepName
state
config
group
jobType
commit
readOnly
createdAt
heartbeatAt
description
notes
systemMetrics
summaryMetrics
historyLineCount
user {
name
username
}
historyKeys
}"""
# Paged file listing for a run; expects $fileNames/$fileCursor/$fileLimit
# and $upload variables in the enclosing query.
FILE_FRAGMENT = """fragment RunFilesFragment on Run {
files(names: $fileNames, after: $fileCursor, first: $fileLimit) {
edges {
node {
id
name
url(upload: $upload)
directUrl
sizeBytes
mimetype
updatedAt
md5
}
cursor
}
pageInfo {
endCursor
hasNextPage
}
}
}"""
# Paged listing of the artifact types defined in a project.
ARTIFACTS_TYPES_FRAGMENT = """
fragment ArtifactTypesFragment on ArtifactTypeConnection {
edges {
node {
id
name
description
createdAt
}
cursor
}
pageInfo {
endCursor
hasNextPage
}
}
"""
# Full artifact record: content digest, version/alias bookkeeping and the
# owning sequence and type.
ARTIFACT_FRAGMENT = """
fragment ArtifactFragment on Artifact {
id
digest
description
state
size
createdAt
updatedAt
labels
metadata
versionIndex
aliases {
artifactCollectionName
alias
}
artifactSequence {
id
name
}
artifactType {
id
name
}
commitHash
}
"""
# TODO, factor out common file fragment
# Paged file listing for an artifact (near-duplicate of FILE_FRAGMENT).
ARTIFACT_FILES_FRAGMENT = """fragment ArtifactFilesFragment on Artifact {
files(names: $fileNames, after: $fileCursor, first: $fileLimit) {
edges {
node {
id
name: displayName
url
sizeBytes
mimetype
updatedAt
digest
md5
}
cursor
}
pageInfo {
endCursor
hasNextPage
}
}
}"""
class RetryingClient(object):
    """Thin wrapper around a gql ``Client`` that retries ``execute`` calls.

    Retries are bounded by ``RETRY_TIMEDELTA`` and suppressed for
    non-retryable auth failures (``util.no_retry_auth``).
    """

    def __init__(self, client):
        self._client = client

    @property
    def app_url(self):
        # Derive the web-UI base URL from the GraphQL endpoint URL.
        graphql_url = self._client.transport.url
        return util.app_url(graphql_url).replace("/graphql", "/")

    @retry.retriable(
        retry_timedelta=RETRY_TIMEDELTA,
        check_retry_fn=util.no_retry_auth,
        retryable_exceptions=(RetryError, requests.RequestException),
    )
    def execute(self, *args, **kwargs):
        """Delegate to the wrapped client, retrying transient failures."""
        return self._client.execute(*args, **kwargs)
class Api(object):
    """
    Used for querying the wandb server.

    Examples:
        Most common way to initialize
        >>> wandb.Api()

    Arguments:
        overrides: (dict) You can set `base_url` if you are using a wandb server
            other than https://api.wandb.ai.
            You can also set defaults for `entity`, `project`, and `run`.
    """

    _HTTP_TIMEOUT = env.get_http_timeout(9)
    VIEWER_QUERY = gql(
        """
        query Viewer{
            viewer {
                id
                flags
                entity
                teams {
                    edges {
                        node {
                            name
                        }
                    }
                }
            }
        }
        """
    )

    def __init__(self, overrides=None):
        # `None` sentinel instead of a mutable `{}` default avoids the
        # shared-mutable-default pitfall; callers see identical behavior.
        overrides = {} if overrides is None else overrides
        self.settings = InternalApi().settings()
        if self.api_key is None:
            wandb.login()
        self.settings.update(overrides)
        if "username" in overrides and "entity" not in overrides:
            wandb.termwarn(
                'Passing "username" to Api is deprecated. please use "entity" instead.'
            )
            self.settings["entity"] = overrides["username"]
        # Per-path caches; see `flush()`.
        self._projects = {}
        self._runs = {}
        self._sweeps = {}
        self._reports = {}
        self._default_entity = None
        self._base_client = Client(
            transport=RequestsHTTPTransport(
                headers={"User-Agent": self.user_agent, "Use-Admin-Privileges": "true"},
                use_json=True,
                # this timeout won't apply when the DNS lookup fails. in that case, it will be 60s
                # https://bugs.python.org/issue22889
                timeout=self._HTTP_TIMEOUT,
                auth=("api", self.api_key),
                url="%s/graphql" % self.settings["base_url"],
            )
        )
        self._client = RetryingClient(self._base_client)

    def create_run(self, **kwargs):
        """Create a new run"""
        if kwargs.get("entity") is None:
            kwargs["entity"] = self.default_entity
        return Run.create(self, **kwargs)

    def sync_tensorboard(self, root_dir, run_id=None, project=None, entity=None):
        """Sync a local directory containing tfevent files to wandb"""
        from wandb.sync import SyncManager  # noqa: F401 TODO: circular import madness

        run_id = run_id or util.generate_id()
        project = project or self.settings.get("project") or "uncategorized"
        entity = entity or self.default_entity
        sm = SyncManager(
            project=project,
            entity=entity,
            run_id=run_id,
            mark_synced=False,
            app_url=self.client.app_url,
            view=False,
            verbose=False,
            sync_tensorboard=True,
        )
        sm.add(root_dir)
        sm.start()
        # Block until the sync worker drains its queue.
        while not sm.is_done():
            _ = sm.poll()
        return self.run("/".join([entity, project, run_id]))

    @property
    def client(self):
        return self._client

    @property
    def user_agent(self):
        return "W&B Public Client %s" % __version__

    @property
    def api_key(self):
        # ~/.netrc credentials for base_url, unless the env var overrides.
        auth = requests.utils.get_netrc_auth(self.settings["base_url"])
        key = None
        if auth:
            key = auth[-1]
        # Environment should take precedence
        if os.getenv("WANDB_API_KEY"):
            key = os.environ["WANDB_API_KEY"]
        return key

    @property
    def default_entity(self):
        # Lazily resolved from the server and cached for the Api lifetime.
        if self._default_entity is None:
            res = self._client.execute(self.VIEWER_QUERY)
            self._default_entity = (res.get("viewer") or {}).get("entity")
        return self._default_entity

    def flush(self):
        """
        The api object keeps a local cache of runs, so if the state of the run may
        change while executing your script you must clear the local cache with `api.flush()`
        to get the latest values associated with the run.
        """
        self._runs = {}

    def _parse_project_path(self, path):
        """Returns project and entity for project specified by path"""
        project = self.settings["project"]
        entity = self.settings["entity"] or self.default_entity
        if path is None:
            return entity, project
        parts = path.split("/", 1)
        if len(parts) == 1:
            return entity, path
        return parts

    def _parse_path(self, path):
        """Parses paths in the following formats:

        url: entity/project/runs/run_id
        path: entity/project/run_id
        docker: entity/project:run_id

        entity is optional and will fallback to the current logged in user.
        """
        project = self.settings["project"]
        entity = self.settings["entity"] or self.default_entity
        # Default the run id so empty paths ("" or "/") no longer raise
        # UnboundLocalError below.
        run = ""
        parts = path.replace("/runs/", "/").strip("/ ").split("/")
        if ":" in parts[-1]:
            run = parts[-1].split(":")[-1]
            parts[-1] = parts[-1].split(":")[0]
        elif parts[-1]:
            run = parts[-1]
        if len(parts) > 1:
            project = parts[1]
            if entity and run == project:
                project = parts[0]
            else:
                entity = parts[0]
            if len(parts) == 3:
                entity = parts[0]
        else:
            project = parts[0]
        return entity, project, run

    def _parse_artifact_path(self, path):
        """Returns project, entity and artifact name for project specified by path"""
        project = self.settings["project"]
        entity = self.settings["entity"] or self.default_entity
        if path is None:
            # NOTE(review): callers unpack three values, so a None path will
            # raise at the call site — confirm whether that is intended.
            return entity, project
        parts = path.split("/")
        if len(parts) > 3:
            raise ValueError("Invalid artifact path: %s" % path)
        elif len(parts) == 1:
            return entity, project, path
        elif len(parts) == 2:
            return entity, parts[0], parts[1]
        return parts

    def projects(self, entity=None, per_page=200):
        """
        Get projects for a given entity.

        Arguments:
            entity: (str) Name of the entity requested. If None will fallback to
                default entity passed to `Api`. If no default entity, will raise a `ValueError`.
            per_page: (int) Sets the page size for query pagination. None will use the default size.
                Usually there is no reason to change this.

        Returns:
            A `Projects` object which is an iterable collection of `Project` objects.
        """
        if entity is None:
            entity = self.settings["entity"] or self.default_entity
            if entity is None:
                raise ValueError(
                    "entity must be passed as a parameter, or set in settings"
                )
        if entity not in self._projects:
            self._projects[entity] = Projects(self.client, entity, per_page=per_page)
        return self._projects[entity]

    def reports(self, path="", name=None, per_page=50):
        """Get reports for a given project path.

        WARNING: This api is in beta and will likely change in a future release

        Arguments:
            path: (str) path to project the report resides in, should be in the form: "entity/project"
            name: (str) optional name of the report requested.
            per_page: (int) Sets the page size for query pagination. None will use the default size.
                Usually there is no reason to change this.

        Returns:
            A `Reports` object which is an iterable collection of `BetaReport` objects.
        """
        entity, project, run = self._parse_path(path)
        if entity is None:
            entity = self.settings["entity"] or self.default_entity
            if entity is None:
                raise ValueError(
                    "entity must be passed as a parameter, or set in settings"
                )
        if name:
            name = urllib.parse.unquote(name)
        key = "/".join([entity, project, str(name)])
        if key not in self._reports:
            self._reports[key] = Reports(
                self.client,
                Project(self.client, entity, project, {}),
                name=name,
                per_page=per_page,
            )
        return self._reports[key]

    def runs(self, path="", filters=None, order="-created_at", per_page=50):
        """
        Return a set of runs from a project that match the filters provided.
        You can filter by `config.*`, `summary.*`, `state`, `entity`, `createdAt`, etc.

        Examples:
            Find runs in my_project where config.experiment_name has been set to "foo"
            ```
            api.runs(path="my_entity/my_project", filters={"config.experiment_name": "foo"})
            ```

            Find runs in my_project where config.experiment_name has been set to "foo" or "bar"
            ```
            api.runs(path="my_entity/my_project",
                filters={"$or": [{"config.experiment_name": "foo"}, {"config.experiment_name": "bar"}]})
            ```

            Find runs in my_project where config.experiment_name matches a regex (anchors are not supported)
            ```
            api.runs(path="my_entity/my_project",
                filters={"config.experiment_name": {"$regex": "b.*"}})
            ```

            Find runs in my_project sorted by ascending loss
            ```
            api.runs(path="my_entity/my_project", order="+summary_metrics.loss")
            ```

        Arguments:
            path: (str) path to project, should be in the form: "entity/project"
            filters: (dict) queries for specific runs using the MongoDB query language.
                You can filter by run properties such as config.key, summary_metrics.key, state, entity, createdAt, etc.
                For example: {"config.experiment_name": "foo"} would find runs with a config entry
                    of experiment name set to "foo"
                You can compose operations to make more complicated queries,
                    see Reference for the language is at https://docs.mongodb.com/manual/reference/operator/query
            order: (str) Order can be `created_at`, `heartbeat_at`, `config.*.value`, or `summary_metrics.*`.
                If you prepend order with a + order is ascending.
                If you prepend order with a - order is descending (default).
                The default order is run.created_at from newest to oldest.

        Returns:
            A `Runs` object, which is an iterable collection of `Run` objects.
        """
        entity, project = self._parse_project_path(path)
        filters = filters or {}
        key = path + str(filters) + str(order)
        if not self._runs.get(key):
            self._runs[key] = Runs(
                self.client,
                entity,
                project,
                filters=filters,
                order=order,
                per_page=per_page,
            )
        return self._runs[key]

    @normalize_exceptions
    def run(self, path=""):
        """
        Returns a single run by parsing path in the form entity/project/run_id.

        Arguments:
            path: (str) path to run in the form `entity/project/run_id`.
                If api.entity is set, this can be in the form `project/run_id`
                and if `api.project` is set this can just be the run_id.

        Returns:
            A `Run` object.
        """
        entity, project, run = self._parse_path(path)
        if not self._runs.get(path):
            self._runs[path] = Run(self.client, entity, project, run)
        return self._runs[path]

    @normalize_exceptions
    def sweep(self, path=""):
        """
        Returns a sweep by parsing path in the form `entity/project/sweep_id`.

        Arguments:
            path: (str, optional) path to sweep in the form entity/project/sweep_id. If api.entity
                is set, this can be in the form project/sweep_id and if `api.project` is set
                this can just be the sweep_id.

        Returns:
            A `Sweep` object.
        """
        entity, project, sweep_id = self._parse_path(path)
        if not self._sweeps.get(path):
            self._sweeps[path] = Sweep(self.client, entity, project, sweep_id)
        return self._sweeps[path]

    @normalize_exceptions
    def artifact_types(self, project=None):
        """Return the artifact types defined in a project (or the default)."""
        entity, project = self._parse_project_path(project)
        return ProjectArtifactTypes(self.client, entity, project)

    @normalize_exceptions
    def artifact_type(self, type_name, project=None):
        """Return a single artifact type by name."""
        entity, project = self._parse_project_path(project)
        return ArtifactType(self.client, entity, project, type_name)

    @normalize_exceptions
    def artifact_versions(self, type_name, name, per_page=50):
        """Return the versions of an artifact collection."""
        entity, project, collection_name = self._parse_artifact_path(name)
        artifact_type = ArtifactType(self.client, entity, project, type_name)
        return artifact_type.collection(collection_name).versions(per_page=per_page)

    @normalize_exceptions
    def artifact(self, name, type=None):
        """
        Returns a single artifact by parsing path in the form `entity/project/run_id`.

        Arguments:
            name: (str) An artifact name. May be prefixed with entity/project. Valid names
                can be in the following forms:
                    name:version
                    name:alias
                    digest
            type: (str, optional) The type of artifact to fetch.

        Returns:
            A `Artifact` object.
        """
        if name is None:
            raise ValueError("You must specify name= to fetch an artifact.")
        entity, project, artifact_name = self._parse_artifact_path(name)
        artifact = Artifact(self.client, entity, project, artifact_name)
        if type is not None and artifact.type != type:
            # BUG FIX: the message had two %s placeholders but was never
            # formatted; supply the arguments so the error is readable.
            raise ValueError(
                "type %s specified but this artifact is of type %s"
                % (type, artifact.type)
            )
        return artifact
class Attrs(object):
    """Dict-backed object exposing camelCase server fields as attributes.

    Attribute lookups translate ``snake_case`` names to ``camelCase`` before
    consulting the underlying dict, then fall back to the literal name.
    """

    def __init__(self, attrs):
        self._attrs = attrs

    def snake_to_camel(self, string):
        """Convert ``snake_case`` to ``camelCase`` (first letter lowered)."""
        titled = "".join(part.title() for part in string.split("_"))
        return titled[0].lower() + titled[1:]

    def __getattr__(self, name):
        key = self.snake_to_camel(name)
        # "user" lookups always fail here — presumably resolved elsewhere;
        # behavior kept as-is.
        if key == "user":
            raise AttributeError()
        attrs = self._attrs
        if key in attrs:
            return attrs[key]
        if name in attrs:
            return attrs[name]
        raise AttributeError(
            "'{}' object has no attribute '{}'".format(repr(self), name)
        )
class Paginator(object):
    """Abstract base for lazily-paginated GraphQL result collections.

    Subclasses provide ``QUERY`` plus the ``length``/``more``/``cursor``
    properties and ``convert_objects()``; this base class handles iteration,
    indexing and on-demand page fetching.
    """

    QUERY = None

    def __init__(self, client, variables, per_page=None):
        self.client = client
        self.variables = variables
        # We don't allow unbounded paging: default the page size when unset.
        self.per_page = 50 if per_page is None else per_page
        self.objects = []
        self.index = -1
        self.last_response = None

    def __iter__(self):
        self.index = -1
        return self

    def __len__(self):
        # Some collections only learn their length after the first fetch.
        if self.length is None:
            self._load_page()
        if self.length is None:
            raise ValueError("Object doesn't provide length")
        return self.length

    @property
    def length(self):
        raise NotImplementedError()

    @property
    def more(self):
        raise NotImplementedError()

    @property
    def cursor(self):
        raise NotImplementedError()

    def convert_objects(self):
        raise NotImplementedError()

    def update_variables(self):
        self.variables.update({"perPage": self.per_page, "cursor": self.cursor})

    def _load_page(self):
        """Fetch and append the next page; return True if a request was made."""
        if not self.more:
            return False
        self.update_variables()
        self.last_response = self.client.execute(
            self.QUERY, variable_values=self.variables
        )
        self.objects.extend(self.convert_objects())
        return True

    def __getitem__(self, index):
        # Keep fetching until the index is covered or pages run out.
        while index >= len(self.objects):
            if not self._load_page():
                break
        return self.objects[index]

    def __next__(self):
        self.index += 1
        if self.index >= len(self.objects):
            if not self._load_page() or self.index >= len(self.objects):
                raise StopIteration
        return self.objects[self.index]

    next = __next__  # Python 2 iterator protocol
class User(Attrs):
    """A wandb user, exposing server attributes via `Attrs`."""

    def __init__(self, attrs):
        # BUG FIX: this method was named `init`, so it was never invoked by
        # object construction (Attrs.__init__ ran instead, masking the dead
        # code). Renamed to `__init__`; observable behavior is unchanged.
        super(User, self).__init__(attrs)
class Projects(Paginator):
    """
    An iterable collection of `Project` objects.
    """

    QUERY = gql(
        """
        query Projects($entity: String, $cursor: String, $perPage: Int = 50) {
            models(entityName: $entity, after: $cursor, first: $perPage) {
                edges {
                    node {
                        ...ProjectFragment
                    }
                    cursor
                }
                pageInfo {
                    endCursor
                    hasNextPage
                }
            }
        }
        %s
        """
        % PROJECT_FRAGMENT
    )

    def __init__(self, client, entity, per_page=50):
        self.client = client
        self.entity = entity
        super(Projects, self).__init__(client, {"entity": self.entity}, per_page)

    @property
    def length(self):
        # The server does not report a total project count.
        return None

    @property
    def more(self):
        if not self.last_response:
            return True
        return self.last_response["models"]["pageInfo"]["hasNextPage"]

    @property
    def cursor(self):
        if not self.last_response:
            return None
        return self.last_response["models"]["edges"][-1]["cursor"]

    def convert_objects(self):
        edges = self.last_response["models"]["edges"]
        return [
            Project(self.client, self.entity, edge["node"]["name"], edge["node"])
            for edge in edges
        ]

    def __repr__(self):
        return "<Projects {}>".format(self.entity)
class Project(Attrs):
    """A project is a namespace for runs."""

    def __init__(self, client, entity, project, attrs):
        # Copy the attrs dict so later mutations don't leak to the caller.
        super(Project, self).__init__(dict(attrs))
        self.client = client
        self.name = project
        self.entity = entity

    @property
    def path(self):
        return [self.entity, self.name]

    def __repr__(self):
        return "<Project %s>" % "/".join(self.path)

    @normalize_exceptions
    def artifacts_types(self, per_page=50):
        """Return the artifact types defined in this project."""
        return ProjectArtifactTypes(self.client, self.entity, self.name)
class Runs(Paginator):
    """An iterable collection of runs associated with a project and optional filter.
    This is generally used indirectly via the `Api`.runs method
    """

    QUERY = gql(
        """
        query Runs($project: String!, $entity: String!, $cursor: String, $perPage: Int = 50, $order: String, $filters: JSONString) {
            project(name: $project, entityName: $entity) {
                runCount(filters: $filters)
                readOnly
                runs(filters: $filters, after: $cursor, first: $perPage, order: $order) {
                    edges {
                        node {
                            ...RunFragment
                        }
                        cursor
                    }
                    pageInfo {
                        endCursor
                        hasNextPage
                    }
                }
            }
        }
        %s
        """
        % RUN_FRAGMENT
    )

    def __init__(self, client, entity, project, filters=None, order=None, per_page=50):
        self.entity = entity
        self.project = project
        # BUG FIX: ``filters={}`` was a mutable default argument shared across
        # every Runs() call; use None and substitute a fresh dict instead.
        self.filters = filters or {}
        self.order = order
        # Cache of sweeps already fetched, keyed by sweep name, so multiple
        # runs from the same sweep share one Sweep object.
        self._sweeps = {}
        variables = {
            "project": self.project,
            "entity": self.entity,
            "order": self.order,
            "filters": json.dumps(self.filters),
        }
        super(Runs, self).__init__(client, variables, per_page)

    @property
    def length(self):
        if self.last_response:
            return self.last_response["project"]["runCount"]
        else:
            return None

    @property
    def more(self):
        if self.last_response:
            return self.last_response["project"]["runs"]["pageInfo"]["hasNextPage"]
        else:
            return True

    @property
    def cursor(self):
        if self.last_response:
            return self.last_response["project"]["runs"]["edges"][-1]["cursor"]
        else:
            return None

    def convert_objects(self):
        """Build Run objects from the last GraphQL response, attaching their
        sweeps (fetched lazily and cached in ``self._sweeps``)."""
        objs = []
        if self.last_response is None or self.last_response.get("project") is None:
            raise ValueError("Could not find project %s" % self.project)
        for run_response in self.last_response["project"]["runs"]["edges"]:
            run = Run(
                self.client,
                self.entity,
                self.project,
                run_response["node"]["name"],
                run_response["node"],
            )
            objs.append(run)

            if run.sweep_name:
                if run.sweep_name in self._sweeps:
                    sweep = self._sweeps[run.sweep_name]
                else:
                    # Don't pull the sweep's runs; there may be many and we
                    # only need the sweep metadata here.
                    sweep = Sweep.get(
                        self.client,
                        self.entity,
                        self.project,
                        run.sweep_name,
                        withRuns=False,
                    )
                    self._sweeps[run.sweep_name] = sweep

                if sweep is None:
                    continue
                run.sweep = sweep
                if run.id not in sweep.runs_by_id:
                    sweep.runs_by_id[run.id] = run
                    sweep.runs.append(run)

        return objs

    def __repr__(self):
        return "<Runs {}/{} ({})>".format(self.entity, self.project, len(self))
class Run(Attrs):
    """
    A single run associated with an entity and project.
    Attributes:
        tags ([str]): a list of tags associated with the run
        url (str): the url of this run
        id (str): unique identifier for the run (defaults to eight characters)
        name (str): the name of the run
        state (str): one of: running, finished, crashed, aborted
        config (dict): a dict of hyperparameters associated with the run
        created_at (str): ISO timestamp when the run was started
        system_metrics (dict): the latest system metrics recorded for the run
        summary (dict): A mutable dict-like property that holds the current summary.
            Calling update will persist any changes.
        project (str): the project associated with the run
        entity (str): the name of the entity associated with the run
        user (str): the name of the user who created the run
        path (str): Unique identifier [entity]/[project]/[run_id]
        notes (str): Notes about the run
        read_only (boolean): Whether the run is editable
        history_keys (str): Keys of the history metrics that have been logged
            with `wandb.log({key: value})`
    """

    def __init__(self, client, entity, project, run_id, attrs=None):
        """
        Run is always initialized by calling api.runs() where api is an instance of wandb.Api
        """
        # BUG FIX: ``attrs={}`` was a mutable default argument; use None.
        attrs = attrs if attrs is not None else {}
        super(Run, self).__init__(dict(attrs))
        self.client = client
        self._entity = entity
        self.project = project
        self._files = {}
        self._base_dir = env.get_dir(tempfile.gettempdir())
        self.id = run_id
        self.sweep = None
        self.dir = os.path.join(self._base_dir, *self.path)
        try:
            os.makedirs(self.dir)
        except OSError:
            # Directory already exists (or can't be created); downloads will
            # surface any real failure later.
            pass
        self._summary = None
        self.state = attrs.get("state", "not found")

        # Only hit the backend when we weren't handed attributes up front.
        self.load(force=not attrs)

    @property
    def entity(self):
        return self._entity

    @property
    def username(self):
        wandb.termwarn("Run.username is deprecated. Please use Run.entity instead.")
        return self._entity

    @property
    def storage_id(self):
        # For compatibility with wandb.Run, which has storage IDs
        # in self.storage_id and names in self.id.
        return self._attrs.get("id")

    @property
    def id(self):
        return self._attrs.get("name")

    @id.setter
    def id(self, new_id):
        attrs = self._attrs
        attrs["name"] = new_id
        return new_id

    @property
    def name(self):
        return self._attrs.get("displayName")

    @name.setter
    def name(self, new_name):
        self._attrs["displayName"] = new_name
        return new_name

    @classmethod
    def create(cls, api, run_id=None, project=None, entity=None):
        """Create a run for the given project"""
        run_id = run_id or util.generate_id()
        project = project or api.settings.get("project") or "uncategorized"
        mutation = gql(
            """
        mutation UpsertBucket($project: String, $entity: String, $name: String!) {
            upsertBucket(input: {modelName: $project, entityName: $entity, name: $name}) {
                bucket {
                    project {
                        name
                        entity { name }
                    }
                    id
                    name
                }
                inserted
            }
        }
        """
        )
        variables = {"entity": entity, "project": project, "name": run_id}
        res = api.client.execute(mutation, variable_values=variables)
        res = res["upsertBucket"]["bucket"]
        return Run(
            api.client,
            res["project"]["entity"]["name"],
            res["project"]["name"],
            res["name"],
            {
                "id": res["id"],
                "config": "{}",
                "systemMetrics": "{}",
                "summaryMetrics": "{}",
                "tags": [],
                "description": None,
                "notes": None,
                "state": "running",
            },
        )

    def load(self, force=False):
        """Fetch (or refresh) this run's attributes from the backend.

        Arguments:
            force (bool): re-fetch even if attributes are already populated.
        Returns:
            The (possibly refreshed) attribute dict.
        Raises:
            ValueError: if the run cannot be found.
        """
        query = gql(
            """
        query Run($project: String!, $entity: String!, $name: String!) {
            project(name: $project, entityName: $entity) {
                run(name: $name) {
                    ...RunFragment
                }
            }
        }
        %s
        """
            % RUN_FRAGMENT
        )
        if force or not self._attrs:
            response = self._exec(query)
            if (
                response is None
                or response.get("project") is None
                or response["project"].get("run") is None
            ):
                raise ValueError("Could not find run %s" % self)
            self._attrs = response["project"]["run"]
            self.state = self._attrs["state"]

            if self.sweep_name and not self.sweep:
                # There may be a lot of runs. Don't bother pulling them all
                # just for the sake of this one.
                self.sweep = Sweep.get(
                    self.client,
                    self.entity,
                    self.project,
                    self.sweep_name,
                    withRuns=False,
                )
                # TODO: Older runs don't always have sweeps when sweep_name is set
                if self.sweep:
                    self.sweep.runs.append(self)
                    self.sweep.runs_by_id[self.id] = self

        # Summary / system metrics arrive as JSON strings; decode them once.
        self._attrs["summaryMetrics"] = (
            json.loads(self._attrs["summaryMetrics"])
            if self._attrs.get("summaryMetrics")
            else {}
        )
        self._attrs["systemMetrics"] = (
            json.loads(self._attrs["systemMetrics"])
            if self._attrs.get("systemMetrics")
            else {}
        )
        if self._attrs.get("user"):
            self.user = User(self._attrs["user"])
        # Split config entries into user-facing keys and wandb-internal keys;
        # backend stores values wrapped as {"value": ..., "desc": ...}.
        config_user, config_raw = {}, {}
        for key, value in six.iteritems(json.loads(self._attrs.get("config") or "{}")):
            config = config_raw if key in WANDB_INTERNAL_KEYS else config_user
            if isinstance(value, dict) and "value" in value:
                config[key] = value["value"]
            else:
                config[key] = value
        config_raw.update(config_user)
        self._attrs["config"] = config_user
        self._attrs["rawconfig"] = config_raw
        return self._attrs

    @normalize_exceptions
    def update(self):
        """
        Persists changes to the run object to the wandb backend.
        """
        mutation = gql(
            """
        mutation UpsertBucket($id: String!, $description: String, $display_name: String, $notes: String, $tags: [String!], $config: JSONString!, $groupName: String) {
            upsertBucket(input: {id: $id, description: $description, displayName: $display_name, notes: $notes, tags: $tags, config: $config, groupName: $groupName}) {
                bucket {
                    ...RunFragment
                }
            }
        }
        %s
        """
            % RUN_FRAGMENT
        )
        _ = self._exec(
            mutation,
            id=self.storage_id,
            tags=self.tags,
            description=self.description,
            notes=self.notes,
            display_name=self.display_name,
            config=self.json_config,
            groupName=self.group,
        )
        self.summary.update()

    @normalize_exceptions
    def delete(self, delete_artifacts=False):
        """
        Deletes the given run from the wandb backend.
        """
        mutation = gql(
            """
            mutation DeleteRun(
                $id: ID!,
                %s
            ) {
                deleteRun(input: {
                    id: $id,
                    %s
                }) {
                    clientMutationId
                }
            }
        """
            %
            # Older backends might not support the 'deleteArtifacts' argument,
            # so only supply it when it is explicitly set.
            (
                "$deleteArtifacts: Boolean" if delete_artifacts else "",
                "deleteArtifacts: $deleteArtifacts" if delete_artifacts else "",
            )
        )
        self.client.execute(
            mutation,
            variable_values={
                "id": self.storage_id,
                "deleteArtifacts": delete_artifacts,
            },
        )

    def save(self):
        self.update()

    @property
    def json_config(self):
        # Wrap each config value in the {"value": ..., "desc": ...} shape the
        # backend expects.
        config = {}
        for k, v in six.iteritems(self.config):
            config[k] = {"value": v, "desc": None}
        return json.dumps(config)

    def _exec(self, query, **kwargs):
        """Execute a query against the cloud backend"""
        variables = {"entity": self.entity, "project": self.project, "name": self.id}
        variables.update(kwargs)
        return self.client.execute(query, variable_values=variables)

    def _sampled_history(self, keys, x_axis="_step", samples=500):
        spec = {"keys": [x_axis] + keys, "samples": samples}
        query = gql(
            """
        query Run($project: String!, $entity: String!, $name: String!, $specs: [JSONString!]!) {
            project(name: $project, entityName: $entity) {
                run(name: $name) { sampledHistory(specs: $specs) }
            }
        }
        """
        )

        response = self._exec(query, specs=[json.dumps(spec)])
        # sampledHistory returns one list per spec, we only send one spec
        return response["project"]["run"]["sampledHistory"][0]

    def _full_history(self, samples=500, stream="default"):
        node = "history" if stream == "default" else "events"
        query = gql(
            """
        query Run($project: String!, $entity: String!, $name: String!, $samples: Int) {
            project(name: $project, entityName: $entity) {
                run(name: $name) { %s(samples: $samples) }
            }
        }
        """
            % node
        )

        response = self._exec(query, samples=samples)
        return [json.loads(line) for line in response["project"]["run"][node]]

    @normalize_exceptions
    def files(self, names=None, per_page=50):
        """
        Arguments:
            names (list): names of the requested files, if empty returns all files
            per_page (int): number of results per page
        Returns:
            A `Files` object, which is an iterator over `File` obejcts.
        """
        # BUG FIX: ``names=[]`` was a mutable default argument; use None.
        return Files(self.client, self, names or [], per_page)

    @normalize_exceptions
    def file(self, name):
        """
        Arguments:
            name (str): name of requested file.
        Returns:
            A `File` matching the name argument.
        """
        return Files(self.client, self, [name])[0]

    @normalize_exceptions
    def upload_file(self, path, root="."):
        """
        Arguments:
            path (str): name of file to upload.
            root (str): the root path to save the file relative to.  i.e.
                If you want to have the file saved in the run as "my_dir/file.txt"
                and you're currently in "my_dir" you would set root to "../"
        Returns:
            A `File` matching the name argument.
        """
        api = InternalApi(
            default_settings={"entity": self.entity, "project": self.project},
            retry_timedelta=RETRY_TIMEDELTA,
        )
        api.set_current_run_id(self.id)
        root = os.path.abspath(root)
        name = os.path.relpath(path, root)
        with open(os.path.join(root, name), "rb") as f:
            api.push({util.to_forward_slash_path(name): f})
        return Files(self.client, self, [name])[0]

    @normalize_exceptions
    def history(
        self, samples=500, keys=None, x_axis="_step", pandas=True, stream="default"
    ):
        """
        Returns sampled history metrics for a run.  This is simpler and faster if you are ok with
        the history records being sampled.
        Arguments:
            samples (int, optional): The number of samples to return
            pandas (bool, optional): Return a pandas dataframe
            keys (list, optional): Only return metrics for specific keys
            x_axis (str, optional): Use this metric as the xAxis defaults to _step
            stream (str, optional): "default" for metrics, "system" for machine metrics
        Returns:
            If pandas=True returns a `pandas.DataFrame` of history metrics.
            If pandas=False returns a list of dicts of history metrics.
        """
        if keys is not None and not isinstance(keys, list):
            wandb.termerror("keys must be specified in a list")
            return []
        if keys is not None and len(keys) > 0 and not isinstance(keys[0], str):
            wandb.termerror("keys argument must be a list of strings")
            return []

        if keys and stream != "default":
            wandb.termerror("stream must be default when specifying keys")
            return []
        elif keys:
            lines = self._sampled_history(keys=keys, x_axis=x_axis, samples=samples)
        else:
            lines = self._full_history(samples=samples, stream=stream)
        if pandas:
            pandas = util.get_module("pandas")
            if pandas:
                lines = pandas.DataFrame.from_records(lines)
            else:
                print("Unable to load pandas, call history with pandas=False")
        return lines

    @normalize_exceptions
    def scan_history(self, keys=None, page_size=1000, min_step=None, max_step=None):
        """
        Returns an iterable collection of all history records for a run.
        Example:
            Export all the loss values for an example run
            ```python
            run = api.run("l2k2/examples-numpy-boston/i0wt6xua")
            history = run.scan_history(keys=["Loss"])
            losses = [row["Loss"] for row in history]
            ```
        Arguments:
            keys ([str], optional): only fetch these keys, and only fetch rows that have all of keys defined.
            page_size (int, optional): size of pages to fetch from the api
        Returns:
            An iterable collection over history records (dict).
        """
        if keys is not None and not isinstance(keys, list):
            wandb.termerror("keys must be specified in a list")
            return []
        if keys is not None and len(keys) > 0 and not isinstance(keys[0], str):
            wandb.termerror("keys argument must be a list of strings")
            return []

        last_step = self.lastHistoryStep
        # set defaults for min/max step
        if min_step is None:
            min_step = 0
        if max_step is None:
            max_step = last_step + 1
        # if the max step is past the actual last step, clamp it down
        if max_step > last_step:
            max_step = last_step + 1
        if keys is None:
            return HistoryScan(
                run=self,
                client=self.client,
                page_size=page_size,
                min_step=min_step,
                max_step=max_step,
            )
        else:
            return SampledHistoryScan(
                run=self,
                client=self.client,
                keys=keys,
                page_size=page_size,
                min_step=min_step,
                max_step=max_step,
            )

    @normalize_exceptions
    def logged_artifacts(self, per_page=100):
        return RunArtifacts(self.client, self, mode="logged", per_page=per_page)

    @normalize_exceptions
    def used_artifacts(self, per_page=100):
        return RunArtifacts(self.client, self, mode="used", per_page=per_page)

    @normalize_exceptions
    def use_artifact(self, artifact):
        """ Declare an artifact as an input to a run.
        Arguments:
            artifact (`Artifact`): An artifact returned from
                `wandb.Api().artifact(name)`
        Returns:
            A `Artifact` object.
        """
        api = InternalApi(
            default_settings={"entity": self.entity, "project": self.project},
            retry_timedelta=RETRY_TIMEDELTA,
        )
        api.set_current_run_id(self.id)

        if isinstance(artifact, Artifact):
            api.use_artifact(artifact.id)
            return artifact
        elif isinstance(artifact, wandb.Artifact):
            raise ValueError(
                "Only existing artifacts are accepted by this api. "
                "Manually create one with `wandb artifacts put`"
            )
        else:
            raise ValueError("You must pass a wandb.Api().artifact() to use_artifact")

    @normalize_exceptions
    def log_artifact(self, artifact, aliases=None):
        """ Declare an artifact as output of a run.
        Arguments:
            artifact (`Artifact`): An artifact returned from
                `wandb.Api().artifact(name)`
            aliases (list, optional): Aliases to apply to this artifact
        Returns:
            A `Artifact` object.
        """
        api = InternalApi(
            default_settings={"entity": self.entity, "project": self.project},
            retry_timedelta=RETRY_TIMEDELTA,
        )
        api.set_current_run_id(self.id)

        if isinstance(artifact, Artifact):
            artifact_collection_name = artifact.name.split(":")[0]
            api.create_artifact(
                artifact.type,
                artifact_collection_name,
                artifact.digest,
                aliases=aliases,
            )
            return artifact
        elif isinstance(artifact, wandb.Artifact):
            raise ValueError(
                "Only existing artifacts are accepted by this api. "
                "Manually create one with `wandb artifacts put`"
            )
        else:
            # BUG FIX: this error message previously referred to use_artifact.
            raise ValueError("You must pass a wandb.Api().artifact() to log_artifact")

    @property
    def summary(self):
        if self._summary is None:
            # TODO: fix the outdir issue
            self._summary = HTTPSummary(self, self.client, summary=self.summary_metrics)
        return self._summary

    @property
    def path(self):
        return [
            urllib.parse.quote_plus(str(self.entity)),
            urllib.parse.quote_plus(str(self.project)),
            urllib.parse.quote_plus(str(self.id)),
        ]

    @property
    def url(self):
        path = self.path
        path.insert(2, "runs")
        return self.client.app_url + "/".join(path)

    @property
    def lastHistoryStep(self):  # noqa: N802
        """The index of the last logged history step, or -1 if none exists."""
        query = gql(
            """
        query Run($project: String!, $entity: String!, $name: String!) {
            project(name: $project, entityName: $entity) {
                run(name: $name) { historyKeys }
            }
        }
        """
        )
        response = self._exec(query)
        if (
            response is None
            or response.get("project") is None
            or response["project"].get("run") is None
            or response["project"]["run"].get("historyKeys") is None
        ):
            return -1
        history_keys = response["project"]["run"]["historyKeys"]
        return history_keys["lastStep"] if "lastStep" in history_keys else -1

    def __repr__(self):
        return "<Run {} ({})>".format("/".join(self.path), self.state)
class Sweep(Attrs):
    """A set of runs associated with a sweep.
    Examples:
        Instantiate with:
        ```
        api = wandb.Api()
        sweep = api.sweep(path/to/sweep)
        ```
    Attributes:
        runs: (`Runs`) list of runs
        id: (str) sweep id
        project: (str) name of project
        config: (str) dictionary of sweep configuration
    """

    QUERY = gql(
        """
    query Sweep($project: String!, $entity: String, $name: String!, $withRuns: Boolean!, $order: String) {
        project(name: $project, entityName: $entity) {
            sweep(sweepName: $name) {
                id
                name
                bestLoss
                config
                runs(order: $order) @include(if: $withRuns) {
                    edges {
                        node {
                            ...RunFragment
                        }
                        cursor
                    }
                    pageInfo {
                        endCursor
                        hasNextPage
                    }
                }
            }
        }
    }
    %s
    """
        % RUN_FRAGMENT
    )

    def __init__(self, client, entity, project, sweep_id, attrs=None):
        # TODO: Add agents / flesh this out.
        # BUG FIX: ``attrs={}`` was a mutable default argument; use None.
        attrs = attrs if attrs is not None else {}
        super(Sweep, self).__init__(dict(attrs))
        self.client = client
        self._entity = entity
        self.project = project
        self.id = sweep_id
        self.runs = []
        self.runs_by_id = {}

        self.load(force=not attrs)

    @property
    def entity(self):
        return self._entity

    @property
    def username(self):
        wandb.termwarn("Sweep.username is deprecated. please use Sweep.entity instead.")
        return self._entity

    @property
    def config(self):
        # SECURITY NOTE(review): bare yaml.load without an explicit Loader is
        # deprecated (PyYAML >= 5.1) and unsafe on untrusted input; sweep
        # configs come from the backend, but consider yaml.safe_load.
        return yaml.load(self._attrs["config"])

    def load(self, force=False):
        """Fetch this sweep (and its runs) from the backend if needed.

        Raises:
            ValueError: if the sweep cannot be found.
        """
        if force or not self._attrs:
            sweep = self.get(self.client, self.entity, self.project, self.id)
            if sweep is None:
                raise ValueError("Could not find sweep %s" % self)
            self._attrs = sweep._attrs
            self.runs = sweep.runs
            self.runs_by_id = sweep.runs_by_id

        return self._attrs

    @property
    def order(self):
        """Server-side order key derived from the sweep's metric goal, or
        None when the config declares no metric."""
        if self._attrs.get("config") and self.config.get("metric"):
            sort_order = self.config["metric"].get("goal", "minimize")
            prefix = "+" if sort_order == "minimize" else "-"
            return QueryGenerator.format_order_key(
                prefix + self.config["metric"]["name"]
            )

    def best_run(self, order=None):
        "Returns the best run sorted by the metric defined in config or the order passed in"
        if order is None:
            order = self.order
        else:
            order = QueryGenerator.format_order_key(order)
        if order is None:
            wandb.termwarn(
                "No order specified and couldn't find metric in sweep config, returning most recent run"
            )
        else:
            wandb.termlog("Sorting runs by %s" % order)
        filters = {"$and": [{"sweep": self.id}]}
        try:
            return Runs(
                self.client,
                self.entity,
                self.project,
                order=order,
                filters=filters,
                per_page=1,
            )[0]
        except IndexError:
            return None

    @property
    def path(self):
        return [
            urllib.parse.quote_plus(str(self.entity)),
            urllib.parse.quote_plus(str(self.project)),
            urllib.parse.quote_plus(str(self.id)),
        ]

    @property
    def url(self):
        path = self.path
        path.insert(2, "sweeps")
        return self.client.app_url + "/".join(path)

    @classmethod
    def get(
        cls,
        client,
        entity=None,
        project=None,
        sid=None,
        withRuns=True,  # noqa: N803
        order=None,
        query=None,
        **kwargs
    ):
        """Execute a query against the cloud backend"""
        if query is None:
            query = cls.QUERY

        variables = {
            "entity": entity,
            "project": project,
            "name": sid,
            "order": order,
            "withRuns": withRuns,
        }
        variables.update(kwargs)

        response = client.execute(query, variable_values=variables)
        if response.get("project") is None:
            return None
        elif response["project"].get("sweep") is None:
            return None

        sweep_response = response["project"]["sweep"]

        # TODO: make this paginate
        runs_response = sweep_response.get("runs")
        runs = []
        if runs_response:
            for r in runs_response["edges"]:
                run = Run(client, entity, project, r["node"]["name"], r["node"])
                runs.append(run)

            del sweep_response["runs"]

        sweep = cls(client, entity, project, sid, attrs=sweep_response)
        sweep.runs = runs

        for run in runs:
            sweep.runs_by_id[run.id] = run
            run.sweep = sweep

        return sweep

    def __repr__(self):
        return "<Sweep {}>".format("/".join(self.path))
class Files(Paginator):
    """An iterable collection of `File` objects."""

    QUERY = gql(
        """
        query Run($project: String!, $entity: String!, $name: String!, $fileCursor: String,
            $fileLimit: Int = 50, $fileNames: [String] = [], $upload: Boolean = false) {
            project(name: $project, entityName: $entity) {
                run(name: $name) {
                    fileCount
                    ...RunFilesFragment
                }
            }
        }
        %s
        """
        % FILE_FRAGMENT
    )

    def __init__(self, client, run, names=None, per_page=50, upload=False):
        self.run = run
        variables = {
            "project": run.project,
            "entity": run.entity,
            "name": run.id,
            # BUG FIX: ``names=[]`` was a mutable default argument; use None
            # and substitute a fresh list.
            "fileNames": names or [],
            "upload": upload,
        }
        super(Files, self).__init__(client, variables, per_page)

    @property
    def length(self):
        if self.last_response:
            return self.last_response["project"]["run"]["fileCount"]
        else:
            return None

    @property
    def more(self):
        if self.last_response:
            return self.last_response["project"]["run"]["files"]["pageInfo"][
                "hasNextPage"
            ]
        else:
            return True

    @property
    def cursor(self):
        if self.last_response:
            return self.last_response["project"]["run"]["files"]["edges"][-1]["cursor"]
        else:
            return None

    def update_variables(self):
        self.variables.update({"fileLimit": self.per_page, "fileCursor": self.cursor})

    def convert_objects(self):
        return [
            File(self.client, r["node"])
            for r in self.last_response["project"]["run"]["files"]["edges"]
        ]

    def __repr__(self):
        return "<Files {} ({})>".format("/".join(self.run.path), len(self))
class File(object):
    """File is a class associated with a file saved by wandb.
    Attributes:
        name (string): filename
        url (string): path to file
        direct_url (string): path to file in the bucket
        md5 (string): md5 of file
        mimetype (string): mimetype of file
        updated_at (string): timestamp of last update
        size (int): size of file in bytes
    """

    def __init__(self, client, attrs):
        self.client = client
        self._attrs = attrs

    # Thin accessors over the raw attribute dict returned by the backend.

    @property
    def id(self):
        return self._attrs["id"]

    @property
    def name(self):
        return self._attrs["name"]

    @property
    def url(self):
        return self._attrs["url"]

    @property
    def direct_url(self):
        return self._attrs["directUrl"]

    @property
    def md5(self):
        return self._attrs["md5"]

    @property
    def digest(self):
        return self._attrs["digest"]

    @property
    def mimetype(self):
        return self._attrs["mimetype"]

    @property
    def updated_at(self):
        return self._attrs["updatedAt"]

    @property
    def size(self):
        # The backend may report a null size; normalize that to 0.
        raw_size = self._attrs["sizeBytes"]
        return int(raw_size) if raw_size is not None else 0

    @normalize_exceptions
    @retry.retriable(
        retry_timedelta=RETRY_TIMEDELTA,
        check_retry_fn=util.no_retry_auth,
        retryable_exceptions=(RetryError, requests.RequestException),
    )
    def download(self, root=".", replace=False):
        """Downloads a file previously saved by a run from the wandb server.
        Arguments:
            replace (boolean): If `True`, download will overwrite a local file
                if it exists. Defaults to `False`.
            root (str): Local directory to save the file.  Defaults to ".".
        Raises:
            `ValueError` if file already exists and replace=False
        """
        target = os.path.join(root, self.name)
        if not replace and os.path.exists(target):
            raise ValueError("File already exists, pass replace=True to overwrite")
        util.download_file_from_url(target, self.url, Api().api_key)
        return open(target, "r")

    @normalize_exceptions
    def delete(self):
        """Delete this file from the wandb backend."""
        mutation = gql(
            """
        mutation deleteFiles($files: [ID!]!) {
            deleteFiles(input: {
                files: $files
            }) {
                success
            }
        }
        """
        )
        self.client.execute(mutation, variable_values={"files": [self.id]})

    def __repr__(self):
        human_size = util.to_human_size(self.size, units=util.POW_2_BYTES)
        return "<File {} ({}) {}>".format(self.name, self.mimetype, human_size)
class Reports(Paginator):
    """Reports is an iterable collection of `BetaReport` objects."""

    QUERY = gql(
        """
        query Run($project: String!, $entity: String!, $reportCursor: String,
            $reportLimit: Int = 50, $viewType: String = "runs", $viewName: String) {
            project(name: $project, entityName: $entity) {
                allViews(viewType: $viewType, viewName: $viewName, first:
                    $reportLimit, after: $reportCursor) {
                    edges {
                        node {
                            name
                            description
                            user {
                                username
                                photoUrl
                            }
                            spec
                            updatedAt
                        }
                        cursor
                    }
                }
            }
        }
        """
    )

    def __init__(self, client, project, name=None, entity=None, per_page=50):
        self.project = project
        self.name = name
        query_variables = {
            "project": project.name,
            "entity": project.entity,
            "viewName": self.name,
        }
        super(Reports, self).__init__(client, query_variables, per_page)

    @property
    def length(self):
        # TODO: Add the count the backend
        return self.per_page

    @property
    def more(self):
        if not self.last_response:
            return True
        # No pageInfo in this query; a full page implies there may be more.
        edges = self.last_response["project"]["allViews"]["edges"]
        return len(edges) == self.per_page

    @property
    def cursor(self):
        if not self.last_response:
            return None
        return self.last_response["project"]["allViews"]["edges"][-1]["cursor"]

    def update_variables(self):
        self.variables.update(
            {"reportCursor": self.cursor, "reportLimit": self.per_page}
        )

    def convert_objects(self):
        reports = []
        for edge in self.last_response["project"]["allViews"]["edges"]:
            reports.append(
                BetaReport(
                    self.client,
                    edge["node"],
                    entity=self.project.entity,
                    project=self.project.name,
                )
            )
        return reports

    def __repr__(self):
        return "<Reports {}>".format("/".join(self.project.path))
class QueryGenerator(object):
    """QueryGenerator is a helper object to write filters for runs"""

    INDIVIDUAL_OP_TO_MONGO = {
        "!=": "$ne",
        ">": "$gt",
        ">=": "$gte",
        "<": "$lt",
        "<=": "$lte",
        "IN": "$in",
        "NIN": "$nin",
        "REGEX": "$regex",
    }
    GROUP_OP_TO_MONGO = {"AND": "$and", "OR": "$or"}

    def __init__(self):
        pass

    @classmethod
    def format_order_key(cls, key):
        """Normalize an order key to the server's dotted form.

        A leading "+"/"-" selects ascending/descending (default "-");
        bare metric names are assumed to live under summary_metrics.
        """
        if key.startswith("+") or key.startswith("-"):
            direction = key[0]
            key = key[1:]
        else:
            direction = "-"
        parts = key.split(".")
        if len(parts) == 1:
            # Assume the user meant summary_metrics if not a run column
            if parts[0] not in ["createdAt", "updatedAt", "name", "sweep"]:
                return direction + "summary_metrics." + parts[0]
            # BUG FIX: run columns (e.g. "name") previously fell through and
            # implicitly returned None; return them with their direction.
            return direction + parts[0]
        # Assume summary metrics if prefix isn't known
        elif parts[0] not in ["config", "summary_metrics", "tags"]:
            return direction + ".".join(["summary_metrics"] + parts)
        else:
            return direction + ".".join(parts)

    def _is_group(self, op):
        # A group filter nests other filters under "filters".
        return op.get("filters") is not None

    def _is_individual(self, op):
        # An individual filter targets a single "key".
        return op.get("key") is not None

    def _to_mongo_op_value(self, op, value):
        # "=" maps to a bare value; everything else to a mongo operator dict.
        if op == "=":
            return value
        else:
            return {self.INDIVIDUAL_OP_TO_MONGO[op]: value}

    def key_to_server_path(self, key):
        """Map a {"section", "name"} key dict to the server's dotted path.

        Raises:
            ValueError: for an unknown section.
        """
        if key["section"] == "config":
            return "config." + key["name"]
        elif key["section"] == "summary":
            return "summary_metrics." + key["name"]
        elif key["section"] == "keys_info":
            return "keys_info.keys." + key["name"]
        elif key["section"] == "run":
            return key["name"]
        elif key["section"] == "tags":
            return "tags." + key["name"]
        raise ValueError("Invalid key: %s" % key)

    def _to_mongo_individual(self, filter):
        """Convert one individual filter dict to its mongo form (or None to drop it)."""
        if filter["key"]["name"] == "":
            return None

        if filter.get("value") is None and filter["op"] != "=" and filter["op"] != "!=":
            return None

        # BUG FIX: was ``filter.get("disabled") is None and filter["disabled"]``,
        # which could never be True, so disabled filters were never skipped.
        if filter.get("disabled") is not None and filter["disabled"]:
            return None

        if filter["key"]["section"] == "tags":
            if filter["op"] == "IN":
                return {"tags": {"$in": filter["value"]}}
            if filter["value"] is False:
                return {
                    "$or": [{"tags": None}, {"tags": {"$ne": filter["key"]["name"]}}]
                }
            else:
                return {"tags": filter["key"]["name"]}
        # BUG FIX: ``filter.key`` raised AttributeError — filters are dicts.
        path = self.key_to_server_path(filter["key"])
        if path is None:
            return path
        return {path: self._to_mongo_op_value(filter["op"], filter["value"])}

    def filter_to_mongo(self, filter):
        """Recursively convert a filter tree (dicts) to a mongo query dict."""
        if self._is_individual(filter):
            return self._to_mongo_individual(filter)
        elif self._is_group(filter):
            return {
                self.GROUP_OP_TO_MONGO[filter["op"]]: [
                    self.filter_to_mongo(f) for f in filter["filters"]
                ]
            }
class BetaReport(Attrs):
    """BetaReport is a class associated with reports created in wandb.
    WARNING: this API will likely change in a future release
    Attributes:
        name (string): report name
        description (string): report description
        user (User): the user that created the report
        spec (dict): the spec of the report
        updated_at (string): timestamp of last update
    """

    def __init__(self, client, attrs, entity=None, project=None):
        self.client = client
        self.project = project
        self.entity = entity
        self.query_generator = QueryGenerator()
        super(BetaReport, self).__init__(dict(attrs))
        # The spec arrives as a JSON string; decode it once up front.
        self._attrs["spec"] = json.loads(self._attrs["spec"])

    @property
    def sections(self):
        return self.spec["panelGroups"]

    def runs(self, section, per_page=50, only_selected=True):
        """Return the `Runs` referenced by a report section's open run set."""
        run_set = section["runSets"][section.get("openRunSet", 0)]
        sort_key = run_set["sort"]["key"]
        direction = "+" if run_set["sort"].get("ascending") else "-"
        order = direction + self.query_generator.key_to_server_path(sort_key)
        filters = self.query_generator.filter_to_mongo(run_set["filters"])
        if only_selected:
            # TODO: handle this not always existing
            selected = {"name": {"$in": run_set["selections"]["tree"]}}
            filters["$or"][0]["$and"].append(selected)
        return Runs(
            self.client,
            self.entity,
            self.project,
            filters=filters,
            order=order,
            per_page=per_page,
        )

    @property
    def updated_at(self):
        return self._attrs["updatedAt"]
class HistoryScan(object):
    """Iterator over a run's raw history rows, fetched page by page."""

    QUERY = gql(
        """
        query HistoryPage($entity: String!, $project: String!, $run: String!, $minStep: Int64!, $maxStep: Int64!, $pageSize: Int!) {
            project(name: $project, entityName: $entity) {
                run(name: $run) {
                    history(minStep: $minStep, maxStep: $maxStep, samples: $pageSize)
                }
            }
        }
        """
    )

    def __init__(self, client, run, min_step, max_step, page_size=1000):
        self.client = client
        self.run = run
        self.page_size = page_size
        self.min_step = min_step
        self.max_step = max_step
        self.page_offset = min_step  # minStep for next page
        self.scan_offset = 0  # index within current page of rows
        self.rows = []  # current page of rows

    def __iter__(self):
        # Reset to the start so the scan can be re-iterated.
        self.page_offset = self.min_step
        self.scan_offset = 0
        self.rows = []
        return self

    def __next__(self):
        # Keep fetching pages until the current page has an unread row.
        while self.scan_offset >= len(self.rows):
            if self.page_offset >= self.max_step:
                raise StopIteration()
            self._load_next()
        row = self.rows[self.scan_offset]
        self.scan_offset += 1
        return row

    next = __next__

    @normalize_exceptions
    @retry.retriable(
        check_retry_fn=util.no_retry_auth,
        retryable_exceptions=(RetryError, requests.RequestException),
    )
    def _load_next(self):
        """Fetch the next page of rows from the backend."""
        # Clamp the page's upper bound to the overall max step.
        upper = min(self.page_offset + self.page_size, self.max_step)
        variables = {
            "entity": self.run.entity,
            "project": self.run.project,
            "run": self.run.id,
            "minStep": int(self.page_offset),
            "maxStep": int(upper),
            "pageSize": int(self.page_size),
        }
        res = self.client.execute(self.QUERY, variable_values=variables)
        raw_rows = res["project"]["run"]["history"]
        self.rows = [json.loads(row) for row in raw_rows]
        self.page_offset += self.page_size
        self.scan_offset = 0
class SampledHistoryScan(object):
    """Iterator over sampled history rows for specific keys, fetched page by page."""

    QUERY = gql(
        """
        query SampledHistoryPage($entity: String!, $project: String!, $run: String!, $spec: JSONString!) {
            project(name: $project, entityName: $entity) {
                run(name: $run) {
                    sampledHistory(specs: [$spec])
                }
            }
        }
        """
    )

    def __init__(self, client, run, keys, min_step, max_step, page_size=1000):
        self.client = client
        self.run = run
        self.keys = keys
        self.page_size = page_size
        self.min_step = min_step
        self.max_step = max_step
        self.page_offset = min_step  # minStep for next page
        self.scan_offset = 0  # index within current page of rows
        self.rows = []  # current page of rows

    def __iter__(self):
        # Reset to the start so the scan can be re-iterated.
        self.page_offset = self.min_step
        self.scan_offset = 0
        self.rows = []
        return self

    def __next__(self):
        # Keep fetching pages until the current page has an unread row.
        while self.scan_offset >= len(self.rows):
            if self.page_offset >= self.max_step:
                raise StopIteration()
            self._load_next()
        row = self.rows[self.scan_offset]
        self.scan_offset += 1
        return row

    next = __next__

    @normalize_exceptions
    @retry.retriable(
        check_retry_fn=util.no_retry_auth,
        retryable_exceptions=(RetryError, requests.RequestException),
    )
    def _load_next(self):
        """Fetch the next page of sampled rows from the backend."""
        # Clamp the page's upper bound to the overall max step.
        upper = min(self.page_offset + self.page_size, self.max_step)
        variables = {
            "entity": self.run.entity,
            "project": self.run.project,
            "run": self.run.id,
            "spec": json.dumps(
                {
                    "keys": self.keys,
                    "minStep": int(self.page_offset),
                    "maxStep": int(upper),
                    "samples": int(self.page_size),
                }
            ),
        }
        res = self.client.execute(self.QUERY, variable_values=variables)
        # One spec was sent, so take the first (and only) result list.
        self.rows = res["project"]["run"]["sampledHistory"][0]
        self.page_offset += self.page_size
        self.scan_offset = 0
class ProjectArtifactTypes(Paginator):
    """Paginated listing of the artifact types defined in a project."""
    QUERY = gql(
        """
        query ProjectArtifacts(
            $entityName: String!,
            $projectName: String!,
            $cursor: String,
        ) {
            project(name: $projectName, entityName: $entityName) {
                artifactTypes(after: $cursor) {
                    ...ArtifactTypesFragment
                }
            }
        }
        %s
        """
        % ARTIFACTS_TYPES_FRAGMENT
    )
    def __init__(self, client, entity, project, name=None, per_page=50):
        self.entity = entity
        self.project = project
        super(ProjectArtifactTypes, self).__init__(
            client,
            {"entityName": entity, "projectName": project},
            per_page,
        )
    @property
    def length(self):
        # The query does not request a total count.
        # TODO
        return None
    @property
    def more(self):
        response = self.last_response
        if not response:
            # Nothing fetched yet; assume at least one page exists.
            return True
        return response["project"]["artifactTypes"]["pageInfo"]["hasNextPage"]
    @property
    def cursor(self):
        response = self.last_response
        if not response:
            return None
        return response["project"]["artifactTypes"]["edges"][-1]["cursor"]
    def update_variables(self):
        self.variables["cursor"] = self.cursor
    def convert_objects(self):
        project = self.last_response["project"]
        if project is None:
            return []
        converted = []
        for edge in project["artifactTypes"]["edges"]:
            node = edge["node"]
            converted.append(
                ArtifactType(self.client, self.entity, self.project, node["name"], node)
            )
        return converted
class ProjectArtifactCollections(Paginator):
    """Paginated listing of the artifact collections for one artifact type."""
    QUERY = gql(
        """
        query ProjectArtifactCollections(
            $entityName: String!,
            $projectName: String!,
            $artifactTypeName: String!
            $cursor: String,
        ) {
            project(name: $projectName, entityName: $entityName) {
                artifactType(name: $artifactTypeName) {
                    artifactSequences(after: $cursor) {
                        pageInfo {
                            endCursor
                            hasNextPage
                        }
                        totalCount
                        edges {
                            node {
                                id
                                name
                                description
                                createdAt
                            }
                        }
                    }
                }
            }
        }
        """
    )
    def __init__(self, client, entity, project, type_name, per_page=50):
        self.entity = entity
        self.project = project
        self.type_name = type_name
        super(ProjectArtifactCollections, self).__init__(
            client,
            {
                "entityName": entity,
                "projectName": project,
                "artifactTypeName": type_name,
            },
            per_page,
        )
    @property
    def length(self):
        response = self.last_response
        if not response:
            return None
        return response["project"]["artifactType"]["artifactSequences"]["totalCount"]
    @property
    def more(self):
        response = self.last_response
        if not response:
            # Nothing fetched yet; assume at least one page exists.
            return True
        sequences = response["project"]["artifactType"]["artifactSequences"]
        return sequences["pageInfo"]["hasNextPage"]
    @property
    def cursor(self):
        response = self.last_response
        if not response:
            return None
        edges = response["project"]["artifactType"]["artifactSequences"]["edges"]
        return edges[-1]["cursor"]
    def update_variables(self):
        self.variables["cursor"] = self.cursor
    def convert_objects(self):
        edges = self.last_response["project"]["artifactType"]["artifactSequences"][
            "edges"
        ]
        converted = []
        for edge in edges:
            node = edge["node"]
            converted.append(
                ArtifactCollection(
                    self.client,
                    self.entity,
                    self.project,
                    node["name"],
                    self.type_name,
                    node,
                )
            )
        return converted
class RunArtifacts(Paginator):
    """Paginated collection of the artifacts a run logged (outputs) or used (inputs)."""
    OUTPUT_QUERY = gql(
        """
        query RunArtifacts(
            $entity: String!, $project: String!, $runName: String!, $cursor: String,
        ) {
            project(name: $project, entityName: $entity) {
                run(name: $runName) {
                    outputArtifacts(after: $cursor) {
                        totalCount
                        edges {
                            node {
                                ...ArtifactFragment
                            }
                            cursor
                        }
                        pageInfo {
                            endCursor
                            hasNextPage
                        }
                    }
                }
            }
        }
        %s
        """
        % ARTIFACT_FRAGMENT
    )
    INPUT_QUERY = gql(
        """
        query RunArtifacts(
            $entity: String!, $project: String!, $runName: String!, $cursor: String,
        ) {
            project(name: $project, entityName: $entity) {
                run(name: $runName) {
                    inputArtifacts(after: $cursor) {
                        totalCount
                        edges {
                            node {
                                ...ArtifactFragment
                            }
                            cursor
                        }
                        pageInfo {
                            endCursor
                            hasNextPage
                        }
                    }
                }
            }
        }
        %s
        """
        % ARTIFACT_FRAGMENT
    )
    def __init__(self, client, run, mode="logged", per_page=50):
        """
        Args:
            client: Graphql client used to issue the paged queries.
            run: The Run whose artifacts are listed.
            mode: "logged" for artifacts the run produced (outputArtifacts),
                "used" for artifacts the run consumed (inputArtifacts).
            per_page: Number of artifacts fetched per request.
        Raises:
            ValueError: if mode is neither "logged" nor "used".
        """
        self.run = run
        if mode == "logged":
            self.run_key = "outputArtifacts"
            self.QUERY = self.OUTPUT_QUERY
        elif mode == "used":
            self.run_key = "inputArtifacts"
            self.QUERY = self.INPUT_QUERY
        else:
            raise ValueError("mode must be logged or used")
        variable_values = {
            "entity": run.entity,
            "project": run.project,
            "runName": run.id,
        }
        super(RunArtifacts, self).__init__(client, variable_values, per_page)
    @property
    def length(self):
        # Total count, or None before the first page is fetched.
        if self.last_response:
            return self.last_response["project"]["run"][self.run_key]["totalCount"]
        else:
            return None
    @property
    def more(self):
        if self.last_response:
            return self.last_response["project"]["run"][self.run_key]["pageInfo"][
                "hasNextPage"
            ]
        else:
            return True
    @property
    def cursor(self):
        if self.last_response:
            # Fix: "edges" is a list, so indexing it with the string "cursor"
            # raised TypeError. The cursor for the next page lives on the last
            # edge, matching every other paginator in this module.
            return self.last_response["project"]["run"][self.run_key]["edges"][-1][
                "cursor"
            ]
        else:
            return None
    def update_variables(self):
        self.variables.update({"cursor": self.cursor})
    def convert_objects(self):
        # Convert the current page of edges into Artifact objects.
        return [
            Artifact(
                self.client,
                self.run.entity,
                self.run.project,
                r["node"]["digest"],
                r["node"],
            )
            for r in self.last_response["project"]["run"][self.run_key]["edges"]
        ]
class ArtifactType(object):
    """A named artifact type within a project (e.g. a "dataset" or "model" type).
    Attributes are fetched from the backend on construction unless supplied.
    """
    def __init__(self, client, entity, project, type_name, attrs=None):
        """
        Args:
            client: Graphql client used for queries.
            entity: Entity (user/team) owning the project.
            project: Project name.
            type_name: Artifact type name.
            attrs: Optional pre-fetched attribute dict; when None the
                attributes are loaded from the backend immediately.
        """
        self.client = client
        self.entity = entity
        self.project = project
        self.type = type_name
        self._attrs = attrs
        if self._attrs is None:
            self.load()
    def load(self):
        """Fetch and cache this type's attributes.
        Raises:
            ValueError: if the project has no artifact type with this name.
        """
        query = gql(
            """
        query ProjectArtifactType(
            $entityName: String!,
            $projectName: String!,
            $artifactTypeName: String!
        ) {
            project(name: $projectName, entityName: $entityName) {
                artifactType(name: $artifactTypeName) {
                    id
                    name
                    description
                    createdAt
                }
            }
        }
        """
        )
        response = self.client.execute(
            query,
            variable_values={
                "entityName": self.entity,
                "projectName": self.project,
                "artifactTypeName": self.type,
            },
        )
        if (
            response is None
            or response.get("project") is None
            or response["project"].get("artifactType") is None
        ):
            raise ValueError("Could not find artifact type %s" % self.type)
        self._attrs = response["project"]["artifactType"]
        return self._attrs
    @property
    def id(self):
        return self._attrs["id"]
    @property
    def name(self):
        return self._attrs["name"]
    @normalize_exceptions
    def collections(self, per_page=50):
        """Artifact collections"""
        # Fix: per_page was accepted but silently ignored; forward it so the
        # requested page size actually takes effect.
        return ProjectArtifactCollections(
            self.client, self.entity, self.project, self.type, per_page=per_page
        )
    def collection(self, name):
        """Return a single ArtifactCollection of this type by name."""
        return ArtifactCollection(
            self.client, self.entity, self.project, name, self.type
        )
    def __repr__(self):
        return "<ArtifactType {}>".format(self.type)
class ArtifactCollection(object):
    """A named sequence of artifact versions of a single type.
    Attributes are fetched from the backend on construction unless supplied.
    """
    def __init__(self, client, entity, project, name, type, attrs=None):
        self.client = client
        self.entity = entity
        self.project = project
        self.name = name
        self.type = type
        self._attrs = attrs
        if self._attrs is None:
            self.load()
    @property
    def id(self):
        return self._attrs["id"]
    @normalize_exceptions
    def versions(self, per_page=50):
        """Artifact versions"""
        return ArtifactVersions(
            self.client,
            self.entity,
            self.project,
            self.name,
            self.type,
            per_page=per_page,
        )
    def load(self):
        """Fetch and cache this collection's attributes from the backend."""
        query = gql(
            """
        query ArtifactCollection(
            $entityName: String!,
            $projectName: String!,
            $artifactTypeName: String!
            $artifactCollectionName: String!
        ) {
            project(name: $projectName, entityName: $entityName) {
                artifactType(name: $artifactTypeName) {
                    artifactSequence(name: $artifactCollectionName) {
                        id
                        name
                        description
                        createdAt
                    }
                }
            }
        }
        """
        )
        response = self.client.execute(
            query,
            variable_values={
                "entityName": self.entity,
                "projectName": self.project,
                "artifactTypeName": self.type,
                "artifactCollectionName": self.name,
            },
        )
        # Walk the nested response defensively; any missing level means the
        # collection could not be found.
        project = response.get("project") if response is not None else None
        artifact_type = project.get("artifactType") if project is not None else None
        sequence = (
            artifact_type.get("artifactSequence")
            if artifact_type is not None
            else None
        )
        if sequence is None:
            raise ValueError("Could not find artifact type %s" % self.type)
        self._attrs = sequence
        return self._attrs
    def __repr__(self):
        return "<ArtifactCollection %s (%s)>" % (self.name, self.type)
class _DownloadedArtifactEntry(artifacts.ArtifactEntry):
    """Wraps a manifest entry of a saved artifact and knows how to
    materialize it locally (download/copy) and resolve reference targets."""
    def __init__(self, name, entry, parent_artifact):
        # name: path of the entry within the artifact.
        # entry: the underlying manifest entry being wrapped.
        # parent_artifact: the Artifact this entry belongs to.
        self.name = name
        self.entry = entry
        self._parent_artifact = parent_artifact
        # Have to copy over a bunch of variables to get this ArtifactEntry interface
        # to work properly
        self.path = entry.path
        self.ref = entry.ref
        self.digest = entry.digest
        self.birth_artifact_id = entry.birth_artifact_id
        self.size = entry.size
        self.extra = entry.extra
        self.local_path = entry.local_path
    def parent_artifact(self):
        # The Artifact this entry was loaded from.
        return self._parent_artifact
    def copy(self, cache_path, target_path):
        """Copy the cached file to target_path, skipping when already up to date."""
        # can't have colons in Windows
        if platform.system() == "Windows":
            head, tail = os.path.splitdrive(target_path)
            target_path = head + tail.replace(":", "-")
        # The modification time doubles as the staleness check (see copy2 below).
        need_copy = (
            not os.path.isfile(target_path)
            or os.stat(cache_path).st_mtime != os.stat(target_path).st_mtime
        )
        if need_copy:
            util.mkdir_exists_ok(os.path.dirname(target_path))
            # We use copy2, which preserves file metadata including modified
            # time (which we use above to check whether we should do the copy).
            shutil.copy2(cache_path, target_path)
        return target_path
    def download(self, root=None):
        """Fetch this entry into the cache and copy it under `root`.
        Returns the path of the copied file.
        """
        root = root or self._parent_artifact._default_root()
        self._parent_artifact._add_download_root(root)
        manifest = self._parent_artifact._load_manifest()
        if self.entry.ref is not None:
            # Reference entry: resolve it through the storage policy.
            cache_path = manifest.storage_policy.load_reference(
                self._parent_artifact,
                self.name,
                manifest.entries[self.name],
                local=True,
            )
        else:
            cache_path = manifest.storage_policy.load_file(
                self._parent_artifact, self.name, manifest.entries[self.name]
            )
        return self.copy(cache_path, os.path.join(root, self.name))
    def ref_target(self):
        """Return the target URI of this reference entry.
        Raises:
            ValueError: if the entry is not a reference entry.
        """
        manifest = self._parent_artifact._load_manifest()
        if self.entry.ref is not None:
            return manifest.storage_policy.load_reference(
                self._parent_artifact,
                self.name,
                manifest.entries[self.name],
                local=False,
            )
        raise ValueError("Only reference entries support ref_target().")
    def ref_url(self):
        # wandb-artifact:// URI identifying this entry within its artifact.
        return (
            "wandb-artifact://"
            + util.b64_to_hex_id(self._parent_artifact.id)
            + "/"
            + self.name
        )
class Artifact(artifacts.Artifact):
    """
    A wandb Artifact.
    An artifact that has been logged, including all its attributes, links to the runs
    that use it, and a link to the run that logged it.
    Examples:
        Basic usage
        ```
        api = wandb.Api()
        artifact = api.artifact('project/artifact:alias')
        # Get information about the artifact...
        artifact.digest
        artifact.aliases
        ```
        Updating an artifact
        ```
        artifact = api.artifact('project/artifact:alias')
        # Update the description
        artifact.description = 'My new description'
        # Selectively update metadata keys
        artifact.metadata["oldKey"] = "new value"
        # Replace the metadata entirely
        artifact.metadata = {"newKey": "new value"}
        # Add an alias
        artifact.aliases.append('best')
        # Remove an alias
        artifact.aliases.remove('latest')
        # Completely replace the aliases
        artifact.aliases = ['replaced']
        # Persist all artifact modifications
        artifact.save()
        ```
        Artifact graph traversal
        ```
        artifact = api.artifact('project/artifact:alias')
        # Walk up and down the graph from an artifact:
        producer_run = artifact.logged_by()
        consumer_runs = artifact.used_by()
        # Walk up and down the graph from a run:
        logged_artifacts = run.logged_artifacts()
        used_artifacts = run.used_artifacts()
        ```
        Deleting an artifact
        ```
        artifact = api.artifact('project/artifact:alias')
        artifact.delete()
        ```
    """
    # Fetch-by-id query; the manifest file URL is requested alongside the
    # standard artifact fields so from_id() can load the manifest eagerly.
    QUERY = gql(
        """
        query Artifact(
            $id: ID!,
        ) {
            artifact(id: $id) {
                currentManifest {
                    id
                    file {
                        id
                        directUrl
                    }
                }
                ...ArtifactFragment
            }
        }
        %s
        """
        % ARTIFACT_FRAGMENT
    )
    @classmethod
    def from_id(cls, artifact_id, client):
        """Fetch an artifact by backend id, using the process-wide cache when possible.
        Returns None when the backend has no artifact for this id.
        """
        artifact = artifacts.get_artifacts_cache().get_artifact(artifact_id)
        if artifact is not None:
            return artifact
        response = client.execute(Artifact.QUERY, variable_values={"id": artifact_id},)
        name = None
        if response.get("artifact") is not None:
            if response["artifact"].get("aliases") is not None:
                aliases = response["artifact"]["aliases"]
                name = ":".join(
                    [aliases[0]["artifactCollectionName"], aliases[0]["alias"]]
                )
                if len(aliases) > 1:
                    # Prefer a non-"latest" alias as the display name.
                    for alias in aliases:
                        if alias["alias"] != "latest":
                            name = ":".join(
                                [alias["artifactCollectionName"], alias["alias"]]
                            )
                            break
            artifact = cls(
                client=client,
                entity=None,
                project=None,
                name=name,
                attrs=response["artifact"],
            )
            # Load the manifest eagerly from the direct URL returned above.
            index_file_url = response["artifact"]["currentManifest"]["file"][
                "directUrl"
            ]
            with requests.get(index_file_url) as req:
                req.raise_for_status()
                artifact._manifest = artifacts.ArtifactManifest.from_manifest_json(
                    artifact, json.loads(six.ensure_text(req.content))
                )
            artifact._load_dependent_manifests()
            return artifact
    def __init__(self, client, entity, project, name, attrs=None):
        # attrs: optional pre-fetched attribute dict; when None the attributes
        # are loaded from the backend by entity/project/name.
        self.client = client
        self._entity = entity
        self._project = project
        self._artifact_name = name
        self._attrs = attrs
        if self._attrs is None:
            self._load()
        self._metadata = json.loads(self._attrs.get("metadata") or "{}")
        self._description = self._attrs.get("description", None)
        self._sequence_name = self._attrs["artifactSequence"]["name"]
        self._version_index = self._attrs.get("versionIndex", None)
        # Keep only user aliases belonging to this sequence; drop automatic
        # version aliases (v0, v1, ...).
        self._aliases = [
            a["alias"]
            for a in self._attrs["aliases"]
            if not re.match(r"^v\d+$", a["alias"])
            and a["artifactCollectionName"] == self._sequence_name
        ]
        self._manifest = None
        self._is_downloaded = False
        self._dependent_artifacts = []
        self._download_roots = set()
        artifacts.get_artifacts_cache().store_artifact(self)
    @property
    def id(self):
        return self._attrs["id"]
    @property
    def version(self):
        # e.g. "v3"; assumes a version index is present.
        return "v%d" % self._version_index
    @property
    def entity(self):
        return self._entity
    @property
    def project(self):
        return self._project
    @property
    def metadata(self):
        return self._metadata
    @metadata.setter
    def metadata(self, metadata):
        # Local only until save() is called.
        self._metadata = metadata
    @property
    def manifest(self):
        return self._load_manifest()
    @property
    def digest(self):
        return self._attrs["digest"]
    @property
    def state(self):
        return self._attrs["state"]
    @property
    def size(self):
        return self._attrs["size"]
    @property
    def created_at(self):
        """
        Returns:
            (datetime): The time at which the artifact was created.
        """
        return self._attrs["createdAt"]
    @property
    def updated_at(self):
        """
        Returns:
            (datetime): The time at which the artifact was last updated.
        """
        return self._attrs["updatedAt"] or self._attrs["createdAt"]
    @property
    def description(self):
        return self._description
    @description.setter
    def description(self, desc):
        # Local only until save() is called.
        self._description = desc
    @property
    def type(self):
        return self._attrs["artifactType"]["name"]
    @property
    def commit_hash(self):
        return self._attrs.get("commitHash", "")
    @property
    def name(self):
        # Unversioned artifacts are identified by digest instead.
        if self._version_index is None:
            return self.digest
        return "%s:v%s" % (self._sequence_name, self._version_index)
    @property
    def aliases(self):
        """
        The aliases associated with this artifact.
        Returns:
            List[str]: The aliases associated with this artifact.
        """
        return self._aliases
    @aliases.setter
    def aliases(self, aliases):
        # Validate before assigning; persisted only when save() is called.
        for alias in aliases:
            if any(char in alias for char in ["/", ":"]):
                raise ValueError(
                    'Invalid alias "%s", slashes and colons are disallowed' % alias
                )
        self._aliases = aliases
    @staticmethod
    def expected_type(client, name, entity_name, project_name):
        """Returns the expected type for a given artifact name and project"""
        query = gql(
            """
        query Artifact(
            $entityName: String!,
            $projectName: String!,
            $name: String!
        ) {
            project(name: $projectName, entityName: $entityName) {
                artifact(name: $name) {
                    artifactType {
                        name
                    }
                }
            }
        }
        """
        )
        if ":" not in name:
            # Default to the latest version when no alias is given.
            name += ":latest"
        response = client.execute(
            query,
            variable_values={
                "entityName": entity_name,
                "projectName": project_name,
                "name": name,
            },
        )
        # Returns None when any level of the response is missing.
        project = response.get("project")
        if project is not None:
            artifact = project.get("artifact")
            if artifact is not None:
                artifact_type = artifact.get("artifactType")
                if artifact_type is not None:
                    return artifact_type.get("name")
        return None
    @normalize_exceptions
    def delete(self):
        """Delete artifact and its files."""
        mutation = gql(
            """
        mutation deleteArtifact($id: ID!) {
            deleteArtifact(input: {artifactID: $id}) {
                artifact {
                    id
                }
            }
        }
        """
        )
        self.client.execute(mutation, variable_values={"id": self.id,})
        return True
    def new_file(self, name, mode=None):
        # Saved artifacts are immutable; all add_* entry points raise.
        raise ValueError("Cannot add files to an artifact once it has been saved")
    def add_file(self, local_path, name=None, is_tmp=False):
        raise ValueError("Cannot add files to an artifact once it has been saved")
    def add_dir(self, path, name=None):
        raise ValueError("Cannot add files to an artifact once it has been saved")
    def add_reference(self, uri, name=None, checksum=True, max_objects=None):
        raise ValueError("Cannot add files to an artifact once it has been saved")
    def add(self, obj, name):
        raise ValueError("Cannot add files to an artifact once it has been saved")
    def _add_download_root(self, dir_path):
        """Adds `dir_path` as one of the known directories which this
        artifact treated as a root"""
        self._download_roots.add(os.path.abspath(dir_path))
    def _is_download_root(self, dir_path):
        """Determines if `dir_path` is a directory which this artifact as
        treated as a root for downloading"""
        return dir_path in self._download_roots
    def _local_path_to_name(self, file_path):
        """Converts a local file path to a path entry in the artifact"""
        abs_file_path = os.path.abspath(file_path)
        abs_file_parts = abs_file_path.split(os.sep)
        # Scan prefixes from shortest to longest; the first prefix that is a
        # known download root wins.
        for i in range(len(abs_file_parts) + 1):
            if self._is_download_root(os.path.join(os.sep, *abs_file_parts[:i])):
                return os.path.join(*abs_file_parts[i:])
        return None
    def _get_obj_entry(self, name):
        """
        When objects are added with `.add(obj, name)`, the name is typically
        changed to include the suffix of the object type when serializing to JSON. So we need
        to be able to resolve a name, without tasking the user with appending .THING.json.
        This method returns an entry if it exists by a suffixed name.
        Args:
            name: (str) name used when adding
        """
        self._load_manifest()
        type_mapping = WBValue.type_mapping()
        for artifact_type_str in type_mapping:
            wb_class = type_mapping[artifact_type_str]
            wandb_file_name = wb_class.with_suffix(name)
            entry = self._manifest.entries.get(wandb_file_name)
            if entry is not None:
                return entry, wb_class
        return None, None
    def get_path(self, name):
        """Return a downloadable entry for `name`, resolving suffixed media names.
        Raises:
            KeyError: if no manifest entry matches `name`.
        """
        manifest = self._load_manifest()
        entry = manifest.entries.get(name)
        if entry is None:
            entry = self._get_obj_entry(name)[0]
            if entry is None:
                raise KeyError("Path not contained in artifact: %s" % name)
            else:
                name = entry.path
        return _DownloadedArtifactEntry(name, entry, self)
    def get(self, name):
        """Deserialize and return the wandb object stored under `name`.
        Returns None implicitly when no suffixed entry matches `name`.
        """
        entry, wb_class = self._get_obj_entry(name)
        if entry is not None:
            # If the entry is a reference from another artifact, then get it directly from that artifact
            if self._manifest_entry_is_artifact_reference(entry):
                artifact = self._get_ref_artifact_from_entry(entry)
                return artifact.get(util.uri_from_path(entry.ref))
            # Special case for wandb.Table. This is intended to be a short term optimization.
            # Since tables are likely to download many other assets in artifact(s), we eagerly download
            # the artifact using the parallelized `artifact.download`. In the future, we should refactor
            # the deserialization pattern such that this special case is not needed.
            if wb_class == wandb.Table:
                self.download(recursive=True)
            # Get the ArtifactEntry
            item = self.get_path(entry.path)
            item_path = item.download()
            # Load the object from the JSON blob
            result = None
            json_obj = {}
            with open(item_path, "r") as file:
                json_obj = json.load(file)
            result = wb_class.from_json(json_obj, self)
            result._set_artifact_source(self, name)
            return result
    def download(self, root=None, recursive=False):
        """Download all of this artifact's files into `root`.
        Args:
            root: Target directory; defaults to _default_root().
            recursive: Also download the artifacts this one references.
        Returns:
            (str): The directory the files were written to.
        """
        dirpath = root or self._default_root()
        self._add_download_root(dirpath)
        manifest = self._load_manifest()
        nfiles = len(manifest.entries)
        size = sum(e.size for e in manifest.entries.values())
        log = False
        # Only announce (and time) downloads that are large enough to notice.
        if nfiles > 5000 or size > 50 * 1024 * 1024:
            log = True
            termlog(
                "Downloading large artifact %s, %.2fMB. %s files... "
                % (self._artifact_name, size / (1024 * 1024), nfiles),
                newline=False,
            )
            start_time = datetime.datetime.now()
        # Force all the files to download into the same directory.
        # Download in parallel
        import multiprocessing.dummy  # this uses threads
        pool = multiprocessing.dummy.Pool(32)
        pool.map(partial(self._download_file, root=dirpath), manifest.entries)
        if recursive:
            pool.map(lambda artifact: artifact.download(), self._dependent_artifacts)
        pool.close()
        pool.join()
        self._is_downloaded = True
        if log:
            delta = relativedelta(datetime.datetime.now() - start_time)
            termlog(
                "Done. %s:%s:%s" % (delta.hours, delta.minutes, delta.seconds),
                prefix=False,
            )
        return dirpath
    def checkout(self, root=None):
        """Make `root` mirror exactly this artifact's contents.
        Local files that are not part of the artifact are deleted before the
        download.
        """
        dirpath = root or self._default_root(include_version=False)
        # NOTE: the loop variable shadows the `root` parameter, which has
        # already been consumed above.
        for root, _, files in os.walk(dirpath):
            for file in files:
                full_path = os.path.join(root, file)
                artifact_path = util.to_forward_slash_path(
                    os.path.relpath(full_path, start=dirpath)
                )
                try:
                    self.get_path(artifact_path)
                except KeyError:
                    # File is not part of the artifact, remove it.
                    os.remove(full_path)
        return self.download(root=dirpath)
    def verify(self, root=None):
        """Verify the local files under `root` against the manifest digests.
        Raises:
            ValueError: on an unexpected local file or a digest mismatch.
        """
        dirpath = root or self._default_root()
        manifest = self._load_manifest()
        ref_count = 0
        for root, _, files in os.walk(dirpath):
            for file in files:
                full_path = os.path.join(root, file)
                artifact_path = util.to_forward_slash_path(
                    os.path.relpath(full_path, start=dirpath)
                )
                try:
                    self.get_path(artifact_path)
                except KeyError:
                    raise ValueError(
                        "Found file {} which is not a member of artifact {}".format(
                            full_path, self.name
                        )
                    )
        for entry in manifest.entries.values():
            if entry.ref is None:
                if (
                    artifacts.md5_file_b64(os.path.join(dirpath, entry.path))
                    != entry.digest
                ):
                    raise ValueError("Digest mismatch for file: %s" % entry.path)
            else:
                # Reference entries are not checksummed locally.
                ref_count += 1
        if ref_count > 0:
            print("Warning: skipped verification of %s refs" % ref_count)
    def file(self, root=None):
        """Download a single file artifact to dir specified by the <root>
        Arguments:
            root: (str, optional) The root directory in which to place the file. Defaults to './artifacts/<self.name>/'.
        Returns:
            (str): The full path of the downloaded file.
        """
        if root is None:
            root = os.path.join(".", "artifacts", self.name)
        manifest = self._load_manifest()
        nfiles = len(manifest.entries)
        if nfiles > 1:
            raise ValueError(
                "This artifact contains more than one file, call `.download()` to get all files or call "
                '.get_path("filename").download()'
            )
        return self._download_file(list(manifest.entries)[0], root=root)
    def _download_file(self, name, root):
        # download file into cache and copy to target dir
        return self.get_path(name).download(root)
    def _default_root(self, include_version=True):
        # ./artifacts/<name> (versioned) or ./artifacts/<sequence> otherwise.
        root = (
            os.path.join(".", "artifacts", self.name)
            if include_version
            else os.path.join(".", "artifacts", self._sequence_name)
        )
        if platform.system() == "Windows":
            # Colons are not allowed in Windows paths.
            head, tail = os.path.splitdrive(root)
            root = head + tail.replace(":", "-")
        return root
    @normalize_exceptions
    def save(self):
        """
        Persists artifact changes to the wandb backend.
        """
        mutation = gql(
            """
        mutation updateArtifact(
            $artifactID: ID!,
            $description: String,
            $metadata: JSONString,
            $aliases: [ArtifactAliasInput!]
        ) {
            updateArtifact(input: {
                artifactID: $artifactID,
                description: $description,
                metadata: $metadata,
                aliases: $aliases
            }) {
                artifact {
                    id
                }
            }
        }
        """
        )
        self.client.execute(
            mutation,
            variable_values={
                "artifactID": self.id,
                "description": self.description,
                "metadata": util.json_dumps_safer(self.metadata),
                "aliases": [
                    {"artifactCollectionName": self._sequence_name, "alias": alias,}
                    for alias in self._aliases
                ],
            },
        )
        return True
    def wait(self):
        # Already persisted server-side; nothing to wait for.
        return self
    # TODO: not yet public, but we probably want something like this.
    def _list(self):
        manifest = self._load_manifest()
        return manifest.entries.keys()
    def __repr__(self):
        return "<Artifact {}>".format(self.id)
    def _load(self):
        """Fetch artifact attributes by entity/project/name and cache them.
        Raises:
            ValueError: when the name lacks an alias, or the artifact is not
                found in the project.
        """
        query = gql(
            """
        query Artifact(
            $entityName: String!,
            $projectName: String!,
            $name: String!
        ) {
            project(name: $projectName, entityName: $entityName) {
                artifact(name: $name) {
                    ...ArtifactFragment
                }
            }
        }
        %s
        """
            % ARTIFACT_FRAGMENT
        )
        response = None
        try:
            response = self.client.execute(
                query,
                variable_values={
                    "entityName": self.entity,
                    "projectName": self.project,
                    "name": self._artifact_name,
                },
            )
        except Exception:
            # we check for this after doing the call, since the backend supports raw digest lookups
            # which don't include ":" and are 32 characters long
            if ":" not in self._artifact_name and len(self._artifact_name) != 32:
                raise ValueError(
                    'Attempted to fetch artifact without alias (e.g. "<artifact_name>:v3" or "<artifact_name>:latest")'
                )
        if (
            response is None
            or response.get("project") is None
            or response["project"].get("artifact") is None
        ):
            raise ValueError(
                'Project %s/%s does not contain artifact: "%s"'
                % (self.entity, self.project, self._artifact_name)
            )
        self._attrs = response["project"]["artifact"]
        return self._attrs
    # The only file should be wandb_manifest.json
    def _files(self, names=None, per_page=50):
        return ArtifactFiles(self.client, self, names, per_page)
    def _load_manifest(self):
        # Lazily fetch and cache the manifest (and any dependent manifests).
        if self._manifest is None:
            query = gql(
                """
            query ArtifactManifest(
                $entityName: String!,
                $projectName: String!,
                $name: String!
            ) {
                project(name: $projectName, entityName: $entityName) {
                    artifact(name: $name) {
                        currentManifest {
                            id
                            file {
                                id
                                directUrl
                            }
                        }
                    }
                }
            }
            """
            )
            response = self.client.execute(
                query,
                variable_values={
                    "entityName": self.entity,
                    "projectName": self.project,
                    "name": self._artifact_name,
                },
            )
            index_file_url = response["project"]["artifact"]["currentManifest"]["file"][
                "directUrl"
            ]
            with requests.get(index_file_url) as req:
                req.raise_for_status()
                self._manifest = artifacts.ArtifactManifest.from_manifest_json(
                    self, json.loads(six.ensure_text(req.content))
                )
            self._load_dependent_manifests()
        return self._manifest
    def _load_dependent_manifests(self):
        """Helper function to interrogate entries and ensure we have loaded their manifests"""
        # Make sure dependencies are avail
        for entry_key in self._manifest.entries:
            entry = self._manifest.entries[entry_key]
            if self._manifest_entry_is_artifact_reference(entry):
                dep_artifact = self._get_ref_artifact_from_entry(entry)
                if dep_artifact not in self._dependent_artifacts:
                    dep_artifact._load_manifest()
                    self._dependent_artifacts.append(dep_artifact)
    @staticmethod
    def _manifest_entry_is_artifact_reference(entry):
        """Helper function determines if an ArtifactEntry in manifest is an artifact reference"""
        return (
            entry.ref is not None
            and urllib.parse.urlparse(entry.ref).scheme == "wandb-artifact"
        )
    def _get_ref_artifact_from_entry(self, entry):
        """Helper function returns the referenced artifact from an entry"""
        artifact_id = util.host_from_path(entry.ref)
        return Artifact.from_id(util.hex_to_b64_id(artifact_id), self.client)
    def used_by(self):
        """Retrieves the runs which use this artifact directly
        Returns:
            [Run]: a list of Run objects which use this artifact
        """
        query = gql(
            """
            query Artifact(
                $id: ID!,
                $before: String,
                $after: String,
                $first: Int,
                $last: Int
            ) {
                artifact(id: $id) {
                    usedBy(before: $before, after: $after, first: $first, last: $last) {
                        edges {
                            node {
                                name
                                project {
                                    name
                                    entityName
                                }
                            }
                        }
                    }
                }
            }
        """
        )
        response = self.client.execute(query, variable_values={"id": self.id},)
        # yes, "name" is actually id
        runs = [
            Run(
                self.client,
                edge["node"]["project"]["entityName"],
                edge["node"]["project"]["name"],
                edge["node"]["name"],
            )
            for edge in response.get("artifact", {}).get("usedBy", {}).get("edges", [])
        ]
        return runs
    def logged_by(self):
        """Retrieves the run which logged this artifact
        Returns:
            Run: Run object which logged this artifact
        """
        query = gql(
            """
            query Artifact(
                $id: ID!
            ) {
                artifact(id: $id) {
                    createdBy {
                        ... on Run {
                            name
                            project {
                                name
                                entityName
                            }
                        }
                    }
                }
            }
        """
        )
        response = self.client.execute(query, variable_values={"id": self.id},)
        run_obj = response.get("artifact", {}).get("createdBy", {})
        if run_obj is not None:
            return Run(
                self.client,
                run_obj["project"]["entityName"],
                run_obj["project"]["name"],
                run_obj["name"],
            )
    def __setitem__(self, name, item):
        # Delegates to add(), which raises for saved artifacts.
        return self.add(item, name)
    def __getitem__(self, name):
        return self.get(name)
class ArtifactVersions(Paginator):
    """An iterable collection of artifact versions associated with a project and optional filter.
    This is generally used indirectly via the `Api`.artifact_versions method
    """
    QUERY = gql(
        """
        query Artifacts($project: String!, $entity: String!, $type: String!, $collection: String!, $cursor: String, $perPage: Int = 50, $order: String, $filters: JSONString) {
            project(name: $project, entityName: $entity) {
                artifactType(name: $type) {
                    artifactSequence(name: $collection) {
                        name
                        artifacts(filters: $filters, after: $cursor, first: $perPage, order: $order) {
                            totalCount
                            edges {
                                node {
                                    ...ArtifactFragment
                                }
                                version
                                cursor
                            }
                            pageInfo {
                                endCursor
                                hasNextPage
                            }
                        }
                    }
                }
            }
        }
        %s
        """
        % ARTIFACT_FRAGMENT
    )
    def __init__(
        self,
        client,
        entity,
        project,
        collection_name,
        type,
        filters=None,
        order=None,
        per_page=50,
    ):
        """
        Args:
            client: Graphql client the paged queries are issued through.
            entity: Entity (user/team) owning the project.
            project: Project containing the artifact collection.
            collection_name: Name of the artifact sequence whose versions
                are listed.
            type: Artifact type name the collection belongs to.
            filters: Optional mapping of server-side filters; defaults to no
                filtering.
            order: Optional server-side ordering expression.
            per_page: Number of versions fetched per request.
        """
        self.entity = entity
        self.collection_name = collection_name
        self.type = type
        self.project = project
        # Fix: the previous `filters={}` default was a mutable default
        # argument shared across all calls; default inside the body instead.
        self.filters = {} if filters is None else filters
        self.order = order
        variables = {
            "project": self.project,
            "entity": self.entity,
            "order": self.order,
            "type": self.type,
            "collection": self.collection_name,
            "filters": json.dumps(self.filters),
        }
        super(ArtifactVersions, self).__init__(client, variables, per_page)
    @property
    def length(self):
        # Total version count, or None before the first page is fetched.
        if self.last_response:
            return self.last_response["project"]["artifactType"]["artifactSequence"][
                "artifacts"
            ]["totalCount"]
        else:
            return None
    @property
    def more(self):
        # True before the first fetch; afterwards, whatever pageInfo reports.
        if self.last_response:
            return self.last_response["project"]["artifactType"]["artifactSequence"][
                "artifacts"
            ]["pageInfo"]["hasNextPage"]
        else:
            return True
    @property
    def cursor(self):
        # Cursor of the last edge seen, used to request the next page.
        if self.last_response:
            return self.last_response["project"]["artifactType"]["artifactSequence"][
                "artifacts"
            ]["edges"][-1]["cursor"]
        else:
            return None
    def convert_objects(self):
        # Convert the current page of edges into Artifact objects; a missing
        # sequence means the collection does not exist.
        if self.last_response["project"]["artifactType"]["artifactSequence"] is None:
            return []
        return [
            Artifact(
                self.client,
                self.entity,
                self.project,
                self.collection_name + ":" + a["version"],
                a["node"],
            )
            for a in self.last_response["project"]["artifactType"]["artifactSequence"][
                "artifacts"
            ]["edges"]
        ]
class ArtifactFiles(Paginator):
    """Paginated listing of the files stored inside a single artifact."""
    QUERY = gql(
        """
        query ArtifactFiles(
            $entityName: String!,
            $projectName: String!,
            $artifactTypeName: String!,
            $artifactName: String!
            $fileNames: [String!],
            $fileCursor: String,
            $fileLimit: Int = 50
        ) {
            project(name: $projectName, entityName: $entityName) {
                artifactType(name: $artifactTypeName) {
                    artifact(name: $artifactName) {
                        ...ArtifactFilesFragment
                    }
                }
            }
        }
        %s
        """
        % ARTIFACT_FILES_FRAGMENT
    )
    def __init__(self, client, artifact, names=None, per_page=50):
        self.artifact = artifact
        super(ArtifactFiles, self).__init__(
            client,
            {
                "entityName": artifact.entity,
                "projectName": artifact.project,
                "artifactTypeName": artifact.artifact_type_name,
                "artifactName": artifact.artifact_name,
                "fileNames": names,
            },
            per_page,
        )
    @property
    def length(self):
        # Total count is not requested by this query.
        # TODO
        return None
    @property
    def more(self):
        response = self.last_response
        if not response:
            # Nothing fetched yet; assume at least one page exists.
            return True
        files = response["project"]["artifactType"]["artifact"]["files"]
        return files["pageInfo"]["hasNextPage"]
    @property
    def cursor(self):
        response = self.last_response
        if not response:
            return None
        files = response["project"]["artifactType"]["artifact"]["files"]
        return files["edges"][-1]["cursor"]
    def update_variables(self):
        self.variables["fileLimit"] = self.per_page
        self.variables["fileCursor"] = self.cursor
    def convert_objects(self):
        converted = []
        for edge in self.last_response["project"]["artifactType"]["artifact"]["files"][
            "edges"
        ]:
            converted.append(File(self.client, edge["node"]))
        return converted
    def __repr__(self):
        return "<ArtifactFiles %s (%s)>" % ("/".join(self.artifact.path), len(self))
|
//Var
// Highest notification id the client has received so far (0 = none yet).
var lastReceivedNotificationID = 0
// Unread-notification count backing the #lblNotificationsCount badge.
var notificationsCount = 0;
//Hide Notifications Modal
// Closes the notifications modal dialog (#mdlNotifications).
function HideNotificationsModal() {
    $('#mdlNotifications').modal('hide');
};
//Get List
// Loads the unread-notifications partial view into #divNotificationsArea,
// showing a loading placeholder while the request is in flight.
// NOTE(review): jqXHR.success() was removed in jQuery 3 — confirm the bundled
// jQuery version, or migrate to .done().
function LoadPart_UserNotificationsListDiv() {
    var loadingpart = LoadDivLoading();
    $("#divNotificationsArea").html(loadingpart);
    $.ajax({
        url: getRootURL() + '/UserNotifications/GetUnread?lastRecievedID=0',
        //page=" + $("#DynConfigCurrentPage").html,
        type: "GET",
    })
    .success(function (partialViewResult) {
        $("#divNotificationsArea").html(partialViewResult);
    });
    return false;
};
//API: Get Notifications Count
// Polls the notifications API; when the response reports activity
// (Message != 0), refreshes the notifications list partial.
// NOTE(review): jqXHR.success() was removed in jQuery 3 — confirm the bundled
// jQuery version, or migrate to .done().
function API_GetNotificationsCount() {
    $.ajax({
        url: getRootURL() + '/api/Notifications/GetNotifications?lastNotificationID=' + lastReceivedNotificationID,
        type: "GET",
    })
    .success(function (partialViewResult) {
        if(partialViewResult.Message != 0)
        {
            LoadPart_UserNotificationsListDiv();
        }
    });
    return false;
};
//Set Notification as Read
// Marks a notification as read on the server (fire-and-forget POST), hides
// its row, and decrements the unread-count badge.
function SetNotificationAsRead(notificationID) {
    // Fix: the leading '/' was missing, producing a broken URL whenever
    // getRootURL() has no trailing slash (all sibling calls use '/...').
    var url = getRootURL() + '/UserNotifications/SetNotificationAsRead?id=' + notificationID;
    $.ajax({
        url: url,
        type: "POST",
        success: function (resp) {
        },
        error: function (resp) {
        }
    })
    $('#divNotification_' + notificationID).hide();
    // Fix: the badge was rendered from a stale counter — the variable itself
    // was never decremented, so marking several notifications as read kept
    // showing the same number.
    notificationsCount = notificationsCount - 1;
    $('#lblNotificationsCount').text(notificationsCount);
};
|
import React from 'react'
import styles from './styles.scss'
import classnames from 'classnames/bind'
const cx = classnames.bind(styles)
// Presentational component: renders an empty <span> inside a <div> that
// carries the scoped 'coin-frame' class from styles.scss.
export default function CoinFrame(props) {
  const frameClass = cx('coin-frame')
  return (
    <div className={frameClass}>
      <span></span>
    </div>
  )
}
|
/*=auto=========================================================================
Portions (c) Copyright 2005 Brigham and Women's Hospital (BWH) All Rights Reserved.
See COPYRIGHT.txt
or http://www.slicer.org/copyright/copyright.txt for details.
Program: 3D Slicer
Module: $RCSfile: vtkMRMLVolumeNode.h,v $
Date: $Date: 2006/03/19 17:12:29 $
Version: $Revision: 1.13 $
=========================================================================auto=*/
#ifndef __vtkMRMLDiffusionTensorVolumeNode_h
#define __vtkMRMLDiffusionTensorVolumeNode_h
#include "vtkMRMLDiffusionImageVolumeNode.h"
class vtkMRMLDiffusionTensorVolumeDisplayNode;
/// \brief MRML node for representing diffusion weighted MRI volume.
///
/// Diffusion Weighted Volume nodes describe data sets that encode diffusion weighted
/// images. These images are the basis for computing the diffusion tensor.
/// The node is a container for the necessary information to interpret DW images:
/// 1. Gradient information.
/// 2. B value for each gradient.
/// 3. Measurement frame that relates the coordinate system where the gradients are given
/// to RAS.
class VTK_MRML_EXPORT vtkMRMLDiffusionTensorVolumeNode : public vtkMRMLDiffusionImageVolumeNode
{
public:
// Standard VTK object factory constructor.
static vtkMRMLDiffusionTensorVolumeNode *New();
vtkTypeMacro(vtkMRMLDiffusionTensorVolumeNode,vtkMRMLDiffusionImageVolumeNode);
void PrintSelf(ostream& os, vtkIndent indent) override;
/// Create a new instance of this node type (MRML factory hook).
vtkMRMLNode* CreateNodeInstance() override;
/// Get node XML tag name (like Volume, Model)
const char* GetNodeTagName() override { return "DiffusionTensorVolume"; }
/// Copy node content (excludes basic data, such as name and node references).
/// \sa vtkMRMLNode::CopyContent
vtkMRMLCopyContentDefaultMacro(vtkMRMLDiffusionTensorVolumeNode);
/// Associated volume display MRML node
virtual void SetAndObserveDisplayNodeID(const char *DisplayNodeID);
/// Associated display MRML node
/// (returns nullptr if the display node is absent or of another type —
/// TODO confirm against the implementation).
virtual vtkMRMLDiffusionTensorVolumeDisplayNode* GetDiffusionTensorVolumeDisplayNode();
/// Create default storage node or nullptr if does not have one
vtkMRMLStorageNode* CreateDefaultStorageNode() override;
/// Create and observe default display node
void CreateDefaultDisplayNodes() override;
protected:
vtkMRMLDiffusionTensorVolumeNode();
~vtkMRMLDiffusionTensorVolumeNode() override;
// Copy constructor and assignment declared but not implemented: VTK-style
// idiom to forbid copying of MRML nodes.
vtkMRMLDiffusionTensorVolumeNode(const vtkMRMLDiffusionTensorVolumeNode&);
void operator=(const vtkMRMLDiffusionTensorVolumeNode&);
};
#endif
|