file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
view.py | # Webhooks for external integrations.
from __future__ import absolute_import
from typing import Any, Dict, List, Optional, Text, Tuple
from django.utils.translation import ugettext as _
from django.db.models import Q
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from zerver.models import UserProfile, get_user_profile_by_email, Realm
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import api_key_only_webhook_view, has_request_variables, REQ
import logging
import re
import ujson
IGNORED_EVENTS = [
'comment_created', # we handle issue_update event instead
'comment_updated', # we handle issue_update event instead
'comment_deleted', # we handle issue_update event instead
]
def guess_zulip_user_from_jira(jira_username, realm):
# type: (Text, Realm) -> Optional[UserProfile]
try:
# Try to find a matching user in Zulip
# We search a user's full name, short name,
# and beginning of email address
user = UserProfile.objects.filter(
Q(full_name__iexact=jira_username) |
Q(short_name__iexact=jira_username) |
Q(email__istartswith=jira_username),
is_active=True,
realm=realm).order_by("id")[0]
return user
except IndexError:
return None
def convert_jira_markup(content, realm):
# type: (Text, Realm) -> Text
# Attempt to do some simplistic conversion of JIRA
# formatting to Markdown, for consumption in Zulip
# Jira uses *word* for bold, we use **word**
content = re.sub(r'\*([^\*]+)\*', r'**\1**', content)
# Jira uses {{word}} for monospacing, we use `word`
content = re.sub(r'{{([^\*]+?)}}', r'`\1`', content)
# Starting a line with bq. block quotes that line
content = re.sub(r'bq\. (.*)', r'> \1', content)
# Wrapping a block of code in {quote}stuff{quote} also block-quotes it
quote_re = re.compile(r'{quote}(.*?){quote}', re.DOTALL)
content = re.sub(quote_re, r'~~~ quote\n\1\n~~~', content)
# {noformat}stuff{noformat} blocks are just code blocks with no
# syntax highlighting
noformat_re = re.compile(r'{noformat}(.*?){noformat}', re.DOTALL)
content = re.sub(noformat_re, r'~~~\n\1\n~~~', content)
# Code blocks are delineated by {code[: lang]} {code}
code_re = re.compile(r'{code[^\n]*}(.*?){code}', re.DOTALL)
content = re.sub(code_re, r'~~~\n\1\n~~~', content)
# Links are of form: [https://www.google.com] or [Link Title|https://www.google.com]
# In order to support both forms, we don't match a | in bare links
content = re.sub(r'\[([^\|~]+?)\]', r'[\1](\1)', content)
# Full links which have a | are converted into a better markdown link
full_link_re = re.compile(r'\[(?:(?P<title>[^|~]+)\|)(?P<url>.*)\]')
content = re.sub(full_link_re, r'[\g<title>](\g<url>)', content)
# Try to convert a JIRA user mention of format [~username] into a
# Zulip user mention. We don't know the email, just the JIRA username,
# so we naively guess at their Zulip account using this
if realm:
mention_re = re.compile(u'\[~(.*?)\]')
for username in mention_re.findall(content):
# Try to look up username
user_profile = guess_zulip_user_from_jira(username, realm)
if user_profile:
replacement = u"**{}**".format(user_profile.full_name)
else:
replacement = u"**{}**".format(username)
content = content.replace("[~{}]".format(username,), replacement)
return content
def get_in(payload, keys, default=''):
# type: (Dict[str, Any], List[str], Text) -> Any
try:
for key in keys:
payload = payload[key]
except (AttributeError, KeyError, TypeError):
return default
return payload
def get_issue_string(payload, issue_id=None):
# type: (Dict[str, Any], Text) -> Text
# Guess the URL as it is not specified in the payload
# We assume that there is a /browse/BUG-### page
# from the REST url of the issue itself
if issue_id is None:
issue_id = get_issue_id(payload)
base_url = re.match("(.*)\/rest\/api/.*", get_in(payload, ['issue', 'self']))
if base_url and len(base_url.groups()):
return u"[{}]({}/browse/{})".format(issue_id, base_url.group(1), issue_id)
else:
return issue_id
def get_assignee_mention(assignee_email):
# type: (Text) -> Text
if assignee_email != '':
try:
assignee_name = get_user_profile_by_email(assignee_email).full_name
except UserProfile.DoesNotExist:
assignee_name = assignee_email
return u"**{}**".format(assignee_name)
return ''
def get_issue_author(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['user', 'displayName'])
def get_issue_id(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['issue', 'key'])
def get_issue_title(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['issue', 'fields', 'summary'])
def get_issue_subject(payload):
# type: (Dict[str, Any]) -> Text
return u"{}: {}".format(get_issue_id(payload), get_issue_title(payload))
def get_sub_event_for_update_issue(payload):
# type: (Dict[str, Any]) -> Text
sub_event = payload.get('issue_event_type_name', '')
if sub_event == '':
if payload.get('comment'):
return 'issue_commented'
elif payload.get('transition'):
return 'issue_transited'
return sub_event
def get_event_type(payload):
# type: (Dict[str, Any]) -> Optional[Text]
event = payload.get('webhookEvent')
if event is None and payload.get('transition'):
event = 'jira:issue_updated'
return event
def add_change_info(content, field, from_field, to_field):
# type: (Text, Text, Text, Text) -> Text
content += u"* Changed {}".format(field)
if from_field:
content += u" from **{}**".format(from_field)
if to_field:
content += u" to {}\n".format(to_field)
return content
def handle_updated_issue_event(payload, user_profile):
# Reassigned, commented, reopened, and resolved events are all bundled
# into this one 'updated' event type, so we try to extract the meaningful
# event that happened
# type: (Dict[str, Any], UserProfile) -> Text
issue_id = get_in(payload, ['issue', 'key'])
issue = get_issue_string(payload, issue_id)
assignee_email = get_in(payload, ['issue', 'fields', 'assignee', 'emailAddress'], '')
assignee_mention = get_assignee_mention(assignee_email)
if assignee_mention != '':
assignee_blurb = u" (assigned to {})".format(assignee_mention)
else:
assignee_blurb = ''
sub_event = get_sub_event_for_update_issue(payload)
if 'comment' in sub_event:
if sub_event == 'issue_commented':
|
elif sub_event == 'issue_comment_edited':
verb = 'edited comment on'
else:
verb = 'deleted comment from'
content = u"{} **{}** {}{}".format(get_issue_author(payload), verb, issue, assignee_blurb)
comment = get_in(payload, ['comment', 'body'])
if comment:
comment = convert_jira_markup(comment, user_profile.realm)
content = u"{}:\n\n\n{}\n".format(content, comment)
else:
content = u"{} **updated** {}{}:\n\n".format(get_issue_author(payload), issue, assignee_blurb)
changelog = get_in(payload, ['changelog'])
if changelog != '':
# Use the changelog to display the changes, whitelist types we accept
items = changelog.get('items')
for item in items:
field = item.get('field')
if field == 'assignee' and assignee_mention != '':
target_field_string = assignee_mention
else:
# Convert a user's target to a @-mention if possible
target_field_string = u"**{}**".format(item.get('toString'))
from_field_string = item.get('fromString')
if target_field_string or from_field_string:
content = add_change_info(content, field, from_field_string, target_field_string)
elif sub_event == 'issue_transited':
from_field_string = get_in(payload, ['transition', 'from_status'])
target_field_string = u'**{}**'.format(get_in(payload, ['transition', 'to_status']))
if target_field_string or from_field_string:
content = add_change_info(content, 'status', from_field_string, target_field_string)
return content
def handle_created_issue_event(payload):
# type: (Dict[str, Any]) -> Text
return u"{} **created** {} priority {}, assigned to **{}**:\n\n> {}".format(
get_issue_author(payload),
get_issue_string(payload),
get_in(payload, ['issue', 'fields', 'priority', 'name']),
get_in(payload, ['issue', 'fields', 'assignee', 'displayName'], 'no one'),
get_issue_title(payload)
)
def handle_deleted_issue_event(payload):
# type: (Dict[str, Any]) -> Text
return u"{} **deleted** {}!".format(get_issue_author(payload), get_issue_string(payload))
@api_key_only_webhook_view("JIRA")
@has_request_variables
def api_jira_webhook(request, user_profile,
payload=REQ(argument_type='body'),
stream=REQ(default='jira')):
# type: (HttpRequest, UserProfile, Dict[str, Any], Text) -> HttpResponse
event = get_event_type(payload)
if event == 'jira:issue_created':
subject = get_issue_subject(payload)
content = handle_created_issue_event(payload)
elif event == 'jira:issue_deleted':
subject = get_issue_subject(payload)
content = handle_deleted_issue_event(payload)
elif event == 'jira:issue_updated':
subject = get_issue_subject(payload)
content = handle_updated_issue_event(payload, user_profile)
elif event in IGNORED_EVENTS:
return json_success()
else:
if event is None:
if not settings.TEST_SUITE:
message = u"Got JIRA event with None event type: {}".format(payload)
logging.warning(message)
return json_error(_("Event is not given by JIRA"))
else:
if not settings.TEST_SUITE:
logging.warning("Got JIRA event type we don't support: {}".format(event))
return json_success()
check_send_message(user_profile, request.client, "stream", [stream], subject, content)
return json_success()
| verb = 'added comment to' | conditional_block |
index.ts | import { window, document } from 'global';
import Channel, { ChannelEvent, ChannelHandler } from '@storybook/channels';
import { logger } from '@storybook/client-logger';
import { isJSON, parse, stringify } from 'telejson';
interface RawEvent {
data: string;
}
interface Config {
page: 'manager' | 'preview';
}
interface BufferedEvent {
event: ChannelEvent;
resolve: (value?: any) => void;
reject: (reason?: any) => void;
}
export const KEY = 'storybook-channel';
// TODO: we should export a method for opening child windows here and keep track of em.
// that way we can send postMessage to child windows as well, not just iframe
// https://stackoverflow.com/questions/6340160/how-to-get-the-references-of-all-already-opened-child-windows
export class PostmsgTransport {
private buffer: BufferedEvent[];
private handler: ChannelHandler;
private connected: boolean;
constructor(private readonly config: Config) |
setHandler(handler: ChannelHandler): void {
this.handler = (...args) => {
handler.apply(this, args);
if (!this.connected && this.getWindow()) {
this.flush();
this.connected = true;
}
};
}
/**
* Sends `event` to the associated window. If the window does not yet exist
* the event will be stored in a buffer and sent when the window exists.
* @param event
*/
send(event: ChannelEvent, options?: any): Promise<any> {
const iframeWindow = this.getWindow();
if (!iframeWindow) {
return new Promise((resolve, reject) => {
this.buffer.push({ event, resolve, reject });
});
}
let depth = 15;
let allowFunction = true;
if (options && typeof options.allowFunction === 'boolean') {
allowFunction = options.allowFunction;
}
if (options && Number.isInteger(options.depth)) {
depth = options.depth;
}
const data = stringify({ key: KEY, event }, { maxDepth: depth, allowFunction });
// TODO: investigate http://blog.teamtreehouse.com/cross-domain-messaging-with-postmessage
// might replace '*' with document.location ?
iframeWindow.postMessage(data, '*');
return Promise.resolve(null);
}
private flush(): void {
const { buffer } = this;
this.buffer = [];
buffer.forEach(item => {
this.send(item.event)
.then(item.resolve)
.catch(item.reject);
});
}
private getWindow(): Window {
if (this.config.page === 'manager') {
// FIXME this is a really bad idea! use a better way to do this.
// This finds the storybook preview iframe to send messages to.
const iframe = document.getElementById('storybook-preview-iframe');
if (!iframe) {
return null;
}
return iframe.contentWindow;
}
return window.parent;
}
private handleEvent(rawEvent: RawEvent): void {
try {
const { data } = rawEvent;
const { key, event } = typeof data === 'string' && isJSON(data) ? parse(data) : data;
if (key === KEY) {
logger.debug(`message arrived at ${this.config.page}`, event.type, ...event.args);
this.handler(event);
}
} catch (error) {
logger.error(error);
// debugger;
}
}
}
/**
* Creates a channel which communicates with an iframe or child window.
*/
export default function createChannel({ page }: Config): Channel {
const transport = new PostmsgTransport({ page });
return new Channel({ transport });
}
| {
this.buffer = [];
this.handler = null;
window.addEventListener('message', this.handleEvent.bind(this), false);
// Check whether the config.page parameter has a valid value
if (config.page !== 'manager' && config.page !== 'preview') {
throw new Error(`postmsg-channel: "config.page" cannot be "${config.page}"`);
}
} | identifier_body |
index.ts | import { window, document } from 'global';
import Channel, { ChannelEvent, ChannelHandler } from '@storybook/channels';
import { logger } from '@storybook/client-logger';
import { isJSON, parse, stringify } from 'telejson';
interface RawEvent {
data: string;
}
interface Config {
page: 'manager' | 'preview';
}
interface BufferedEvent {
event: ChannelEvent;
resolve: (value?: any) => void;
reject: (reason?: any) => void;
}
export const KEY = 'storybook-channel';
// TODO: we should export a method for opening child windows here and keep track of em.
// that way we can send postMessage to child windows as well, not just iframe
// https://stackoverflow.com/questions/6340160/how-to-get-the-references-of-all-already-opened-child-windows
export class PostmsgTransport {
private buffer: BufferedEvent[];
private handler: ChannelHandler;
private connected: boolean;
constructor(private readonly config: Config) {
this.buffer = [];
this.handler = null;
window.addEventListener('message', this.handleEvent.bind(this), false);
// Check whether the config.page parameter has a valid value
if (config.page !== 'manager' && config.page !== 'preview') {
throw new Error(`postmsg-channel: "config.page" cannot be "${config.page}"`);
}
}
setHandler(handler: ChannelHandler): void {
this.handler = (...args) => {
handler.apply(this, args);
if (!this.connected && this.getWindow()) |
};
}
/**
* Sends `event` to the associated window. If the window does not yet exist
* the event will be stored in a buffer and sent when the window exists.
* @param event
*/
send(event: ChannelEvent, options?: any): Promise<any> {
const iframeWindow = this.getWindow();
if (!iframeWindow) {
return new Promise((resolve, reject) => {
this.buffer.push({ event, resolve, reject });
});
}
let depth = 15;
let allowFunction = true;
if (options && typeof options.allowFunction === 'boolean') {
allowFunction = options.allowFunction;
}
if (options && Number.isInteger(options.depth)) {
depth = options.depth;
}
const data = stringify({ key: KEY, event }, { maxDepth: depth, allowFunction });
// TODO: investigate http://blog.teamtreehouse.com/cross-domain-messaging-with-postmessage
// might replace '*' with document.location ?
iframeWindow.postMessage(data, '*');
return Promise.resolve(null);
}
private flush(): void {
const { buffer } = this;
this.buffer = [];
buffer.forEach(item => {
this.send(item.event)
.then(item.resolve)
.catch(item.reject);
});
}
private getWindow(): Window {
if (this.config.page === 'manager') {
// FIXME this is a really bad idea! use a better way to do this.
// This finds the storybook preview iframe to send messages to.
const iframe = document.getElementById('storybook-preview-iframe');
if (!iframe) {
return null;
}
return iframe.contentWindow;
}
return window.parent;
}
private handleEvent(rawEvent: RawEvent): void {
try {
const { data } = rawEvent;
const { key, event } = typeof data === 'string' && isJSON(data) ? parse(data) : data;
if (key === KEY) {
logger.debug(`message arrived at ${this.config.page}`, event.type, ...event.args);
this.handler(event);
}
} catch (error) {
logger.error(error);
// debugger;
}
}
}
/**
* Creates a channel which communicates with an iframe or child window.
*/
export default function createChannel({ page }: Config): Channel {
const transport = new PostmsgTransport({ page });
return new Channel({ transport });
}
| {
this.flush();
this.connected = true;
} | conditional_block |
index.ts | import { window, document } from 'global';
import Channel, { ChannelEvent, ChannelHandler } from '@storybook/channels';
import { logger } from '@storybook/client-logger';
import { isJSON, parse, stringify } from 'telejson';
interface RawEvent {
data: string;
}
interface Config {
page: 'manager' | 'preview';
}
interface BufferedEvent {
event: ChannelEvent;
resolve: (value?: any) => void;
reject: (reason?: any) => void;
}
export const KEY = 'storybook-channel';
// TODO: we should export a method for opening child windows here and keep track of em.
// that way we can send postMessage to child windows as well, not just iframe
// https://stackoverflow.com/questions/6340160/how-to-get-the-references-of-all-already-opened-child-windows
export class PostmsgTransport {
private buffer: BufferedEvent[];
private handler: ChannelHandler;
private connected: boolean;
constructor(private readonly config: Config) {
this.buffer = [];
this.handler = null;
window.addEventListener('message', this.handleEvent.bind(this), false);
// Check whether the config.page parameter has a valid value
if (config.page !== 'manager' && config.page !== 'preview') {
throw new Error(`postmsg-channel: "config.page" cannot be "${config.page}"`);
}
}
setHandler(handler: ChannelHandler): void {
this.handler = (...args) => {
handler.apply(this, args);
if (!this.connected && this.getWindow()) {
this.flush();
this.connected = true;
}
};
}
/**
* Sends `event` to the associated window. If the window does not yet exist
* the event will be stored in a buffer and sent when the window exists.
* @param event
*/
send(event: ChannelEvent, options?: any): Promise<any> {
const iframeWindow = this.getWindow();
if (!iframeWindow) {
return new Promise((resolve, reject) => {
this.buffer.push({ event, resolve, reject });
});
}
let depth = 15;
let allowFunction = true;
if (options && typeof options.allowFunction === 'boolean') {
allowFunction = options.allowFunction;
}
if (options && Number.isInteger(options.depth)) {
depth = options.depth;
}
const data = stringify({ key: KEY, event }, { maxDepth: depth, allowFunction }); | // TODO: investigate http://blog.teamtreehouse.com/cross-domain-messaging-with-postmessage
// might replace '*' with document.location ?
iframeWindow.postMessage(data, '*');
return Promise.resolve(null);
}
private flush(): void {
const { buffer } = this;
this.buffer = [];
buffer.forEach(item => {
this.send(item.event)
.then(item.resolve)
.catch(item.reject);
});
}
private getWindow(): Window {
if (this.config.page === 'manager') {
// FIXME this is a really bad idea! use a better way to do this.
// This finds the storybook preview iframe to send messages to.
const iframe = document.getElementById('storybook-preview-iframe');
if (!iframe) {
return null;
}
return iframe.contentWindow;
}
return window.parent;
}
private handleEvent(rawEvent: RawEvent): void {
try {
const { data } = rawEvent;
const { key, event } = typeof data === 'string' && isJSON(data) ? parse(data) : data;
if (key === KEY) {
logger.debug(`message arrived at ${this.config.page}`, event.type, ...event.args);
this.handler(event);
}
} catch (error) {
logger.error(error);
// debugger;
}
}
}
/**
* Creates a channel which communicates with an iframe or child window.
*/
export default function createChannel({ page }: Config): Channel {
const transport = new PostmsgTransport({ page });
return new Channel({ transport });
} | random_line_split | |
index.ts | import { window, document } from 'global';
import Channel, { ChannelEvent, ChannelHandler } from '@storybook/channels';
import { logger } from '@storybook/client-logger';
import { isJSON, parse, stringify } from 'telejson';
interface RawEvent {
data: string;
}
interface Config {
page: 'manager' | 'preview';
}
interface BufferedEvent {
event: ChannelEvent;
resolve: (value?: any) => void;
reject: (reason?: any) => void;
}
export const KEY = 'storybook-channel';
// TODO: we should export a method for opening child windows here and keep track of em.
// that way we can send postMessage to child windows as well, not just iframe
// https://stackoverflow.com/questions/6340160/how-to-get-the-references-of-all-already-opened-child-windows
export class PostmsgTransport {
private buffer: BufferedEvent[];
private handler: ChannelHandler;
private connected: boolean;
constructor(private readonly config: Config) {
this.buffer = [];
this.handler = null;
window.addEventListener('message', this.handleEvent.bind(this), false);
// Check whether the config.page parameter has a valid value
if (config.page !== 'manager' && config.page !== 'preview') {
throw new Error(`postmsg-channel: "config.page" cannot be "${config.page}"`);
}
}
setHandler(handler: ChannelHandler): void {
this.handler = (...args) => {
handler.apply(this, args);
if (!this.connected && this.getWindow()) {
this.flush();
this.connected = true;
}
};
}
/**
* Sends `event` to the associated window. If the window does not yet exist
* the event will be stored in a buffer and sent when the window exists.
* @param event
*/
| (event: ChannelEvent, options?: any): Promise<any> {
const iframeWindow = this.getWindow();
if (!iframeWindow) {
return new Promise((resolve, reject) => {
this.buffer.push({ event, resolve, reject });
});
}
let depth = 15;
let allowFunction = true;
if (options && typeof options.allowFunction === 'boolean') {
allowFunction = options.allowFunction;
}
if (options && Number.isInteger(options.depth)) {
depth = options.depth;
}
const data = stringify({ key: KEY, event }, { maxDepth: depth, allowFunction });
// TODO: investigate http://blog.teamtreehouse.com/cross-domain-messaging-with-postmessage
// might replace '*' with document.location ?
iframeWindow.postMessage(data, '*');
return Promise.resolve(null);
}
private flush(): void {
const { buffer } = this;
this.buffer = [];
buffer.forEach(item => {
this.send(item.event)
.then(item.resolve)
.catch(item.reject);
});
}
private getWindow(): Window {
if (this.config.page === 'manager') {
// FIXME this is a really bad idea! use a better way to do this.
// This finds the storybook preview iframe to send messages to.
const iframe = document.getElementById('storybook-preview-iframe');
if (!iframe) {
return null;
}
return iframe.contentWindow;
}
return window.parent;
}
private handleEvent(rawEvent: RawEvent): void {
try {
const { data } = rawEvent;
const { key, event } = typeof data === 'string' && isJSON(data) ? parse(data) : data;
if (key === KEY) {
logger.debug(`message arrived at ${this.config.page}`, event.type, ...event.args);
this.handler(event);
}
} catch (error) {
logger.error(error);
// debugger;
}
}
}
/**
* Creates a channel which communicates with an iframe or child window.
*/
export default function createChannel({ page }: Config): Channel {
const transport = new PostmsgTransport({ page });
return new Channel({ transport });
}
| send | identifier_name |
debug.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debug estimators.
Debug estimators are bias-only estimators that can be used for debugging
and as simple baselines.
Example:
```
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_classes outputs the most commonly seen class in training.
predicted_label = classifier.predict_classes(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def debug_model_fn(features, labels, mode, params, config=None):
"""Model_fn for debug models.
Args:
features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
labels: Labels that are compatible with the `_Head` instance in `params`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters containing:
* head: A `_Head` instance.
config: `RunConfig` object to configure the runtime settings.
Raises:
KeyError: If weight column is specified but not present.
ValueError: If features is an empty dictionary.
Returns:
A `ModelFnOps` instance.
"""
del config # Unused.
features = _get_feature_dict(features)
if not features:
raise ValueError("Features cannot be empty.")
head = params["head"]
size_checks = []
batch_size = None
# The first dimension is assumed to be a batch size and must be consistent
# among all of the features.
for feature in features.values():
first_dim = array_ops.shape(feature)[0]
if batch_size is None:
|
else:
size_checks.append(check_ops.assert_equal(batch_size, first_dim))
with ops.control_dependencies(size_checks):
logits = array_ops.zeros([batch_size, head.logits_dimension])
def train_op_fn(loss):
return optimizers.optimize_loss(
loss, global_step=None, learning_rate=0.3, optimizer="Adagrad")
return head.create_model_fn_ops(
features=features,
labels=labels,
mode=mode,
train_op_fn=train_op_fn,
logits=logits)
class DebugClassifier(estimator.Estimator):
"""A classifier for TensorFlow Debug models.
Example:
```python
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_class outputs the most commonly seen class in training.
predicted_label = classifier.predict_class(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
n_classes=2,
weight_column_name=None,
config=None,
feature_engineering_fn=None,
label_keys=None):
"""Initializes a DebugClassifier instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
n_classes: number of label classes. Default is binary classification.
It must be greater than 1. Note: Class labels are integers representing
the class index (i.e. values from 0 to n_classes-1). For arbitrary
label values (e.g. string labels), convert to class indices first.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
label_keys: Optional list of strings with size `[n_classes]` defining the
label vocabulary. Only supported for `n_classes` > 2.
Returns:
A `DebugClassifier` estimator.
Raises:
ValueError: If `n_classes` < 2.
"""
params = {"head":
head_lib.multi_class_head(
n_classes=n_classes,
weight_column_name=weight_column_name,
enable_centered_bias=True,
label_keys=label_keys)}
super(DebugClassifier, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def predict_classes(self, input_fn=None, batch_size=None):
"""Returns predicted classes for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted classes. Each predicted class is represented by
its class index (i.e. integer from 0 to n_classes-1).
"""
key = prediction_key.PredictionKey.CLASSES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds)
def predict_proba(self,
input_fn=None,
batch_size=None):
"""Returns prediction probabilities for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted probabilities with shape [batch_size, n_classes].
"""
key = prediction_key.PredictionKey.PROBABILITIES
preds = self.predict(
input_fn=input_fn,
batch_size=batch_size,
outputs=[key])
return (pred[key] for pred in preds)
class DebugRegressor(estimator.Estimator):
"""A regressor for TensorFlow Debug models.
Example:
```python
# Build DebugRegressor
regressor = DebugRegressor()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
regressor.fit(input_fn=input_fn_train)
# Evaluate squared-loss between the test and train targets.
loss = regressor.evaluate(input_fn=input_fn_eval)["loss"]
# predict_scores outputs mean value seen during training.
predicted_targets = regressor.predict_scores(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
label_dimension=1,
weight_column_name=None,
config=None,
feature_engineering_fn=None):
"""Initializes a DebugRegressor instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
label_dimension: Number of regression targets per example. This is the
size of the last dimension of the labels and logits `Tensor` objects
(typically, these have shape `[batch_size, label_dimension]`).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
Returns:
A `DebugRegressor` estimator.
"""
params = {
"head":
head_lib.regression_head(
weight_column_name=weight_column_name,
label_dimension=label_dimension,
enable_centered_bias=True)
}
super(DebugRegressor, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def predict_scores(self, input_fn=None, batch_size=None):
"""Returns predicted scores for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted scores.
"""
key = prediction_key.PredictionKey.SCORES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds)
| batch_size = first_dim | conditional_block |
debug.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debug estimators.
|
```
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_classes outputs the most commonly seen class in training.
predicted_label = classifier.predict_classes(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def debug_model_fn(features, labels, mode, params, config=None):
"""Model_fn for debug models.
Args:
features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
labels: Labels that are compatible with the `_Head` instance in `params`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters containing:
* head: A `_Head` instance.
config: `RunConfig` object to configure the runtime settings.
Raises:
KeyError: If weight column is specified but not present.
ValueError: If features is an empty dictionary.
Returns:
A `ModelFnOps` instance.
"""
del config # Unused.
features = _get_feature_dict(features)
if not features:
raise ValueError("Features cannot be empty.")
head = params["head"]
size_checks = []
batch_size = None
# The first dimension is assumed to be a batch size and must be consistent
# among all of the features.
for feature in features.values():
first_dim = array_ops.shape(feature)[0]
if batch_size is None:
batch_size = first_dim
else:
size_checks.append(check_ops.assert_equal(batch_size, first_dim))
with ops.control_dependencies(size_checks):
logits = array_ops.zeros([batch_size, head.logits_dimension])
def train_op_fn(loss):
return optimizers.optimize_loss(
loss, global_step=None, learning_rate=0.3, optimizer="Adagrad")
return head.create_model_fn_ops(
features=features,
labels=labels,
mode=mode,
train_op_fn=train_op_fn,
logits=logits)
class DebugClassifier(estimator.Estimator):
"""A classifier for TensorFlow Debug models.
Example:
```python
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_class outputs the most commonly seen class in training.
predicted_label = classifier.predict_class(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
n_classes=2,
weight_column_name=None,
config=None,
feature_engineering_fn=None,
label_keys=None):
"""Initializes a DebugClassifier instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
n_classes: number of label classes. Default is binary classification.
It must be greater than 1. Note: Class labels are integers representing
the class index (i.e. values from 0 to n_classes-1). For arbitrary
label values (e.g. string labels), convert to class indices first.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
label_keys: Optional list of strings with size `[n_classes]` defining the
label vocabulary. Only supported for `n_classes` > 2.
Returns:
A `DebugClassifier` estimator.
Raises:
ValueError: If `n_classes` < 2.
"""
params = {"head":
head_lib.multi_class_head(
n_classes=n_classes,
weight_column_name=weight_column_name,
enable_centered_bias=True,
label_keys=label_keys)}
super(DebugClassifier, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def predict_classes(self, input_fn=None, batch_size=None):
"""Returns predicted classes for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted classes. Each predicted class is represented by
its class index (i.e. integer from 0 to n_classes-1).
"""
key = prediction_key.PredictionKey.CLASSES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds)
def predict_proba(self,
input_fn=None,
batch_size=None):
"""Returns prediction probabilities for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted probabilities with shape [batch_size, n_classes].
"""
key = prediction_key.PredictionKey.PROBABILITIES
preds = self.predict(
input_fn=input_fn,
batch_size=batch_size,
outputs=[key])
return (pred[key] for pred in preds)
class DebugRegressor(estimator.Estimator):
"""A regressor for TensorFlow Debug models.
Example:
```python
# Build DebugRegressor
regressor = DebugRegressor()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
regressor.fit(input_fn=input_fn_train)
# Evaluate squared-loss between the test and train targets.
loss = regressor.evaluate(input_fn=input_fn_eval)["loss"]
# predict_scores outputs mean value seen during training.
predicted_targets = regressor.predict_scores(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
label_dimension=1,
weight_column_name=None,
config=None,
feature_engineering_fn=None):
"""Initializes a DebugRegressor instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
label_dimension: Number of regression targets per example. This is the
size of the last dimension of the labels and logits `Tensor` objects
(typically, these have shape `[batch_size, label_dimension]`).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
Returns:
A `DebugRegressor` estimator.
"""
params = {
"head":
head_lib.regression_head(
weight_column_name=weight_column_name,
label_dimension=label_dimension,
enable_centered_bias=True)
}
super(DebugRegressor, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def predict_scores(self, input_fn=None, batch_size=None):
"""Returns predicted scores for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted scores.
"""
key = prediction_key.PredictionKey.SCORES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds) | Debug estimators are bias-only estimators that can be used for debugging
and as simple baselines.
Example: | random_line_split |
debug.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debug estimators.
Debug estimators are bias-only estimators that can be used for debugging
and as simple baselines.
Example:
```
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_classes outputs the most commonly seen class in training.
predicted_label = classifier.predict_classes(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def debug_model_fn(features, labels, mode, params, config=None):
"""Model_fn for debug models.
Args:
features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
labels: Labels that are compatible with the `_Head` instance in `params`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters containing:
* head: A `_Head` instance.
config: `RunConfig` object to configure the runtime settings.
Raises:
KeyError: If weight column is specified but not present.
ValueError: If features is an empty dictionary.
Returns:
A `ModelFnOps` instance.
"""
del config # Unused.
features = _get_feature_dict(features)
if not features:
raise ValueError("Features cannot be empty.")
head = params["head"]
size_checks = []
batch_size = None
# The first dimension is assumed to be a batch size and must be consistent
# among all of the features.
for feature in features.values():
first_dim = array_ops.shape(feature)[0]
if batch_size is None:
batch_size = first_dim
else:
size_checks.append(check_ops.assert_equal(batch_size, first_dim))
with ops.control_dependencies(size_checks):
logits = array_ops.zeros([batch_size, head.logits_dimension])
def train_op_fn(loss):
return optimizers.optimize_loss(
loss, global_step=None, learning_rate=0.3, optimizer="Adagrad")
return head.create_model_fn_ops(
features=features,
labels=labels,
mode=mode,
train_op_fn=train_op_fn,
logits=logits)
class DebugClassifier(estimator.Estimator):
"""A classifier for TensorFlow Debug models.
Example:
```python
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_class outputs the most commonly seen class in training.
predicted_label = classifier.predict_class(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
n_classes=2,
weight_column_name=None,
config=None,
feature_engineering_fn=None,
label_keys=None):
"""Initializes a DebugClassifier instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
n_classes: number of label classes. Default is binary classification.
It must be greater than 1. Note: Class labels are integers representing
the class index (i.e. values from 0 to n_classes-1). For arbitrary
label values (e.g. string labels), convert to class indices first.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
label_keys: Optional list of strings with size `[n_classes]` defining the
label vocabulary. Only supported for `n_classes` > 2.
Returns:
A `DebugClassifier` estimator.
Raises:
ValueError: If `n_classes` < 2.
"""
params = {"head":
head_lib.multi_class_head(
n_classes=n_classes,
weight_column_name=weight_column_name,
enable_centered_bias=True,
label_keys=label_keys)}
super(DebugClassifier, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def predict_classes(self, input_fn=None, batch_size=None):
"""Returns predicted classes for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted classes. Each predicted class is represented by
its class index (i.e. integer from 0 to n_classes-1).
"""
key = prediction_key.PredictionKey.CLASSES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds)
def predict_proba(self,
input_fn=None,
batch_size=None):
"""Returns prediction probabilities for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted probabilities with shape [batch_size, n_classes].
"""
key = prediction_key.PredictionKey.PROBABILITIES
preds = self.predict(
input_fn=input_fn,
batch_size=batch_size,
outputs=[key])
return (pred[key] for pred in preds)
class DebugRegressor(estimator.Estimator):
"""A regressor for TensorFlow Debug models.
Example:
```python
# Build DebugRegressor
regressor = DebugRegressor()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
regressor.fit(input_fn=input_fn_train)
# Evaluate squared-loss between the test and train targets.
loss = regressor.evaluate(input_fn=input_fn_eval)["loss"]
# predict_scores outputs mean value seen during training.
predicted_targets = regressor.predict_scores(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
label_dimension=1,
weight_column_name=None,
config=None,
feature_engineering_fn=None):
"""Initializes a DebugRegressor instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
label_dimension: Number of regression targets per example. This is the
size of the last dimension of the labels and logits `Tensor` objects
(typically, these have shape `[batch_size, label_dimension]`).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
Returns:
A `DebugRegressor` estimator.
"""
params = {
"head":
head_lib.regression_head(
weight_column_name=weight_column_name,
label_dimension=label_dimension,
enable_centered_bias=True)
}
super(DebugRegressor, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def predict_scores(self, input_fn=None, batch_size=None):
| """Returns predicted scores for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted scores.
"""
key = prediction_key.PredictionKey.SCORES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds) | identifier_body | |
debug.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debug estimators.
Debug estimators are bias-only estimators that can be used for debugging
and as simple baselines.
Example:
```
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_classes outputs the most commonly seen class in training.
predicted_label = classifier.predict_classes(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def debug_model_fn(features, labels, mode, params, config=None):
"""Model_fn for debug models.
Args:
features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
labels: Labels that are compatible with the `_Head` instance in `params`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
params: A dict of hyperparameters containing:
* head: A `_Head` instance.
config: `RunConfig` object to configure the runtime settings.
Raises:
KeyError: If weight column is specified but not present.
ValueError: If features is an empty dictionary.
Returns:
A `ModelFnOps` instance.
"""
del config # Unused.
features = _get_feature_dict(features)
if not features:
raise ValueError("Features cannot be empty.")
head = params["head"]
size_checks = []
batch_size = None
# The first dimension is assumed to be a batch size and must be consistent
# among all of the features.
for feature in features.values():
first_dim = array_ops.shape(feature)[0]
if batch_size is None:
batch_size = first_dim
else:
size_checks.append(check_ops.assert_equal(batch_size, first_dim))
with ops.control_dependencies(size_checks):
logits = array_ops.zeros([batch_size, head.logits_dimension])
def train_op_fn(loss):
return optimizers.optimize_loss(
loss, global_step=None, learning_rate=0.3, optimizer="Adagrad")
return head.create_model_fn_ops(
features=features,
labels=labels,
mode=mode,
train_op_fn=train_op_fn,
logits=logits)
class DebugClassifier(estimator.Estimator):
"""A classifier for TensorFlow Debug models.
Example:
```python
# Build DebugClassifier
classifier = DebugClassifier()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
classifier.fit(input_fn=input_fn_train)
# Evaluate cross entropy between the test and train labels.
loss = classifier.evaluate(input_fn=input_fn_eval)["loss"]
# predict_class outputs the most commonly seen class in training.
predicted_label = classifier.predict_class(new_samples)
# predict_proba outputs the class distribution from training.
label_distribution = classifier.predict_proba(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
n_classes=2,
weight_column_name=None,
config=None,
feature_engineering_fn=None,
label_keys=None):
"""Initializes a DebugClassifier instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
n_classes: number of label classes. Default is binary classification.
It must be greater than 1. Note: Class labels are integers representing
the class index (i.e. values from 0 to n_classes-1). For arbitrary
label values (e.g. string labels), convert to class indices first.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
label_keys: Optional list of strings with size `[n_classes]` defining the
label vocabulary. Only supported for `n_classes` > 2.
Returns:
A `DebugClassifier` estimator.
Raises:
ValueError: If `n_classes` < 2.
"""
params = {"head":
head_lib.multi_class_head(
n_classes=n_classes,
weight_column_name=weight_column_name,
enable_centered_bias=True,
label_keys=label_keys)}
super(DebugClassifier, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def predict_classes(self, input_fn=None, batch_size=None):
"""Returns predicted classes for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted classes. Each predicted class is represented by
its class index (i.e. integer from 0 to n_classes-1).
"""
key = prediction_key.PredictionKey.CLASSES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds)
def predict_proba(self,
input_fn=None,
batch_size=None):
"""Returns prediction probabilities for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted probabilities with shape [batch_size, n_classes].
"""
key = prediction_key.PredictionKey.PROBABILITIES
preds = self.predict(
input_fn=input_fn,
batch_size=batch_size,
outputs=[key])
return (pred[key] for pred in preds)
class DebugRegressor(estimator.Estimator):
"""A regressor for TensorFlow Debug models.
Example:
```python
# Build DebugRegressor
regressor = DebugRegressor()
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
pass
def input_fn_eval: # returns x, y (where y represents label's class index).
pass
# Fit model.
regressor.fit(input_fn=input_fn_train)
# Evaluate squared-loss between the test and train targets.
loss = regressor.evaluate(input_fn=input_fn_eval)["loss"]
# predict_scores outputs mean value seen during training.
predicted_targets = regressor.predict_scores(new_samples)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
"""
def __init__(self,
model_dir=None,
label_dimension=1,
weight_column_name=None,
config=None,
feature_engineering_fn=None):
"""Initializes a DebugRegressor instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
label_dimension: Number of regression targets per example. This is the
size of the last dimension of the labels and logits `Tensor` objects
(typically, these have shape `[batch_size, label_dimension]`).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns
features and labels which will be fed into the model.
Returns:
A `DebugRegressor` estimator.
"""
params = {
"head":
head_lib.regression_head(
weight_column_name=weight_column_name,
label_dimension=label_dimension,
enable_centered_bias=True)
}
super(DebugRegressor, self).__init__(
model_fn=debug_model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
def | (self, input_fn=None, batch_size=None):
"""Returns predicted scores for given features.
Args:
input_fn: Input function.
batch_size: Override default batch size.
Returns:
An iterable of predicted scores.
"""
key = prediction_key.PredictionKey.SCORES
preds = self.predict(
input_fn=input_fn, batch_size=batch_size, outputs=[key])
return (pred[key] for pred in preds)
| predict_scores | identifier_name |
volumes.rs | use std::path::PathBuf;
use std::collections::BTreeMap;
use quire::validate as V;
use quire::ast::{Ast, Tag};
use libc::{uid_t, gid_t};
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct SnapshotInfo {
pub size: usize,
pub owner_uid: Option<uid_t>,
pub owner_gid: Option<gid_t>,
pub container: Option<String>,
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct PersistentInfo {
pub name: String,
pub owner_uid: uid_t,
pub owner_gid: gid_t,
pub init_command: Option<String>,
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub enum Volume {
Tmpfs(TmpfsInfo),
BindRW(PathBuf),
BindRO(PathBuf),
Empty,
VaggaBin,
Snapshot(SnapshotInfo),
Container(String),
Persistent(PersistentInfo),
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct Dir {
pub mode: u32,
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct TmpfsInfo {
pub size: usize,
pub mode: u32,
pub subdirs: BTreeMap<PathBuf, Dir>,
pub files: BTreeMap<PathBuf, Option<String>>,
}
pub fn volume_validator<'x>() -> V::Enum<'x> {
V::Enum::new()
.option("Tmpfs", V::Structure::new()
.member("size", V::Numeric::new()
.min(0).default(100*1024*1024))
.member("mode", V::Numeric::new()
.min(0).max(0o1777).default(0o1777))
.member("subdirs",
V::Mapping::new( | V::Directory::new().is_absolute(false),
V::Structure::new()
.member("mode", V::Numeric::new()
.min(0).max(0o1777).default(0o755))
))
.member("files",
V::Mapping::new(
V::Directory::new().is_absolute(false),
V::Scalar::new().optional(),
)))
.option("VaggaBin", V::Nothing)
.option("BindRW", V::Scalar::new())
.option("BindRO", V::Scalar::new())
.option("Empty", V::Nothing)
.option("Snapshot", V::Structure::new()
.member("container", V::Scalar::new().optional())
.member("size", V::Numeric::new().min(0).default(100*1024*1024))
.member("owner_uid", V::Numeric::new().min(0).optional())
.member("owner_gid", V::Numeric::new().min(0).optional())
)
.option("Container", V::Scalar::new())
.option("Persistent", V::Structure::new()
.member("name", V::Scalar::new())
.member("init_command", V::Scalar::new().optional())
.member("owner_uid", V::Numeric::new().min(0).default(0))
.member("owner_gid", V::Numeric::new().min(0).default(0))
.parser(persistent_volume_string))
}
fn persistent_volume_string(ast: Ast) -> BTreeMap<String, Ast> {
match ast {
Ast::Scalar(pos, _, style, value) => {
let mut map = BTreeMap::new();
map.insert("name".to_string(),
Ast::Scalar(pos.clone(), Tag::NonSpecific, style, value));
map
},
_ => unreachable!(),
}
} | random_line_split | |
volumes.rs | use std::path::PathBuf;
use std::collections::BTreeMap;
use quire::validate as V;
use quire::ast::{Ast, Tag};
use libc::{uid_t, gid_t};
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct SnapshotInfo {
pub size: usize,
pub owner_uid: Option<uid_t>,
pub owner_gid: Option<gid_t>,
pub container: Option<String>,
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct | {
pub name: String,
pub owner_uid: uid_t,
pub owner_gid: gid_t,
pub init_command: Option<String>,
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub enum Volume {
Tmpfs(TmpfsInfo),
BindRW(PathBuf),
BindRO(PathBuf),
Empty,
VaggaBin,
Snapshot(SnapshotInfo),
Container(String),
Persistent(PersistentInfo),
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct Dir {
pub mode: u32,
}
#[derive(RustcDecodable, Clone, PartialEq, Eq)]
pub struct TmpfsInfo {
pub size: usize,
pub mode: u32,
pub subdirs: BTreeMap<PathBuf, Dir>,
pub files: BTreeMap<PathBuf, Option<String>>,
}
pub fn volume_validator<'x>() -> V::Enum<'x> {
V::Enum::new()
.option("Tmpfs", V::Structure::new()
.member("size", V::Numeric::new()
.min(0).default(100*1024*1024))
.member("mode", V::Numeric::new()
.min(0).max(0o1777).default(0o1777))
.member("subdirs",
V::Mapping::new(
V::Directory::new().is_absolute(false),
V::Structure::new()
.member("mode", V::Numeric::new()
.min(0).max(0o1777).default(0o755))
))
.member("files",
V::Mapping::new(
V::Directory::new().is_absolute(false),
V::Scalar::new().optional(),
)))
.option("VaggaBin", V::Nothing)
.option("BindRW", V::Scalar::new())
.option("BindRO", V::Scalar::new())
.option("Empty", V::Nothing)
.option("Snapshot", V::Structure::new()
.member("container", V::Scalar::new().optional())
.member("size", V::Numeric::new().min(0).default(100*1024*1024))
.member("owner_uid", V::Numeric::new().min(0).optional())
.member("owner_gid", V::Numeric::new().min(0).optional())
)
.option("Container", V::Scalar::new())
.option("Persistent", V::Structure::new()
.member("name", V::Scalar::new())
.member("init_command", V::Scalar::new().optional())
.member("owner_uid", V::Numeric::new().min(0).default(0))
.member("owner_gid", V::Numeric::new().min(0).default(0))
.parser(persistent_volume_string))
}
fn persistent_volume_string(ast: Ast) -> BTreeMap<String, Ast> {
match ast {
Ast::Scalar(pos, _, style, value) => {
let mut map = BTreeMap::new();
map.insert("name".to_string(),
Ast::Scalar(pos.clone(), Tag::NonSpecific, style, value));
map
},
_ => unreachable!(),
}
}
| PersistentInfo | identifier_name |
test.py | #! /usr/bin/env python
import numpy as np
import gc
import matplotlib.pyplot as plt
from random import seed, sample, randint
from ransac import LineModel, ransac
from time import time
random_seed = 0
num_iterations = 100
num_samples = 1000
noise_ratio = 0.8
num_noise = int(noise_ratio * num_samples)
def setup():
global data, model
seed(random_seed)
X = np.asarray(range(num_samples))
Y = 2 * X
noise = [randint(0, 2 * (num_samples - 1)) for i in xrange(num_noise)]
Y[sample(xrange(len(Y)), num_noise)] = noise
data = np.asarray([X, Y]).T
model = LineModel()
plt.plot(X, Y, 'bx')
def run():
global params, residual, mean_time
gc.disable()
start_time = time()
for i in xrange(num_iterations):
try:
(params, inliers, residual) = ransac(data, model, 2, (1 - noise_ratio) * num_samples)
except ValueError:
pass
end_time = time()
mean_time = (end_time - start_time) / num_iterations
gc.enable()
def summary():
if params:
|
else:
print 'RANSAC failed to find a sufficiently good fit for the data.'
plt.show()
if __name__ == '__main__':
setup()
run()
summary()
| print ' Parameters '.center(40, '=')
print params
print ' Residual '.center(40, '=')
print residual
print ' Time '.center(40, '=')
print '%.1f msecs mean time spent per call' % (1000 * mean_time)
X = np.asarray([0, num_samples - 1])
Y = params[0] * X + params[1]
plt.plot(X, Y, 'k-') | conditional_block |
test.py | #! /usr/bin/env python
import numpy as np
import gc
import matplotlib.pyplot as plt
from random import seed, sample, randint
from ransac import LineModel, ransac
from time import time
random_seed = 0
num_iterations = 100
num_samples = 1000
noise_ratio = 0.8
num_noise = int(noise_ratio * num_samples)
def setup():
|
def run():
global params, residual, mean_time
gc.disable()
start_time = time()
for i in xrange(num_iterations):
try:
(params, inliers, residual) = ransac(data, model, 2, (1 - noise_ratio) * num_samples)
except ValueError:
pass
end_time = time()
mean_time = (end_time - start_time) / num_iterations
gc.enable()
def summary():
if params:
print ' Parameters '.center(40, '=')
print params
print ' Residual '.center(40, '=')
print residual
print ' Time '.center(40, '=')
print '%.1f msecs mean time spent per call' % (1000 * mean_time)
X = np.asarray([0, num_samples - 1])
Y = params[0] * X + params[1]
plt.plot(X, Y, 'k-')
else:
print 'RANSAC failed to find a sufficiently good fit for the data.'
plt.show()
if __name__ == '__main__':
setup()
run()
summary()
| global data, model
seed(random_seed)
X = np.asarray(range(num_samples))
Y = 2 * X
noise = [randint(0, 2 * (num_samples - 1)) for i in xrange(num_noise)]
Y[sample(xrange(len(Y)), num_noise)] = noise
data = np.asarray([X, Y]).T
model = LineModel()
plt.plot(X, Y, 'bx') | identifier_body |
test.py | #! /usr/bin/env python
import numpy as np
import gc
import matplotlib.pyplot as plt
from random import seed, sample, randint
from ransac import LineModel, ransac
from time import time
random_seed = 0
num_iterations = 100
num_samples = 1000
noise_ratio = 0.8
num_noise = int(noise_ratio * num_samples)
def setup():
global data, model
seed(random_seed)
X = np.asarray(range(num_samples))
Y = 2 * X
noise = [randint(0, 2 * (num_samples - 1)) for i in xrange(num_noise)]
Y[sample(xrange(len(Y)), num_noise)] = noise
data = np.asarray([X, Y]).T
model = LineModel()
plt.plot(X, Y, 'bx')
| try:
(params, inliers, residual) = ransac(data, model, 2, (1 - noise_ratio) * num_samples)
except ValueError:
pass
end_time = time()
mean_time = (end_time - start_time) / num_iterations
gc.enable()
def summary():
if params:
print ' Parameters '.center(40, '=')
print params
print ' Residual '.center(40, '=')
print residual
print ' Time '.center(40, '=')
print '%.1f msecs mean time spent per call' % (1000 * mean_time)
X = np.asarray([0, num_samples - 1])
Y = params[0] * X + params[1]
plt.plot(X, Y, 'k-')
else:
print 'RANSAC failed to find a sufficiently good fit for the data.'
plt.show()
if __name__ == '__main__':
setup()
run()
summary() | def run():
global params, residual, mean_time
gc.disable()
start_time = time()
for i in xrange(num_iterations): | random_line_split |
test.py | #! /usr/bin/env python
import numpy as np
import gc
import matplotlib.pyplot as plt
from random import seed, sample, randint
from ransac import LineModel, ransac
from time import time
random_seed = 0
num_iterations = 100
num_samples = 1000
noise_ratio = 0.8
num_noise = int(noise_ratio * num_samples)
def setup():
global data, model
seed(random_seed)
X = np.asarray(range(num_samples))
Y = 2 * X
noise = [randint(0, 2 * (num_samples - 1)) for i in xrange(num_noise)]
Y[sample(xrange(len(Y)), num_noise)] = noise
data = np.asarray([X, Y]).T
model = LineModel()
plt.plot(X, Y, 'bx')
def run():
global params, residual, mean_time
gc.disable()
start_time = time()
for i in xrange(num_iterations):
try:
(params, inliers, residual) = ransac(data, model, 2, (1 - noise_ratio) * num_samples)
except ValueError:
pass
end_time = time()
mean_time = (end_time - start_time) / num_iterations
gc.enable()
def | ():
if params:
print ' Parameters '.center(40, '=')
print params
print ' Residual '.center(40, '=')
print residual
print ' Time '.center(40, '=')
print '%.1f msecs mean time spent per call' % (1000 * mean_time)
X = np.asarray([0, num_samples - 1])
Y = params[0] * X + params[1]
plt.plot(X, Y, 'k-')
else:
print 'RANSAC failed to find a sufficiently good fit for the data.'
plt.show()
if __name__ == '__main__':
setup()
run()
summary()
| summary | identifier_name |
exceptions.py | # Copyright (C) 2015 by Per Unneberg
class NotInstalledError(Exception):
"""Error thrown if program/command/application cannot be found in path
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code
class SamplesException(Exception):
"""Error thrown if samples missing or wrong number.
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code
class OutputFilesException(Exception):
"""Error thrown if outputfiles missing or wrong number.
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def | (self, msg, code=2):
self.msg = msg
self.code = code
| __init__ | identifier_name |
exceptions.py | # Copyright (C) 2015 by Per Unneberg
class NotInstalledError(Exception):
|
class SamplesException(Exception):
"""Error thrown if samples missing or wrong number.
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code
class OutputFilesException(Exception):
"""Error thrown if outputfiles missing or wrong number.
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code
| """Error thrown if program/command/application cannot be found in path
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code | identifier_body |
exceptions.py | # Copyright (C) 2015 by Per Unneberg
class NotInstalledError(Exception):
"""Error thrown if program/command/application cannot be found in path
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code
class SamplesException(Exception):
"""Error thrown if samples missing or wrong number.
Args:
msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code
class OutputFilesException(Exception): | msg (str): String described by exception
code (int, optional): Error code, defaults to 2.
"""
def __init__(self, msg, code=2):
self.msg = msg
self.code = code | """Error thrown if outputfiles missing or wrong number.
Args: | random_line_split |
simulation.py | # Copyright (C) 2015 SensorLab, Jozef Stefan Institute http://sensorlab.ijs.si
#
# Written by Tomaz Solc, tomaz.solc@ijs.si
#
# This work has been partially funded by the European Community through the
# 7th Framework Programme project CREW (FP7-ICT-2009-258301).
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/
import logging
import Queue
import random
import time
from spectrumwars.testbed import TestbedBase, RadioBase, RadioTimeout, RadioError, TestbedError, RadioPacket
log = logging.getLogger(__name__)
class Radio(RadioBase):
RECEIVE_TIMEOUT = 2.
def __init__(self, addr, dispatcher, send_delay):
super(Radio, self).__init__()
self.addr = addr
self.neighbor = None
self.dispatcher = dispatcher
self.q = Queue.Queue()
self.frequency = 0
self.bandwidth = 0
self.send_delay = send_delay
def _recv(self, addr, bindata, frequency, bandwidth):
if self.frequency == frequency and self.bandwidth == bandwidth and self.addr == addr:
self.q.put(bindata)
def set_configuration(self, frequency, bandwidth, power):
self.frequency = frequency
self.bandwidth = bandwidth
def binsend(self, bindata):
self.dispatcher(self.neighbor, bindata, self.frequency, self.bandwidth)
time.sleep(self.send_delay)
def binrecv(self, timeout=None):
if timeout is None:
timeout = self.RECEIVE_TIMEOUT
try:
bindata = self.q.get(True, timeout)
except Queue.Empty:
raise RadioTimeout
else:
return bindata
class Testbed(TestbedBase):
RADIO_CLASS = Radio
def __init__(self, send_delay=.1, frequency_range=64, bandwidth_range=10, power_range=10, packet_size=1024):
self.send_delay = float(send_delay)
self.frequency_range = int(frequency_range)
self.bandwidth_range = int(bandwidth_range)
self.power_range = int(power_range)
self.RADIO_CLASS.PACKET_SIZE = int(packet_size) + 1
self.radios = []
# for each channel, we keep the timestamp of the last
# transmission. we use this for simulated spectrum sensing and
# for detecting collisions.
self.channels = [0] * self.frequency_range
self.i = 0
def _get_radio(self):
r = Radio(self.i, self._dispatcher, self.send_delay)
self.radios.append(r)
self.i += 1
return r
def _dispatcher(self, addr, bindata, frequency, bandwidth):
now = self.time()
has_collision = (now - self.channels[frequency]) > self.send_delay
self.channels[frequency] = now
if has_collision:
# note that when packets collide, the first one goes
# through while the later ones fail. this is different
# than in reality: all should fail. But this would
# be complicated to implement in practice.
for radio in self.radios:
radio._recv(addr, bindata, frequency, bandwidth)
else:
log.debug("packet collision detected on channel %d" % (frequency,))
def get_radio_pair(self):
dst = self._get_radio()
src = self._get_radio()
dst.neighbor = src.addr
src.neighbor = dst.addr
return dst, src
def get_spectrum(self):
spectrum = []
now = self.time()
for time in self.channels:
if now - time < .5:
p = random.randint(-40, -20)
else:
p = random.randint(-90, -80)
spectrum.append(p)
return tuple(spectrum)
def get_frequency_range(self):
return self.frequency_range
def get_bandwidth_range(self):
return self.bandwidth_range
def | (self):
return self.power_range
| get_power_range | identifier_name |
simulation.py | # Copyright (C) 2015 SensorLab, Jozef Stefan Institute http://sensorlab.ijs.si
#
# Written by Tomaz Solc, tomaz.solc@ijs.si
#
# This work has been partially funded by the European Community through the
# 7th Framework Programme project CREW (FP7-ICT-2009-258301).
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/
import logging
import Queue
import random
import time
from spectrumwars.testbed import TestbedBase, RadioBase, RadioTimeout, RadioError, TestbedError, RadioPacket
log = logging.getLogger(__name__)
class Radio(RadioBase):
RECEIVE_TIMEOUT = 2.
def __init__(self, addr, dispatcher, send_delay):
super(Radio, self).__init__()
self.addr = addr
self.neighbor = None
self.dispatcher = dispatcher
self.q = Queue.Queue()
self.frequency = 0
self.bandwidth = 0
self.send_delay = send_delay
def _recv(self, addr, bindata, frequency, bandwidth):
if self.frequency == frequency and self.bandwidth == bandwidth and self.addr == addr:
self.q.put(bindata)
def set_configuration(self, frequency, bandwidth, power):
self.frequency = frequency
self.bandwidth = bandwidth
def binsend(self, bindata):
self.dispatcher(self.neighbor, bindata, self.frequency, self.bandwidth)
time.sleep(self.send_delay)
def binrecv(self, timeout=None):
if timeout is None:
timeout = self.RECEIVE_TIMEOUT
try:
bindata = self.q.get(True, timeout)
except Queue.Empty:
raise RadioTimeout
else:
return bindata
class Testbed(TestbedBase):
RADIO_CLASS = Radio
def __init__(self, send_delay=.1, frequency_range=64, bandwidth_range=10, power_range=10, packet_size=1024):
self.send_delay = float(send_delay)
self.frequency_range = int(frequency_range)
self.bandwidth_range = int(bandwidth_range)
self.power_range = int(power_range)
self.RADIO_CLASS.PACKET_SIZE = int(packet_size) + 1
self.radios = []
# for each channel, we keep the timestamp of the last
# transmission. we use this for simulated spectrum sensing and
# for detecting collisions.
self.channels = [0] * self.frequency_range
self.i = 0
def _get_radio(self):
r = Radio(self.i, self._dispatcher, self.send_delay)
self.radios.append(r)
self.i += 1
return r
def _dispatcher(self, addr, bindata, frequency, bandwidth):
now = self.time()
has_collision = (now - self.channels[frequency]) > self.send_delay
self.channels[frequency] = now
if has_collision:
# note that when packets collide, the first one goes
# through while the later ones fail. this is different
# than in reality: all should fail. But this would
# be complicated to implement in practice.
for radio in self.radios:
radio._recv(addr, bindata, frequency, bandwidth)
else:
log.debug("packet collision detected on channel %d" % (frequency,))
def get_radio_pair(self):
dst = self._get_radio()
src = self._get_radio()
dst.neighbor = src.addr
src.neighbor = dst.addr
return dst, src
def get_spectrum(self):
spectrum = []
now = self.time()
for time in self.channels:
if now - time < .5:
p = random.randint(-40, -20)
else:
p = random.randint(-90, -80)
spectrum.append(p)
return tuple(spectrum)
def get_frequency_range(self):
return self.frequency_range
| def get_bandwidth_range(self):
return self.bandwidth_range
def get_power_range(self):
return self.power_range | random_line_split | |
simulation.py | # Copyright (C) 2015 SensorLab, Jozef Stefan Institute http://sensorlab.ijs.si
#
# Written by Tomaz Solc, tomaz.solc@ijs.si
#
# This work has been partially funded by the European Community through the
# 7th Framework Programme project CREW (FP7-ICT-2009-258301).
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/
import logging
import Queue
import random
import time
from spectrumwars.testbed import TestbedBase, RadioBase, RadioTimeout, RadioError, TestbedError, RadioPacket
log = logging.getLogger(__name__)
class Radio(RadioBase):
|
class Testbed(TestbedBase):
RADIO_CLASS = Radio
def __init__(self, send_delay=.1, frequency_range=64, bandwidth_range=10, power_range=10, packet_size=1024):
self.send_delay = float(send_delay)
self.frequency_range = int(frequency_range)
self.bandwidth_range = int(bandwidth_range)
self.power_range = int(power_range)
self.RADIO_CLASS.PACKET_SIZE = int(packet_size) + 1
self.radios = []
# for each channel, we keep the timestamp of the last
# transmission. we use this for simulated spectrum sensing and
# for detecting collisions.
self.channels = [0] * self.frequency_range
self.i = 0
def _get_radio(self):
r = Radio(self.i, self._dispatcher, self.send_delay)
self.radios.append(r)
self.i += 1
return r
def _dispatcher(self, addr, bindata, frequency, bandwidth):
now = self.time()
has_collision = (now - self.channels[frequency]) > self.send_delay
self.channels[frequency] = now
if has_collision:
# note that when packets collide, the first one goes
# through while the later ones fail. this is different
# than in reality: all should fail. But this would
# be complicated to implement in practice.
for radio in self.radios:
radio._recv(addr, bindata, frequency, bandwidth)
else:
log.debug("packet collision detected on channel %d" % (frequency,))
def get_radio_pair(self):
dst = self._get_radio()
src = self._get_radio()
dst.neighbor = src.addr
src.neighbor = dst.addr
return dst, src
def get_spectrum(self):
spectrum = []
now = self.time()
for time in self.channels:
if now - time < .5:
p = random.randint(-40, -20)
else:
p = random.randint(-90, -80)
spectrum.append(p)
return tuple(spectrum)
def get_frequency_range(self):
return self.frequency_range
def get_bandwidth_range(self):
return self.bandwidth_range
def get_power_range(self):
return self.power_range
| RECEIVE_TIMEOUT = 2.
def __init__(self, addr, dispatcher, send_delay):
super(Radio, self).__init__()
self.addr = addr
self.neighbor = None
self.dispatcher = dispatcher
self.q = Queue.Queue()
self.frequency = 0
self.bandwidth = 0
self.send_delay = send_delay
def _recv(self, addr, bindata, frequency, bandwidth):
if self.frequency == frequency and self.bandwidth == bandwidth and self.addr == addr:
self.q.put(bindata)
def set_configuration(self, frequency, bandwidth, power):
self.frequency = frequency
self.bandwidth = bandwidth
def binsend(self, bindata):
self.dispatcher(self.neighbor, bindata, self.frequency, self.bandwidth)
time.sleep(self.send_delay)
def binrecv(self, timeout=None):
if timeout is None:
timeout = self.RECEIVE_TIMEOUT
try:
bindata = self.q.get(True, timeout)
except Queue.Empty:
raise RadioTimeout
else:
return bindata | identifier_body |
simulation.py | # Copyright (C) 2015 SensorLab, Jozef Stefan Institute http://sensorlab.ijs.si
#
# Written by Tomaz Solc, tomaz.solc@ijs.si
#
# This work has been partially funded by the European Community through the
# 7th Framework Programme project CREW (FP7-ICT-2009-258301).
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/
import logging
import Queue
import random
import time
from spectrumwars.testbed import TestbedBase, RadioBase, RadioTimeout, RadioError, TestbedError, RadioPacket
log = logging.getLogger(__name__)
class Radio(RadioBase):
RECEIVE_TIMEOUT = 2.
def __init__(self, addr, dispatcher, send_delay):
super(Radio, self).__init__()
self.addr = addr
self.neighbor = None
self.dispatcher = dispatcher
self.q = Queue.Queue()
self.frequency = 0
self.bandwidth = 0
self.send_delay = send_delay
def _recv(self, addr, bindata, frequency, bandwidth):
if self.frequency == frequency and self.bandwidth == bandwidth and self.addr == addr:
self.q.put(bindata)
def set_configuration(self, frequency, bandwidth, power):
self.frequency = frequency
self.bandwidth = bandwidth
def binsend(self, bindata):
self.dispatcher(self.neighbor, bindata, self.frequency, self.bandwidth)
time.sleep(self.send_delay)
def binrecv(self, timeout=None):
if timeout is None:
|
try:
bindata = self.q.get(True, timeout)
except Queue.Empty:
raise RadioTimeout
else:
return bindata
class Testbed(TestbedBase):
RADIO_CLASS = Radio
def __init__(self, send_delay=.1, frequency_range=64, bandwidth_range=10, power_range=10, packet_size=1024):
self.send_delay = float(send_delay)
self.frequency_range = int(frequency_range)
self.bandwidth_range = int(bandwidth_range)
self.power_range = int(power_range)
self.RADIO_CLASS.PACKET_SIZE = int(packet_size) + 1
self.radios = []
# for each channel, we keep the timestamp of the last
# transmission. we use this for simulated spectrum sensing and
# for detecting collisions.
self.channels = [0] * self.frequency_range
self.i = 0
def _get_radio(self):
r = Radio(self.i, self._dispatcher, self.send_delay)
self.radios.append(r)
self.i += 1
return r
def _dispatcher(self, addr, bindata, frequency, bandwidth):
now = self.time()
has_collision = (now - self.channels[frequency]) > self.send_delay
self.channels[frequency] = now
if has_collision:
# note that when packets collide, the first one goes
# through while the later ones fail. this is different
# than in reality: all should fail. But this would
# be complicated to implement in practice.
for radio in self.radios:
radio._recv(addr, bindata, frequency, bandwidth)
else:
log.debug("packet collision detected on channel %d" % (frequency,))
def get_radio_pair(self):
dst = self._get_radio()
src = self._get_radio()
dst.neighbor = src.addr
src.neighbor = dst.addr
return dst, src
def get_spectrum(self):
spectrum = []
now = self.time()
for time in self.channels:
if now - time < .5:
p = random.randint(-40, -20)
else:
p = random.randint(-90, -80)
spectrum.append(p)
return tuple(spectrum)
def get_frequency_range(self):
return self.frequency_range
def get_bandwidth_range(self):
return self.bandwidth_range
def get_power_range(self):
return self.power_range
| timeout = self.RECEIVE_TIMEOUT | conditional_block |
Window.js | "use strict";
var CSSStyleDeclaration = require("cssstyle").CSSStyleDeclaration;
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var notImplemented = require("./not-implemented");
var History = require("./history");
var VirtualConsole = require("../virtual-console");
var define = require("../utils").define;
var inherits = require("../utils").inheritFrom;
var resolveHref = require("../utils").resolveHref;
var EventTarget = require("../living/generated/events/EventTarget");
var namedPropertiesWindow = require("../living/named-properties-window");
var cssom = require("cssom");
var postMessage = require("../living/post-message");
const DOMException = require("../web-idl/DOMException");
const btoa = require("../../base64").btoa;
const atob = require("../../base64").atob;
const idlUtils = require("../living/generated/util");
// NB: the require() must be after assigning `module.export` because this require() is circular
module.exports = Window;
var dom = require("../living");
var cssSelectorSplitRE = /((?:[^,"']|"[^"]*"|'[^']*')+)/;
var defaultStyleSheet = cssom.parse(require("./default-stylesheet"));
dom.Window = Window;
// NOTE: per https://heycam.github.io/webidl/#Global, all properties on the Window object must be own-properties.
// That is why we assign everything inside of the constructor, instead of using a shared prototype.
// You can verify this in e.g. Firefox or Internet Explorer, which do a good job with Web IDL compliance.
function Window(options) {
EventTarget.setup(this);
var window = this;
///// INTERFACES FROM THE DOM
// TODO: consider a mode of some sort where these are not shared between all DOM instances
// It'd be very memory-expensive in most cases, though.
define(window, dom);
///// PRIVATE DATA PROPERTIES
// vm initialization is defered until script processing is activated (in level1/core)
this._globalProxy = this;
this.__timers = [];
// List options explicitly to be clear which are passed through
this._document = new dom.HTMLDocument({
parsingMode: options.parsingMode,
contentType: options.contentType,
cookieJar: options.cookieJar,
parser: options.parser,
url: options.url,
referrer: options.referrer,
cookie: options.cookie,
deferClose: options.deferClose,
resourceLoader: options.resourceLoader,
concurrentNodeIterators: options.concurrentNodeIterators,
defaultView: this._globalProxy,
global: this
});
// Set up the window as if it's a top level window.
// If it's not, then references will be corrected by frame/iframe code.
this._parent = this._top = this._globalProxy;
// This implements window.frames.length, since window.frames returns a
// self reference to the window object. This value is incremented in the
// HTMLFrameElement init function (see: level2/html.js).
this._length = 0;
if (options.virtualConsole) {
if (options.virtualConsole instanceof VirtualConsole) {
this._virtualConsole = options.virtualConsole;
} else {
throw new TypeError(
"options.virtualConsole must be a VirtualConsole (from createVirtualConsole)");
}
} else {
this._virtualConsole = new VirtualConsole();
}
///// GETTERS
define(this, {
get length() {
return window._length;
},
get window() { | },
get frames() {
return window._globalProxy;
},
get self() {
return window._globalProxy;
},
get parent() {
return window._parent;
},
get top() {
return window._top;
},
get document() {
return window._document;
},
get location() {
return window._document._location;
}
});
namedPropertiesWindow.initializeWindow(this, dom.HTMLCollection);
///// METHODS for [ImplicitThis] hack
// See https://lists.w3.org/Archives/Public/public-script-coord/2015JanMar/0109.html
this.addEventListener = this.addEventListener.bind(this);
this.removeEventListener = this.removeEventListener.bind(this);
this.dispatchEvent = this.dispatchEvent.bind(this);
///// METHODS
this.setTimeout = function (fn, ms) {
return startTimer(window, setTimeout, clearTimeout, fn, ms);
};
this.setInterval = function (fn, ms) {
return startTimer(window, setInterval, clearInterval, fn, ms);
};
this.clearInterval = stopTimer.bind(this, window);
this.clearTimeout = stopTimer.bind(this, window);
this.__stopAllTimers = stopAllTimers.bind(this, window);
this.Image = function (width, height) {
var element = window._document.createElement("img");
element.width = width;
element.height = height;
return element;
};
function wrapConsoleMethod(method) {
return function () {
var args = Array.prototype.slice.call(arguments);
window._virtualConsole.emit.apply(window._virtualConsole, [method].concat(args));
};
}
this.postMessage = postMessage;
this.atob = function (str) {
const result = atob(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.btoa = function (str) {
const result = btoa(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.XMLHttpRequest = function () {
var xhr = new XMLHttpRequest();
var lastUrl = "";
xhr._open = xhr.open;
xhr.open = function (method, url, async, user, password) {
lastUrl = fixUrlForBuggyXhr(resolveHref(window.document.URL, url));
return xhr._open(method, lastUrl, async, user, password);
};
xhr._getAllResponseHeaders = xhr.getAllResponseHeaders;
xhr.getAllResponseHeaders = function () {
if (lastUrl.startsWith("file:")) {
// Monkey patch this function for files. The node-xhr module will crash for file URLs.
return null;
}
return xhr._getAllResponseHeaders();
};
xhr._send = xhr.send;
xhr.send = function (data) {
var cookieJar = window.document._cookieJar;
var cookieStr = cookieJar.getCookieStringSync(lastUrl, {http: true});
if (cookieStr) {
xhr.setDisableHeaderCheck(true);
xhr.setRequestHeader("cookie", cookieStr);
xhr.setDisableHeaderCheck(false);
}
function setReceivedCookies() {
if (xhr.readyState === xhr.HEADERS_RECEIVED) {
var receivedCookies = xhr.getResponseHeader("set-cookie");
if (receivedCookies) {
receivedCookies = Array.isArray(receivedCookies) ? receivedCookies : [receivedCookies];
receivedCookies.forEach(function (cookieStr) {
cookieJar.setCookieSync(cookieStr, lastUrl, {
http: true,
ignoreError: true
});
});
}
xhr.removeEventListener("readystatechange", setReceivedCookies);
}
}
xhr.addEventListener("readystatechange", setReceivedCookies);
return xhr._send(data);
};
Object.defineProperty(xhr, "response", {
get: function () {
if (this.responseType === "text" || !this.responseType) {
// Spec says "text" or "", but responseType support is incomplete, so we need to catch more cases.
return this.responseText;
} else if (this.responseType === "json") {
return JSON.parse(this.responseText);
} else {
return null; // emulate failed request
}
},
enumerable: true,
configurable: true
});
return xhr;
};
this.close = function () {
// Recursively close child frame windows, then ourselves.
var currentWindow = this;
(function windowCleaner(window) {
var i;
// We could call window.frames.length etc, but window.frames just points
// back to window.
if (window.length > 0) {
for (i = 0; i < window.length; i++) {
windowCleaner(window[i]);
}
}
// We"re already in our own window.close().
if (window !== currentWindow) {
window.close();
}
})(this);
// Clear out all listeners. Any in-flight or upcoming events should not get delivered.
idlUtils.implForWrapper(this, "EventTarget")._events = Object.create(null);
if (this._document) {
if (this._document.body) {
this._document.body.innerHTML = "";
}
if (this._document.close) {
// It's especially important to clear out the listeners here because document.close() causes a "load" event to
// fire.
this._document._listeners = Object.create(null);
this._document.close();
}
delete this._document;
}
stopAllTimers(currentWindow);
};
this.getComputedStyle = function (node) {
var s = node.style;
var cs = new CSSStyleDeclaration();
var forEach = Array.prototype.forEach;
function setPropertiesFromRule(rule) {
if (!rule.selectorText) {
return;
}
var selectors = rule.selectorText.split(cssSelectorSplitRE);
var matched = false;
selectors.forEach(function (selectorText) {
if (selectorText !== "" && selectorText !== "," && !matched && matchesDontThrow(node, selectorText)) {
matched = true;
forEach.call(rule.style, function (property) {
cs.setProperty(property, rule.style.getPropertyValue(property), rule.style.getPropertyPriority(property));
});
}
});
}
function readStylesFromStyleSheet(sheet) {
forEach.call(sheet.cssRules, function (rule) {
if (rule.media) {
if (Array.prototype.indexOf.call(rule.media, "screen") !== -1) {
forEach.call(rule.cssRules, setPropertiesFromRule);
}
} else {
setPropertiesFromRule(rule);
}
});
}
readStylesFromStyleSheet(defaultStyleSheet);
forEach.call(node.ownerDocument.styleSheets, readStylesFromStyleSheet);
forEach.call(s, function (property) {
cs.setProperty(property, s.getPropertyValue(property), s.getPropertyPriority(property));
});
return cs;
};
///// PUBLIC DATA PROPERTIES (TODO: should be getters)
this.history = new History(this);
this.console = {
assert: wrapConsoleMethod("assert"),
clear: wrapConsoleMethod("clear"),
count: wrapConsoleMethod("count"),
debug: wrapConsoleMethod("debug"),
error: wrapConsoleMethod("error"),
group: wrapConsoleMethod("group"),
groupCollapse: wrapConsoleMethod("groupCollapse"),
groupEnd: wrapConsoleMethod("groupEnd"),
info: wrapConsoleMethod("info"),
log: wrapConsoleMethod("log"),
table: wrapConsoleMethod("table"),
time: wrapConsoleMethod("time"),
timeEnd: wrapConsoleMethod("timeEnd"),
trace: wrapConsoleMethod("trace"),
warn: wrapConsoleMethod("warn")
};
function notImplementedMethod(name) {
return function () {
notImplemented(name, window);
};
}
define(this, {
navigator: {
get userAgent() { return "Node.js (" + process.platform + "; U; rv:" + process.version + ")"; },
get appName() { return "Node.js jsDom"; },
get platform() { return process.platform; },
get appVersion() { return process.version; },
noUI: true,
get cookieEnabled() { return true; }
},
name: "nodejs",
innerWidth: 1024,
innerHeight: 768,
outerWidth: 1024,
outerHeight: 768,
pageXOffset: 0,
pageYOffset: 0,
screenX: 0,
screenY: 0,
screenLeft: 0,
screenTop: 0,
scrollX: 0,
scrollY: 0,
scrollTop: 0,
scrollLeft: 0,
screen: {
width: 0,
height: 0
},
alert: notImplementedMethod("window.alert"),
blur: notImplementedMethod("window.blur"),
confirm: notImplementedMethod("window.confirm"),
createPopup: notImplementedMethod("window.createPopup"),
focus: notImplementedMethod("window.focus"),
moveBy: notImplementedMethod("window.moveBy"),
moveTo: notImplementedMethod("window.moveTo"),
open: notImplementedMethod("window.open"),
print: notImplementedMethod("window.print"),
prompt: notImplementedMethod("window.prompt"),
resizeBy: notImplementedMethod("window.resizeBy"),
resizeTo: notImplementedMethod("window.resizeTo"),
scroll: notImplementedMethod("window.scroll"),
scrollBy: notImplementedMethod("window.scrollBy"),
scrollTo: notImplementedMethod("window.scrollTo")
});
///// INITIALIZATION
process.nextTick(function () {
if (!window.document) {
return; // window might've been closed already
}
var ev = window.document.createEvent("HTMLEvents");
ev.initEvent("load", false, false);
if (window.document.readyState === "complete") {
window.dispatchEvent(ev);
} else {
window.document.addEventListener("load", function (ev) {
window.dispatchEvent(ev);
});
}
});
}
inherits(EventTarget.interface, Window, EventTarget.interface.prototype);
function matchesDontThrow(el, selector) {
try {
return el.matches(selector);
} catch (e) {
return false;
}
}
function startTimer(window, startFn, stopFn, callback, ms) {
var res = startFn(callback, ms);
window.__timers.push([res, stopFn]);
return res;
}
function stopTimer(window, id) {
if (typeof id === "undefined") {
return;
}
for (var i in window.__timers) {
if (window.__timers[i][0] === id) {
window.__timers[i][1].call(window, id);
window.__timers.splice(i, 1);
break;
}
}
}
function stopAllTimers(window) {
window.__timers.forEach(function (t) {
t[1].call(window, t[0]);
});
window.__timers = [];
}
function fixUrlForBuggyXhr(url) {
// node-XMLHttpRequest doesn't properly handle file URLs. It only accepts file://C:/..., not file:///C:/...
// See https://github.com/tmpvar/jsdom/pull/1180
return url.replace(/^file:\/\/\/([a-zA-Z]:)/, "file://$1");
} | return window._globalProxy; | random_line_split |
Window.js | "use strict";
var CSSStyleDeclaration = require("cssstyle").CSSStyleDeclaration;
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var notImplemented = require("./not-implemented");
var History = require("./history");
var VirtualConsole = require("../virtual-console");
var define = require("../utils").define;
var inherits = require("../utils").inheritFrom;
var resolveHref = require("../utils").resolveHref;
var EventTarget = require("../living/generated/events/EventTarget");
var namedPropertiesWindow = require("../living/named-properties-window");
var cssom = require("cssom");
var postMessage = require("../living/post-message");
const DOMException = require("../web-idl/DOMException");
const btoa = require("../../base64").btoa;
const atob = require("../../base64").atob;
const idlUtils = require("../living/generated/util");
// NB: the require() must be after assigning `module.export` because this require() is circular
module.exports = Window;
var dom = require("../living");
var cssSelectorSplitRE = /((?:[^,"']|"[^"]*"|'[^']*')+)/;
var defaultStyleSheet = cssom.parse(require("./default-stylesheet"));
dom.Window = Window;
// NOTE: per https://heycam.github.io/webidl/#Global, all properties on the Window object must be own-properties.
// That is why we assign everything inside of the constructor, instead of using a shared prototype.
// You can verify this in e.g. Firefox or Internet Explorer, which do a good job with Web IDL compliance.
function Window(options) {
EventTarget.setup(this);
var window = this;
///// INTERFACES FROM THE DOM
// TODO: consider a mode of some sort where these are not shared between all DOM instances
// It'd be very memory-expensive in most cases, though.
define(window, dom);
///// PRIVATE DATA PROPERTIES
// vm initialization is defered until script processing is activated (in level1/core)
this._globalProxy = this;
this.__timers = [];
// List options explicitly to be clear which are passed through
this._document = new dom.HTMLDocument({
parsingMode: options.parsingMode,
contentType: options.contentType,
cookieJar: options.cookieJar,
parser: options.parser,
url: options.url,
referrer: options.referrer,
cookie: options.cookie,
deferClose: options.deferClose,
resourceLoader: options.resourceLoader,
concurrentNodeIterators: options.concurrentNodeIterators,
defaultView: this._globalProxy,
global: this
});
// Set up the window as if it's a top level window.
// If it's not, then references will be corrected by frame/iframe code.
this._parent = this._top = this._globalProxy;
// This implements window.frames.length, since window.frames returns a
// self reference to the window object. This value is incremented in the
// HTMLFrameElement init function (see: level2/html.js).
this._length = 0;
if (options.virtualConsole) {
if (options.virtualConsole instanceof VirtualConsole) {
this._virtualConsole = options.virtualConsole;
} else {
throw new TypeError(
"options.virtualConsole must be a VirtualConsole (from createVirtualConsole)");
}
} else {
this._virtualConsole = new VirtualConsole();
}
///// GETTERS
define(this, {
get length() {
return window._length;
},
get window() {
return window._globalProxy;
},
get frames() {
return window._globalProxy;
},
get self() {
return window._globalProxy;
},
get | () {
return window._parent;
},
get top() {
return window._top;
},
get document() {
return window._document;
},
get location() {
return window._document._location;
}
});
namedPropertiesWindow.initializeWindow(this, dom.HTMLCollection);
///// METHODS for [ImplicitThis] hack
// See https://lists.w3.org/Archives/Public/public-script-coord/2015JanMar/0109.html
this.addEventListener = this.addEventListener.bind(this);
this.removeEventListener = this.removeEventListener.bind(this);
this.dispatchEvent = this.dispatchEvent.bind(this);
///// METHODS
this.setTimeout = function (fn, ms) {
return startTimer(window, setTimeout, clearTimeout, fn, ms);
};
this.setInterval = function (fn, ms) {
return startTimer(window, setInterval, clearInterval, fn, ms);
};
this.clearInterval = stopTimer.bind(this, window);
this.clearTimeout = stopTimer.bind(this, window);
this.__stopAllTimers = stopAllTimers.bind(this, window);
this.Image = function (width, height) {
var element = window._document.createElement("img");
element.width = width;
element.height = height;
return element;
};
function wrapConsoleMethod(method) {
return function () {
var args = Array.prototype.slice.call(arguments);
window._virtualConsole.emit.apply(window._virtualConsole, [method].concat(args));
};
}
this.postMessage = postMessage;
this.atob = function (str) {
const result = atob(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.btoa = function (str) {
const result = btoa(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.XMLHttpRequest = function () {
var xhr = new XMLHttpRequest();
var lastUrl = "";
xhr._open = xhr.open;
xhr.open = function (method, url, async, user, password) {
lastUrl = fixUrlForBuggyXhr(resolveHref(window.document.URL, url));
return xhr._open(method, lastUrl, async, user, password);
};
xhr._getAllResponseHeaders = xhr.getAllResponseHeaders;
xhr.getAllResponseHeaders = function () {
if (lastUrl.startsWith("file:")) {
// Monkey patch this function for files. The node-xhr module will crash for file URLs.
return null;
}
return xhr._getAllResponseHeaders();
};
xhr._send = xhr.send;
xhr.send = function (data) {
var cookieJar = window.document._cookieJar;
var cookieStr = cookieJar.getCookieStringSync(lastUrl, {http: true});
if (cookieStr) {
xhr.setDisableHeaderCheck(true);
xhr.setRequestHeader("cookie", cookieStr);
xhr.setDisableHeaderCheck(false);
}
function setReceivedCookies() {
if (xhr.readyState === xhr.HEADERS_RECEIVED) {
var receivedCookies = xhr.getResponseHeader("set-cookie");
if (receivedCookies) {
receivedCookies = Array.isArray(receivedCookies) ? receivedCookies : [receivedCookies];
receivedCookies.forEach(function (cookieStr) {
cookieJar.setCookieSync(cookieStr, lastUrl, {
http: true,
ignoreError: true
});
});
}
xhr.removeEventListener("readystatechange", setReceivedCookies);
}
}
xhr.addEventListener("readystatechange", setReceivedCookies);
return xhr._send(data);
};
Object.defineProperty(xhr, "response", {
get: function () {
if (this.responseType === "text" || !this.responseType) {
// Spec says "text" or "", but responseType support is incomplete, so we need to catch more cases.
return this.responseText;
} else if (this.responseType === "json") {
return JSON.parse(this.responseText);
} else {
return null; // emulate failed request
}
},
enumerable: true,
configurable: true
});
return xhr;
};
this.close = function () {
// Recursively close child frame windows, then ourselves.
var currentWindow = this;
(function windowCleaner(window) {
var i;
// We could call window.frames.length etc, but window.frames just points
// back to window.
if (window.length > 0) {
for (i = 0; i < window.length; i++) {
windowCleaner(window[i]);
}
}
// We"re already in our own window.close().
if (window !== currentWindow) {
window.close();
}
})(this);
// Clear out all listeners. Any in-flight or upcoming events should not get delivered.
idlUtils.implForWrapper(this, "EventTarget")._events = Object.create(null);
if (this._document) {
if (this._document.body) {
this._document.body.innerHTML = "";
}
if (this._document.close) {
// It's especially important to clear out the listeners here because document.close() causes a "load" event to
// fire.
this._document._listeners = Object.create(null);
this._document.close();
}
delete this._document;
}
stopAllTimers(currentWindow);
};
this.getComputedStyle = function (node) {
var s = node.style;
var cs = new CSSStyleDeclaration();
var forEach = Array.prototype.forEach;
function setPropertiesFromRule(rule) {
if (!rule.selectorText) {
return;
}
var selectors = rule.selectorText.split(cssSelectorSplitRE);
var matched = false;
selectors.forEach(function (selectorText) {
if (selectorText !== "" && selectorText !== "," && !matched && matchesDontThrow(node, selectorText)) {
matched = true;
forEach.call(rule.style, function (property) {
cs.setProperty(property, rule.style.getPropertyValue(property), rule.style.getPropertyPriority(property));
});
}
});
}
function readStylesFromStyleSheet(sheet) {
forEach.call(sheet.cssRules, function (rule) {
if (rule.media) {
if (Array.prototype.indexOf.call(rule.media, "screen") !== -1) {
forEach.call(rule.cssRules, setPropertiesFromRule);
}
} else {
setPropertiesFromRule(rule);
}
});
}
readStylesFromStyleSheet(defaultStyleSheet);
forEach.call(node.ownerDocument.styleSheets, readStylesFromStyleSheet);
forEach.call(s, function (property) {
cs.setProperty(property, s.getPropertyValue(property), s.getPropertyPriority(property));
});
return cs;
};
///// PUBLIC DATA PROPERTIES (TODO: should be getters)
this.history = new History(this);
this.console = {
assert: wrapConsoleMethod("assert"),
clear: wrapConsoleMethod("clear"),
count: wrapConsoleMethod("count"),
debug: wrapConsoleMethod("debug"),
error: wrapConsoleMethod("error"),
group: wrapConsoleMethod("group"),
groupCollapse: wrapConsoleMethod("groupCollapse"),
groupEnd: wrapConsoleMethod("groupEnd"),
info: wrapConsoleMethod("info"),
log: wrapConsoleMethod("log"),
table: wrapConsoleMethod("table"),
time: wrapConsoleMethod("time"),
timeEnd: wrapConsoleMethod("timeEnd"),
trace: wrapConsoleMethod("trace"),
warn: wrapConsoleMethod("warn")
};
function notImplementedMethod(name) {
return function () {
notImplemented(name, window);
};
}
define(this, {
navigator: {
get userAgent() { return "Node.js (" + process.platform + "; U; rv:" + process.version + ")"; },
get appName() { return "Node.js jsDom"; },
get platform() { return process.platform; },
get appVersion() { return process.version; },
noUI: true,
get cookieEnabled() { return true; }
},
name: "nodejs",
innerWidth: 1024,
innerHeight: 768,
outerWidth: 1024,
outerHeight: 768,
pageXOffset: 0,
pageYOffset: 0,
screenX: 0,
screenY: 0,
screenLeft: 0,
screenTop: 0,
scrollX: 0,
scrollY: 0,
scrollTop: 0,
scrollLeft: 0,
screen: {
width: 0,
height: 0
},
alert: notImplementedMethod("window.alert"),
blur: notImplementedMethod("window.blur"),
confirm: notImplementedMethod("window.confirm"),
createPopup: notImplementedMethod("window.createPopup"),
focus: notImplementedMethod("window.focus"),
moveBy: notImplementedMethod("window.moveBy"),
moveTo: notImplementedMethod("window.moveTo"),
open: notImplementedMethod("window.open"),
print: notImplementedMethod("window.print"),
prompt: notImplementedMethod("window.prompt"),
resizeBy: notImplementedMethod("window.resizeBy"),
resizeTo: notImplementedMethod("window.resizeTo"),
scroll: notImplementedMethod("window.scroll"),
scrollBy: notImplementedMethod("window.scrollBy"),
scrollTo: notImplementedMethod("window.scrollTo")
});
///// INITIALIZATION
process.nextTick(function () {
if (!window.document) {
return; // window might've been closed already
}
var ev = window.document.createEvent("HTMLEvents");
ev.initEvent("load", false, false);
if (window.document.readyState === "complete") {
window.dispatchEvent(ev);
} else {
window.document.addEventListener("load", function (ev) {
window.dispatchEvent(ev);
});
}
});
}
inherits(EventTarget.interface, Window, EventTarget.interface.prototype);
function matchesDontThrow(el, selector) {
try {
return el.matches(selector);
} catch (e) {
return false;
}
}
function startTimer(window, startFn, stopFn, callback, ms) {
var res = startFn(callback, ms);
window.__timers.push([res, stopFn]);
return res;
}
function stopTimer(window, id) {
if (typeof id === "undefined") {
return;
}
for (var i in window.__timers) {
if (window.__timers[i][0] === id) {
window.__timers[i][1].call(window, id);
window.__timers.splice(i, 1);
break;
}
}
}
function stopAllTimers(window) {
window.__timers.forEach(function (t) {
t[1].call(window, t[0]);
});
window.__timers = [];
}
function fixUrlForBuggyXhr(url) {
// node-XMLHttpRequest doesn't properly handle file URLs. It only accepts file://C:/..., not file:///C:/...
// See https://github.com/tmpvar/jsdom/pull/1180
return url.replace(/^file:\/\/\/([a-zA-Z]:)/, "file://$1");
}
| parent | identifier_name |
Window.js | "use strict";
var CSSStyleDeclaration = require("cssstyle").CSSStyleDeclaration;
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var notImplemented = require("./not-implemented");
var History = require("./history");
var VirtualConsole = require("../virtual-console");
var define = require("../utils").define;
var inherits = require("../utils").inheritFrom;
var resolveHref = require("../utils").resolveHref;
var EventTarget = require("../living/generated/events/EventTarget");
var namedPropertiesWindow = require("../living/named-properties-window");
var cssom = require("cssom");
var postMessage = require("../living/post-message");
const DOMException = require("../web-idl/DOMException");
const btoa = require("../../base64").btoa;
const atob = require("../../base64").atob;
const idlUtils = require("../living/generated/util");
// NB: the require() must be after assigning `module.export` because this require() is circular
module.exports = Window;
var dom = require("../living");
var cssSelectorSplitRE = /((?:[^,"']|"[^"]*"|'[^']*')+)/;
var defaultStyleSheet = cssom.parse(require("./default-stylesheet"));
dom.Window = Window;
// NOTE: per https://heycam.github.io/webidl/#Global, all properties on the Window object must be own-properties.
// That is why we assign everything inside of the constructor, instead of using a shared prototype.
// You can verify this in e.g. Firefox or Internet Explorer, which do a good job with Web IDL compliance.
function Window(options) {
EventTarget.setup(this);
var window = this;
///// INTERFACES FROM THE DOM
// TODO: consider a mode of some sort where these are not shared between all DOM instances
// It'd be very memory-expensive in most cases, though.
define(window, dom);
///// PRIVATE DATA PROPERTIES
// vm initialization is defered until script processing is activated (in level1/core)
this._globalProxy = this;
this.__timers = [];
// List options explicitly to be clear which are passed through
this._document = new dom.HTMLDocument({
parsingMode: options.parsingMode,
contentType: options.contentType,
cookieJar: options.cookieJar,
parser: options.parser,
url: options.url,
referrer: options.referrer,
cookie: options.cookie,
deferClose: options.deferClose,
resourceLoader: options.resourceLoader,
concurrentNodeIterators: options.concurrentNodeIterators,
defaultView: this._globalProxy,
global: this
});
// Set up the window as if it's a top level window.
// If it's not, then references will be corrected by frame/iframe code.
this._parent = this._top = this._globalProxy;
// This implements window.frames.length, since window.frames returns a
// self reference to the window object. This value is incremented in the
// HTMLFrameElement init function (see: level2/html.js).
this._length = 0;
if (options.virtualConsole) {
if (options.virtualConsole instanceof VirtualConsole) {
this._virtualConsole = options.virtualConsole;
} else {
throw new TypeError(
"options.virtualConsole must be a VirtualConsole (from createVirtualConsole)");
}
} else {
this._virtualConsole = new VirtualConsole();
}
///// GETTERS
define(this, {
get length() {
return window._length;
},
get window() {
return window._globalProxy;
},
get frames() {
return window._globalProxy;
},
get self() {
return window._globalProxy;
},
get parent() {
return window._parent;
},
get top() {
return window._top;
},
get document() {
return window._document;
},
get location() {
return window._document._location;
}
});
namedPropertiesWindow.initializeWindow(this, dom.HTMLCollection);
///// METHODS for [ImplicitThis] hack
// See https://lists.w3.org/Archives/Public/public-script-coord/2015JanMar/0109.html
this.addEventListener = this.addEventListener.bind(this);
this.removeEventListener = this.removeEventListener.bind(this);
this.dispatchEvent = this.dispatchEvent.bind(this);
///// METHODS
this.setTimeout = function (fn, ms) {
return startTimer(window, setTimeout, clearTimeout, fn, ms);
};
this.setInterval = function (fn, ms) {
return startTimer(window, setInterval, clearInterval, fn, ms);
};
this.clearInterval = stopTimer.bind(this, window);
this.clearTimeout = stopTimer.bind(this, window);
this.__stopAllTimers = stopAllTimers.bind(this, window);
this.Image = function (width, height) {
var element = window._document.createElement("img");
element.width = width;
element.height = height;
return element;
};
function wrapConsoleMethod(method) {
return function () {
var args = Array.prototype.slice.call(arguments);
window._virtualConsole.emit.apply(window._virtualConsole, [method].concat(args));
};
}
this.postMessage = postMessage;
this.atob = function (str) {
const result = atob(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.btoa = function (str) {
const result = btoa(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.XMLHttpRequest = function () {
var xhr = new XMLHttpRequest();
var lastUrl = "";
xhr._open = xhr.open;
xhr.open = function (method, url, async, user, password) {
lastUrl = fixUrlForBuggyXhr(resolveHref(window.document.URL, url));
return xhr._open(method, lastUrl, async, user, password);
};
xhr._getAllResponseHeaders = xhr.getAllResponseHeaders;
xhr.getAllResponseHeaders = function () {
if (lastUrl.startsWith("file:")) {
// Monkey patch this function for files. The node-xhr module will crash for file URLs.
return null;
}
return xhr._getAllResponseHeaders();
};
xhr._send = xhr.send;
xhr.send = function (data) {
var cookieJar = window.document._cookieJar;
var cookieStr = cookieJar.getCookieStringSync(lastUrl, {http: true});
if (cookieStr) {
xhr.setDisableHeaderCheck(true);
xhr.setRequestHeader("cookie", cookieStr);
xhr.setDisableHeaderCheck(false);
}
function setReceivedCookies() {
if (xhr.readyState === xhr.HEADERS_RECEIVED) {
var receivedCookies = xhr.getResponseHeader("set-cookie");
if (receivedCookies) {
receivedCookies = Array.isArray(receivedCookies) ? receivedCookies : [receivedCookies];
receivedCookies.forEach(function (cookieStr) {
cookieJar.setCookieSync(cookieStr, lastUrl, {
http: true,
ignoreError: true
});
});
}
xhr.removeEventListener("readystatechange", setReceivedCookies);
}
}
xhr.addEventListener("readystatechange", setReceivedCookies);
return xhr._send(data);
};
Object.defineProperty(xhr, "response", {
get: function () {
if (this.responseType === "text" || !this.responseType) {
// Spec says "text" or "", but responseType support is incomplete, so we need to catch more cases.
return this.responseText;
} else if (this.responseType === "json") {
return JSON.parse(this.responseText);
} else {
return null; // emulate failed request
}
},
enumerable: true,
configurable: true
});
return xhr;
};
this.close = function () {
// Recursively close child frame windows, then ourselves.
var currentWindow = this;
(function windowCleaner(window) {
var i;
// We could call window.frames.length etc, but window.frames just points
// back to window.
if (window.length > 0) {
for (i = 0; i < window.length; i++) {
windowCleaner(window[i]);
}
}
// We"re already in our own window.close().
if (window !== currentWindow) {
window.close();
}
})(this);
// Clear out all listeners. Any in-flight or upcoming events should not get delivered.
idlUtils.implForWrapper(this, "EventTarget")._events = Object.create(null);
if (this._document) {
if (this._document.body) {
this._document.body.innerHTML = "";
}
if (this._document.close) {
// It's especially important to clear out the listeners here because document.close() causes a "load" event to
// fire.
this._document._listeners = Object.create(null);
this._document.close();
}
delete this._document;
}
stopAllTimers(currentWindow);
};
this.getComputedStyle = function (node) {
var s = node.style;
var cs = new CSSStyleDeclaration();
var forEach = Array.prototype.forEach;
function setPropertiesFromRule(rule) {
if (!rule.selectorText) {
return;
}
var selectors = rule.selectorText.split(cssSelectorSplitRE);
var matched = false;
selectors.forEach(function (selectorText) {
if (selectorText !== "" && selectorText !== "," && !matched && matchesDontThrow(node, selectorText)) {
matched = true;
forEach.call(rule.style, function (property) {
cs.setProperty(property, rule.style.getPropertyValue(property), rule.style.getPropertyPriority(property));
});
}
});
}
function readStylesFromStyleSheet(sheet) {
forEach.call(sheet.cssRules, function (rule) {
if (rule.media) {
if (Array.prototype.indexOf.call(rule.media, "screen") !== -1) |
} else {
setPropertiesFromRule(rule);
}
});
}
readStylesFromStyleSheet(defaultStyleSheet);
forEach.call(node.ownerDocument.styleSheets, readStylesFromStyleSheet);
forEach.call(s, function (property) {
cs.setProperty(property, s.getPropertyValue(property), s.getPropertyPriority(property));
});
return cs;
};
///// PUBLIC DATA PROPERTIES (TODO: should be getters)
this.history = new History(this);
this.console = {
assert: wrapConsoleMethod("assert"),
clear: wrapConsoleMethod("clear"),
count: wrapConsoleMethod("count"),
debug: wrapConsoleMethod("debug"),
error: wrapConsoleMethod("error"),
group: wrapConsoleMethod("group"),
groupCollapse: wrapConsoleMethod("groupCollapse"),
groupEnd: wrapConsoleMethod("groupEnd"),
info: wrapConsoleMethod("info"),
log: wrapConsoleMethod("log"),
table: wrapConsoleMethod("table"),
time: wrapConsoleMethod("time"),
timeEnd: wrapConsoleMethod("timeEnd"),
trace: wrapConsoleMethod("trace"),
warn: wrapConsoleMethod("warn")
};
function notImplementedMethod(name) {
return function () {
notImplemented(name, window);
};
}
define(this, {
navigator: {
get userAgent() { return "Node.js (" + process.platform + "; U; rv:" + process.version + ")"; },
get appName() { return "Node.js jsDom"; },
get platform() { return process.platform; },
get appVersion() { return process.version; },
noUI: true,
get cookieEnabled() { return true; }
},
name: "nodejs",
innerWidth: 1024,
innerHeight: 768,
outerWidth: 1024,
outerHeight: 768,
pageXOffset: 0,
pageYOffset: 0,
screenX: 0,
screenY: 0,
screenLeft: 0,
screenTop: 0,
scrollX: 0,
scrollY: 0,
scrollTop: 0,
scrollLeft: 0,
screen: {
width: 0,
height: 0
},
alert: notImplementedMethod("window.alert"),
blur: notImplementedMethod("window.blur"),
confirm: notImplementedMethod("window.confirm"),
createPopup: notImplementedMethod("window.createPopup"),
focus: notImplementedMethod("window.focus"),
moveBy: notImplementedMethod("window.moveBy"),
moveTo: notImplementedMethod("window.moveTo"),
open: notImplementedMethod("window.open"),
print: notImplementedMethod("window.print"),
prompt: notImplementedMethod("window.prompt"),
resizeBy: notImplementedMethod("window.resizeBy"),
resizeTo: notImplementedMethod("window.resizeTo"),
scroll: notImplementedMethod("window.scroll"),
scrollBy: notImplementedMethod("window.scrollBy"),
scrollTo: notImplementedMethod("window.scrollTo")
});
///// INITIALIZATION
process.nextTick(function () {
if (!window.document) {
return; // window might've been closed already
}
var ev = window.document.createEvent("HTMLEvents");
ev.initEvent("load", false, false);
if (window.document.readyState === "complete") {
window.dispatchEvent(ev);
} else {
window.document.addEventListener("load", function (ev) {
window.dispatchEvent(ev);
});
}
});
}
inherits(EventTarget.interface, Window, EventTarget.interface.prototype);
function matchesDontThrow(el, selector) {
try {
return el.matches(selector);
} catch (e) {
return false;
}
}
function startTimer(window, startFn, stopFn, callback, ms) {
var res = startFn(callback, ms);
window.__timers.push([res, stopFn]);
return res;
}
function stopTimer(window, id) {
if (typeof id === "undefined") {
return;
}
for (var i in window.__timers) {
if (window.__timers[i][0] === id) {
window.__timers[i][1].call(window, id);
window.__timers.splice(i, 1);
break;
}
}
}
function stopAllTimers(window) {
window.__timers.forEach(function (t) {
t[1].call(window, t[0]);
});
window.__timers = [];
}
function fixUrlForBuggyXhr(url) {
// node-XMLHttpRequest doesn't properly handle file URLs. It only accepts file://C:/..., not file:///C:/...
// See https://github.com/tmpvar/jsdom/pull/1180
return url.replace(/^file:\/\/\/([a-zA-Z]:)/, "file://$1");
}
| {
forEach.call(rule.cssRules, setPropertiesFromRule);
} | conditional_block |
Window.js | "use strict";
var CSSStyleDeclaration = require("cssstyle").CSSStyleDeclaration;
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var notImplemented = require("./not-implemented");
var History = require("./history");
var VirtualConsole = require("../virtual-console");
var define = require("../utils").define;
var inherits = require("../utils").inheritFrom;
var resolveHref = require("../utils").resolveHref;
var EventTarget = require("../living/generated/events/EventTarget");
var namedPropertiesWindow = require("../living/named-properties-window");
var cssom = require("cssom");
var postMessage = require("../living/post-message");
const DOMException = require("../web-idl/DOMException");
const btoa = require("../../base64").btoa;
const atob = require("../../base64").atob;
const idlUtils = require("../living/generated/util");
// NB: the require() must be after assigning `module.export` because this require() is circular
module.exports = Window;
var dom = require("../living");
var cssSelectorSplitRE = /((?:[^,"']|"[^"]*"|'[^']*')+)/;
var defaultStyleSheet = cssom.parse(require("./default-stylesheet"));
dom.Window = Window;
// NOTE: per https://heycam.github.io/webidl/#Global, all properties on the Window object must be own-properties.
// That is why we assign everything inside of the constructor, instead of using a shared prototype.
// You can verify this in e.g. Firefox or Internet Explorer, which do a good job with Web IDL compliance.
function Window(options) {
EventTarget.setup(this);
var window = this;
///// INTERFACES FROM THE DOM
// TODO: consider a mode of some sort where these are not shared between all DOM instances
// It'd be very memory-expensive in most cases, though.
define(window, dom);
///// PRIVATE DATA PROPERTIES
// vm initialization is defered until script processing is activated (in level1/core)
this._globalProxy = this;
this.__timers = [];
// List options explicitly to be clear which are passed through
this._document = new dom.HTMLDocument({
parsingMode: options.parsingMode,
contentType: options.contentType,
cookieJar: options.cookieJar,
parser: options.parser,
url: options.url,
referrer: options.referrer,
cookie: options.cookie,
deferClose: options.deferClose,
resourceLoader: options.resourceLoader,
concurrentNodeIterators: options.concurrentNodeIterators,
defaultView: this._globalProxy,
global: this
});
// Set up the window as if it's a top level window.
// If it's not, then references will be corrected by frame/iframe code.
this._parent = this._top = this._globalProxy;
// This implements window.frames.length, since window.frames returns a
// self reference to the window object. This value is incremented in the
// HTMLFrameElement init function (see: level2/html.js).
this._length = 0;
if (options.virtualConsole) {
if (options.virtualConsole instanceof VirtualConsole) {
this._virtualConsole = options.virtualConsole;
} else {
throw new TypeError(
"options.virtualConsole must be a VirtualConsole (from createVirtualConsole)");
}
} else {
this._virtualConsole = new VirtualConsole();
}
///// GETTERS
define(this, {
get length() {
return window._length;
},
get window() {
return window._globalProxy;
},
get frames() {
return window._globalProxy;
},
get self() {
return window._globalProxy;
},
get parent() {
return window._parent;
},
get top() {
return window._top;
},
get document() {
return window._document;
},
get location() {
return window._document._location;
}
});
namedPropertiesWindow.initializeWindow(this, dom.HTMLCollection);
///// METHODS for [ImplicitThis] hack
// See https://lists.w3.org/Archives/Public/public-script-coord/2015JanMar/0109.html
this.addEventListener = this.addEventListener.bind(this);
this.removeEventListener = this.removeEventListener.bind(this);
this.dispatchEvent = this.dispatchEvent.bind(this);
///// METHODS
this.setTimeout = function (fn, ms) {
return startTimer(window, setTimeout, clearTimeout, fn, ms);
};
this.setInterval = function (fn, ms) {
return startTimer(window, setInterval, clearInterval, fn, ms);
};
this.clearInterval = stopTimer.bind(this, window);
this.clearTimeout = stopTimer.bind(this, window);
this.__stopAllTimers = stopAllTimers.bind(this, window);
this.Image = function (width, height) {
var element = window._document.createElement("img");
element.width = width;
element.height = height;
return element;
};
function wrapConsoleMethod(method) {
return function () {
var args = Array.prototype.slice.call(arguments);
window._virtualConsole.emit.apply(window._virtualConsole, [method].concat(args));
};
}
this.postMessage = postMessage;
this.atob = function (str) {
const result = atob(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.btoa = function (str) {
const result = btoa(str);
if (result === null) {
throw new DOMException(DOMException.INVALID_CHARACTER_ERR,
"The string to be encoded contains invalid characters.");
}
return result;
};
this.XMLHttpRequest = function () {
var xhr = new XMLHttpRequest();
var lastUrl = "";
xhr._open = xhr.open;
xhr.open = function (method, url, async, user, password) {
lastUrl = fixUrlForBuggyXhr(resolveHref(window.document.URL, url));
return xhr._open(method, lastUrl, async, user, password);
};
xhr._getAllResponseHeaders = xhr.getAllResponseHeaders;
xhr.getAllResponseHeaders = function () {
if (lastUrl.startsWith("file:")) {
// Monkey patch this function for files. The node-xhr module will crash for file URLs.
return null;
}
return xhr._getAllResponseHeaders();
};
xhr._send = xhr.send;
xhr.send = function (data) {
var cookieJar = window.document._cookieJar;
var cookieStr = cookieJar.getCookieStringSync(lastUrl, {http: true});
if (cookieStr) {
xhr.setDisableHeaderCheck(true);
xhr.setRequestHeader("cookie", cookieStr);
xhr.setDisableHeaderCheck(false);
}
function setReceivedCookies() {
if (xhr.readyState === xhr.HEADERS_RECEIVED) {
var receivedCookies = xhr.getResponseHeader("set-cookie");
if (receivedCookies) {
receivedCookies = Array.isArray(receivedCookies) ? receivedCookies : [receivedCookies];
receivedCookies.forEach(function (cookieStr) {
cookieJar.setCookieSync(cookieStr, lastUrl, {
http: true,
ignoreError: true
});
});
}
xhr.removeEventListener("readystatechange", setReceivedCookies);
}
}
xhr.addEventListener("readystatechange", setReceivedCookies);
return xhr._send(data);
};
Object.defineProperty(xhr, "response", {
get: function () {
if (this.responseType === "text" || !this.responseType) {
// Spec says "text" or "", but responseType support is incomplete, so we need to catch more cases.
return this.responseText;
} else if (this.responseType === "json") {
return JSON.parse(this.responseText);
} else {
return null; // emulate failed request
}
},
enumerable: true,
configurable: true
});
return xhr;
};
this.close = function () {
// Recursively close child frame windows, then ourselves.
var currentWindow = this;
(function windowCleaner(window) {
var i;
// We could call window.frames.length etc, but window.frames just points
// back to window.
if (window.length > 0) {
for (i = 0; i < window.length; i++) {
windowCleaner(window[i]);
}
}
// We"re already in our own window.close().
if (window !== currentWindow) {
window.close();
}
})(this);
// Clear out all listeners. Any in-flight or upcoming events should not get delivered.
idlUtils.implForWrapper(this, "EventTarget")._events = Object.create(null);
if (this._document) {
if (this._document.body) {
this._document.body.innerHTML = "";
}
if (this._document.close) {
// It's especially important to clear out the listeners here because document.close() causes a "load" event to
// fire.
this._document._listeners = Object.create(null);
this._document.close();
}
delete this._document;
}
stopAllTimers(currentWindow);
};
this.getComputedStyle = function (node) {
var s = node.style;
var cs = new CSSStyleDeclaration();
var forEach = Array.prototype.forEach;
function setPropertiesFromRule(rule) {
if (!rule.selectorText) {
return;
}
var selectors = rule.selectorText.split(cssSelectorSplitRE);
var matched = false;
selectors.forEach(function (selectorText) {
if (selectorText !== "" && selectorText !== "," && !matched && matchesDontThrow(node, selectorText)) {
matched = true;
forEach.call(rule.style, function (property) {
cs.setProperty(property, rule.style.getPropertyValue(property), rule.style.getPropertyPriority(property));
});
}
});
}
function readStylesFromStyleSheet(sheet) {
forEach.call(sheet.cssRules, function (rule) {
if (rule.media) {
if (Array.prototype.indexOf.call(rule.media, "screen") !== -1) {
forEach.call(rule.cssRules, setPropertiesFromRule);
}
} else {
setPropertiesFromRule(rule);
}
});
}
readStylesFromStyleSheet(defaultStyleSheet);
forEach.call(node.ownerDocument.styleSheets, readStylesFromStyleSheet);
forEach.call(s, function (property) {
cs.setProperty(property, s.getPropertyValue(property), s.getPropertyPriority(property));
});
return cs;
};
///// PUBLIC DATA PROPERTIES (TODO: should be getters)
this.history = new History(this);
this.console = {
assert: wrapConsoleMethod("assert"),
clear: wrapConsoleMethod("clear"),
count: wrapConsoleMethod("count"),
debug: wrapConsoleMethod("debug"),
error: wrapConsoleMethod("error"),
group: wrapConsoleMethod("group"),
groupCollapse: wrapConsoleMethod("groupCollapse"),
groupEnd: wrapConsoleMethod("groupEnd"),
info: wrapConsoleMethod("info"),
log: wrapConsoleMethod("log"),
table: wrapConsoleMethod("table"),
time: wrapConsoleMethod("time"),
timeEnd: wrapConsoleMethod("timeEnd"),
trace: wrapConsoleMethod("trace"),
warn: wrapConsoleMethod("warn")
};
function notImplementedMethod(name) {
return function () {
notImplemented(name, window);
};
}
define(this, {
navigator: {
get userAgent() { return "Node.js (" + process.platform + "; U; rv:" + process.version + ")"; },
get appName() { return "Node.js jsDom"; },
get platform() { return process.platform; },
get appVersion() { return process.version; },
noUI: true,
get cookieEnabled() |
},
name: "nodejs",
innerWidth: 1024,
innerHeight: 768,
outerWidth: 1024,
outerHeight: 768,
pageXOffset: 0,
pageYOffset: 0,
screenX: 0,
screenY: 0,
screenLeft: 0,
screenTop: 0,
scrollX: 0,
scrollY: 0,
scrollTop: 0,
scrollLeft: 0,
screen: {
width: 0,
height: 0
},
alert: notImplementedMethod("window.alert"),
blur: notImplementedMethod("window.blur"),
confirm: notImplementedMethod("window.confirm"),
createPopup: notImplementedMethod("window.createPopup"),
focus: notImplementedMethod("window.focus"),
moveBy: notImplementedMethod("window.moveBy"),
moveTo: notImplementedMethod("window.moveTo"),
open: notImplementedMethod("window.open"),
print: notImplementedMethod("window.print"),
prompt: notImplementedMethod("window.prompt"),
resizeBy: notImplementedMethod("window.resizeBy"),
resizeTo: notImplementedMethod("window.resizeTo"),
scroll: notImplementedMethod("window.scroll"),
scrollBy: notImplementedMethod("window.scrollBy"),
scrollTo: notImplementedMethod("window.scrollTo")
});
///// INITIALIZATION
process.nextTick(function () {
if (!window.document) {
return; // window might've been closed already
}
var ev = window.document.createEvent("HTMLEvents");
ev.initEvent("load", false, false);
if (window.document.readyState === "complete") {
window.dispatchEvent(ev);
} else {
window.document.addEventListener("load", function (ev) {
window.dispatchEvent(ev);
});
}
});
}
inherits(EventTarget.interface, Window, EventTarget.interface.prototype);
function matchesDontThrow(el, selector) {
try {
return el.matches(selector);
} catch (e) {
return false;
}
}
function startTimer(window, startFn, stopFn, callback, ms) {
var res = startFn(callback, ms);
window.__timers.push([res, stopFn]);
return res;
}
function stopTimer(window, id) {
if (typeof id === "undefined") {
return;
}
for (var i in window.__timers) {
if (window.__timers[i][0] === id) {
window.__timers[i][1].call(window, id);
window.__timers.splice(i, 1);
break;
}
}
}
function stopAllTimers(window) {
window.__timers.forEach(function (t) {
t[1].call(window, t[0]);
});
window.__timers = [];
}
function fixUrlForBuggyXhr(url) {
// node-XMLHttpRequest doesn't properly handle file URLs. It only accepts file://C:/..., not file:///C:/...
// See https://github.com/tmpvar/jsdom/pull/1180
return url.replace(/^file:\/\/\/([a-zA-Z]:)/, "file://$1");
}
| { return true; } | identifier_body |
nn_test.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for nn module."""
import collections
from functools import partial
import itertools
from absl.testing import absltest
from absl.testing import parameterized
import scipy.stats
from jax import core
from jax._src import test_util as jtu
from jax.test_util import check_grads
from jax import nn
from jax import random
import jax
import jax.numpy as jnp
from jax.config import config
config.parse_flags_with_absl()
class NNFunctionsTest(jtu.JaxTestCase):
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testSoftplusGrad(self):
check_grads(nn.softplus, (1e-8,), order=4,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradZero(self):
check_grads(nn.softplus, (0.,), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradInf(self):
self.assertAllClose(
1., jax.grad(nn.softplus)(float('inf')))
def testSoftplusGradNegInf(self):
check_grads(nn.softplus, (-float('inf'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradNan(self):
check_grads(nn.softplus, (float('nan'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
@parameterized.parameters([int, float] + jtu.dtypes.floating + jtu.dtypes.integer)
def testSoftplusZero(self, dtype):
self.assertEqual(jnp.log(dtype(2)), nn.softplus(dtype(0)))
def testReluGrad(self):
rtol = 1e-2 if jtu.device_under_test() == "tpu" else None
check_grads(nn.relu, (1.,), order=3, rtol=rtol)
check_grads(nn.relu, (-1.,), order=3, rtol=rtol)
jaxpr = jax.make_jaxpr(jax.grad(nn.relu))(0.)
self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
def testSoftplusValue(self):
val = nn.softplus(89.)
self.assertAllClose(val, 89., check_dtypes=False)
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testEluGrad(self):
check_grads(nn.elu, (1e4,), order=4, eps=1.)
def testEluValue(self):
val = nn.elu(1e4)
self.assertAllClose(val, 1e4, check_dtypes=False)
def testGluValue(self):
val = nn.glu(jnp.array([1.0, 0.0]))
self.assertAllClose(val, jnp.array([0.5]))
@parameterized.parameters(False, True)
def testGelu(self, approximate):
def gelu_reference(x):
return x * scipy.stats.norm.cdf(x)
rng = jtu.rand_default(self.rng())
args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
self._CheckAgainstNumpy(
gelu_reference, partial(nn.gelu, approximate=approximate), args_maker,
check_dtypes=False, tol=1e-3 if approximate else None)
@parameterized.parameters(*itertools.product(
(jnp.float32, jnp.bfloat16, jnp.float16),
(partial(nn.gelu, approximate=False),
partial(nn.gelu, approximate=True),
nn.relu, nn.softplus, nn.sigmoid)))
def testDtypeMatchesInput(self, dtype, fn):
x = jnp.zeros((), dtype=dtype)
out = fn(x)
self.assertEqual(out.dtype, dtype)
def testEluMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.elu(jnp.ones((10 ** 12,)))) # don't oom
def testHardTanhMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.hard_tanh(jnp.ones((10 ** 12,)))) # don't oom
@parameterized.parameters([nn.softmax, nn.log_softmax])
def testSoftmaxWhereMask(self, fn):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(
fn(x, where=m, initial=-jnp.inf), jnp.array([0, 2, 3]))
out_filtered = fn(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testNormalizeWhereMask(self):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(nn.normalize(x, where=m), jnp.array([0, 2, 3]))
out_filtered = nn.normalize(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testOneHot(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3)
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotOutOfBound(self):
actual = nn.one_hot(jnp.array([-1, 3]), 3)
expected = jnp.array([[0., 0., 0.],
[0., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotNonArrayInput(self):
actual = nn.one_hot([0, 1, 2], 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
def testOneHotCustomDtype(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
expected = jnp.array([[True, False, False],
[False, True, False],
[False, False, True]])
self.assertAllClose(actual, expected)
def testOneHotConcretizationError(self):
# https://github.com/google/jax/issues/3654
msg = r"in jax.nn.one_hot argument `num_classes`"
with self.assertRaisesRegex(core.ConcretizationTypeError, msg):
jax.jit(nn.one_hot)(3, 5)
def testOneHotAxis(self):
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]]).T
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
self.assertAllClose(actual, expected)
def testTanhExists(self):
nn.tanh # doesn't crash
def testCustomJVPLeak(self):
# https://github.com/google/jax/issues/8171
@jax.jit
def fwd():
a = jnp.array(1.)
def f(hx, _):
hx = jax.nn.sigmoid(hx + a)
return hx, None
hx = jnp.array(0.)
jax.lax.scan(f, hx, None, length=2)
with jax.checking_leaks():
fwd() # doesn't crash
InitializerRecord = collections.namedtuple(
"InitializerRecord",
["name", "initializer", "shapes", "dtypes"])
ALL_SHAPES = [(2,), (2, 2), (2, 3), (3, 2), (2, 3, 4), (4, 3, 2), (2, 3, 4, 5)]
def initializer_record(name, initializer, dtypes, min_dims=2, max_dims=4):
shapes = [shape for shape in ALL_SHAPES
if min_dims <= len(shape) <= max_dims]
return InitializerRecord(name, initializer, shapes, dtypes)
INITIALIZER_RECS = [
initializer_record("uniform", nn.initializers.uniform, jtu.dtypes.floating, 1),
initializer_record("normal", nn.initializers.normal, jtu.dtypes.inexact, 1),
initializer_record("he_normal", nn.initializers.he_normal, jtu.dtypes.inexact),
initializer_record("he_uniform", nn.initializers.he_uniform, jtu.dtypes.inexact),
initializer_record("glorot_normal", nn.initializers.glorot_normal, jtu.dtypes.inexact), | ]
class NNInitializersTest(jtu.JaxTestCase):
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer": rec.initializer(),
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializer(self, initializer, shape, dtype):
rng = random.PRNGKey(0)
val = initializer(rng, shape, dtype)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer_provider": rec.initializer,
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializerProvider(self, initializer_provider, shape, dtype):
rng = random.PRNGKey(0)
initializer = initializer_provider(dtype=dtype)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
def testVarianceScalingMultiAxis(self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=(0, 1), out_axis=(-2, -1))
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
def testVarianceScalingBatchAxis(self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=0, out_axis=(2, 3), batch_axis=1)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
if __name__ == "__main__":
absltest.main(testLoader=jtu.JaxTestLoader()) | initializer_record("glorot_uniform", nn.initializers.glorot_uniform, jtu.dtypes.inexact),
initializer_record("lecun_normal", nn.initializers.lecun_normal, jtu.dtypes.inexact),
initializer_record("lecun_uniform", nn.initializers.lecun_uniform, jtu.dtypes.inexact),
initializer_record("orthogonal", nn.initializers.orthogonal, jtu.dtypes.floating, 2, 2),
initializer_record("delta_orthogonal", nn.initializers.delta_orthogonal, jtu.dtypes.floating, 4, 4) | random_line_split |
nn_test.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for nn module."""
import collections
from functools import partial
import itertools
from absl.testing import absltest
from absl.testing import parameterized
import scipy.stats
from jax import core
from jax._src import test_util as jtu
from jax.test_util import check_grads
from jax import nn
from jax import random
import jax
import jax.numpy as jnp
from jax.config import config
config.parse_flags_with_absl()
class NNFunctionsTest(jtu.JaxTestCase):
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testSoftplusGrad(self):
check_grads(nn.softplus, (1e-8,), order=4,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradZero(self):
check_grads(nn.softplus, (0.,), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradInf(self):
self.assertAllClose(
1., jax.grad(nn.softplus)(float('inf')))
def testSoftplusGradNegInf(self):
check_grads(nn.softplus, (-float('inf'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradNan(self):
check_grads(nn.softplus, (float('nan'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
@parameterized.parameters([int, float] + jtu.dtypes.floating + jtu.dtypes.integer)
def testSoftplusZero(self, dtype):
self.assertEqual(jnp.log(dtype(2)), nn.softplus(dtype(0)))
def testReluGrad(self):
rtol = 1e-2 if jtu.device_under_test() == "tpu" else None
check_grads(nn.relu, (1.,), order=3, rtol=rtol)
check_grads(nn.relu, (-1.,), order=3, rtol=rtol)
jaxpr = jax.make_jaxpr(jax.grad(nn.relu))(0.)
self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
def testSoftplusValue(self):
val = nn.softplus(89.)
self.assertAllClose(val, 89., check_dtypes=False)
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testEluGrad(self):
check_grads(nn.elu, (1e4,), order=4, eps=1.)
def testEluValue(self):
val = nn.elu(1e4)
self.assertAllClose(val, 1e4, check_dtypes=False)
def testGluValue(self):
val = nn.glu(jnp.array([1.0, 0.0]))
self.assertAllClose(val, jnp.array([0.5]))
@parameterized.parameters(False, True)
def testGelu(self, approximate):
def gelu_reference(x):
return x * scipy.stats.norm.cdf(x)
rng = jtu.rand_default(self.rng())
args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
self._CheckAgainstNumpy(
gelu_reference, partial(nn.gelu, approximate=approximate), args_maker,
check_dtypes=False, tol=1e-3 if approximate else None)
@parameterized.parameters(*itertools.product(
(jnp.float32, jnp.bfloat16, jnp.float16),
(partial(nn.gelu, approximate=False),
partial(nn.gelu, approximate=True),
nn.relu, nn.softplus, nn.sigmoid)))
def testDtypeMatchesInput(self, dtype, fn):
x = jnp.zeros((), dtype=dtype)
out = fn(x)
self.assertEqual(out.dtype, dtype)
def testEluMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.elu(jnp.ones((10 ** 12,)))) # don't oom
def testHardTanhMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.hard_tanh(jnp.ones((10 ** 12,)))) # don't oom
@parameterized.parameters([nn.softmax, nn.log_softmax])
def testSoftmaxWhereMask(self, fn):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(
fn(x, where=m, initial=-jnp.inf), jnp.array([0, 2, 3]))
out_filtered = fn(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testNormalizeWhereMask(self):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(nn.normalize(x, where=m), jnp.array([0, 2, 3]))
out_filtered = nn.normalize(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testOneHot(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3)
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotOutOfBound(self):
actual = nn.one_hot(jnp.array([-1, 3]), 3)
expected = jnp.array([[0., 0., 0.],
[0., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotNonArrayInput(self):
actual = nn.one_hot([0, 1, 2], 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
def testOneHotCustomDtype(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
expected = jnp.array([[True, False, False],
[False, True, False],
[False, False, True]])
self.assertAllClose(actual, expected)
def testOneHotConcretizationError(self):
# https://github.com/google/jax/issues/3654
msg = r"in jax.nn.one_hot argument `num_classes`"
with self.assertRaisesRegex(core.ConcretizationTypeError, msg):
jax.jit(nn.one_hot)(3, 5)
def testOneHotAxis(self):
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]]).T
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
self.assertAllClose(actual, expected)
def testTanhExists(self):
nn.tanh # doesn't crash
def testCustomJVPLeak(self):
# https://github.com/google/jax/issues/8171
@jax.jit
def fwd():
a = jnp.array(1.)
def f(hx, _):
hx = jax.nn.sigmoid(hx + a)
return hx, None
hx = jnp.array(0.)
jax.lax.scan(f, hx, None, length=2)
with jax.checking_leaks():
fwd() # doesn't crash
InitializerRecord = collections.namedtuple(
"InitializerRecord",
["name", "initializer", "shapes", "dtypes"])
ALL_SHAPES = [(2,), (2, 2), (2, 3), (3, 2), (2, 3, 4), (4, 3, 2), (2, 3, 4, 5)]
def initializer_record(name, initializer, dtypes, min_dims=2, max_dims=4):
shapes = [shape for shape in ALL_SHAPES
if min_dims <= len(shape) <= max_dims]
return InitializerRecord(name, initializer, shapes, dtypes)
INITIALIZER_RECS = [
initializer_record("uniform", nn.initializers.uniform, jtu.dtypes.floating, 1),
initializer_record("normal", nn.initializers.normal, jtu.dtypes.inexact, 1),
initializer_record("he_normal", nn.initializers.he_normal, jtu.dtypes.inexact),
initializer_record("he_uniform", nn.initializers.he_uniform, jtu.dtypes.inexact),
initializer_record("glorot_normal", nn.initializers.glorot_normal, jtu.dtypes.inexact),
initializer_record("glorot_uniform", nn.initializers.glorot_uniform, jtu.dtypes.inexact),
initializer_record("lecun_normal", nn.initializers.lecun_normal, jtu.dtypes.inexact),
initializer_record("lecun_uniform", nn.initializers.lecun_uniform, jtu.dtypes.inexact),
initializer_record("orthogonal", nn.initializers.orthogonal, jtu.dtypes.floating, 2, 2),
initializer_record("delta_orthogonal", nn.initializers.delta_orthogonal, jtu.dtypes.floating, 4, 4)
]
class NNInitializersTest(jtu.JaxTestCase):
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer": rec.initializer(),
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializer(self, initializer, shape, dtype):
rng = random.PRNGKey(0)
val = initializer(rng, shape, dtype)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer_provider": rec.initializer,
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializerProvider(self, initializer_provider, shape, dtype):
rng = random.PRNGKey(0)
initializer = initializer_provider(dtype=dtype)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
def testVarianceScalingMultiAxis(self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=(0, 1), out_axis=(-2, -1))
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
def testVarianceScalingBatchAxis(self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=0, out_axis=(2, 3), batch_axis=1)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
if __name__ == "__main__":
| absltest.main(testLoader=jtu.JaxTestLoader()) | conditional_block | |
nn_test.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for nn module."""
import collections
from functools import partial
import itertools
from absl.testing import absltest
from absl.testing import parameterized
import scipy.stats
from jax import core
from jax._src import test_util as jtu
from jax.test_util import check_grads
from jax import nn
from jax import random
import jax
import jax.numpy as jnp
from jax.config import config
config.parse_flags_with_absl()
class NNFunctionsTest(jtu.JaxTestCase):
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testSoftplusGrad(self):
check_grads(nn.softplus, (1e-8,), order=4,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradZero(self):
check_grads(nn.softplus, (0.,), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradInf(self):
self.assertAllClose(
1., jax.grad(nn.softplus)(float('inf')))
def testSoftplusGradNegInf(self):
check_grads(nn.softplus, (-float('inf'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradNan(self):
check_grads(nn.softplus, (float('nan'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
@parameterized.parameters([int, float] + jtu.dtypes.floating + jtu.dtypes.integer)
def testSoftplusZero(self, dtype):
self.assertEqual(jnp.log(dtype(2)), nn.softplus(dtype(0)))
def testReluGrad(self):
rtol = 1e-2 if jtu.device_under_test() == "tpu" else None
check_grads(nn.relu, (1.,), order=3, rtol=rtol)
check_grads(nn.relu, (-1.,), order=3, rtol=rtol)
jaxpr = jax.make_jaxpr(jax.grad(nn.relu))(0.)
self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
def testSoftplusValue(self):
val = nn.softplus(89.)
self.assertAllClose(val, 89., check_dtypes=False)
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testEluGrad(self):
check_grads(nn.elu, (1e4,), order=4, eps=1.)
def testEluValue(self):
val = nn.elu(1e4)
self.assertAllClose(val, 1e4, check_dtypes=False)
def testGluValue(self):
val = nn.glu(jnp.array([1.0, 0.0]))
self.assertAllClose(val, jnp.array([0.5]))
@parameterized.parameters(False, True)
def testGelu(self, approximate):
def gelu_reference(x):
return x * scipy.stats.norm.cdf(x)
rng = jtu.rand_default(self.rng())
args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
self._CheckAgainstNumpy(
gelu_reference, partial(nn.gelu, approximate=approximate), args_maker,
check_dtypes=False, tol=1e-3 if approximate else None)
@parameterized.parameters(*itertools.product(
(jnp.float32, jnp.bfloat16, jnp.float16),
(partial(nn.gelu, approximate=False),
partial(nn.gelu, approximate=True),
nn.relu, nn.softplus, nn.sigmoid)))
def testDtypeMatchesInput(self, dtype, fn):
x = jnp.zeros((), dtype=dtype)
out = fn(x)
self.assertEqual(out.dtype, dtype)
def testEluMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.elu(jnp.ones((10 ** 12,)))) # don't oom
def testHardTanhMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.hard_tanh(jnp.ones((10 ** 12,)))) # don't oom
@parameterized.parameters([nn.softmax, nn.log_softmax])
def testSoftmaxWhereMask(self, fn):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(
fn(x, where=m, initial=-jnp.inf), jnp.array([0, 2, 3]))
out_filtered = fn(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testNormalizeWhereMask(self):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(nn.normalize(x, where=m), jnp.array([0, 2, 3]))
out_filtered = nn.normalize(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testOneHot(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3)
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotOutOfBound(self):
actual = nn.one_hot(jnp.array([-1, 3]), 3)
expected = jnp.array([[0., 0., 0.],
[0., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotNonArrayInput(self):
actual = nn.one_hot([0, 1, 2], 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
def testOneHotCustomDtype(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
expected = jnp.array([[True, False, False],
[False, True, False],
[False, False, True]])
self.assertAllClose(actual, expected)
def testOneHotConcretizationError(self):
# https://github.com/google/jax/issues/3654
msg = r"in jax.nn.one_hot argument `num_classes`"
with self.assertRaisesRegex(core.ConcretizationTypeError, msg):
jax.jit(nn.one_hot)(3, 5)
def testOneHotAxis(self):
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]]).T
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
self.assertAllClose(actual, expected)
def testTanhExists(self):
nn.tanh # doesn't crash
def testCustomJVPLeak(self):
# https://github.com/google/jax/issues/8171
@jax.jit
def fwd():
a = jnp.array(1.)
def f(hx, _):
hx = jax.nn.sigmoid(hx + a)
return hx, None
hx = jnp.array(0.)
jax.lax.scan(f, hx, None, length=2)
with jax.checking_leaks():
fwd() # doesn't crash
InitializerRecord = collections.namedtuple(
"InitializerRecord",
["name", "initializer", "shapes", "dtypes"])
ALL_SHAPES = [(2,), (2, 2), (2, 3), (3, 2), (2, 3, 4), (4, 3, 2), (2, 3, 4, 5)]
def initializer_record(name, initializer, dtypes, min_dims=2, max_dims=4):
shapes = [shape for shape in ALL_SHAPES
if min_dims <= len(shape) <= max_dims]
return InitializerRecord(name, initializer, shapes, dtypes)
INITIALIZER_RECS = [
initializer_record("uniform", nn.initializers.uniform, jtu.dtypes.floating, 1),
initializer_record("normal", nn.initializers.normal, jtu.dtypes.inexact, 1),
initializer_record("he_normal", nn.initializers.he_normal, jtu.dtypes.inexact),
initializer_record("he_uniform", nn.initializers.he_uniform, jtu.dtypes.inexact),
initializer_record("glorot_normal", nn.initializers.glorot_normal, jtu.dtypes.inexact),
initializer_record("glorot_uniform", nn.initializers.glorot_uniform, jtu.dtypes.inexact),
initializer_record("lecun_normal", nn.initializers.lecun_normal, jtu.dtypes.inexact),
initializer_record("lecun_uniform", nn.initializers.lecun_uniform, jtu.dtypes.inexact),
initializer_record("orthogonal", nn.initializers.orthogonal, jtu.dtypes.floating, 2, 2),
initializer_record("delta_orthogonal", nn.initializers.delta_orthogonal, jtu.dtypes.floating, 4, 4)
]
class NNInitializersTest(jtu.JaxTestCase):
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer": rec.initializer(),
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializer(self, initializer, shape, dtype):
rng = random.PRNGKey(0)
val = initializer(rng, shape, dtype)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer_provider": rec.initializer,
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializerProvider(self, initializer_provider, shape, dtype):
rng = random.PRNGKey(0)
initializer = initializer_provider(dtype=dtype)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
def | (self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=(0, 1), out_axis=(-2, -1))
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
def testVarianceScalingBatchAxis(self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=0, out_axis=(2, 3), batch_axis=1)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
if __name__ == "__main__":
absltest.main(testLoader=jtu.JaxTestLoader())
| testVarianceScalingMultiAxis | identifier_name |
nn_test.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for nn module."""
import collections
from functools import partial
import itertools
from absl.testing import absltest
from absl.testing import parameterized
import scipy.stats
from jax import core
from jax._src import test_util as jtu
from jax.test_util import check_grads
from jax import nn
from jax import random
import jax
import jax.numpy as jnp
from jax.config import config
config.parse_flags_with_absl()
class NNFunctionsTest(jtu.JaxTestCase):
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testSoftplusGrad(self):
check_grads(nn.softplus, (1e-8,), order=4,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradZero(self):
check_grads(nn.softplus, (0.,), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradInf(self):
self.assertAllClose(
1., jax.grad(nn.softplus)(float('inf')))
def testSoftplusGradNegInf(self):
check_grads(nn.softplus, (-float('inf'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
def testSoftplusGradNan(self):
check_grads(nn.softplus, (float('nan'),), order=1,
rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
@parameterized.parameters([int, float] + jtu.dtypes.floating + jtu.dtypes.integer)
def testSoftplusZero(self, dtype):
self.assertEqual(jnp.log(dtype(2)), nn.softplus(dtype(0)))
def testReluGrad(self):
rtol = 1e-2 if jtu.device_under_test() == "tpu" else None
check_grads(nn.relu, (1.,), order=3, rtol=rtol)
check_grads(nn.relu, (-1.,), order=3, rtol=rtol)
jaxpr = jax.make_jaxpr(jax.grad(nn.relu))(0.)
self.assertGreaterEqual(len(jaxpr.jaxpr.eqns), 2)
def testSoftplusValue(self):
val = nn.softplus(89.)
self.assertAllClose(val, 89., check_dtypes=False)
@jtu.skip_on_flag("jax_skip_slow_tests", True)
def testEluGrad(self):
check_grads(nn.elu, (1e4,), order=4, eps=1.)
def testEluValue(self):
val = nn.elu(1e4)
self.assertAllClose(val, 1e4, check_dtypes=False)
def testGluValue(self):
val = nn.glu(jnp.array([1.0, 0.0]))
self.assertAllClose(val, jnp.array([0.5]))
@parameterized.parameters(False, True)
def testGelu(self, approximate):
def gelu_reference(x):
|
rng = jtu.rand_default(self.rng())
args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
self._CheckAgainstNumpy(
gelu_reference, partial(nn.gelu, approximate=approximate), args_maker,
check_dtypes=False, tol=1e-3 if approximate else None)
@parameterized.parameters(*itertools.product(
(jnp.float32, jnp.bfloat16, jnp.float16),
(partial(nn.gelu, approximate=False),
partial(nn.gelu, approximate=True),
nn.relu, nn.softplus, nn.sigmoid)))
def testDtypeMatchesInput(self, dtype, fn):
x = jnp.zeros((), dtype=dtype)
out = fn(x)
self.assertEqual(out.dtype, dtype)
def testEluMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.elu(jnp.ones((10 ** 12,)))) # don't oom
def testHardTanhMemory(self):
# see https://github.com/google/jax/pull/1640
with jax.enable_checks(False): # With checks we materialize the array
jax.make_jaxpr(lambda: nn.hard_tanh(jnp.ones((10 ** 12,)))) # don't oom
@parameterized.parameters([nn.softmax, nn.log_softmax])
def testSoftmaxWhereMask(self, fn):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(
fn(x, where=m, initial=-jnp.inf), jnp.array([0, 2, 3]))
out_filtered = fn(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testNormalizeWhereMask(self):
x = jnp.array([5.5, 1.3, -4.2, 0.9])
m = jnp.array([True, False, True, True])
x_filtered = jnp.take(x, jnp.array([0, 2, 3]))
out_masked = jnp.take(nn.normalize(x, where=m), jnp.array([0, 2, 3]))
out_filtered = nn.normalize(x_filtered)
self.assertAllClose(out_masked, out_filtered)
def testOneHot(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3)
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotOutOfBound(self):
actual = nn.one_hot(jnp.array([-1, 3]), 3)
expected = jnp.array([[0., 0., 0.],
[0., 0., 0.]])
self.assertAllClose(actual, expected)
def testOneHotNonArrayInput(self):
actual = nn.one_hot([0, 1, 2], 3)
expected = jnp.array([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
self.assertAllClose(actual, expected)
def testOneHotCustomDtype(self):
actual = nn.one_hot(jnp.array([0, 1, 2]), 3, dtype=jnp.bool_)
expected = jnp.array([[True, False, False],
[False, True, False],
[False, False, True]])
self.assertAllClose(actual, expected)
def testOneHotConcretizationError(self):
# https://github.com/google/jax/issues/3654
msg = r"in jax.nn.one_hot argument `num_classes`"
with self.assertRaisesRegex(core.ConcretizationTypeError, msg):
jax.jit(nn.one_hot)(3, 5)
def testOneHotAxis(self):
expected = jnp.array([[0., 1., 0.],
[0., 0., 1.],
[1., 0., 0.]]).T
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=0)
self.assertAllClose(actual, expected)
actual = nn.one_hot(jnp.array([1, 2, 0]), 3, axis=-2)
self.assertAllClose(actual, expected)
def testTanhExists(self):
nn.tanh # doesn't crash
def testCustomJVPLeak(self):
# https://github.com/google/jax/issues/8171
@jax.jit
def fwd():
a = jnp.array(1.)
def f(hx, _):
hx = jax.nn.sigmoid(hx + a)
return hx, None
hx = jnp.array(0.)
jax.lax.scan(f, hx, None, length=2)
with jax.checking_leaks():
fwd() # doesn't crash
InitializerRecord = collections.namedtuple(
"InitializerRecord",
["name", "initializer", "shapes", "dtypes"])
ALL_SHAPES = [(2,), (2, 2), (2, 3), (3, 2), (2, 3, 4), (4, 3, 2), (2, 3, 4, 5)]
def initializer_record(name, initializer, dtypes, min_dims=2, max_dims=4):
shapes = [shape for shape in ALL_SHAPES
if min_dims <= len(shape) <= max_dims]
return InitializerRecord(name, initializer, shapes, dtypes)
INITIALIZER_RECS = [
initializer_record("uniform", nn.initializers.uniform, jtu.dtypes.floating, 1),
initializer_record("normal", nn.initializers.normal, jtu.dtypes.inexact, 1),
initializer_record("he_normal", nn.initializers.he_normal, jtu.dtypes.inexact),
initializer_record("he_uniform", nn.initializers.he_uniform, jtu.dtypes.inexact),
initializer_record("glorot_normal", nn.initializers.glorot_normal, jtu.dtypes.inexact),
initializer_record("glorot_uniform", nn.initializers.glorot_uniform, jtu.dtypes.inexact),
initializer_record("lecun_normal", nn.initializers.lecun_normal, jtu.dtypes.inexact),
initializer_record("lecun_uniform", nn.initializers.lecun_uniform, jtu.dtypes.inexact),
initializer_record("orthogonal", nn.initializers.orthogonal, jtu.dtypes.floating, 2, 2),
initializer_record("delta_orthogonal", nn.initializers.delta_orthogonal, jtu.dtypes.floating, 4, 4)
]
class NNInitializersTest(jtu.JaxTestCase):
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer": rec.initializer(),
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializer(self, initializer, shape, dtype):
rng = random.PRNGKey(0)
val = initializer(rng, shape, dtype)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name":
"_{}_{}".format(
rec.name,
jtu.format_shape_dtype_string(shape, dtype)),
"initializer_provider": rec.initializer,
"shape": shape, "dtype": dtype}
for rec in INITIALIZER_RECS
for shape in rec.shapes
for dtype in rec.dtypes))
def testInitializerProvider(self, initializer_provider, shape, dtype):
rng = random.PRNGKey(0)
initializer = initializer_provider(dtype=dtype)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
def testVarianceScalingMultiAxis(self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=(0, 1), out_axis=(-2, -1))
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
def testVarianceScalingBatchAxis(self):
rng = random.PRNGKey(0)
shape = (2, 3, 4, 5)
initializer = nn.initializers.variance_scaling(
scale=1.0, mode='fan_avg', distribution='truncated_normal',
in_axis=0, out_axis=(2, 3), batch_axis=1)
val = initializer(rng, shape)
self.assertEqual(shape, jnp.shape(val))
if __name__ == "__main__":
absltest.main(testLoader=jtu.JaxTestLoader())
| return x * scipy.stats.norm.cdf(x) | identifier_body |
schroedinger_app.py | from __future__ import absolute_import
import os
import time
from math import pi
import numpy as nm
from sfepy.base.base import Struct, output, get_default
from sfepy.applications import PDESolverApp
from sfepy.solvers import Solver
from six.moves import range
def guess_n_eigs(n_electron, n_eigs=None):
"""
Guess the number of eigenvalues (energies) to compute so that the smearing
iteration converges. Passing n_eigs overrides the guess.
"""
if n_eigs is not None: return n_eigs
if n_electron > 2:
n_eigs = int(1.2 * ((0.5 * n_electron) + 5))
else:
n_eigs = n_electron
return n_eigs
class SchroedingerApp(PDESolverApp):
"""
Base application for electronic structure calculations.
Subclasses should typically override `solve_eigen_problem()` method.
This class allows solving only simple single electron problems,
e.g. well, oscillator, hydrogen atom and boron atom with 1 electron.
"""
@staticmethod
def process_options(options):
|
def __init__(self, conf, options, output_prefix, **kwargs):
PDESolverApp.__init__(self, conf, options, output_prefix,
init_equations=False)
def setup_options(self):
PDESolverApp.setup_options(self)
opts = SchroedingerApp.process_options(self.conf.options)
self.app_options += opts
def setup_output(self):
"""
Setup various file names for the output directory given by
`self.problem.output_dir`.
"""
output_dir = self.problem.output_dir
opts = self.app_options
opts.output_dir = output_dir
self.mesh_results_name = os.path.join(opts.output_dir,
self.problem.get_output_name())
self.eig_results_name = os.path.join(opts.output_dir,
self.problem.ofn_trunk
+ '_eigs.txt')
def call(self):
# This cannot be in __init__(), as parametric calls may change
# the output directory.
self.setup_output()
evp = self.solve_eigen_problem()
output("solution saved to %s" % self.problem.get_output_name())
output("in %s" % self.app_options.output_dir)
if self.post_process_hook_final is not None: # User postprocessing.
self.post_process_hook_final(self.problem, evp=evp)
return evp
def solve_eigen_problem(self):
options = self.options
opts = self.app_options
pb = self.problem
dim = pb.domain.mesh.dim
pb.set_equations(pb.conf.equations)
pb.time_update()
output('assembling lhs...')
tt = time.clock()
mtx_a = pb.evaluate(pb.conf.equations['lhs'], mode='weak',
auto_init=True, dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
output('assembling rhs...')
tt = time.clock()
mtx_b = pb.evaluate(pb.conf.equations['rhs'], mode='weak',
dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
n_eigs = get_default(opts.n_eigs, mtx_a.shape[0])
output('computing resonance frequencies...')
eig = Solver.any_from_conf(pb.get_solver_conf(opts.eigen_solver))
eigs, mtx_s_phi = eig(mtx_a, mtx_b, n_eigs, eigenvectors=True)
output('...done')
bounding_box = pb.domain.mesh.get_bounding_box()
# this assumes a box (3D), or a square (2D):
a = bounding_box[1][0] - bounding_box[0][0]
E_exact = None
if options.hydrogen or options.boron:
if options.hydrogen:
Z = 1
elif options.boron:
Z = 5
if dim == 2:
E_exact = [-float(Z)**2/2/(n-0.5)**2/4
for n in [1]+[2]*3+[3]*5 + [4]*8 + [5]*15]
elif dim == 3:
E_exact = [-float(Z)**2/2/n**2 for n in [1]+[2]*2**2+[3]*3**2 ]
if options.well:
if dim == 2:
E_exact = [pi**2/(2*a**2)*x
for x in [2, 5, 5, 8, 10, 10, 13, 13,
17, 17, 18, 20, 20 ] ]
elif dim == 3:
E_exact = [pi**2/(2*a**2)*x
for x in [3, 6, 6, 6, 9, 9, 9, 11, 11,
11, 12, 14, 14, 14, 14, 14,
14, 17, 17, 17] ]
if options.oscillator:
if dim == 2:
E_exact = [1] + [2]*2 + [3]*3 + [4]*4 + [5]*5 + [6]*6
elif dim == 3:
E_exact = [float(1)/2+x for x in [1]+[2]*3+[3]*6+[4]*10 ]
if E_exact is not None:
output("a=%f" % a)
output("Energies:")
output("n exact FEM error")
for i, e in enumerate(eigs):
from numpy import NaN
if i < len(E_exact):
exact = E_exact[i]
err = 100*abs((exact - e)/exact)
else:
exact = NaN
err = NaN
output("%d: %.8f %.8f %5.2f%%" % (i, exact, e, err))
else:
output(eigs)
mtx_phi = self.make_full(mtx_s_phi)
self.save_results(eigs, mtx_phi)
return Struct(pb=pb, eigs=eigs, mtx_phi=mtx_phi)
def make_full(self, mtx_s_phi):
variables = self.problem.get_variables()
mtx_phi = nm.empty((variables.di.ptr[-1], mtx_s_phi.shape[1]),
dtype=nm.float64)
for ii in range(mtx_s_phi.shape[1]):
mtx_phi[:,ii] = variables.make_full_vec(mtx_s_phi[:,ii])
return mtx_phi
def save_results(self, eigs, mtx_phi, out=None,
mesh_results_name=None, eig_results_name=None):
mesh_results_name = get_default(mesh_results_name,
self.mesh_results_name)
eig_results_name = get_default(eig_results_name,
self.eig_results_name)
pb = self.problem
save = self.app_options.save_eig_vectors
n_eigs = self.app_options.n_eigs
out = get_default(out, {})
state = pb.create_state()
aux = {}
for ii in range(eigs.shape[0]):
if save is not None:
if (ii > save[0]) and (ii < (n_eigs - save[1])): continue
state.set_full(mtx_phi[:,ii])
aux = state.create_output_dict()
key = list(aux.keys())[0]
out[key+'%03d' % ii] = aux[key]
if aux.get('__mesh__') is not None:
out['__mesh__'] = aux['__mesh__']
pb.save_state(mesh_results_name, out=out)
fd = open(eig_results_name, 'w')
eigs.tofile(fd, ' ')
fd.close()
| """
Application options setup. Sets default values for missing
non-compulsory options.
Options:
save_eig_vectors : (from_largest, from_smallest) or None
If None, save all.
"""
get = options.get
n_electron = get('n_electron', 5)
n_eigs = guess_n_eigs(n_electron, n_eigs=get('n_eigs', None))
return Struct(eigen_solver=get('eigen_solver', None,
'missing "eigen_solver" in options!'),
n_electron=n_electron,
n_eigs=n_eigs,
save_eig_vectors=get('save_eig_vectors', None)) | identifier_body |
schroedinger_app.py | from __future__ import absolute_import
import os
import time
from math import pi
import numpy as nm
from sfepy.base.base import Struct, output, get_default
from sfepy.applications import PDESolverApp
from sfepy.solvers import Solver
from six.moves import range
def guess_n_eigs(n_electron, n_eigs=None):
"""
Guess the number of eigenvalues (energies) to compute so that the smearing
iteration converges. Passing n_eigs overrides the guess.
"""
if n_eigs is not None: return n_eigs
if n_electron > 2:
n_eigs = int(1.2 * ((0.5 * n_electron) + 5))
else:
n_eigs = n_electron
return n_eigs
class SchroedingerApp(PDESolverApp):
"""
Base application for electronic structure calculations.
Subclasses should typically override `solve_eigen_problem()` method.
This class allows solving only simple single electron problems,
e.g. well, oscillator, hydrogen atom and boron atom with 1 electron.
"""
@staticmethod
def process_options(options):
"""
Application options setup. Sets default values for missing
non-compulsory options.
Options:
save_eig_vectors : (from_largest, from_smallest) or None
If None, save all.
"""
get = options.get
n_electron = get('n_electron', 5)
n_eigs = guess_n_eigs(n_electron, n_eigs=get('n_eigs', None))
return Struct(eigen_solver=get('eigen_solver', None,
'missing "eigen_solver" in options!'),
n_electron=n_electron,
n_eigs=n_eigs,
save_eig_vectors=get('save_eig_vectors', None))
def __init__(self, conf, options, output_prefix, **kwargs):
PDESolverApp.__init__(self, conf, options, output_prefix,
init_equations=False)
def setup_options(self):
PDESolverApp.setup_options(self)
opts = SchroedingerApp.process_options(self.conf.options)
self.app_options += opts
def setup_output(self):
"""
Setup various file names for the output directory given by
`self.problem.output_dir`.
"""
output_dir = self.problem.output_dir
opts = self.app_options
opts.output_dir = output_dir
self.mesh_results_name = os.path.join(opts.output_dir,
self.problem.get_output_name())
self.eig_results_name = os.path.join(opts.output_dir,
self.problem.ofn_trunk
+ '_eigs.txt')
def call(self):
# This cannot be in __init__(), as parametric calls may change
# the output directory.
self.setup_output()
evp = self.solve_eigen_problem()
output("solution saved to %s" % self.problem.get_output_name())
output("in %s" % self.app_options.output_dir)
if self.post_process_hook_final is not None: # User postprocessing.
self.post_process_hook_final(self.problem, evp=evp)
return evp
def solve_eigen_problem(self):
options = self.options
opts = self.app_options
pb = self.problem
dim = pb.domain.mesh.dim
pb.set_equations(pb.conf.equations)
pb.time_update()
output('assembling lhs...')
tt = time.clock()
mtx_a = pb.evaluate(pb.conf.equations['lhs'], mode='weak',
auto_init=True, dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
output('assembling rhs...')
tt = time.clock()
mtx_b = pb.evaluate(pb.conf.equations['rhs'], mode='weak',
dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
n_eigs = get_default(opts.n_eigs, mtx_a.shape[0])
output('computing resonance frequencies...')
eig = Solver.any_from_conf(pb.get_solver_conf(opts.eigen_solver))
eigs, mtx_s_phi = eig(mtx_a, mtx_b, n_eigs, eigenvectors=True)
output('...done')
bounding_box = pb.domain.mesh.get_bounding_box()
# this assumes a box (3D), or a square (2D):
a = bounding_box[1][0] - bounding_box[0][0]
E_exact = None
if options.hydrogen or options.boron:
if options.hydrogen:
Z = 1
elif options.boron:
Z = 5
if dim == 2:
E_exact = [-float(Z)**2/2/(n-0.5)**2/4
for n in [1]+[2]*3+[3]*5 + [4]*8 + [5]*15]
elif dim == 3:
E_exact = [-float(Z)**2/2/n**2 for n in [1]+[2]*2**2+[3]*3**2 ]
if options.well:
if dim == 2:
E_exact = [pi**2/(2*a**2)*x
for x in [2, 5, 5, 8, 10, 10, 13, 13,
17, 17, 18, 20, 20 ] ]
elif dim == 3:
E_exact = [pi**2/(2*a**2)*x
for x in [3, 6, 6, 6, 9, 9, 9, 11, 11,
11, 12, 14, 14, 14, 14, 14,
14, 17, 17, 17] ]
if options.oscillator:
if dim == 2:
E_exact = [1] + [2]*2 + [3]*3 + [4]*4 + [5]*5 + [6]*6
elif dim == 3:
E_exact = [float(1)/2+x for x in [1]+[2]*3+[3]*6+[4]*10 ]
if E_exact is not None:
output("a=%f" % a)
output("Energies:")
output("n exact FEM error")
for i, e in enumerate(eigs):
from numpy import NaN
if i < len(E_exact):
exact = E_exact[i]
err = 100*abs((exact - e)/exact)
else:
exact = NaN
err = NaN
output("%d: %.8f %.8f %5.2f%%" % (i, exact, e, err))
else:
output(eigs)
mtx_phi = self.make_full(mtx_s_phi)
self.save_results(eigs, mtx_phi)
return Struct(pb=pb, eigs=eigs, mtx_phi=mtx_phi)
def make_full(self, mtx_s_phi):
variables = self.problem.get_variables()
mtx_phi = nm.empty((variables.di.ptr[-1], mtx_s_phi.shape[1]),
dtype=nm.float64)
for ii in range(mtx_s_phi.shape[1]):
mtx_phi[:,ii] = variables.make_full_vec(mtx_s_phi[:,ii])
return mtx_phi
def save_results(self, eigs, mtx_phi, out=None,
mesh_results_name=None, eig_results_name=None):
mesh_results_name = get_default(mesh_results_name,
self.mesh_results_name)
eig_results_name = get_default(eig_results_name,
self.eig_results_name)
pb = self.problem
save = self.app_options.save_eig_vectors
n_eigs = self.app_options.n_eigs
out = get_default(out, {})
state = pb.create_state()
aux = {}
for ii in range(eigs.shape[0]):
if save is not None:
|
state.set_full(mtx_phi[:,ii])
aux = state.create_output_dict()
key = list(aux.keys())[0]
out[key+'%03d' % ii] = aux[key]
if aux.get('__mesh__') is not None:
out['__mesh__'] = aux['__mesh__']
pb.save_state(mesh_results_name, out=out)
fd = open(eig_results_name, 'w')
eigs.tofile(fd, ' ')
fd.close()
| if (ii > save[0]) and (ii < (n_eigs - save[1])): continue | conditional_block |
schroedinger_app.py | from __future__ import absolute_import
import os
import time
from math import pi
import numpy as nm
from sfepy.base.base import Struct, output, get_default
from sfepy.applications import PDESolverApp
from sfepy.solvers import Solver
from six.moves import range
def guess_n_eigs(n_electron, n_eigs=None):
"""
Guess the number of eigenvalues (energies) to compute so that the smearing
iteration converges. Passing n_eigs overrides the guess.
"""
if n_eigs is not None: return n_eigs
if n_electron > 2:
n_eigs = int(1.2 * ((0.5 * n_electron) + 5))
else:
n_eigs = n_electron
return n_eigs
class SchroedingerApp(PDESolverApp):
"""
Base application for electronic structure calculations.
Subclasses should typically override `solve_eigen_problem()` method.
This class allows solving only simple single electron problems,
e.g. well, oscillator, hydrogen atom and boron atom with 1 electron.
"""
@staticmethod
def process_options(options):
"""
Application options setup. Sets default values for missing
non-compulsory options.
Options: |
n_electron = get('n_electron', 5)
n_eigs = guess_n_eigs(n_electron, n_eigs=get('n_eigs', None))
return Struct(eigen_solver=get('eigen_solver', None,
'missing "eigen_solver" in options!'),
n_electron=n_electron,
n_eigs=n_eigs,
save_eig_vectors=get('save_eig_vectors', None))
def __init__(self, conf, options, output_prefix, **kwargs):
PDESolverApp.__init__(self, conf, options, output_prefix,
init_equations=False)
def setup_options(self):
PDESolverApp.setup_options(self)
opts = SchroedingerApp.process_options(self.conf.options)
self.app_options += opts
def setup_output(self):
"""
Setup various file names for the output directory given by
`self.problem.output_dir`.
"""
output_dir = self.problem.output_dir
opts = self.app_options
opts.output_dir = output_dir
self.mesh_results_name = os.path.join(opts.output_dir,
self.problem.get_output_name())
self.eig_results_name = os.path.join(opts.output_dir,
self.problem.ofn_trunk
+ '_eigs.txt')
def call(self):
# This cannot be in __init__(), as parametric calls may change
# the output directory.
self.setup_output()
evp = self.solve_eigen_problem()
output("solution saved to %s" % self.problem.get_output_name())
output("in %s" % self.app_options.output_dir)
if self.post_process_hook_final is not None: # User postprocessing.
self.post_process_hook_final(self.problem, evp=evp)
return evp
def solve_eigen_problem(self):
options = self.options
opts = self.app_options
pb = self.problem
dim = pb.domain.mesh.dim
pb.set_equations(pb.conf.equations)
pb.time_update()
output('assembling lhs...')
tt = time.clock()
mtx_a = pb.evaluate(pb.conf.equations['lhs'], mode='weak',
auto_init=True, dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
output('assembling rhs...')
tt = time.clock()
mtx_b = pb.evaluate(pb.conf.equations['rhs'], mode='weak',
dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
n_eigs = get_default(opts.n_eigs, mtx_a.shape[0])
output('computing resonance frequencies...')
eig = Solver.any_from_conf(pb.get_solver_conf(opts.eigen_solver))
eigs, mtx_s_phi = eig(mtx_a, mtx_b, n_eigs, eigenvectors=True)
output('...done')
bounding_box = pb.domain.mesh.get_bounding_box()
# this assumes a box (3D), or a square (2D):
a = bounding_box[1][0] - bounding_box[0][0]
E_exact = None
if options.hydrogen or options.boron:
if options.hydrogen:
Z = 1
elif options.boron:
Z = 5
if dim == 2:
E_exact = [-float(Z)**2/2/(n-0.5)**2/4
for n in [1]+[2]*3+[3]*5 + [4]*8 + [5]*15]
elif dim == 3:
E_exact = [-float(Z)**2/2/n**2 for n in [1]+[2]*2**2+[3]*3**2 ]
if options.well:
if dim == 2:
E_exact = [pi**2/(2*a**2)*x
for x in [2, 5, 5, 8, 10, 10, 13, 13,
17, 17, 18, 20, 20 ] ]
elif dim == 3:
E_exact = [pi**2/(2*a**2)*x
for x in [3, 6, 6, 6, 9, 9, 9, 11, 11,
11, 12, 14, 14, 14, 14, 14,
14, 17, 17, 17] ]
if options.oscillator:
if dim == 2:
E_exact = [1] + [2]*2 + [3]*3 + [4]*4 + [5]*5 + [6]*6
elif dim == 3:
E_exact = [float(1)/2+x for x in [1]+[2]*3+[3]*6+[4]*10 ]
if E_exact is not None:
output("a=%f" % a)
output("Energies:")
output("n exact FEM error")
for i, e in enumerate(eigs):
from numpy import NaN
if i < len(E_exact):
exact = E_exact[i]
err = 100*abs((exact - e)/exact)
else:
exact = NaN
err = NaN
output("%d: %.8f %.8f %5.2f%%" % (i, exact, e, err))
else:
output(eigs)
mtx_phi = self.make_full(mtx_s_phi)
self.save_results(eigs, mtx_phi)
return Struct(pb=pb, eigs=eigs, mtx_phi=mtx_phi)
def make_full(self, mtx_s_phi):
variables = self.problem.get_variables()
mtx_phi = nm.empty((variables.di.ptr[-1], mtx_s_phi.shape[1]),
dtype=nm.float64)
for ii in range(mtx_s_phi.shape[1]):
mtx_phi[:,ii] = variables.make_full_vec(mtx_s_phi[:,ii])
return mtx_phi
def save_results(self, eigs, mtx_phi, out=None,
mesh_results_name=None, eig_results_name=None):
mesh_results_name = get_default(mesh_results_name,
self.mesh_results_name)
eig_results_name = get_default(eig_results_name,
self.eig_results_name)
pb = self.problem
save = self.app_options.save_eig_vectors
n_eigs = self.app_options.n_eigs
out = get_default(out, {})
state = pb.create_state()
aux = {}
for ii in range(eigs.shape[0]):
if save is not None:
if (ii > save[0]) and (ii < (n_eigs - save[1])): continue
state.set_full(mtx_phi[:,ii])
aux = state.create_output_dict()
key = list(aux.keys())[0]
out[key+'%03d' % ii] = aux[key]
if aux.get('__mesh__') is not None:
out['__mesh__'] = aux['__mesh__']
pb.save_state(mesh_results_name, out=out)
fd = open(eig_results_name, 'w')
eigs.tofile(fd, ' ')
fd.close() |
save_eig_vectors : (from_largest, from_smallest) or None
If None, save all.
"""
get = options.get | random_line_split |
schroedinger_app.py | from __future__ import absolute_import
import os
import time
from math import pi
import numpy as nm
from sfepy.base.base import Struct, output, get_default
from sfepy.applications import PDESolverApp
from sfepy.solvers import Solver
from six.moves import range
def guess_n_eigs(n_electron, n_eigs=None):
"""
Guess the number of eigenvalues (energies) to compute so that the smearing
iteration converges. Passing n_eigs overrides the guess.
"""
if n_eigs is not None: return n_eigs
if n_electron > 2:
n_eigs = int(1.2 * ((0.5 * n_electron) + 5))
else:
n_eigs = n_electron
return n_eigs
class | (PDESolverApp):
"""
Base application for electronic structure calculations.
Subclasses should typically override `solve_eigen_problem()` method.
This class allows solving only simple single electron problems,
e.g. well, oscillator, hydrogen atom and boron atom with 1 electron.
"""
@staticmethod
def process_options(options):
"""
Application options setup. Sets default values for missing
non-compulsory options.
Options:
save_eig_vectors : (from_largest, from_smallest) or None
If None, save all.
"""
get = options.get
n_electron = get('n_electron', 5)
n_eigs = guess_n_eigs(n_electron, n_eigs=get('n_eigs', None))
return Struct(eigen_solver=get('eigen_solver', None,
'missing "eigen_solver" in options!'),
n_electron=n_electron,
n_eigs=n_eigs,
save_eig_vectors=get('save_eig_vectors', None))
def __init__(self, conf, options, output_prefix, **kwargs):
PDESolverApp.__init__(self, conf, options, output_prefix,
init_equations=False)
def setup_options(self):
PDESolverApp.setup_options(self)
opts = SchroedingerApp.process_options(self.conf.options)
self.app_options += opts
def setup_output(self):
"""
Setup various file names for the output directory given by
`self.problem.output_dir`.
"""
output_dir = self.problem.output_dir
opts = self.app_options
opts.output_dir = output_dir
self.mesh_results_name = os.path.join(opts.output_dir,
self.problem.get_output_name())
self.eig_results_name = os.path.join(opts.output_dir,
self.problem.ofn_trunk
+ '_eigs.txt')
def call(self):
# This cannot be in __init__(), as parametric calls may change
# the output directory.
self.setup_output()
evp = self.solve_eigen_problem()
output("solution saved to %s" % self.problem.get_output_name())
output("in %s" % self.app_options.output_dir)
if self.post_process_hook_final is not None: # User postprocessing.
self.post_process_hook_final(self.problem, evp=evp)
return evp
def solve_eigen_problem(self):
options = self.options
opts = self.app_options
pb = self.problem
dim = pb.domain.mesh.dim
pb.set_equations(pb.conf.equations)
pb.time_update()
output('assembling lhs...')
tt = time.clock()
mtx_a = pb.evaluate(pb.conf.equations['lhs'], mode='weak',
auto_init=True, dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
output('assembling rhs...')
tt = time.clock()
mtx_b = pb.evaluate(pb.conf.equations['rhs'], mode='weak',
dw_mode='matrix')
output('...done in %.2f s' % (time.clock() - tt))
n_eigs = get_default(opts.n_eigs, mtx_a.shape[0])
output('computing resonance frequencies...')
eig = Solver.any_from_conf(pb.get_solver_conf(opts.eigen_solver))
eigs, mtx_s_phi = eig(mtx_a, mtx_b, n_eigs, eigenvectors=True)
output('...done')
bounding_box = pb.domain.mesh.get_bounding_box()
# this assumes a box (3D), or a square (2D):
a = bounding_box[1][0] - bounding_box[0][0]
E_exact = None
if options.hydrogen or options.boron:
if options.hydrogen:
Z = 1
elif options.boron:
Z = 5
if dim == 2:
E_exact = [-float(Z)**2/2/(n-0.5)**2/4
for n in [1]+[2]*3+[3]*5 + [4]*8 + [5]*15]
elif dim == 3:
E_exact = [-float(Z)**2/2/n**2 for n in [1]+[2]*2**2+[3]*3**2 ]
if options.well:
if dim == 2:
E_exact = [pi**2/(2*a**2)*x
for x in [2, 5, 5, 8, 10, 10, 13, 13,
17, 17, 18, 20, 20 ] ]
elif dim == 3:
E_exact = [pi**2/(2*a**2)*x
for x in [3, 6, 6, 6, 9, 9, 9, 11, 11,
11, 12, 14, 14, 14, 14, 14,
14, 17, 17, 17] ]
if options.oscillator:
if dim == 2:
E_exact = [1] + [2]*2 + [3]*3 + [4]*4 + [5]*5 + [6]*6
elif dim == 3:
E_exact = [float(1)/2+x for x in [1]+[2]*3+[3]*6+[4]*10 ]
if E_exact is not None:
output("a=%f" % a)
output("Energies:")
output("n exact FEM error")
for i, e in enumerate(eigs):
from numpy import NaN
if i < len(E_exact):
exact = E_exact[i]
err = 100*abs((exact - e)/exact)
else:
exact = NaN
err = NaN
output("%d: %.8f %.8f %5.2f%%" % (i, exact, e, err))
else:
output(eigs)
mtx_phi = self.make_full(mtx_s_phi)
self.save_results(eigs, mtx_phi)
return Struct(pb=pb, eigs=eigs, mtx_phi=mtx_phi)
def make_full(self, mtx_s_phi):
variables = self.problem.get_variables()
mtx_phi = nm.empty((variables.di.ptr[-1], mtx_s_phi.shape[1]),
dtype=nm.float64)
for ii in range(mtx_s_phi.shape[1]):
mtx_phi[:,ii] = variables.make_full_vec(mtx_s_phi[:,ii])
return mtx_phi
def save_results(self, eigs, mtx_phi, out=None,
mesh_results_name=None, eig_results_name=None):
mesh_results_name = get_default(mesh_results_name,
self.mesh_results_name)
eig_results_name = get_default(eig_results_name,
self.eig_results_name)
pb = self.problem
save = self.app_options.save_eig_vectors
n_eigs = self.app_options.n_eigs
out = get_default(out, {})
state = pb.create_state()
aux = {}
for ii in range(eigs.shape[0]):
if save is not None:
if (ii > save[0]) and (ii < (n_eigs - save[1])): continue
state.set_full(mtx_phi[:,ii])
aux = state.create_output_dict()
key = list(aux.keys())[0]
out[key+'%03d' % ii] = aux[key]
if aux.get('__mesh__') is not None:
out['__mesh__'] = aux['__mesh__']
pb.save_state(mesh_results_name, out=out)
fd = open(eig_results_name, 'w')
eigs.tofile(fd, ' ')
fd.close()
| SchroedingerApp | identifier_name |
info_result.rs | //
// imag - the personal information management suite for the commandline
// Copyright (C) 2015-2020 Matthias Beyer <mail@beyermatthias.de> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
// Generates a extension for the `Result<T, E>`, named `DebugResult` which has functionality to
// print either `T` or `E` via `info!()`.
generate_result_logging_extension!( | map_info,
map_info_str,
map_info_err,
map_info_err_str,
|s| { info!("{}", s); }
); | InfoResult, | random_line_split |
app.component.ts | import {Component} from '@angular/core';
import {OnInit, AfterContentInit, AfterViewInit} from '@angular/core';
@Component({
selector: 'yw-app',
template: `
<div class="container-fluid">
<yw-messages [messages]="logMessages">
<header>
<h2>Messages Logged</h2>
</header>
<footer>
<p>--- End of messages</p>
</footer>
</yw-messages>
</div>
`
})
export class AppComponent implements OnInit,
AfterContentInit, AfterViewInit {
logMessages: string[] = [];
count = 0;
ngOnInit() {
this.log('ngOnInit');
}
ngAfterContentInit() {
this.log('ngAfterContentInit');
}
ngAfterViewInit() |
log(message: string) {
this.logMessages.push(`${++this.count}: ${message}`);
}
} | {
this.log('ngAfterViewInit');
} | identifier_body |
app.component.ts | import {Component} from '@angular/core';
import {OnInit, AfterContentInit, AfterViewInit} from '@angular/core';
@Component({
selector: 'yw-app',
template: `
<div class="container-fluid">
<yw-messages [messages]="logMessages">
<header>
<h2>Messages Logged</h2>
</header>
<footer>
<p>--- End of messages</p>
</footer>
</yw-messages>
</div>
`
})
export class AppComponent implements OnInit,
AfterContentInit, AfterViewInit {
logMessages: string[] = [];
count = 0;
ngOnInit() {
this.log('ngOnInit');
}
ngAfterContentInit() {
this.log('ngAfterContentInit');
}
| () {
this.log('ngAfterViewInit');
}
log(message: string) {
this.logMessages.push(`${++this.count}: ${message}`);
}
} | ngAfterViewInit | identifier_name |
app.component.ts | import {Component} from '@angular/core';
import {OnInit, AfterContentInit, AfterViewInit} from '@angular/core';
@Component({
selector: 'yw-app',
template: `
<div class="container-fluid">
<yw-messages [messages]="logMessages">
<header>
<h2>Messages Logged</h2>
</header>
<footer>
<p>--- End of messages</p>
</footer>
</yw-messages>
</div>
`
})
export class AppComponent implements OnInit,
AfterContentInit, AfterViewInit {
logMessages: string[] = [];
count = 0;
ngOnInit() {
this.log('ngOnInit');
}
ngAfterContentInit() {
this.log('ngAfterContentInit');
}
ngAfterViewInit() { |
log(message: string) {
this.logMessages.push(`${++this.count}: ${message}`);
}
} | this.log('ngAfterViewInit');
} | random_line_split |
steps.js | module.exports = {
'/': {
backLink: '/../priority_service_170705/filter/uncancelled',
next: '/what-you-need'
},
'/before-you-continue-overseas': {
backLink: '/../priority_service_170705/overseas/uncancelled',
next: '/what-you-need-overseas'
},
'/what-you-need': {
backLink: './',
next: '/you-need-a-photo'
},
'/what-you-need-overseas': {
backLink: '/../priority_service_170705/overseas/try-service',
next: '/you-need-a-photo-overseas'
},
'/you-need-a-photo': {
backLink: '../book-appointment/confirmation-scenario-1',
next: '/choose-photo-method'
},
'/you-need-a-photo-overseas': {
backLink: './what-you-need-overseas',
next: '/choose-photo-method'
},
'/you-need-a-photo-v3': {
backLink: './what-you-need', | '/choose-photo-method': {
fields: ['choose-photo'],
next: '/../upload'
},
'/choose-photo-method-overseas': {
fields: ['choose-photo-overseas'],
next: '/../upload'
}
}; | next: '/choose-photo-method'
}, | random_line_split |
snack-bar-container.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimationEvent} from '@angular/animations';
import {AriaLivePoliteness} from '@angular/cdk/a11y';
import {Platform} from '@angular/cdk/platform';
import {
BasePortalOutlet,
CdkPortalOutlet,
ComponentPortal,
TemplatePortal,
DomPortal,
} from '@angular/cdk/portal';
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ComponentRef,
ElementRef,
EmbeddedViewRef,
NgZone,
OnDestroy,
ViewChild,
ViewEncapsulation,
} from '@angular/core';
import {Observable, Subject} from 'rxjs';
import {take} from 'rxjs/operators';
import {matSnackBarAnimations} from './snack-bar-animations';
import {MatSnackBarConfig} from './snack-bar-config';
/**
* Internal interface for a snack bar container.
* @docs-private
*/
export interface _SnackBarContainer {
snackBarConfig: MatSnackBarConfig;
readonly _onAnnounce: Subject<any>;
readonly _onExit: Subject<any>;
readonly _onEnter: Subject<any>;
enter: () => void;
exit: () => Observable<void>;
attachTemplatePortal: <C>(portal: TemplatePortal<C>) => EmbeddedViewRef<C>;
attachComponentPortal: <T>(portal: ComponentPortal<T>) => ComponentRef<T>;
}
/**
* Internal component that wraps user-provided snack bar content.
* @docs-private
*/
@Component({
selector: 'snack-bar-container',
templateUrl: 'snack-bar-container.html',
styleUrls: ['snack-bar-container.css'],
// In Ivy embedded views will be change detected from their declaration place, rather than
// where they were stamped out. This means that we can't have the snack bar container be OnPush,
// because it might cause snack bars that were opened from a template not to be out of date.
// tslint:disable-next-line:validate-decorators
changeDetection: ChangeDetectionStrategy.Default,
encapsulation: ViewEncapsulation.None,
animations: [matSnackBarAnimations.snackBarState],
host: {
'class': 'mat-snack-bar-container',
'[@state]': '_animationState',
'(@state.done)': 'onAnimationEnd($event)'
},
})
export class MatSnackBarContainer extends BasePortalOutlet
implements OnDestroy, _SnackBarContainer {
/** The number of milliseconds to wait before announcing the snack bar's content. */
private readonly _announceDelay: number = 150;
/** The timeout for announcing the snack bar's content. */
private _announceTimeoutId: any;
/** Whether the component has been destroyed. */
private _destroyed = false;
/** The portal outlet inside of this container into which the snack bar content will be loaded. */
@ViewChild(CdkPortalOutlet, {static: true}) _portalOutlet: CdkPortalOutlet;
/** Subject for notifying that the snack bar has announced to screen readers. */
readonly _onAnnounce: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has exited from view. */
readonly _onExit: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has finished entering the view. */
readonly _onEnter: Subject<void> = new Subject();
/** The state of the snack bar animations. */
_animationState = 'void';
/** aria-live value for the live region. */
_live: AriaLivePoliteness;
/**
* Role of the live region. This is only for Firefox as there is a known issue where Firefox +
* JAWS does not read out aria-live message.
*/
_role?: 'status' | 'alert';
constructor(
private _ngZone: NgZone,
private _elementRef: ElementRef<HTMLElement>,
private _changeDetectorRef: ChangeDetectorRef,
private _platform: Platform,
/** The snack bar configuration. */
public snackBarConfig: MatSnackBarConfig) {
super();
// Use aria-live rather than a live role like 'alert' or 'status'
// because NVDA and JAWS have show inconsistent behavior with live roles.
if (snackBarConfig.politeness === 'assertive' && !snackBarConfig.announcementMessage) {
this._live = 'assertive';
} else if (snackBarConfig.politeness === 'off') {
this._live = 'off';
} else {
this._live = 'polite';
}
// Only set role for Firefox. Set role based on aria-live because setting role="alert" implies
// aria-live="assertive" which may cause issues if aria-live is set to "polite" above.
if (this._platform.FIREFOX) {
if (this._live === 'polite') {
this._role = 'status';
}
if (this._live === 'assertive') {
this._role = 'alert';
}
}
}
/** Attach a component portal as content to this snack bar container. */
attachComponentPortal<T>(portal: ComponentPortal<T>): ComponentRef<T> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachComponentPortal(portal);
}
/** Attach a template portal as content to this snack bar container. */
attachTemplatePortal<C>(portal: TemplatePortal<C>): EmbeddedViewRef<C> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachTemplatePortal(portal);
}
/**
* Attaches a DOM portal to the snack bar container.
* @deprecated To be turned into a method.
* @breaking-change 10.0.0
*/
override attachDomPortal = (portal: DomPortal) => {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachDomPortal(portal);
}
/** Handle end of animations, updating the state of the snackbar. */
onAnimationEnd(event: AnimationEvent) {
const {fromState, toState} = event;
if ((toState === 'void' && fromState !== 'void') || toState === 'hidden') {
this._completeExit();
}
if (toState === 'visible') {
// Note: we shouldn't use `this` inside the zone callback,
// because it can cause a memory leak.
const onEnter = this._onEnter;
this._ngZone.run(() => {
onEnter.next();
onEnter.complete();
});
}
}
/** Begin animation of snack bar entrance into view. */
enter(): void |
/** Begin animation of the snack bar exiting from view. */
exit(): Observable<void> {
// Note: this one transitions to `hidden`, rather than `void`, in order to handle the case
// where multiple snack bars are opened in quick succession (e.g. two consecutive calls to
// `MatSnackBar.open`).
this._animationState = 'hidden';
// Mark this element with an 'exit' attribute to indicate that the snackbar has
// been dismissed and will soon be removed from the DOM. This is used by the snackbar
// test harness.
this._elementRef.nativeElement.setAttribute('mat-exit', '');
// If the snack bar hasn't been announced by the time it exits it wouldn't have been open
// long enough to visually read it either, so clear the timeout for announcing.
clearTimeout(this._announceTimeoutId);
return this._onExit;
}
/** Makes sure the exit callbacks have been invoked when the element is destroyed. */
ngOnDestroy() {
this._destroyed = true;
this._completeExit();
}
/**
* Waits for the zone to settle before removing the element. Helps prevent
* errors where we end up removing an element which is in the middle of an animation.
*/
private _completeExit() {
this._ngZone.onMicrotaskEmpty.pipe(take(1)).subscribe(() => {
this._onExit.next();
this._onExit.complete();
});
}
/** Applies the various positioning and user-configured CSS classes to the snack bar. */
private _applySnackBarClasses() {
const element: HTMLElement = this._elementRef.nativeElement;
const panelClasses = this.snackBarConfig.panelClass;
if (panelClasses) {
if (Array.isArray(panelClasses)) {
// Note that we can't use a spread here, because IE doesn't support multiple arguments.
panelClasses.forEach(cssClass => element.classList.add(cssClass));
} else {
element.classList.add(panelClasses);
}
}
if (this.snackBarConfig.horizontalPosition === 'center') {
element.classList.add('mat-snack-bar-center');
}
if (this.snackBarConfig.verticalPosition === 'top') {
element.classList.add('mat-snack-bar-top');
}
}
/** Asserts that no content is already attached to the container. */
private _assertNotAttached() {
if (this._portalOutlet.hasAttached() && (typeof ngDevMode === 'undefined' || ngDevMode)) {
throw Error('Attempting to attach snack bar content after content is already attached');
}
}
/**
* Starts a timeout to move the snack bar content to the live region so screen readers will
* announce it.
*/
private _screenReaderAnnounce() {
if (!this._announceTimeoutId) {
this._ngZone.runOutsideAngular(() => {
this._announceTimeoutId = setTimeout(() => {
const inertElement = this._elementRef.nativeElement.querySelector('[aria-hidden]');
const liveElement = this._elementRef.nativeElement.querySelector('[aria-live]');
if (inertElement && liveElement) {
// If an element in the snack bar content is focused before being moved
// track it and restore focus after moving to the live region.
let focusedElement: HTMLElement | null = null;
if (this._platform.isBrowser &&
document.activeElement instanceof HTMLElement &&
inertElement.contains(document.activeElement)) {
focusedElement = document.activeElement;
}
inertElement.removeAttribute('aria-hidden');
liveElement.appendChild(inertElement);
focusedElement?.focus();
this._onAnnounce.next();
this._onAnnounce.complete();
}
}, this._announceDelay);
});
}
}
}
| {
if (!this._destroyed) {
this._animationState = 'visible';
this._changeDetectorRef.detectChanges();
this._screenReaderAnnounce();
}
} | identifier_body |
snack-bar-container.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimationEvent} from '@angular/animations';
import {AriaLivePoliteness} from '@angular/cdk/a11y';
import {Platform} from '@angular/cdk/platform';
import {
BasePortalOutlet,
CdkPortalOutlet,
ComponentPortal,
TemplatePortal,
DomPortal,
} from '@angular/cdk/portal';
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ComponentRef,
ElementRef,
EmbeddedViewRef,
NgZone,
OnDestroy,
ViewChild,
ViewEncapsulation,
} from '@angular/core';
import {Observable, Subject} from 'rxjs';
import {take} from 'rxjs/operators';
import {matSnackBarAnimations} from './snack-bar-animations';
import {MatSnackBarConfig} from './snack-bar-config';
/**
* Internal interface for a snack bar container.
* @docs-private
*/
export interface _SnackBarContainer {
snackBarConfig: MatSnackBarConfig;
readonly _onAnnounce: Subject<any>;
readonly _onExit: Subject<any>;
readonly _onEnter: Subject<any>;
enter: () => void;
exit: () => Observable<void>;
attachTemplatePortal: <C>(portal: TemplatePortal<C>) => EmbeddedViewRef<C>;
attachComponentPortal: <T>(portal: ComponentPortal<T>) => ComponentRef<T>;
}
/**
* Internal component that wraps user-provided snack bar content.
* @docs-private
*/
@Component({
selector: 'snack-bar-container',
templateUrl: 'snack-bar-container.html',
styleUrls: ['snack-bar-container.css'],
// In Ivy embedded views will be change detected from their declaration place, rather than
// where they were stamped out. This means that we can't have the snack bar container be OnPush,
// because it might cause snack bars that were opened from a template not to be out of date.
// tslint:disable-next-line:validate-decorators
changeDetection: ChangeDetectionStrategy.Default,
encapsulation: ViewEncapsulation.None,
animations: [matSnackBarAnimations.snackBarState],
host: {
'class': 'mat-snack-bar-container',
'[@state]': '_animationState',
'(@state.done)': 'onAnimationEnd($event)'
},
})
export class MatSnackBarContainer extends BasePortalOutlet
implements OnDestroy, _SnackBarContainer {
/** The number of milliseconds to wait before announcing the snack bar's content. */
private readonly _announceDelay: number = 150;
/** The timeout for announcing the snack bar's content. */
private _announceTimeoutId: any;
/** Whether the component has been destroyed. */
private _destroyed = false;
/** The portal outlet inside of this container into which the snack bar content will be loaded. */
@ViewChild(CdkPortalOutlet, {static: true}) _portalOutlet: CdkPortalOutlet;
/** Subject for notifying that the snack bar has announced to screen readers. */
readonly _onAnnounce: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has exited from view. */
readonly _onExit: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has finished entering the view. */
readonly _onEnter: Subject<void> = new Subject();
/** The state of the snack bar animations. */
_animationState = 'void';
/** aria-live value for the live region. */
_live: AriaLivePoliteness;
/**
* Role of the live region. This is only for Firefox as there is a known issue where Firefox +
* JAWS does not read out aria-live message.
*/
_role?: 'status' | 'alert';
constructor(
private _ngZone: NgZone,
private _elementRef: ElementRef<HTMLElement>,
private _changeDetectorRef: ChangeDetectorRef,
private _platform: Platform,
/** The snack bar configuration. */
public snackBarConfig: MatSnackBarConfig) {
super();
// Use aria-live rather than a live role like 'alert' or 'status'
// because NVDA and JAWS have show inconsistent behavior with live roles.
if (snackBarConfig.politeness === 'assertive' && !snackBarConfig.announcementMessage) {
this._live = 'assertive';
} else if (snackBarConfig.politeness === 'off') {
this._live = 'off';
} else {
this._live = 'polite';
}
// Only set role for Firefox. Set role based on aria-live because setting role="alert" implies
// aria-live="assertive" which may cause issues if aria-live is set to "polite" above.
if (this._platform.FIREFOX) {
if (this._live === 'polite') {
this._role = 'status';
}
if (this._live === 'assertive') |
}
}
/** Attach a component portal as content to this snack bar container. */
attachComponentPortal<T>(portal: ComponentPortal<T>): ComponentRef<T> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachComponentPortal(portal);
}
/** Attach a template portal as content to this snack bar container. */
attachTemplatePortal<C>(portal: TemplatePortal<C>): EmbeddedViewRef<C> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachTemplatePortal(portal);
}
/**
* Attaches a DOM portal to the snack bar container.
* @deprecated To be turned into a method.
* @breaking-change 10.0.0
*/
override attachDomPortal = (portal: DomPortal) => {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachDomPortal(portal);
}
/** Handle end of animations, updating the state of the snackbar. */
onAnimationEnd(event: AnimationEvent) {
const {fromState, toState} = event;
if ((toState === 'void' && fromState !== 'void') || toState === 'hidden') {
this._completeExit();
}
if (toState === 'visible') {
// Note: we shouldn't use `this` inside the zone callback,
// because it can cause a memory leak.
const onEnter = this._onEnter;
this._ngZone.run(() => {
onEnter.next();
onEnter.complete();
});
}
}
/** Begin animation of snack bar entrance into view. */
enter(): void {
if (!this._destroyed) {
this._animationState = 'visible';
this._changeDetectorRef.detectChanges();
this._screenReaderAnnounce();
}
}
/** Begin animation of the snack bar exiting from view. */
exit(): Observable<void> {
// Note: this one transitions to `hidden`, rather than `void`, in order to handle the case
// where multiple snack bars are opened in quick succession (e.g. two consecutive calls to
// `MatSnackBar.open`).
this._animationState = 'hidden';
// Mark this element with an 'exit' attribute to indicate that the snackbar has
// been dismissed and will soon be removed from the DOM. This is used by the snackbar
// test harness.
this._elementRef.nativeElement.setAttribute('mat-exit', '');
// If the snack bar hasn't been announced by the time it exits it wouldn't have been open
// long enough to visually read it either, so clear the timeout for announcing.
clearTimeout(this._announceTimeoutId);
return this._onExit;
}
/** Makes sure the exit callbacks have been invoked when the element is destroyed. */
ngOnDestroy() {
this._destroyed = true;
this._completeExit();
}
/**
* Waits for the zone to settle before removing the element. Helps prevent
* errors where we end up removing an element which is in the middle of an animation.
*/
private _completeExit() {
this._ngZone.onMicrotaskEmpty.pipe(take(1)).subscribe(() => {
this._onExit.next();
this._onExit.complete();
});
}
/** Applies the various positioning and user-configured CSS classes to the snack bar. */
private _applySnackBarClasses() {
const element: HTMLElement = this._elementRef.nativeElement;
const panelClasses = this.snackBarConfig.panelClass;
if (panelClasses) {
if (Array.isArray(panelClasses)) {
// Note that we can't use a spread here, because IE doesn't support multiple arguments.
panelClasses.forEach(cssClass => element.classList.add(cssClass));
} else {
element.classList.add(panelClasses);
}
}
if (this.snackBarConfig.horizontalPosition === 'center') {
element.classList.add('mat-snack-bar-center');
}
if (this.snackBarConfig.verticalPosition === 'top') {
element.classList.add('mat-snack-bar-top');
}
}
/** Asserts that no content is already attached to the container. */
private _assertNotAttached() {
if (this._portalOutlet.hasAttached() && (typeof ngDevMode === 'undefined' || ngDevMode)) {
throw Error('Attempting to attach snack bar content after content is already attached');
}
}
/**
* Starts a timeout to move the snack bar content to the live region so screen readers will
* announce it.
*/
private _screenReaderAnnounce() {
if (!this._announceTimeoutId) {
this._ngZone.runOutsideAngular(() => {
this._announceTimeoutId = setTimeout(() => {
const inertElement = this._elementRef.nativeElement.querySelector('[aria-hidden]');
const liveElement = this._elementRef.nativeElement.querySelector('[aria-live]');
if (inertElement && liveElement) {
// If an element in the snack bar content is focused before being moved
// track it and restore focus after moving to the live region.
let focusedElement: HTMLElement | null = null;
if (this._platform.isBrowser &&
document.activeElement instanceof HTMLElement &&
inertElement.contains(document.activeElement)) {
focusedElement = document.activeElement;
}
inertElement.removeAttribute('aria-hidden');
liveElement.appendChild(inertElement);
focusedElement?.focus();
this._onAnnounce.next();
this._onAnnounce.complete();
}
}, this._announceDelay);
});
}
}
}
| {
this._role = 'alert';
} | conditional_block |
snack-bar-container.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimationEvent} from '@angular/animations';
import {AriaLivePoliteness} from '@angular/cdk/a11y';
import {Platform} from '@angular/cdk/platform';
import {
BasePortalOutlet,
CdkPortalOutlet,
ComponentPortal,
TemplatePortal,
DomPortal,
} from '@angular/cdk/portal';
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ComponentRef,
ElementRef,
EmbeddedViewRef,
NgZone,
OnDestroy,
ViewChild,
ViewEncapsulation,
} from '@angular/core';
import {Observable, Subject} from 'rxjs';
import {take} from 'rxjs/operators';
import {matSnackBarAnimations} from './snack-bar-animations';
import {MatSnackBarConfig} from './snack-bar-config';
/**
* Internal interface for a snack bar container.
* @docs-private
*/
export interface _SnackBarContainer {
snackBarConfig: MatSnackBarConfig;
readonly _onAnnounce: Subject<any>;
readonly _onExit: Subject<any>;
readonly _onEnter: Subject<any>;
enter: () => void;
exit: () => Observable<void>;
attachTemplatePortal: <C>(portal: TemplatePortal<C>) => EmbeddedViewRef<C>;
attachComponentPortal: <T>(portal: ComponentPortal<T>) => ComponentRef<T>;
}
/**
* Internal component that wraps user-provided snack bar content.
* @docs-private | templateUrl: 'snack-bar-container.html',
styleUrls: ['snack-bar-container.css'],
// In Ivy embedded views will be change detected from their declaration place, rather than
// where they were stamped out. This means that we can't have the snack bar container be OnPush,
// because it might cause snack bars that were opened from a template not to be out of date.
// tslint:disable-next-line:validate-decorators
changeDetection: ChangeDetectionStrategy.Default,
encapsulation: ViewEncapsulation.None,
animations: [matSnackBarAnimations.snackBarState],
host: {
'class': 'mat-snack-bar-container',
'[@state]': '_animationState',
'(@state.done)': 'onAnimationEnd($event)'
},
})
export class MatSnackBarContainer extends BasePortalOutlet
implements OnDestroy, _SnackBarContainer {
/** The number of milliseconds to wait before announcing the snack bar's content. */
private readonly _announceDelay: number = 150;
/** The timeout for announcing the snack bar's content. */
private _announceTimeoutId: any;
/** Whether the component has been destroyed. */
private _destroyed = false;
/** The portal outlet inside of this container into which the snack bar content will be loaded. */
@ViewChild(CdkPortalOutlet, {static: true}) _portalOutlet: CdkPortalOutlet;
/** Subject for notifying that the snack bar has announced to screen readers. */
readonly _onAnnounce: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has exited from view. */
readonly _onExit: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has finished entering the view. */
readonly _onEnter: Subject<void> = new Subject();
/** The state of the snack bar animations. */
_animationState = 'void';
/** aria-live value for the live region. */
_live: AriaLivePoliteness;
/**
* Role of the live region. This is only for Firefox as there is a known issue where Firefox +
* JAWS does not read out aria-live message.
*/
_role?: 'status' | 'alert';
constructor(
private _ngZone: NgZone,
private _elementRef: ElementRef<HTMLElement>,
private _changeDetectorRef: ChangeDetectorRef,
private _platform: Platform,
/** The snack bar configuration. */
public snackBarConfig: MatSnackBarConfig) {
super();
// Use aria-live rather than a live role like 'alert' or 'status'
// because NVDA and JAWS have show inconsistent behavior with live roles.
if (snackBarConfig.politeness === 'assertive' && !snackBarConfig.announcementMessage) {
this._live = 'assertive';
} else if (snackBarConfig.politeness === 'off') {
this._live = 'off';
} else {
this._live = 'polite';
}
// Only set role for Firefox. Set role based on aria-live because setting role="alert" implies
// aria-live="assertive" which may cause issues if aria-live is set to "polite" above.
if (this._platform.FIREFOX) {
if (this._live === 'polite') {
this._role = 'status';
}
if (this._live === 'assertive') {
this._role = 'alert';
}
}
}
/** Attach a component portal as content to this snack bar container. */
attachComponentPortal<T>(portal: ComponentPortal<T>): ComponentRef<T> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachComponentPortal(portal);
}
/** Attach a template portal as content to this snack bar container. */
attachTemplatePortal<C>(portal: TemplatePortal<C>): EmbeddedViewRef<C> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachTemplatePortal(portal);
}
/**
* Attaches a DOM portal to the snack bar container.
* @deprecated To be turned into a method.
* @breaking-change 10.0.0
*/
override attachDomPortal = (portal: DomPortal) => {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachDomPortal(portal);
}
/** Handle end of animations, updating the state of the snackbar. */
onAnimationEnd(event: AnimationEvent) {
const {fromState, toState} = event;
if ((toState === 'void' && fromState !== 'void') || toState === 'hidden') {
this._completeExit();
}
if (toState === 'visible') {
// Note: we shouldn't use `this` inside the zone callback,
// because it can cause a memory leak.
const onEnter = this._onEnter;
this._ngZone.run(() => {
onEnter.next();
onEnter.complete();
});
}
}
/** Begin animation of snack bar entrance into view. */
enter(): void {
if (!this._destroyed) {
this._animationState = 'visible';
this._changeDetectorRef.detectChanges();
this._screenReaderAnnounce();
}
}
/** Begin animation of the snack bar exiting from view. */
exit(): Observable<void> {
// Note: this one transitions to `hidden`, rather than `void`, in order to handle the case
// where multiple snack bars are opened in quick succession (e.g. two consecutive calls to
// `MatSnackBar.open`).
this._animationState = 'hidden';
// Mark this element with an 'exit' attribute to indicate that the snackbar has
// been dismissed and will soon be removed from the DOM. This is used by the snackbar
// test harness.
this._elementRef.nativeElement.setAttribute('mat-exit', '');
// If the snack bar hasn't been announced by the time it exits it wouldn't have been open
// long enough to visually read it either, so clear the timeout for announcing.
clearTimeout(this._announceTimeoutId);
return this._onExit;
}
/** Makes sure the exit callbacks have been invoked when the element is destroyed. */
ngOnDestroy() {
this._destroyed = true;
this._completeExit();
}
/**
* Waits for the zone to settle before removing the element. Helps prevent
* errors where we end up removing an element which is in the middle of an animation.
*/
private _completeExit() {
this._ngZone.onMicrotaskEmpty.pipe(take(1)).subscribe(() => {
this._onExit.next();
this._onExit.complete();
});
}
/** Applies the various positioning and user-configured CSS classes to the snack bar. */
private _applySnackBarClasses() {
const element: HTMLElement = this._elementRef.nativeElement;
const panelClasses = this.snackBarConfig.panelClass;
if (panelClasses) {
if (Array.isArray(panelClasses)) {
// Note that we can't use a spread here, because IE doesn't support multiple arguments.
panelClasses.forEach(cssClass => element.classList.add(cssClass));
} else {
element.classList.add(panelClasses);
}
}
if (this.snackBarConfig.horizontalPosition === 'center') {
element.classList.add('mat-snack-bar-center');
}
if (this.snackBarConfig.verticalPosition === 'top') {
element.classList.add('mat-snack-bar-top');
}
}
/** Asserts that no content is already attached to the container. */
private _assertNotAttached() {
if (this._portalOutlet.hasAttached() && (typeof ngDevMode === 'undefined' || ngDevMode)) {
throw Error('Attempting to attach snack bar content after content is already attached');
}
}
/**
* Starts a timeout to move the snack bar content to the live region so screen readers will
* announce it.
*/
private _screenReaderAnnounce() {
if (!this._announceTimeoutId) {
this._ngZone.runOutsideAngular(() => {
this._announceTimeoutId = setTimeout(() => {
const inertElement = this._elementRef.nativeElement.querySelector('[aria-hidden]');
const liveElement = this._elementRef.nativeElement.querySelector('[aria-live]');
if (inertElement && liveElement) {
// If an element in the snack bar content is focused before being moved
// track it and restore focus after moving to the live region.
let focusedElement: HTMLElement | null = null;
if (this._platform.isBrowser &&
document.activeElement instanceof HTMLElement &&
inertElement.contains(document.activeElement)) {
focusedElement = document.activeElement;
}
inertElement.removeAttribute('aria-hidden');
liveElement.appendChild(inertElement);
focusedElement?.focus();
this._onAnnounce.next();
this._onAnnounce.complete();
}
}, this._announceDelay);
});
}
}
} | */
@Component({
selector: 'snack-bar-container', | random_line_split |
snack-bar-container.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimationEvent} from '@angular/animations';
import {AriaLivePoliteness} from '@angular/cdk/a11y';
import {Platform} from '@angular/cdk/platform';
import {
BasePortalOutlet,
CdkPortalOutlet,
ComponentPortal,
TemplatePortal,
DomPortal,
} from '@angular/cdk/portal';
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ComponentRef,
ElementRef,
EmbeddedViewRef,
NgZone,
OnDestroy,
ViewChild,
ViewEncapsulation,
} from '@angular/core';
import {Observable, Subject} from 'rxjs';
import {take} from 'rxjs/operators';
import {matSnackBarAnimations} from './snack-bar-animations';
import {MatSnackBarConfig} from './snack-bar-config';
/**
* Internal interface for a snack bar container.
* @docs-private
*/
export interface _SnackBarContainer {
snackBarConfig: MatSnackBarConfig;
readonly _onAnnounce: Subject<any>;
readonly _onExit: Subject<any>;
readonly _onEnter: Subject<any>;
enter: () => void;
exit: () => Observable<void>;
attachTemplatePortal: <C>(portal: TemplatePortal<C>) => EmbeddedViewRef<C>;
attachComponentPortal: <T>(portal: ComponentPortal<T>) => ComponentRef<T>;
}
/**
* Internal component that wraps user-provided snack bar content.
* @docs-private
*/
@Component({
selector: 'snack-bar-container',
templateUrl: 'snack-bar-container.html',
styleUrls: ['snack-bar-container.css'],
// In Ivy embedded views will be change detected from their declaration place, rather than
// where they were stamped out. This means that we can't have the snack bar container be OnPush,
// because it might cause snack bars that were opened from a template not to be out of date.
// tslint:disable-next-line:validate-decorators
changeDetection: ChangeDetectionStrategy.Default,
encapsulation: ViewEncapsulation.None,
animations: [matSnackBarAnimations.snackBarState],
host: {
'class': 'mat-snack-bar-container',
'[@state]': '_animationState',
'(@state.done)': 'onAnimationEnd($event)'
},
})
export class MatSnackBarContainer extends BasePortalOutlet
implements OnDestroy, _SnackBarContainer {
/** The number of milliseconds to wait before announcing the snack bar's content. */
private readonly _announceDelay: number = 150;
/** The timeout for announcing the snack bar's content. */
private _announceTimeoutId: any;
/** Whether the component has been destroyed. */
private _destroyed = false;
/** The portal outlet inside of this container into which the snack bar content will be loaded. */
@ViewChild(CdkPortalOutlet, {static: true}) _portalOutlet: CdkPortalOutlet;
/** Subject for notifying that the snack bar has announced to screen readers. */
readonly _onAnnounce: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has exited from view. */
readonly _onExit: Subject<void> = new Subject();
/** Subject for notifying that the snack bar has finished entering the view. */
readonly _onEnter: Subject<void> = new Subject();
/** The state of the snack bar animations. */
_animationState = 'void';
/** aria-live value for the live region. */
_live: AriaLivePoliteness;
/**
* Role of the live region. This is only for Firefox as there is a known issue where Firefox +
* JAWS does not read out aria-live message.
*/
_role?: 'status' | 'alert';
constructor(
private _ngZone: NgZone,
private _elementRef: ElementRef<HTMLElement>,
private _changeDetectorRef: ChangeDetectorRef,
private _platform: Platform,
/** The snack bar configuration. */
public snackBarConfig: MatSnackBarConfig) {
super();
// Use aria-live rather than a live role like 'alert' or 'status'
// because NVDA and JAWS have show inconsistent behavior with live roles.
if (snackBarConfig.politeness === 'assertive' && !snackBarConfig.announcementMessage) {
this._live = 'assertive';
} else if (snackBarConfig.politeness === 'off') {
this._live = 'off';
} else {
this._live = 'polite';
}
// Only set role for Firefox. Set role based on aria-live because setting role="alert" implies
// aria-live="assertive" which may cause issues if aria-live is set to "polite" above.
if (this._platform.FIREFOX) {
if (this._live === 'polite') {
this._role = 'status';
}
if (this._live === 'assertive') {
this._role = 'alert';
}
}
}
/** Attach a component portal as content to this snack bar container. */
attachComponentPortal<T>(portal: ComponentPortal<T>): ComponentRef<T> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachComponentPortal(portal);
}
/** Attach a template portal as content to this snack bar container. */
attachTemplatePortal<C>(portal: TemplatePortal<C>): EmbeddedViewRef<C> {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachTemplatePortal(portal);
}
/**
* Attaches a DOM portal to the snack bar container.
* @deprecated To be turned into a method.
* @breaking-change 10.0.0
*/
override attachDomPortal = (portal: DomPortal) => {
this._assertNotAttached();
this._applySnackBarClasses();
return this._portalOutlet.attachDomPortal(portal);
}
/** Handle end of animations, updating the state of the snackbar. */
onAnimationEnd(event: AnimationEvent) {
const {fromState, toState} = event;
if ((toState === 'void' && fromState !== 'void') || toState === 'hidden') {
this._completeExit();
}
if (toState === 'visible') {
// Note: we shouldn't use `this` inside the zone callback,
// because it can cause a memory leak.
const onEnter = this._onEnter;
this._ngZone.run(() => {
onEnter.next();
onEnter.complete();
});
}
}
/** Begin animation of snack bar entrance into view. */
enter(): void {
if (!this._destroyed) {
this._animationState = 'visible';
this._changeDetectorRef.detectChanges();
this._screenReaderAnnounce();
}
}
/** Begin animation of the snack bar exiting from view. */
| (): Observable<void> {
// Note: this one transitions to `hidden`, rather than `void`, in order to handle the case
// where multiple snack bars are opened in quick succession (e.g. two consecutive calls to
// `MatSnackBar.open`).
this._animationState = 'hidden';
// Mark this element with an 'exit' attribute to indicate that the snackbar has
// been dismissed and will soon be removed from the DOM. This is used by the snackbar
// test harness.
this._elementRef.nativeElement.setAttribute('mat-exit', '');
// If the snack bar hasn't been announced by the time it exits it wouldn't have been open
// long enough to visually read it either, so clear the timeout for announcing.
clearTimeout(this._announceTimeoutId);
return this._onExit;
}
/** Makes sure the exit callbacks have been invoked when the element is destroyed. */
ngOnDestroy() {
this._destroyed = true;
this._completeExit();
}
/**
* Waits for the zone to settle before removing the element. Helps prevent
* errors where we end up removing an element which is in the middle of an animation.
*/
private _completeExit() {
this._ngZone.onMicrotaskEmpty.pipe(take(1)).subscribe(() => {
this._onExit.next();
this._onExit.complete();
});
}
/** Applies the various positioning and user-configured CSS classes to the snack bar. */
private _applySnackBarClasses() {
const element: HTMLElement = this._elementRef.nativeElement;
const panelClasses = this.snackBarConfig.panelClass;
if (panelClasses) {
if (Array.isArray(panelClasses)) {
// Note that we can't use a spread here, because IE doesn't support multiple arguments.
panelClasses.forEach(cssClass => element.classList.add(cssClass));
} else {
element.classList.add(panelClasses);
}
}
if (this.snackBarConfig.horizontalPosition === 'center') {
element.classList.add('mat-snack-bar-center');
}
if (this.snackBarConfig.verticalPosition === 'top') {
element.classList.add('mat-snack-bar-top');
}
}
/** Asserts that no content is already attached to the container. */
private _assertNotAttached() {
if (this._portalOutlet.hasAttached() && (typeof ngDevMode === 'undefined' || ngDevMode)) {
throw Error('Attempting to attach snack bar content after content is already attached');
}
}
/**
* Starts a timeout to move the snack bar content to the live region so screen readers will
* announce it.
*/
private _screenReaderAnnounce() {
if (!this._announceTimeoutId) {
this._ngZone.runOutsideAngular(() => {
this._announceTimeoutId = setTimeout(() => {
const inertElement = this._elementRef.nativeElement.querySelector('[aria-hidden]');
const liveElement = this._elementRef.nativeElement.querySelector('[aria-live]');
if (inertElement && liveElement) {
// If an element in the snack bar content is focused before being moved
// track it and restore focus after moving to the live region.
let focusedElement: HTMLElement | null = null;
if (this._platform.isBrowser &&
document.activeElement instanceof HTMLElement &&
inertElement.contains(document.activeElement)) {
focusedElement = document.activeElement;
}
inertElement.removeAttribute('aria-hidden');
liveElement.appendChild(inertElement);
focusedElement?.focus();
this._onAnnounce.next();
this._onAnnounce.complete();
}
}, this._announceDelay);
});
}
}
}
| exit | identifier_name |
pig_operator.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import Optional
from airflow.hooks.pig_hook import PigCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class PigOperator(BaseOperator):
"""
Executes pig script.
:param pig: the pig latin script to be executed. (templated)
:type pig: str
:param pig_cli_conn_id: reference to the Hive database
:type pig_cli_conn_id: str
:param pigparams_jinja_translate: when True, pig params-type templating
${var} gets translated into jinja-type templating {{ var }}. Note that
you may want to use this along with the
``DAG(user_defined_macros=myargs)`` parameter. View the DAG
object documentation for more details.
:type pigparams_jinja_translate: bool
:param pig_opts: pig options, such as: -x tez, -useHCatalog, ...
:type pig_opts: str
"""
template_fields = ('pig',)
template_ext = ('.pig', '.piglatin',)
ui_color = '#f0e4ec'
@apply_defaults
def __init__(
self,
pig: str,
pig_cli_conn_id: str = 'pig_cli_default',
pigparams_jinja_translate: bool = False,
pig_opts: Optional[str] = None,
*args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.pigparams_jinja_translate = pigparams_jinja_translate
self.pig = pig
self.pig_cli_conn_id = pig_cli_conn_id
self.pig_opts = pig_opts
def get_hook(self):
return PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)
def | (self):
if self.pigparams_jinja_translate:
self.pig = re.sub(
r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig)
def execute(self, context):
self.log.info('Executing: %s', self.pig)
self.hook = self.get_hook()
self.hook.run_cli(pig=self.pig, pig_opts=self.pig_opts)
def on_kill(self):
self.hook.kill()
| prepare_template | identifier_name |
pig_operator.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import Optional
from airflow.hooks.pig_hook import PigCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class PigOperator(BaseOperator):
"""
Executes pig script.
:param pig: the pig latin script to be executed. (templated)
:type pig: str
:param pig_cli_conn_id: reference to the Hive database
:type pig_cli_conn_id: str
:param pigparams_jinja_translate: when True, pig params-type templating
${var} gets translated into jinja-type templating {{ var }}. Note that
you may want to use this along with the
``DAG(user_defined_macros=myargs)`` parameter. View the DAG
object documentation for more details.
:type pigparams_jinja_translate: bool
:param pig_opts: pig options, such as: -x tez, -useHCatalog, ...
:type pig_opts: str
"""
template_fields = ('pig',)
template_ext = ('.pig', '.piglatin',)
ui_color = '#f0e4ec'
@apply_defaults
def __init__(
self,
pig: str,
pig_cli_conn_id: str = 'pig_cli_default',
pigparams_jinja_translate: bool = False,
pig_opts: Optional[str] = None,
*args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.pigparams_jinja_translate = pigparams_jinja_translate
self.pig = pig
self.pig_cli_conn_id = pig_cli_conn_id
self.pig_opts = pig_opts
def get_hook(self):
return PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)
def prepare_template(self):
if self.pigparams_jinja_translate:
self.pig = re.sub(
r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig)
def execute(self, context):
self.log.info('Executing: %s', self.pig)
self.hook = self.get_hook()
self.hook.run_cli(pig=self.pig, pig_opts=self.pig_opts)
def on_kill(self):
self.hook.kill() | # -*- coding: utf-8 -*-
# | random_line_split | |
pig_operator.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import Optional
from airflow.hooks.pig_hook import PigCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class PigOperator(BaseOperator):
"""
Executes pig script.
:param pig: the pig latin script to be executed. (templated)
:type pig: str
:param pig_cli_conn_id: reference to the Hive database
:type pig_cli_conn_id: str
:param pigparams_jinja_translate: when True, pig params-type templating
${var} gets translated into jinja-type templating {{ var }}. Note that
you may want to use this along with the
``DAG(user_defined_macros=myargs)`` parameter. View the DAG
object documentation for more details.
:type pigparams_jinja_translate: bool
:param pig_opts: pig options, such as: -x tez, -useHCatalog, ...
:type pig_opts: str
"""
template_fields = ('pig',)
template_ext = ('.pig', '.piglatin',)
ui_color = '#f0e4ec'
@apply_defaults
def __init__(
self,
pig: str,
pig_cli_conn_id: str = 'pig_cli_default',
pigparams_jinja_translate: bool = False,
pig_opts: Optional[str] = None,
*args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.pigparams_jinja_translate = pigparams_jinja_translate
self.pig = pig
self.pig_cli_conn_id = pig_cli_conn_id
self.pig_opts = pig_opts
def get_hook(self):
return PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)
def prepare_template(self):
if self.pigparams_jinja_translate:
|
def execute(self, context):
self.log.info('Executing: %s', self.pig)
self.hook = self.get_hook()
self.hook.run_cli(pig=self.pig, pig_opts=self.pig_opts)
def on_kill(self):
self.hook.kill()
| self.pig = re.sub(
r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig) | conditional_block |
pig_operator.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import Optional
from airflow.hooks.pig_hook import PigCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class PigOperator(BaseOperator):
"""
Executes pig script.
:param pig: the pig latin script to be executed. (templated)
:type pig: str
:param pig_cli_conn_id: reference to the Hive database
:type pig_cli_conn_id: str
:param pigparams_jinja_translate: when True, pig params-type templating
${var} gets translated into jinja-type templating {{ var }}. Note that
you may want to use this along with the
``DAG(user_defined_macros=myargs)`` parameter. View the DAG
object documentation for more details.
:type pigparams_jinja_translate: bool
:param pig_opts: pig options, such as: -x tez, -useHCatalog, ...
:type pig_opts: str
"""
template_fields = ('pig',)
template_ext = ('.pig', '.piglatin',)
ui_color = '#f0e4ec'
@apply_defaults
def __init__(
self,
pig: str,
pig_cli_conn_id: str = 'pig_cli_default',
pigparams_jinja_translate: bool = False,
pig_opts: Optional[str] = None,
*args, **kwargs) -> None:
|
def get_hook(self):
return PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)
def prepare_template(self):
if self.pigparams_jinja_translate:
self.pig = re.sub(
r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig)
def execute(self, context):
self.log.info('Executing: %s', self.pig)
self.hook = self.get_hook()
self.hook.run_cli(pig=self.pig, pig_opts=self.pig_opts)
def on_kill(self):
self.hook.kill()
| super().__init__(*args, **kwargs)
self.pigparams_jinja_translate = pigparams_jinja_translate
self.pig = pig
self.pig_cli_conn_id = pig_cli_conn_id
self.pig_opts = pig_opts | identifier_body |
synth.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp
gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()
versions = ["v1beta1", "v1beta2", "v1p1beta1", "v1p2beta1", "v1"]
# ----------------------------------------------------------------------------
# Generate videointelligence GAPIC layer
# ----------------------------------------------------------------------------
for version in versions:
library = gapic.py_library(
"videointelligence", version, artman_output_name=f"video-intelligence-{version}"
)
# TODO: stop excluding tests and nox.py (excluded as we lack system tests)
s.move( | "docs/index.rst",
f"tests/system/gapic/{version}/"
f"test_system_video_intelligence_service_{version}.py",
# f'tests/unit/gapic/{version}/'
# f'test_video_intelligence_service_client_{version}.py',
],
)
s.replace(
"**/*/video_intelligence_service_client.py",
"'google-cloud-video-intelligence', \).version",
"'google-cloud-videointelligence', ).version",
)
s.replace(
"tests/unit/gapic/**/test_video_intelligence_service_client_*.py",
"^(\s+)expected_request = video_intelligence_pb2.AnnotateVideoRequest\(\)",
"\g<1>expected_request = video_intelligence_pb2.AnnotateVideoRequest(\n"
"\g<1> input_uri=input_uri, features=features)",
)
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
templated_files = common.py_library(unit_cov_level=97, cov_level=100)
s.move(templated_files)
s.shell.run(["nox", "-s", "blacken"], hide_output=False) | library,
excludes=[
"setup.py",
"nox*.py",
"README.rst", | random_line_split |
synth.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp
gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()
versions = ["v1beta1", "v1beta2", "v1p1beta1", "v1p2beta1", "v1"]
# ----------------------------------------------------------------------------
# Generate videointelligence GAPIC layer
# ----------------------------------------------------------------------------
for version in versions:
|
s.replace(
"**/*/video_intelligence_service_client.py",
"'google-cloud-video-intelligence', \).version",
"'google-cloud-videointelligence', ).version",
)
s.replace(
"tests/unit/gapic/**/test_video_intelligence_service_client_*.py",
"^(\s+)expected_request = video_intelligence_pb2.AnnotateVideoRequest\(\)",
"\g<1>expected_request = video_intelligence_pb2.AnnotateVideoRequest(\n"
"\g<1> input_uri=input_uri, features=features)",
)
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
templated_files = common.py_library(unit_cov_level=97, cov_level=100)
s.move(templated_files)
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
| library = gapic.py_library(
"videointelligence", version, artman_output_name=f"video-intelligence-{version}"
)
# TODO: stop excluding tests and nox.py (excluded as we lack system tests)
s.move(
library,
excludes=[
"setup.py",
"nox*.py",
"README.rst",
"docs/index.rst",
f"tests/system/gapic/{version}/"
f"test_system_video_intelligence_service_{version}.py",
# f'tests/unit/gapic/{version}/'
# f'test_video_intelligence_service_client_{version}.py',
],
) | conditional_block |
image.py | # -*- coding: utf-8 -*-
import attr
from cached_property import cached_property
from navmazing import NavigateToSibling, NavigateToAttribute
from wrapanapi.containers.image import Image as ApiImage
from cfme.common import (WidgetasticTaggable, PolicyProfileAssignable,
TagPageView)
from cfme.containers.provider import (Labelable,
ContainerObjectAllBaseView,
ContainerObjectDetailsBaseView, LoadDetailsMixin,
refresh_and_navigate, ContainerObjectDetailsEntities,
GetRandomInstancesMixin)
from cfme.utils.appliance.implementations.ui import CFMENavigateStep, navigator, navigate_to
from cfme.utils.log import logger
from cfme.configure import tasks
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.wait import wait_for, TimedOutError
from widgetastic_manageiq import SummaryTable, BaseEntitiesView
from widgetastic.widget import View
from cfme.utils.providers import get_crud_by_name
class ImageAllView(ContainerObjectAllBaseView):
SUMMARY_TEXT = "Container Images"
# ProviderEntity has its own fields, image view should rather use BaseEntity instead
including_entities = View.include(BaseEntitiesView, use_parent=True)
class ImageDetailsView(ContainerObjectDetailsBaseView):
@View.nested
class entities(ContainerObjectDetailsEntities): # noqa
configuration = SummaryTable(title='Configuration')
compliance = SummaryTable(title='Compliance')
@attr.s
class Image(BaseEntity, WidgetasticTaggable, Labelable, LoadDetailsMixin, PolicyProfileAssignable):
PLURAL = 'Container Images'
all_view = ImageAllView
details_view = ImageDetailsView
name = attr.ib()
id = attr.ib()
provider = attr.ib()
@cached_property
def mgmt(self):
return ApiImage(self.provider.mgmt, self.name, self.sha256)
@cached_property
def sha256(self):
return self.id.split('@')[-1]
def perform_smartstate_analysis(self, wait_for_finish=False, timeout='7M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression pattern following Bugzilla Bug 1483590
# task name will contain also Image name
# the str compile on tasks module
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Perform SmartState Analysis', handle_alert=True)
# TODO: Modify accordingly once there is FlashMessages.assert_massage_contains()
assert filter(lambda m: 'Analysis successfully initiated' in m.text, view.flash.messages)
if wait_for_finish:
try:
wait_for(tasks.is_analysis_finished,
func_kwargs={'name': '(?i)(Container Image.*)',
'task_type': 'container'},
timeout=timeout,
fail_func=self.appliance.server.browser.refresh)
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
def check_compliance(self, wait_for_finish=True, timeout=240):
"""Initiates compliance check and waits for it to finish."""
view = navigate_to(self, 'Details')
original_state = self.compliance_status
view.toolbar.policy.item_select("Check Compliance of Last Known Configuration",
handle_alert=True)
view.flash.assert_no_error()
if wait_for_finish:
wait_for(
lambda: self.compliance_status != original_state, num_sec=timeout, delay=5,
message='compliance state of {} still matches {}'
.format(self.name, original_state)
)
return self.compliant
@property
def compliance_status(self):
view = refresh_and_navigate(self, 'Details')
return view.entities.compliance.read().get('Status').strip()
@property
def compliant(self):
"""Check if the image is compliant
Returns:
:py:class:`NoneType` if the image was never verified, otherwise :py:class:`bool`
"""
text = self.compliance_status.lower()
if text == "never verified":
return None
elif text.startswith("non-compliant"):
return False
elif text.startswith("compliant"):
return True
else:
raise ValueError("{} is not a known state for compliance".format(text))
@attr.s
class ImageCollection(GetRandomInstancesMixin, BaseCollection, PolicyProfileAssignable):
"""Collection object for :py:class:`Image`."""
ENTITY = Image
def all(self):
# container_images has ems_id, join with ext_mgmgt_systems on id for provider name
image_table = self.appliance.db.client['container_images']
ems_table = self.appliance.db.client['ext_management_systems']
image_query = (
self.appliance.db.client.session
.query(image_table.name, image_table.image_ref, ems_table.name)
.join(ems_table, image_table.ems_id == ems_table.id))
provider = None
# filtered
if self.filters.get('provider'):
provider = self.filters.get('provider')
image_query = image_query.filter(ems_table.name == provider.name)
images = []
for name, image_ref, ems_name in image_query.all():
images.append(self.instantiate(name=name, id=image_ref,
provider=provider or get_crud_by_name(ems_name)))
return images
def check_compliance_multiple_images(self, image_entities, check_on_entity=True, timeout=240): | """Initiates compliance check and waits for it to finish on several Images.
Args:
image_entities: list of Image entities that need to perform compliance check on them
check_on_entity (bool): check the compliance status on the entity summary view if True,
only run compliance otherwise.
timeout (seconds): time for waiting for compliance status
"""
# Chose Check Compliance of Last Known Configuration
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
wait_for(lambda: images_view.toolbar.policy.is_enabled, num_sec=5,
message='Policy drop down menu is disabled after checking some Images')
images_view.toolbar.policy.item_select('Check Compliance of Last Known Configuration',
handle_alert=True)
images_view.flash.assert_no_error()
# Verify Image summary
if check_on_entity:
for image_instance in image_entities:
original_state = 'never verified'
try:
wait_for(
lambda: image_instance.compliance_status.lower() != original_state,
num_sec=timeout, delay=5,
message='compliance state of Image ID, "{}", still matches {}'
.format(image_instance.id, original_state)
)
except TimedOutError:
logger.error('compliance state of Image ID, "{}", is {}'
.format(image_instance.id, image_instance.compliance_status))
raise TimedOutError('Timeout exceeded, Waited too much'
' time for check Compliance finish ({}).'.format(timeout))
def check_image_entities(self, image_entities):
"""check rows on Container Images table."""
images_view = navigate_to(self, 'All', use_resetter=False)
images_view.paginator.set_items_per_page(1000)
conditions = []
for image_entity in image_entities:
conditions.append({'id': image_entity.id})
entities = images_view.entities.apply(func=lambda e: e.check(), conditions=conditions)
return entities
def perform_smartstate_analysis_multiple_images(
self, image_entities, wait_for_finish=False, timeout='20M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression
# the str compile on tasks module
task_name = '(?i)(Container Image.*)'
task_type = 'container'
num_of_tasks = len(image_entities)
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
images_view.toolbar.configuration.item_select(
'Perform SmartState Analysis', handle_alert=True)
for image_entity in image_entities:
images_view.flash.assert_success_message(
'"{}": Analysis successfully initiated'.format(image_entity.name), partial=True
)
if wait_for_finish:
try:
# check all tasks state finished
tasks.wait_analysis_finished_multiple_tasks(task_name, task_type,
num_of_tasks, timeout=timeout)
# check all task passed successfully with no error
if tasks.check_tasks_have_no_errors(task_name, task_type, num_of_tasks,
silent_failure=True,
clear_tasks_after_success=False):
return True
else:
logger.error('Some Images SSA tasks finished with error message,'
' see logger for more details.')
return False
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
@navigator.register(ImageCollection, 'All')
class All(CFMENavigateStep):
VIEW = ImageAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self):
self.prerequisite_view.navigation.select('Compute', 'Containers', 'Container Images')
def resetter(self):
# Reset view and selection
self.view.entities.search.clear_simple_search()
self.view.toolbar.view_selector.select("List View")
@navigator.register(Image, 'Details')
class Details(CFMENavigateStep):
VIEW = ImageDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
search_visible = self.prerequisite_view.entities.search.is_displayed
self.prerequisite_view.entities.get_entity(provider=self.obj.provider.name,
surf_pages=not search_visible,
use_search=search_visible, name=self.obj.name,
id=self.obj.id).click()
@navigator.register(Image, 'EditTags')
class ImageRegistryEditTags(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.policy.item_select('Edit Tags') | random_line_split | |
image.py | # -*- coding: utf-8 -*-
import attr
from cached_property import cached_property
from navmazing import NavigateToSibling, NavigateToAttribute
from wrapanapi.containers.image import Image as ApiImage
from cfme.common import (WidgetasticTaggable, PolicyProfileAssignable,
TagPageView)
from cfme.containers.provider import (Labelable,
ContainerObjectAllBaseView,
ContainerObjectDetailsBaseView, LoadDetailsMixin,
refresh_and_navigate, ContainerObjectDetailsEntities,
GetRandomInstancesMixin)
from cfme.utils.appliance.implementations.ui import CFMENavigateStep, navigator, navigate_to
from cfme.utils.log import logger
from cfme.configure import tasks
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.wait import wait_for, TimedOutError
from widgetastic_manageiq import SummaryTable, BaseEntitiesView
from widgetastic.widget import View
from cfme.utils.providers import get_crud_by_name
class ImageAllView(ContainerObjectAllBaseView):
SUMMARY_TEXT = "Container Images"
# ProviderEntity has its own fields, image view should rather use BaseEntity instead
including_entities = View.include(BaseEntitiesView, use_parent=True)
class ImageDetailsView(ContainerObjectDetailsBaseView):
@View.nested
class entities(ContainerObjectDetailsEntities): # noqa
configuration = SummaryTable(title='Configuration')
compliance = SummaryTable(title='Compliance')
@attr.s
class Image(BaseEntity, WidgetasticTaggable, Labelable, LoadDetailsMixin, PolicyProfileAssignable):
PLURAL = 'Container Images'
all_view = ImageAllView
details_view = ImageDetailsView
name = attr.ib()
id = attr.ib()
provider = attr.ib()
@cached_property
def mgmt(self):
return ApiImage(self.provider.mgmt, self.name, self.sha256)
@cached_property
def sha256(self):
return self.id.split('@')[-1]
def perform_smartstate_analysis(self, wait_for_finish=False, timeout='7M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression pattern following Bugzilla Bug 1483590
# task name will contain also Image name
# the str compile on tasks module
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Perform SmartState Analysis', handle_alert=True)
# TODO: Modify accordingly once there is FlashMessages.assert_massage_contains()
assert filter(lambda m: 'Analysis successfully initiated' in m.text, view.flash.messages)
if wait_for_finish:
try:
wait_for(tasks.is_analysis_finished,
func_kwargs={'name': '(?i)(Container Image.*)',
'task_type': 'container'},
timeout=timeout,
fail_func=self.appliance.server.browser.refresh)
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
def check_compliance(self, wait_for_finish=True, timeout=240):
"""Initiates compliance check and waits for it to finish."""
view = navigate_to(self, 'Details')
original_state = self.compliance_status
view.toolbar.policy.item_select("Check Compliance of Last Known Configuration",
handle_alert=True)
view.flash.assert_no_error()
if wait_for_finish:
wait_for(
lambda: self.compliance_status != original_state, num_sec=timeout, delay=5,
message='compliance state of {} still matches {}'
.format(self.name, original_state)
)
return self.compliant
@property
def compliance_status(self):
view = refresh_and_navigate(self, 'Details')
return view.entities.compliance.read().get('Status').strip()
@property
def compliant(self):
"""Check if the image is compliant
Returns:
:py:class:`NoneType` if the image was never verified, otherwise :py:class:`bool`
"""
text = self.compliance_status.lower()
if text == "never verified":
return None
elif text.startswith("non-compliant"):
return False
elif text.startswith("compliant"):
|
else:
raise ValueError("{} is not a known state for compliance".format(text))
@attr.s
class ImageCollection(GetRandomInstancesMixin, BaseCollection, PolicyProfileAssignable):
"""Collection object for :py:class:`Image`."""
ENTITY = Image
def all(self):
# container_images has ems_id, join with ext_mgmgt_systems on id for provider name
image_table = self.appliance.db.client['container_images']
ems_table = self.appliance.db.client['ext_management_systems']
image_query = (
self.appliance.db.client.session
.query(image_table.name, image_table.image_ref, ems_table.name)
.join(ems_table, image_table.ems_id == ems_table.id))
provider = None
# filtered
if self.filters.get('provider'):
provider = self.filters.get('provider')
image_query = image_query.filter(ems_table.name == provider.name)
images = []
for name, image_ref, ems_name in image_query.all():
images.append(self.instantiate(name=name, id=image_ref,
provider=provider or get_crud_by_name(ems_name)))
return images
def check_compliance_multiple_images(self, image_entities, check_on_entity=True, timeout=240):
"""Initiates compliance check and waits for it to finish on several Images.
Args:
image_entities: list of Image entities that need to perform compliance check on them
check_on_entity (bool): check the compliance status on the entity summary view if True,
only run compliance otherwise.
timeout (seconds): time for waiting for compliance status
"""
# Chose Check Compliance of Last Known Configuration
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
wait_for(lambda: images_view.toolbar.policy.is_enabled, num_sec=5,
message='Policy drop down menu is disabled after checking some Images')
images_view.toolbar.policy.item_select('Check Compliance of Last Known Configuration',
handle_alert=True)
images_view.flash.assert_no_error()
# Verify Image summary
if check_on_entity:
for image_instance in image_entities:
original_state = 'never verified'
try:
wait_for(
lambda: image_instance.compliance_status.lower() != original_state,
num_sec=timeout, delay=5,
message='compliance state of Image ID, "{}", still matches {}'
.format(image_instance.id, original_state)
)
except TimedOutError:
logger.error('compliance state of Image ID, "{}", is {}'
.format(image_instance.id, image_instance.compliance_status))
raise TimedOutError('Timeout exceeded, Waited too much'
' time for check Compliance finish ({}).'.format(timeout))
def check_image_entities(self, image_entities):
"""check rows on Container Images table."""
images_view = navigate_to(self, 'All', use_resetter=False)
images_view.paginator.set_items_per_page(1000)
conditions = []
for image_entity in image_entities:
conditions.append({'id': image_entity.id})
entities = images_view.entities.apply(func=lambda e: e.check(), conditions=conditions)
return entities
def perform_smartstate_analysis_multiple_images(
self, image_entities, wait_for_finish=False, timeout='20M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression
# the str compile on tasks module
task_name = '(?i)(Container Image.*)'
task_type = 'container'
num_of_tasks = len(image_entities)
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
images_view.toolbar.configuration.item_select(
'Perform SmartState Analysis', handle_alert=True)
for image_entity in image_entities:
images_view.flash.assert_success_message(
'"{}": Analysis successfully initiated'.format(image_entity.name), partial=True
)
if wait_for_finish:
try:
# check all tasks state finished
tasks.wait_analysis_finished_multiple_tasks(task_name, task_type,
num_of_tasks, timeout=timeout)
# check all task passed successfully with no error
if tasks.check_tasks_have_no_errors(task_name, task_type, num_of_tasks,
silent_failure=True,
clear_tasks_after_success=False):
return True
else:
logger.error('Some Images SSA tasks finished with error message,'
' see logger for more details.')
return False
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
@navigator.register(ImageCollection, 'All')
class All(CFMENavigateStep):
VIEW = ImageAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self):
self.prerequisite_view.navigation.select('Compute', 'Containers', 'Container Images')
def resetter(self):
# Reset view and selection
self.view.entities.search.clear_simple_search()
self.view.toolbar.view_selector.select("List View")
@navigator.register(Image, 'Details')
class Details(CFMENavigateStep):
VIEW = ImageDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
search_visible = self.prerequisite_view.entities.search.is_displayed
self.prerequisite_view.entities.get_entity(provider=self.obj.provider.name,
surf_pages=not search_visible,
use_search=search_visible, name=self.obj.name,
id=self.obj.id).click()
@navigator.register(Image, 'EditTags')
class ImageRegistryEditTags(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
| return True | conditional_block |
image.py | # -*- coding: utf-8 -*-
import attr
from cached_property import cached_property
from navmazing import NavigateToSibling, NavigateToAttribute
from wrapanapi.containers.image import Image as ApiImage
from cfme.common import (WidgetasticTaggable, PolicyProfileAssignable,
TagPageView)
from cfme.containers.provider import (Labelable,
ContainerObjectAllBaseView,
ContainerObjectDetailsBaseView, LoadDetailsMixin,
refresh_and_navigate, ContainerObjectDetailsEntities,
GetRandomInstancesMixin)
from cfme.utils.appliance.implementations.ui import CFMENavigateStep, navigator, navigate_to
from cfme.utils.log import logger
from cfme.configure import tasks
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.wait import wait_for, TimedOutError
from widgetastic_manageiq import SummaryTable, BaseEntitiesView
from widgetastic.widget import View
from cfme.utils.providers import get_crud_by_name
class ImageAllView(ContainerObjectAllBaseView):
SUMMARY_TEXT = "Container Images"
# ProviderEntity has its own fields, image view should rather use BaseEntity instead
including_entities = View.include(BaseEntitiesView, use_parent=True)
class ImageDetailsView(ContainerObjectDetailsBaseView):
@View.nested
class entities(ContainerObjectDetailsEntities): # noqa
configuration = SummaryTable(title='Configuration')
compliance = SummaryTable(title='Compliance')
@attr.s
class Image(BaseEntity, WidgetasticTaggable, Labelable, LoadDetailsMixin, PolicyProfileAssignable):
PLURAL = 'Container Images'
all_view = ImageAllView
details_view = ImageDetailsView
name = attr.ib()
id = attr.ib()
provider = attr.ib()
@cached_property
def mgmt(self):
return ApiImage(self.provider.mgmt, self.name, self.sha256)
@cached_property
def sha256(self):
return self.id.split('@')[-1]
def perform_smartstate_analysis(self, wait_for_finish=False, timeout='7M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression pattern following Bugzilla Bug 1483590
# task name will contain also Image name
# the str compile on tasks module
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Perform SmartState Analysis', handle_alert=True)
# TODO: Modify accordingly once there is FlashMessages.assert_massage_contains()
assert filter(lambda m: 'Analysis successfully initiated' in m.text, view.flash.messages)
if wait_for_finish:
try:
wait_for(tasks.is_analysis_finished,
func_kwargs={'name': '(?i)(Container Image.*)',
'task_type': 'container'},
timeout=timeout,
fail_func=self.appliance.server.browser.refresh)
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
def check_compliance(self, wait_for_finish=True, timeout=240):
"""Initiates compliance check and waits for it to finish."""
view = navigate_to(self, 'Details')
original_state = self.compliance_status
view.toolbar.policy.item_select("Check Compliance of Last Known Configuration",
handle_alert=True)
view.flash.assert_no_error()
if wait_for_finish:
wait_for(
lambda: self.compliance_status != original_state, num_sec=timeout, delay=5,
message='compliance state of {} still matches {}'
.format(self.name, original_state)
)
return self.compliant
@property
def compliance_status(self):
view = refresh_and_navigate(self, 'Details')
return view.entities.compliance.read().get('Status').strip()
@property
def compliant(self):
"""Check if the image is compliant
Returns:
:py:class:`NoneType` if the image was never verified, otherwise :py:class:`bool`
"""
text = self.compliance_status.lower()
if text == "never verified":
return None
elif text.startswith("non-compliant"):
return False
elif text.startswith("compliant"):
return True
else:
raise ValueError("{} is not a known state for compliance".format(text))
@attr.s
class ImageCollection(GetRandomInstancesMixin, BaseCollection, PolicyProfileAssignable):
"""Collection object for :py:class:`Image`."""
ENTITY = Image
def all(self):
# container_images has ems_id, join with ext_mgmgt_systems on id for provider name
image_table = self.appliance.db.client['container_images']
ems_table = self.appliance.db.client['ext_management_systems']
image_query = (
self.appliance.db.client.session
.query(image_table.name, image_table.image_ref, ems_table.name)
.join(ems_table, image_table.ems_id == ems_table.id))
provider = None
# filtered
if self.filters.get('provider'):
provider = self.filters.get('provider')
image_query = image_query.filter(ems_table.name == provider.name)
images = []
for name, image_ref, ems_name in image_query.all():
images.append(self.instantiate(name=name, id=image_ref,
provider=provider or get_crud_by_name(ems_name)))
return images
def check_compliance_multiple_images(self, image_entities, check_on_entity=True, timeout=240):
"""Initiates compliance check and waits for it to finish on several Images.
Args:
image_entities: list of Image entities that need to perform compliance check on them
check_on_entity (bool): check the compliance status on the entity summary view if True,
only run compliance otherwise.
timeout (seconds): time for waiting for compliance status
"""
# Chose Check Compliance of Last Known Configuration
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
wait_for(lambda: images_view.toolbar.policy.is_enabled, num_sec=5,
message='Policy drop down menu is disabled after checking some Images')
images_view.toolbar.policy.item_select('Check Compliance of Last Known Configuration',
handle_alert=True)
images_view.flash.assert_no_error()
# Verify Image summary
if check_on_entity:
for image_instance in image_entities:
original_state = 'never verified'
try:
wait_for(
lambda: image_instance.compliance_status.lower() != original_state,
num_sec=timeout, delay=5,
message='compliance state of Image ID, "{}", still matches {}'
.format(image_instance.id, original_state)
)
except TimedOutError:
logger.error('compliance state of Image ID, "{}", is {}'
.format(image_instance.id, image_instance.compliance_status))
raise TimedOutError('Timeout exceeded, Waited too much'
' time for check Compliance finish ({}).'.format(timeout))
def check_image_entities(self, image_entities):
"""check rows on Container Images table."""
images_view = navigate_to(self, 'All', use_resetter=False)
images_view.paginator.set_items_per_page(1000)
conditions = []
for image_entity in image_entities:
conditions.append({'id': image_entity.id})
entities = images_view.entities.apply(func=lambda e: e.check(), conditions=conditions)
return entities
def perform_smartstate_analysis_multiple_images(
self, image_entities, wait_for_finish=False, timeout='20M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression
# the str compile on tasks module
task_name = '(?i)(Container Image.*)'
task_type = 'container'
num_of_tasks = len(image_entities)
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
images_view.toolbar.configuration.item_select(
'Perform SmartState Analysis', handle_alert=True)
for image_entity in image_entities:
images_view.flash.assert_success_message(
'"{}": Analysis successfully initiated'.format(image_entity.name), partial=True
)
if wait_for_finish:
try:
# check all tasks state finished
tasks.wait_analysis_finished_multiple_tasks(task_name, task_type,
num_of_tasks, timeout=timeout)
# check all task passed successfully with no error
if tasks.check_tasks_have_no_errors(task_name, task_type, num_of_tasks,
silent_failure=True,
clear_tasks_after_success=False):
return True
else:
logger.error('Some Images SSA tasks finished with error message,'
' see logger for more details.')
return False
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
@navigator.register(ImageCollection, 'All')
class All(CFMENavigateStep):
VIEW = ImageAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self):
self.prerequisite_view.navigation.select('Compute', 'Containers', 'Container Images')
def resetter(self):
# Reset view and selection
|
@navigator.register(Image, 'Details')
class Details(CFMENavigateStep):
VIEW = ImageDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
search_visible = self.prerequisite_view.entities.search.is_displayed
self.prerequisite_view.entities.get_entity(provider=self.obj.provider.name,
surf_pages=not search_visible,
use_search=search_visible, name=self.obj.name,
id=self.obj.id).click()
@navigator.register(Image, 'EditTags')
class ImageRegistryEditTags(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
| self.view.entities.search.clear_simple_search()
self.view.toolbar.view_selector.select("List View") | identifier_body |
image.py | # -*- coding: utf-8 -*-
import attr
from cached_property import cached_property
from navmazing import NavigateToSibling, NavigateToAttribute
from wrapanapi.containers.image import Image as ApiImage
from cfme.common import (WidgetasticTaggable, PolicyProfileAssignable,
TagPageView)
from cfme.containers.provider import (Labelable,
ContainerObjectAllBaseView,
ContainerObjectDetailsBaseView, LoadDetailsMixin,
refresh_and_navigate, ContainerObjectDetailsEntities,
GetRandomInstancesMixin)
from cfme.utils.appliance.implementations.ui import CFMENavigateStep, navigator, navigate_to
from cfme.utils.log import logger
from cfme.configure import tasks
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.wait import wait_for, TimedOutError
from widgetastic_manageiq import SummaryTable, BaseEntitiesView
from widgetastic.widget import View
from cfme.utils.providers import get_crud_by_name
class ImageAllView(ContainerObjectAllBaseView):
SUMMARY_TEXT = "Container Images"
# ProviderEntity has its own fields, image view should rather use BaseEntity instead
including_entities = View.include(BaseEntitiesView, use_parent=True)
class ImageDetailsView(ContainerObjectDetailsBaseView):
@View.nested
class entities(ContainerObjectDetailsEntities): # noqa
configuration = SummaryTable(title='Configuration')
compliance = SummaryTable(title='Compliance')
@attr.s
class | (BaseEntity, WidgetasticTaggable, Labelable, LoadDetailsMixin, PolicyProfileAssignable):
PLURAL = 'Container Images'
all_view = ImageAllView
details_view = ImageDetailsView
name = attr.ib()
id = attr.ib()
provider = attr.ib()
@cached_property
def mgmt(self):
return ApiImage(self.provider.mgmt, self.name, self.sha256)
@cached_property
def sha256(self):
return self.id.split('@')[-1]
def perform_smartstate_analysis(self, wait_for_finish=False, timeout='7M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression pattern following Bugzilla Bug 1483590
# task name will contain also Image name
# the str compile on tasks module
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Perform SmartState Analysis', handle_alert=True)
# TODO: Modify accordingly once there is FlashMessages.assert_massage_contains()
assert filter(lambda m: 'Analysis successfully initiated' in m.text, view.flash.messages)
if wait_for_finish:
try:
wait_for(tasks.is_analysis_finished,
func_kwargs={'name': '(?i)(Container Image.*)',
'task_type': 'container'},
timeout=timeout,
fail_func=self.appliance.server.browser.refresh)
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
def check_compliance(self, wait_for_finish=True, timeout=240):
"""Initiates compliance check and waits for it to finish."""
view = navigate_to(self, 'Details')
original_state = self.compliance_status
view.toolbar.policy.item_select("Check Compliance of Last Known Configuration",
handle_alert=True)
view.flash.assert_no_error()
if wait_for_finish:
wait_for(
lambda: self.compliance_status != original_state, num_sec=timeout, delay=5,
message='compliance state of {} still matches {}'
.format(self.name, original_state)
)
return self.compliant
@property
def compliance_status(self):
view = refresh_and_navigate(self, 'Details')
return view.entities.compliance.read().get('Status').strip()
@property
def compliant(self):
"""Check if the image is compliant
Returns:
:py:class:`NoneType` if the image was never verified, otherwise :py:class:`bool`
"""
text = self.compliance_status.lower()
if text == "never verified":
return None
elif text.startswith("non-compliant"):
return False
elif text.startswith("compliant"):
return True
else:
raise ValueError("{} is not a known state for compliance".format(text))
@attr.s
class ImageCollection(GetRandomInstancesMixin, BaseCollection, PolicyProfileAssignable):
"""Collection object for :py:class:`Image`."""
ENTITY = Image
def all(self):
# container_images has ems_id, join with ext_mgmgt_systems on id for provider name
image_table = self.appliance.db.client['container_images']
ems_table = self.appliance.db.client['ext_management_systems']
image_query = (
self.appliance.db.client.session
.query(image_table.name, image_table.image_ref, ems_table.name)
.join(ems_table, image_table.ems_id == ems_table.id))
provider = None
# filtered
if self.filters.get('provider'):
provider = self.filters.get('provider')
image_query = image_query.filter(ems_table.name == provider.name)
images = []
for name, image_ref, ems_name in image_query.all():
images.append(self.instantiate(name=name, id=image_ref,
provider=provider or get_crud_by_name(ems_name)))
return images
def check_compliance_multiple_images(self, image_entities, check_on_entity=True, timeout=240):
"""Initiates compliance check and waits for it to finish on several Images.
Args:
image_entities: list of Image entities that need to perform compliance check on them
check_on_entity (bool): check the compliance status on the entity summary view if True,
only run compliance otherwise.
timeout (seconds): time for waiting for compliance status
"""
# Chose Check Compliance of Last Known Configuration
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
wait_for(lambda: images_view.toolbar.policy.is_enabled, num_sec=5,
message='Policy drop down menu is disabled after checking some Images')
images_view.toolbar.policy.item_select('Check Compliance of Last Known Configuration',
handle_alert=True)
images_view.flash.assert_no_error()
# Verify Image summary
if check_on_entity:
for image_instance in image_entities:
original_state = 'never verified'
try:
wait_for(
lambda: image_instance.compliance_status.lower() != original_state,
num_sec=timeout, delay=5,
message='compliance state of Image ID, "{}", still matches {}'
.format(image_instance.id, original_state)
)
except TimedOutError:
logger.error('compliance state of Image ID, "{}", is {}'
.format(image_instance.id, image_instance.compliance_status))
raise TimedOutError('Timeout exceeded, Waited too much'
' time for check Compliance finish ({}).'.format(timeout))
def check_image_entities(self, image_entities):
"""check rows on Container Images table."""
images_view = navigate_to(self, 'All', use_resetter=False)
images_view.paginator.set_items_per_page(1000)
conditions = []
for image_entity in image_entities:
conditions.append({'id': image_entity.id})
entities = images_view.entities.apply(func=lambda e: e.check(), conditions=conditions)
return entities
def perform_smartstate_analysis_multiple_images(
self, image_entities, wait_for_finish=False, timeout='20M'):
"""Performing SmartState Analysis on this Image
"""
# task_name change from str to regular expression
# the str compile on tasks module
task_name = '(?i)(Container Image.*)'
task_type = 'container'
num_of_tasks = len(image_entities)
images_view = navigate_to(self, 'All')
self.check_image_entities(image_entities)
images_view.toolbar.configuration.item_select(
'Perform SmartState Analysis', handle_alert=True)
for image_entity in image_entities:
images_view.flash.assert_success_message(
'"{}": Analysis successfully initiated'.format(image_entity.name), partial=True
)
if wait_for_finish:
try:
# check all tasks state finished
tasks.wait_analysis_finished_multiple_tasks(task_name, task_type,
num_of_tasks, timeout=timeout)
# check all task passed successfully with no error
if tasks.check_tasks_have_no_errors(task_name, task_type, num_of_tasks,
silent_failure=True,
clear_tasks_after_success=False):
return True
else:
logger.error('Some Images SSA tasks finished with error message,'
' see logger for more details.')
return False
except TimedOutError:
raise TimedOutError('Timeout exceeded, Waited too much time for SSA to finish ({}).'
.format(timeout))
@navigator.register(ImageCollection, 'All')
class All(CFMENavigateStep):
VIEW = ImageAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self):
self.prerequisite_view.navigation.select('Compute', 'Containers', 'Container Images')
def resetter(self):
# Reset view and selection
self.view.entities.search.clear_simple_search()
self.view.toolbar.view_selector.select("List View")
@navigator.register(Image, 'Details')
class Details(CFMENavigateStep):
VIEW = ImageDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
search_visible = self.prerequisite_view.entities.search.is_displayed
self.prerequisite_view.entities.get_entity(provider=self.obj.provider.name,
surf_pages=not search_visible,
use_search=search_visible, name=self.obj.name,
id=self.obj.id).click()
@navigator.register(Image, 'EditTags')
class ImageRegistryEditTags(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
| Image | identifier_name |
animation.py | """Animation.
Animation is set of keyframes.
Value of selected attribute changes in time.
Keyframe:
(time, value)
Objects have animation manager which manages animation graph and switching."""
from operator import itemgetter
from eaf import Timer
from xoinvader.utils import Point
class AnimationBoundariesExceeded(Exception):
"""Exception to show that interpolated value will be incorrect."""
def __init__(self, first, current_time, second):
super(AnimationBoundariesExceeded, self).__init__(
self,
f"Animation frame boundaries exceeded: {first} <= {current_time} <= {second}",
)
class InterpolationUnknownTypes(Exception):
"""Such type combination is unsupported."""
def __init__(self, first, second):
super(InterpolationUnknownTypes, self).__init__(
self, f"Unknown types of interpolating values: {first} and {second}"
)
# TODO: Implement animation graph and etc
class AnimationManager(object):
"""Manage list of object animation."""
def __init__(self):
self._animations = {}
self._animation = None
@property
def animation(self):
"""AnimationManager's current animation name.
To set animation - assign it's name.
:getter: yes
:setter: yes
:type: str
"""
if self._animation:
return self._animation.name
else:
raise AttributeError("There is no available animation.")
@animation.setter
def animation(self, name):
if name in self._animations:
self._animation = self._animations[name]
else:
raise ValueError(f"No such animation: '{name}'.")
def add(self, name, *args, **kwargs):
"""Add new animation, pass args to Animation class.
See interface of `class::xoinvader.animation.Animation`.
:param str name: animation name
"""
animation = Animation(name, *args, **kwargs)
self._animations[name] = animation
if not self._animation:
self._animation = animation
def update(self, dt):
"""Update manager's state."""
if not self._animation:
return
try:
self._animation.update(dt)
except StopIteration:
return # TODO: think about method to change animation
# pylint: disable=too-many-instance-attributes,too-many-arguments
# pylint: disable=too-few-public-methods
class Animation(object):
"""Animation unit.
Animation object holds sorted list of (time, value) items and changes
selected attribute of bound object according to local animation time.
Time measured by timer. When current time is greater or equal then time
of next keyframe - animation object changes it to appropriate value.
When animation is done and if not looped - raise StopIteration.
In case of interpolated animation value calculation occurs within two
bounding frames and on frame switch.
:param str name: animation name
:param object bind: object to bind animation
:param str attr: attribute to change in frames
:param list keyframes: (float, object) tuples
:param bool interp: interpolate values between frames or not
:param bool loop: loop animation or not
"""
def __init__(self, name, bind, attr, keyframes, interp=False, loop=False):
self._name = name
self._obj = bind
self._attr = attr
if not keyframes:
raise ValueError("Animation keyframes must not be empty.")
self._keyframes = sorted(keyframes, key=itemgetter(0))
self._interp = interp
self._loop = loop
# Timer for tracking local time
self._timer = Timer(self._keyframes[-1][0], lambda: True)
self._timer.start()
# Current keyframe index
self._current = 0
if self._interp:
self.update = self._update_interpolated
else:
self.update = self._update_discrete
@property
def name(self):
"""Animation's name.
:getter: yes
:setter: no
:type: str
"""
return self._name
def _apply_value(self, value):
"""Apply new value to linked object.
:param obj value: value to apply
"""
setattr(self._obj, self._attr, value)
def _update_interpolated(self, dt):
"""Advance animation and interpolate value.
NOTE: animation frame switching depends on interp mode
animation with interpolation switches frame only when current local
time exceeds NEXT frames' time border.
"""
self._check_animation_state()
self._timer.update(dt)
current_time = self._timer.elapsed
keyframe = self._keyframes[self._current]
next_keyframe = self._keyframes[self._current + 1]
# it's time to switch keyframe
if current_time >= next_keyframe[0]:
self._current += 1
keyframe = self._keyframes[self._current]
if self._current == len(self._keyframes) - 1:
self._apply_value(keyframe[1])
self._current += 1
self._check_animation_state()
return
next_keyframe = self._keyframes[self._current + 1]
value = interpolate(keyframe, next_keyframe, current_time)
self._apply_value(value)
def _update_discrete(self, dt):
"""Advance animation without interpolating value.
NOTE: animation frame switching depends on interp mode
discrete animation swiches frame and updates value only if
current local time is >= time of current keyframe.
No need to worry about calculating value between frames - thus
no need to complicate behaviour.
"""
self._check_animation_state()
self._timer.update(dt)
keyframe = self._keyframes[self._current]
# Check if animation need to switch keyframe
if self._timer.elapsed >= keyframe[0]:
self._apply_value(keyframe[1])
self._current += 1
def _check_animation_state(self):
"""Check animation state and restart if needed.
:raise StopIteration: when animation exceeded frames.
"""
if len(self._keyframes) == self._current:
if self._loop:
self._current = 0
self._timer.restart()
else:
self._timer.stop()
raise StopIteration
def linear_equation(val1, val2, time1, time2, current_time):
"""Linear equation to get interpolated value.
:param float val1: first keyframe value
:param float val2: second keyframe value
:param float time1: first keyframe local time
:param float time2: second keyframe local time
:param float current_time: current animation local time
"""
return val1 + (val2 - val1) / (time2 - time1) * (current_time - time1)
def | (values, types):
"""Check if values are belongs to same type or type tuple.
:param collections.Iterable values: values to check type similarity
:param tuple|type types: type or tuple of types
"""
return all(map(lambda it: isinstance(it, types), values))
def interpolate(first, second, current_time):
"""Interpolate value by two bounding keyframes.
:param collections.Iterable first: first bounding keyframe
:param collections.Iterable second: second bounding keyframe
:param float current_time: current animation local time
:raises AnimationBoundariesExceeded: when time interval is invalid
:raises InterpolationUnknownTypes: when interpolating invalid types
"""
if not first[0] <= current_time <= second[0]:
raise AnimationBoundariesExceeded(first[0], current_time, second[0])
def frames_of(*args):
"""If frames both of specified type."""
return same_type((first[1], second[1]), args)
if frames_of(int, float):
value = linear_equation(
float(first[1]),
float(second[1]),
float(first[0]),
float(second[0]),
float(current_time),
)
elif frames_of(Point):
value = linear_equation(
first[1],
second[1],
float(first[0]),
float(second[0]),
float(current_time),
)
else:
raise InterpolationUnknownTypes(type(first[1]), type(second[1]))
return value
| same_type | identifier_name |
animation.py | """Animation.
Animation is set of keyframes.
Value of selected attribute changes in time.
Keyframe:
(time, value)
Objects have animation manager which manages animation graph and switching."""
from operator import itemgetter
from eaf import Timer
from xoinvader.utils import Point
class AnimationBoundariesExceeded(Exception):
"""Exception to show that interpolated value will be incorrect."""
def __init__(self, first, current_time, second):
super(AnimationBoundariesExceeded, self).__init__(
self,
f"Animation frame boundaries exceeded: {first} <= {current_time} <= {second}",
)
class InterpolationUnknownTypes(Exception):
"""Such type combination is unsupported."""
def __init__(self, first, second):
super(InterpolationUnknownTypes, self).__init__(
self, f"Unknown types of interpolating values: {first} and {second}"
)
# TODO: Implement animation graph and etc
class AnimationManager(object):
"""Manage list of object animation."""
def __init__(self):
self._animations = {}
self._animation = None
@property
def animation(self):
"""AnimationManager's current animation name.
To set animation - assign it's name.
:getter: yes
:setter: yes
:type: str
"""
if self._animation:
return self._animation.name
else:
raise AttributeError("There is no available animation.")
@animation.setter
def animation(self, name):
if name in self._animations:
self._animation = self._animations[name]
else:
|
def add(self, name, *args, **kwargs):
"""Add new animation, pass args to Animation class.
See interface of `class::xoinvader.animation.Animation`.
:param str name: animation name
"""
animation = Animation(name, *args, **kwargs)
self._animations[name] = animation
if not self._animation:
self._animation = animation
def update(self, dt):
"""Update manager's state."""
if not self._animation:
return
try:
self._animation.update(dt)
except StopIteration:
return # TODO: think about method to change animation
# pylint: disable=too-many-instance-attributes,too-many-arguments
# pylint: disable=too-few-public-methods
class Animation(object):
"""Animation unit.
Animation object holds sorted list of (time, value) items and changes
selected attribute of bound object according to local animation time.
Time measured by timer. When current time is greater or equal then time
of next keyframe - animation object changes it to appropriate value.
When animation is done and if not looped - raise StopIteration.
In case of interpolated animation value calculation occurs within two
bounding frames and on frame switch.
:param str name: animation name
:param object bind: object to bind animation
:param str attr: attribute to change in frames
:param list keyframes: (float, object) tuples
:param bool interp: interpolate values between frames or not
:param bool loop: loop animation or not
"""
def __init__(self, name, bind, attr, keyframes, interp=False, loop=False):
self._name = name
self._obj = bind
self._attr = attr
if not keyframes:
raise ValueError("Animation keyframes must not be empty.")
self._keyframes = sorted(keyframes, key=itemgetter(0))
self._interp = interp
self._loop = loop
# Timer for tracking local time
self._timer = Timer(self._keyframes[-1][0], lambda: True)
self._timer.start()
# Current keyframe index
self._current = 0
if self._interp:
self.update = self._update_interpolated
else:
self.update = self._update_discrete
@property
def name(self):
"""Animation's name.
:getter: yes
:setter: no
:type: str
"""
return self._name
def _apply_value(self, value):
"""Apply new value to linked object.
:param obj value: value to apply
"""
setattr(self._obj, self._attr, value)
def _update_interpolated(self, dt):
"""Advance animation and interpolate value.
NOTE: animation frame switching depends on interp mode
animation with interpolation switches frame only when current local
time exceeds NEXT frames' time border.
"""
self._check_animation_state()
self._timer.update(dt)
current_time = self._timer.elapsed
keyframe = self._keyframes[self._current]
next_keyframe = self._keyframes[self._current + 1]
# it's time to switch keyframe
if current_time >= next_keyframe[0]:
self._current += 1
keyframe = self._keyframes[self._current]
if self._current == len(self._keyframes) - 1:
self._apply_value(keyframe[1])
self._current += 1
self._check_animation_state()
return
next_keyframe = self._keyframes[self._current + 1]
value = interpolate(keyframe, next_keyframe, current_time)
self._apply_value(value)
def _update_discrete(self, dt):
"""Advance animation without interpolating value.
NOTE: animation frame switching depends on interp mode
discrete animation swiches frame and updates value only if
current local time is >= time of current keyframe.
No need to worry about calculating value between frames - thus
no need to complicate behaviour.
"""
self._check_animation_state()
self._timer.update(dt)
keyframe = self._keyframes[self._current]
# Check if animation need to switch keyframe
if self._timer.elapsed >= keyframe[0]:
self._apply_value(keyframe[1])
self._current += 1
def _check_animation_state(self):
"""Check animation state and restart if needed.
:raise StopIteration: when animation exceeded frames.
"""
if len(self._keyframes) == self._current:
if self._loop:
self._current = 0
self._timer.restart()
else:
self._timer.stop()
raise StopIteration
def linear_equation(val1, val2, time1, time2, current_time):
"""Linear equation to get interpolated value.
:param float val1: first keyframe value
:param float val2: second keyframe value
:param float time1: first keyframe local time
:param float time2: second keyframe local time
:param float current_time: current animation local time
"""
return val1 + (val2 - val1) / (time2 - time1) * (current_time - time1)
def same_type(values, types):
"""Check if values are belongs to same type or type tuple.
:param collections.Iterable values: values to check type similarity
:param tuple|type types: type or tuple of types
"""
return all(map(lambda it: isinstance(it, types), values))
def interpolate(first, second, current_time):
"""Interpolate value by two bounding keyframes.
:param collections.Iterable first: first bounding keyframe
:param collections.Iterable second: second bounding keyframe
:param float current_time: current animation local time
:raises AnimationBoundariesExceeded: when time interval is invalid
:raises InterpolationUnknownTypes: when interpolating invalid types
"""
if not first[0] <= current_time <= second[0]:
raise AnimationBoundariesExceeded(first[0], current_time, second[0])
def frames_of(*args):
"""If frames both of specified type."""
return same_type((first[1], second[1]), args)
if frames_of(int, float):
value = linear_equation(
float(first[1]),
float(second[1]),
float(first[0]),
float(second[0]),
float(current_time),
)
elif frames_of(Point):
value = linear_equation(
first[1],
second[1],
float(first[0]),
float(second[0]),
float(current_time),
)
else:
raise InterpolationUnknownTypes(type(first[1]), type(second[1]))
return value
| raise ValueError(f"No such animation: '{name}'.") | conditional_block |
animation.py | """Animation.
Animation is set of keyframes.
Value of selected attribute changes in time.
Keyframe:
(time, value)
Objects have animation manager which manages animation graph and switching."""
from operator import itemgetter
from eaf import Timer
from xoinvader.utils import Point
class AnimationBoundariesExceeded(Exception):
"""Exception to show that interpolated value will be incorrect."""
def __init__(self, first, current_time, second):
super(AnimationBoundariesExceeded, self).__init__(
self,
f"Animation frame boundaries exceeded: {first} <= {current_time} <= {second}",
)
class InterpolationUnknownTypes(Exception):
"""Such type combination is unsupported."""
def __init__(self, first, second):
super(InterpolationUnknownTypes, self).__init__(
self, f"Unknown types of interpolating values: {first} and {second}"
)
# TODO: Implement animation graph and etc
class AnimationManager(object):
"""Manage list of object animation."""
def __init__(self):
self._animations = {}
self._animation = None
@property
def animation(self):
|
@animation.setter
def animation(self, name):
if name in self._animations:
self._animation = self._animations[name]
else:
raise ValueError(f"No such animation: '{name}'.")
def add(self, name, *args, **kwargs):
"""Add new animation, pass args to Animation class.
See interface of `class::xoinvader.animation.Animation`.
:param str name: animation name
"""
animation = Animation(name, *args, **kwargs)
self._animations[name] = animation
if not self._animation:
self._animation = animation
def update(self, dt):
"""Update manager's state."""
if not self._animation:
return
try:
self._animation.update(dt)
except StopIteration:
return # TODO: think about method to change animation
# pylint: disable=too-many-instance-attributes,too-many-arguments
# pylint: disable=too-few-public-methods
class Animation(object):
"""Animation unit.
Animation object holds sorted list of (time, value) items and changes
selected attribute of bound object according to local animation time.
Time measured by timer. When current time is greater or equal then time
of next keyframe - animation object changes it to appropriate value.
When animation is done and if not looped - raise StopIteration.
In case of interpolated animation value calculation occurs within two
bounding frames and on frame switch.
:param str name: animation name
:param object bind: object to bind animation
:param str attr: attribute to change in frames
:param list keyframes: (float, object) tuples
:param bool interp: interpolate values between frames or not
:param bool loop: loop animation or not
"""
def __init__(self, name, bind, attr, keyframes, interp=False, loop=False):
self._name = name
self._obj = bind
self._attr = attr
if not keyframes:
raise ValueError("Animation keyframes must not be empty.")
self._keyframes = sorted(keyframes, key=itemgetter(0))
self._interp = interp
self._loop = loop
# Timer for tracking local time
self._timer = Timer(self._keyframes[-1][0], lambda: True)
self._timer.start()
# Current keyframe index
self._current = 0
if self._interp:
self.update = self._update_interpolated
else:
self.update = self._update_discrete
@property
def name(self):
"""Animation's name.
:getter: yes
:setter: no
:type: str
"""
return self._name
def _apply_value(self, value):
"""Apply new value to linked object.
:param obj value: value to apply
"""
setattr(self._obj, self._attr, value)
def _update_interpolated(self, dt):
"""Advance animation and interpolate value.
NOTE: animation frame switching depends on interp mode
animation with interpolation switches frame only when current local
time exceeds NEXT frames' time border.
"""
self._check_animation_state()
self._timer.update(dt)
current_time = self._timer.elapsed
keyframe = self._keyframes[self._current]
next_keyframe = self._keyframes[self._current + 1]
# it's time to switch keyframe
if current_time >= next_keyframe[0]:
self._current += 1
keyframe = self._keyframes[self._current]
if self._current == len(self._keyframes) - 1:
self._apply_value(keyframe[1])
self._current += 1
self._check_animation_state()
return
next_keyframe = self._keyframes[self._current + 1]
value = interpolate(keyframe, next_keyframe, current_time)
self._apply_value(value)
def _update_discrete(self, dt):
"""Advance animation without interpolating value.
NOTE: animation frame switching depends on interp mode
discrete animation swiches frame and updates value only if
current local time is >= time of current keyframe.
No need to worry about calculating value between frames - thus
no need to complicate behaviour.
"""
self._check_animation_state()
self._timer.update(dt)
keyframe = self._keyframes[self._current]
# Check if animation need to switch keyframe
if self._timer.elapsed >= keyframe[0]:
self._apply_value(keyframe[1])
self._current += 1
def _check_animation_state(self):
"""Check animation state and restart if needed.
:raise StopIteration: when animation exceeded frames.
"""
if len(self._keyframes) == self._current:
if self._loop:
self._current = 0
self._timer.restart()
else:
self._timer.stop()
raise StopIteration
def linear_equation(val1, val2, time1, time2, current_time):
"""Linear equation to get interpolated value.
:param float val1: first keyframe value
:param float val2: second keyframe value
:param float time1: first keyframe local time
:param float time2: second keyframe local time
:param float current_time: current animation local time
"""
return val1 + (val2 - val1) / (time2 - time1) * (current_time - time1)
def same_type(values, types):
"""Check if values are belongs to same type or type tuple.
:param collections.Iterable values: values to check type similarity
:param tuple|type types: type or tuple of types
"""
return all(map(lambda it: isinstance(it, types), values))
def interpolate(first, second, current_time):
"""Interpolate value by two bounding keyframes.
:param collections.Iterable first: first bounding keyframe
:param collections.Iterable second: second bounding keyframe
:param float current_time: current animation local time
:raises AnimationBoundariesExceeded: when time interval is invalid
:raises InterpolationUnknownTypes: when interpolating invalid types
"""
if not first[0] <= current_time <= second[0]:
raise AnimationBoundariesExceeded(first[0], current_time, second[0])
def frames_of(*args):
"""If frames both of specified type."""
return same_type((first[1], second[1]), args)
if frames_of(int, float):
value = linear_equation(
float(first[1]),
float(second[1]),
float(first[0]),
float(second[0]),
float(current_time),
)
elif frames_of(Point):
value = linear_equation(
first[1],
second[1],
float(first[0]),
float(second[0]),
float(current_time),
)
else:
raise InterpolationUnknownTypes(type(first[1]), type(second[1]))
return value
| """AnimationManager's current animation name.
To set animation - assign it's name.
:getter: yes
:setter: yes
:type: str
"""
if self._animation:
return self._animation.name
else:
raise AttributeError("There is no available animation.") | identifier_body |
animation.py | """Animation.
Animation is set of keyframes.
Value of selected attribute changes in time.
Keyframe:
(time, value)
Objects have animation manager which manages animation graph and switching."""
from operator import itemgetter
from eaf import Timer
from xoinvader.utils import Point
class AnimationBoundariesExceeded(Exception):
"""Exception to show that interpolated value will be incorrect."""
def __init__(self, first, current_time, second):
super(AnimationBoundariesExceeded, self).__init__(
self,
f"Animation frame boundaries exceeded: {first} <= {current_time} <= {second}",
)
class InterpolationUnknownTypes(Exception):
"""Such type combination is unsupported."""
def __init__(self, first, second):
super(InterpolationUnknownTypes, self).__init__(
self, f"Unknown types of interpolating values: {first} and {second}"
)
# TODO: Implement animation graph and etc
class AnimationManager(object):
"""Manage list of object animation."""
def __init__(self):
self._animations = {}
self._animation = None
@property
def animation(self):
"""AnimationManager's current animation name.
To set animation - assign it's name.
:getter: yes
:setter: yes
:type: str
"""
if self._animation:
return self._animation.name
else:
raise AttributeError("There is no available animation.")
@animation.setter
def animation(self, name):
if name in self._animations:
self._animation = self._animations[name]
else:
raise ValueError(f"No such animation: '{name}'.")
def add(self, name, *args, **kwargs):
"""Add new animation, pass args to Animation class.
See interface of `class::xoinvader.animation.Animation`.
:param str name: animation name
"""
animation = Animation(name, *args, **kwargs)
self._animations[name] = animation | if not self._animation:
self._animation = animation
def update(self, dt):
"""Update manager's state."""
if not self._animation:
return
try:
self._animation.update(dt)
except StopIteration:
return # TODO: think about method to change animation
# pylint: disable=too-many-instance-attributes,too-many-arguments
# pylint: disable=too-few-public-methods
class Animation(object):
"""Animation unit.
Animation object holds sorted list of (time, value) items and changes
selected attribute of bound object according to local animation time.
Time measured by timer. When current time is greater or equal then time
of next keyframe - animation object changes it to appropriate value.
When animation is done and if not looped - raise StopIteration.
In case of interpolated animation value calculation occurs within two
bounding frames and on frame switch.
:param str name: animation name
:param object bind: object to bind animation
:param str attr: attribute to change in frames
:param list keyframes: (float, object) tuples
:param bool interp: interpolate values between frames or not
:param bool loop: loop animation or not
"""
def __init__(self, name, bind, attr, keyframes, interp=False, loop=False):
self._name = name
self._obj = bind
self._attr = attr
if not keyframes:
raise ValueError("Animation keyframes must not be empty.")
self._keyframes = sorted(keyframes, key=itemgetter(0))
self._interp = interp
self._loop = loop
# Timer for tracking local time
self._timer = Timer(self._keyframes[-1][0], lambda: True)
self._timer.start()
# Current keyframe index
self._current = 0
if self._interp:
self.update = self._update_interpolated
else:
self.update = self._update_discrete
@property
def name(self):
"""Animation's name.
:getter: yes
:setter: no
:type: str
"""
return self._name
def _apply_value(self, value):
"""Apply new value to linked object.
:param obj value: value to apply
"""
setattr(self._obj, self._attr, value)
def _update_interpolated(self, dt):
"""Advance animation and interpolate value.
NOTE: animation frame switching depends on interp mode
animation with interpolation switches frame only when current local
time exceeds NEXT frames' time border.
"""
self._check_animation_state()
self._timer.update(dt)
current_time = self._timer.elapsed
keyframe = self._keyframes[self._current]
next_keyframe = self._keyframes[self._current + 1]
# it's time to switch keyframe
if current_time >= next_keyframe[0]:
self._current += 1
keyframe = self._keyframes[self._current]
if self._current == len(self._keyframes) - 1:
self._apply_value(keyframe[1])
self._current += 1
self._check_animation_state()
return
next_keyframe = self._keyframes[self._current + 1]
value = interpolate(keyframe, next_keyframe, current_time)
self._apply_value(value)
def _update_discrete(self, dt):
"""Advance animation without interpolating value.
NOTE: animation frame switching depends on interp mode
discrete animation swiches frame and updates value only if
current local time is >= time of current keyframe.
No need to worry about calculating value between frames - thus
no need to complicate behaviour.
"""
self._check_animation_state()
self._timer.update(dt)
keyframe = self._keyframes[self._current]
# Check if animation need to switch keyframe
if self._timer.elapsed >= keyframe[0]:
self._apply_value(keyframe[1])
self._current += 1
def _check_animation_state(self):
"""Check animation state and restart if needed.
:raise StopIteration: when animation exceeded frames.
"""
if len(self._keyframes) == self._current:
if self._loop:
self._current = 0
self._timer.restart()
else:
self._timer.stop()
raise StopIteration
def linear_equation(val1, val2, time1, time2, current_time):
"""Linear equation to get interpolated value.
:param float val1: first keyframe value
:param float val2: second keyframe value
:param float time1: first keyframe local time
:param float time2: second keyframe local time
:param float current_time: current animation local time
"""
return val1 + (val2 - val1) / (time2 - time1) * (current_time - time1)
def same_type(values, types):
"""Check if values are belongs to same type or type tuple.
:param collections.Iterable values: values to check type similarity
:param tuple|type types: type or tuple of types
"""
return all(map(lambda it: isinstance(it, types), values))
def interpolate(first, second, current_time):
"""Interpolate value by two bounding keyframes.
:param collections.Iterable first: first bounding keyframe
:param collections.Iterable second: second bounding keyframe
:param float current_time: current animation local time
:raises AnimationBoundariesExceeded: when time interval is invalid
:raises InterpolationUnknownTypes: when interpolating invalid types
"""
if not first[0] <= current_time <= second[0]:
raise AnimationBoundariesExceeded(first[0], current_time, second[0])
def frames_of(*args):
"""If frames both of specified type."""
return same_type((first[1], second[1]), args)
if frames_of(int, float):
value = linear_equation(
float(first[1]),
float(second[1]),
float(first[0]),
float(second[0]),
float(current_time),
)
elif frames_of(Point):
value = linear_equation(
first[1],
second[1],
float(first[0]),
float(second[0]),
float(current_time),
)
else:
raise InterpolationUnknownTypes(type(first[1]), type(second[1]))
return value | random_line_split | |
kblink.directive.js | (function() {
'use strict';
angular.module('facetApp')
.directive('kblink', function() {
return {
restrict: 'EC',
scope: { href: '@' },
transclude: true,
controller: ['$scope', 'popoverService', function($scope, popoverService){
if (!$scope.href) return;
$scope.image = false;
$scope.lifespan = '';
popoverService.getHrefPopover($scope.href).then(function(data) {
if (data.length) data = data[0];
$scope.label = data.label;
$scope.link = '#!/henkilo/'+ (data.id).replace(/^.+?(p[0-9_]+)$/, '$1');
// check if lifespan contains any numbers
if ((new RegExp(/\d/)).test(data.lifespan)) |
if (data.hasOwnProperty('image')) $scope.image = data.image;
});
}],
template: '<a uib-popover-template="\'views/tooltips/personTooltipTemplate.html\'" popover-trigger="\'mouseenter\'" ng-href="{{ link }}" ng-transclude></a>'
}});
})(); | {
// remove leading zeros (0800-0900) -> (800-900)
data.lifespan = data.lifespan.replace(/(\D)0/g, "$1");
$scope.lifespan = data.lifespan;
} | conditional_block |
kblink.directive.js | (function() {
'use strict';
angular.module('facetApp') | scope: { href: '@' },
transclude: true,
controller: ['$scope', 'popoverService', function($scope, popoverService){
if (!$scope.href) return;
$scope.image = false;
$scope.lifespan = '';
popoverService.getHrefPopover($scope.href).then(function(data) {
if (data.length) data = data[0];
$scope.label = data.label;
$scope.link = '#!/henkilo/'+ (data.id).replace(/^.+?(p[0-9_]+)$/, '$1');
// check if lifespan contains any numbers
if ((new RegExp(/\d/)).test(data.lifespan)) {
// remove leading zeros (0800-0900) -> (800-900)
data.lifespan = data.lifespan.replace(/(\D)0/g, "$1");
$scope.lifespan = data.lifespan;
}
if (data.hasOwnProperty('image')) $scope.image = data.image;
});
}],
template: '<a uib-popover-template="\'views/tooltips/personTooltipTemplate.html\'" popover-trigger="\'mouseenter\'" ng-href="{{ link }}" ng-transclude></a>'
}});
})(); | .directive('kblink', function() {
return {
restrict: 'EC', | random_line_split |
old-state-manager.ts | import mapValues from 'lodash-es/mapValues'
import {IOldGameData, OldState} from './types'
const KEYS = {
currentRound: 'Bridge.currentRound',
maker: 'Bridge.maker',
players: 'Bridge.players',
state: 'Bridge.state',
totalRounds: 'Bridge.totalRounds'
}
/**
* Attempt to get v1.0.0 data from localStorage.
*
* @returns The data stored in localStorage. If not found, null is returned.
* @throws {SyntaxError} - Invalid JSON is found in one of the key
*/
export function retrieveOldData(): IOldGameData | null {
const dataMap = mapValues(KEYS, key => {
const value = localStorage.getItem(key)
return value != null ? JSON.parse(value) : null
})
if (Object.values(dataMap).includes(null)) | else {
return dataMap as any
}
}
/**
* Test if there is v1.0.0 data.
* Even the state is notStarted, this function will still return true.
*/
export function hasOldData(): boolean {
return localStorage.getItem('Bridge.state') !== null
}
/**
* Check if old data is on state `notStarted` or does not have the game.
* If either one of the above condition met, false will be returned
*/
export function isNotStarted(data: IOldGameData | null): boolean {
return data == null || data.state === OldState.notStarted
}
/**
* Remove all old data in v1.0.0 from localStorage
*/
export function deleteOldData(): void {
Object.values(KEYS).forEach(key =>
localStorage.removeItem(key)
)
}
| {
return null
} | conditional_block |
old-state-manager.ts | import mapValues from 'lodash-es/mapValues'
import {IOldGameData, OldState} from './types'
const KEYS = {
currentRound: 'Bridge.currentRound',
maker: 'Bridge.maker',
players: 'Bridge.players',
state: 'Bridge.state',
totalRounds: 'Bridge.totalRounds'
}
/**
* Attempt to get v1.0.0 data from localStorage.
*
* @returns The data stored in localStorage. If not found, null is returned.
* @throws {SyntaxError} - Invalid JSON is found in one of the key
*/
export function retrieveOldData(): IOldGameData | null {
const dataMap = mapValues(KEYS, key => {
const value = localStorage.getItem(key)
return value != null ? JSON.parse(value) : null
})
if (Object.values(dataMap).includes(null)) {
return null
} else {
return dataMap as any
}
}
/**
* Test if there is v1.0.0 data.
* Even the state is notStarted, this function will still return true.
*/
export function hasOldData(): boolean {
return localStorage.getItem('Bridge.state') !== null
}
/**
* Check if old data is on state `notStarted` or does not have the game.
* If either one of the above condition met, false will be returned
*/
export function isNotStarted(data: IOldGameData | null): boolean {
return data == null || data.state === OldState.notStarted
}
| * Remove all old data in v1.0.0 from localStorage
*/
export function deleteOldData(): void {
Object.values(KEYS).forEach(key =>
localStorage.removeItem(key)
)
} | /** | random_line_split |
old-state-manager.ts | import mapValues from 'lodash-es/mapValues'
import {IOldGameData, OldState} from './types'
const KEYS = {
currentRound: 'Bridge.currentRound',
maker: 'Bridge.maker',
players: 'Bridge.players',
state: 'Bridge.state',
totalRounds: 'Bridge.totalRounds'
}
/**
* Attempt to get v1.0.0 data from localStorage.
*
* @returns The data stored in localStorage. If not found, null is returned.
* @throws {SyntaxError} - Invalid JSON is found in one of the key
*/
export function retrieveOldData(): IOldGameData | null {
const dataMap = mapValues(KEYS, key => {
const value = localStorage.getItem(key)
return value != null ? JSON.parse(value) : null
})
if (Object.values(dataMap).includes(null)) {
return null
} else {
return dataMap as any
}
}
/**
* Test if there is v1.0.0 data.
* Even the state is notStarted, this function will still return true.
*/
export function hasOldData(): boolean |
/**
* Check if old data is on state `notStarted` or does not have the game.
* If either one of the above condition met, false will be returned
*/
export function isNotStarted(data: IOldGameData | null): boolean {
return data == null || data.state === OldState.notStarted
}
/**
* Remove all old data in v1.0.0 from localStorage
*/
export function deleteOldData(): void {
Object.values(KEYS).forEach(key =>
localStorage.removeItem(key)
)
}
| {
return localStorage.getItem('Bridge.state') !== null
} | identifier_body |
old-state-manager.ts | import mapValues from 'lodash-es/mapValues'
import {IOldGameData, OldState} from './types'
const KEYS = {
currentRound: 'Bridge.currentRound',
maker: 'Bridge.maker',
players: 'Bridge.players',
state: 'Bridge.state',
totalRounds: 'Bridge.totalRounds'
}
/**
* Attempt to get v1.0.0 data from localStorage.
*
* @returns The data stored in localStorage. If not found, null is returned.
* @throws {SyntaxError} - Invalid JSON is found in one of the key
*/
export function retrieveOldData(): IOldGameData | null {
const dataMap = mapValues(KEYS, key => {
const value = localStorage.getItem(key)
return value != null ? JSON.parse(value) : null
})
if (Object.values(dataMap).includes(null)) {
return null
} else {
return dataMap as any
}
}
/**
* Test if there is v1.0.0 data.
* Even the state is notStarted, this function will still return true.
*/
export function | (): boolean {
return localStorage.getItem('Bridge.state') !== null
}
/**
* Check if old data is on state `notStarted` or does not have the game.
* If either one of the above condition met, false will be returned
*/
export function isNotStarted(data: IOldGameData | null): boolean {
return data == null || data.state === OldState.notStarted
}
/**
* Remove all old data in v1.0.0 from localStorage
*/
export function deleteOldData(): void {
Object.values(KEYS).forEach(key =>
localStorage.removeItem(key)
)
}
| hasOldData | identifier_name |
fuzz_target_1.rs | #![no_main]
#[macro_use] extern crate libfuzzer_sys;
extern crate quick_xml;
| fuzz_target!(|data: &[u8]| {
// fuzzed code goes here
let cursor = Cursor::new(data);
let mut reader = Reader::from_reader(cursor);
let mut buf = vec![];
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) | Ok(Event::Empty(ref e))=> {
if e.unescaped().is_err() {
break;
}
for a in e.attributes() {
if a.ok().map_or(false, |a| a.unescaped_value().is_err()) {
break;
}
}
}
Ok(Event::Text(ref e)) | Ok(Event::Comment(ref e))
| Ok(Event::CData(ref e)) | Ok(Event::PI(ref e))
| Ok(Event::DocType(ref e)) => {
if e.unescaped().is_err() {
break;
}
}
Ok(Event::Decl(ref e)) => {
let _ = e.version();
let _ = e.encoding();
let _ = e.standalone();
}
Ok(Event::End(_)) => (),
Ok(Event::Eof) | Err(..) => break,
}
buf.clear();
}
}); | use quick_xml::Reader;
use quick_xml::events::Event;
use std::io::Cursor;
| random_line_split |
cabi_arm.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_upper_case_globals)]
use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
use llvm::{StructRetAttribute, ZExtAttribute};
use trans::cabi::{FnType, ArgType};
use trans::context::CrateContext;
use trans::type_::Type;
use std::cmp;
pub enum Flavor {
General,
Ios
}
type TyAlignFn = fn(ty: Type) -> uint;
fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1) / a * a;
}
fn align(off: uint, ty: Type, align_fn: TyAlignFn) -> uint {
let a = align_fn(ty);
return align_up_to(off, a);
}
fn general_ty_align(ty: Type) -> uint {
match ty.kind() {
Integer => ((ty.int_width() as uint) + 7) / 8,
Pointer => 4,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
1
} else {
let str_tys = ty.field_types();
str_tys.iter().fold(1, |a, t| cmp::max(a, general_ty_align(*t)))
}
}
Array => {
let elt = ty.element_type();
general_ty_align(elt)
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
general_ty_align(elt) * len
}
_ => panic!("ty_align: unhandled type")
}
}
// For more information see:
// ARMv7
// https://developer.apple.com/library/ios/documentation/Xcode/Conceptual
// /iPhoneOSABIReference/Articles/ARMv7FunctionCallingConventions.html
// ARMv6
// https://developer.apple.com/library/ios/documentation/Xcode/Conceptual
// /iPhoneOSABIReference/Articles/ARMv6FunctionCallingConventions.html
fn ios_ty_align(ty: Type) -> uint {
match ty.kind() {
Integer => cmp::min(4, ((ty.int_width() as uint) + 7) / 8),
Pointer => 4,
Float => 4,
Double => 4,
Struct => {
if ty.is_packed() {
1
} else {
let str_tys = ty.field_types();
str_tys.iter().fold(1, |a, t| cmp::max(a, ios_ty_align(*t)))
}
}
Array => {
let elt = ty.element_type();
ios_ty_align(elt)
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
ios_ty_align(elt) * len
}
_ => panic!("ty_align: unhandled type")
}
}
fn ty_size(ty: Type, align_fn: TyAlignFn) -> uint {
match ty.kind() {
Integer => ((ty.int_width() as uint) + 7) / 8,
Pointer => 4,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
let str_tys = ty.field_types();
str_tys.iter().fold(0, |s, t| s + ty_size(*t, align_fn))
} else {
let str_tys = ty.field_types();
let size = str_tys.iter()
.fold(0, |s, t| {
align(s, *t, align_fn) + ty_size(*t, align_fn)
});
align(size, ty, align_fn)
}
}
Array => {
let len = ty.array_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, align_fn);
len * eltsz
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, align_fn);
len * eltsz
}
_ => panic!("ty_size: unhandled type")
}
}
fn classify_ret_ty(ccx: &CrateContext, ty: Type, align_fn: TyAlignFn) -> ArgType {
if is_reg_ty(ty) {
let attr = if ty == Type::i1(ccx) { Some(ZExtAttribute) } else { None };
return ArgType::direct(ty, None, None, attr);
}
let size = ty_size(ty, align_fn);
if size <= 4 {
let llty = if size <= 1 {
Type::i8(ccx)
} else if size <= 2 {
Type::i16(ccx)
} else {
Type::i32(ccx)
};
return ArgType::direct(ty, Some(llty), None, None);
}
ArgType::indirect(ty, Some(StructRetAttribute))
}
fn classify_arg_ty(ccx: &CrateContext, ty: Type, align_fn: TyAlignFn) -> ArgType {
if is_reg_ty(ty) {
let attr = if ty == Type::i1(ccx) { Some(ZExtAttribute) } else { None };
return ArgType::direct(ty, None, None, attr);
} | let align = align_fn(ty);
let size = ty_size(ty, align_fn);
let llty = if align <= 4 {
Type::array(&Type::i32(ccx), ((size + 3) / 4) as u64)
} else {
Type::array(&Type::i64(ccx), ((size + 7) / 8) as u64)
};
ArgType::direct(ty, Some(llty), None, None)
}
fn is_reg_ty(ty: Type) -> bool {
match ty.kind() {
Integer
| Pointer
| Float
| Double
| Vector => true,
_ => false
}
}
pub fn compute_abi_info(ccx: &CrateContext,
atys: &[Type],
rty: Type,
ret_def: bool,
flavor: Flavor) -> FnType {
let align_fn = match flavor {
Flavor::General => general_ty_align as TyAlignFn,
Flavor::Ios => ios_ty_align as TyAlignFn,
};
let mut arg_tys = Vec::new();
for &aty in atys {
let ty = classify_arg_ty(ccx, aty, align_fn);
arg_tys.push(ty);
}
let ret_ty = if ret_def {
classify_ret_ty(ccx, rty, align_fn)
} else {
ArgType::direct(Type::void(ccx), None, None, None)
};
return FnType {
arg_tys: arg_tys,
ret_ty: ret_ty,
};
} | random_line_split | |
cabi_arm.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_upper_case_globals)]
use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
use llvm::{StructRetAttribute, ZExtAttribute};
use trans::cabi::{FnType, ArgType};
use trans::context::CrateContext;
use trans::type_::Type;
use std::cmp;
pub enum Flavor {
General,
Ios
}
type TyAlignFn = fn(ty: Type) -> uint;
fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1) / a * a;
}
fn align(off: uint, ty: Type, align_fn: TyAlignFn) -> uint {
let a = align_fn(ty);
return align_up_to(off, a);
}
fn general_ty_align(ty: Type) -> uint {
match ty.kind() {
Integer => ((ty.int_width() as uint) + 7) / 8,
Pointer => 4,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
1
} else {
let str_tys = ty.field_types();
str_tys.iter().fold(1, |a, t| cmp::max(a, general_ty_align(*t)))
}
}
Array => {
let elt = ty.element_type();
general_ty_align(elt)
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
general_ty_align(elt) * len
}
_ => panic!("ty_align: unhandled type")
}
}
// For more information see:
// ARMv7
// https://developer.apple.com/library/ios/documentation/Xcode/Conceptual
// /iPhoneOSABIReference/Articles/ARMv7FunctionCallingConventions.html
// ARMv6
// https://developer.apple.com/library/ios/documentation/Xcode/Conceptual
// /iPhoneOSABIReference/Articles/ARMv6FunctionCallingConventions.html
fn ios_ty_align(ty: Type) -> uint {
match ty.kind() {
Integer => cmp::min(4, ((ty.int_width() as uint) + 7) / 8),
Pointer => 4,
Float => 4,
Double => 4,
Struct => {
if ty.is_packed() {
1
} else {
let str_tys = ty.field_types();
str_tys.iter().fold(1, |a, t| cmp::max(a, ios_ty_align(*t)))
}
}
Array => {
let elt = ty.element_type();
ios_ty_align(elt)
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
ios_ty_align(elt) * len
}
_ => panic!("ty_align: unhandled type")
}
}
fn ty_size(ty: Type, align_fn: TyAlignFn) -> uint {
match ty.kind() {
Integer => ((ty.int_width() as uint) + 7) / 8,
Pointer => 4,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
let str_tys = ty.field_types();
str_tys.iter().fold(0, |s, t| s + ty_size(*t, align_fn))
} else {
let str_tys = ty.field_types();
let size = str_tys.iter()
.fold(0, |s, t| {
align(s, *t, align_fn) + ty_size(*t, align_fn)
});
align(size, ty, align_fn)
}
}
Array => {
let len = ty.array_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, align_fn);
len * eltsz
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, align_fn);
len * eltsz
}
_ => panic!("ty_size: unhandled type")
}
}
fn classify_ret_ty(ccx: &CrateContext, ty: Type, align_fn: TyAlignFn) -> ArgType {
if is_reg_ty(ty) {
let attr = if ty == Type::i1(ccx) { Some(ZExtAttribute) } else { None };
return ArgType::direct(ty, None, None, attr);
}
let size = ty_size(ty, align_fn);
if size <= 4 {
let llty = if size <= 1 {
Type::i8(ccx)
} else if size <= 2 {
Type::i16(ccx)
} else {
Type::i32(ccx)
};
return ArgType::direct(ty, Some(llty), None, None);
}
ArgType::indirect(ty, Some(StructRetAttribute))
}
fn classify_arg_ty(ccx: &CrateContext, ty: Type, align_fn: TyAlignFn) -> ArgType |
fn is_reg_ty(ty: Type) -> bool {
match ty.kind() {
Integer
| Pointer
| Float
| Double
| Vector => true,
_ => false
}
}
pub fn compute_abi_info(ccx: &CrateContext,
atys: &[Type],
rty: Type,
ret_def: bool,
flavor: Flavor) -> FnType {
let align_fn = match flavor {
Flavor::General => general_ty_align as TyAlignFn,
Flavor::Ios => ios_ty_align as TyAlignFn,
};
let mut arg_tys = Vec::new();
for &aty in atys {
let ty = classify_arg_ty(ccx, aty, align_fn);
arg_tys.push(ty);
}
let ret_ty = if ret_def {
classify_ret_ty(ccx, rty, align_fn)
} else {
ArgType::direct(Type::void(ccx), None, None, None)
};
return FnType {
arg_tys: arg_tys,
ret_ty: ret_ty,
};
}
| {
if is_reg_ty(ty) {
let attr = if ty == Type::i1(ccx) { Some(ZExtAttribute) } else { None };
return ArgType::direct(ty, None, None, attr);
}
let align = align_fn(ty);
let size = ty_size(ty, align_fn);
let llty = if align <= 4 {
Type::array(&Type::i32(ccx), ((size + 3) / 4) as u64)
} else {
Type::array(&Type::i64(ccx), ((size + 7) / 8) as u64)
};
ArgType::direct(ty, Some(llty), None, None)
} | identifier_body |
cabi_arm.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_upper_case_globals)]
use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
use llvm::{StructRetAttribute, ZExtAttribute};
use trans::cabi::{FnType, ArgType};
use trans::context::CrateContext;
use trans::type_::Type;
use std::cmp;
pub enum Flavor {
General,
Ios
}
type TyAlignFn = fn(ty: Type) -> uint;
fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1) / a * a;
}
fn align(off: uint, ty: Type, align_fn: TyAlignFn) -> uint {
let a = align_fn(ty);
return align_up_to(off, a);
}
fn | (ty: Type) -> uint {
match ty.kind() {
Integer => ((ty.int_width() as uint) + 7) / 8,
Pointer => 4,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
1
} else {
let str_tys = ty.field_types();
str_tys.iter().fold(1, |a, t| cmp::max(a, general_ty_align(*t)))
}
}
Array => {
let elt = ty.element_type();
general_ty_align(elt)
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
general_ty_align(elt) * len
}
_ => panic!("ty_align: unhandled type")
}
}
// For more information see:
// ARMv7
// https://developer.apple.com/library/ios/documentation/Xcode/Conceptual
// /iPhoneOSABIReference/Articles/ARMv7FunctionCallingConventions.html
// ARMv6
// https://developer.apple.com/library/ios/documentation/Xcode/Conceptual
// /iPhoneOSABIReference/Articles/ARMv6FunctionCallingConventions.html
fn ios_ty_align(ty: Type) -> uint {
match ty.kind() {
Integer => cmp::min(4, ((ty.int_width() as uint) + 7) / 8),
Pointer => 4,
Float => 4,
Double => 4,
Struct => {
if ty.is_packed() {
1
} else {
let str_tys = ty.field_types();
str_tys.iter().fold(1, |a, t| cmp::max(a, ios_ty_align(*t)))
}
}
Array => {
let elt = ty.element_type();
ios_ty_align(elt)
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
ios_ty_align(elt) * len
}
_ => panic!("ty_align: unhandled type")
}
}
fn ty_size(ty: Type, align_fn: TyAlignFn) -> uint {
match ty.kind() {
Integer => ((ty.int_width() as uint) + 7) / 8,
Pointer => 4,
Float => 4,
Double => 8,
Struct => {
if ty.is_packed() {
let str_tys = ty.field_types();
str_tys.iter().fold(0, |s, t| s + ty_size(*t, align_fn))
} else {
let str_tys = ty.field_types();
let size = str_tys.iter()
.fold(0, |s, t| {
align(s, *t, align_fn) + ty_size(*t, align_fn)
});
align(size, ty, align_fn)
}
}
Array => {
let len = ty.array_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, align_fn);
len * eltsz
}
Vector => {
let len = ty.vector_length();
let elt = ty.element_type();
let eltsz = ty_size(elt, align_fn);
len * eltsz
}
_ => panic!("ty_size: unhandled type")
}
}
fn classify_ret_ty(ccx: &CrateContext, ty: Type, align_fn: TyAlignFn) -> ArgType {
if is_reg_ty(ty) {
let attr = if ty == Type::i1(ccx) { Some(ZExtAttribute) } else { None };
return ArgType::direct(ty, None, None, attr);
}
let size = ty_size(ty, align_fn);
if size <= 4 {
let llty = if size <= 1 {
Type::i8(ccx)
} else if size <= 2 {
Type::i16(ccx)
} else {
Type::i32(ccx)
};
return ArgType::direct(ty, Some(llty), None, None);
}
ArgType::indirect(ty, Some(StructRetAttribute))
}
fn classify_arg_ty(ccx: &CrateContext, ty: Type, align_fn: TyAlignFn) -> ArgType {
if is_reg_ty(ty) {
let attr = if ty == Type::i1(ccx) { Some(ZExtAttribute) } else { None };
return ArgType::direct(ty, None, None, attr);
}
let align = align_fn(ty);
let size = ty_size(ty, align_fn);
let llty = if align <= 4 {
Type::array(&Type::i32(ccx), ((size + 3) / 4) as u64)
} else {
Type::array(&Type::i64(ccx), ((size + 7) / 8) as u64)
};
ArgType::direct(ty, Some(llty), None, None)
}
fn is_reg_ty(ty: Type) -> bool {
match ty.kind() {
Integer
| Pointer
| Float
| Double
| Vector => true,
_ => false
}
}
pub fn compute_abi_info(ccx: &CrateContext,
atys: &[Type],
rty: Type,
ret_def: bool,
flavor: Flavor) -> FnType {
let align_fn = match flavor {
Flavor::General => general_ty_align as TyAlignFn,
Flavor::Ios => ios_ty_align as TyAlignFn,
};
let mut arg_tys = Vec::new();
for &aty in atys {
let ty = classify_arg_ty(ccx, aty, align_fn);
arg_tys.push(ty);
}
let ret_ty = if ret_def {
classify_ret_ty(ccx, rty, align_fn)
} else {
ArgType::direct(Type::void(ccx), None, None, None)
};
return FnType {
arg_tys: arg_tys,
ret_ty: ret_ty,
};
}
| general_ty_align | identifier_name |
scene.py | """Support for Lutron Caseta scenes."""
from typing import Any
from homeassistant.components.scene import Scene
from .const import BRIDGE_LEAP, DOMAIN as CASETA_DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Lutron Caseta scene platform.
Adds scenes from the Caseta bridge associated with the config_entry as
scene entities.
"""
entities = []
data = hass.data[CASETA_DOMAIN][config_entry.entry_id]
bridge = data[BRIDGE_LEAP]
scenes = bridge.get_scenes()
for scene in scenes:
entity = LutronCasetaScene(scenes[scene], bridge)
entities.append(entity)
async_add_entities(entities, True)
class LutronCasetaScene(Scene):
"""Representation of a Lutron Caseta scene."""
def __init__(self, scene, bridge):
"""Initialize the Lutron Caseta scene."""
self._scene_name = scene["name"]
self._scene_id = scene["scene_id"]
self._bridge = bridge
@property
def name(self):
|
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._bridge.activate_scene(self._scene_id)
| """Return the name of the scene."""
return self._scene_name | identifier_body |
scene.py | """Support for Lutron Caseta scenes."""
from typing import Any
from homeassistant.components.scene import Scene
from .const import BRIDGE_LEAP, DOMAIN as CASETA_DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Lutron Caseta scene platform.
Adds scenes from the Caseta bridge associated with the config_entry as
scene entities.
"""
entities = []
data = hass.data[CASETA_DOMAIN][config_entry.entry_id]
bridge = data[BRIDGE_LEAP]
scenes = bridge.get_scenes() | for scene in scenes:
entity = LutronCasetaScene(scenes[scene], bridge)
entities.append(entity)
async_add_entities(entities, True)
class LutronCasetaScene(Scene):
"""Representation of a Lutron Caseta scene."""
def __init__(self, scene, bridge):
"""Initialize the Lutron Caseta scene."""
self._scene_name = scene["name"]
self._scene_id = scene["scene_id"]
self._bridge = bridge
@property
def name(self):
"""Return the name of the scene."""
return self._scene_name
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._bridge.activate_scene(self._scene_id) | random_line_split | |
scene.py | """Support for Lutron Caseta scenes."""
from typing import Any
from homeassistant.components.scene import Scene
from .const import BRIDGE_LEAP, DOMAIN as CASETA_DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Lutron Caseta scene platform.
Adds scenes from the Caseta bridge associated with the config_entry as
scene entities.
"""
entities = []
data = hass.data[CASETA_DOMAIN][config_entry.entry_id]
bridge = data[BRIDGE_LEAP]
scenes = bridge.get_scenes()
for scene in scenes:
|
async_add_entities(entities, True)
class LutronCasetaScene(Scene):
"""Representation of a Lutron Caseta scene."""
def __init__(self, scene, bridge):
"""Initialize the Lutron Caseta scene."""
self._scene_name = scene["name"]
self._scene_id = scene["scene_id"]
self._bridge = bridge
@property
def name(self):
"""Return the name of the scene."""
return self._scene_name
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._bridge.activate_scene(self._scene_id)
| entity = LutronCasetaScene(scenes[scene], bridge)
entities.append(entity) | conditional_block |
scene.py | """Support for Lutron Caseta scenes."""
from typing import Any
from homeassistant.components.scene import Scene
from .const import BRIDGE_LEAP, DOMAIN as CASETA_DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Lutron Caseta scene platform.
Adds scenes from the Caseta bridge associated with the config_entry as
scene entities.
"""
entities = []
data = hass.data[CASETA_DOMAIN][config_entry.entry_id]
bridge = data[BRIDGE_LEAP]
scenes = bridge.get_scenes()
for scene in scenes:
entity = LutronCasetaScene(scenes[scene], bridge)
entities.append(entity)
async_add_entities(entities, True)
class LutronCasetaScene(Scene):
"""Representation of a Lutron Caseta scene."""
def | (self, scene, bridge):
"""Initialize the Lutron Caseta scene."""
self._scene_name = scene["name"]
self._scene_id = scene["scene_id"]
self._bridge = bridge
@property
def name(self):
"""Return the name of the scene."""
return self._scene_name
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._bridge.activate_scene(self._scene_id)
| __init__ | identifier_name |
index.ts | let rectWidth = 70,
rectHeight = 70
document.addEventListener('dragstart', e => {
let fa = e.target.querySelector('i.fa')
if (fa) {
let type = fa.className.split(' ')[1]
e.dataTransfer.setData('text/plain', type)
}
})
document.querySelector('svg').addEventListener('drop', e => {
let type = e.dataTransfer.getData('text')
data.addNode(type, e.offsetX, e.offsetY)
initD3();
simulation.restart()
})
document.addEventListener('dragover', e => {
e.preventDefault()
})
let data = new RectGroup(),
svg = d3.select('svg'),
svgWidth = parseInt(svg.style('width')),
svgHeight = parseInt(svg.style('height')),
index = 10,
gMerge, path, pathHover, pathCross,
drawLineEnable = false,
drawLineFrom, drawLineTo
data.addNode('fa-car', 200, 200)
data.addNode('fa-car', 200, 200)
data.addLink(data.nodes[0], data.nodes[1])
let drag_line = svg.append('path')
.attr('class', 'dragline hidden')
.attr('d', 'M0,0L0,0')
const simulation = d3.forceSimulation()
.force('link', d3.forceLink().id(d => d.id).distance(100))
// .force('charge', d3.forceManyBody().strength(200))
// .force('center', d3.forceCenter(svgWidth / 2, svgHeight / 2))
.force('collide', d3.forceCollide(50))
init()
function init() {
initD3()
addEvent()
svg.on('mousemove', mousemove)
.on('mouseup', mouseup)
}
function initD3() {
let g
let linkData = svg.selectAll('.link')
.data(data.links, d => d.target.id)
linkData.exit().remove()
g = linkData.enter()
.append('g')
.attr('class', 'link')
.on('click', function (d) {
if (drawLineEnable) {
let l = d3.select(this)
l.select('.line-hover')
.classed('selected', true)
l.select('use')
.classed('hidden', false)
.attr('x', function (d) {
return (d.source.x + d.target.x) / 2 - 5
})
.attr('y', function (d) {
return (d.source.y + d.target.y) / 2 - 5
})
.on('mousedown', d => {
data.links = data.links.filter(n => n.source.id !== d.source.id || n.target.id !== d.target.id)
initD3()
clearAllEditStyle()
simulation.alphaTarget(0.1).restart()
})
}
})
path = g.append('path')
.attr('class', 'line')
.attr('marker-mid', 'url(#Triangle)')
.merge(linkData.select('.line'))
pathHover = g.append('path')
.attr('class', 'line-hover')
.merge(linkData.select('.line-hover'))
pathCross = g.append('use')
.attr('class', 'cross hidden')
.attr('xlink:href', '#cross')
.merge(linkData.select('.cross'))
//防止link遮盖rect
svg.selectAll('.link,.node')
.sort((a, b) => {
if (a.source) return -1
else return 1
})
dataJoin = svg.selectAll('.node')
.data(data.nodes, d => d.id)
dataJoin.exit().remove()
g = dataJoin.enter()
.append('g')
.attr('class', 'node')
g.append('rect')
.attr('class', 'rect')
.attr('width', rectWidth)
.attr('height', rectHeight)
.attr('transform', 'translate(-' + rectWidth / 2 + ',-' + rectHeight / 2 + ')')
g.append('text')
.attr('class', 'awe')
.text(d => d.icon)
g.append('text')
.attr('dy', 30)
.text(d => d.name)
g.append('use')
.attr('class', 'cross hidden')
.attr('xlink:href', '#cross')
.attr('transform', 'translate(' + (rectWidth / 2 - 5) + ',-' + (rectHeight / 2 + 5) + ')')
gMerge = g.merge(dataJoin)
.call(d3.drag()
.on('start', dragstarted)
.on('drag', dragged)
.on('end', dragended))
.on('mousedown', function (d) {
if (drawLineEnable) {
drawLineFrom = d
//去除所有
clearAllEditStyle()
//选中当前元素
let g = d3.select(this)
g.classed('selected', true)
g.select('use')
.classed('hidden', false)
.on('mousedown', function (d) {
data.nodes = data.nodes.filter(n => n.id !== d.id)
data.links = data.links.filter(n => n.source.id !== d.id && n.target.id !== d.id)
initD3()
})
drag_line.attr('marker-mid', 'url(#Triangle)')
.classed('hidden', false)
.attr('d', `M${d.x},${d.y}L${d.x},${d.y}`)
simulation.alphaTarget(0.1).restart()
}
}).on('mouseup', d => {
// d3.event.stopPropagation();
drawLineTo = d
data.addLink(drawLineFrom, drawLineTo)
initD3();
simulation.alphaTarget(0)
if (drawLineFrom !== drawLineTo) {
clearAllEditStyle()
}
})
if (drawLineEnable) {
gMerge.on('.drag', null)
}
simulation.nodes(data.nodes)
.on('tick', ticked)
simulation.force('link')
.links(data.links)
}
function ticked() {
path.attr("d", function (d) {
let centerX = d.source.x + (d.target.x - d.source.x) / 2,
centerY = d.source.y + (d.target.y - d.source.y) / 2
return `M${d.source.x},${d.source.y}L${centerX},${centerY}L${d.target.x},${d.target.y}`;
})
pathHover.attr("d", function (d) {
let centerX = d.source.x + (d.target.x - d.source.x) / 2,
centerY = d.source.y + (d.target.y - d.source.y) / 2
return `M${d.source.x},${d.source.y}L${centerX},${centerY}L${d.target.x},${d.target.y}`;
})
pathCross.attr('x', d => {
return (d.source.x + d.target.x) / 2
})
pathCross.attr('y', d => {
return (d.source.y + d.target.y) / 2
})
gMerge.attr("transform", function (d) {
return "translate(" + d.x + ", " + d.y + ")";
});
}
function dragstarted(d) {
if (drawLineEnable) return
if (!d3.event.active) simulation.alphaTarget(0.3).restart()
}
function dragged(d) {
if (drawLineEnable) return
d.fx = d3.event.x;
d.fy = d3.event.y;
}
function dragended(d) {
if (drawLineEnable) return
if (!d3.event.active) simulation.alphaTarget(0);
d.fx = undefined;
d.fy = undefined;
}
function addEvent() {
let buttonAdd = document.querySelector('.connect .add'),
buttonView = document.querySelector('.connect .view'),
buttonEdit = document.querySelector('.connect .edit'),
svg = document.querySelector('svg')
buttonView.addEventListener('click', e => {
buttonView.classList.add('hidden')
buttonEdit.classList.remove('hidden')
buttonAdd.classList.add('hidden')
svg.classList.remove('edit')
clearAllEditStyle()
drawLineEnable = false
initD3()
}, false)
buttonEdit.addEventListener('click', e => {
buttonView.classList.remove('hidden')
buttonEdit.classList.add('hidden')
buttonAdd.classList.remove('hidden')
svg.classList.add('edit')
drawLineEnable = true
initD3()
}, false)
}
function mousemove() {
if (drawLineEnable && drawLineFrom) {
let p = d3.mouse(this),
centerX = drawLineFrom.x / 2 + p[0] / 2,
centerY = drawLineFrom.y / 2 + p[1] / 2 |
function mouseup() {
if (drawLineEnable && drawLineFrom) {
drag_line.classed('hidden', true)
drawLineFrom = null
} else {
clearAllEditStyle()
}
simulation.alphaTarget(0)
}
function clearAllEditStyle() {
d3.selectAll('.node').classed('selected', false)
d3.selectAll('.cross').classed('hidden', true)
d3.selectAll('.selected').classed('selected', false)
} | drag_line.attr('d', `M${drawLineFrom.x},${drawLineFrom.y}L${centerX},${centerY}L${p[0]},${p[1]}`)
}
} | random_line_split |
index.ts | let rectWidth = 70,
rectHeight = 70
document.addEventListener('dragstart', e => {
let fa = e.target.querySelector('i.fa')
if (fa) {
let type = fa.className.split(' ')[1]
e.dataTransfer.setData('text/plain', type)
}
})
document.querySelector('svg').addEventListener('drop', e => {
let type = e.dataTransfer.getData('text')
data.addNode(type, e.offsetX, e.offsetY)
initD3();
simulation.restart()
})
document.addEventListener('dragover', e => {
e.preventDefault()
})
let data = new RectGroup(),
svg = d3.select('svg'),
svgWidth = parseInt(svg.style('width')),
svgHeight = parseInt(svg.style('height')),
index = 10,
gMerge, path, pathHover, pathCross,
drawLineEnable = false,
drawLineFrom, drawLineTo
data.addNode('fa-car', 200, 200)
data.addNode('fa-car', 200, 200)
data.addLink(data.nodes[0], data.nodes[1])
let drag_line = svg.append('path')
.attr('class', 'dragline hidden')
.attr('d', 'M0,0L0,0')
const simulation = d3.forceSimulation()
.force('link', d3.forceLink().id(d => d.id).distance(100))
// .force('charge', d3.forceManyBody().strength(200))
// .force('center', d3.forceCenter(svgWidth / 2, svgHeight / 2))
.force('collide', d3.forceCollide(50))
init()
function init() {
initD3()
addEvent()
svg.on('mousemove', mousemove)
.on('mouseup', mouseup)
}
function initD3() {
let g
let linkData = svg.selectAll('.link')
.data(data.links, d => d.target.id)
linkData.exit().remove()
g = linkData.enter()
.append('g')
.attr('class', 'link')
.on('click', function (d) {
if (drawLineEnable) {
let l = d3.select(this)
l.select('.line-hover')
.classed('selected', true)
l.select('use')
.classed('hidden', false)
.attr('x', function (d) {
return (d.source.x + d.target.x) / 2 - 5
})
.attr('y', function (d) {
return (d.source.y + d.target.y) / 2 - 5
})
.on('mousedown', d => {
data.links = data.links.filter(n => n.source.id !== d.source.id || n.target.id !== d.target.id)
initD3()
clearAllEditStyle()
simulation.alphaTarget(0.1).restart()
})
}
})
path = g.append('path')
.attr('class', 'line')
.attr('marker-mid', 'url(#Triangle)')
.merge(linkData.select('.line'))
pathHover = g.append('path')
.attr('class', 'line-hover')
.merge(linkData.select('.line-hover'))
pathCross = g.append('use')
.attr('class', 'cross hidden')
.attr('xlink:href', '#cross')
.merge(linkData.select('.cross'))
//防止link遮盖rect
svg.selectAll('.link,.node')
.sort((a, b) => {
if (a.source) return -1
else return 1
})
dataJoin = svg.selectAll('.node')
.data(data.nodes, d => d.id)
dataJoin.exit().remove()
g = dataJoin.enter()
.append('g')
.attr('class', 'node')
g.append('rect')
.attr('class', 'rect')
.attr('width', rectWidth)
.attr('height', rectHeight)
.attr('transform', 'translate(-' + rectWidth / 2 + ',-' + rectHeight / 2 + ')')
g.append('text')
.attr('class', 'awe')
.text(d => d.icon)
g.append('text')
.attr('dy', 30)
.text(d => d.name)
g.append('use')
.attr('class', 'cross hidden')
.attr('xlink:href', '#cross')
.attr('transform', 'translate(' + (rectWidth / 2 - 5) + ',-' + (rectHeight / 2 + 5) + ')')
gMerge = g.merge(dataJoin)
.call(d3.drag()
.on('start', dragstarted)
.on('drag', dragged)
.on('end', dragended))
.on('mousedown', function (d) {
if (drawLineEnable) {
drawLineFrom = d
//去除所有
clearAllEditStyle()
//选中当前元素
let g = d3.select(this)
g.classed('selected', true)
g.select('use')
.classed('hidden', false)
.on('mousedown', function (d) {
data.nodes = data.nodes.filter(n => n.id !== d.id)
data.links = data.links.filter(n => n.source.id !== d.id && n.target.id !== d.id)
initD3()
})
drag_line.attr('marker-mid', 'url(#Triangle)')
.classed('hidden', false)
.attr('d', `M${d.x},${d.y}L${d.x},${d.y}`)
simulation.alphaTarget(0.1).restart()
}
}).on('mouseup', d => {
// d3.event.stopPropagation();
drawLineTo = d
data.addLink(drawLineFrom, drawLineTo)
initD3();
simulation.alphaTarget(0)
if (drawLineFrom !== drawLineTo) {
clearAllEditStyle()
}
})
if (drawLineEnable) {
gMerge.on('.drag', null)
}
simulation.nodes(data.nodes)
.on('tick', ticked)
simulation.force('link')
.links(data.links)
}
function ticked() {
path.attr("d", function (d) {
let centerX = d.source.x + (d.target.x - d.source.x) / 2,
centerY = d.source.y + (d.target.y - d.source.y) / 2
return `M${d.source.x},${d.source.y}L${centerX},${centerY}L${d.target.x},${d.target.y}`;
})
pathHover.attr("d", function (d) {
let centerX = d.source.x + (d.target.x - d.source.x) / 2,
centerY = d.source.y + (d.target.y - d.source.y) / 2
return `M${d.source.x},${d.source.y}L${centerX},${centerY}L${d.target.x},${d.target.y}`;
})
pathCross.attr('x', d => {
return (d.source.x + d.target.x) / 2
})
pathCross.attr('y', d => {
return (d.source.y + d.target.y) / 2
})
gMerge.attr("transform", function (d) {
return "translate(" + d.x + ", " + d.y + ")";
});
}
function dragstarted(d) {
if (drawLineEnable) return
if (!d3.event.active) simulation.alphaTarget(0.3).restart()
}
function dragged(d) {
if (drawLineEnable) return
d.fx = d3.event.x;
d.fy = d3.event.y;
}
function dragended(d) {
if (drawLineEnable) return
if (!d3.event.active) simulation.alphaTarget(0);
d.fx = undefined;
d.fy = undefined;
}
function addEvent() {
let buttonA | ument.querySelector('.connect .add'),
buttonView = document.querySelector('.connect .view'),
buttonEdit = document.querySelector('.connect .edit'),
svg = document.querySelector('svg')
buttonView.addEventListener('click', e => {
buttonView.classList.add('hidden')
buttonEdit.classList.remove('hidden')
buttonAdd.classList.add('hidden')
svg.classList.remove('edit')
clearAllEditStyle()
drawLineEnable = false
initD3()
}, false)
buttonEdit.addEventListener('click', e => {
buttonView.classList.remove('hidden')
buttonEdit.classList.add('hidden')
buttonAdd.classList.remove('hidden')
svg.classList.add('edit')
drawLineEnable = true
initD3()
}, false)
}
function mousemove() {
if (drawLineEnable && drawLineFrom) {
let p = d3.mouse(this),
centerX = drawLineFrom.x / 2 + p[0] / 2,
centerY = drawLineFrom.y / 2 + p[1] / 2
drag_line.attr('d', `M${drawLineFrom.x},${drawLineFrom.y}L${centerX},${centerY}L${p[0]},${p[1]}`)
}
}
function mouseup() {
if (drawLineEnable && drawLineFrom) {
drag_line.classed('hidden', true)
drawLineFrom = null
} else {
clearAllEditStyle()
}
simulation.alphaTarget(0)
}
function clearAllEditStyle() {
d3.selectAll('.node').classed('selected', false)
d3.selectAll('.cross').classed('hidden', true)
d3.selectAll('.selected').classed('selected', false)
} | dd = doc | identifier_name |
index.ts | let rectWidth = 70,
rectHeight = 70
document.addEventListener('dragstart', e => {
let fa = e.target.querySelector('i.fa')
if (fa) {
let type = fa.className.split(' ')[1]
e.dataTransfer.setData('text/plain', type)
}
})
document.querySelector('svg').addEventListener('drop', e => {
let type = e.dataTransfer.getData('text')
data.addNode(type, e.offsetX, e.offsetY)
initD3();
simulation.restart()
})
document.addEventListener('dragover', e => {
e.preventDefault()
})
let data = new RectGroup(),
svg = d3.select('svg'),
svgWidth = parseInt(svg.style('width')),
svgHeight = parseInt(svg.style('height')),
index = 10,
gMerge, path, pathHover, pathCross,
drawLineEnable = false,
drawLineFrom, drawLineTo
data.addNode('fa-car', 200, 200)
data.addNode('fa-car', 200, 200)
data.addLink(data.nodes[0], data.nodes[1])
let drag_line = svg.append('path')
.attr('class', 'dragline hidden')
.attr('d', 'M0,0L0,0')
const simulation = d3.forceSimulation()
.force('link', d3.forceLink().id(d => d.id).distance(100))
// .force('charge', d3.forceManyBody().strength(200))
// .force('center', d3.forceCenter(svgWidth / 2, svgHeight / 2))
.force('collide', d3.forceCollide(50))
init()
function init() {
initD3()
addEvent()
svg.on('mousemove', mousemove)
.on('mouseup', mouseup)
}
function initD3() {
let g
let linkData = svg.selectAll('.link')
.data(data.links, d => d.target.id)
linkData.exit().remove()
g = linkData.enter()
.append('g')
.attr('class', 'link')
.on('click', function (d) {
if (drawLineEnable) {
let l = d3.select(this)
l.select('.line-hover')
.classed('selected', true)
l.select('use')
.classed('hidden', false)
.attr('x', function (d) {
return (d.source.x + d.target.x) / 2 - 5
})
.attr('y', function (d) {
return (d.source.y + d.target.y) / 2 - 5
})
.on('mousedown', d => {
data.links = data.links.filter(n => n.source.id !== d.source.id || n.target.id !== d.target.id)
initD3()
clearAllEditStyle()
simulation.alphaTarget(0.1).restart()
})
}
})
path = g.append('path')
.attr('class', 'line')
.attr('marker-mid', 'url(#Triangle)')
.merge(linkData.select('.line'))
pathHover = g.append('path')
.attr('class', 'line-hover')
.merge(linkData.select('.line-hover'))
pathCross = g.append('use')
.attr('class', 'cross hidden')
.attr('xlink:href', '#cross')
.merge(linkData.select('.cross'))
//防止link遮盖rect
svg.selectAll('.link,.node')
.sort((a, b) => {
if (a.source) return -1
else return 1
})
dataJoin = svg.selectAll('.node')
.data(data.nodes, d => d.id)
dataJoin.exit().remove()
g = dataJoin.enter()
.append('g')
.attr('class', 'node')
g.append('rect')
.attr('class', 'rect')
.attr('width', rectWidth)
.attr('height', rectHeight)
.attr('transform', 'translate(-' + rectWidth / 2 + ',-' + rectHeight / 2 + ')')
g.append('text')
.attr('class', 'awe')
.text(d => d.icon)
g.append('text')
.attr('dy', 30)
.text(d => d.name)
g.append('use')
.attr('class', 'cross hidden')
.attr('xlink:href', '#cross')
.attr('transform', 'translate(' + (rectWidth / 2 - 5) + ',-' + (rectHeight / 2 + 5) + ')')
gMerge = g.merge(dataJoin)
.call(d3.drag()
.on('start', dragstarted)
.on('drag', dragged)
.on('end', dragended))
.on('mousedown', function (d) {
if (drawLineEnable) {
drawLineFrom = d
//去除所有
clearAllEditStyle()
//选中当前元素
let g = d3.select(this)
g.classed('selected', true)
g.select('use')
.classed('hidden', false)
.on('mousedown', function (d) {
data.nodes = data.nodes.filter(n => n.id !== d.id)
data.links = data.links.filter(n => n.source.id !== d.id && n.target.id !== d.id)
initD3()
})
drag_line.attr('marker-mid', 'url(#Triangle)')
.classed('hidden', false)
.attr('d', `M${d.x},${d.y}L${d.x},${d.y}`)
simulation.alphaTarget(0.1).restart()
}
}).on('mouseup', d => {
// d3.event.stopPropagation();
drawLineTo = d
data.addLink(drawLineFrom, drawLineTo)
initD3();
simulation.alphaTarget(0)
if (drawLineFrom !== drawLineTo) {
clearAllEditStyle()
}
})
if (drawLineEnable) {
gMerge.on('.drag', null)
}
simulation.nodes(data.nodes)
.on('tick', ticked)
simulation.force('link')
.links(data.links)
}
function ticked() {
path.attr("d", function (d) {
let centerX = d.source.x + (d.target.x - d.source.x) / 2,
centerY = d.source.y + (d.target.y - d.source.y) / 2
return `M${d.source.x},${d.source.y}L${centerX},${centerY}L${d.target.x},${d.target.y}`;
})
pathHover.attr("d", function (d) {
let centerX = d.source.x + (d.target.x - d.source.x) / 2,
centerY = d.source.y + (d.target.y - d.source.y) / 2
return `M${d.source.x},${d.source.y}L${centerX},${centerY}L${d.target.x},${d.target.y}`;
})
pathCross.attr('x', d => {
return (d.source.x + d.target.x) / 2
})
pathCross.attr('y', d => {
return (d.source.y + d.target.y) / 2
})
gMerge.attr("transform", function (d) {
return "translate(" + d.x + ", " + d.y + ")";
});
}
function dragstarted(d) {
if (drawLineEnable) return
if (!d3.event.active) simulation.alphaTarget(0.3).restart()
}
function dragged(d) {
if (drawLineEnable) return
d.fx = d3.event.x;
d.fy = d3.event.y;
}
function dragended(d) {
if (drawLineEnable) return
if (!d3.event.active) simulation.alphaTarget(0);
d.fx = undefined;
d.fy = undefined;
}
function addEvent() {
let buttonAdd = document.querySelector('.connect .add'),
buttonView = document.querySelector('.connect .view'),
buttonEdit = document.querySelector('.connect .edit'),
svg = document.querySelector('svg')
buttonView.addEventListener('click', e => {
buttonView.classList.add('hidden')
buttonEdit.classList.remove('hidden')
buttonAdd.classList.add('hidden')
svg.classList.remove('edit')
clearAllEditStyle()
drawLineEnable = false
initD3()
}, false)
buttonEdit.addEventListener('click', e => {
buttonView.classList.remove('hidden')
buttonEdit.classList.add('hidden')
buttonAdd.classList.remove('hidden')
svg.classList.add('edit')
drawLineEnable = true
initD3()
}, false)
}
function mousemove() {
if (drawLineEnable && drawLineFrom) {
let p = d3.mouse(this),
centerX = drawLineFrom.x / 2 + p[0] / 2,
centerY = drawLineFrom.y / 2 + p[1] / 2
drag_line.attr('d', `M${drawLineFrom.x},${drawLineFrom.y}L${centerX},${centerY}L${p[0]},${p[1]}`)
}
}
function mouseup() {
if (drawLineEnable && | () {
d3.selectAll('.node').classed('selected', false)
d3.selectAll('.cross').classed('hidden', true)
d3.selectAll('.selected').classed('selected', false)
} | drawLineFrom) {
drag_line.classed('hidden', true)
drawLineFrom = null
} else {
clearAllEditStyle()
}
simulation.alphaTarget(0)
}
function clearAllEditStyle | identifier_body |
CommandTester.d.ts | declare namespace Jymfony.Component.Console.Tester {
import Command = Jymfony.Component.Console.Command.Command;
import InputInterface = Jymfony.Component.Console.Input.InputInterface;
import OutputInterface = Jymfony.Component.Console.Output.OutputInterface;
import ArrayInput = Jymfony.Component.Console.Input.ArrayInput;
export class | {
/**
* Gets the input instance used by the last execution of application.
*/
public readonly input: InputInterface;
/**
* Sets the user input.
*/
public /* writeonly */ inputs: string[];
/**
* Gets the output instance used by the last execution of application.
*/
public readonly output: OutputInterface;
/**
* Gets the status code returned by the last execution of the application
* if run has been completed.
*/
public readonly exitCode: number;
private _command: Command;
private _inputs: string[];
private _readOutput: string;
private _input?: ArrayInput;
private _statusCode?: number;
private _output?: OutputInterface;
/**
* Constructor.
*/
__construct(command: Command): void;
constructor(command: Command);
/**
* Executes the application.
*
* Options:
* * interactive Sets the input interactive flag [false]
* * decorated Sets the decorated flag [false]
* * verbosity Sets the output verbosity level [VERBOSITY_NORMAL]
*/
run(input: Record<string, any>, options: Record<string, boolean | any>): Promise<number>;
/**
* Gets the display returned by the last execution of the application.
*
* @param [normalize = false] Whether to normalize end of lines to \n or not
*/
getDisplay(normalize?: boolean): string;
/**
* Create a stream with given inputs.
*/
private static _createStream(inputs: string[]): NodeJS.ReadableStream;
}
}
| CommandTester | identifier_name |
CommandTester.d.ts | import Command = Jymfony.Component.Console.Command.Command;
import InputInterface = Jymfony.Component.Console.Input.InputInterface;
import OutputInterface = Jymfony.Component.Console.Output.OutputInterface;
import ArrayInput = Jymfony.Component.Console.Input.ArrayInput;
export class CommandTester {
/**
* Gets the input instance used by the last execution of application.
*/
public readonly input: InputInterface;
/**
* Sets the user input.
*/
public /* writeonly */ inputs: string[];
/**
* Gets the output instance used by the last execution of application.
*/
public readonly output: OutputInterface;
/**
* Gets the status code returned by the last execution of the application
* if run has been completed.
*/
public readonly exitCode: number;
private _command: Command;
private _inputs: string[];
private _readOutput: string;
private _input?: ArrayInput;
private _statusCode?: number;
private _output?: OutputInterface;
/**
* Constructor.
*/
__construct(command: Command): void;
constructor(command: Command);
/**
* Executes the application.
*
* Options:
* * interactive Sets the input interactive flag [false]
* * decorated Sets the decorated flag [false]
* * verbosity Sets the output verbosity level [VERBOSITY_NORMAL]
*/
run(input: Record<string, any>, options: Record<string, boolean | any>): Promise<number>;
/**
* Gets the display returned by the last execution of the application.
*
* @param [normalize = false] Whether to normalize end of lines to \n or not
*/
getDisplay(normalize?: boolean): string;
/**
* Create a stream with given inputs.
*/
private static _createStream(inputs: string[]): NodeJS.ReadableStream;
}
} | declare namespace Jymfony.Component.Console.Tester { | random_line_split | |
test-string-decoder.js | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
const common = require('../common');
const assert = require('assert');
const inspect = require('util').inspect;
const StringDecoder = require('string_decoder').StringDecoder;
// Test default encoding
let decoder = new StringDecoder();
assert.strictEqual(decoder.encoding, 'utf8');
// Should work without 'new' keyword
const decoder2 = {};
StringDecoder.call(decoder2);
assert.strictEqual(decoder2.encoding, 'utf8');
// UTF-8
test('utf-8', Buffer.from('$', 'utf-8'), '$');
test('utf-8', Buffer.from('¢', 'utf-8'), '¢');
test('utf-8', Buffer.from('€', 'utf-8'), '€');
test('utf-8', Buffer.from('𤭢', 'utf-8'), '𤭢');
// A mixed ascii and non-ascii string
// Test stolen from deps/v8/test/cctest/test-strings.cc
// U+02E4 -> CB A4
// U+0064 -> 64
// U+12E4 -> E1 8B A4
// U+0030 -> 30
// U+3045 -> E3 81 85
test(
'utf-8',
Buffer.from([0xCB, 0xA4, 0x64, 0xE1, 0x8B, 0xA4, 0x30, 0xE3, 0x81, 0x85]),
'\u02e4\u0064\u12e4\u0030\u3045'
);
// Some invalid input, known to have caused trouble with chunking
// in https://github.com/nodejs/node/pull/7310#issuecomment-226445923
// 00: |00000000 ASCII
// 41: |01000001 ASCII
// B8: 10|111000 continuation
// CC: 110|01100 two-byte head
// E2: 1110|0010 three-byte head
// F0: 11110|000 four-byte head
// F1: 11110|001'another four-byte head
// FB: 111110|11 "five-byte head", not UTF-8
test('utf-8', Buffer.from('C9B5A941', 'hex'), '\u0275\ufffdA');
test('utf-8', Buffer.from('E2', 'hex'), '\ufffd');
test('utf-8', Buffer.from('E241', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('CCCCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0B841', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('F1CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0FB00', 'hex'), '\ufffd\ufffd\0');
test('utf-8', Buffer.from('CCE2B8B8', 'hex'), '\ufffd\u2e38');
test('utf-8', Buffer.from('E2B8CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('E2FBCC01', 'hex'), '\ufffd\ufffd\ufffd\u0001');
test('utf-8', Buffer.from('CCB8CDB9', 'hex'), '\u0338\u0379');
// CESU-8 of U+1D40D
// V8 has changed their invalid UTF-8 handling, see
// https://chromium-review.googlesource.com/c/v8/v8/+/671020 for more info.
test('utf-8', Buffer.from('EDA0B5EDB08D', 'hex'),
'\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd');
// UCS-2
test('ucs2', Buffer.from('ababc', 'ucs2'), 'ababc');
// UTF-16LE
test('utf16le', Buffer.from('3DD84DDC', 'hex'), '\ud83d\udc4d'); // thumbs up
// Additional UTF-8 tests
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E1', 'hex')), '');
// A quick test for lastNeed & lastTotal which are undocumented.
assert.strictEqual(decoder.lastNeed, 2);
assert.strictEqual(decoder.lastTotal, 3);
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E18B', 'hex')), '');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd')), '\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd\ufffd\ufffd')),
'\ufffd\ufffd\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('EFBFBDE2', 'hex')), '\ufffd');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('F1', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('41F2', 'hex')), '\ufffdA');
assert.strictEqual(decoder.end(), '\ufffd');
// Additional utf8Text test
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.text(Buffer.from([0x41]), 2), '');
// Additional UTF-16LE surrogate pair tests
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('DC', 'hex')), '\ud83d\udc4d');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD84D', 'hex')), '\ud83d');
assert.strictEqual(decoder.end(), '');
common.expectsError(
() => new StringDecoder(1),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: 1'
}
);
common.expectsError(
() => new StringDecoder('test'),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: test'
}
);
// test verifies that StringDecoder will correctly decode the given input
// buffer with the given encoding to the expected output. It will attempt all
// possible ways to write() the input buffer, see writeSequences(). The | function test(encoding, input, expected, singleSequence) {
let sequences;
if (!singleSequence) {
sequences = writeSequences(input.length);
} else {
sequences = [singleSequence];
}
const hexNumberRE = /.{2}/g;
sequences.forEach((sequence) => {
const decoder = new StringDecoder(encoding);
let output = '';
sequence.forEach((write) => {
output += decoder.write(input.slice(write[0], write[1]));
});
output += decoder.end();
if (output !== expected) {
const message =
`Expected "${unicodeEscape(expected)}", ` +
`but got "${unicodeEscape(output)}"\n` +
`input: ${input.toString('hex').match(hexNumberRE)}\n` +
`Write sequence: ${JSON.stringify(sequence)}\n` +
`Full Decoder State: ${inspect(decoder)}`;
assert.fail(output, expected, message);
}
});
}
// unicodeEscape prints the str contents as unicode escape codes.
function unicodeEscape(str) {
let r = '';
for (let i = 0; i < str.length; i++) {
r += `\\u${str.charCodeAt(i).toString(16)}`;
}
return r;
}
// writeSequences returns an array of arrays that describes all possible ways a
// buffer of the given length could be split up and passed to sequential write
// calls.
//
// e.G. writeSequences(3) will return: [
// [ [ 0, 3 ] ],
// [ [ 0, 2 ], [ 2, 3 ] ],
// [ [ 0, 1 ], [ 1, 3 ] ],
// [ [ 0, 1 ], [ 1, 2 ], [ 2, 3 ] ]
// ]
function writeSequences(length, start, sequence) {
if (start === undefined) {
start = 0;
sequence = [];
} else if (start === length) {
return [sequence];
}
let sequences = [];
for (let end = length; end > start; end--) {
const subSequence = sequence.concat([[start, end]]);
const subSequences = writeSequences(length, end, subSequence, sequences);
sequences = sequences.concat(subSequences);
}
return sequences;
} | // singleSequence allows for easy debugging of a specific sequence which is
// useful in case of test failures. | random_line_split |
test-string-decoder.js | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
const common = require('../common');
const assert = require('assert');
const inspect = require('util').inspect;
const StringDecoder = require('string_decoder').StringDecoder;
// Test default encoding
let decoder = new StringDecoder();
assert.strictEqual(decoder.encoding, 'utf8');
// Should work without 'new' keyword
const decoder2 = {};
StringDecoder.call(decoder2);
assert.strictEqual(decoder2.encoding, 'utf8');
// UTF-8
test('utf-8', Buffer.from('$', 'utf-8'), '$');
test('utf-8', Buffer.from('¢', 'utf-8'), '¢');
test('utf-8', Buffer.from('€', 'utf-8'), '€');
test('utf-8', Buffer.from('𤭢', 'utf-8'), '𤭢');
// A mixed ascii and non-ascii string
// Test stolen from deps/v8/test/cctest/test-strings.cc
// U+02E4 -> CB A4
// U+0064 -> 64
// U+12E4 -> E1 8B A4
// U+0030 -> 30
// U+3045 -> E3 81 85
test(
'utf-8',
Buffer.from([0xCB, 0xA4, 0x64, 0xE1, 0x8B, 0xA4, 0x30, 0xE3, 0x81, 0x85]),
'\u02e4\u0064\u12e4\u0030\u3045'
);
// Some invalid input, known to have caused trouble with chunking
// in https://github.com/nodejs/node/pull/7310#issuecomment-226445923
// 00: |00000000 ASCII
// 41: |01000001 ASCII
// B8: 10|111000 continuation
// CC: 110|01100 two-byte head
// E2: 1110|0010 three-byte head
// F0: 11110|000 four-byte head
// F1: 11110|001'another four-byte head
// FB: 111110|11 "five-byte head", not UTF-8
test('utf-8', Buffer.from('C9B5A941', 'hex'), '\u0275\ufffdA');
test('utf-8', Buffer.from('E2', 'hex'), '\ufffd');
test('utf-8', Buffer.from('E241', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('CCCCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0B841', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('F1CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0FB00', 'hex'), '\ufffd\ufffd\0');
test('utf-8', Buffer.from('CCE2B8B8', 'hex'), '\ufffd\u2e38');
test('utf-8', Buffer.from('E2B8CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('E2FBCC01', 'hex'), '\ufffd\ufffd\ufffd\u0001');
test('utf-8', Buffer.from('CCB8CDB9', 'hex'), '\u0338\u0379');
// CESU-8 of U+1D40D
// V8 has changed their invalid UTF-8 handling, see
// https://chromium-review.googlesource.com/c/v8/v8/+/671020 for more info.
test('utf-8', Buffer.from('EDA0B5EDB08D', 'hex'),
'\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd');
// UCS-2
test('ucs2', Buffer.from('ababc', 'ucs2'), 'ababc');
// UTF-16LE
test('utf16le', Buffer.from('3DD84DDC', 'hex'), '\ud83d\udc4d'); // thumbs up
// Additional UTF-8 tests
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E1', 'hex')), '');
// A quick test for lastNeed & lastTotal which are undocumented.
assert.strictEqual(decoder.lastNeed, 2);
assert.strictEqual(decoder.lastTotal, 3);
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E18B', 'hex')), '');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd')), '\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd\ufffd\ufffd')),
'\ufffd\ufffd\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('EFBFBDE2', 'hex')), '\ufffd');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('F1', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('41F2', 'hex')), '\ufffdA');
assert.strictEqual(decoder.end(), '\ufffd');
// Additional utf8Text test
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.text(Buffer.from([0x41]), 2), '');
// Additional UTF-16LE surrogate pair tests
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('DC', 'hex')), '\ud83d\udc4d');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD84D', 'hex')), '\ud83d');
assert.strictEqual(decoder.end(), '');
common.expectsError(
() => new StringDecoder(1),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: 1'
}
);
common.expectsError(
() => new StringDecoder('test'),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: test'
}
);
// test verifies that StringDecoder will correctly decode the given input
// buffer with the given encoding to the expected output. It will attempt all
// possible ways to write() the input buffer, see writeSequences(). The
// singleSequence allows for easy debugging of a specific sequence which is
// useful in case of test failures.
function test(encoding, input, expected, singleSequence) {
let sequ | Escape prints the str contents as unicode escape codes.
function unicodeEscape(str) {
let r = '';
for (let i = 0; i < str.length; i++) {
r += `\\u${str.charCodeAt(i).toString(16)}`;
}
return r;
}
// writeSequences returns an array of arrays that describes all possible ways a
// buffer of the given length could be split up and passed to sequential write
// calls.
//
// e.G. writeSequences(3) will return: [
// [ [ 0, 3 ] ],
// [ [ 0, 2 ], [ 2, 3 ] ],
// [ [ 0, 1 ], [ 1, 3 ] ],
// [ [ 0, 1 ], [ 1, 2 ], [ 2, 3 ] ]
// ]
function writeSequences(length, start, sequence) {
if (start === undefined) {
start = 0;
sequence = [];
} else if (start === length) {
return [sequence];
}
let sequences = [];
for (let end = length; end > start; end--) {
const subSequence = sequence.concat([[start, end]]);
const subSequences = writeSequences(length, end, subSequence, sequences);
sequences = sequences.concat(subSequences);
}
return sequences;
}
| ences;
if (!singleSequence) {
sequences = writeSequences(input.length);
} else {
sequences = [singleSequence];
}
const hexNumberRE = /.{2}/g;
sequences.forEach((sequence) => {
const decoder = new StringDecoder(encoding);
let output = '';
sequence.forEach((write) => {
output += decoder.write(input.slice(write[0], write[1]));
});
output += decoder.end();
if (output !== expected) {
const message =
`Expected "${unicodeEscape(expected)}", ` +
`but got "${unicodeEscape(output)}"\n` +
`input: ${input.toString('hex').match(hexNumberRE)}\n` +
`Write sequence: ${JSON.stringify(sequence)}\n` +
`Full Decoder State: ${inspect(decoder)}`;
assert.fail(output, expected, message);
}
});
}
// unicode | identifier_body |
test-string-decoder.js | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
const common = require('../common');
const assert = require('assert');
const inspect = require('util').inspect;
const StringDecoder = require('string_decoder').StringDecoder;
// Test default encoding
let decoder = new StringDecoder();
assert.strictEqual(decoder.encoding, 'utf8');
// Should work without 'new' keyword
const decoder2 = {};
StringDecoder.call(decoder2);
assert.strictEqual(decoder2.encoding, 'utf8');
// UTF-8
test('utf-8', Buffer.from('$', 'utf-8'), '$');
test('utf-8', Buffer.from('¢', 'utf-8'), '¢');
test('utf-8', Buffer.from('€', 'utf-8'), '€');
test('utf-8', Buffer.from('𤭢', 'utf-8'), '𤭢');
// A mixed ascii and non-ascii string
// Test stolen from deps/v8/test/cctest/test-strings.cc
// U+02E4 -> CB A4
// U+0064 -> 64
// U+12E4 -> E1 8B A4
// U+0030 -> 30
// U+3045 -> E3 81 85
test(
'utf-8',
Buffer.from([0xCB, 0xA4, 0x64, 0xE1, 0x8B, 0xA4, 0x30, 0xE3, 0x81, 0x85]),
'\u02e4\u0064\u12e4\u0030\u3045'
);
// Some invalid input, known to have caused trouble with chunking
// in https://github.com/nodejs/node/pull/7310#issuecomment-226445923
// 00: |00000000 ASCII
// 41: |01000001 ASCII
// B8: 10|111000 continuation
// CC: 110|01100 two-byte head
// E2: 1110|0010 three-byte head
// F0: 11110|000 four-byte head
// F1: 11110|001'another four-byte head
// FB: 111110|11 "five-byte head", not UTF-8
test('utf-8', Buffer.from('C9B5A941', 'hex'), '\u0275\ufffdA');
test('utf-8', Buffer.from('E2', 'hex'), '\ufffd');
test('utf-8', Buffer.from('E241', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('CCCCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0B841', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('F1CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0FB00', 'hex'), '\ufffd\ufffd\0');
test('utf-8', Buffer.from('CCE2B8B8', 'hex'), '\ufffd\u2e38');
test('utf-8', Buffer.from('E2B8CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('E2FBCC01', 'hex'), '\ufffd\ufffd\ufffd\u0001');
test('utf-8', Buffer.from('CCB8CDB9', 'hex'), '\u0338\u0379');
// CESU-8 of U+1D40D
// V8 has changed their invalid UTF-8 handling, see
// https://chromium-review.googlesource.com/c/v8/v8/+/671020 for more info.
test('utf-8', Buffer.from('EDA0B5EDB08D', 'hex'),
'\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd');
// UCS-2
test('ucs2', Buffer.from('ababc', 'ucs2'), 'ababc');
// UTF-16LE
test('utf16le', Buffer.from('3DD84DDC', 'hex'), '\ud83d\udc4d'); // thumbs up
// Additional UTF-8 tests
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E1', 'hex')), '');
// A quick test for lastNeed & lastTotal which are undocumented.
assert.strictEqual(decoder.lastNeed, 2);
assert.strictEqual(decoder.lastTotal, 3);
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E18B', 'hex')), '');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd')), '\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd\ufffd\ufffd')),
'\ufffd\ufffd\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('EFBFBDE2', 'hex')), '\ufffd');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('F1', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('41F2', 'hex')), '\ufffdA');
assert.strictEqual(decoder.end(), '\ufffd');
// Additional utf8Text test
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.text(Buffer.from([0x41]), 2), '');
// Additional UTF-16LE surrogate pair tests
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('DC', 'hex')), '\ud83d\udc4d');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD84D', 'hex')), '\ud83d');
assert.strictEqual(decoder.end(), '');
common.expectsError(
() => new StringDecoder(1),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: 1'
}
);
common.expectsError(
() => new StringDecoder('test'),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: test'
}
);
// test verifies that StringDecoder will correctly decode the given input
// buffer with the given encoding to the expected output. It will attempt all
// possible ways to write() the input buffer, see writeSequences(). The
// singleSequence allows for easy debugging of a specific sequence which is
// useful in case of test failures.
function test(encoding, input, expected, singleSequence) {
  // All possible write()-call splittings of `input`, unless the caller pinned
  // one specific sequence for debugging a failure.
  let sequences;
  if (!singleSequence) {
    sequences = writeSequences(input.length);
  } else {
    sequences = [singleSequence];
  }
  // Chunks a hex dump into byte pairs for the failure message below.
  const hexNumberRE = /.{2}/g;
  sequences.forEach((sequence) => {
    // Fresh decoder per sequence — the decoder's internal chunking state is
    // exactly what is under test.
    const decoder = new StringDecoder(encoding);
    let output = '';
    sequence.forEach((write) => {
      output += decoder.write(input.slice(write[0], write[1]));
    });
    // end() flushes any buffered incomplete character (as U+FFFD for utf8).
    output += decoder.end();
    if (output !== expected) {
      const message =
        `Expected "${unicodeEscape(expected)}", ` +
        `but got "${unicodeEscape(output)}"\n` +
        `input: ${input.toString('hex').match(hexNumberRE)}\n` +
        `Write sequence: ${JSON.stringify(sequence)}\n` +
        `Full Decoder State: ${inspect(decoder)}`;
      assert.fail(output, expected, message);
    }
  });
}
// unicodeEscape prints the str contents as unicode escape codes.
function unicodeEscape(str) {
  // Walks UTF-16 code units (charCodeAt), so surrogate pairs appear as two
  // tokens. Hex is not zero-padded; this is a debug formatter only.
  let r = '';
  for (let i = 0; i < str.length; i++) {
    r += `\\u${str.charCodeAt(i).toString(16)}`;
  }
  return r;
}
// writeSequences returns an array of arrays that describes all possible ways a
// buffer of the given length could be split up and passed to sequential write
// calls.
//
// e.G. writeSequences(3) will return: [
// [ [ 0, 3 ] ],
// [ [ 0, 2 ], [ 2, 3 ] ],
// [ [ 0, 1 ], [ 1, 3 ] ],
// [ [ 0, 1 ], [ 1, 2 ], [ 2, 3 ] ]
// ]
function writeSequenc | rt, sequence) {
if (start === undefined) {
start = 0;
sequence = [];
} else if (start === length) {
return [sequence];
}
let sequences = [];
for (let end = length; end > start; end--) {
const subSequence = sequence.concat([[start, end]]);
const subSequences = writeSequences(length, end, subSequence, sequences);
sequences = sequences.concat(subSequences);
}
return sequences;
}
| es(length, sta | identifier_name |
test-string-decoder.js | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
const common = require('../common');
const assert = require('assert');
const inspect = require('util').inspect;
const StringDecoder = require('string_decoder').StringDecoder;
// Test default encoding
let decoder = new StringDecoder();
assert.strictEqual(decoder.encoding, 'utf8');
// Should work without 'new' keyword
const decoder2 = {};
StringDecoder.call(decoder2);
assert.strictEqual(decoder2.encoding, 'utf8');
// UTF-8
test('utf-8', Buffer.from('$', 'utf-8'), '$');
test('utf-8', Buffer.from('¢', 'utf-8'), '¢');
test('utf-8', Buffer.from('€', 'utf-8'), '€');
test('utf-8', Buffer.from('𤭢', 'utf-8'), '𤭢');
// A mixed ascii and non-ascii string
// Test stolen from deps/v8/test/cctest/test-strings.cc
// U+02E4 -> CB A4
// U+0064 -> 64
// U+12E4 -> E1 8B A4
// U+0030 -> 30
// U+3045 -> E3 81 85
test(
'utf-8',
Buffer.from([0xCB, 0xA4, 0x64, 0xE1, 0x8B, 0xA4, 0x30, 0xE3, 0x81, 0x85]),
'\u02e4\u0064\u12e4\u0030\u3045'
);
// Some invalid input, known to have caused trouble with chunking
// in https://github.com/nodejs/node/pull/7310#issuecomment-226445923
// 00: |00000000 ASCII
// 41: |01000001 ASCII
// B8: 10|111000 continuation
// CC: 110|01100 two-byte head
// E2: 1110|0010 three-byte head
// F0: 11110|000 four-byte head
// F1: 11110|001'another four-byte head
// FB: 111110|11 "five-byte head", not UTF-8
test('utf-8', Buffer.from('C9B5A941', 'hex'), '\u0275\ufffdA');
test('utf-8', Buffer.from('E2', 'hex'), '\ufffd');
test('utf-8', Buffer.from('E241', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('CCCCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0B841', 'hex'), '\ufffdA');
test('utf-8', Buffer.from('F1CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('F0FB00', 'hex'), '\ufffd\ufffd\0');
test('utf-8', Buffer.from('CCE2B8B8', 'hex'), '\ufffd\u2e38');
test('utf-8', Buffer.from('E2B8CCB8', 'hex'), '\ufffd\u0338');
test('utf-8', Buffer.from('E2FBCC01', 'hex'), '\ufffd\ufffd\ufffd\u0001');
test('utf-8', Buffer.from('CCB8CDB9', 'hex'), '\u0338\u0379');
// CESU-8 of U+1D40D
// V8 has changed their invalid UTF-8 handling, see
// https://chromium-review.googlesource.com/c/v8/v8/+/671020 for more info.
test('utf-8', Buffer.from('EDA0B5EDB08D', 'hex'),
'\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd');
// UCS-2
test('ucs2', Buffer.from('ababc', 'ucs2'), 'ababc');
// UTF-16LE
test('utf16le', Buffer.from('3DD84DDC', 'hex'), '\ud83d\udc4d'); // thumbs up
// Additional UTF-8 tests
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E1', 'hex')), '');
// A quick test for lastNeed & lastTotal which are undocumented.
assert.strictEqual(decoder.lastNeed, 2);
assert.strictEqual(decoder.lastTotal, 3);
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('E18B', 'hex')), '');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd')), '\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('\ufffd\ufffd\ufffd')),
'\ufffd\ufffd\ufffd');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('EFBFBDE2', 'hex')), '\ufffd');
assert.strictEqual(decoder.end(), '\ufffd');
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.write(Buffer.from('F1', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('41F2', 'hex')), '\ufffdA');
assert.strictEqual(decoder.end(), '\ufffd');
// Additional utf8Text test
decoder = new StringDecoder('utf8');
assert.strictEqual(decoder.text(Buffer.from([0x41]), 2), '');
// Additional UTF-16LE surrogate pair tests
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('DC', 'hex')), '\ud83d\udc4d');
assert.strictEqual(decoder.end(), '');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), '');
assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), '');
assert.strictEqual(decoder.end(), '\ud83d');
decoder = new StringDecoder('utf16le');
assert.strictEqual(decoder.write(Buffer.from('3DD84D', 'hex')), '\ud83d');
assert.strictEqual(decoder.end(), '');
common.expectsError(
() => new StringDecoder(1),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: 1'
}
);
common.expectsError(
() => new StringDecoder('test'),
{
code: 'ERR_UNKNOWN_ENCODING',
type: TypeError,
message: 'Unknown encoding: test'
}
);
// test verifies that StringDecoder will correctly decode the given input
// buffer with the given encoding to the expected output. It will attempt all
// possible ways to write() the input buffer, see writeSequences(). The
// singleSequence allows for easy debugging of a specific sequence which is
// useful in case of test failures.
function test(encoding, input, expected, singleSequence) {
let sequences;
if (!singleSequence) {
sequen | sequences = [singleSequence];
}
const hexNumberRE = /.{2}/g;
sequences.forEach((sequence) => {
const decoder = new StringDecoder(encoding);
let output = '';
sequence.forEach((write) => {
output += decoder.write(input.slice(write[0], write[1]));
});
output += decoder.end();
if (output !== expected) {
const message =
`Expected "${unicodeEscape(expected)}", ` +
`but got "${unicodeEscape(output)}"\n` +
`input: ${input.toString('hex').match(hexNumberRE)}\n` +
`Write sequence: ${JSON.stringify(sequence)}\n` +
`Full Decoder State: ${inspect(decoder)}`;
assert.fail(output, expected, message);
}
});
}
// unicodeEscape prints the str contents as unicode escape codes.
function unicodeEscape(str) {
  // Debug formatter: one \u-prefixed (unpadded) hex token per UTF-16 code
  // unit. split('') splits by code units, matching a charCodeAt() walk even
  // for surrogate pairs.
  return str
    .split('')
    .map((unit) => `\\u${unit.charCodeAt(0).toString(16)}`)
    .join('');
}
// writeSequences returns an array of arrays that describes all possible ways a
// buffer of the given length could be split up and passed to sequential write
// calls.
//
// e.G. writeSequences(3) will return: [
// [ [ 0, 3 ] ],
// [ [ 0, 2 ], [ 2, 3 ] ],
// [ [ 0, 1 ], [ 1, 3 ] ],
// [ [ 0, 1 ], [ 1, 2 ], [ 2, 3 ] ]
// ]
function writeSequences(length, start, sequence) {
  // `start` and `sequence` are recursion state; external callers pass only
  // `length`.
  if (start === undefined) {
    start = 0;
    sequence = [];
  } else if (start === length) {
    // `sequence` now covers [0, length): one complete splitting.
    return [sequence];
  }
  let sequences = [];
  for (let end = length; end > start; end--) {
    const subSequence = sequence.concat([[start, end]]);
    // Fix: the original passed `sequences` as a stray fourth argument, which
    // the function does not declare — dead and misleading, so dropped.
    sequences = sequences.concat(writeSequences(length, end, subSequence));
  }
  return sequences;
}
| ces = writeSequences(input.length);
} else {
| conditional_block |
main.rs | extern crate libc;
extern crate scaly;
use libc::c_char;
use libc::c_int;
use scaly::containers::{Array, Ref, String, Vector};
use scaly::memory::Heap;
use scaly::memory::Page;
use scaly::memory::Region;
use scaly::memory::StackBucket;
use std::ffi::CString;
mod scalyc;
// Rust's main which converts args back to C's main convention (used here for the Rust backend)
fn main() {
let args = std::env::args() | .map(|arg| arg.as_ptr())
.collect::<Vec<*const c_char>>();
_main(c_args.len() as c_int, c_args.as_ptr());
}
// C style main function which converts args to Scaly's main convention (would be provided by the LLVM backend)
fn _main(argc: c_int, argv: *const *const c_char) {
    // Root region backed by a freshly created heap/stack bucket; every later
    // allocation in this run hangs off this page.
    let _r = Region::create_from_page(Page::get(StackBucket::create(&mut Heap::create()) as usize));
    _scalyc_main(&_r, {
        // Child region only for the growable Array; the final Vector is
        // allocated on the root region's page.
        let _r_1 = Region::create(&_r);
        let mut arguments: Ref<Array<String>> = Ref::new(_r_1.page, Array::new());
        for n in 0..argc {
            // Skip argv[0] (conventionally the program name); only real
            // arguments are forwarded.
            if n == 0 {
                continue;
            }
            unsafe {
                // argv is a C array of NUL-terminated strings; copy each into
                // a Scaly String owned by the root region's page.
                let arg = argv.offset(n as isize);
                let s = String::from_c_string(_r.page, *arg);
                (*arguments).add(s);
            }
        }
        Ref::new(_r.page, Vector::from_array(_r.page, arguments))
    });
}
fn _scalyc_main(_pr: &Region, arguments: Ref<Vector<String>>) {
use scalyc::compiler::Compiler;
let _r = Region::create(_pr);
let compiler = Ref::new(_r.page, Compiler::new(_r.page, arguments));
(*compiler).compile(&_r, _r.page, _r.page);
} | .map(|arg| CString::new(arg).unwrap())
.collect::<Vec<CString>>();
let c_args = args
.iter() | random_line_split |
main.rs | extern crate libc;
extern crate scaly;
use libc::c_char;
use libc::c_int;
use scaly::containers::{Array, Ref, String, Vector};
use scaly::memory::Heap;
use scaly::memory::Page;
use scaly::memory::Region;
use scaly::memory::StackBucket;
use std::ffi::CString;
mod scalyc;
// Rust's main which converts args back to C's main convention (used here for the Rust backend)
fn main() {
    // Own the CStrings in `args` first; `c_args` below stores raw pointers
    // into them, so `args` must stay alive until `_main` returns.
    let args = std::env::args()
        .map(|arg| CString::new(arg).unwrap())
        .collect::<Vec<CString>>();
    // Borrowed view: one `char*` per argument, C-main style.
    let c_args = args
        .iter()
        .map(|arg| arg.as_ptr())
        .collect::<Vec<*const c_char>>();
    _main(c_args.len() as c_int, c_args.as_ptr());
}
// C style main function which converts args to Scaly's main convention (would be provided by the LLVM backend)
fn | (argc: c_int, argv: *const *const c_char) {
let _r = Region::create_from_page(Page::get(StackBucket::create(&mut Heap::create()) as usize));
_scalyc_main(&_r, {
let _r_1 = Region::create(&_r);
let mut arguments: Ref<Array<String>> = Ref::new(_r_1.page, Array::new());
for n in 0..argc {
if n == 0 {
continue;
}
unsafe {
let arg = argv.offset(n as isize);
let s = String::from_c_string(_r.page, *arg);
(*arguments).add(s);
}
}
Ref::new(_r.page, Vector::from_array(_r.page, arguments))
});
}
fn _scalyc_main(_pr: &Region, arguments: Ref<Vector<String>>) {
    // Scaly entry point proper: build a Compiler in a child region of the
    // caller's region and run the compilation.
    use scalyc::compiler::Compiler;
    let _r = Region::create(_pr);
    let compiler = Ref::new(_r.page, Compiler::new(_r.page, arguments));
    (*compiler).compile(&_r, _r.page, _r.page);
}
| _main | identifier_name |
main.rs | extern crate libc;
extern crate scaly;
use libc::c_char;
use libc::c_int;
use scaly::containers::{Array, Ref, String, Vector};
use scaly::memory::Heap;
use scaly::memory::Page;
use scaly::memory::Region;
use scaly::memory::StackBucket;
use std::ffi::CString;
mod scalyc;
// Rust's main which converts args back to C's main convention (used here for the Rust backend)
fn main() {
    // Own the CStrings in `args` first; `c_args` below stores raw pointers
    // into them, so `args` must stay alive until `_main` returns.
    let args = std::env::args()
        .map(|arg| CString::new(arg).unwrap())
        .collect::<Vec<CString>>();
    // Borrowed view: one `char*` per argument, C-main style.
    let c_args = args
        .iter()
        .map(|arg| arg.as_ptr())
        .collect::<Vec<*const c_char>>();
    _main(c_args.len() as c_int, c_args.as_ptr());
}
// C style main function which converts args to Scaly's main convention (would be provided by the LLVM backend)
fn _main(argc: c_int, argv: *const *const c_char) {
let _r = Region::create_from_page(Page::get(StackBucket::create(&mut Heap::create()) as usize));
_scalyc_main(&_r, {
let _r_1 = Region::create(&_r);
let mut arguments: Ref<Array<String>> = Ref::new(_r_1.page, Array::new());
for n in 0..argc {
if n == 0 |
unsafe {
let arg = argv.offset(n as isize);
let s = String::from_c_string(_r.page, *arg);
(*arguments).add(s);
}
}
Ref::new(_r.page, Vector::from_array(_r.page, arguments))
});
}
fn _scalyc_main(_pr: &Region, arguments: Ref<Vector<String>>) {
    // Scaly entry point proper: build a Compiler in a child region of the
    // caller's region and run the compilation.
    use scalyc::compiler::Compiler;
    let _r = Region::create(_pr);
    let compiler = Ref::new(_r.page, Compiler::new(_r.page, arguments));
    (*compiler).compile(&_r, _r.page, _r.page);
}
| {
continue;
} | conditional_block |
main.rs | extern crate libc;
extern crate scaly;
use libc::c_char;
use libc::c_int;
use scaly::containers::{Array, Ref, String, Vector};
use scaly::memory::Heap;
use scaly::memory::Page;
use scaly::memory::Region;
use scaly::memory::StackBucket;
use std::ffi::CString;
mod scalyc;
// Rust's main which converts args back to C's main convention (used here for the Rust backend)
fn main() {
    // Own the CStrings in `args` first; `c_args` below stores raw pointers
    // into them, so `args` must stay alive until `_main` returns.
    let args = std::env::args()
        .map(|arg| CString::new(arg).unwrap())
        .collect::<Vec<CString>>();
    // Borrowed view: one `char*` per argument, C-main style.
    let c_args = args
        .iter()
        .map(|arg| arg.as_ptr())
        .collect::<Vec<*const c_char>>();
    _main(c_args.len() as c_int, c_args.as_ptr());
}
// C style main function which converts args to Scaly's main convention (would be provided by the LLVM backend)
fn _main(argc: c_int, argv: *const *const c_char) {
    // Root region backed by a freshly created heap/stack bucket; every later
    // allocation in this run hangs off this page.
    let _r = Region::create_from_page(Page::get(StackBucket::create(&mut Heap::create()) as usize));
    _scalyc_main(&_r, {
        // Child region only for the growable Array; the final Vector is
        // allocated on the root region's page.
        let _r_1 = Region::create(&_r);
        let mut arguments: Ref<Array<String>> = Ref::new(_r_1.page, Array::new());
        for n in 0..argc {
            // Skip argv[0] (conventionally the program name); only real
            // arguments are forwarded.
            if n == 0 {
                continue;
            }
            unsafe {
                // argv is a C array of NUL-terminated strings; copy each into
                // a Scaly String owned by the root region's page.
                let arg = argv.offset(n as isize);
                let s = String::from_c_string(_r.page, *arg);
                (*arguments).add(s);
            }
        }
        Ref::new(_r.page, Vector::from_array(_r.page, arguments))
    });
}
fn _scalyc_main(_pr: &Region, arguments: Ref<Vector<String>>) | {
use scalyc::compiler::Compiler;
let _r = Region::create(_pr);
let compiler = Ref::new(_r.page, Compiler::new(_r.page, arguments));
(*compiler).compile(&_r, _r.page, _r.page);
} | identifier_body | |
dashboard_deeplink_provider_types.ts | /* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ==============================================================================*/
import {URLDeserializedState as MetricsURLDeserializedState} from '../metrics/types';
import {URLDeserializedState as RunsURLDeserializedState} from '../runs/types';
// No need to deserialize the Experimental Plugins as it is immutable and is only read at
// the start of the application.
// Union of every feature's URL-deserialized state contribution; a feature
// that serializes state into the URL intersects its own type here.
export type DeserializedState = MetricsURLDeserializedState &
  RunsURLDeserializedState;

// Query-parameter keys used when (de)serializing dashboard state to the URL.
export const SMOOTHING_KEY = 'smoothing';
export const PINNED_CARDS_KEY = 'pinnedCards';
export const RUN_COLOR_GROUP_KEY = 'runColorGroup';
export const TAG_FILTER_KEY = 'tagFilter';
export const RUN_FILTER_KEY = 'runFilter'; | See the License for the specific language governing permissions and
limitations under the License. | random_line_split |
__init__.py | # This file is part of Pyphen
#
# Copyright 2008 - Wilbert Berendsen <info@wilbertberendsen.nl>
# Copyright 2012-2013 - Guillaume Ayoub <guillaume.ayoub@kozea.fr>
#
# This library is free software. It is released under the
# GPL 2.0+/LGPL 2.1+/MPL 1.1 tri-license. See COPYING.GPL, COPYING.LGPL and
# COPYING.MPL for more details.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
"""
Pyphen
======
Pure Python module to hyphenate text, inspired by Ruby's Text::Hyphen.
"""
from __future__ import unicode_literals
import os
import re
try:
    # Python 2 provides unichr as a builtin; probe for it.
    unichr
except NameError:
    # Python3
    unichr = chr

__all__ = ('Pyphen', 'LANGUAGES', 'language_fallback')

# cache of per-file HyphDict objects
hdcache = {}

# precompile some stuff
# parse_hex substitutes ^^hh escapes; parse splits a pattern into
# (digit, character) pairs.
parse_hex = re.compile(r'\^{2}([0-9a-f]{2})').sub
parse = re.compile(r'(\d?)(\D?)').findall

try:
    # Resolve the bundled dictionaries via pkg_resources when available
    # (presumably for non-filesystem installs — TODO confirm).
    from pkg_resources import resource_filename
    dictionaries_root = resource_filename('pyphen', 'dictionaries')
except ImportError:
    dictionaries_root = os.path.join(os.path.dirname(__file__), 'dictionaries')

# Map language code -> dictionary path. filename[5:-4] strips the 'hyph_'
# prefix and '.dic' suffix to obtain the code.
LANGUAGES = dict(
    (filename[5:-4], os.path.join(dictionaries_root, filename))
    for filename in os.listdir(dictionaries_root)
    if filename.endswith('.dic'))
def language_fallback(language):
    """Get a fallback language available in our dictionaries.

    http://www.unicode.org/reports/tr35/#Locale_Inheritance

    We use the normal truncation inheritance. This function needs aliases
    including scripts for languages with multiple regions available.
    """
    subtags = language.replace('-', '_').split('_')
    # Drop trailing subtags one at a time until a bundled dictionary matches.
    for length in range(len(subtags), 0, -1):
        candidate = '_'.join(subtags[:length])
        if candidate in LANGUAGES:
            return candidate
    return None
class AlternativeParser(object):
    """Parser of nonstandard hyphen pattern alternative.

    The instance returns a special int with data about the current position in
    the pattern when called with an odd value.
    """
    def __init__(self, pattern, alternative):
        # An alternative is "change,index,cut", e.g. "ff=f,1,2" ('=' marks
        # where the hyphen goes in the changed spelling).
        alternative = alternative.split(',')
        self.change = alternative[0]
        # Offset (relative to the hyphenation point) where the change applies.
        self.index = int(alternative[1])
        # How many characters to remove while substituting the change.
        self.cut = int(alternative[2])
        if pattern.startswith('.'):
            # The leading '.' matches the word-boundary sentinel added by
            # HyphDict.positions; account for that extra pseudo character.
            self.index += 1

    def __call__(self, value):
        # Called once per pattern position (see HyphDict.__init__); track how
        # far we are from the alternative's anchor.
        self.index -= 1
        value = int(value)
        if value & 1:
            # Odd values mark hyphenation points: attach the nonstandard
            # hyphenation data to them via DataInt.
            return DataInt(value, (self.change, self.index, self.cut))
        else:
            return value
class DataInt(int):
    """``int`` with some other data can be stuck to in a ``data`` attribute."""

    def __new__(cls, value, data=None, reference=None):
        """Create a new ``DataInt``.

        Call with ``reference=dataint_object`` to use the data from another
        ``DataInt``.
        """
        instance = super(DataInt, cls).__new__(cls, value)
        # Copy the payload from `reference` only when it is a *truthy*
        # DataInt (a DataInt of value 0 is falsy and is ignored).
        copy_from_reference = isinstance(reference, DataInt) and reference
        instance.data = reference.data if copy_from_reference else data
        return instance
class HyphDict(object):
    """Hyphenation patterns parsed from a hunspell ``hyph_*.dic`` file."""

    def __init__(self, filename):
        """Read a ``hyph_*.dic`` and parse its patterns.

        :param filename: filename of hyph_*.dic to read
        """
        # Maps pattern letters -> (start offset, tuple of digit values).
        self.patterns = {}
        with open(filename, 'rb') as stream:
            # see "man 4 hunspell", iscii-devanagari is not supported by python
            charset = stream.readline().strip().decode('ascii')
            if charset.lower() == 'microsoft-cp1251':
                charset = 'cp1251'
            for pattern in stream:
                pattern = pattern.decode(charset).strip()
                # Skip blank lines and '%'/'#' comment lines.
                if (not pattern or
                        pattern.startswith('%') or
                        pattern.startswith('#')):
                    continue
                # replace ^^hh with the real character
                pattern = parse_hex(
                    lambda match: unichr(int(match.group(1), 16)), pattern)
                # read nonstandard hyphen alternatives
                if '/' in pattern:
                    pattern, alternative = pattern.split('/', 1)
                    factory = AlternativeParser(pattern, alternative)
                else:
                    factory = int
                # Split "a1bc2d" into letters and their inter-letter digit
                # values; missing digits default to 0.
                tags, values = zip(*[
                    (string, factory(i or '0'))
                    for i, string in parse(pattern)])
                # if only zeros, skip this pattern
                if max(values) == 0:
                    continue
                # chop zeros from beginning and end, and store start offset
                start, end = 0, len(values)
                while not values[start]:
                    start += 1
                while not values[end - 1]:
                    end -= 1
                self.patterns[''.join(tags)] = start, values[start:end]
        # Memoizes positions() results per lower-cased word.
        self.cache = {}
        # Longest pattern key; bounds the substring search in positions().
        self.maxlen = max(len(key) for key in self.patterns)

    def positions(self, word):
        """Get a list of positions where the word can be hyphenated.

        :param word: unicode string of the word to hyphenate

        E.g. for the dutch word 'lettergrepen' this method returns ``[3, 6,
        9]``.

        Each position is a ``DataInt`` with a data attribute.

        If the data attribute is not ``None``, it contains a tuple with
        information about nonstandard hyphenation at that point: ``(change,
        index, cut)``.

        change
          a string like ``'ff=f'``, that describes how hyphenation should
          take place.

        index
          where to substitute the change, counting from the current point

        cut
          how many characters to remove while substituting the nonstandard
          hyphenation
        """
        word = word.lower()
        points = self.cache.get(word)
        if points is None:
            # '.' sentinels mark the word boundaries, matching the
            # '.'-anchored patterns in self.patterns.
            pointed_word = '.%s.' % word
            references = [0] * (len(pointed_word) + 1)
            for i in range(len(pointed_word) - 1):
                # Try every pattern that could start at i; patterns are at
                # most self.maxlen characters long.
                for j in range(
                        i + 1, min(i + self.maxlen, len(pointed_word)) + 1):
                    pattern = self.patterns.get(pointed_word[i:j])
                    if pattern:
                        offset, values = pattern
                        slice_ = slice(i + offset, i + offset + len(values))
                        # Keep the highest digit seen at each position.
                        references[slice_] = map(
                            max, values, references[slice_])
            # Odd values mark hyphenation points; subtract one to drop the
            # leading '.' sentinel from the indices.
            points = [
                DataInt(i - 1, reference=reference)
                for i, reference in enumerate(references) if reference % 2]
            self.cache[word] = points
        return points
class Pyphen(object):
    """Hyphenation class, with methods to hyphenate strings in various ways."""

    def __init__(self, filename=None, lang=None, left=2, right=2, cache=True):
        """Create an hyphenation instance for given lang or filename.

        :param filename: filename of hyph_*.dic to read
        :param lang: lang of the included dict to use if no filename is given
        :param left: minimum number of characters of the first syllabe
        :param right: minimum number of characters of the last syllabe
        :param cache: if ``True``, use cached copy of the hyphenation patterns
        """
        if not filename:
            # Resolve the bundled dictionary via truncation fallback
            # (e.g. 'nl_NL_variant' -> 'nl_NL').
            filename = LANGUAGES[language_fallback(lang)]
        self.left = left
        self.right = right
        if not cache or filename not in hdcache:
            # Parsing a .dic file is expensive; HyphDict objects are shared
            # per filename through the module-level hdcache.
            hdcache[filename] = HyphDict(filename)
        self.hd = hdcache[filename]
    def positions(self, word):
        """Get a list of positions where the word can be hyphenated.

        :param word: unicode string of the word to hyphenate

        See also ``HyphDict.positions``. The points that are too far to the
        left or right are removed.
        """
        # Enforce the minimum syllable lengths configured in __init__.
        right = len(word) - self.right
        return [i for i in self.hd.positions(word) if self.left <= i <= right]
    def iterate(self, word):
        """Iterate over all hyphenation possibilities, the longest first.

        :param word: unicode string of the word to hyphenate

        Yields ``(first_part, second_part)`` tuples.
        """
        for position in reversed(self.positions(word)):
            if position.data:
                # get the nonstandard hyphenation data
                change, index, cut = position.data
                # index is relative to the hyphenation point; make it
                # absolute within the word.
                index += position
                if word.isupper():
                    change = change.upper()
                # '=' splits the changed spelling into the part that ends the
                # first half and the part that starts the second half.
                c1, c2 = change.split('=')
                yield word[:index] + c1, c2 + word[index + cut:]
            else:
                yield word[:position], word[position:]
def | (self, word, width, hyphen='-'):
"""Get the longest possible first part and the last part of a word.
:param word: unicode string of the word to hyphenate
:param width: maximum length of the first part
:param hyphen: unicode string used as hyphen character
The first part has the hyphen already attached.
Returns ``None`` if there is no hyphenation point before ``width``, or
if the word could not be hyphenated.
"""
width -= len(hyphen)
for w1, w2 in self.iterate(word):
if len(w1) <= width:
return w1 + hyphen, w2
def inserted(self, word, hyphen='-'):
"""Get the word as a string with all the possible hyphens inserted.
:param word: unicode string of the word to hyphenate
:param hyphen: unicode string used as hyphen character
E.g. for the dutch word ``'lettergrepen'``, this method returns the
unicode string ``'let-ter-gre-pen'``. The hyphen string to use can be
given as the second parameter, that defaults to ``'-'``.
"""
word_list = list(word)
for position in reversed(self.positions(word)):
if position.data:
# get the nonstandard hyphenation data
change, index, cut = position.data
index += position
if word.isupper():
change = change.upper()
word_list[index:index + cut] = change.replace('=', hyphen)
else:
word_list.insert(position, hyphen)
return ''.join(word_list)
__call__ = iterate
| wrap | identifier_name |
__init__.py | # This file is part of Pyphen
#
# Copyright 2008 - Wilbert Berendsen <info@wilbertberendsen.nl>
# Copyright 2012-2013 - Guillaume Ayoub <guillaume.ayoub@kozea.fr>
#
# This library is free software. It is released under the
# GPL 2.0+/LGPL 2.1+/MPL 1.1 tri-license. See COPYING.GPL, COPYING.LGPL and
# COPYING.MPL for more details.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
"""
Pyphen
======
Pure Python module to hyphenate text, inspired by Ruby's Text::Hyphen.
"""
from __future__ import unicode_literals
import os
import re
try:
unichr
except NameError:
# Python3
unichr = chr
__all__ = ('Pyphen', 'LANGUAGES', 'language_fallback')
# cache of per-file HyphDict objects
hdcache = {}
# precompile some stuff
parse_hex = re.compile(r'\^{2}([0-9a-f]{2})').sub
parse = re.compile(r'(\d?)(\D?)').findall
try:
from pkg_resources import resource_filename
dictionaries_root = resource_filename('pyphen', 'dictionaries')
except ImportError:
dictionaries_root = os.path.join(os.path.dirname(__file__), 'dictionaries')
LANGUAGES = dict(
(filename[5:-4], os.path.join(dictionaries_root, filename))
for filename in os.listdir(dictionaries_root)
if filename.endswith('.dic'))
def language_fallback(language):
"""Get a fallback language available in our dictionaries.
http://www.unicode.org/reports/tr35/#Locale_Inheritance
We use the normal truncation inheritance. This function needs aliases
including scripts for languages with multiple regions available.
"""
parts = language.replace('-', '_').split('_')
while parts:
language = '_'.join(parts)
if language in LANGUAGES:
return language
parts.pop()
class AlternativeParser(object):
"""Parser of nonstandard hyphen pattern alternative.
The instance returns a special int with data about the current position in
the pattern when called with an odd value.
"""
def __init__(self, pattern, alternative):
alternative = alternative.split(',')
self.change = alternative[0]
self.index = int(alternative[1])
self.cut = int(alternative[2])
if pattern.startswith('.'):
self.index += 1
def __call__(self, value):
self.index -= 1
value = int(value)
if value & 1:
return DataInt(value, (self.change, self.index, self.cut))
else:
return value
class DataInt(int):
"""``int`` with some other data can be stuck to in a ``data`` attribute."""
def __new__(cls, value, data=None, reference=None):
"""Create a new ``DataInt``.
Call with ``reference=dataint_object`` to use the data from another
``DataInt``.
"""
obj = int.__new__(cls, value)
if reference and isinstance(reference, DataInt):
obj.data = reference.data
else:
obj.data = data
return obj
class HyphDict(object):
"""Hyphenation patterns."""
def __init__(self, filename):
"""Read a ``hyph_*.dic`` and parse its patterns.
:param filename: filename of hyph_*.dic to read
"""
self.patterns = {}
with open(filename, 'rb') as stream:
# see "man 4 hunspell", iscii-devanagari is not supported by python
charset = stream.readline().strip().decode('ascii')
if charset.lower() == 'microsoft-cp1251':
charset = 'cp1251'
for pattern in stream:
pattern = pattern.decode(charset).strip()
if (not pattern or
pattern.startswith('%') or
pattern.startswith('#')):
continue
# replace ^^hh with the real character
pattern = parse_hex(
lambda match: unichr(int(match.group(1), 16)), pattern)
# read nonstandard hyphen alternatives
if '/' in pattern:
pattern, alternative = pattern.split('/', 1)
factory = AlternativeParser(pattern, alternative)
else:
factory = int
tags, values = zip(*[
(string, factory(i or '0'))
for i, string in parse(pattern)])
# if only zeros, skip this pattern
if max(values) == 0:
continue
# chop zeros from beginning and end, and store start offset
start, end = 0, len(values)
while not values[start]:
start += 1
while not values[end - 1]:
end -= 1
self.patterns[''.join(tags)] = start, values[start:end]
self.cache = {}
self.maxlen = max(len(key) for key in self.patterns)
def positions(self, word):
"""Get a list of positions where the word can be hyphenated.
:param word: unicode string of the word to hyphenate
E.g. for the dutch word 'lettergrepen' this method returns ``[3, 6,
9]``.
Each position is a ``DataInt`` with a data attribute.
If the data attribute is not ``None``, it contains a tuple with
information about nonstandard hyphenation at that point: ``(change,
index, cut)``.
change
a string like ``'ff=f'``, that describes how hyphenation should
take place.
index
where to substitute the change, counting from the current point
cut
how many characters to remove while substituting the nonstandard
hyphenation
"""
word = word.lower()
points = self.cache.get(word)
if points is None:
pointed_word = '.%s.' % word
references = [0] * (len(pointed_word) + 1)
for i in range(len(pointed_word) - 1):
for j in range(
i + 1, min(i + self.maxlen, len(pointed_word)) + 1):
pattern = self.patterns.get(pointed_word[i:j])
if pattern:
offset, values = pattern
slice_ = slice(i + offset, i + offset + len(values))
references[slice_] = map(
max, values, references[slice_])
points = [
DataInt(i - 1, reference=reference)
for i, reference in enumerate(references) if reference % 2]
self.cache[word] = points
return points
class Pyphen(object):
"""Hyphenation class, with methods to hyphenate strings in various ways."""
def __init__(self, filename=None, lang=None, left=2, right=2, cache=True):
"""Create an hyphenation instance for given lang or filename.
:param filename: filename of hyph_*.dic to read
:param lang: lang of the included dict to use if no filename is given
:param left: minimum number of characters of the first syllabe
:param right: minimum number of characters of the last syllabe
:param cache: if ``True``, use cached copy of the hyphenation patterns
"""
if not filename:
filename = LANGUAGES[language_fallback(lang)]
self.left = left
self.right = right
if not cache or filename not in hdcache:
hdcache[filename] = HyphDict(filename)
self.hd = hdcache[filename]
def positions(self, word):
"""Get a list of positions where the word can be hyphenated.
:param word: unicode string of the word to hyphenate
See also ``HyphDict.positions``. The points that are too far to the
left or right are removed.
"""
right = len(word) - self.right
return [i for i in self.hd.positions(word) if self.left <= i <= right]
def iterate(self, word):
"""Iterate over all hyphenation possibilities, the longest first.
:param word: unicode string of the word to hyphenate
"""
for position in reversed(self.positions(word)):
|
def wrap(self, word, width, hyphen='-'):
"""Get the longest possible first part and the last part of a word.
:param word: unicode string of the word to hyphenate
:param width: maximum length of the first part
:param hyphen: unicode string used as hyphen character
The first part has the hyphen already attached.
Returns ``None`` if there is no hyphenation point before ``width``, or
if the word could not be hyphenated.
"""
width -= len(hyphen)
for w1, w2 in self.iterate(word):
if len(w1) <= width:
return w1 + hyphen, w2
def inserted(self, word, hyphen='-'):
"""Get the word as a string with all the possible hyphens inserted.
:param word: unicode string of the word to hyphenate
:param hyphen: unicode string used as hyphen character
E.g. for the dutch word ``'lettergrepen'``, this method returns the
unicode string ``'let-ter-gre-pen'``. The hyphen string to use can be
given as the second parameter, that defaults to ``'-'``.
"""
word_list = list(word)
for position in reversed(self.positions(word)):
if position.data:
# get the nonstandard hyphenation data
change, index, cut = position.data
index += position
if word.isupper():
change = change.upper()
word_list[index:index + cut] = change.replace('=', hyphen)
else:
word_list.insert(position, hyphen)
return ''.join(word_list)
__call__ = iterate
| if position.data:
# get the nonstandard hyphenation data
change, index, cut = position.data
index += position
if word.isupper():
change = change.upper()
c1, c2 = change.split('=')
yield word[:index] + c1, c2 + word[index + cut:]
else:
yield word[:position], word[position:] | conditional_block |
__init__.py | # This file is part of Pyphen
#
# Copyright 2008 - Wilbert Berendsen <info@wilbertberendsen.nl>
# Copyright 2012-2013 - Guillaume Ayoub <guillaume.ayoub@kozea.fr>
#
# This library is free software. It is released under the
# GPL 2.0+/LGPL 2.1+/MPL 1.1 tri-license. See COPYING.GPL, COPYING.LGPL and
# COPYING.MPL for more details.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
"""
Pyphen
======
Pure Python module to hyphenate text, inspired by Ruby's Text::Hyphen.
"""
from __future__ import unicode_literals
import os
import re
try:
unichr
except NameError:
# Python3
unichr = chr
__all__ = ('Pyphen', 'LANGUAGES', 'language_fallback')
# cache of per-file HyphDict objects
hdcache = {}
# precompile some stuff
parse_hex = re.compile(r'\^{2}([0-9a-f]{2})').sub
parse = re.compile(r'(\d?)(\D?)').findall
try:
from pkg_resources import resource_filename
dictionaries_root = resource_filename('pyphen', 'dictionaries')
except ImportError:
dictionaries_root = os.path.join(os.path.dirname(__file__), 'dictionaries')
LANGUAGES = dict(
(filename[5:-4], os.path.join(dictionaries_root, filename))
for filename in os.listdir(dictionaries_root)
if filename.endswith('.dic'))
def language_fallback(language):
"""Get a fallback language available in our dictionaries.
http://www.unicode.org/reports/tr35/#Locale_Inheritance
We use the normal truncation inheritance. This function needs aliases
including scripts for languages with multiple regions available.
"""
parts = language.replace('-', '_').split('_')
while parts:
language = '_'.join(parts)
if language in LANGUAGES:
return language
parts.pop()
class AlternativeParser(object):
"""Parser of nonstandard hyphen pattern alternative.
The instance returns a special int with data about the current position in
the pattern when called with an odd value.
"""
def __init__(self, pattern, alternative):
alternative = alternative.split(',')
self.change = alternative[0]
self.index = int(alternative[1])
self.cut = int(alternative[2])
if pattern.startswith('.'):
self.index += 1
| else:
return value
class DataInt(int):
"""``int`` with some other data can be stuck to in a ``data`` attribute."""
def __new__(cls, value, data=None, reference=None):
"""Create a new ``DataInt``.
Call with ``reference=dataint_object`` to use the data from another
``DataInt``.
"""
obj = int.__new__(cls, value)
if reference and isinstance(reference, DataInt):
obj.data = reference.data
else:
obj.data = data
return obj
class HyphDict(object):
"""Hyphenation patterns."""
def __init__(self, filename):
"""Read a ``hyph_*.dic`` and parse its patterns.
:param filename: filename of hyph_*.dic to read
"""
self.patterns = {}
with open(filename, 'rb') as stream:
# see "man 4 hunspell", iscii-devanagari is not supported by python
charset = stream.readline().strip().decode('ascii')
if charset.lower() == 'microsoft-cp1251':
charset = 'cp1251'
for pattern in stream:
pattern = pattern.decode(charset).strip()
if (not pattern or
pattern.startswith('%') or
pattern.startswith('#')):
continue
# replace ^^hh with the real character
pattern = parse_hex(
lambda match: unichr(int(match.group(1), 16)), pattern)
# read nonstandard hyphen alternatives
if '/' in pattern:
pattern, alternative = pattern.split('/', 1)
factory = AlternativeParser(pattern, alternative)
else:
factory = int
tags, values = zip(*[
(string, factory(i or '0'))
for i, string in parse(pattern)])
# if only zeros, skip this pattern
if max(values) == 0:
continue
# chop zeros from beginning and end, and store start offset
start, end = 0, len(values)
while not values[start]:
start += 1
while not values[end - 1]:
end -= 1
self.patterns[''.join(tags)] = start, values[start:end]
self.cache = {}
self.maxlen = max(len(key) for key in self.patterns)
def positions(self, word):
"""Get a list of positions where the word can be hyphenated.
:param word: unicode string of the word to hyphenate
E.g. for the dutch word 'lettergrepen' this method returns ``[3, 6,
9]``.
Each position is a ``DataInt`` with a data attribute.
If the data attribute is not ``None``, it contains a tuple with
information about nonstandard hyphenation at that point: ``(change,
index, cut)``.
change
a string like ``'ff=f'``, that describes how hyphenation should
take place.
index
where to substitute the change, counting from the current point
cut
how many characters to remove while substituting the nonstandard
hyphenation
"""
word = word.lower()
points = self.cache.get(word)
if points is None:
pointed_word = '.%s.' % word
references = [0] * (len(pointed_word) + 1)
for i in range(len(pointed_word) - 1):
for j in range(
i + 1, min(i + self.maxlen, len(pointed_word)) + 1):
pattern = self.patterns.get(pointed_word[i:j])
if pattern:
offset, values = pattern
slice_ = slice(i + offset, i + offset + len(values))
references[slice_] = map(
max, values, references[slice_])
points = [
DataInt(i - 1, reference=reference)
for i, reference in enumerate(references) if reference % 2]
self.cache[word] = points
return points
class Pyphen(object):
"""Hyphenation class, with methods to hyphenate strings in various ways."""
def __init__(self, filename=None, lang=None, left=2, right=2, cache=True):
"""Create an hyphenation instance for given lang or filename.
:param filename: filename of hyph_*.dic to read
:param lang: lang of the included dict to use if no filename is given
:param left: minimum number of characters of the first syllabe
:param right: minimum number of characters of the last syllabe
:param cache: if ``True``, use cached copy of the hyphenation patterns
"""
if not filename:
filename = LANGUAGES[language_fallback(lang)]
self.left = left
self.right = right
if not cache or filename not in hdcache:
hdcache[filename] = HyphDict(filename)
self.hd = hdcache[filename]
def positions(self, word):
"""Get a list of positions where the word can be hyphenated.
:param word: unicode string of the word to hyphenate
See also ``HyphDict.positions``. The points that are too far to the
left or right are removed.
"""
right = len(word) - self.right
return [i for i in self.hd.positions(word) if self.left <= i <= right]
def iterate(self, word):
"""Iterate over all hyphenation possibilities, the longest first.
:param word: unicode string of the word to hyphenate
"""
for position in reversed(self.positions(word)):
if position.data:
# get the nonstandard hyphenation data
change, index, cut = position.data
index += position
if word.isupper():
change = change.upper()
c1, c2 = change.split('=')
yield word[:index] + c1, c2 + word[index + cut:]
else:
yield word[:position], word[position:]
def wrap(self, word, width, hyphen='-'):
"""Get the longest possible first part and the last part of a word.
:param word: unicode string of the word to hyphenate
:param width: maximum length of the first part
:param hyphen: unicode string used as hyphen character
The first part has the hyphen already attached.
Returns ``None`` if there is no hyphenation point before ``width``, or
if the word could not be hyphenated.
"""
width -= len(hyphen)
for w1, w2 in self.iterate(word):
if len(w1) <= width:
return w1 + hyphen, w2
def inserted(self, word, hyphen='-'):
"""Get the word as a string with all the possible hyphens inserted.
:param word: unicode string of the word to hyphenate
:param hyphen: unicode string used as hyphen character
E.g. for the dutch word ``'lettergrepen'``, this method returns the
unicode string ``'let-ter-gre-pen'``. The hyphen string to use can be
given as the second parameter, that defaults to ``'-'``.
"""
word_list = list(word)
for position in reversed(self.positions(word)):
if position.data:
# get the nonstandard hyphenation data
change, index, cut = position.data
index += position
if word.isupper():
change = change.upper()
word_list[index:index + cut] = change.replace('=', hyphen)
else:
word_list.insert(position, hyphen)
return ''.join(word_list)
__call__ = iterate | def __call__(self, value):
self.index -= 1
value = int(value)
if value & 1:
return DataInt(value, (self.change, self.index, self.cut)) | random_line_split |
__init__.py | # This file is part of Pyphen
#
# Copyright 2008 - Wilbert Berendsen <info@wilbertberendsen.nl>
# Copyright 2012-2013 - Guillaume Ayoub <guillaume.ayoub@kozea.fr>
#
# This library is free software. It is released under the
# GPL 2.0+/LGPL 2.1+/MPL 1.1 tri-license. See COPYING.GPL, COPYING.LGPL and
# COPYING.MPL for more details.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
"""
Pyphen
======
Pure Python module to hyphenate text, inspired by Ruby's Text::Hyphen.
"""
from __future__ import unicode_literals
import os
import re
try:
unichr
except NameError:
# Python3
unichr = chr
__all__ = ('Pyphen', 'LANGUAGES', 'language_fallback')
# cache of per-file HyphDict objects
hdcache = {}
# precompile some stuff
parse_hex = re.compile(r'\^{2}([0-9a-f]{2})').sub
parse = re.compile(r'(\d?)(\D?)').findall
try:
from pkg_resources import resource_filename
dictionaries_root = resource_filename('pyphen', 'dictionaries')
except ImportError:
dictionaries_root = os.path.join(os.path.dirname(__file__), 'dictionaries')
LANGUAGES = dict(
(filename[5:-4], os.path.join(dictionaries_root, filename))
for filename in os.listdir(dictionaries_root)
if filename.endswith('.dic'))
def language_fallback(language):
"""Get a fallback language available in our dictionaries.
http://www.unicode.org/reports/tr35/#Locale_Inheritance
We use the normal truncation inheritance. This function needs aliases
including scripts for languages with multiple regions available.
"""
parts = language.replace('-', '_').split('_')
while parts:
language = '_'.join(parts)
if language in LANGUAGES:
return language
parts.pop()
class AlternativeParser(object):
"""Parser of nonstandard hyphen pattern alternative.
The instance returns a special int with data about the current position in
the pattern when called with an odd value.
"""
def __init__(self, pattern, alternative):
alternative = alternative.split(',')
self.change = alternative[0]
self.index = int(alternative[1])
self.cut = int(alternative[2])
if pattern.startswith('.'):
self.index += 1
def __call__(self, value):
self.index -= 1
value = int(value)
if value & 1:
return DataInt(value, (self.change, self.index, self.cut))
else:
return value
class DataInt(int):
"""``int`` with some other data can be stuck to in a ``data`` attribute."""
def __new__(cls, value, data=None, reference=None):
"""Create a new ``DataInt``.
Call with ``reference=dataint_object`` to use the data from another
``DataInt``.
"""
obj = int.__new__(cls, value)
if reference and isinstance(reference, DataInt):
obj.data = reference.data
else:
obj.data = data
return obj
class HyphDict(object):
"""Hyphenation patterns."""
def __init__(self, filename):
"""Read a ``hyph_*.dic`` and parse its patterns.
:param filename: filename of hyph_*.dic to read
"""
self.patterns = {}
with open(filename, 'rb') as stream:
# see "man 4 hunspell", iscii-devanagari is not supported by python
charset = stream.readline().strip().decode('ascii')
if charset.lower() == 'microsoft-cp1251':
charset = 'cp1251'
for pattern in stream:
pattern = pattern.decode(charset).strip()
if (not pattern or
pattern.startswith('%') or
pattern.startswith('#')):
continue
# replace ^^hh with the real character
pattern = parse_hex(
lambda match: unichr(int(match.group(1), 16)), pattern)
# read nonstandard hyphen alternatives
if '/' in pattern:
pattern, alternative = pattern.split('/', 1)
factory = AlternativeParser(pattern, alternative)
else:
factory = int
tags, values = zip(*[
(string, factory(i or '0'))
for i, string in parse(pattern)])
# if only zeros, skip this pattern
if max(values) == 0:
continue
# chop zeros from beginning and end, and store start offset
start, end = 0, len(values)
while not values[start]:
start += 1
while not values[end - 1]:
end -= 1
self.patterns[''.join(tags)] = start, values[start:end]
self.cache = {}
self.maxlen = max(len(key) for key in self.patterns)
def positions(self, word):
"""Get a list of positions where the word can be hyphenated.
:param word: unicode string of the word to hyphenate
E.g. for the dutch word 'lettergrepen' this method returns ``[3, 6,
9]``.
Each position is a ``DataInt`` with a data attribute.
If the data attribute is not ``None``, it contains a tuple with
information about nonstandard hyphenation at that point: ``(change,
index, cut)``.
change
a string like ``'ff=f'``, that describes how hyphenation should
take place.
index
where to substitute the change, counting from the current point
cut
how many characters to remove while substituting the nonstandard
hyphenation
"""
word = word.lower()
points = self.cache.get(word)
if points is None:
pointed_word = '.%s.' % word
references = [0] * (len(pointed_word) + 1)
for i in range(len(pointed_word) - 1):
for j in range(
i + 1, min(i + self.maxlen, len(pointed_word)) + 1):
pattern = self.patterns.get(pointed_word[i:j])
if pattern:
offset, values = pattern
slice_ = slice(i + offset, i + offset + len(values))
references[slice_] = map(
max, values, references[slice_])
points = [
DataInt(i - 1, reference=reference)
for i, reference in enumerate(references) if reference % 2]
self.cache[word] = points
return points
class Pyphen(object):
"""Hyphenation class, with methods to hyphenate strings in various ways."""
def __init__(self, filename=None, lang=None, left=2, right=2, cache=True):
"""Create an hyphenation instance for given lang or filename.
:param filename: filename of hyph_*.dic to read
:param lang: lang of the included dict to use if no filename is given
:param left: minimum number of characters of the first syllabe
:param right: minimum number of characters of the last syllabe
:param cache: if ``True``, use cached copy of the hyphenation patterns
"""
if not filename:
filename = LANGUAGES[language_fallback(lang)]
self.left = left
self.right = right
if not cache or filename not in hdcache:
hdcache[filename] = HyphDict(filename)
self.hd = hdcache[filename]
def positions(self, word):
"""Get a list of positions where the word can be hyphenated.
:param word: unicode string of the word to hyphenate
See also ``HyphDict.positions``. The points that are too far to the
left or right are removed.
"""
right = len(word) - self.right
return [i for i in self.hd.positions(word) if self.left <= i <= right]
def iterate(self, word):
|
def wrap(self, word, width, hyphen='-'):
"""Get the longest possible first part and the last part of a word.
:param word: unicode string of the word to hyphenate
:param width: maximum length of the first part
:param hyphen: unicode string used as hyphen character
The first part has the hyphen already attached.
Returns ``None`` if there is no hyphenation point before ``width``, or
if the word could not be hyphenated.
"""
width -= len(hyphen)
for w1, w2 in self.iterate(word):
if len(w1) <= width:
return w1 + hyphen, w2
def inserted(self, word, hyphen='-'):
"""Get the word as a string with all the possible hyphens inserted.
:param word: unicode string of the word to hyphenate
:param hyphen: unicode string used as hyphen character
E.g. for the dutch word ``'lettergrepen'``, this method returns the
unicode string ``'let-ter-gre-pen'``. The hyphen string to use can be
given as the second parameter, that defaults to ``'-'``.
"""
word_list = list(word)
for position in reversed(self.positions(word)):
if position.data:
# get the nonstandard hyphenation data
change, index, cut = position.data
index += position
if word.isupper():
change = change.upper()
word_list[index:index + cut] = change.replace('=', hyphen)
else:
word_list.insert(position, hyphen)
return ''.join(word_list)
__call__ = iterate
| """Iterate over all hyphenation possibilities, the longest first.
:param word: unicode string of the word to hyphenate
"""
for position in reversed(self.positions(word)):
if position.data:
# get the nonstandard hyphenation data
change, index, cut = position.data
index += position
if word.isupper():
change = change.upper()
c1, c2 = change.split('=')
yield word[:index] + c1, c2 + word[index + cut:]
else:
yield word[:position], word[position:] | identifier_body |
hex.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15679
//! Hex binary-to-text encoding
pub use self::FromHexError::*;
use std::fmt;
use std::error;
/// A trait for converting a value to hexadecimal encoding
pub trait ToHex {
/// Converts the value of `self` to a hex value, returning the owned
/// string.
fn to_hex(&self) -> String;
}
const CHARS: &'static [u8] = b"0123456789abcdef";
impl ToHex for [u8] {
/// Turn a vector of `u8` bytes into a hexadecimal string.
///
/// # Examples
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::ToHex;
///
/// fn main () {
/// let str = [52,32].to_hex();
/// println!("{}", str);
/// }
/// ```
fn to_hex(&self) -> String {
let mut v = Vec::with_capacity(self.len() * 2);
for &byte in self {
v.push(CHARS[(byte >> 4) as usize]);
v.push(CHARS[(byte & 0xf) as usize]);
}
unsafe {
String::from_utf8_unchecked(v)
}
}
}
/// A trait for converting hexadecimal encoded values
pub trait FromHex {
/// Converts the value of `self`, interpreted as hexadecimal encoded data,
/// into an owned vector of bytes, returning the vector.
fn from_hex(&self) -> Result<Vec<u8>, FromHexError>;
}
/// Errors that can occur when decoding a hex encoded string
#[derive(Copy, Clone, Debug)]
pub enum FromHexError {
/// The input contained a character not part of the hex format
InvalidHexCharacter(char, usize),
/// The input had an invalid length
InvalidHexLength,
}
impl fmt::Display for FromHexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
InvalidHexCharacter(ch, idx) =>
write!(f, "Invalid character '{}' at position {}", ch, idx),
InvalidHexLength => write!(f, "Invalid input length"),
}
}
}
impl error::Error for FromHexError {
fn description(&self) -> &str {
match *self {
InvalidHexCharacter(_, _) => "invalid character",
InvalidHexLength => "invalid length",
}
}
}
impl FromHex for str {
/// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`)
/// to the byte values it encodes.
///
/// You can use the `String::from_utf8` function to turn a
/// `Vec<u8>` into a string with characters corresponding to those values.
///
/// # Examples
///
/// This converts a string literal to hexadecimal and back.
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::{FromHex, ToHex};
///
/// fn main () {
/// let hello_str = "Hello, World".as_bytes().to_hex();
/// println!("{}", hello_str);
/// let bytes = hello_str.from_hex().unwrap();
/// println!("{:?}", bytes);
/// let result_str = String::from_utf8(bytes).unwrap();
/// println!("{}", result_str);
/// }
/// ```
fn from_hex(&self) -> Result<Vec<u8>, FromHexError> {
// This may be an overestimate if there is any whitespace
let mut b = Vec::with_capacity(self.len() / 2);
let mut modulus = 0;
let mut buf = 0;
for (idx, byte) in self.bytes().enumerate() {
buf <<= 4;
match byte {
b'A'...b'F' => buf |= byte - b'A' + 10,
b'a'...b'f' => buf |= byte - b'a' + 10,
b'0'...b'9' => buf |= byte - b'0',
b' '|b'\r'|b'\n'|b'\t' => {
buf >>= 4;
continue
}
_ => return Err(InvalidHexCharacter(self.char_at(idx), idx)),
}
modulus += 1;
if modulus == 2 {
modulus = 0;
b.push(buf);
}
}
match modulus {
0 => Ok(b.into_iter().collect()),
_ => Err(InvalidHexLength),
}
}
}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use hex::{FromHex, ToHex};
#[test]
pub fn test_to_hex() {
assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172");
}
#[test]
pub fn test_from_hex_okay() {
assert_eq!("666f6f626172".from_hex().unwrap(),
b"foobar");
assert_eq!("666F6F626172".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_from_hex_odd_len() |
#[test]
pub fn test_from_hex_invalid_char() {
assert!("66y6".from_hex().is_err());
}
#[test]
pub fn test_from_hex_ignores_whitespace() {
assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_to_hex_all_bytes() {
for i in 0..256 {
assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize));
}
}
#[test]
pub fn test_from_hex_all_bytes() {
for i in 0..256 {
let ii: &[u8] = &[i as u8];
assert_eq!(format!("{:02x}", i as usize).from_hex()
.unwrap(),
ii);
assert_eq!(format!("{:02X}", i as usize).from_hex()
.unwrap(),
ii);
}
}
#[bench]
pub fn bench_to_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
b.iter(|| {
s.as_bytes().to_hex();
});
b.bytes = s.len() as u64;
}
#[bench]
pub fn bench_from_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
let sb = s.as_bytes().to_hex();
b.iter(|| {
sb.from_hex().unwrap();
});
b.bytes = sb.len() as u64;
}
}
| {
assert!("666".from_hex().is_err());
assert!("66 6".from_hex().is_err());
} | identifier_body |
hex.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15679 | use std::fmt;
use std::error;
/// A trait for converting a value to hexadecimal encoding
pub trait ToHex {
/// Converts the value of `self` to a hex value, returning the owned
/// string.
fn to_hex(&self) -> String;
}
const CHARS: &'static [u8] = b"0123456789abcdef";
impl ToHex for [u8] {
/// Turn a vector of `u8` bytes into a hexadecimal string.
///
/// # Examples
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::ToHex;
///
/// fn main () {
/// let str = [52,32].to_hex();
/// println!("{}", str);
/// }
/// ```
fn to_hex(&self) -> String {
let mut v = Vec::with_capacity(self.len() * 2);
for &byte in self {
v.push(CHARS[(byte >> 4) as usize]);
v.push(CHARS[(byte & 0xf) as usize]);
}
unsafe {
String::from_utf8_unchecked(v)
}
}
}
/// A trait for converting hexadecimal encoded values
pub trait FromHex {
/// Converts the value of `self`, interpreted as hexadecimal encoded data,
/// into an owned vector of bytes, returning the vector.
fn from_hex(&self) -> Result<Vec<u8>, FromHexError>;
}
/// Errors that can occur when decoding a hex encoded string
#[derive(Copy, Clone, Debug)]
pub enum FromHexError {
/// The input contained a character not part of the hex format
InvalidHexCharacter(char, usize),
/// The input had an invalid length
InvalidHexLength,
}
impl fmt::Display for FromHexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
InvalidHexCharacter(ch, idx) =>
write!(f, "Invalid character '{}' at position {}", ch, idx),
InvalidHexLength => write!(f, "Invalid input length"),
}
}
}
impl error::Error for FromHexError {
fn description(&self) -> &str {
match *self {
InvalidHexCharacter(_, _) => "invalid character",
InvalidHexLength => "invalid length",
}
}
}
impl FromHex for str {
/// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`)
/// to the byte values it encodes.
///
/// You can use the `String::from_utf8` function to turn a
/// `Vec<u8>` into a string with characters corresponding to those values.
///
/// # Examples
///
/// This converts a string literal to hexadecimal and back.
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::{FromHex, ToHex};
///
/// fn main () {
/// let hello_str = "Hello, World".as_bytes().to_hex();
/// println!("{}", hello_str);
/// let bytes = hello_str.from_hex().unwrap();
/// println!("{:?}", bytes);
/// let result_str = String::from_utf8(bytes).unwrap();
/// println!("{}", result_str);
/// }
/// ```
fn from_hex(&self) -> Result<Vec<u8>, FromHexError> {
// This may be an overestimate if there is any whitespace
let mut b = Vec::with_capacity(self.len() / 2);
let mut modulus = 0;
let mut buf = 0;
for (idx, byte) in self.bytes().enumerate() {
buf <<= 4;
match byte {
b'A'...b'F' => buf |= byte - b'A' + 10,
b'a'...b'f' => buf |= byte - b'a' + 10,
b'0'...b'9' => buf |= byte - b'0',
b' '|b'\r'|b'\n'|b'\t' => {
buf >>= 4;
continue
}
_ => return Err(InvalidHexCharacter(self.char_at(idx), idx)),
}
modulus += 1;
if modulus == 2 {
modulus = 0;
b.push(buf);
}
}
match modulus {
0 => Ok(b.into_iter().collect()),
_ => Err(InvalidHexLength),
}
}
}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use hex::{FromHex, ToHex};
#[test]
pub fn test_to_hex() {
assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172");
}
#[test]
pub fn test_from_hex_okay() {
assert_eq!("666f6f626172".from_hex().unwrap(),
b"foobar");
assert_eq!("666F6F626172".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_from_hex_odd_len() {
assert!("666".from_hex().is_err());
assert!("66 6".from_hex().is_err());
}
#[test]
pub fn test_from_hex_invalid_char() {
assert!("66y6".from_hex().is_err());
}
#[test]
pub fn test_from_hex_ignores_whitespace() {
assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_to_hex_all_bytes() {
for i in 0..256 {
assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize));
}
}
#[test]
pub fn test_from_hex_all_bytes() {
for i in 0..256 {
let ii: &[u8] = &[i as u8];
assert_eq!(format!("{:02x}", i as usize).from_hex()
.unwrap(),
ii);
assert_eq!(format!("{:02X}", i as usize).from_hex()
.unwrap(),
ii);
}
}
#[bench]
pub fn bench_to_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
b.iter(|| {
s.as_bytes().to_hex();
});
b.bytes = s.len() as u64;
}
#[bench]
pub fn bench_from_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
let sb = s.as_bytes().to_hex();
b.iter(|| {
sb.from_hex().unwrap();
});
b.bytes = sb.len() as u64;
}
} |
//! Hex binary-to-text encoding
pub use self::FromHexError::*;
| random_line_split |
hex.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15679
//! Hex binary-to-text encoding
pub use self::FromHexError::*;
use std::fmt;
use std::error;
/// A trait for converting a value to hexadecimal encoding
pub trait ToHex {
/// Converts the value of `self` to a hex value, returning the owned
/// string.
fn to_hex(&self) -> String;
}
const CHARS: &'static [u8] = b"0123456789abcdef";
impl ToHex for [u8] {
/// Turn a vector of `u8` bytes into a hexadecimal string.
///
/// # Examples
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::ToHex;
///
/// fn main () {
/// let str = [52,32].to_hex();
/// println!("{}", str);
/// }
/// ```
fn to_hex(&self) -> String {
let mut v = Vec::with_capacity(self.len() * 2);
for &byte in self {
v.push(CHARS[(byte >> 4) as usize]);
v.push(CHARS[(byte & 0xf) as usize]);
}
unsafe {
String::from_utf8_unchecked(v)
}
}
}
/// A trait for converting hexadecimal encoded values
pub trait FromHex {
/// Converts the value of `self`, interpreted as hexadecimal encoded data,
/// into an owned vector of bytes, returning the vector.
fn from_hex(&self) -> Result<Vec<u8>, FromHexError>;
}
/// Errors that can occur when decoding a hex encoded string
#[derive(Copy, Clone, Debug)]
pub enum FromHexError {
/// The input contained a character not part of the hex format
InvalidHexCharacter(char, usize),
/// The input had an invalid length
InvalidHexLength,
}
impl fmt::Display for FromHexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
InvalidHexCharacter(ch, idx) =>
write!(f, "Invalid character '{}' at position {}", ch, idx),
InvalidHexLength => write!(f, "Invalid input length"),
}
}
}
impl error::Error for FromHexError {
fn description(&self) -> &str {
match *self {
InvalidHexCharacter(_, _) => "invalid character",
InvalidHexLength => "invalid length",
}
}
}
impl FromHex for str {
/// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`)
/// to the byte values it encodes.
///
/// You can use the `String::from_utf8` function to turn a
/// `Vec<u8>` into a string with characters corresponding to those values.
///
/// # Examples
///
/// This converts a string literal to hexadecimal and back.
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::{FromHex, ToHex};
///
/// fn main () {
/// let hello_str = "Hello, World".as_bytes().to_hex();
/// println!("{}", hello_str);
/// let bytes = hello_str.from_hex().unwrap();
/// println!("{:?}", bytes);
/// let result_str = String::from_utf8(bytes).unwrap();
/// println!("{}", result_str);
/// }
/// ```
fn | (&self) -> Result<Vec<u8>, FromHexError> {
// This may be an overestimate if there is any whitespace
let mut b = Vec::with_capacity(self.len() / 2);
let mut modulus = 0;
let mut buf = 0;
for (idx, byte) in self.bytes().enumerate() {
buf <<= 4;
match byte {
b'A'...b'F' => buf |= byte - b'A' + 10,
b'a'...b'f' => buf |= byte - b'a' + 10,
b'0'...b'9' => buf |= byte - b'0',
b' '|b'\r'|b'\n'|b'\t' => {
buf >>= 4;
continue
}
_ => return Err(InvalidHexCharacter(self.char_at(idx), idx)),
}
modulus += 1;
if modulus == 2 {
modulus = 0;
b.push(buf);
}
}
match modulus {
0 => Ok(b.into_iter().collect()),
_ => Err(InvalidHexLength),
}
}
}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use hex::{FromHex, ToHex};
#[test]
pub fn test_to_hex() {
assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172");
}
#[test]
pub fn test_from_hex_okay() {
assert_eq!("666f6f626172".from_hex().unwrap(),
b"foobar");
assert_eq!("666F6F626172".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_from_hex_odd_len() {
assert!("666".from_hex().is_err());
assert!("66 6".from_hex().is_err());
}
#[test]
pub fn test_from_hex_invalid_char() {
assert!("66y6".from_hex().is_err());
}
#[test]
pub fn test_from_hex_ignores_whitespace() {
assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_to_hex_all_bytes() {
for i in 0..256 {
assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize));
}
}
#[test]
pub fn test_from_hex_all_bytes() {
for i in 0..256 {
let ii: &[u8] = &[i as u8];
assert_eq!(format!("{:02x}", i as usize).from_hex()
.unwrap(),
ii);
assert_eq!(format!("{:02X}", i as usize).from_hex()
.unwrap(),
ii);
}
}
#[bench]
pub fn bench_to_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
b.iter(|| {
s.as_bytes().to_hex();
});
b.bytes = s.len() as u64;
}
#[bench]
pub fn bench_from_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
let sb = s.as_bytes().to_hex();
b.iter(|| {
sb.from_hex().unwrap();
});
b.bytes = sb.len() as u64;
}
}
| from_hex | identifier_name |
97bbc733896c_create_oauthclient_tables.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Create oauthclient tables."""
import sqlalchemy as sa
import sqlalchemy_utils
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision = '97bbc733896c'
down_revision = '44ab9963e8cf'
branch_labels = ()
depends_on = '9848d0149abd'
def upgrade():
"""Upgrade database."""
op.create_table(
'oauthclient_remoteaccount',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column(
'extra_data',
sqlalchemy_utils.JSONType(),
nullable=False),
sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'client_id')
)
op.create_table(
'oauthclient_useridentity',
sa.Column('id', sa.String(length=255), nullable=False),
sa.Column('method', sa.String(length=255), nullable=False),
sa.Column('id_user', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id', 'method')
)
op.create_index(
'useridentity_id_user_method', 'oauthclient_useridentity',
['id_user', 'method'], unique=True
)
op.create_table(
'oauthclient_remotetoken',
sa.Column('id_remote_account', sa.Integer(), nullable=False),
sa.Column('token_type', sa.String(length=40), nullable=False),
sa.Column(
'access_token',
sqlalchemy_utils.EncryptedType(),
nullable=False),
sa.Column('secret', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(
['id_remote_account'], [u'oauthclient_remoteaccount.id'],
name='fk_oauthclient_remote_token_remote_account'
),
sa.PrimaryKeyConstraint('id_remote_account', 'token_type')
)
def downgrade():
| """Downgrade database."""
ctx = op.get_context()
insp = Inspector.from_engine(ctx.connection.engine)
op.drop_table('oauthclient_remotetoken')
for fk in insp.get_foreign_keys('oauthclient_useridentity'):
if fk['referred_table'] == 'accounts_user':
op.drop_constraint(
op.f(fk['name']),
'oauthclient_useridentity',
type_='foreignkey'
)
op.drop_index(
'useridentity_id_user_method',
table_name='oauthclient_useridentity')
op.drop_table('oauthclient_useridentity')
op.drop_table('oauthclient_remoteaccount') | identifier_body | |
97bbc733896c_create_oauthclient_tables.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Create oauthclient tables."""
import sqlalchemy as sa
import sqlalchemy_utils
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision = '97bbc733896c'
down_revision = '44ab9963e8cf'
branch_labels = ()
depends_on = '9848d0149abd'
def upgrade():
"""Upgrade database."""
op.create_table(
'oauthclient_remoteaccount',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column(
'extra_data',
sqlalchemy_utils.JSONType(),
nullable=False),
sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'client_id')
)
op.create_table(
'oauthclient_useridentity',
sa.Column('id', sa.String(length=255), nullable=False),
sa.Column('method', sa.String(length=255), nullable=False),
sa.Column('id_user', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id', 'method')
)
op.create_index(
'useridentity_id_user_method', 'oauthclient_useridentity',
['id_user', 'method'], unique=True
)
op.create_table(
'oauthclient_remotetoken',
sa.Column('id_remote_account', sa.Integer(), nullable=False),
sa.Column('token_type', sa.String(length=40), nullable=False),
sa.Column(
'access_token',
sqlalchemy_utils.EncryptedType(),
nullable=False),
sa.Column('secret', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(
['id_remote_account'], [u'oauthclient_remoteaccount.id'],
name='fk_oauthclient_remote_token_remote_account'
),
sa.PrimaryKeyConstraint('id_remote_account', 'token_type')
)
def downgrade():
"""Downgrade database."""
ctx = op.get_context()
insp = Inspector.from_engine(ctx.connection.engine)
op.drop_table('oauthclient_remotetoken')
for fk in insp.get_foreign_keys('oauthclient_useridentity'):
if fk['referred_table'] == 'accounts_user':
|
op.drop_index(
'useridentity_id_user_method',
table_name='oauthclient_useridentity')
op.drop_table('oauthclient_useridentity')
op.drop_table('oauthclient_remoteaccount')
| op.drop_constraint(
op.f(fk['name']),
'oauthclient_useridentity',
type_='foreignkey'
) | conditional_block |
97bbc733896c_create_oauthclient_tables.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Create oauthclient tables."""
import sqlalchemy as sa
import sqlalchemy_utils | # revision identifiers, used by Alembic.
revision = '97bbc733896c'
down_revision = '44ab9963e8cf'
branch_labels = ()
depends_on = '9848d0149abd'
def upgrade():
"""Upgrade database."""
op.create_table(
'oauthclient_remoteaccount',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column(
'extra_data',
sqlalchemy_utils.JSONType(),
nullable=False),
sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'client_id')
)
op.create_table(
'oauthclient_useridentity',
sa.Column('id', sa.String(length=255), nullable=False),
sa.Column('method', sa.String(length=255), nullable=False),
sa.Column('id_user', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id', 'method')
)
op.create_index(
'useridentity_id_user_method', 'oauthclient_useridentity',
['id_user', 'method'], unique=True
)
op.create_table(
'oauthclient_remotetoken',
sa.Column('id_remote_account', sa.Integer(), nullable=False),
sa.Column('token_type', sa.String(length=40), nullable=False),
sa.Column(
'access_token',
sqlalchemy_utils.EncryptedType(),
nullable=False),
sa.Column('secret', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(
['id_remote_account'], [u'oauthclient_remoteaccount.id'],
name='fk_oauthclient_remote_token_remote_account'
),
sa.PrimaryKeyConstraint('id_remote_account', 'token_type')
)
def downgrade():
"""Downgrade database."""
ctx = op.get_context()
insp = Inspector.from_engine(ctx.connection.engine)
op.drop_table('oauthclient_remotetoken')
for fk in insp.get_foreign_keys('oauthclient_useridentity'):
if fk['referred_table'] == 'accounts_user':
op.drop_constraint(
op.f(fk['name']),
'oauthclient_useridentity',
type_='foreignkey'
)
op.drop_index(
'useridentity_id_user_method',
table_name='oauthclient_useridentity')
op.drop_table('oauthclient_useridentity')
op.drop_table('oauthclient_remoteaccount') | from alembic import op
from sqlalchemy.engine.reflection import Inspector
| random_line_split |
97bbc733896c_create_oauthclient_tables.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Create oauthclient tables."""
import sqlalchemy as sa
import sqlalchemy_utils
from alembic import op
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision = '97bbc733896c'
down_revision = '44ab9963e8cf'
branch_labels = ()
depends_on = '9848d0149abd'
def upgrade():
"""Upgrade database."""
op.create_table(
'oauthclient_remoteaccount',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column(
'extra_data',
sqlalchemy_utils.JSONType(),
nullable=False),
sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'client_id')
)
op.create_table(
'oauthclient_useridentity',
sa.Column('id', sa.String(length=255), nullable=False),
sa.Column('method', sa.String(length=255), nullable=False),
sa.Column('id_user', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id'], ),
sa.PrimaryKeyConstraint('id', 'method')
)
op.create_index(
'useridentity_id_user_method', 'oauthclient_useridentity',
['id_user', 'method'], unique=True
)
op.create_table(
'oauthclient_remotetoken',
sa.Column('id_remote_account', sa.Integer(), nullable=False),
sa.Column('token_type', sa.String(length=40), nullable=False),
sa.Column(
'access_token',
sqlalchemy_utils.EncryptedType(),
nullable=False),
sa.Column('secret', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(
['id_remote_account'], [u'oauthclient_remoteaccount.id'],
name='fk_oauthclient_remote_token_remote_account'
),
sa.PrimaryKeyConstraint('id_remote_account', 'token_type')
)
def | ():
"""Downgrade database."""
ctx = op.get_context()
insp = Inspector.from_engine(ctx.connection.engine)
op.drop_table('oauthclient_remotetoken')
for fk in insp.get_foreign_keys('oauthclient_useridentity'):
if fk['referred_table'] == 'accounts_user':
op.drop_constraint(
op.f(fk['name']),
'oauthclient_useridentity',
type_='foreignkey'
)
op.drop_index(
'useridentity_id_user_method',
table_name='oauthclient_useridentity')
op.drop_table('oauthclient_useridentity')
op.drop_table('oauthclient_remoteaccount')
| downgrade | identifier_name |
40.rs | /* Problem 40: Champernowne's constant
*
* An irrational decimal fraction is created by concatenating the positive integers:
*
* 0.123456789101112131415161718192021...
*
* It can be seen that the 12th digit of the fractional part is 1.
*
* If dn represents the nth digit of the fractional part, find the value of the following
* expression.
*
* d1 × d10 × d100 × d1000 × d10000 × d100000 × d1000000 */
use shared::digits;
fn main() | let result: u32 = [1, 10, 100, 1000, 10000, 100000, 1000000]
.iter()
.map(|&position| {
(1..)
.flat_map(digits::new::<_, u32>)
.nth(position as usize - 1)
.unwrap()
})
.fold(1, |acc, n| acc * n);
println!("{}", result);
}
| {
| identifier_name |
40.rs | /* Problem 40: Champernowne's constant
*
* An irrational decimal fraction is created by concatenating the positive integers:
*
* 0.123456789101112131415161718192021...
*
* It can be seen that the 12th digit of the fractional part is 1.
*
* If dn represents the nth digit of the fractional part, find the value of the following
* expression. | *
* d1 × d10 × d100 × d1000 × d10000 × d100000 × d1000000 */
use shared::digits;
fn main() {
let result: u32 = [1, 10, 100, 1000, 10000, 100000, 1000000]
.iter()
.map(|&position| {
(1..)
.flat_map(digits::new::<_, u32>)
.nth(position as usize - 1)
.unwrap()
})
.fold(1, |acc, n| acc * n);
println!("{}", result);
} | random_line_split | |
40.rs | /* Problem 40: Champernowne's constant
*
* An irrational decimal fraction is created by concatenating the positive integers:
*
* 0.123456789101112131415161718192021...
*
* It can be seen that the 12th digit of the fractional part is 1.
*
* If dn represents the nth digit of the fractional part, find the value of the following
* expression.
*
* d1 × d10 × d100 × d1000 × d10000 × d100000 × d1000000 */
use shared::digits;
fn main() {
| let result: u32 = [1, 10, 100, 1000, 10000, 100000, 1000000]
.iter()
.map(|&position| {
(1..)
.flat_map(digits::new::<_, u32>)
.nth(position as usize - 1)
.unwrap()
})
.fold(1, |acc, n| acc * n);
println!("{}", result);
}
| identifier_body | |
hw5_start.py | # CIS 410/510pm
# Homework 5 beta 0.0.1
# Cameron Palk
# May 2016
#
# Special thanks to Daniel Lowd for the skeletor code
import sys
import tokenize
from functools import reduce
global_card = []
num_vars = 0
''' Calc Strides
'''
def calcStrides( scope ):
rev_scope = list( reversed( scope ) )
res = [ 1 ] + [ 0 ] * ( len( scope ) - 1 )
for idx in range( 1, len( rev_scope ) ):
res[ idx ] = res[ idx - 1 ] * global_card[ rev_scope[ idx - 1 ] ]
stride = list( reversed( res ) )
return { scope[i] : stride[i] for i in range( len( scope ) ) }
# FACTOR CLASS DEFINITION
class | ( dict ):
# Constructor
def __init__(self, scope_, vals_):
self.scope = scope_
self.vals = vals_
self.stride = calcStrides( scope_ )
#
# Are two object EQual, True of False
def __eq__(self, other):
return (self.scope == other.scope and
self.vals == other.vals and
self.stride == other.stride )
#
# A string used for printing the Factor Objects
def __repr__( self ):
style = "\n{0}\nScope: {1}\nStride: {2}\nCard: {3}\nVals:\n{4}\n{0}\n"
vertBar = ''.join( ['-'] * 50 )
return style.format( vertBar, self.scope, self.stride,
{ v : global_card[v] for v in self.scope },
'\n'.join( [ str( round( e, 3 ) ) for e in self.vals ] ) )
#
# What the '*' character does between our objects
def __mul__( self, other ):
new_scope = list( set( self.scope ).union( set( other.scope ) ) )
assignment = { e : 0 for e in new_scope }
card = { u : global_card[ u ] for u in new_scope }
val_count = reduce( lambda agg, x: agg * global_card[x], new_scope, 1 )
new_vals = [ 0 ] * val_count
idx1 = idx2 = 0
for i in range( 0, val_count ):
new_vals[ i ] = self.vals[ idx1 ] * other.vals[ idx2 ]
for rv in reversed( new_scope ):
if assignment[ rv ] == card[ rv ] - 1:
idx1 -= assignment[ rv ] * self.stride [ rv ] if rv in self.stride else 0
idx2 -= assignment[ rv ] * other.stride[ rv ] if rv in other.stride else 0
assignment[ rv ] = 0
else:
idx1 += self.stride [ rv ] if rv in self.scope else 0
idx2 += other.stride[ rv ] if rv in other.scope else 0
assignment[ rv ] += 1
break
#
return Factor( new_scope, new_vals )
#
# Sum out the variable and return a new Factor
def sumOut( self ):
# TODO Sum out a RV
return
#
# Helper Functions:
def containsRV( self, rv ):
return rv in self.scope
#
# END FACTOR CLASS DEFINITION
# IGNORE DANIELS READER BELOW
#
# Read in all tokens from stdin. Save it to a (global) buf that we use
# later. (Is there a better way to do this? Almost certainly.)
curr_token = 0
token_buf = []
def read_tokens():
global token_buf
for line in sys.stdin:
token_buf.extend(line.strip().split())
#
def next_token():
global curr_token
global token_buf
curr_token += 1
return token_buf[ curr_token - 1 ]
#
def next_int():
return int( next_token() )
#
def next_float():
return float( next_token() )
#
def read_model():
# Read in all tokens and throw away the first (expected to be "MARKOV")
read_tokens()
s = next_token()
# Get number of vars, followed by their ranges
global num_vars
num_vars = next_int()
global global_card
global_card = [ next_int() for i in range( num_vars ) ]
# Get number and scopes of factors
num_factors = int(next_token())
factor_scopes = []
for i in range(num_factors):
factor_scopes.append( [ next_int() for i in range( next_int() ) ] )
# Read in all factor values
factor_vals = []
for i in range(num_factors):
factor_vals.append( [ next_float() for i in range( next_int() ) ] )
return [ Factor(s,v) for (s,v) in zip( factor_scopes, factor_vals ) ]
#
# IGNORE DANIELS READER ABOVE
''' Factor Count With Var
@input factors Factors we want to look through
@input rv A RV
@return [int] The number of times the rv occures in the factors scopes
'''
def factorCountWithVar( factors, rv ):
return sum( [ 1 if f.containsRV( rv ) else 0 for f in factors ] )
''' Factor Stats
'''
def factorStats( factors, possibleVariables ):
return { v: factorCountWithVar(factors,v) for v in range( num_vars ) if v in possibleVariables }
''' Compute Partition Function
@input factors An array of Factor objects representing the graph
@return [float] The partition function ( why is it called a function? )
'''
def computePartitionFunction( factors ):
# TODO: Implement a faster way to computer partition function by summing out variables
f = reduce( Factor.__mul__, factors )
z = sum( f.vals )
return z
#
''' Main '''
def main():
# Read file
factors = read_model()
# Computer partition function
z = computePartitionFunction( factors )
# Print results
print( "Z =", z )
return
# Run main if this module is being run directly
if __name__ == '__main__':
main()
| Factor | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.