file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
alloc_support.rs | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Support for the `alloc` crate, when available.
use core::mem::MaybeUninit;
use core::pin::Pin;
use alloc::boxed::Box;
use alloc::rc::Rc;
use alloc::sync::Arc;
use crate::move_ref::DerefMove;
use crate::move_ref::MoveRef;
use crate::new::EmplaceUnpinned;
use crate::new::TryNew;
use crate::slot::DroppingSlot;
unsafe impl<T> DerefMove for Box<T> {
type Storage = Box<MaybeUninit<T>>;
#[inline]
fn deref_move<'frame>(
self,
storage: DroppingSlot<'frame, Self::Storage>,
) -> MoveRef<'frame, Self::Target>
where | unsafe { Box::from_raw(Box::into_raw(self).cast::<MaybeUninit<T>>()) };
let (storage, drop_flag) = storage.put(cast);
unsafe { MoveRef::new_unchecked(storage.assume_init_mut(), drop_flag) }
}
}
impl<T> EmplaceUnpinned<T> for Pin<Box<T>> {
fn try_emplace<N: TryNew<Output = T>>(n: N) -> Result<Self, N::Error> {
let mut uninit = Box::new(MaybeUninit::<T>::uninit());
unsafe {
let pinned = Pin::new_unchecked(&mut *uninit);
n.try_new(pinned)?;
Ok(Pin::new_unchecked(Box::from_raw(
Box::into_raw(uninit).cast::<T>(),
)))
}
}
}
impl<T> EmplaceUnpinned<T> for Pin<Rc<T>> {
fn try_emplace<N: TryNew<Output = T>>(n: N) -> Result<Self, N::Error> {
let uninit = Rc::new(MaybeUninit::<T>::uninit());
unsafe {
let pinned = Pin::new_unchecked(&mut *(Rc::as_ptr(&uninit) as *mut _));
n.try_new(pinned)?;
Ok(Pin::new_unchecked(Rc::from_raw(
Rc::into_raw(uninit).cast::<T>(),
)))
}
}
}
impl<T> EmplaceUnpinned<T> for Pin<Arc<T>> {
fn try_emplace<N: TryNew<Output = T>>(n: N) -> Result<Self, N::Error> {
let uninit = Arc::new(MaybeUninit::<T>::uninit());
unsafe {
let pinned = Pin::new_unchecked(&mut *(Arc::as_ptr(&uninit) as *mut _));
n.try_new(pinned)?;
Ok(Pin::new_unchecked(Arc::from_raw(
Arc::into_raw(uninit).cast::<T>(),
)))
}
}
} | Self: 'frame,
{
let cast = | random_line_split |
smtpmock.py | # -*- coding: utf-8 -*-
"""
2016-01-20 Cornelius Kölbel <cornelius@privacyidea.org>
Support STARTTLS mock
2015-01-30 Cornelius Kölbel <cornelius@privacyidea.org>
Change responses.py to be able to run with SMTP
Original responses.py is:
Copyright 2013 Dropbox, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import (
absolute_import, print_function, division, unicode_literals
)
import six
import smtplib
try:
from inspect import formatargspec, getfullargspec as getargspec
except ImportError:
from inspect import formatargspec, getargspec
from collections import namedtuple, Sequence, Sized
from functools import update_wrapper
from smtplib import SMTPException
Call = namedtuple('Call', ['request', 'response'])
_wrapper_template = """\
def wrapper%(signature)s:
with smtpmock:
return func%(funcargs)s
"""
def get_wrapped(func, wrapper_template, evaldict):
# Preserve the argspec for the wrapped function so that testing
# tools such as pytest can continue to use their fixture injection.
args = getargspec(func)
values = args.args[-len(args.defaults):] if args.defaults else None
signature = formatargspec(*args)
is_bound_method = hasattr(func, '__self__')
if is_bound_method:
args.args = args.args[1:] # Omit 'self'
callargs = formatargspec(*args, formatvalue=lambda v: '=' + v)
ctx = {'signature': signature, 'funcargs': callargs}
six.exec_(wrapper_template % ctx, evaldict)
wrapper = evaldict['wrapper']
update_wrapper(wrapper, func)
if is_bound_method:
wrapper = wrapper.__get__(func.__self__, type(func.__self__))
return wrapper
class CallList(Sequence, Sized):
def __init__(self):
self._calls = []
def __iter__(self):
return iter(self._calls)
def __len__(self):
return len(self._calls)
def __getitem__(self, idx):
return self._calls[idx]
def setdata(self, request, response):
self._calls.append(Call(request, response))
def reset(self):
self._calls = []
class SmtpMock(object):
def __init__(self):
self._calls = CallList()
self.sent_message = None
self.smtp_ssl = False
self.reset()
def reset(self):
self._request_data = {}
self._calls.reset()
def get_smtp_ssl(self):
return self.smtp_ssl
def setdata(self, response=None, authenticated=True,
config=None, exception=False, support_tls=True):
if response is None:
response = {}
config = config or {}
self.support_tls = support_tls
self.exception = exception
self._request_data = {
'response': response,
'authenticated': authenticated,
'config': config,
'recipient': config.get("MAILTO")
}
def get_sent_message(self):
return self.sent_message
@property
def calls(self):
return self._calls
def __enter__(self):
self.start()
def __exit__(self, *args):
self.stop()
self.reset()
def activate(self, func):
evaldict = {'smtpmock': self, 'func': func}
return get_wrapped(func, _wrapper_template, evaldict)
def _on_request(self, SMTP_instance, sender, recipient, msg):
# mangle request packet
response = self._request_data.get("response")
if not self._request_data.get("authenticated"):
response = {self._request_data.get("recipient"):
(530, "Authorization required (#5.7.1)")}
return response
def _on_login(self, SMTP_instance, username, password):
# mangle request packet
if self._request_data.get("authenticated"):
response = (235, "Authentication successful.")
else:
response = (535, "authentication failed (#5.7.1)")
return {self._request_data.get("recipient"): response}
# def _on_init(self, SMTP_instance, host, port=25, timeout=3):
def _on_init(self, *args, **kwargs):
SMTP_instance = args[0]
host = args[1]
if isinstance(SMTP_instance, smtplib.SMTP_SSL):
# in case we need sth. to do with SMTL_SSL
self.smtp_ssl = True
# mangle request packet
self.timeout = kwargs.get("timeout", 10)
self.port = kwargs.get("port", 25)
self.esmtp_features = {}
return None
@staticmethod
def _on_debuglevel(SMTP_instance, level):
return None
@staticmethod
def _on_quit(SMTP_instance):
return None
def _on_starttls(self, SMTP_instance):
if self.exception:
raise SMTPException("MOCK TLS ERROR")
if not self.support_tls:
raise SMTPException("The SMTP Server does not support TLS.")
return None
def start(self):
import mock
def unbound_on_send(SMTP, sender, recipient, msg, *a, **kwargs):
self.sent_message = msg
return self._on_request(SMTP, sender, recipient, msg, *a, **kwargs)
self._patcher = mock.patch('smtplib.SMTP.sendmail',
unbound_on_send)
self._patcher.start()
def unbound_on_login(SMTP, username, password, *a, **kwargs):
return self._on_login(SMTP, username, password, *a, **kwargs)
self._patcher2 = mock.patch('smtplib.SMTP.login',
unbound_on_login)
self._patcher2.start()
def unbound_on_init(SMTP, server, *a, **kwargs):
return self._on_init(SMTP, server, *a, **kwargs)
self._patcher3 = mock.patch('smtplib.SMTP.__init__',
unbound_on_init)
self._patcher3.start()
def unbound_on_debuglevel(SMTP, level, *a, **kwargs):
return self._on_debuglevel(SMTP, level, *a, **kwargs)
self._patcher4 = mock.patch('smtplib.SMTP.debuglevel',
unbound_on_debuglevel)
self._patcher4.start()
def unbound_on_quit(SMTP, *a, **kwargs):
return self._on_quit(SMTP, *a, **kwargs)
def unbound_on_starttls(SMTP, *a, **kwargs):
return self._on_starttls(SMTP, *a, **kwargs)
self._patcher5 = mock.patch('smtplib.SMTP.quit',
unbound_on_quit)
self._patcher5.start()
def unbound_on_empty(SMTP, *a, **kwargs):
return None
self._patcher6 = mock.patch('smtplib.SMTP.ehlo',
unbound_on_empty)
self._patcher6.start()
self._patcher7 = mock.patch('smtplib.SMTP.close',
unbound_on_empty)
self._patcher7.start()
self._patcher8 = mock.patch('smtplib.SMTP.starttls',
unbound_on_starttls)
self._patcher8.start()
def stop(self): | self._patcher.stop()
self._patcher2.stop()
self._patcher3.stop()
self._patcher4.stop()
self._patcher5.stop()
self._patcher6.stop()
self._patcher7.stop()
self._patcher8.stop()
# expose default mock namespace
mock = _default_mock = SmtpMock()
__all__ = []
for __attr in (a for a in dir(_default_mock) if not a.startswith('_')):
__all__.append(__attr)
globals()[__attr] = getattr(_default_mock, __attr) | random_line_split | |
smtpmock.py | # -*- coding: utf-8 -*-
"""
2016-01-20 Cornelius Kölbel <cornelius@privacyidea.org>
Support STARTTLS mock
2015-01-30 Cornelius Kölbel <cornelius@privacyidea.org>
Change responses.py to be able to run with SMTP
Original responses.py is:
Copyright 2013 Dropbox, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import (
absolute_import, print_function, division, unicode_literals
)
import six
import smtplib
try:
from inspect import formatargspec, getfullargspec as getargspec
except ImportError:
from inspect import formatargspec, getargspec
from collections import namedtuple, Sequence, Sized
from functools import update_wrapper
from smtplib import SMTPException
Call = namedtuple('Call', ['request', 'response'])
_wrapper_template = """\
def wrapper%(signature)s:
with smtpmock:
return func%(funcargs)s
"""
def get_wrapped(func, wrapper_template, evaldict):
# Preserve the argspec for the wrapped function so that testing
# tools such as pytest can continue to use their fixture injection.
args = getargspec(func)
values = args.args[-len(args.defaults):] if args.defaults else None
signature = formatargspec(*args)
is_bound_method = hasattr(func, '__self__')
if is_bound_method:
args.args = args.args[1:] # Omit 'self'
callargs = formatargspec(*args, formatvalue=lambda v: '=' + v)
ctx = {'signature': signature, 'funcargs': callargs}
six.exec_(wrapper_template % ctx, evaldict)
wrapper = evaldict['wrapper']
update_wrapper(wrapper, func)
if is_bound_method:
wrapper = wrapper.__get__(func.__self__, type(func.__self__))
return wrapper
class CallList(Sequence, Sized):
def __init__(self):
self._calls = []
def __iter__(self):
return iter(self._calls)
def __len__(self):
return len(self._calls)
def __getitem__(self, idx):
return self._calls[idx]
def setdata(self, request, response):
self._calls.append(Call(request, response))
def reset(self):
self._calls = []
class SmtpMock(object):
def __init__(self):
self._calls = CallList()
self.sent_message = None
self.smtp_ssl = False
self.reset()
def reset(self):
self._request_data = {}
self._calls.reset()
def get_smtp_ssl(self):
return self.smtp_ssl
def setdata(self, response=None, authenticated=True,
config=None, exception=False, support_tls=True):
if response is None:
response = {}
config = config or {}
self.support_tls = support_tls
self.exception = exception
self._request_data = {
'response': response,
'authenticated': authenticated,
'config': config,
'recipient': config.get("MAILTO")
}
def get_sent_message(self):
return self.sent_message
@property
def calls(self):
return self._calls
def __enter__(self):
self.start()
def __exit__(self, *args):
self.stop()
self.reset()
def activate(self, func):
evaldict = {'smtpmock': self, 'func': func}
return get_wrapped(func, _wrapper_template, evaldict)
def _on_request(self, SMTP_instance, sender, recipient, msg):
# mangle request packet
response = self._request_data.get("response")
if not self._request_data.get("authenticated"):
response = {self._request_data.get("recipient"):
(530, "Authorization required (#5.7.1)")}
return response
def _on_login(self, SMTP_instance, username, password):
# mangle request packet
if self._request_data.get("authenticated"):
response = (235, "Authentication successful.")
else:
response = (535, "authentication failed (#5.7.1)")
return {self._request_data.get("recipient"): response}
# def _on_init(self, SMTP_instance, host, port=25, timeout=3):
def _on_init(self, *args, **kwargs):
SMTP_instance = args[0]
host = args[1]
if isinstance(SMTP_instance, smtplib.SMTP_SSL):
# in case we need sth. to do with SMTL_SSL
self.smtp_ssl = True
# mangle request packet
self.timeout = kwargs.get("timeout", 10)
self.port = kwargs.get("port", 25)
self.esmtp_features = {}
return None
@staticmethod
def _on_debuglevel(SMTP_instance, level):
return None
@staticmethod
def _on_quit(SMTP_instance):
return None
def _on_starttls(self, SMTP_instance):
if self.exception:
raise SMTPException("MOCK TLS ERROR")
if not self.support_tls:
raise SMTPException("The SMTP Server does not support TLS.")
return None
def start(self):
import mock
def unbound_on_send(SMTP, sender, recipient, msg, *a, **kwargs):
self.sent_message = msg
return self._on_request(SMTP, sender, recipient, msg, *a, **kwargs)
self._patcher = mock.patch('smtplib.SMTP.sendmail',
unbound_on_send)
self._patcher.start()
def unbound_on_login(SMTP, username, password, *a, **kwargs):
return self._on_login(SMTP, username, password, *a, **kwargs)
self._patcher2 = mock.patch('smtplib.SMTP.login',
unbound_on_login)
self._patcher2.start()
def un | MTP, server, *a, **kwargs):
return self._on_init(SMTP, server, *a, **kwargs)
self._patcher3 = mock.patch('smtplib.SMTP.__init__',
unbound_on_init)
self._patcher3.start()
def unbound_on_debuglevel(SMTP, level, *a, **kwargs):
return self._on_debuglevel(SMTP, level, *a, **kwargs)
self._patcher4 = mock.patch('smtplib.SMTP.debuglevel',
unbound_on_debuglevel)
self._patcher4.start()
def unbound_on_quit(SMTP, *a, **kwargs):
return self._on_quit(SMTP, *a, **kwargs)
def unbound_on_starttls(SMTP, *a, **kwargs):
return self._on_starttls(SMTP, *a, **kwargs)
self._patcher5 = mock.patch('smtplib.SMTP.quit',
unbound_on_quit)
self._patcher5.start()
def unbound_on_empty(SMTP, *a, **kwargs):
return None
self._patcher6 = mock.patch('smtplib.SMTP.ehlo',
unbound_on_empty)
self._patcher6.start()
self._patcher7 = mock.patch('smtplib.SMTP.close',
unbound_on_empty)
self._patcher7.start()
self._patcher8 = mock.patch('smtplib.SMTP.starttls',
unbound_on_starttls)
self._patcher8.start()
def stop(self):
self._patcher.stop()
self._patcher2.stop()
self._patcher3.stop()
self._patcher4.stop()
self._patcher5.stop()
self._patcher6.stop()
self._patcher7.stop()
self._patcher8.stop()
# expose default mock namespace
mock = _default_mock = SmtpMock()
__all__ = []
for __attr in (a for a in dir(_default_mock) if not a.startswith('_')):
__all__.append(__attr)
globals()[__attr] = getattr(_default_mock, __attr)
| bound_on_init(S | identifier_name |
smtpmock.py | # -*- coding: utf-8 -*-
"""
2016-01-20 Cornelius Kölbel <cornelius@privacyidea.org>
Support STARTTLS mock
2015-01-30 Cornelius Kölbel <cornelius@privacyidea.org>
Change responses.py to be able to run with SMTP
Original responses.py is:
Copyright 2013 Dropbox, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import (
absolute_import, print_function, division, unicode_literals
)
import six
import smtplib
try:
from inspect import formatargspec, getfullargspec as getargspec
except ImportError:
from inspect import formatargspec, getargspec
from collections import namedtuple, Sequence, Sized
from functools import update_wrapper
from smtplib import SMTPException
Call = namedtuple('Call', ['request', 'response'])
_wrapper_template = """\
def wrapper%(signature)s:
with smtpmock:
return func%(funcargs)s
"""
def get_wrapped(func, wrapper_template, evaldict):
# Preserve the argspec for the wrapped function so that testing
# tools such as pytest can continue to use their fixture injection.
args = getargspec(func)
values = args.args[-len(args.defaults):] if args.defaults else None
signature = formatargspec(*args)
is_bound_method = hasattr(func, '__self__')
if is_bound_method:
args.args = args.args[1:] # Omit 'self'
callargs = formatargspec(*args, formatvalue=lambda v: '=' + v)
ctx = {'signature': signature, 'funcargs': callargs}
six.exec_(wrapper_template % ctx, evaldict)
wrapper = evaldict['wrapper']
update_wrapper(wrapper, func)
if is_bound_method:
wrapper = wrapper.__get__(func.__self__, type(func.__self__))
return wrapper
class CallList(Sequence, Sized):
def __init__(self):
self._calls = []
def __iter__(self):
return iter(self._calls)
def __len__(self):
return len(self._calls)
def __getitem__(self, idx):
return self._calls[idx]
def setdata(self, request, response):
self._calls.append(Call(request, response))
def reset(self):
self._calls = []
class SmtpMock(object):
def __init__(self):
self._calls = CallList()
self.sent_message = None
self.smtp_ssl = False
self.reset()
def reset(self):
self._request_data = {}
self._calls.reset()
def get_smtp_ssl(self):
return self.smtp_ssl
def setdata(self, response=None, authenticated=True,
config=None, exception=False, support_tls=True):
if response is None:
response = {}
config = config or {}
self.support_tls = support_tls
self.exception = exception
self._request_data = {
'response': response,
'authenticated': authenticated,
'config': config,
'recipient': config.get("MAILTO")
}
def get_sent_message(self):
return self.sent_message
@property
def calls(self):
return self._calls
def __enter__(self):
self.start()
def __exit__(self, *args):
self.stop()
self.reset()
def activate(self, func):
evaldict = {'smtpmock': self, 'func': func}
return get_wrapped(func, _wrapper_template, evaldict)
def _on_request(self, SMTP_instance, sender, recipient, msg):
# mangle request packet
response = self._request_data.get("response")
if not self._request_data.get("authenticated"):
response = {self._request_data.get("recipient"):
(530, "Authorization required (#5.7.1)")}
return response
def _on_login(self, SMTP_instance, username, password):
# mangle request packet
if self._request_data.get("authenticated"):
response = (235, "Authentication successful.")
else:
response = (535, "authentication failed (#5.7.1)")
return {self._request_data.get("recipient"): response}
# def _on_init(self, SMTP_instance, host, port=25, timeout=3):
def _on_init(self, *args, **kwargs):
SMTP_instance = args[0]
host = args[1]
if isinstance(SMTP_instance, smtplib.SMTP_SSL):
# in case we need sth. to do with SMTL_SSL
se | # mangle request packet
self.timeout = kwargs.get("timeout", 10)
self.port = kwargs.get("port", 25)
self.esmtp_features = {}
return None
@staticmethod
def _on_debuglevel(SMTP_instance, level):
return None
@staticmethod
def _on_quit(SMTP_instance):
return None
def _on_starttls(self, SMTP_instance):
if self.exception:
raise SMTPException("MOCK TLS ERROR")
if not self.support_tls:
raise SMTPException("The SMTP Server does not support TLS.")
return None
def start(self):
import mock
def unbound_on_send(SMTP, sender, recipient, msg, *a, **kwargs):
self.sent_message = msg
return self._on_request(SMTP, sender, recipient, msg, *a, **kwargs)
self._patcher = mock.patch('smtplib.SMTP.sendmail',
unbound_on_send)
self._patcher.start()
def unbound_on_login(SMTP, username, password, *a, **kwargs):
return self._on_login(SMTP, username, password, *a, **kwargs)
self._patcher2 = mock.patch('smtplib.SMTP.login',
unbound_on_login)
self._patcher2.start()
def unbound_on_init(SMTP, server, *a, **kwargs):
return self._on_init(SMTP, server, *a, **kwargs)
self._patcher3 = mock.patch('smtplib.SMTP.__init__',
unbound_on_init)
self._patcher3.start()
def unbound_on_debuglevel(SMTP, level, *a, **kwargs):
return self._on_debuglevel(SMTP, level, *a, **kwargs)
self._patcher4 = mock.patch('smtplib.SMTP.debuglevel',
unbound_on_debuglevel)
self._patcher4.start()
def unbound_on_quit(SMTP, *a, **kwargs):
return self._on_quit(SMTP, *a, **kwargs)
def unbound_on_starttls(SMTP, *a, **kwargs):
return self._on_starttls(SMTP, *a, **kwargs)
self._patcher5 = mock.patch('smtplib.SMTP.quit',
unbound_on_quit)
self._patcher5.start()
def unbound_on_empty(SMTP, *a, **kwargs):
return None
self._patcher6 = mock.patch('smtplib.SMTP.ehlo',
unbound_on_empty)
self._patcher6.start()
self._patcher7 = mock.patch('smtplib.SMTP.close',
unbound_on_empty)
self._patcher7.start()
self._patcher8 = mock.patch('smtplib.SMTP.starttls',
unbound_on_starttls)
self._patcher8.start()
def stop(self):
self._patcher.stop()
self._patcher2.stop()
self._patcher3.stop()
self._patcher4.stop()
self._patcher5.stop()
self._patcher6.stop()
self._patcher7.stop()
self._patcher8.stop()
# expose default mock namespace
mock = _default_mock = SmtpMock()
__all__ = []
for __attr in (a for a in dir(_default_mock) if not a.startswith('_')):
__all__.append(__attr)
globals()[__attr] = getattr(_default_mock, __attr)
| lf.smtp_ssl = True
| conditional_block |
smtpmock.py | # -*- coding: utf-8 -*-
"""
2016-01-20 Cornelius Kölbel <cornelius@privacyidea.org>
Support STARTTLS mock
2015-01-30 Cornelius Kölbel <cornelius@privacyidea.org>
Change responses.py to be able to run with SMTP
Original responses.py is:
Copyright 2013 Dropbox, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import (
absolute_import, print_function, division, unicode_literals
)
import six
import smtplib
try:
from inspect import formatargspec, getfullargspec as getargspec
except ImportError:
from inspect import formatargspec, getargspec
from collections import namedtuple, Sequence, Sized
from functools import update_wrapper
from smtplib import SMTPException
Call = namedtuple('Call', ['request', 'response'])
_wrapper_template = """\
def wrapper%(signature)s:
with smtpmock:
return func%(funcargs)s
"""
def get_wrapped(func, wrapper_template, evaldict):
# Preserve the argspec for the wrapped function so that testing
# tools such as pytest can continue to use their fixture injection.
args = getargspec(func)
values = args.args[-len(args.defaults):] if args.defaults else None
signature = formatargspec(*args)
is_bound_method = hasattr(func, '__self__')
if is_bound_method:
args.args = args.args[1:] # Omit 'self'
callargs = formatargspec(*args, formatvalue=lambda v: '=' + v)
ctx = {'signature': signature, 'funcargs': callargs}
six.exec_(wrapper_template % ctx, evaldict)
wrapper = evaldict['wrapper']
update_wrapper(wrapper, func)
if is_bound_method:
wrapper = wrapper.__get__(func.__self__, type(func.__self__))
return wrapper
class CallList(Sequence, Sized):
def __init__(self):
self._calls = []
def __iter__(self):
return iter(self._calls)
def __len__(self):
return len(self._calls)
def __getitem__(self, idx):
return self._calls[idx]
def setdata(self, request, response):
self._calls.append(Call(request, response))
def reset(self):
self._calls = []
class SmtpMock(object):
def __init__(self):
self._calls = CallList()
self.sent_message = None
self.smtp_ssl = False
self.reset()
def reset(self):
self._request_data = {}
self._calls.reset()
def get_smtp_ssl(self):
return self.smtp_ssl
def setdata(self, response=None, authenticated=True,
config=None, exception=False, support_tls=True):
if response is None:
response = {}
config = config or {}
self.support_tls = support_tls
self.exception = exception
self._request_data = {
'response': response,
'authenticated': authenticated,
'config': config,
'recipient': config.get("MAILTO")
}
def get_sent_message(self):
return self.sent_message
@property
def calls(self):
return self._calls
def __enter__(self):
self.start()
def __exit__(self, *args):
self.stop()
self.reset()
def activate(self, func):
evaldict = {'smtpmock': self, 'func': func}
return get_wrapped(func, _wrapper_template, evaldict)
def _on_request(self, SMTP_instance, sender, recipient, msg):
# mangle request packet
response = self._request_data.get("response")
if not self._request_data.get("authenticated"):
response = {self._request_data.get("recipient"):
(530, "Authorization required (#5.7.1)")}
return response
def _on_login(self, SMTP_instance, username, password):
# mangle request packet
if self._request_data.get("authenticated"):
response = (235, "Authentication successful.")
else:
response = (535, "authentication failed (#5.7.1)")
return {self._request_data.get("recipient"): response}
# def _on_init(self, SMTP_instance, host, port=25, timeout=3):
def _on_init(self, *args, **kwargs):
SM | @staticmethod
def _on_debuglevel(SMTP_instance, level):
return None
@staticmethod
def _on_quit(SMTP_instance):
return None
def _on_starttls(self, SMTP_instance):
if self.exception:
raise SMTPException("MOCK TLS ERROR")
if not self.support_tls:
raise SMTPException("The SMTP Server does not support TLS.")
return None
def start(self):
import mock
def unbound_on_send(SMTP, sender, recipient, msg, *a, **kwargs):
self.sent_message = msg
return self._on_request(SMTP, sender, recipient, msg, *a, **kwargs)
self._patcher = mock.patch('smtplib.SMTP.sendmail',
unbound_on_send)
self._patcher.start()
def unbound_on_login(SMTP, username, password, *a, **kwargs):
return self._on_login(SMTP, username, password, *a, **kwargs)
self._patcher2 = mock.patch('smtplib.SMTP.login',
unbound_on_login)
self._patcher2.start()
def unbound_on_init(SMTP, server, *a, **kwargs):
return self._on_init(SMTP, server, *a, **kwargs)
self._patcher3 = mock.patch('smtplib.SMTP.__init__',
unbound_on_init)
self._patcher3.start()
def unbound_on_debuglevel(SMTP, level, *a, **kwargs):
return self._on_debuglevel(SMTP, level, *a, **kwargs)
self._patcher4 = mock.patch('smtplib.SMTP.debuglevel',
unbound_on_debuglevel)
self._patcher4.start()
def unbound_on_quit(SMTP, *a, **kwargs):
return self._on_quit(SMTP, *a, **kwargs)
def unbound_on_starttls(SMTP, *a, **kwargs):
return self._on_starttls(SMTP, *a, **kwargs)
self._patcher5 = mock.patch('smtplib.SMTP.quit',
unbound_on_quit)
self._patcher5.start()
def unbound_on_empty(SMTP, *a, **kwargs):
return None
self._patcher6 = mock.patch('smtplib.SMTP.ehlo',
unbound_on_empty)
self._patcher6.start()
self._patcher7 = mock.patch('smtplib.SMTP.close',
unbound_on_empty)
self._patcher7.start()
self._patcher8 = mock.patch('smtplib.SMTP.starttls',
unbound_on_starttls)
self._patcher8.start()
def stop(self):
self._patcher.stop()
self._patcher2.stop()
self._patcher3.stop()
self._patcher4.stop()
self._patcher5.stop()
self._patcher6.stop()
self._patcher7.stop()
self._patcher8.stop()
# expose default mock namespace
mock = _default_mock = SmtpMock()
__all__ = []
for __attr in (a for a in dir(_default_mock) if not a.startswith('_')):
__all__.append(__attr)
globals()[__attr] = getattr(_default_mock, __attr)
| TP_instance = args[0]
host = args[1]
if isinstance(SMTP_instance, smtplib.SMTP_SSL):
# in case we need sth. to do with SMTL_SSL
self.smtp_ssl = True
# mangle request packet
self.timeout = kwargs.get("timeout", 10)
self.port = kwargs.get("port", 25)
self.esmtp_features = {}
return None
| identifier_body |
slider.ts | /**
* Created by 10177553 on 2017/4/13.
*/
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ElementRef,
EventEmitter,
forwardRef,
Host,
HostListener,
Inject,
Injector,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
QueryList,
Renderer2,
ViewChildren,
ViewEncapsulation,
ViewChild
} from "@angular/core";
import {ControlValueAccessor, NG_VALUE_ACCESSOR} from "@angular/forms";
import {CallbackRemoval, CommonUtils} from "../../common/core/utils/common-utils";
import {ArrayCollection} from "../../common/core/data/array-collection";
import {AbstractJigsawComponent, AbstractJigsawViewBase, WingsTheme} from "../../common/common";
import {RequireMarkForCheck} from "../../common/decorator/mark-for-check";
import {JigsawTooltip} from "../../common/directive/tooltip/tooltip";
import {FloatPosition} from "../../common/directive/float/float";
export class SliderMark {
value: number;
label: string;
style?: any;
}
/**
* @internal
*/
@Component({
selector: 'jigsaw-slider-handle',
templateUrl: './slider-handle.html',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSliderHandle extends AbstractJigsawViewBase implements OnInit {
private _value: number;
/**
* @internal
*/
public _$tooltipRenderHtml: string;
/**
* @NoMarkForCheckRequired
*/
@Input()
public index: number;
/**
* @NoMarkForCheckRequired
*/
@Input()
public get value() {
return this._value;
}
public set value(value) {
this._value = this._slider._verifyValue(value);
this._valueToPos();
this._$tooltipRenderHtml = `<div style="word-break: normal;">${this._value}</div>`
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public tooltipPosition: FloatPosition = 'top';
@Output()
public change = new EventEmitter<number>();
private _valueToPos() {
this._offset = this._slider._transformValueToPos(this.value);
this._setHandleStyle();
}
private _offset: number = 0;
/**
* @internal
*/
public _$handleStyle = {};
private _setHandleStyle() {
if (isNaN(this._offset)) {
return;
}
if (this._slider.vertical) {
this._$handleStyle = {
bottom: this._offset + "%"
}
} else {
this._$handleStyle = {
left: this._offset + "%"
}
}
this._cdr.markForCheck();
}
private _dragging: boolean = false;
private _transformPosToValue(pos: { x: number, y: number }): number {
// 更新取得的滑动条尺寸.
this._slider._refresh();
const dimensions = this._slider._dimensions;
// bottom 在dom中的位置.
const offset = this._slider.vertical ? dimensions.bottom : dimensions.left;
const size = this._slider.vertical ? dimensions.height : dimensions.width;
let posValue = this._slider.vertical ? pos.y - 6 : pos.x;
if (this._slider.vertical) {
posValue = posValue > offset ? offset : posValue;
} else {
posValue = posValue < offset ? offset : posValue;
}
let newValue = Math.abs(posValue - offset) / size * (this._slider.max - this._slider.min) + (this._slider.min - 0); // 保留两位小数
const m = this._calFloat(this._slider.step);
// 解决出现的有时小数点多了N多位.
newValue = Math.round(Math.round(newValue / this._slider.step) * this._slider.step * Math.pow(10, m)) / Math.pow(10, m);
return this._slider._verifyValue(newValue);
}
/**
* 增加步长的计算,计算需要保留小数的位数
*/
private _calFloat(value: number): number {
try {
return this._slider.step.toString().split(".")[1].length;
} catch (e) {
return 0;
}
}
/**
* @internal
*/
public _$startToDrag(): void {
this._tooltip.jigsawFloatCloseTrigger = 'none';
this._dragging = true;
this._registerGlobalEvent();
}
private _removeGlobalEventMouseMoveListener: Function;
private _removeGlobalEventMouseUpListener: Function;
private _registerGlobalEvent(): void {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
this._removeGlobalEventMouseMoveListener = this._render.listen("document", "mousemove", (e) => {
this._updateValuePosition(e);
});
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._removeGlobalEventMouseUpListener = this._render.listen("document", "mouseup", () => {
this._dragging = false;
this._destroyGlobalEvent();
});
}
private _destroyGlobalEvent() {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._tooltip.jigsawFloatCloseTrigger = 'mouseleave';
}
/**
* 父组件
* @private
*/
private _slider: JigsawSlider;
constructor(private _render: Renderer2, @Host() @Inject(forwardRef(() => JigsawSlider)) slider: any,
protected _zone: NgZone, private _cdr: ChangeDetectorRef) {
super();
this._slider = slider;
}
@ViewChild(JigsawTooltip)
private _tooltip: JigsawTooltip;
/**
* 改变value的值
*/
private _updateValuePosition(event?) {
if (!this._dragging || this._slider.disabled) {
return;
}
// 防止产生选中其他文本,造成鼠标放开后还可以拖拽的奇怪现象;
event.stopPropagation();
event.preventDefault();
const pos = {
x: event["clientX"],
y: event["clientY"]
};
let newValue = this._transformPosToValue(pos);
if (this.value === newValue) {
return;
}
this.value = newValue;
this._slider._updateValue(this.index, newValue);
this.runAfterMicrotasks(() => {
this._tooltip.reposition();
});
}
ngOnInit() {
this._valueToPos();
}
}
/**
* @description 滑动条组件.
*
* 何时使用
* 当用户需要在数值区间/自定义区间内进行选择时
*/
@WingsTheme('slider.scss')
@Component({
selector: 'jigsaw-slider, j-slider',
templateUrl: './slider.html',
host: {
'[style.width]': 'width',
'[style.height]': 'height',
'[attr.data-theme]': 'theme',
'[class.jigsaw-slider-host]': 'true',
'[class.jigsaw-slider-error]': '!valid',
'[class.jigsaw-slider-vertical]': 'vertical',
},
encapsulation: ViewEncapsulation.None,
providers: [
{provide: NG_VALUE_ACCESSOR, useExisting: forwardRef(() => JigsawSlider), multi: true},
],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSlider extends AbstractJigsawComponent implements ControlValueAccessor, OnInit, OnDestroy {
constructor(private _element: ElementRef, private _render: Renderer2,
protected _zone: NgZone, private _changeDetectorRef: ChangeDetectorRef,
// @RequireMarkForCheck 需要用到,勿删
private _injector: Injector) {
super();
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public valid: boolean = true;
// Todo 支持滑动条点击.
@ViewChildren(JigsawSliderHandle)
private _sliderHandle: QueryList<JigsawSliderHandle>;
/**
* @internal
*/
public get _$trackBy() {
return (index: number) => index;
}
/**
* @internal
*/
public _$value: ArrayCollection<number> = new ArrayCollection<number>();
private _removeRefreshCallback: CallbackRemoval = this._getRemoveRefreshCallback();
/**
* slider的当前值, 类型 number | ArrayCollection<number> 支持多触点
*
* @NoMarkForCheckRequired
*/
@Input()
public get value(): number | ArrayCollection<number> {
// 兼容返回单个值, 和多触点的数组;
if (this._$value.length == 1) {
return this._$value[0];
} else {
return this._$value;
}
}
public set value(value: number | ArrayCollection<number>) {
this.writeValue(value);
}
/**
* 设置单个的值。内部使用
* 子级组件需要用到
* @internal
*/
public _updateValue(index: number, value: number) {
this._$value.set(index, value);
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
/**
* 最后重新计算一下,垂直滚动条的位置
* 子级组件需要用到
* @internal
*/
public _refresh() {
this._dimensions = this._element.nativeElement.getBoundingClientRect();
this._changeDetectorRef.markForCheck();
}
/**
* 使 value 支持双向绑定
*/
@Output()
public valueChange = new EventEmitter<number | ArrayCollection<number>>();
// 当滑动条的组件值变化时,对外发出的事件
@Output()
public change = this.valueChange;
private _min: number = 0;
/**
* 可选范围的最小值
*
* @NoMarkForCheckRequired
*/
@Input()
public get min():number {
return this._min;
}
public set min(min: number) {
min = Number(min);
if (isNaN(min)) {
return;
}
this._min = min;
}
private _max: number = 100;
/**
* 输入范围的可选最大值.
*
* @NoMarkForCheckRequired
*/
@Input()
public get max():number {
return this._max;
}
public set max(max: number) {
max = Number(max);
if (isNaN(max)) {
return;
}
this._max = Number(max);
}
private _step: number = 1;
/**
* 每次变化的最小值, 最小支持小数点后两位.
*
* @NoMarkForCheckRequired
*/
@Input()
public get step() {
return this._step;
}
public set step(value: number) {
this._step = value;
}
/**
* 子级组件需要用到
* @internal
*/
public _transformValueToPos(value?) {
// 检验值的合法性, 不合法转换成默认可接受的合法值;
value = this._verifyValue(value);
return (value - this.min) / (this.max - this.min) * 100;
}
/**
* 子级组件需要用到
* @internal
*/
public _dimensions: ClientRect;
/**
* 垂直滑动条 默认 false
*
* @NoMarkForCheckRequired
*/
@Input()
public vertical: boolean = false;
/**
* 是否禁用. 数据类型 boolean, 默认false;
*/
@Input()
@RequireMarkForCheck()
public disabled: boolean = false;
/**
* @internal
*/
public _$trackStyle = {};
private _setTrackStyle() {
let startPos: number = 0;
let trackSize: number = 0;
if (this._$value.length > 1) {
// 多触点
let min: number = Math.min(...this._$value);
let max: number = Math.max(...this._$value);
startPos = this._transformValueToPos(min);
trackSize = Math.abs(this._transformValueToPos(max) - this._transformValueToPos(min));
} else {
// 单触点
trackSize = this._transformValueToPos(this.value);
}
if (this.vertical) {
this._$trackStyle = {
bottom: startPos + "%",
height: trackSize + "%"
}
} else {
this._$trackStyle = {
left: startPos + "%",
width: trackSize + "%"
}
}
}
/**
* @internal
*/
public _$marks: any[] = [];
private _marks: SliderMark[];
/**
* marks 标签 使用格式为 [Object] 其中 Object 必须包含value 及label 可以有style 属性
* 例如: marks = [{value: 20, label: '20 ℃'},
*/
@Input()
@RequireMarkForCheck()
public get marks(): SliderMark[] {
return this._marks;
}
public set marks(value: SliderMark[]) {
this._marks = value;
this._calcMarks();
}
/**
* @internal
* @param markVal
*/
public _$isDotActive(markVal: number): boolean {
if (this._$value.length == 1) {
return markVal < this.value;
} else {
const min = Math.min(...this._$value);
const max = Math.max(...this._$value);
return markVal >= min && markVal <= max;
}
}
private _calcMarks() {
if (!this._marks || !this.initialized) {
return;
}
this._$marks.splice(0, this._$marks.length);
let size = Math.round(100 / this._marks.length);
let margin = -Math.floor(size / 2);
let vertical = this.vertical;
this._marks.forEach(mark => {
const richMark: any = {};
if (vertical) {
richMark.dotStyle = {
bottom: this._transformValueToPos(mark.value) + "%"
| n + "%"
};
} else {
richMark.dotStyle = {
top: "-2px",
left: this._transformValueToPos(mark.value) + "%"
};
richMark.labelStyle = {
left: this._transformValueToPos(mark.value) + "%",
width: size + "%", "margin-left": margin + "%"
};
}
// 如果用户自定义了样式, 要进行样式的合并;
CommonUtils.extendObject(richMark.labelStyle, mark.style);
richMark.label = mark.label;
richMark.value = mark.value;
this._$marks.push(richMark);
});
}
ngOnInit() {
super.ngOnInit();
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
// 设置标记.
this._calcMarks();
// 注册resize事件;
this._resize();
}
private _removeResizeEvent: Function;
private _resize() {
this._zone.runOutsideAngular(() => {
this._removeResizeEvent = this._render.listen("window", "resize", () => {
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
})
})
}
/**
* 暂没有使用场景.
*/
public ngOnDestroy() {
super.ngOnDestroy();
if (this._removeResizeEvent) {
this._removeResizeEvent();
}
if (this._removeRefreshCallback) {
this._removeRefreshCallback()
}
}
/**
* 校验value的合法性. 大于最大值,取最大值, 小于最小值取最小值
* 子级组件需要用到
* @internal
*/
public _verifyValue(value: number): number {
if (value - this.min < 0 && this.initialized) {
return this.min;
} else if (value - this.max > 0 && this.initialized) {
return this.max;
} else {
return value;
}
}
private _getRemoveRefreshCallback() {
return this._$value.onRefresh(() => {
this._zone.runOutsideAngular(() => this._setTrackStyle());
this._updateSliderHandleValue();
this.valueChange.emit(this.value);
this._propagateChange(this.value);
this._changeDetectorRef.markForCheck();
});
}
/**
* 手动更新handle的值,通过ngFor更新必须value发生变化,如max变化也需要调整位置
* @private
*/
private _updateSliderHandleValue() {
if(!this._sliderHandle || !this._$value) {
return;
}
this._sliderHandle.forEach((item, index) => item.value = this._$value[index])
}
private _propagateChange: any = () => {
};
private _onTouched: any = () => {
};
// ngModel触发的writeValue方法,只会在ngOnInit,ngAfterContentInit,ngAfterViewInit这些生命周期之后才调用
public writeValue(value: any): void {
if (value instanceof Array) {
value = new ArrayCollection(value);
}
if (value instanceof ArrayCollection) {
if (this._$value !== value) {
this._$value = value;
if (this._removeRefreshCallback) {
this._removeRefreshCallback();
}
this._removeRefreshCallback = this._getRemoveRefreshCallback();
}
} else {
this._$value.splice(0, this._$value.length);
this._$value.push(this._verifyValue(+value));
}
// refresh的回调是异步的
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
public registerOnChange(fn: any): void {
this._propagateChange = fn;
}
public registerOnTouched(fn: any): void {
this._onTouched = fn;
}
@HostListener('click')
onClickTrigger(): void {
if (this.disabled) {
return;
}
this._onTouched();
}
public setDisabledState(disabled: boolean): void {
this.disabled = disabled;
}
}
| };
richMark.labelStyle = {
bottom: this._transformValueToPos(mark.value) + "%",
"margin-bottom": margi | conditional_block |
slider.ts | /**
* Created by 10177553 on 2017/4/13.
*/
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ElementRef,
EventEmitter,
forwardRef,
Host,
HostListener,
Inject,
Injector,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
QueryList,
Renderer2,
ViewChildren,
ViewEncapsulation,
ViewChild
} from "@angular/core";
import {ControlValueAccessor, NG_VALUE_ACCESSOR} from "@angular/forms";
import {CallbackRemoval, CommonUtils} from "../../common/core/utils/common-utils";
import {ArrayCollection} from "../../common/core/data/array-collection";
import {AbstractJigsawComponent, AbstractJigsawViewBase, WingsTheme} from "../../common/common";
import {RequireMarkForCheck} from "../../common/decorator/mark-for-check";
import {JigsawTooltip} from "../../common/directive/tooltip/tooltip";
import {FloatPosition} from "../../common/directive/float/float";
export class SliderMark {
value: number;
label: string;
style?: any;
}
/**
* @internal
*/
@Component({
selector: 'jigsaw-slider-handle',
templateUrl: './slider-handle.html',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSliderHandle extends AbstractJigsawViewBase implements OnInit {
private _value: number;
/**
* @internal
*/
public _$tooltipRenderHtml: string;
/**
* @NoMarkForCheckRequired
*/
@Input()
public index: number;
/**
* @NoMarkForCheckRequired
*/
@Input()
public get value() {
return this._value;
}
public set value(value) {
this._value = this._slider._verifyValue(value);
this._valueToPos();
this._$tooltipRenderHtml = `<div style="word-break: normal;">${this._value}</div>`
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public tooltipPosition: FloatPosition = 'top';
@Output()
public change = new EventEmitter<number>();
private _valueToPos() {
this._offset = this._slider._transformValueToPos(this.value);
this._setHandleStyle();
}
private _offset: number = 0;
/**
* @internal
*/
public _$handleStyle = {};
private _setHandleStyle() {
if (isNaN(this._offset)) {
return;
}
if (this._slider.vertical) {
this._$handleStyle = {
bottom: this._offset + "%"
}
} else {
this._$handleStyle = {
left: this._offset + "%"
}
}
this._cdr.markForCheck();
}
private _dragging: boolean = false;
private _transformPosToValue(pos: { x: number, y: number }): number {
// 更新取得的滑动条尺寸.
this._slider._refresh();
const dimensions = this._slider._dimensions;
// bottom 在dom中的位置.
const offset = this._slider.vertical ? dimensions.bottom : dimensions.left;
const size = this._slider.vertical ? dimensions.height : dimensions.width;
let posValue = this._slider.vertical ? pos.y - 6 : pos.x;
if (this._slider.vertical) {
posValue = posValue > offset ? offset : posValue;
} else {
posValue = posValue < offset ? offset : posValue;
}
let newValue = Math.abs(posValue - offset) / size * (this._slider.max - this._slider.min) + (this._slider.min - 0); // 保留两位小数
const m = this._calFloat(this._slider.step);
// 解决出现的有时小数点多了N多位.
newValue = Math.round(Math.round(newValue / this._slider.step) * this._slider.step * Math.pow(10, m)) / Math.pow(10, m);
return this._slider._verifyValue(newValue);
}
/**
* 增加步长的计算,计算需要保留小数的位数
*/
private _calFloat(value: number): number {
try {
return this._slider.step.toString().split(".")[1].length;
} catch (e) {
return 0;
}
}
/**
* @internal
*/
public _$startToDrag(): void {
this._tooltip.jigsawFloatCloseTrigger = 'none';
this._dragging = true;
this._registerGlobalEvent();
}
private _removeGlobalEventMouseMoveListener: Function;
private _removeGlobalEventMouseUpListener: Function;
private _registerGlobalEvent(): void {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
this._removeGlobalEventMouseMoveListener = this._render.listen("document", "mousemove", (e) => {
this._updateValuePosition(e);
});
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._removeGlobalEventMouseUpListener = this._render.listen("document", "mouseup", () => {
this._dragging = false;
this._destroyGlobalEvent();
});
}
private _destroyGlobalEvent() {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._tooltip.jigsawFloatCloseTrigger = 'mouseleave';
}
/**
* 父组件
* @private
*/
private _slider: JigsawSlider;
constructor(private _render: Renderer2, @Host() @Inject(forwardRef(() => JigsawSlider)) slider: any,
protected _zone: NgZone, private _cdr: ChangeDetectorRef) {
super();
this._slider = slider;
}
@ViewChild(JigsawTooltip)
private _tooltip: JigsawTooltip;
/**
* 改变value的值
*/
private _updateValuePosition(event?) {
if (!this._dragging || this._slider.disabled) {
return;
}
// 防止产生选中其他文本,造成鼠标放开后还可以拖拽的奇怪现象;
event.stopPropagation();
event.preventDefault();
const pos = {
x: event["clientX"],
y: event["clientY"]
};
let newValue = this._transformPosToValue(pos);
if (this.value === newValue) {
return;
}
this.value = newValue;
this._slider._updateValue(this.index, newValue);
this.runAfterMicrotasks(() => {
this._tooltip.reposition();
});
}
ngOnInit() {
this._valueToPos();
}
}
/**
* @description 滑动条组件.
*
* 何时使用
* 当用户需要在数值区间/自定义区间内进行选择时
*/
@WingsTheme('slider.scss')
@Component({
selector: 'jigsaw-slider, j-slider',
templateUrl: './slider.html',
host: {
'[style.width]': 'width',
'[style.height]': 'height',
'[attr.data-theme]': 'theme',
'[class.jigsaw-slider-host]': 'true',
'[class.jigsaw-slider-error]': '!valid',
'[class.jigsaw-slider-vertical]': 'vertical',
},
encapsulation: ViewEncapsulation.None,
providers: [
{provide: NG_VALUE_ACCESSOR, useExisting: forwardRef(() => JigsawSlider), multi: true},
],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSlider extends AbstractJigsawComponent implements ControlValueAccessor, OnInit, OnDestroy {
constructor(private _element: ElementRef, private _render: Renderer2,
protected _zone: NgZone, private _changeDetectorRef: ChangeDetectorRef,
// @RequireMarkForCheck 需要用到,勿删
private _injector: Injector) {
super();
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public valid: boolean = true;
// Todo 支持滑动条点击.
@ViewChildren(JigsawSliderHandle)
private _sliderHandle: QueryList<JigsawSliderHandle>;
/**
* @internal
*/
public get _$trackBy() {
return (index: number) => index;
}
/**
* @internal
*/
public _$value: ArrayCollection<number> = new ArrayCollection<number>();
private _removeRefreshCallback: CallbackRemoval = this._getRemoveRefreshCallback();
/**
* sl | 多触点
*
* @NoMarkForCheckRequired
*/
@Input()
public get value(): number | ArrayCollection<number> {
// 兼容返回单个值, 和多触点的数组;
if (this._$value.length == 1) {
return this._$value[0];
} else {
return this._$value;
}
}
public set value(value: number | ArrayCollection<number>) {
this.writeValue(value);
}
/**
* 设置单个的值。内部使用
* 子级组件需要用到
* @internal
*/
public _updateValue(index: number, value: number) {
this._$value.set(index, value);
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
/**
* 最后重新计算一下,垂直滚动条的位置
* 子级组件需要用到
* @internal
*/
public _refresh() {
this._dimensions = this._element.nativeElement.getBoundingClientRect();
this._changeDetectorRef.markForCheck();
}
/**
* 使 value 支持双向绑定
*/
@Output()
public valueChange = new EventEmitter<number | ArrayCollection<number>>();
// 当滑动条的组件值变化时,对外发出的事件
@Output()
public change = this.valueChange;
private _min: number = 0;
/**
* 可选范围的最小值
*
* @NoMarkForCheckRequired
*/
@Input()
public get min():number {
return this._min;
}
public set min(min: number) {
min = Number(min);
if (isNaN(min)) {
return;
}
this._min = min;
}
private _max: number = 100;
/**
* 输入范围的可选最大值.
*
* @NoMarkForCheckRequired
*/
@Input()
public get max():number {
return this._max;
}
public set max(max: number) {
max = Number(max);
if (isNaN(max)) {
return;
}
this._max = Number(max);
}
private _step: number = 1;
/**
* 每次变化的最小值, 最小支持小数点后两位.
*
* @NoMarkForCheckRequired
*/
@Input()
public get step() {
return this._step;
}
public set step(value: number) {
this._step = value;
}
/**
* 子级组件需要用到
* @internal
*/
public _transformValueToPos(value?) {
// 检验值的合法性, 不合法转换成默认可接受的合法值;
value = this._verifyValue(value);
return (value - this.min) / (this.max - this.min) * 100;
}
/**
* 子级组件需要用到
* @internal
*/
public _dimensions: ClientRect;
/**
* 垂直滑动条 默认 false
*
* @NoMarkForCheckRequired
*/
@Input()
public vertical: boolean = false;
/**
* 是否禁用. 数据类型 boolean, 默认false;
*/
@Input()
@RequireMarkForCheck()
public disabled: boolean = false;
/**
* @internal
*/
public _$trackStyle = {};
private _setTrackStyle() {
let startPos: number = 0;
let trackSize: number = 0;
if (this._$value.length > 1) {
// 多触点
let min: number = Math.min(...this._$value);
let max: number = Math.max(...this._$value);
startPos = this._transformValueToPos(min);
trackSize = Math.abs(this._transformValueToPos(max) - this._transformValueToPos(min));
} else {
// 单触点
trackSize = this._transformValueToPos(this.value);
}
if (this.vertical) {
this._$trackStyle = {
bottom: startPos + "%",
height: trackSize + "%"
}
} else {
this._$trackStyle = {
left: startPos + "%",
width: trackSize + "%"
}
}
}
/**
* @internal
*/
public _$marks: any[] = [];
private _marks: SliderMark[];
/**
* marks 标签 使用格式为 [Object] 其中 Object 必须包含value 及label 可以有style 属性
* 例如: marks = [{value: 20, label: '20 ℃'},
*/
@Input()
@RequireMarkForCheck()
public get marks(): SliderMark[] {
return this._marks;
}
public set marks(value: SliderMark[]) {
this._marks = value;
this._calcMarks();
}
/**
* @internal
* @param markVal
*/
public _$isDotActive(markVal: number): boolean {
if (this._$value.length == 1) {
return markVal < this.value;
} else {
const min = Math.min(...this._$value);
const max = Math.max(...this._$value);
return markVal >= min && markVal <= max;
}
}
private _calcMarks() {
if (!this._marks || !this.initialized) {
return;
}
this._$marks.splice(0, this._$marks.length);
let size = Math.round(100 / this._marks.length);
let margin = -Math.floor(size / 2);
let vertical = this.vertical;
this._marks.forEach(mark => {
const richMark: any = {};
if (vertical) {
richMark.dotStyle = {
bottom: this._transformValueToPos(mark.value) + "%"
};
richMark.labelStyle = {
bottom: this._transformValueToPos(mark.value) + "%",
"margin-bottom": margin + "%"
};
} else {
richMark.dotStyle = {
top: "-2px",
left: this._transformValueToPos(mark.value) + "%"
};
richMark.labelStyle = {
left: this._transformValueToPos(mark.value) + "%",
width: size + "%", "margin-left": margin + "%"
};
}
// 如果用户自定义了样式, 要进行样式的合并;
CommonUtils.extendObject(richMark.labelStyle, mark.style);
richMark.label = mark.label;
richMark.value = mark.value;
this._$marks.push(richMark);
});
}
ngOnInit() {
super.ngOnInit();
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
// 设置标记.
this._calcMarks();
// 注册resize事件;
this._resize();
}
private _removeResizeEvent: Function;
private _resize() {
this._zone.runOutsideAngular(() => {
this._removeResizeEvent = this._render.listen("window", "resize", () => {
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
})
})
}
/**
* 暂没有使用场景.
*/
public ngOnDestroy() {
super.ngOnDestroy();
if (this._removeResizeEvent) {
this._removeResizeEvent();
}
if (this._removeRefreshCallback) {
this._removeRefreshCallback()
}
}
/**
* 校验value的合法性. 大于最大值,取最大值, 小于最小值取最小值
* 子级组件需要用到
* @internal
*/
public _verifyValue(value: number): number {
if (value - this.min < 0 && this.initialized) {
return this.min;
} else if (value - this.max > 0 && this.initialized) {
return this.max;
} else {
return value;
}
}
private _getRemoveRefreshCallback() {
return this._$value.onRefresh(() => {
this._zone.runOutsideAngular(() => this._setTrackStyle());
this._updateSliderHandleValue();
this.valueChange.emit(this.value);
this._propagateChange(this.value);
this._changeDetectorRef.markForCheck();
});
}
/**
* 手动更新handle的值,通过ngFor更新必须value发生变化,如max变化也需要调整位置
* @private
*/
private _updateSliderHandleValue() {
if(!this._sliderHandle || !this._$value) {
return;
}
this._sliderHandle.forEach((item, index) => item.value = this._$value[index])
}
private _propagateChange: any = () => {
};
private _onTouched: any = () => {
};
// ngModel触发的writeValue方法,只会在ngOnInit,ngAfterContentInit,ngAfterViewInit这些生命周期之后才调用
public writeValue(value: any): void {
if (value instanceof Array) {
value = new ArrayCollection(value);
}
if (value instanceof ArrayCollection) {
if (this._$value !== value) {
this._$value = value;
if (this._removeRefreshCallback) {
this._removeRefreshCallback();
}
this._removeRefreshCallback = this._getRemoveRefreshCallback();
}
} else {
this._$value.splice(0, this._$value.length);
this._$value.push(this._verifyValue(+value));
}
// refresh的回调是异步的
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
public registerOnChange(fn: any): void {
this._propagateChange = fn;
}
public registerOnTouched(fn: any): void {
this._onTouched = fn;
}
@HostListener('click')
onClickTrigger(): void {
if (this.disabled) {
return;
}
this._onTouched();
}
public setDisabledState(disabled: boolean): void {
this.disabled = disabled;
}
}
| ider的当前值, 类型 number | ArrayCollection<number> 支持 | identifier_body |
slider.ts | /**
* Created by 10177553 on 2017/4/13.
*/
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ElementRef,
EventEmitter,
forwardRef,
Host,
HostListener,
Inject,
Injector,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
QueryList,
Renderer2,
ViewChildren,
ViewEncapsulation,
ViewChild
} from "@angular/core";
import {ControlValueAccessor, NG_VALUE_ACCESSOR} from "@angular/forms";
import {CallbackRemoval, CommonUtils} from "../../common/core/utils/common-utils";
import {ArrayCollection} from "../../common/core/data/array-collection";
import {AbstractJigsawComponent, AbstractJigsawViewBase, WingsTheme} from "../../common/common";
import {RequireMarkForCheck} from "../../common/decorator/mark-for-check";
import {JigsawTooltip} from "../../common/directive/tooltip/tooltip";
import {FloatPosition} from "../../common/directive/float/float";
export class SliderMark {
value: number;
label: string;
style?: any;
}
/**
* @internal
*/
@Component({
selector: 'jigsaw-slider-handle',
templateUrl: './slider-handle.html',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSliderHandle extends AbstractJigsawViewBase implements OnInit {
private _value: number;
/**
* @internal
*/
public _$tooltipRenderHtml: string;
/**
* @NoMarkForCheckRequired
*/
@Input()
public index: number;
/**
* @NoMarkForCheckRequired
*/
@Input()
public get value() {
return this._value;
}
public set value(value) {
this._value = this._slider._verifyValue(value);
this._valueToPos();
this._$tooltipRenderHtml = `<div style="word-break: normal;">${this._value}</div>`
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public tooltipPosition: FloatPosition = 'top';
@Output()
public change = new EventEmitter<number>();
private | () {
this._offset = this._slider._transformValueToPos(this.value);
this._setHandleStyle();
}
private _offset: number = 0;
/**
* @internal
*/
public _$handleStyle = {};
private _setHandleStyle() {
if (isNaN(this._offset)) {
return;
}
if (this._slider.vertical) {
this._$handleStyle = {
bottom: this._offset + "%"
}
} else {
this._$handleStyle = {
left: this._offset + "%"
}
}
this._cdr.markForCheck();
}
private _dragging: boolean = false;
private _transformPosToValue(pos: { x: number, y: number }): number {
// 更新取得的滑动条尺寸.
this._slider._refresh();
const dimensions = this._slider._dimensions;
// bottom 在dom中的位置.
const offset = this._slider.vertical ? dimensions.bottom : dimensions.left;
const size = this._slider.vertical ? dimensions.height : dimensions.width;
let posValue = this._slider.vertical ? pos.y - 6 : pos.x;
if (this._slider.vertical) {
posValue = posValue > offset ? offset : posValue;
} else {
posValue = posValue < offset ? offset : posValue;
}
let newValue = Math.abs(posValue - offset) / size * (this._slider.max - this._slider.min) + (this._slider.min - 0); // 保留两位小数
const m = this._calFloat(this._slider.step);
// 解决出现的有时小数点多了N多位.
newValue = Math.round(Math.round(newValue / this._slider.step) * this._slider.step * Math.pow(10, m)) / Math.pow(10, m);
return this._slider._verifyValue(newValue);
}
/**
* 增加步长的计算,计算需要保留小数的位数
*/
private _calFloat(value: number): number {
try {
return this._slider.step.toString().split(".")[1].length;
} catch (e) {
return 0;
}
}
/**
* @internal
*/
public _$startToDrag(): void {
this._tooltip.jigsawFloatCloseTrigger = 'none';
this._dragging = true;
this._registerGlobalEvent();
}
private _removeGlobalEventMouseMoveListener: Function;
private _removeGlobalEventMouseUpListener: Function;
private _registerGlobalEvent(): void {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
this._removeGlobalEventMouseMoveListener = this._render.listen("document", "mousemove", (e) => {
this._updateValuePosition(e);
});
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._removeGlobalEventMouseUpListener = this._render.listen("document", "mouseup", () => {
this._dragging = false;
this._destroyGlobalEvent();
});
}
private _destroyGlobalEvent() {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._tooltip.jigsawFloatCloseTrigger = 'mouseleave';
}
/**
* 父组件
* @private
*/
private _slider: JigsawSlider;
constructor(private _render: Renderer2, @Host() @Inject(forwardRef(() => JigsawSlider)) slider: any,
protected _zone: NgZone, private _cdr: ChangeDetectorRef) {
super();
this._slider = slider;
}
@ViewChild(JigsawTooltip)
private _tooltip: JigsawTooltip;
/**
* 改变value的值
*/
private _updateValuePosition(event?) {
if (!this._dragging || this._slider.disabled) {
return;
}
// 防止产生选中其他文本,造成鼠标放开后还可以拖拽的奇怪现象;
event.stopPropagation();
event.preventDefault();
const pos = {
x: event["clientX"],
y: event["clientY"]
};
let newValue = this._transformPosToValue(pos);
if (this.value === newValue) {
return;
}
this.value = newValue;
this._slider._updateValue(this.index, newValue);
this.runAfterMicrotasks(() => {
this._tooltip.reposition();
});
}
ngOnInit() {
this._valueToPos();
}
}
/**
* @description 滑动条组件.
*
* 何时使用
* 当用户需要在数值区间/自定义区间内进行选择时
*/
@WingsTheme('slider.scss')
@Component({
selector: 'jigsaw-slider, j-slider',
templateUrl: './slider.html',
host: {
'[style.width]': 'width',
'[style.height]': 'height',
'[attr.data-theme]': 'theme',
'[class.jigsaw-slider-host]': 'true',
'[class.jigsaw-slider-error]': '!valid',
'[class.jigsaw-slider-vertical]': 'vertical',
},
encapsulation: ViewEncapsulation.None,
providers: [
{provide: NG_VALUE_ACCESSOR, useExisting: forwardRef(() => JigsawSlider), multi: true},
],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSlider extends AbstractJigsawComponent implements ControlValueAccessor, OnInit, OnDestroy {
constructor(private _element: ElementRef, private _render: Renderer2,
protected _zone: NgZone, private _changeDetectorRef: ChangeDetectorRef,
// @RequireMarkForCheck 需要用到,勿删
private _injector: Injector) {
super();
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public valid: boolean = true;
// Todo 支持滑动条点击.
@ViewChildren(JigsawSliderHandle)
private _sliderHandle: QueryList<JigsawSliderHandle>;
/**
* @internal
*/
public get _$trackBy() {
return (index: number) => index;
}
/**
* @internal
*/
public _$value: ArrayCollection<number> = new ArrayCollection<number>();
private _removeRefreshCallback: CallbackRemoval = this._getRemoveRefreshCallback();
/**
* slider的当前值, 类型 number | ArrayCollection<number> 支持多触点
*
* @NoMarkForCheckRequired
*/
@Input()
public get value(): number | ArrayCollection<number> {
// 兼容返回单个值, 和多触点的数组;
if (this._$value.length == 1) {
return this._$value[0];
} else {
return this._$value;
}
}
public set value(value: number | ArrayCollection<number>) {
this.writeValue(value);
}
/**
* 设置单个的值。内部使用
* 子级组件需要用到
* @internal
*/
public _updateValue(index: number, value: number) {
this._$value.set(index, value);
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
/**
* 最后重新计算一下,垂直滚动条的位置
* 子级组件需要用到
* @internal
*/
public _refresh() {
this._dimensions = this._element.nativeElement.getBoundingClientRect();
this._changeDetectorRef.markForCheck();
}
/**
* 使 value 支持双向绑定
*/
@Output()
public valueChange = new EventEmitter<number | ArrayCollection<number>>();
// 当滑动条的组件值变化时,对外发出的事件
@Output()
public change = this.valueChange;
private _min: number = 0;
/**
* 可选范围的最小值
*
* @NoMarkForCheckRequired
*/
@Input()
public get min():number {
return this._min;
}
public set min(min: number) {
min = Number(min);
if (isNaN(min)) {
return;
}
this._min = min;
}
private _max: number = 100;
/**
* 输入范围的可选最大值.
*
* @NoMarkForCheckRequired
*/
@Input()
public get max():number {
return this._max;
}
public set max(max: number) {
max = Number(max);
if (isNaN(max)) {
return;
}
this._max = Number(max);
}
private _step: number = 1;
/**
* 每次变化的最小值, 最小支持小数点后两位.
*
* @NoMarkForCheckRequired
*/
@Input()
public get step() {
return this._step;
}
public set step(value: number) {
this._step = value;
}
/**
* 子级组件需要用到
* @internal
*/
public _transformValueToPos(value?) {
// 检验值的合法性, 不合法转换成默认可接受的合法值;
value = this._verifyValue(value);
return (value - this.min) / (this.max - this.min) * 100;
}
/**
* 子级组件需要用到
* @internal
*/
public _dimensions: ClientRect;
/**
* 垂直滑动条 默认 false
*
* @NoMarkForCheckRequired
*/
@Input()
public vertical: boolean = false;
/**
* 是否禁用. 数据类型 boolean, 默认false;
*/
@Input()
@RequireMarkForCheck()
public disabled: boolean = false;
/**
* @internal
*/
public _$trackStyle = {};
private _setTrackStyle() {
let startPos: number = 0;
let trackSize: number = 0;
if (this._$value.length > 1) {
// 多触点
let min: number = Math.min(...this._$value);
let max: number = Math.max(...this._$value);
startPos = this._transformValueToPos(min);
trackSize = Math.abs(this._transformValueToPos(max) - this._transformValueToPos(min));
} else {
// 单触点
trackSize = this._transformValueToPos(this.value);
}
if (this.vertical) {
this._$trackStyle = {
bottom: startPos + "%",
height: trackSize + "%"
}
} else {
this._$trackStyle = {
left: startPos + "%",
width: trackSize + "%"
}
}
}
/**
* @internal
*/
public _$marks: any[] = [];
private _marks: SliderMark[];
/**
* marks 标签 使用格式为 [Object] 其中 Object 必须包含value 及label 可以有style 属性
* 例如: marks = [{value: 20, label: '20 ℃'},
*/
@Input()
@RequireMarkForCheck()
public get marks(): SliderMark[] {
return this._marks;
}
public set marks(value: SliderMark[]) {
this._marks = value;
this._calcMarks();
}
/**
* @internal
* @param markVal
*/
public _$isDotActive(markVal: number): boolean {
if (this._$value.length == 1) {
return markVal < this.value;
} else {
const min = Math.min(...this._$value);
const max = Math.max(...this._$value);
return markVal >= min && markVal <= max;
}
}
private _calcMarks() {
if (!this._marks || !this.initialized) {
return;
}
this._$marks.splice(0, this._$marks.length);
let size = Math.round(100 / this._marks.length);
let margin = -Math.floor(size / 2);
let vertical = this.vertical;
this._marks.forEach(mark => {
const richMark: any = {};
if (vertical) {
richMark.dotStyle = {
bottom: this._transformValueToPos(mark.value) + "%"
};
richMark.labelStyle = {
bottom: this._transformValueToPos(mark.value) + "%",
"margin-bottom": margin + "%"
};
} else {
richMark.dotStyle = {
top: "-2px",
left: this._transformValueToPos(mark.value) + "%"
};
richMark.labelStyle = {
left: this._transformValueToPos(mark.value) + "%",
width: size + "%", "margin-left": margin + "%"
};
}
// 如果用户自定义了样式, 要进行样式的合并;
CommonUtils.extendObject(richMark.labelStyle, mark.style);
richMark.label = mark.label;
richMark.value = mark.value;
this._$marks.push(richMark);
});
}
ngOnInit() {
super.ngOnInit();
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
// 设置标记.
this._calcMarks();
// 注册resize事件;
this._resize();
}
private _removeResizeEvent: Function;
private _resize() {
this._zone.runOutsideAngular(() => {
this._removeResizeEvent = this._render.listen("window", "resize", () => {
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
})
})
}
/**
* 暂没有使用场景.
*/
public ngOnDestroy() {
super.ngOnDestroy();
if (this._removeResizeEvent) {
this._removeResizeEvent();
}
if (this._removeRefreshCallback) {
this._removeRefreshCallback()
}
}
/**
* 校验value的合法性. 大于最大值,取最大值, 小于最小值取最小值
* 子级组件需要用到
* @internal
*/
public _verifyValue(value: number): number {
if (value - this.min < 0 && this.initialized) {
return this.min;
} else if (value - this.max > 0 && this.initialized) {
return this.max;
} else {
return value;
}
}
private _getRemoveRefreshCallback() {
return this._$value.onRefresh(() => {
this._zone.runOutsideAngular(() => this._setTrackStyle());
this._updateSliderHandleValue();
this.valueChange.emit(this.value);
this._propagateChange(this.value);
this._changeDetectorRef.markForCheck();
});
}
/**
* 手动更新handle的值,通过ngFor更新必须value发生变化,如max变化也需要调整位置
* @private
*/
private _updateSliderHandleValue() {
if(!this._sliderHandle || !this._$value) {
return;
}
this._sliderHandle.forEach((item, index) => item.value = this._$value[index])
}
private _propagateChange: any = () => {
};
private _onTouched: any = () => {
};
// ngModel触发的writeValue方法,只会在ngOnInit,ngAfterContentInit,ngAfterViewInit这些生命周期之后才调用
public writeValue(value: any): void {
if (value instanceof Array) {
value = new ArrayCollection(value);
}
if (value instanceof ArrayCollection) {
if (this._$value !== value) {
this._$value = value;
if (this._removeRefreshCallback) {
this._removeRefreshCallback();
}
this._removeRefreshCallback = this._getRemoveRefreshCallback();
}
} else {
this._$value.splice(0, this._$value.length);
this._$value.push(this._verifyValue(+value));
}
// refresh的回调是异步的
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
public registerOnChange(fn: any): void {
this._propagateChange = fn;
}
public registerOnTouched(fn: any): void {
this._onTouched = fn;
}
@HostListener('click')
onClickTrigger(): void {
if (this.disabled) {
return;
}
this._onTouched();
}
public setDisabledState(disabled: boolean): void {
this.disabled = disabled;
}
}
| _valueToPos | identifier_name |
slider.ts | /**
* Created by 10177553 on 2017/4/13.
*/
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ElementRef,
EventEmitter,
forwardRef,
Host,
HostListener,
Inject,
Injector,
Input,
NgZone,
OnDestroy,
OnInit,
Output,
QueryList,
Renderer2,
ViewChildren,
ViewEncapsulation,
ViewChild
} from "@angular/core";
import {ControlValueAccessor, NG_VALUE_ACCESSOR} from "@angular/forms";
import {CallbackRemoval, CommonUtils} from "../../common/core/utils/common-utils";
import {ArrayCollection} from "../../common/core/data/array-collection";
import {AbstractJigsawComponent, AbstractJigsawViewBase, WingsTheme} from "../../common/common";
import {RequireMarkForCheck} from "../../common/decorator/mark-for-check";
import {JigsawTooltip} from "../../common/directive/tooltip/tooltip";
import {FloatPosition} from "../../common/directive/float/float";
export class SliderMark {
value: number;
label: string;
style?: any;
}
/**
* @internal
*/
@Component({
selector: 'jigsaw-slider-handle',
templateUrl: './slider-handle.html',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSliderHandle extends AbstractJigsawViewBase implements OnInit {
private _value: number;
/**
* @internal
*/
public _$tooltipRenderHtml: string;
/**
* @NoMarkForCheckRequired
*/
@Input()
public index: number;
/**
* @NoMarkForCheckRequired
*/
@Input()
public get value() {
return this._value;
}
public set value(value) {
this._value = this._slider._verifyValue(value);
this._valueToPos();
this._$tooltipRenderHtml = `<div style="word-break: normal;">${this._value}</div>`
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public tooltipPosition: FloatPosition = 'top';
@Output()
public change = new EventEmitter<number>();
private _valueToPos() {
this._offset = this._slider._transformValueToPos(this.value);
this._setHandleStyle();
}
private _offset: number = 0;
/**
* @internal
*/
public _$handleStyle = {};
private _setHandleStyle() {
if (isNaN(this._offset)) {
return;
}
if (this._slider.vertical) {
this._$handleStyle = {
bottom: this._offset + "%"
}
} else {
this._$handleStyle = {
left: this._offset + "%"
}
}
this._cdr.markForCheck();
}
private _dragging: boolean = false;
private _transformPosToValue(pos: { x: number, y: number }): number {
// 更新取得的滑动条尺寸.
this._slider._refresh();
const dimensions = this._slider._dimensions;
// bottom 在dom中的位置.
const offset = this._slider.vertical ? dimensions.bottom : dimensions.left;
const size = this._slider.vertical ? dimensions.height : dimensions.width;
let posValue = this._slider.vertical ? pos.y - 6 : pos.x;
if (this._slider.vertical) {
posValue = posValue > offset ? offset : posValue;
} else {
posValue = posValue < offset ? offset : posValue;
}
let newValue = Math.abs(posValue - offset) / size * (this._slider.max - this._slider.min) + (this._slider.min - 0); // 保留两位小数
const m = this._calFloat(this._slider.step);
// 解决出现的有时小数点多了N多位.
newValue = Math.round(Math.round(newValue / this._slider.step) * this._slider.step * Math.pow(10, m)) / Math.pow(10, m);
return this._slider._verifyValue(newValue);
}
/**
* 增加步长的计算,计算需要保留小数的位数
*/
private _calFloat(value: number): number {
try {
return this._slider.step.toString().split(".")[1].length;
} catch (e) {
return 0;
}
}
/**
* @internal
*/
public _$startToDrag(): void {
this._tooltip.jigsawFloatCloseTrigger = 'none';
this._dragging = true;
this._registerGlobalEvent();
}
private _removeGlobalEventMouseMoveListener: Function;
private _removeGlobalEventMouseUpListener: Function;
private _registerGlobalEvent(): void {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
this._removeGlobalEventMouseMoveListener = this._render.listen("document", "mousemove", (e) => {
this._updateValuePosition(e);
});
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._removeGlobalEventMouseUpListener = this._render.listen("document", "mouseup", () => {
this._dragging = false;
this._destroyGlobalEvent();
});
}
private _destroyGlobalEvent() {
if (this._removeGlobalEventMouseMoveListener) {
this._removeGlobalEventMouseMoveListener();
}
if (this._removeGlobalEventMouseUpListener) {
this._removeGlobalEventMouseUpListener();
}
this._tooltip.jigsawFloatCloseTrigger = 'mouseleave';
}
/**
* 父组件
* @private
*/
private _slider: JigsawSlider;
constructor(private _render: Renderer2, @Host() @Inject(forwardRef(() => JigsawSlider)) slider: any,
protected _zone: NgZone, private _cdr: ChangeDetectorRef) {
super();
this._slider = slider;
}
@ViewChild(JigsawTooltip)
private _tooltip: JigsawTooltip;
/**
* 改变value的值
*/
private _updateValuePosition(event?) {
if (!this._dragging || this._slider.disabled) {
return;
}
// 防止产生选中其他文本,造成鼠标放开后还可以拖拽的奇怪现象;
event.stopPropagation();
event.preventDefault();
const pos = {
x: event["clientX"],
y: event["clientY"]
};
let newValue = this._transformPosToValue(pos);
if (this.value === newValue) {
return;
}
this.value = newValue;
this._slider._updateValue(this.index, newValue);
this.runAfterMicrotasks(() => {
this._tooltip.reposition();
});
}
ngOnInit() {
this._valueToPos();
}
}
/**
* @description 滑动条组件.
*
* 何时使用
* 当用户需要在数值区间/自定义区间内进行选择时
*/
@WingsTheme('slider.scss')
@Component({
selector: 'jigsaw-slider, j-slider',
templateUrl: './slider.html',
host: {
'[style.width]': 'width',
'[style.height]': 'height',
'[attr.data-theme]': 'theme',
'[class.jigsaw-slider-host]': 'true',
'[class.jigsaw-slider-error]': '!valid',
'[class.jigsaw-slider-vertical]': 'vertical',
},
encapsulation: ViewEncapsulation.None,
providers: [
{provide: NG_VALUE_ACCESSOR, useExisting: forwardRef(() => JigsawSlider), multi: true},
],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class JigsawSlider extends AbstractJigsawComponent implements ControlValueAccessor, OnInit, OnDestroy {
constructor(private _element: ElementRef, private _render: Renderer2,
protected _zone: NgZone, private _changeDetectorRef: ChangeDetectorRef,
// @RequireMarkForCheck 需要用到,勿删
private _injector: Injector) {
super();
}
/**
* @NoMarkForCheckRequired
*/
@Input()
public valid: boolean = true;
// Todo 支持滑动条点击.
@ViewChildren(JigsawSliderHandle)
private _sliderHandle: QueryList<JigsawSliderHandle>;
/**
* @internal
*/
public get _$trackBy() {
return (index: number) => index;
}
/**
* @internal
*/
public _$value: ArrayCollection<number> = new ArrayCollection<number>();
private _removeRefreshCallback: CallbackRemoval = this._getRemoveRefreshCallback();
/**
* slider的当前值, 类型 number | ArrayCollection<number> 支持多触点
*
* @NoMarkForCheckRequired
*/
@Input()
public get value(): number | ArrayCollection<number> {
// 兼容返回单个值, 和多触点的数组;
if (this._$value.length == 1) {
return this._$value[0];
} else {
return this._$value;
}
}
public set value(value: number | ArrayCollection<number>) {
this.writeValue(value);
}
/**
* 设置单个的值。内部使用
* 子级组件需要用到
* @internal
*/
public _updateValue(index: number, value: number) {
this._$value.set(index, value);
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
/**
* 最后重新计算一下,垂直滚动条的位置
* 子级组件需要用到
* @internal
*/
public _refresh() {
this._dimensions = this._element.nativeElement.getBoundingClientRect();
this._changeDetectorRef.markForCheck();
}
/**
* 使 value 支持双向绑定
*/
@Output()
public valueChange = new EventEmitter<number | ArrayCollection<number>>();
// 当滑动条的组件值变化时,对外发出的事件
@Output()
public change = this.valueChange;
private _min: number = 0;
/**
* 可选范围的最小值
*
* @NoMarkForCheckRequired
*/
@Input()
public get min():number {
return this._min;
}
public set min(min: number) {
min = Number(min);
if (isNaN(min)) {
return;
}
this._min = min;
}
private _max: number = 100;
/**
* 输入范围的可选最大值.
*
* @NoMarkForCheckRequired
*/
@Input()
public get max():number {
return this._max;
}
public set max(max: number) {
max = Number(max);
if (isNaN(max)) {
return;
}
this._max = Number(max);
}
private _step: number = 1;
/**
* 每次变化的最小值, 最小支持小数点后两位.
*
* @NoMarkForCheckRequired
*/
@Input()
public get step() {
return this._step;
}
public set step(value: number) {
this._step = value;
}
/**
* 子级组件需要用到
* @internal
*/
public _transformValueToPos(value?) {
// 检验值的合法性, 不合法转换成默认可接受的合法值;
value = this._verifyValue(value);
return (value - this.min) / (this.max - this.min) * 100;
}
/**
* 子级组件需要用到
* @internal
*/
public _dimensions: ClientRect;
/**
* 垂直滑动条 默认 false
*
* @NoMarkForCheckRequired
*/
@Input()
public vertical: boolean = false;
/**
* 是否禁用. 数据类型 boolean, 默认false;
*/
@Input()
@RequireMarkForCheck()
public disabled: boolean = false;
/**
* @internal
*/
public _$trackStyle = {};
private _setTrackStyle() {
let startPos: number = 0;
let trackSize: number = 0;
if (this._$value.length > 1) {
// 多触点
let min: number = Math.min(...this._$value);
let max: number = Math.max(...this._$value);
startPos = this._transformValueToPos(min);
trackSize = Math.abs(this._transformValueToPos(max) - this._transformValueToPos(min));
} else {
// 单触点
trackSize = this._transformValueToPos(this.value);
}
if (this.vertical) {
this._$trackStyle = {
bottom: startPos + "%",
height: trackSize + "%"
}
} else {
this._$trackStyle = {
left: startPos + "%",
width: trackSize + "%"
}
}
}
/**
* @internal
*/
public _$marks: any[] = [];
private _marks: SliderMark[];
/**
* marks 标签 使用格式为 [Object] 其中 Object 必须包含value 及label 可以有style 属性
* 例如: marks = [{value: 20, label: '20 ℃'},
*/
@Input()
@RequireMarkForCheck()
public get marks(): SliderMark[] {
return this._marks;
}
public set marks(value: SliderMark[]) {
this._marks = value;
this._calcMarks();
}
/**
* @internal
* @param markVal
*/
public _$isDotActive(markVal: number): boolean {
if (this._$value.length == 1) {
return markVal < this.value;
} else {
const min = Math.min(...this._$value);
const max = Math.max(...this._$value);
return markVal >= min && markVal <= max;
}
}
private _calcMarks() {
if (!this._marks || !this.initialized) {
return;
}
this._$marks.splice(0, this._$marks.length);
let size = Math.round(100 / this._marks.length);
let margin = -Math.floor(size / 2);
let vertical = this.vertical;
this._marks.forEach(mark => {
const richMark: any = {};
if (vertical) {
richMark.dotStyle = {
bottom: this._transformValueToPos(mark.value) + "%"
};
richMark.labelStyle = {
bottom: this._transformValueToPos(mark.value) + "%",
"margin-bottom": margin + "%"
};
} else {
richMark.dotStyle = {
top: "-2px",
left: this._transformValueToPos(mark.value) + "%"
};
richMark.labelStyle = {
left: this._transformValueToPos(mark.value) + "%",
width: size + "%", "margin-left": margin + "%"
};
}
// 如果用户自定义了样式, 要进行样式的合并;
CommonUtils.extendObject(richMark.labelStyle, mark.style);
richMark.label = mark.label;
richMark.value = mark.value;
this._$marks.push(richMark);
});
}
ngOnInit() {
super.ngOnInit();
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
| this._calcMarks();
// 注册resize事件;
this._resize();
}
private _removeResizeEvent: Function;
private _resize() {
this._zone.runOutsideAngular(() => {
this._removeResizeEvent = this._render.listen("window", "resize", () => {
// 计算slider 的尺寸.
this._dimensions = this._element.nativeElement.getBoundingClientRect();
})
})
}
/**
* 暂没有使用场景.
*/
public ngOnDestroy() {
super.ngOnDestroy();
if (this._removeResizeEvent) {
this._removeResizeEvent();
}
if (this._removeRefreshCallback) {
this._removeRefreshCallback()
}
}
/**
* 校验value的合法性. 大于最大值,取最大值, 小于最小值取最小值
* 子级组件需要用到
* @internal
*/
public _verifyValue(value: number): number {
if (value - this.min < 0 && this.initialized) {
return this.min;
} else if (value - this.max > 0 && this.initialized) {
return this.max;
} else {
return value;
}
}
private _getRemoveRefreshCallback() {
return this._$value.onRefresh(() => {
this._zone.runOutsideAngular(() => this._setTrackStyle());
this._updateSliderHandleValue();
this.valueChange.emit(this.value);
this._propagateChange(this.value);
this._changeDetectorRef.markForCheck();
});
}
/**
* 手动更新handle的值,通过ngFor更新必须value发生变化,如max变化也需要调整位置
* @private
*/
private _updateSliderHandleValue() {
if(!this._sliderHandle || !this._$value) {
return;
}
this._sliderHandle.forEach((item, index) => item.value = this._$value[index])
}
private _propagateChange: any = () => {
};
private _onTouched: any = () => {
};
// ngModel触发的writeValue方法,只会在ngOnInit,ngAfterContentInit,ngAfterViewInit这些生命周期之后才调用
public writeValue(value: any): void {
if (value instanceof Array) {
value = new ArrayCollection(value);
}
if (value instanceof ArrayCollection) {
if (this._$value !== value) {
this._$value = value;
if (this._removeRefreshCallback) {
this._removeRefreshCallback();
}
this._removeRefreshCallback = this._getRemoveRefreshCallback();
}
} else {
this._$value.splice(0, this._$value.length);
this._$value.push(this._verifyValue(+value));
}
// refresh的回调是异步的
this._$value.refresh();
this._changeDetectorRef.markForCheck();
}
public registerOnChange(fn: any): void {
this._propagateChange = fn;
}
public registerOnTouched(fn: any): void {
this._onTouched = fn;
}
@HostListener('click')
onClickTrigger(): void {
if (this.disabled) {
return;
}
this._onTouched();
}
public setDisabledState(disabled: boolean): void {
this.disabled = disabled;
}
} | // 设置标记. | random_line_split |
flow.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from taskflow.utils import reflection
class Flow(six.with_metaclass(abc.ABCMeta)):
"""The base abstract class of all flow implementations.
A flow is a structure that defines relationships between tasks. You can
add tasks and other flows (as subflows) to the flow, and the flow provides
a way to implicitly or explicitly define how they are interdependent.
Exact structure of the relationships is defined by concrete
implementation, while this class defines common interface and adds
human-readable (not necessary unique) name.
NOTE(harlowja): if a flow is placed in another flow as a subflow, a desired
way to compose flows together, then it is valid and permissible that during
execution the subflow & parent flow may be flattened into a new flow. Since
a flow is just a 'structuring' concept this is typically a behavior that
should not be worried about (as it is not visible to the user), but it is
worth mentioning here.
Flows are expected to provide the following methods/properties:
- add
- __len__
- requires
- provides
"""
def __init__(self, name):
self._name = str(name)
@property
def name(self):
"""A non-unique name for this flow (human readable)""" | def __len__(self):
"""Returns how many items are in this flow."""
def __str__(self):
lines = ["%s: %s" % (reflection.get_class_name(self), self.name)]
lines.append("%s" % (len(self)))
return "; ".join(lines)
@abc.abstractmethod
def add(self, *items):
"""Adds a given item/items to this flow."""
@abc.abstractproperty
def requires(self):
"""Browse argument requirement names this flow requires to run."""
@abc.abstractproperty
def provides(self):
"""Browse argument names provided by the flow.""" | return self._name
@abc.abstractmethod | random_line_split |
flow.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from taskflow.utils import reflection
class Flow(six.with_metaclass(abc.ABCMeta)):
"""The base abstract class of all flow implementations.
A flow is a structure that defines relationships between tasks. You can
add tasks and other flows (as subflows) to the flow, and the flow provides
a way to implicitly or explicitly define how they are interdependent.
Exact structure of the relationships is defined by concrete
implementation, while this class defines common interface and adds
human-readable (not necessary unique) name.
NOTE(harlowja): if a flow is placed in another flow as a subflow, a desired
way to compose flows together, then it is valid and permissible that during
execution the subflow & parent flow may be flattened into a new flow. Since
a flow is just a 'structuring' concept this is typically a behavior that
should not be worried about (as it is not visible to the user), but it is
worth mentioning here.
Flows are expected to provide the following methods/properties:
- add
- __len__
- requires
- provides
"""
def __init__(self, name):
self._name = str(name)
@property
def name(self):
"""A non-unique name for this flow (human readable)"""
return self._name
@abc.abstractmethod
def | (self):
"""Returns how many items are in this flow."""
def __str__(self):
lines = ["%s: %s" % (reflection.get_class_name(self), self.name)]
lines.append("%s" % (len(self)))
return "; ".join(lines)
@abc.abstractmethod
def add(self, *items):
"""Adds a given item/items to this flow."""
@abc.abstractproperty
def requires(self):
"""Browse argument requirement names this flow requires to run."""
@abc.abstractproperty
def provides(self):
"""Browse argument names provided by the flow."""
| __len__ | identifier_name |
flow.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from taskflow.utils import reflection
class Flow(six.with_metaclass(abc.ABCMeta)):
"""The base abstract class of all flow implementations.
A flow is a structure that defines relationships between tasks. You can
add tasks and other flows (as subflows) to the flow, and the flow provides
a way to implicitly or explicitly define how they are interdependent.
Exact structure of the relationships is defined by concrete
implementation, while this class defines common interface and adds
human-readable (not necessary unique) name.
NOTE(harlowja): if a flow is placed in another flow as a subflow, a desired
way to compose flows together, then it is valid and permissible that during
execution the subflow & parent flow may be flattened into a new flow. Since
a flow is just a 'structuring' concept this is typically a behavior that
should not be worried about (as it is not visible to the user), but it is
worth mentioning here.
Flows are expected to provide the following methods/properties:
- add
- __len__
- requires
- provides
"""
def __init__(self, name):
self._name = str(name)
@property
def name(self):
"""A non-unique name for this flow (human readable)"""
return self._name
@abc.abstractmethod
def __len__(self):
"""Returns how many items are in this flow."""
def __str__(self):
lines = ["%s: %s" % (reflection.get_class_name(self), self.name)]
lines.append("%s" % (len(self)))
return "; ".join(lines)
@abc.abstractmethod
def add(self, *items):
"""Adds a given item/items to this flow."""
@abc.abstractproperty
def requires(self):
|
@abc.abstractproperty
def provides(self):
"""Browse argument names provided by the flow."""
| """Browse argument requirement names this flow requires to run.""" | identifier_body |
app.module.ts | import { FormsModule } from '@angular/forms';
import { BrowserModule } from '@angular/platform-browser';
import { SwingModule } from 'angular2-swing';
import {Application} from './app.component';
import {NgModule, ErrorHandler} from '@angular/core';
import {Storage} from '@ionic/storage';
import {IonicApp, IonicModule, IonicErrorHandler} from 'ionic-angular';
import {COMPONENTS, PROVIDERS} from '../const';
/**
* AppModule is used for configuring the whole environment,
* setting up every module, provider and component required
* to make the app to work.
* @class {AppModule}
*/
@NgModule({
declarations: [ Application, ...COMPONENTS],
entryComponents: COMPONENTS,
imports: [
BrowserModule,
FormsModule, | SwingModule,
IonicModule.forRoot(Application)
],
bootstrap: [IonicApp],
providers: [
{provide: ErrorHandler, useClass: IonicErrorHandler},
Storage,
...PROVIDERS
]
})
export class AppModule {} | random_line_split | |
app.module.ts | import { FormsModule } from '@angular/forms';
import { BrowserModule } from '@angular/platform-browser';
import { SwingModule } from 'angular2-swing';
import {Application} from './app.component';
import {NgModule, ErrorHandler} from '@angular/core';
import {Storage} from '@ionic/storage';
import {IonicApp, IonicModule, IonicErrorHandler} from 'ionic-angular';
import {COMPONENTS, PROVIDERS} from '../const';
/**
* AppModule is used for configuring the whole environment,
* setting up every module, provider and component required
* to make the app to work.
* @class {AppModule}
*/
@NgModule({
declarations: [ Application, ...COMPONENTS],
entryComponents: COMPONENTS,
imports: [
BrowserModule,
FormsModule,
SwingModule,
IonicModule.forRoot(Application)
],
bootstrap: [IonicApp],
providers: [
{provide: ErrorHandler, useClass: IonicErrorHandler},
Storage,
...PROVIDERS
]
})
export class | {} | AppModule | identifier_name |
authorization_code.py | #! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect, state):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
|
if state:
query['state'] = state
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant(
'/oauth/access_token', {
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
| query['redirect_uri'] = redirect | conditional_block |
authorization_code.py | #! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect, state):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
if state:
query['state'] = state
return url + urlencode(query)
def | (self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant(
'/oauth/access_token', {
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
| exchange_code | identifier_name |
authorization_code.py | #! /usr/bin/env python | from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect, state):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
if state:
query['state'] = state
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant(
'/oauth/access_token', {
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope'] | # encoding: utf-8
| random_line_split |
authorization_code.py | #! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
| """Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect, state):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
if state:
query['state'] = state
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant(
'/oauth/access_token', {
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope'] | identifier_body | |
client-policies.ts | /// <reference path="../apimanPlugin.ts"/>
/// <reference path="../rpc.ts"/>
module Apiman {
export var ClientPoliciesController = _module.controller("Apiman.ClientPoliciesController",
['$q', '$scope', '$location', 'PageLifecycle', 'ClientEntityLoader', 'OrgSvcs', 'Dialogs', '$routeParams', 'Configuration', 'EntityStatusSvc', 'CurrentUser',
($q, $scope, $location, PageLifecycle, ClientEntityLoader, OrgSvcs, Dialogs, $routeParams, Configuration, EntityStatusSvc, CurrentUser) => {
var params = $routeParams;
$scope.organizationId = params.org;
$scope.tab = 'policies';
$scope.version = params.version;
$scope.showMetrics = Configuration.ui.metrics;
var removePolicy = function(policy) {
angular.forEach($scope.policies, function(p, index) {
if (policy === p) {
$scope.policies.splice(index, 1);
}
});
};
$scope.removePolicy = function(policy) {
Dialogs.confirm('Confirm Remove Policy', 'Do you really want to remove this policy from the client app?', function() {
OrgSvcs.delete({ organizationId: params.org, entityType: 'clients', entityId: params.client, versionsOrActivity: 'versions', version: params.version, policiesOrActivity: 'policies', policyId: policy.id }, function(reply) {
removePolicy(policy);
EntityStatusSvc.getEntity().modifiedOn = Date.now();
EntityStatusSvc.getEntity().modifiedBy = CurrentUser.getCurrentUser();
}, PageLifecycle.handleError);
});
};
$scope.reorderPolicies = function(reorderedPolicies) {
var policyChainBean = {
policies: reorderedPolicies
};
OrgSvcs.save({ organizationId: params.org, entityType: 'clients', entityId: params.client, versionsOrActivity: 'versions', version: params.version, policiesOrActivity: 'reorderPolicies' },
policyChainBean,
function() {
Logger.debug("Reordering POSTed successfully");
EntityStatusSvc.getEntity().modifiedOn = Date.now();
EntityStatusSvc.getEntity().modifiedBy = CurrentUser.getCurrentUser();
}, function() {
Logger.debug("Reordering POST failed.")
});
}
var pageData = ClientEntityLoader.getCommonData($scope, $location);
pageData = angular.extend(pageData, {
policies: $q(function(resolve, reject) {
OrgSvcs.query({ organizationId: params.org, entityType: 'clients', entityId: params.client, versionsOrActivity: 'versions', version: params.version, policiesOrActivity: 'policies' }, function(policies) {
| }, reject);
})
});
PageLifecycle.loadPage('ClientPolicies', 'clientView', pageData, $scope, function() {
PageLifecycle.setPageTitle('client-policies', [ $scope.client.name ]);
});
}])
} | resolve(policies);
| random_line_split |
client-policies.ts | /// <reference path="../apimanPlugin.ts"/>
/// <reference path="../rpc.ts"/>
module Apiman {
export var ClientPoliciesController = _module.controller("Apiman.ClientPoliciesController",
['$q', '$scope', '$location', 'PageLifecycle', 'ClientEntityLoader', 'OrgSvcs', 'Dialogs', '$routeParams', 'Configuration', 'EntityStatusSvc', 'CurrentUser',
($q, $scope, $location, PageLifecycle, ClientEntityLoader, OrgSvcs, Dialogs, $routeParams, Configuration, EntityStatusSvc, CurrentUser) => {
var params = $routeParams;
$scope.organizationId = params.org;
$scope.tab = 'policies';
$scope.version = params.version;
$scope.showMetrics = Configuration.ui.metrics;
var removePolicy = function(policy) {
angular.forEach($scope.policies, function(p, index) {
if (policy === p) |
});
};
$scope.removePolicy = function(policy) {
Dialogs.confirm('Confirm Remove Policy', 'Do you really want to remove this policy from the client app?', function() {
OrgSvcs.delete({ organizationId: params.org, entityType: 'clients', entityId: params.client, versionsOrActivity: 'versions', version: params.version, policiesOrActivity: 'policies', policyId: policy.id }, function(reply) {
removePolicy(policy);
EntityStatusSvc.getEntity().modifiedOn = Date.now();
EntityStatusSvc.getEntity().modifiedBy = CurrentUser.getCurrentUser();
}, PageLifecycle.handleError);
});
};
$scope.reorderPolicies = function(reorderedPolicies) {
var policyChainBean = {
policies: reorderedPolicies
};
OrgSvcs.save({ organizationId: params.org, entityType: 'clients', entityId: params.client, versionsOrActivity: 'versions', version: params.version, policiesOrActivity: 'reorderPolicies' },
policyChainBean,
function() {
Logger.debug("Reordering POSTed successfully");
EntityStatusSvc.getEntity().modifiedOn = Date.now();
EntityStatusSvc.getEntity().modifiedBy = CurrentUser.getCurrentUser();
}, function() {
Logger.debug("Reordering POST failed.")
});
}
var pageData = ClientEntityLoader.getCommonData($scope, $location);
pageData = angular.extend(pageData, {
policies: $q(function(resolve, reject) {
OrgSvcs.query({ organizationId: params.org, entityType: 'clients', entityId: params.client, versionsOrActivity: 'versions', version: params.version, policiesOrActivity: 'policies' }, function(policies) {
resolve(policies);
}, reject);
})
});
PageLifecycle.loadPage('ClientPolicies', 'clientView', pageData, $scope, function() {
PageLifecycle.setPageTitle('client-policies', [ $scope.client.name ]);
});
}])
}
| {
$scope.policies.splice(index, 1);
} | conditional_block |
AttributesProcessor.ts | /*
* Copyright The OpenTelemetry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Context } from '@opentelemetry/api';
import { Attributes } from '@opentelemetry/api-metrics-wip';
/**
* The {@link AttributesProcessor} is responsible for customizing which
* attribute(s) are to be reported as metrics dimension(s) and adding
* additional dimension(s) from the {@link Context}.
*/
export abstract class AttributesProcessor {
/**
* Process the metric instrument attributes.
*
* @param incoming The metric instrument attributes.
* @param context The active context when the instrument is synchronous.
* `undefined` otherwise.
*/
abstract process(incoming: Attributes, context?: Context): Attributes;
static Noop() {
return NOOP;
}
}
export class NoopAttributesProcessor extends AttributesProcessor {
process(incoming: Attributes, _context?: Context) { | }
}
/**
* {@link AttributesProcessor} that filters by allowed attribute names and drops any names that are not in the
* allow list.
*/
export class FilteringAttributesProcessor extends AttributesProcessor {
constructor(private _allowedAttributeNames: string[]) {
super();
}
process(incoming: Attributes, _context: Context): Attributes {
const filteredAttributes: Attributes = {};
Object.keys(incoming)
.filter(attributeName => this._allowedAttributeNames.includes(attributeName))
.forEach(attributeName => filteredAttributes[attributeName] = incoming[attributeName]);
return filteredAttributes;
}
}
const NOOP = new NoopAttributesProcessor; | return incoming; | random_line_split |
AttributesProcessor.ts | /*
* Copyright The OpenTelemetry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Context } from '@opentelemetry/api';
import { Attributes } from '@opentelemetry/api-metrics-wip';
/**
* The {@link AttributesProcessor} is responsible for customizing which
* attribute(s) are to be reported as metrics dimension(s) and adding
* additional dimension(s) from the {@link Context}.
*/
export abstract class AttributesProcessor {
/**
* Process the metric instrument attributes.
*
* @param incoming The metric instrument attributes.
* @param context The active context when the instrument is synchronous.
* `undefined` otherwise.
*/
abstract process(incoming: Attributes, context?: Context): Attributes;
static Noop() |
}
export class NoopAttributesProcessor extends AttributesProcessor {
process(incoming: Attributes, _context?: Context) {
return incoming;
}
}
/**
* {@link AttributesProcessor} that filters by allowed attribute names and drops any names that are not in the
* allow list.
*/
export class FilteringAttributesProcessor extends AttributesProcessor {
constructor(private _allowedAttributeNames: string[]) {
super();
}
process(incoming: Attributes, _context: Context): Attributes {
const filteredAttributes: Attributes = {};
Object.keys(incoming)
.filter(attributeName => this._allowedAttributeNames.includes(attributeName))
.forEach(attributeName => filteredAttributes[attributeName] = incoming[attributeName]);
return filteredAttributes;
}
}
const NOOP = new NoopAttributesProcessor;
| {
return NOOP;
} | identifier_body |
AttributesProcessor.ts | /*
* Copyright The OpenTelemetry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Context } from '@opentelemetry/api';
import { Attributes } from '@opentelemetry/api-metrics-wip';
/**
* The {@link AttributesProcessor} is responsible for customizing which
* attribute(s) are to be reported as metrics dimension(s) and adding
* additional dimension(s) from the {@link Context}.
*/
export abstract class AttributesProcessor {
/**
* Process the metric instrument attributes.
*
* @param incoming The metric instrument attributes.
* @param context The active context when the instrument is synchronous.
* `undefined` otherwise.
*/
abstract process(incoming: Attributes, context?: Context): Attributes;
static Noop() {
return NOOP;
}
}
export class NoopAttributesProcessor extends AttributesProcessor {
| (incoming: Attributes, _context?: Context) {
return incoming;
}
}
/**
* {@link AttributesProcessor} that filters by allowed attribute names and drops any names that are not in the
* allow list.
*/
export class FilteringAttributesProcessor extends AttributesProcessor {
constructor(private _allowedAttributeNames: string[]) {
super();
}
process(incoming: Attributes, _context: Context): Attributes {
const filteredAttributes: Attributes = {};
Object.keys(incoming)
.filter(attributeName => this._allowedAttributeNames.includes(attributeName))
.forEach(attributeName => filteredAttributes[attributeName] = incoming[attributeName]);
return filteredAttributes;
}
}
const NOOP = new NoopAttributesProcessor;
| process | identifier_name |
theme.js | $(function () {
$(window).scroll(function() {
if ($(".navbar").offset().top>30) {
$(".navbar-fixed-top").addClass("sticky");
}
else { |
// Flex
if ($(".flexslider").length) {
$('.flexslider').flexslider();
}
servicesOptions.initialize();
staticHeader.initialize();
portfolioItem.initialize();
// segun esto corrige el pedo del dropdown en tablets and such
// hay que testearlo!
$('.dropdown-toggle').click(function(e) {
e.preventDefault();
setTimeout($.proxy(function() {
if ('ontouchstart' in document.documentElement) {
$(this).siblings('.dropdown-backdrop').off().remove();
}
}, this), 0);
});
});
var portfolioItem = {
initialize: function () {
var $container = $("#portfolio_tem .left_box");
var $bigPics = $container.find(".big img");
var $thumbs = $container.find(".thumbs .thumb");
$bigPics.hide().eq(0).show();
$thumbs.click(function (e) {
e.preventDefault();
var index = $thumbs.index(this);
$bigPics.fadeOut();
$bigPics.eq(index).fadeIn();
});
}
}
var staticHeader = {
initialize: function () {
if ($(".navbar-static-top").length) {
$("body").css("padding-top", 0);
}
}
}
var servicesOptions = {
initialize: function () {
var $container = $(".services_circles");
var $texts = $container.find(".description .text");
var $circles = $container.find(".areas .circle");
$circles.click(function () {
var index = $circles.index(this);
$texts.fadeOut();
$texts.eq(index).fadeIn();
$circles.removeClass("active");
$(this).addClass("active");
});
}
}
$(document).ready(function(){
$("#menuContent div").hide();
$("#menuContent div:first").show();
$("#subMenu li:first").addClass("active");
$("#subMenu li a").click(function(){
$('#subMenu li').removeClass("active");
$(this).parent().addClass("active");
var current = $(this).attr("href");
$("#menuContent div:visible").fadeOut("fast");
$("#menuContent").animate({"height":$(current).height()},function(){
$(current).fadeIn("fast");
});
return false;
});
}); | $(".navbar-fixed-top").removeClass("sticky");
}
}); | random_line_split |
theme.js | $(function () {
$(window).scroll(function() {
if ($(".navbar").offset().top>30) {
$(".navbar-fixed-top").addClass("sticky");
}
else |
});
// Flex
if ($(".flexslider").length) {
$('.flexslider').flexslider();
}
servicesOptions.initialize();
staticHeader.initialize();
portfolioItem.initialize();
// segun esto corrige el pedo del dropdown en tablets and such
// hay que testearlo!
$('.dropdown-toggle').click(function(e) {
e.preventDefault();
setTimeout($.proxy(function() {
if ('ontouchstart' in document.documentElement) {
$(this).siblings('.dropdown-backdrop').off().remove();
}
}, this), 0);
});
});
var portfolioItem = {
initialize: function () {
var $container = $("#portfolio_tem .left_box");
var $bigPics = $container.find(".big img");
var $thumbs = $container.find(".thumbs .thumb");
$bigPics.hide().eq(0).show();
$thumbs.click(function (e) {
e.preventDefault();
var index = $thumbs.index(this);
$bigPics.fadeOut();
$bigPics.eq(index).fadeIn();
});
}
}
var staticHeader = {
initialize: function () {
if ($(".navbar-static-top").length) {
$("body").css("padding-top", 0);
}
}
}
var servicesOptions = {
initialize: function () {
var $container = $(".services_circles");
var $texts = $container.find(".description .text");
var $circles = $container.find(".areas .circle");
$circles.click(function () {
var index = $circles.index(this);
$texts.fadeOut();
$texts.eq(index).fadeIn();
$circles.removeClass("active");
$(this).addClass("active");
});
}
}
$(document).ready(function(){
$("#menuContent div").hide();
$("#menuContent div:first").show();
$("#subMenu li:first").addClass("active");
$("#subMenu li a").click(function(){
$('#subMenu li').removeClass("active");
$(this).parent().addClass("active");
var current = $(this).attr("href");
$("#menuContent div:visible").fadeOut("fast");
$("#menuContent").animate({"height":$(current).height()},function(){
$(current).fadeIn("fast");
});
return false;
});
});
| {
$(".navbar-fixed-top").removeClass("sticky");
} | conditional_block |
vscalefsd.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn | () {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM5)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Zero), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 242, 213, 254, 45, 242], OperandSize::Dword)
}
fn vscalefsd_2() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM7)), operand3: Some(Indirect(ECX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 242, 197, 139, 45, 9], OperandSize::Dword)
}
fn vscalefsd_3() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM23)), operand3: Some(Direct(XMM20)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 178, 197, 214, 45, 252], OperandSize::Qword)
}
fn vscalefsd_4() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM12)), operand3: Some(IndirectDisplaced(RAX, 901452683, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 242, 157, 143, 45, 144, 139, 19, 187, 53], OperandSize::Qword)
}
| vscalefsd_1 | identifier_name |
vscalefsd.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vscalefsd_1() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM5)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Zero), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 242, 213, 254, 45, 242], OperandSize::Dword)
} |
fn vscalefsd_2() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM7)), operand3: Some(Indirect(ECX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 242, 197, 139, 45, 9], OperandSize::Dword)
}
fn vscalefsd_3() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM23)), operand3: Some(Direct(XMM20)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 178, 197, 214, 45, 252], OperandSize::Qword)
}
fn vscalefsd_4() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM12)), operand3: Some(IndirectDisplaced(RAX, 901452683, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 242, 157, 143, 45, 144, 139, 19, 187, 53], OperandSize::Qword)
} | random_line_split | |
vscalefsd.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vscalefsd_1() |
fn vscalefsd_2() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM7)), operand3: Some(Indirect(ECX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 242, 197, 139, 45, 9], OperandSize::Dword)
}
fn vscalefsd_3() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM23)), operand3: Some(Direct(XMM20)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 178, 197, 214, 45, 252], OperandSize::Qword)
}
fn vscalefsd_4() {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM12)), operand3: Some(IndirectDisplaced(RAX, 901452683, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 242, 157, 143, 45, 144, 139, 19, 187, 53], OperandSize::Qword)
}
| {
run_test(&Instruction { mnemonic: Mnemonic::VSCALEFSD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM5)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Zero), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 242, 213, 254, 45, 242], OperandSize::Dword)
} | identifier_body |
fetch.rs | use cargo::ops;
use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::find_root_manifest_for_cwd;
#[derive(RustcDecodable)]
struct | {
flag_manifest_path: Option<String>,
flag_verbose: bool,
flag_quiet: bool,
flag_color: Option<String>,
}
pub const USAGE: &'static str = "
Fetch dependencies of a package from the network.
Usage:
cargo fetch [options]
Options:
-h, --help Print this message
--manifest-path PATH Path to the manifest to fetch dependencies for
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
the lockfile changes.
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet));
try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
try!(ops::fetch(&root, config).map_err(|e| {
CliError::from_boxed(e, 101)
}));
Ok(None)
}
| Options | identifier_name |
fetch.rs | use cargo::ops;
use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::find_root_manifest_for_cwd;
#[derive(RustcDecodable)]
struct Options {
flag_manifest_path: Option<String>,
flag_verbose: bool,
flag_quiet: bool,
flag_color: Option<String>,
}
pub const USAGE: &'static str = "
Fetch dependencies of a package from the network.
Usage:
cargo fetch [options]
Options:
-h, --help Print this message
--manifest-path PATH Path to the manifest to fetch dependencies for
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
the lockfile changes.
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> | {
try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet));
try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
try!(ops::fetch(&root, config).map_err(|e| {
CliError::from_boxed(e, 101)
}));
Ok(None)
} | identifier_body | |
fetch.rs | use cargo::ops;
use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::find_root_manifest_for_cwd;
#[derive(RustcDecodable)]
struct Options {
flag_manifest_path: Option<String>,
flag_verbose: bool,
flag_quiet: bool,
flag_color: Option<String>,
}
pub const USAGE: &'static str = "
Fetch dependencies of a package from the network.
Usage:
cargo fetch [options]
Options:
-h, --help Print this message
--manifest-path PATH Path to the manifest to fetch dependencies for
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout | available. The network is never touched after a `cargo fetch` unless
the lockfile changes.
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet));
try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
try!(ops::fetch(&root, config).map_err(|e| {
CliError::from_boxed(e, 101)
}));
Ok(None)
} | --color WHEN Coloring: auto, always, never
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally | random_line_split |
Login.py | # !/usr/bin/python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import cookielib
import base64
import re
import json
import hashlib
'''该登录程序是参考网上写的'''
cj = cookielib.LWPCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cj)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
postdata = {
'entry': 'weibo',
'gateway': '1',
'from': '',
'savestate': '7',
'userticket': '1',
'ssosimplelogin': '1',
'vsnf': '1',
'vsnval': '',
'su': '',
'service': 'miniblog',
'servertime': '',
'nonce': '',
'pwencode': 'wsse',
'sp': '',
'encoding': 'UTF-8',
'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
'returntype': 'META'
}
def get_servertime():
url = 'http://login.sina.com.cn/sso/prelogin.php?entry=weibo&callback=sinaSSOController.preloginCallBack&su=dW5kZWZpbmVk&client=ssologin.js(v1.3.18)&_=1329806375939'
data = urllib2.urlopen(url).read()
p = re.compile('\((.*)\)')
try:
json_data = p.search(data).group(1)
data = json.loads(json_data)
servertime = str(data['servertime'])
nonce = data['nonce']
return servertime, nonce
except:
print 'Get severtime error!'
return None
def get_pwd(pwd, servertime, nonce):
pwd1 = hashlib.sha1(pwd).hexdigest()
pwd2 = hashlib.sha1(pwd1).hexdigest()
pwd3_ = pwd2 + servertime + nonce
pwd3 = hashlib.sha1(pwd3_).hexdigest()
return pwd3
def get_user(username):
| _ = urllib.quote(username)
username = base64.encodestring(username_)[:-1]
return username
def enableCookie():
cookiejar = cookielib.LWPCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cookiejar)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
def login( username, pwd ):
url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.3.18)'
#enableCookie()
try:
servertime, nonce = get_servertime()
except:
return
global postdata
postdata['servertime'] = servertime
postdata['nonce'] = nonce
postdata['su'] = get_user(username)
postdata['sp'] = get_pwd(pwd, servertime, nonce)
postdata = urllib.urlencode(postdata)
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:8.0) Gecko/20100101 Firefox/8.0'}
req = urllib2.Request(
url = url,
data = postdata,
headers = headers
)
result = urllib2.urlopen(req)
text = result.read()
p = re.compile('location\.replace\(\'(.*?)\'\)')
try:
login_url = p.search(text).group(1)
#print login_url
urllib2.urlopen(login_url)
print "Login success!"
return True
except:
print 'Login error!'
return False
| username | identifier_name |
Login.py | # !/usr/bin/python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import cookielib
import base64
import re
import json
import hashlib
'''该登录程序是参考网上写的'''
cj = cookielib.LWPCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cj)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
postdata = {
'entry': 'weibo',
'gateway': '1',
'from': '',
'savestate': '7',
'userticket': '1',
'ssosimplelogin': '1',
'vsnf': '1',
'vsnval': '',
'su': '',
'service': 'miniblog',
'servertime': '',
'nonce': '',
'pwencode': 'wsse',
'sp': '',
'encoding': 'UTF-8',
'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
'returntype': 'META'
}
def get_servertime():
url = 'http://login.sina.com.cn/sso/prelogin.php?entry=weibo&callback=sinaSSOController.preloginCallBack&su=dW5kZWZpbmVk&client=ssologin.js(v1.3.18)&_=1329806375939'
data = urllib2.urlopen(url).read()
p = re.compile('\((.*)\)')
try:
json_data = p.search(data).group(1)
data = json.loads(json_data)
servertime = str(data['servertime'])
nonce = data['nonce']
return servertime, nonce
except:
print 'Get severtime error!'
return None
def get_pwd(pwd, servertime, nonce):
pwd1 = hashlib.sha1(pwd).hexdigest()
pwd2 = hashlib.sha1(pwd1).hexdigest()
pwd3_ = pwd2 + servertime + nonce
pwd3 = hashlib.sha1(pwd3_).hexdigest()
return pwd3
def get_user(username):
username_ = urllib.quote(username)
username = base64.encodestring(username_)[:-1]
return username
def enableCookie():
cookiejar = cookielib.LW | wd ):
url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.3.18)'
#enableCookie()
try:
servertime, nonce = get_servertime()
except:
return
global postdata
postdata['servertime'] = servertime
postdata['nonce'] = nonce
postdata['su'] = get_user(username)
postdata['sp'] = get_pwd(pwd, servertime, nonce)
postdata = urllib.urlencode(postdata)
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:8.0) Gecko/20100101 Firefox/8.0'}
req = urllib2.Request(
url = url,
data = postdata,
headers = headers
)
result = urllib2.urlopen(req)
text = result.read()
p = re.compile('location\.replace\(\'(.*?)\'\)')
try:
login_url = p.search(text).group(1)
#print login_url
urllib2.urlopen(login_url)
print "Login success!"
return True
except:
print 'Login error!'
return False
| PCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cookiejar)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
def login( username, p | identifier_body |
Login.py | # !/usr/bin/python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import cookielib
import base64
import re
import json
import hashlib
'''该登录程序是参考网上写的'''
cj = cookielib.LWPCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cj)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
postdata = {
'entry': 'weibo',
'gateway': '1',
'from': '',
'savestate': '7',
'userticket': '1',
'ssosimplelogin': '1',
'vsnf': '1',
'vsnval': '',
'su': '',
'service': 'miniblog',
'servertime': '',
'nonce': '',
'pwencode': 'wsse',
'sp': '',
'encoding': 'UTF-8',
'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
'returntype': 'META'
}
def get_servertime():
url = 'http://login.sina.com.cn/sso/prelogin.php?entry=weibo&callback=sinaSSOController.preloginCallBack&su=dW5kZWZpbmVk&client=ssologin.js(v1.3.18)&_=1329806375939'
data = urllib2.urlopen(url).read()
p = re.compile('\((.*)\)')
try:
json_data = p.search(data).group(1)
data = json.loads(json_data)
servertime = str(data['servertime'])
nonce = data['nonce']
return servertime, nonce
except:
print 'Get severtime error!'
return None
def get_pwd(pwd, servertime, nonce):
pwd1 = hashlib.sha1(pwd).hexdigest()
pwd2 = hashlib.sha1(pwd1).hexdigest()
pwd3_ = pwd2 + servertime + nonce
pwd3 = hashlib.sha1(pwd3_).hexdigest()
return pwd3
def get_user(username):
username_ = urllib.quote(username)
username = base64.encodestring(username_)[:-1]
return username
def enableCookie():
cookiejar = cookielib.LWPCookieJar()
cookie_support = urllib2.HTTPCookieProcessor(cookiejar)
opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
def login( username, pwd ):
url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.3.18)'
#enableCookie()
try:
servertime, nonce = get_servertime()
except:
return
global postdata
postdata['servertime'] = servertime
postdata['nonce'] = nonce
postdata['su'] = get_user(username)
postdata['sp'] = get_pwd(pwd, servertime, nonce)
postdata = urllib.urlencode(postdata)
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:8.0) Gecko/20100101 Firefox/8.0'}
req = urllib2.Request(
url = url,
data = postdata,
headers = headers
)
result = urllib2.urlopen(req)
text = result.read()
p = re.compile('location\.replace\(\'(.*?)\'\)')
try: | urllib2.urlopen(login_url)
print "Login success!"
return True
except:
print 'Login error!'
return False | login_url = p.search(text).group(1)
#print login_url | random_line_split |
mod.rs | #![stable(feature = "futures_api", since = "1.36.0")]
//! Asynchronous values.
use crate::{
ops::{Generator, GeneratorState},
pin::Pin,
ptr::NonNull,
task::{Context, Poll},
};
mod future;
mod into_future;
mod pending;
mod poll_fn;
mod ready;
#[stable(feature = "futures_api", since = "1.36.0")]
pub use self::future::Future;
#[unstable(feature = "into_future", issue = "67644")]
pub use into_future::IntoFuture;
#[stable(feature = "future_readiness_fns", since = "1.48.0")]
pub use pending::{pending, Pending};
#[stable(feature = "future_readiness_fns", since = "1.48.0")]
pub use ready::{ready, Ready};
#[unstable(feature = "future_poll_fn", issue = "72302")]
pub use poll_fn::{poll_fn, PollFn};
/// This type is needed because:
///
/// a) Generators cannot implement `for<'a, 'b> Generator<&'a mut Context<'b>>`, so we need to pass
/// a raw pointer (see <https://github.com/rust-lang/rust/issues/68923>).
/// b) Raw pointers and `NonNull` aren't `Send` or `Sync`, so that would make every single future
/// non-Send/Sync as well, and we don't want that.
///
/// It also simplifies the HIR lowering of `.await`.
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[derive(Debug, Copy, Clone)]
pub struct ResumeTy(NonNull<Context<'static>>);
#[unstable(feature = "gen_future", issue = "50547")]
// SAFETY: per the `ResumeTy` docs above, the `NonNull` wrapper exists only so
// that futures are not made non-Send/non-Sync by the raw context pointer; the
// pointer is only dereferenced by the `.await` lowering while the future is
// being polled on the current thread.
unsafe impl Send for ResumeTy {}
#[unstable(feature = "gen_future", issue = "50547")]
// SAFETY: same reasoning as the `Send` impl above — the pointer is never
// dereferenced concurrently from another thread.
unsafe impl Sync for ResumeTy {}
/// Wrap a generator in a future.
///
/// This function returns a `GenFuture` underneath, but hides it in `impl Trait` to give
/// better error messages (`impl Future` rather than `GenFuture<[closure.....]>`).
// This is `const` to avoid extra errors after we recover from `const async fn`
#[lang = "from_generator"]
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[rustc_const_unstable(feature = "gen_future", issue = "50547")]
#[inline]
pub const fn from_generator<T>(gen: T) -> impl Future<Output = T::Return>
where
    T: Generator<ResumeTy, Yield = ()>,
{
    // Thin newtype wrapper so we can implement `Future` for the generator.
    #[rustc_diagnostic_item = "gen_future"]
    struct GenFuture<T: Generator<ResumeTy, Yield = ()>>(T);

    // We rely on the fact that async/await futures are immovable in order to create
    // self-referential borrows in the underlying generator.
    impl<T: Generator<ResumeTy, Yield = ()>> !Unpin for GenFuture<T> {}

    impl<T: Generator<ResumeTy, Yield = ()>> Future for GenFuture<T> {
        type Output = T::Return;
        // Each generator yield maps to `Poll::Pending`; completion maps to
        // `Poll::Ready` with the generator's return value.
        fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
            // SAFETY: Safe because we're !Unpin + !Drop, and this is just a field projection.
            let gen = unsafe { Pin::map_unchecked_mut(self, |s| &mut s.0) };

            // Resume the generator, turning the `&mut Context` into a `NonNull` raw pointer. The
            // `.await` lowering will safely cast that back to a `&mut Context`.
            match gen.resume(ResumeTy(NonNull::from(cx).cast::<Context<'static>>())) {
                GeneratorState::Yielded(()) => Poll::Pending,
                GeneratorState::Complete(x) => Poll::Ready(x),
            }
        }
    }

    // `impl Trait` in the return type hides the concrete wrapper from callers.
    GenFuture(gen)
}
#[lang = "get_context"]
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[inline]
pub unsafe fn | <'a, 'b>(cx: ResumeTy) -> &'a mut Context<'b> {
// SAFETY: the caller must guarantee that `cx.0` is a valid pointer
// that fulfills all the requirements for a mutable reference.
unsafe { &mut *cx.0.as_ptr().cast() }
}
| get_context | identifier_name |
mod.rs | #![stable(feature = "futures_api", since = "1.36.0")]
//! Asynchronous values.
use crate::{
ops::{Generator, GeneratorState},
pin::Pin,
ptr::NonNull,
task::{Context, Poll},
};
mod future;
mod into_future;
mod pending;
mod poll_fn;
mod ready;
#[stable(feature = "futures_api", since = "1.36.0")]
pub use self::future::Future;
#[unstable(feature = "into_future", issue = "67644")]
pub use into_future::IntoFuture;
#[stable(feature = "future_readiness_fns", since = "1.48.0")]
pub use pending::{pending, Pending};
#[stable(feature = "future_readiness_fns", since = "1.48.0")]
pub use ready::{ready, Ready};
#[unstable(feature = "future_poll_fn", issue = "72302")]
pub use poll_fn::{poll_fn, PollFn};
/// This type is needed because:
///
/// a) Generators cannot implement `for<'a, 'b> Generator<&'a mut Context<'b>>`, so we need to pass
/// a raw pointer (see <https://github.com/rust-lang/rust/issues/68923>).
/// b) Raw pointers and `NonNull` aren't `Send` or `Sync`, so that would make every single future
/// non-Send/Sync as well, and we don't want that.
///
/// It also simplifies the HIR lowering of `.await`.
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[derive(Debug, Copy, Clone)]
pub struct ResumeTy(NonNull<Context<'static>>);
#[unstable(feature = "gen_future", issue = "50547")]
unsafe impl Send for ResumeTy {}
#[unstable(feature = "gen_future", issue = "50547")]
unsafe impl Sync for ResumeTy {}
/// Wrap a generator in a future.
///
/// This function returns a `GenFuture` underneath, but hides it in `impl Trait` to give
/// better error messages (`impl Future` rather than `GenFuture<[closure.....]>`).
// This is `const` to avoid extra errors after we recover from `const async fn`
#[lang = "from_generator"]
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[rustc_const_unstable(feature = "gen_future", issue = "50547")]
#[inline]
pub const fn from_generator<T>(gen: T) -> impl Future<Output = T::Return>
where
T: Generator<ResumeTy, Yield = ()>,
{
#[rustc_diagnostic_item = "gen_future"]
struct GenFuture<T: Generator<ResumeTy, Yield = ()>>(T);
// We rely on the fact that async/await futures are immovable in order to create
// self-referential borrows in the underlying generator.
impl<T: Generator<ResumeTy, Yield = ()>> !Unpin for GenFuture<T> {}
impl<T: Generator<ResumeTy, Yield = ()>> Future for GenFuture<T> {
type Output = T::Return;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
// SAFETY: Safe because we're !Unpin + !Drop, and this is just a field projection.
let gen = unsafe { Pin::map_unchecked_mut(self, |s| &mut s.0) };
// Resume the generator, turning the `&mut Context` into a `NonNull` raw pointer. The
// `.await` lowering will safely cast that back to a `&mut Context`.
match gen.resume(ResumeTy(NonNull::from(cx).cast::<Context<'static>>())) {
GeneratorState::Yielded(()) => Poll::Pending,
GeneratorState::Complete(x) => Poll::Ready(x),
}
}
}
GenFuture(gen)
}
#[lang = "get_context"]
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[inline]
pub unsafe fn get_context<'a, 'b>(cx: ResumeTy) -> &'a mut Context<'b> {
// SAFETY: the caller must guarantee that `cx.0` is a valid pointer
// that fulfills all the requirements for a mutable reference. | } | unsafe { &mut *cx.0.as_ptr().cast() } | random_line_split |
results.js | ;( function( d3 ) {
/**
 * Walk up the DOM tree from `el` and return the first ancestor
 * whose classList contains `className`.
 *
 * @param {Object} el        starting element
 * @param {String} className class name to look for
 * @return {Object|null} matching ancestor element, or null if none
 */
function getParent( el, className ) {
	var node = el.parentNode;
	while ( node !== null ) {
		if ( node.classList.contains( className ) ) {
			return node;
		}
		node = node.parentNode;
	}
	return null;
}
// The test id is the last path segment of the results URL (/results/<id>).
var id = window.location.href.split( '/' ).pop();
// Metrics to render; `timing: true` marks millisecond metrics that also
// get a TTFB-adjusted value in _getNormalizedData.
var keysToRender = [ {
	key : 'render',
	label : 'Start Render',
	timing : true
}, {
	key : 'SpeedIndex',
	label : 'SpeedIndex'
}, {
	key : 'domElements',
	label : 'Number of DOM Elements'
}, {
	key : 'docTime',
	label : 'Document Complete',
	timing : true
}, {
	key : 'fullyLoaded',
	label : 'Fully loaded',
	timing : true
}, {
	key : 'requests',
	label : 'Number of Requests'
} ];
// Cached DOM references used by the render/polling functions below.
var loading = document.getElementById( 'loading' );
var table = document.getElementById( 'resultTable' );
var template = document.getElementById( 'resultTableEachTpl' );
var status = document.getElementById( 'status' );
function _getNormalizedData( data ) {
var normalizedData = [];
function | ( date, key, type ) {
var returnValue;
if ( key.key !== 'requests' ) {
returnValue = {
value : date.response.data.median[ type ] ?
date.response.data.median[ type ][ key.key ] :
0,
allowed : date.allowedUrl
};
} else {
returnValue = {
value : date.response.data.median[ type ] ?
date.response.data.median[ type ][ key.key ][ 0 ] :
0,
allowed : date.allowedUrl
};
}
if ( key.timing ) {
returnValue.withoutTTFB = returnValue.value - date.response.data.median[ type ].TTFB;
}
return returnValue;
}
keysToRender.forEach( function( key ) {
normalizedData.push( {
name : key.label,
key : key.key,
timing : !! key.timing,
data : [
data.map( function( date ) {
return getNormalizedDate( date, key, 'firstView' );
} ),
data.map( function( date ) {
return getNormalizedDate( date, key, 'repeatView' );
} )
]
} );
} )
return normalizedData;
}
/**
 * d3 "mouseenter" handler for a background bar: closes any open help
 * box, then renders a new one listing the allowed 3rd-party URL(s)
 * stored on the hovered bar, centered below it. d3 binds the hovered
 * SVG rect to `this`.
 */
function showBarHelp( data ) {
	// Only one help box may be open at a time.
	d3.selectAll( '.resultGraphs--help' ).remove();

	var bar = this;
	var barBox = bar.getBBox();
	var help = document.createElement( 'div' );
	var container = getParent( bar, 'resultGraphs--item--container' );

	help.classList.add( 'resultGraphs--help' );
	help.innerHTML =
		'Allowed 3rd Party URL(s):<br><strong>' + bar.dataset.allowed + '</strong>';
	container.appendChild( help );

	// Center the box horizontally under the hovered bar.
	help.style.left = ( barBox.x + barBox.width / 2 - help.getBoundingClientRect().width / 2 ) + 'px';
	help.style.top = ( barBox.y + barBox.height + help.getBoundingClientRect().height ) + 'px';
}
/**
 * Render the comparison table into the element matched by the
 * `container` selector, using the page's nunjucks template.
 */
function renderTable( container, data ) {
	var target = document.querySelector( container );
	target.innerHTML = nunjucks.renderString( template.innerHTML, { data : data } );
}
/**
 * Build one graph list item per metric inside `container` (a d3
 * selector string), then draw each metric's chart via renderGraph.
 */
function renderGraphs( container, data ) {
	var root = d3.select( container );
	var metrics = _getNormalizedData( data );
	var join = root.selectAll( '.resultGraphs--item' ).data( metrics );

	join.enter()
		.append( 'li' )
		.attr( 'class', 'resultGraphs--item' )
		.attr( 'id', function( d ) { return 'resultGraph--item-' + d.key; } )
		.html( function( d ) {
			return '<h4 class="resultGraphs--item--headline">' + d.name + '</h4>' +
				'<div class="resultGraphs--legend">' +
				'<span class="resultGraphs--legend__first">First</span>' +
				'<span class="resultGraphs--legend__repeat">Repeat</span>' +
				'</div>' +
				'<div class="resultGraphs--item--container"></div>';
		} );

	join.each( renderGraph );
	join.exit().remove();
}
function renderGraph( data ) {
	// Draw one line chart (first view vs. repeat view) for a single metric
	// entry produced by _getNormalizedData. Called via d3 selection.each(),
	// so `this` is the list item and `data` is
	// { name, key, timing, data: [firstViewPoints, repeatViewPoints] }.
	var containerEl = this.querySelector( '.resultGraphs--item--container' );
	var margin = { top : 25, right : 0, bottom : 30, left : 0 };
	var width = containerEl.clientWidth - margin.left - margin.right;
	var height = width * 0.6 - margin.top - margin.bottom;
	// y: value scale over the max of both series (range inverted for SVG).
	var y = d3.scale.linear()
		.domain( [
			0,
			d3.max(
				[
					d3.max( data.data[ 0 ].map( function( d ) { return d.value; } ) ),
					d3.max( data.data[ 1 ].map( function( d ) { return d.value; } ) )
				]
			)
		] )
		.range( [ height, 0 ] )
	// x: run-index scale, padded by one slot on each side.
	var x = d3.scale.linear()
		.domain( [ 0, data.data[ 0 ].length + 1 ] )
		.range( [ 0, width ] );
	// Label only whole run numbers, hiding the padding slots.
	var xAxis = d3.svg.axis()
		.scale( x )
		.tickFormat( function( d ) {
			return ( d % 1 !== 0 || ! d || d === data.data[ 0 ].length + 1 ) ?
				'' :
				d;
		} )
		.orient( 'bottom' );
	// Full-width tick size turns y ticks into horizontal grid lines.
	var yAxis = d3.svg.axis()
		.scale( y )
		.tickSize( width )
		.orient( 'right' );
	// Smoothed (cardinal) line through the series points.
	var line = d3.svg.line()
		.x( function( d, i ) { return x( i + 1 ); } )
		.y( function( d ) { return y( d.value ); } )
		.interpolate( 'cardinal' );
	var container = d3.select( containerEl ).html( '<svg></svg>' );
	var svg = container.select( 'svg' )
		.attr( 'width', width + margin.left + margin.right )
		.attr( 'height', height + margin.top + margin.bottom )
		.append( 'g' )
		.attr( 'transform', 'translate(' + margin.left + ',' + margin.top + ')');
	var barChartWidth = 12;
	var marks = svg.append( 'g' )
		.attr( 'class', 'resultGraphs--marks' );
	// Full-height bars behind each run act as hover targets that open the
	// allowed-URL help box (see showBarHelp).
	var bgBars = svg.append( 'g' )
		.attr( 'class', 'resultGraphs--bgBars' );
	bgBars.selectAll( '.resultGraphs--bgBar' )
		.data( data.data[ 0 ] )
		.enter().append( 'rect' )
		.attr( 'class', 'resultGraphs--bgBar' )
		.attr( 'x', function( d, i ) { return x( i + .5 ); } )
		.attr( 'width', function( d, i ) { return x( i + .5 ) - x( i - .5 ) } )
		.attr( 'y', function( d ) { return 0; } )
		.attr( 'height', function( d ) { return height; } )
		.attr( 'data-allowed', function( d ) { return d.allowed } )
		.on( 'mouseenter', showBarHelp );
	var gy = svg.append( 'g' )
		.attr( 'class', 'resultGraphs--yAxisTicks' )
		.call( yAxis )
		.call( customAxis );
	var gx = svg.append( 'g' )
		.attr( 'class', 'resultGraphs--xAxisTicks' )
		.attr( 'transform', 'translate(0,' + height + ')' )
		.call( xAxis );
	var circles = svg.append( 'g' )
		.attr( 'class', 'resultGraphs--circles' );
	// One line + circle set per view type.
	drawLineWithCircles( data.data[ 0 ], 'first', svg, circles );
	drawLineWithCircles( data.data[ 1 ], 'repeat', svg, circles );
	// TODO implement this in a different way
	// if ( data.timing ) {
	// 	drawLineWithCircles( data.data[ 0 ].map( function( date ) {
	// 		return {
	// 			allowed : date.allowed,
	// 			value : date.withoutTTFB
	// 		};
	// 	} ), 'firstWithoutTTFB', svg, circles );
	// 	drawLineWithCircles( data.data[ 1 ].map( function( date ) {
	// 		return {
	// 			allowed : date.allowed,
	// 			value : date.withoutTTFB
	// 		};
	// 	} ), 'repeatWithoutTTFB', svg, circles );
	// }
	// Draws the series path plus one circle per data point.
	function drawLineWithCircles( data, type, svg, circleContainer ) {
		svg.append( 'path' )
			.datum( data )
			.attr( 'class', 'resultGraphs--line__' + type )
			.attr( 'd', line );
		circleContainer.selectAll( '.resultGraphs--circle__' + type )
			.data( data )
			.enter().append( 'circle' )
			.attr( 'r', 5 )
			.attr( 'class', 'resultGraphs--circle__' + type )
			.attr( 'cx', function( d, i ) { return x( i + 1 ); } )
			.attr( 'cy', function( d ) { return y( d.value ); });
	}
	// Nudges y-axis labels up and right so they sit above the grid lines.
	function customAxis( g ) {
		g.selectAll( 'text' )
			.attr( 'x', 4 )
			.attr( 'dy', -4 );
	}
}
/**
 * Poll the server for test results, render tables and graphs once at
 * least two runs are available, and keep polling every 7.5 seconds
 * until the test run reports finished (or failed).
 */
function fetchData() {
	fetch( '/results/data/' + id )
		.then( function( response ) { return response.json(); } )
		.then( function( result ) {
			if ( result.data.length > 1 && ! result.error ) {
				// First entry: all 3rd parties allowed; second: none allowed.
				var allVsNone = [ result.data[ 0 ], result.data[ 1 ] ];
				renderTable( '#resultTable--allVsNone', allVsNone );
				renderGraphs( '#resultGraphs--allVsNone', allVsNone );

				// "none" baseline vs. each individually-allowed 3rd party.
				var noneVsEach = result.data.slice( 1 );
				renderTable( '#resultTable--noneVsEach', noneVsEach );
				renderGraphs( '#resultGraphs--noneVsEach', noneVsEach );
			}

			if ( result.finished ) {
				status.classList.remove( 'is-processing' );
				if ( result.error ) {
					status.classList.add( 'is-failed' );
					loading.innerText = 'FAILED...';
				} else {
					status.classList.add( 'is-done' );
					loading.innerText = 'DONE!';
				}
			} else {
				loading.innerText = result.runsToGo ?
					result.runsToGo + ' run(s) to go' :
					'Processing...';
				setTimeout( fetchData, 7500 );
			}
		} );
}
fetchData();
} )( d3 );
| getNormalizedDate | identifier_name |
results.js | ;( function( d3 ) {
/**
* Get parent of dom element with
* given class
*
* @param {Object} el element
* @param {String} className className
* @return {Object} parent element with given class
*/
function getParent( el, className ) {
var parent = null;
var p = el.parentNode;
while ( p !== null ) {
var o = p;
if ( o.classList.contains( className ) ) {
parent = o;
break;
}
p = o.parentNode;
}
return parent; // returns an Array []
}
var id = window.location.href.split( '/' ).pop();
var keysToRender = [ {
key : 'render',
label : 'Start Render',
timing : true
}, {
key : 'SpeedIndex',
label : 'SpeedIndex'
}, {
key : 'domElements',
label : 'Number of DOM Elements'
}, {
key : 'docTime',
label : 'Document Complete',
timing : true
}, {
key : 'fullyLoaded',
label : 'Fully loaded',
timing : true
}, {
key : 'requests',
label : 'Number of Requests'
} ];
var loading = document.getElementById( 'loading' );
var table = document.getElementById( 'resultTable' );
var template = document.getElementById( 'resultTableEachTpl' );
var status = document.getElementById( 'status' );
/**
 * Reshape the raw result list into one entry per metric in
 * keysToRender, each holding first-view and repeat-view series.
 *
 * @param {Array} data raw result objects from the server
 * @return {Array} [{ name, key, timing, data: [firstView[], repeatView[]] }]
 */
function _getNormalizedData( data ) {
	var normalizedData = [];

	// Extract one { value, allowed[, withoutTTFB] } point for a metric/view.
	function getNormalizedDate( date, key, type ) {
		var median = date.response.data.median[ type ];
		var value;

		if ( key.key !== 'requests' ) {
			value = median ? median[ key.key ] : 0;
		} else {
			// "requests" is an array; its first entry is the count.
			value = median ? median[ key.key ][ 0 ] : 0;
		}

		var returnValue = {
			value : value,
			allowed : date.allowedUrl
		};

		if ( key.timing ) {
			// BUGFIX: guard the TTFB access like the value access above —
			// previously this threw when the median for this view was missing.
			returnValue.withoutTTFB = value - ( median ? median.TTFB : 0 );
		}

		return returnValue;
	}

	keysToRender.forEach( function( key ) {
		normalizedData.push( {
			name : key.label,
			key : key.key,
			timing : !! key.timing,
			data : [
				data.map( function( date ) {
					return getNormalizedDate( date, key, 'firstView' );
				} ),
				data.map( function( date ) {
					return getNormalizedDate( date, key, 'repeatView' );
				} )
			]
		} );
	} );

	return normalizedData;
}
function showBarHelp( data ) {
d3.selectAll( '.resultGraphs--help' ).remove();
var bar = this;
var bBox = bar.getBBox();
var detailBox = document.createElement( 'div' );
var listContainer = getParent( bar, 'resultGraphs--item--container' );
detailBox.classList.add( 'resultGraphs--help' );
detailBox.innerHTML =
'Allowed 3rd Party URL(s):<br><strong>' + bar.dataset.allowed + '</strong>';
listContainer.appendChild( detailBox );
detailBox.style.left = ( bBox.x + bBox.width / 2 - detailBox.getBoundingClientRect().width / 2 ) + 'px';
detailBox.style.top = ( bBox.y + bBox.height + detailBox.getBoundingClientRect().height ) + 'px';
}
function renderTable( container, data ) {
var table = document.querySelector( container );
table.innerHTML = nunjucks.renderString( template.innerHTML, { data : data } );
}
function renderGraphs( container, data ) |
function renderGraph( data ) {
var containerEl = this.querySelector( '.resultGraphs--item--container' );
var margin = { top : 25, right : 0, bottom : 30, left : 0 };
var width = containerEl.clientWidth - margin.left - margin.right;
var height = width * 0.6 - margin.top - margin.bottom;
var y = d3.scale.linear()
.domain( [
0,
d3.max(
[
d3.max( data.data[ 0 ].map( function( d ) { return d.value; } ) ),
d3.max( data.data[ 1 ].map( function( d ) { return d.value; } ) )
]
)
] )
.range( [ height, 0 ] )
var x = d3.scale.linear()
.domain( [ 0, data.data[ 0 ].length + 1 ] )
.range( [ 0, width ] );
var xAxis = d3.svg.axis()
.scale( x )
.tickFormat( function( d ) {
return ( d % 1 !== 0 || ! d || d === data.data[ 0 ].length + 1 ) ?
'' :
d;
} )
.orient( 'bottom' );
var yAxis = d3.svg.axis()
.scale( y )
.tickSize( width )
.orient( 'right' );
var line = d3.svg.line()
.x( function( d, i ) { return x( i + 1 ); } )
.y( function( d ) { return y( d.value ); } )
.interpolate( 'cardinal' );
var container = d3.select( containerEl ).html( '<svg></svg>' );
var svg = container.select( 'svg' )
.attr( 'width', width + margin.left + margin.right )
.attr( 'height', height + margin.top + margin.bottom )
.append( 'g' )
.attr( 'transform', 'translate(' + margin.left + ',' + margin.top + ')');
var barChartWidth = 12;
var marks = svg.append( 'g' )
.attr( 'class', 'resultGraphs--marks' );
var bgBars = svg.append( 'g' )
.attr( 'class', 'resultGraphs--bgBars' );
bgBars.selectAll( '.resultGraphs--bgBar' )
.data( data.data[ 0 ] )
.enter().append( 'rect' )
.attr( 'class', 'resultGraphs--bgBar' )
.attr( 'x', function( d, i ) { return x( i + .5 ); } )
.attr( 'width', function( d, i ) { return x( i + .5 ) - x( i - .5 ) } )
.attr( 'y', function( d ) { return 0; } )
.attr( 'height', function( d ) { return height; } )
.attr( 'data-allowed', function( d ) { return d.allowed } )
.on( 'mouseenter', showBarHelp );
var gy = svg.append( 'g' )
.attr( 'class', 'resultGraphs--yAxisTicks' )
.call( yAxis )
.call( customAxis );
var gx = svg.append( 'g' )
.attr( 'class', 'resultGraphs--xAxisTicks' )
.attr( 'transform', 'translate(0,' + height + ')' )
.call( xAxis );
var circles = svg.append( 'g' )
.attr( 'class', 'resultGraphs--circles' );
drawLineWithCircles( data.data[ 0 ], 'first', svg, circles );
drawLineWithCircles( data.data[ 1 ], 'repeat', svg, circles );
// TODO implement this in a different way
// if ( data.timing ) {
// drawLineWithCircles( data.data[ 0 ].map( function( date ) {
// return {
// allowed : date.allowed,
// value : date.withoutTTFB
// };
// } ), 'firstWithoutTTFB', svg, circles );
// drawLineWithCircles( data.data[ 1 ].map( function( date ) {
// return {
// allowed : date.allowed,
// value : date.withoutTTFB
// };
// } ), 'repeatWithoutTTFB', svg, circles );
// }
function drawLineWithCircles( data, type, svg, circleContainer ) {
svg.append( 'path' )
.datum( data )
.attr( 'class', 'resultGraphs--line__' + type )
.attr( 'd', line );
circleContainer.selectAll( '.resultGraphs--circle__' + type )
.data( data )
.enter().append( 'circle' )
.attr( 'r', 5 )
.attr( 'class', 'resultGraphs--circle__' + type )
.attr( 'cx', function( d, i ) { return x( i + 1 ); } )
.attr( 'cy', function( d ) { return y( d.value ); });
}
function customAxis( g ) {
g.selectAll( 'text' )
.attr( 'x', 4 )
.attr( 'dy', -4 );
}
}
function fetchData() {
fetch( '/results/data/' + id )
.then( function( response ) {
return response.json();
} )
.then( function( result ) {
if ( result.data.length > 1 && ! result.error ) {
var allVsNoneData = [ result.data[ 0 ], result.data[ 1 ] ];
renderTable( '#resultTable--allVsNone', allVsNoneData );
renderGraphs( '#resultGraphs--allVsNone', allVsNoneData );
var noneVsEachData = result.data.slice( 1 );
renderTable( '#resultTable--noneVsEach', noneVsEachData );
renderGraphs( '#resultGraphs--noneVsEach', noneVsEachData );
}
if ( ! result.finished ) {
loading.innerText = result.runsToGo ?
result.runsToGo + ' run(s) to go' :
'Processing...';
setTimeout( fetchData, 7500 );
} else {
status.classList.remove( 'is-processing' );
if ( ! result.error ) {
status.classList.add( 'is-done' );
loading.innerText = 'DONE!';
} else {
status.classList.add( 'is-failed' );
loading.innerText = 'FAILED...';
}
}
} );
}
fetchData();
} )( d3 );
| {
var resultGraphs = d3.select( container );
var normalizedData = _getNormalizedData( data );
var items = resultGraphs.selectAll( '.resultGraphs--item' )
.data( normalizedData );
items.enter()
.append( 'li' )
.attr( 'class', 'resultGraphs--item' )
.attr( 'id', function( d ) {
return 'resultGraph--item-' + d.key;
} )
.html( function( d ) {
return '<h4 class="resultGraphs--item--headline">' + d.name + '</h4>' +
'<div class="resultGraphs--legend">' +
'<span class="resultGraphs--legend__first">First</span>' +
'<span class="resultGraphs--legend__repeat">Repeat</span>' +
'</div>' +
'<div class="resultGraphs--item--container"></div>';
} );
items.each( renderGraph );
items.exit().remove()
} | identifier_body |
results.js | ;( function( d3 ) {
/**
* Get parent of dom element with
* given class
*
* @param {Object} el element
* @param {String} className className
* @return {Object} parent element with given class
*/
function getParent( el, className ) {
var parent = null;
var p = el.parentNode;
while ( p !== null ) {
var o = p;
if ( o.classList.contains( className ) ) {
parent = o;
break;
}
p = o.parentNode;
}
return parent; // returns an Array []
}
var id = window.location.href.split( '/' ).pop();
var keysToRender = [ {
key : 'render',
label : 'Start Render',
timing : true
}, {
key : 'SpeedIndex',
label : 'SpeedIndex'
}, {
key : 'domElements',
label : 'Number of DOM Elements'
}, {
key : 'docTime',
label : 'Document Complete',
timing : true
}, {
key : 'fullyLoaded',
label : 'Fully loaded',
timing : true
}, {
key : 'requests',
label : 'Number of Requests'
} ];
var loading = document.getElementById( 'loading' );
var table = document.getElementById( 'resultTable' );
var template = document.getElementById( 'resultTableEachTpl' );
var status = document.getElementById( 'status' );
function _getNormalizedData( data ) {
var normalizedData = [];
function getNormalizedDate( date, key, type ) {
var returnValue;
if ( key.key !== 'requests' ) {
returnValue = {
value : date.response.data.median[ type ] ?
date.response.data.median[ type ][ key.key ] :
0,
allowed : date.allowedUrl
};
} else {
returnValue = {
value : date.response.data.median[ type ] ?
date.response.data.median[ type ][ key.key ][ 0 ] :
0,
allowed : date.allowedUrl
};
}
if ( key.timing ) {
returnValue.withoutTTFB = returnValue.value - date.response.data.median[ type ].TTFB;
}
return returnValue;
}
keysToRender.forEach( function( key ) {
normalizedData.push( {
name : key.label,
key : key.key,
timing : !! key.timing,
data : [
data.map( function( date ) {
return getNormalizedDate( date, key, 'firstView' );
} ),
data.map( function( date ) {
return getNormalizedDate( date, key, 'repeatView' );
} )
]
} );
} )
return normalizedData;
}
function showBarHelp( data ) {
d3.selectAll( '.resultGraphs--help' ).remove();
var bar = this;
var bBox = bar.getBBox();
var detailBox = document.createElement( 'div' );
var listContainer = getParent( bar, 'resultGraphs--item--container' );
detailBox.classList.add( 'resultGraphs--help' );
detailBox.innerHTML =
'Allowed 3rd Party URL(s):<br><strong>' + bar.dataset.allowed + '</strong>'; | listContainer.appendChild( detailBox );
detailBox.style.left = ( bBox.x + bBox.width / 2 - detailBox.getBoundingClientRect().width / 2 ) + 'px';
detailBox.style.top = ( bBox.y + bBox.height + detailBox.getBoundingClientRect().height ) + 'px';
}
function renderTable( container, data ) {
var table = document.querySelector( container );
table.innerHTML = nunjucks.renderString( template.innerHTML, { data : data } );
}
function renderGraphs( container, data ) {
var resultGraphs = d3.select( container );
var normalizedData = _getNormalizedData( data );
var items = resultGraphs.selectAll( '.resultGraphs--item' )
.data( normalizedData );
items.enter()
.append( 'li' )
.attr( 'class', 'resultGraphs--item' )
.attr( 'id', function( d ) {
return 'resultGraph--item-' + d.key;
} )
.html( function( d ) {
return '<h4 class="resultGraphs--item--headline">' + d.name + '</h4>' +
'<div class="resultGraphs--legend">' +
'<span class="resultGraphs--legend__first">First</span>' +
'<span class="resultGraphs--legend__repeat">Repeat</span>' +
'</div>' +
'<div class="resultGraphs--item--container"></div>';
} );
items.each( renderGraph );
items.exit().remove()
}
function renderGraph( data ) {
var containerEl = this.querySelector( '.resultGraphs--item--container' );
var margin = { top : 25, right : 0, bottom : 30, left : 0 };
var width = containerEl.clientWidth - margin.left - margin.right;
var height = width * 0.6 - margin.top - margin.bottom;
var y = d3.scale.linear()
.domain( [
0,
d3.max(
[
d3.max( data.data[ 0 ].map( function( d ) { return d.value; } ) ),
d3.max( data.data[ 1 ].map( function( d ) { return d.value; } ) )
]
)
] )
.range( [ height, 0 ] )
var x = d3.scale.linear()
.domain( [ 0, data.data[ 0 ].length + 1 ] )
.range( [ 0, width ] );
var xAxis = d3.svg.axis()
.scale( x )
.tickFormat( function( d ) {
return ( d % 1 !== 0 || ! d || d === data.data[ 0 ].length + 1 ) ?
'' :
d;
} )
.orient( 'bottom' );
var yAxis = d3.svg.axis()
.scale( y )
.tickSize( width )
.orient( 'right' );
var line = d3.svg.line()
.x( function( d, i ) { return x( i + 1 ); } )
.y( function( d ) { return y( d.value ); } )
.interpolate( 'cardinal' );
var container = d3.select( containerEl ).html( '<svg></svg>' );
var svg = container.select( 'svg' )
.attr( 'width', width + margin.left + margin.right )
.attr( 'height', height + margin.top + margin.bottom )
.append( 'g' )
.attr( 'transform', 'translate(' + margin.left + ',' + margin.top + ')');
var barChartWidth = 12;
var marks = svg.append( 'g' )
.attr( 'class', 'resultGraphs--marks' );
var bgBars = svg.append( 'g' )
.attr( 'class', 'resultGraphs--bgBars' );
bgBars.selectAll( '.resultGraphs--bgBar' )
.data( data.data[ 0 ] )
.enter().append( 'rect' )
.attr( 'class', 'resultGraphs--bgBar' )
.attr( 'x', function( d, i ) { return x( i + .5 ); } )
.attr( 'width', function( d, i ) { return x( i + .5 ) - x( i - .5 ) } )
.attr( 'y', function( d ) { return 0; } )
.attr( 'height', function( d ) { return height; } )
.attr( 'data-allowed', function( d ) { return d.allowed } )
.on( 'mouseenter', showBarHelp );
var gy = svg.append( 'g' )
.attr( 'class', 'resultGraphs--yAxisTicks' )
.call( yAxis )
.call( customAxis );
var gx = svg.append( 'g' )
.attr( 'class', 'resultGraphs--xAxisTicks' )
.attr( 'transform', 'translate(0,' + height + ')' )
.call( xAxis );
var circles = svg.append( 'g' )
.attr( 'class', 'resultGraphs--circles' );
drawLineWithCircles( data.data[ 0 ], 'first', svg, circles );
drawLineWithCircles( data.data[ 1 ], 'repeat', svg, circles );
// TODO implement this in a different way
// if ( data.timing ) {
// drawLineWithCircles( data.data[ 0 ].map( function( date ) {
// return {
// allowed : date.allowed,
// value : date.withoutTTFB
// };
// } ), 'firstWithoutTTFB', svg, circles );
// drawLineWithCircles( data.data[ 1 ].map( function( date ) {
// return {
// allowed : date.allowed,
// value : date.withoutTTFB
// };
// } ), 'repeatWithoutTTFB', svg, circles );
// }
function drawLineWithCircles( data, type, svg, circleContainer ) {
svg.append( 'path' )
.datum( data )
.attr( 'class', 'resultGraphs--line__' + type )
.attr( 'd', line );
circleContainer.selectAll( '.resultGraphs--circle__' + type )
.data( data )
.enter().append( 'circle' )
.attr( 'r', 5 )
.attr( 'class', 'resultGraphs--circle__' + type )
.attr( 'cx', function( d, i ) { return x( i + 1 ); } )
.attr( 'cy', function( d ) { return y( d.value ); });
}
function customAxis( g ) {
g.selectAll( 'text' )
.attr( 'x', 4 )
.attr( 'dy', -4 );
}
}
function fetchData() {
fetch( '/results/data/' + id )
.then( function( response ) {
return response.json();
} )
.then( function( result ) {
if ( result.data.length > 1 && ! result.error ) {
var allVsNoneData = [ result.data[ 0 ], result.data[ 1 ] ];
renderTable( '#resultTable--allVsNone', allVsNoneData );
renderGraphs( '#resultGraphs--allVsNone', allVsNoneData );
var noneVsEachData = result.data.slice( 1 );
renderTable( '#resultTable--noneVsEach', noneVsEachData );
renderGraphs( '#resultGraphs--noneVsEach', noneVsEachData );
}
if ( ! result.finished ) {
loading.innerText = result.runsToGo ?
result.runsToGo + ' run(s) to go' :
'Processing...';
setTimeout( fetchData, 7500 );
} else {
status.classList.remove( 'is-processing' );
if ( ! result.error ) {
status.classList.add( 'is-done' );
loading.innerText = 'DONE!';
} else {
status.classList.add( 'is-failed' );
loading.innerText = 'FAILED...';
}
}
} );
}
fetchData();
} )( d3 ); | random_line_split | |
results.js | ;( function( d3 ) {
/**
* Get parent of dom element with
* given class
*
* @param {Object} el element
* @param {String} className className
* @return {Object} parent element with given class
*/
function getParent( el, className ) {
var parent = null;
var p = el.parentNode;
while ( p !== null ) {
var o = p;
if ( o.classList.contains( className ) ) {
parent = o;
break;
}
p = o.parentNode;
}
return parent; // returns an Array []
}
var id = window.location.href.split( '/' ).pop();
var keysToRender = [ {
key : 'render',
label : 'Start Render',
timing : true
}, {
key : 'SpeedIndex',
label : 'SpeedIndex'
}, {
key : 'domElements',
label : 'Number of DOM Elements'
}, {
key : 'docTime',
label : 'Document Complete',
timing : true
}, {
key : 'fullyLoaded',
label : 'Fully loaded',
timing : true
}, {
key : 'requests',
label : 'Number of Requests'
} ];
var loading = document.getElementById( 'loading' );
var table = document.getElementById( 'resultTable' );
var template = document.getElementById( 'resultTableEachTpl' );
var status = document.getElementById( 'status' );
function _getNormalizedData( data ) {
var normalizedData = [];
function getNormalizedDate( date, key, type ) {
var returnValue;
if ( key.key !== 'requests' ) {
returnValue = {
value : date.response.data.median[ type ] ?
date.response.data.median[ type ][ key.key ] :
0,
allowed : date.allowedUrl
};
} else {
returnValue = {
value : date.response.data.median[ type ] ?
date.response.data.median[ type ][ key.key ][ 0 ] :
0,
allowed : date.allowedUrl
};
}
if ( key.timing ) {
returnValue.withoutTTFB = returnValue.value - date.response.data.median[ type ].TTFB;
}
return returnValue;
}
keysToRender.forEach( function( key ) {
normalizedData.push( {
name : key.label,
key : key.key,
timing : !! key.timing,
data : [
data.map( function( date ) {
return getNormalizedDate( date, key, 'firstView' );
} ),
data.map( function( date ) {
return getNormalizedDate( date, key, 'repeatView' );
} )
]
} );
} )
return normalizedData;
}
function showBarHelp( data ) {
d3.selectAll( '.resultGraphs--help' ).remove();
var bar = this;
var bBox = bar.getBBox();
var detailBox = document.createElement( 'div' );
var listContainer = getParent( bar, 'resultGraphs--item--container' );
detailBox.classList.add( 'resultGraphs--help' );
detailBox.innerHTML =
'Allowed 3rd Party URL(s):<br><strong>' + bar.dataset.allowed + '</strong>';
listContainer.appendChild( detailBox );
detailBox.style.left = ( bBox.x + bBox.width / 2 - detailBox.getBoundingClientRect().width / 2 ) + 'px';
detailBox.style.top = ( bBox.y + bBox.height + detailBox.getBoundingClientRect().height ) + 'px';
}
function renderTable( container, data ) {
var table = document.querySelector( container );
table.innerHTML = nunjucks.renderString( template.innerHTML, { data : data } );
}
function renderGraphs( container, data ) {
var resultGraphs = d3.select( container );
var normalizedData = _getNormalizedData( data );
var items = resultGraphs.selectAll( '.resultGraphs--item' )
.data( normalizedData );
items.enter()
.append( 'li' )
.attr( 'class', 'resultGraphs--item' )
.attr( 'id', function( d ) {
return 'resultGraph--item-' + d.key;
} )
.html( function( d ) {
return '<h4 class="resultGraphs--item--headline">' + d.name + '</h4>' +
'<div class="resultGraphs--legend">' +
'<span class="resultGraphs--legend__first">First</span>' +
'<span class="resultGraphs--legend__repeat">Repeat</span>' +
'</div>' +
'<div class="resultGraphs--item--container"></div>';
} );
items.each( renderGraph );
items.exit().remove()
}
function renderGraph( data ) {
var containerEl = this.querySelector( '.resultGraphs--item--container' );
var margin = { top : 25, right : 0, bottom : 30, left : 0 };
var width = containerEl.clientWidth - margin.left - margin.right;
var height = width * 0.6 - margin.top - margin.bottom;
var y = d3.scale.linear()
.domain( [
0,
d3.max(
[
d3.max( data.data[ 0 ].map( function( d ) { return d.value; } ) ),
d3.max( data.data[ 1 ].map( function( d ) { return d.value; } ) )
]
)
] )
.range( [ height, 0 ] )
var x = d3.scale.linear()
.domain( [ 0, data.data[ 0 ].length + 1 ] )
.range( [ 0, width ] );
var xAxis = d3.svg.axis()
.scale( x )
.tickFormat( function( d ) {
return ( d % 1 !== 0 || ! d || d === data.data[ 0 ].length + 1 ) ?
'' :
d;
} )
.orient( 'bottom' );
var yAxis = d3.svg.axis()
.scale( y )
.tickSize( width )
.orient( 'right' );
var line = d3.svg.line()
.x( function( d, i ) { return x( i + 1 ); } )
.y( function( d ) { return y( d.value ); } )
.interpolate( 'cardinal' );
var container = d3.select( containerEl ).html( '<svg></svg>' );
var svg = container.select( 'svg' )
.attr( 'width', width + margin.left + margin.right )
.attr( 'height', height + margin.top + margin.bottom )
.append( 'g' )
.attr( 'transform', 'translate(' + margin.left + ',' + margin.top + ')');
var barChartWidth = 12;
var marks = svg.append( 'g' )
.attr( 'class', 'resultGraphs--marks' );
var bgBars = svg.append( 'g' )
.attr( 'class', 'resultGraphs--bgBars' );
bgBars.selectAll( '.resultGraphs--bgBar' )
.data( data.data[ 0 ] )
.enter().append( 'rect' )
.attr( 'class', 'resultGraphs--bgBar' )
.attr( 'x', function( d, i ) { return x( i + .5 ); } )
.attr( 'width', function( d, i ) { return x( i + .5 ) - x( i - .5 ) } )
.attr( 'y', function( d ) { return 0; } )
.attr( 'height', function( d ) { return height; } )
.attr( 'data-allowed', function( d ) { return d.allowed } )
.on( 'mouseenter', showBarHelp );
var gy = svg.append( 'g' )
.attr( 'class', 'resultGraphs--yAxisTicks' )
.call( yAxis )
.call( customAxis );
var gx = svg.append( 'g' )
.attr( 'class', 'resultGraphs--xAxisTicks' )
.attr( 'transform', 'translate(0,' + height + ')' )
.call( xAxis );
var circles = svg.append( 'g' )
.attr( 'class', 'resultGraphs--circles' );
drawLineWithCircles( data.data[ 0 ], 'first', svg, circles );
drawLineWithCircles( data.data[ 1 ], 'repeat', svg, circles );
// TODO implement this in a different way
// if ( data.timing ) {
// drawLineWithCircles( data.data[ 0 ].map( function( date ) {
// return {
// allowed : date.allowed,
// value : date.withoutTTFB
// };
// } ), 'firstWithoutTTFB', svg, circles );
// drawLineWithCircles( data.data[ 1 ].map( function( date ) {
// return {
// allowed : date.allowed,
// value : date.withoutTTFB
// };
// } ), 'repeatWithoutTTFB', svg, circles );
// }
function drawLineWithCircles( data, type, svg, circleContainer ) {
svg.append( 'path' )
.datum( data )
.attr( 'class', 'resultGraphs--line__' + type )
.attr( 'd', line );
circleContainer.selectAll( '.resultGraphs--circle__' + type )
.data( data )
.enter().append( 'circle' )
.attr( 'r', 5 )
.attr( 'class', 'resultGraphs--circle__' + type )
.attr( 'cx', function( d, i ) { return x( i + 1 ); } )
.attr( 'cy', function( d ) { return y( d.value ); });
}
function customAxis( g ) {
g.selectAll( 'text' )
.attr( 'x', 4 )
.attr( 'dy', -4 );
}
}
function fetchData() {
fetch( '/results/data/' + id )
.then( function( response ) {
return response.json();
} )
.then( function( result ) {
if ( result.data.length > 1 && ! result.error ) |
if ( ! result.finished ) {
loading.innerText = result.runsToGo ?
result.runsToGo + ' run(s) to go' :
'Processing...';
setTimeout( fetchData, 7500 );
} else {
status.classList.remove( 'is-processing' );
if ( ! result.error ) {
status.classList.add( 'is-done' );
loading.innerText = 'DONE!';
} else {
status.classList.add( 'is-failed' );
loading.innerText = 'FAILED...';
}
}
} );
}
fetchData();
} )( d3 );
| {
var allVsNoneData = [ result.data[ 0 ], result.data[ 1 ] ];
renderTable( '#resultTable--allVsNone', allVsNoneData );
renderGraphs( '#resultGraphs--allVsNone', allVsNoneData );
var noneVsEachData = result.data.slice( 1 );
renderTable( '#resultTable--noneVsEach', noneVsEachData );
renderGraphs( '#resultGraphs--noneVsEach', noneVsEachData );
} | conditional_block |
Module.ts | import * as AdhTopLevelStateModule from "../TopLevelState/Module";
import * as AdhTopLevelState from "../TopLevelState/TopLevelState";
import * as AdhEmbed from "./Embed";
export var moduleName = "adhEmbed";
export var register = (angular) => {
angular
.module(moduleName, [
"pascalprecht.translate",
AdhTopLevelStateModule.moduleName
])
.config(["adhTopLevelStateProvider", (adhTopLevelStateProvider : AdhTopLevelState.Provider) => {
adhTopLevelStateProvider
.when("embed", ["$location", "adhEmbed", (
$location : angular.ILocationService,
adhEmbed : AdhEmbed.Service
) : AdhTopLevelState.IAreaInput => {
return adhEmbed.route($location);
}]);
}])
.run(["$location", "$translate", "adhConfig", ($location, $translate, adhConfig) => {
// Note: This works despite the routing removing the locale search
// parameter immediately after. This is a bit awkward though.
// FIXME: centralize locale setup in adhLocale
var params = $location.search();
if (params.hasOwnProperty("locale")) {
$translate.use(params.locale);
}
if (typeof params.locale !== "undefined") |
}])
.provider("adhEmbed", AdhEmbed.Provider)
.directive("href", ["adhConfig", "$location", "$rootScope", AdhEmbed.hrefDirective])
.filter("adhCanonicalUrl", ["adhConfig", AdhEmbed.canonicalUrl]);
};
| {
adhConfig.locale = params.locale;
} | conditional_block |
Module.ts | import * as AdhTopLevelStateModule from "../TopLevelState/Module";
import * as AdhTopLevelState from "../TopLevelState/TopLevelState"; |
export var moduleName = "adhEmbed";
export var register = (angular) => {
angular
.module(moduleName, [
"pascalprecht.translate",
AdhTopLevelStateModule.moduleName
])
.config(["adhTopLevelStateProvider", (adhTopLevelStateProvider : AdhTopLevelState.Provider) => {
adhTopLevelStateProvider
.when("embed", ["$location", "adhEmbed", (
$location : angular.ILocationService,
adhEmbed : AdhEmbed.Service
) : AdhTopLevelState.IAreaInput => {
return adhEmbed.route($location);
}]);
}])
.run(["$location", "$translate", "adhConfig", ($location, $translate, adhConfig) => {
// Note: This works despite the routing removing the locale search
// parameter immediately after. This is a bit awkward though.
// FIXME: centralize locale setup in adhLocale
var params = $location.search();
if (params.hasOwnProperty("locale")) {
$translate.use(params.locale);
}
if (typeof params.locale !== "undefined") {
adhConfig.locale = params.locale;
}
}])
.provider("adhEmbed", AdhEmbed.Provider)
.directive("href", ["adhConfig", "$location", "$rootScope", AdhEmbed.hrefDirective])
.filter("adhCanonicalUrl", ["adhConfig", AdhEmbed.canonicalUrl]);
}; |
import * as AdhEmbed from "./Embed";
| random_line_split |
getobject.py | # Copyright (c) 2013-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import hashlib
import os.path
import sys
from requestbuilder import Arg
from requestbuilder.exceptions import ArgumentError
from requestbuilder.mixins import FileTransferProgressBarMixin
import six
from euca2ools.commands.s3 import S3Request
import euca2ools.bundle.pipes
class GetObject(S3Request, FileTransferProgressBarMixin):
DESCRIPTION = 'Retrieve objects from the server'
ARGS = [Arg('source', metavar='BUCKET/KEY', route_to=None,
help='the object to download (required)'),
Arg('-o', dest='dest', metavar='PATH', route_to=None,
default='.', help='''where to download to. If this names a
directory the object will be written to a file inside of that
directory. If this is is "-" the object will be written to
stdout. Otherwise it will be written to a file with the name
given. (default: current directory)''')]
def configure(self):
S3Request.configure(self)
bucket, _, key = self.args['source'].partition('/')
if not bucket:
raise ArgumentError('source must contain a bucket name')
if not key:
raise ArgumentError('source must contain a key name')
if isinstance(self.args.get('dest'), six.string_types):
# If it is not a string we assume it is a file-like object
if self.args['dest'] == '-':
self.args['dest'] = sys.stdout
elif os.path.isdir(self.args['dest']):
basename = os.path.basename(key)
if not basename:
raise ArgumentError("specify a complete file path with -o "
"to download objects that end in '/'")
dest_path = os.path.join(self.args['dest'], basename)
self.args['dest'] = open(dest_path, 'w')
else:
self.args['dest'] = open(self.args['dest'], 'w')
def preprocess(self):
self.path = self.args['source']
def main(self):
# Note that this method does not close self.args['dest']
self.preprocess()
bytes_written = 0
md5_digest = hashlib.md5()
sha_digest = hashlib.sha1()
response = self.send()
content_length = response.headers.get('Content-Length')
if content_length:
pbar = self.get_progressbar(label=self.args['source'],
maxval=int(content_length))
else:
pbar = self.get_progressbar(label=self.args['source'])
pbar.start()
for chunk in response.iter_content(chunk_size=euca2ools.BUFSIZE):
self.args['dest'].write(chunk)
bytes_written += len(chunk)
md5_digest.update(chunk)
sha_digest.update(chunk)
if pbar is not None:
pbar.update(bytes_written)
self.args['dest'].flush()
pbar.finish()
# Integrity checks
if content_length and bytes_written != int(content_length):
self.log.error('rejecting download due to Content-Length size '
'mismatch (expected: %i, actual: %i)',
content_length, bytes_written)
raise RuntimeError('downloaded file appears to be corrupt '
'(expected size: {0}, actual: {1})'
.format(content_length, bytes_written))
etag = response.headers.get('ETag', '').lower().strip('"')
if (len(etag) == 32 and
all(char in '0123456789abcdef' for char in etag)):
# It looks like an MD5 hash
if md5_digest.hexdigest() != etag:
|
return {self.args['source']: {'md5': md5_digest.hexdigest(),
'sha1': sha_digest.hexdigest(),
'size': bytes_written}}
| self.log.error('rejecting download due to ETag MD5 mismatch '
'(expected: %s, actual: %s)',
etag, md5_digest.hexdigest())
raise RuntimeError('downloaded file appears to be corrupt '
'(expected MD5: {0}, actual: {1})'
.format(etag, md5_digest.hexdigest())) | conditional_block |
getobject.py | # Copyright (c) 2013-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import hashlib
import os.path
import sys
from requestbuilder import Arg
from requestbuilder.exceptions import ArgumentError
from requestbuilder.mixins import FileTransferProgressBarMixin
import six
from euca2ools.commands.s3 import S3Request
import euca2ools.bundle.pipes
class | (S3Request, FileTransferProgressBarMixin):
DESCRIPTION = 'Retrieve objects from the server'
ARGS = [Arg('source', metavar='BUCKET/KEY', route_to=None,
help='the object to download (required)'),
Arg('-o', dest='dest', metavar='PATH', route_to=None,
default='.', help='''where to download to. If this names a
directory the object will be written to a file inside of that
directory. If this is is "-" the object will be written to
stdout. Otherwise it will be written to a file with the name
given. (default: current directory)''')]
def configure(self):
S3Request.configure(self)
bucket, _, key = self.args['source'].partition('/')
if not bucket:
raise ArgumentError('source must contain a bucket name')
if not key:
raise ArgumentError('source must contain a key name')
if isinstance(self.args.get('dest'), six.string_types):
# If it is not a string we assume it is a file-like object
if self.args['dest'] == '-':
self.args['dest'] = sys.stdout
elif os.path.isdir(self.args['dest']):
basename = os.path.basename(key)
if not basename:
raise ArgumentError("specify a complete file path with -o "
"to download objects that end in '/'")
dest_path = os.path.join(self.args['dest'], basename)
self.args['dest'] = open(dest_path, 'w')
else:
self.args['dest'] = open(self.args['dest'], 'w')
def preprocess(self):
self.path = self.args['source']
def main(self):
# Note that this method does not close self.args['dest']
self.preprocess()
bytes_written = 0
md5_digest = hashlib.md5()
sha_digest = hashlib.sha1()
response = self.send()
content_length = response.headers.get('Content-Length')
if content_length:
pbar = self.get_progressbar(label=self.args['source'],
maxval=int(content_length))
else:
pbar = self.get_progressbar(label=self.args['source'])
pbar.start()
for chunk in response.iter_content(chunk_size=euca2ools.BUFSIZE):
self.args['dest'].write(chunk)
bytes_written += len(chunk)
md5_digest.update(chunk)
sha_digest.update(chunk)
if pbar is not None:
pbar.update(bytes_written)
self.args['dest'].flush()
pbar.finish()
# Integrity checks
if content_length and bytes_written != int(content_length):
self.log.error('rejecting download due to Content-Length size '
'mismatch (expected: %i, actual: %i)',
content_length, bytes_written)
raise RuntimeError('downloaded file appears to be corrupt '
'(expected size: {0}, actual: {1})'
.format(content_length, bytes_written))
etag = response.headers.get('ETag', '').lower().strip('"')
if (len(etag) == 32 and
all(char in '0123456789abcdef' for char in etag)):
# It looks like an MD5 hash
if md5_digest.hexdigest() != etag:
self.log.error('rejecting download due to ETag MD5 mismatch '
'(expected: %s, actual: %s)',
etag, md5_digest.hexdigest())
raise RuntimeError('downloaded file appears to be corrupt '
'(expected MD5: {0}, actual: {1})'
.format(etag, md5_digest.hexdigest()))
return {self.args['source']: {'md5': md5_digest.hexdigest(),
'sha1': sha_digest.hexdigest(),
'size': bytes_written}}
| GetObject | identifier_name |
getobject.py | # Copyright (c) 2013-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import hashlib
import os.path
import sys
from requestbuilder import Arg
from requestbuilder.exceptions import ArgumentError
from requestbuilder.mixins import FileTransferProgressBarMixin
import six
from euca2ools.commands.s3 import S3Request
import euca2ools.bundle.pipes
class GetObject(S3Request, FileTransferProgressBarMixin):
DESCRIPTION = 'Retrieve objects from the server'
ARGS = [Arg('source', metavar='BUCKET/KEY', route_to=None,
help='the object to download (required)'),
Arg('-o', dest='dest', metavar='PATH', route_to=None,
default='.', help='''where to download to. If this names a
directory the object will be written to a file inside of that
directory. If this is is "-" the object will be written to
stdout. Otherwise it will be written to a file with the name
given. (default: current directory)''')]
def configure(self):
S3Request.configure(self)
bucket, _, key = self.args['source'].partition('/')
if not bucket:
raise ArgumentError('source must contain a bucket name')
if not key:
raise ArgumentError('source must contain a key name')
if isinstance(self.args.get('dest'), six.string_types):
# If it is not a string we assume it is a file-like object
if self.args['dest'] == '-':
self.args['dest'] = sys.stdout
elif os.path.isdir(self.args['dest']):
basename = os.path.basename(key)
if not basename:
raise ArgumentError("specify a complete file path with -o "
"to download objects that end in '/'")
dest_path = os.path.join(self.args['dest'], basename)
self.args['dest'] = open(dest_path, 'w')
else:
self.args['dest'] = open(self.args['dest'], 'w')
def preprocess(self):
self.path = self.args['source']
def main(self):
# Note that this method does not close self.args['dest']
| self.preprocess()
bytes_written = 0
md5_digest = hashlib.md5()
sha_digest = hashlib.sha1()
response = self.send()
content_length = response.headers.get('Content-Length')
if content_length:
pbar = self.get_progressbar(label=self.args['source'],
maxval=int(content_length))
else:
pbar = self.get_progressbar(label=self.args['source'])
pbar.start()
for chunk in response.iter_content(chunk_size=euca2ools.BUFSIZE):
self.args['dest'].write(chunk)
bytes_written += len(chunk)
md5_digest.update(chunk)
sha_digest.update(chunk)
if pbar is not None:
pbar.update(bytes_written)
self.args['dest'].flush()
pbar.finish()
# Integrity checks
if content_length and bytes_written != int(content_length):
self.log.error('rejecting download due to Content-Length size '
'mismatch (expected: %i, actual: %i)',
content_length, bytes_written)
raise RuntimeError('downloaded file appears to be corrupt '
'(expected size: {0}, actual: {1})'
.format(content_length, bytes_written))
etag = response.headers.get('ETag', '').lower().strip('"')
if (len(etag) == 32 and
all(char in '0123456789abcdef' for char in etag)):
# It looks like an MD5 hash
if md5_digest.hexdigest() != etag:
self.log.error('rejecting download due to ETag MD5 mismatch '
'(expected: %s, actual: %s)',
etag, md5_digest.hexdigest())
raise RuntimeError('downloaded file appears to be corrupt '
'(expected MD5: {0}, actual: {1})'
.format(etag, md5_digest.hexdigest()))
return {self.args['source']: {'md5': md5_digest.hexdigest(),
'sha1': sha_digest.hexdigest(),
'size': bytes_written}} | identifier_body | |
getobject.py | # Copyright (c) 2013-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import hashlib
import os.path
import sys
from requestbuilder import Arg
from requestbuilder.exceptions import ArgumentError
from requestbuilder.mixins import FileTransferProgressBarMixin
import six
from euca2ools.commands.s3 import S3Request
import euca2ools.bundle.pipes
class GetObject(S3Request, FileTransferProgressBarMixin):
DESCRIPTION = 'Retrieve objects from the server'
ARGS = [Arg('source', metavar='BUCKET/KEY', route_to=None,
help='the object to download (required)'),
Arg('-o', dest='dest', metavar='PATH', route_to=None,
default='.', help='''where to download to. If this names a
directory the object will be written to a file inside of that
directory. If this is is "-" the object will be written to
stdout. Otherwise it will be written to a file with the name
given. (default: current directory)''')]
def configure(self):
S3Request.configure(self)
bucket, _, key = self.args['source'].partition('/')
if not bucket:
raise ArgumentError('source must contain a bucket name')
if not key:
raise ArgumentError('source must contain a key name')
if isinstance(self.args.get('dest'), six.string_types):
# If it is not a string we assume it is a file-like object
if self.args['dest'] == '-':
self.args['dest'] = sys.stdout
elif os.path.isdir(self.args['dest']):
basename = os.path.basename(key)
if not basename:
raise ArgumentError("specify a complete file path with -o "
"to download objects that end in '/'")
dest_path = os.path.join(self.args['dest'], basename)
self.args['dest'] = open(dest_path, 'w')
else:
self.args['dest'] = open(self.args['dest'], 'w')
def preprocess(self):
self.path = self.args['source']
def main(self):
# Note that this method does not close self.args['dest']
self.preprocess()
bytes_written = 0
md5_digest = hashlib.md5()
sha_digest = hashlib.sha1()
response = self.send()
content_length = response.headers.get('Content-Length')
if content_length:
pbar = self.get_progressbar(label=self.args['source'],
maxval=int(content_length))
else:
pbar = self.get_progressbar(label=self.args['source'])
pbar.start()
for chunk in response.iter_content(chunk_size=euca2ools.BUFSIZE):
self.args['dest'].write(chunk)
bytes_written += len(chunk)
md5_digest.update(chunk)
sha_digest.update(chunk)
if pbar is not None:
pbar.update(bytes_written) | if content_length and bytes_written != int(content_length):
self.log.error('rejecting download due to Content-Length size '
'mismatch (expected: %i, actual: %i)',
content_length, bytes_written)
raise RuntimeError('downloaded file appears to be corrupt '
'(expected size: {0}, actual: {1})'
.format(content_length, bytes_written))
etag = response.headers.get('ETag', '').lower().strip('"')
if (len(etag) == 32 and
all(char in '0123456789abcdef' for char in etag)):
# It looks like an MD5 hash
if md5_digest.hexdigest() != etag:
self.log.error('rejecting download due to ETag MD5 mismatch '
'(expected: %s, actual: %s)',
etag, md5_digest.hexdigest())
raise RuntimeError('downloaded file appears to be corrupt '
'(expected MD5: {0}, actual: {1})'
.format(etag, md5_digest.hexdigest()))
return {self.args['source']: {'md5': md5_digest.hexdigest(),
'sha1': sha_digest.hexdigest(),
'size': bytes_written}} | self.args['dest'].flush()
pbar.finish()
# Integrity checks | random_line_split |
nb_NO.js | module.exports = {
accepted: ':attribute må være akseptert.',
alpha: ':attribute feltet kan kun inneholde alfabetiske tegn.',
alpha_dash: ':attribute feltet kan kun inneholde alfanumeriske tegn, i tillegg til bindestreker og understreker.',
alpha_num: ':attribute feltet må være alfanumerisk.',
between: ':attribute feltet må være mellom :min og :max.',
confirmed: ':attribute feltet stemmer ikke overens med bekreftelsen.',
email: ':attribute formatet er ugyldig.',
date: ':attribute er et ugyldig datoformat.',
def: ':attribute attributtet har feil.',
digits: ':attribute må være på :digits siffer.',
different: ':attribute og :different må være forskjellige.',
'in': 'Den oppgitte verdien for :attribute er ugyldig.',
integer: ':attribute må være et heltall.',
hex: 'The :attribute should have hexadecimal format',
min: {
numeric: ':attribute må minst være :min.',
string: ':attribute må være på minst :min tegn.'
},
max: {
numeric: ':attribute kan ikke være større enn :max.',
string: ':attribute kan maks ha :max tegn.'
}, | present: 'The :attribute field must be present (but can be empty).',
required: ':attribute feltet er påkrevd.',
required_if: ':attribute er påkrevd når :other er :value.',
same: ':attribute og :same må være like.',
size: {
numeric: ':attribute må ha størrelsen :size.',
string: ':attribute må ha :size tegn.'
},
string: ':attribute må være tekst.',
url: ':attribute formatet er ugyldig.',
regex: ':attribute formatet er ugyldig.',
attributes: {}
}; | 'not_in': 'Den oppgitte verdien for :attribute er ugyldig.',
numeric: ':attribute må være et tall.', | random_line_split |
app.py | import os
import module
from flask import Flask, render_template, request, session, redirect, url_for, send_from_directory
from werkzeug import secure_filename
from functools import wraps
app = Flask(__name__)
# Configure upload locations
app.config['UPLOAD_FOLDER'] = 'uploads/'
app.config['ALLOWED_EXTENSIONS'] = set(['chessley']) # Change this to whatever filetype to accept
# Checks if uploaded file is a valid file
def allowed_file(filename):
"""
Checks if 'filename' is allowed to be uploaded to the server
Params:
filename - String containing the name of the uploaded file
Returns:
True if the file is allowed, False otherwise
"""
return '.' in filename and filename.rsplit('.',1)[1] in app.config['ALLOWED_EXTENSIONS']
# Wraps for login requirements on certain app.routes
def | (f):
"""
Python function wrapper, used on functions that require being logged in to
view. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" not in session or not session["authenticated"] or \
"username" not in session:
session.clear()
return redirect(url_for("login"))
return f(*args, **kwargs)
return decorated_function
def redirect_if_logged_in(f):
"""
Python function wrapper, used on functions to redirect to other pages if
the user is already logged in. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" in session and session["authenticated"]:
return redirect(url_for("profile"))
return f(*args, **kwargs)
return decorated_function
############### APPLICATION SITE ROUTES ###############
@app.route("/")
@app.route("/home")
@app.route("/home/")
@redirect_if_logged_in
def home():
return render_template("home.html")
@app.route("/login", methods=["GET","POST"])
@app.route("/login/", methods=["GET","POST"])
@redirect_if_logged_in
def login():
if request.method == "POST":
REQUIRED = ["username", "pass"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return render_template("login.html")
if module.authenticate(request.form['username'], request.form['pass']):
session["authenticated"] = True
session["username"] = request.form['username']
return redirect(url_for("profile"))
return render_template("login.html")
@app.route("/logout")
@app.route("/logout/")
@login_required
def logout():
session.clear()
return redirect(url_for("login"))
@app.route("/register", methods=["POST"])
@app.route("/register/", methods=["POST"])
@redirect_if_logged_in
def register():
REQUIRED = ["username", "pass", "pass2"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return redirect(url_for("home"))
if request.form["pass"] != request.form["pass2"]:
return redirect(url_for("home"))
if module.newUser(request.form["username"], request.form["pass"]):
session['authenticated'] = True
session['username'] = request.form['username']
return redirect(url_for("profile"))
else:
return redirect(url_for("home"))
@app.route("/about")
@app.route("/about/")
def about():
LOGGED_IN = "authenticated" in session and session["authenticated"]
return render_template("about.html", AUTH=LOGGED_IN)
@app.route("/download", methods=["GET", "POST"])
@app.route("/download/", methods=["GET", "POST"])
@login_required
def download():
return render_template('download.html', USERNAME=session['username']) # For when the Jinja is configured
@app.route("/upload", methods=["GET","POST"])
@app.route("/upload/", methods=["GET","POST"])
@login_required
def upload():
if request.method == "POST":
file = request.files["upload_bot"]
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename + session["username"] + "_bot.chessley"))
return render_template("upload.html")
@app.route("/leaderboards", methods=["GET", "POST"])
@app.route("/leaderboards/", methods=["GET", "POST"])
def leaderboards():
LOGGED_IN = "authenticated" in session and session["authenticated"]
table = module.getRankedUsers()
return render_template("leaderboards.html", table=table, AUTH=LOGGED_IN)
@app.route("/profile", methods=["GET","POST"])
@app.route("/profile/", methods=["GET","POST"])
@login_required
def profile():
if 'username' in session and session['username']!=0:
#retrieve user data here
dict = module.getUser(session['username'])
#dict = {"rank":1,"elo":1400,"wins":100,"losses":50,"stalemates":0}
return render_template("profile.html", USERNAME=session['username'], DICT=dict)
return render_template("home.html")
app.secret_key = str(os.urandom(24))
if __name__ == "__main__":
app.debug = True
app.run(host="0.0.0.0", port=5000)
| login_required | identifier_name |
app.py | import os
import module
from flask import Flask, render_template, request, session, redirect, url_for, send_from_directory
from werkzeug import secure_filename
from functools import wraps
app = Flask(__name__)
# Configure upload locations
app.config['UPLOAD_FOLDER'] = 'uploads/'
app.config['ALLOWED_EXTENSIONS'] = set(['chessley']) # Change this to whatever filetype to accept
# Checks if uploaded file is a valid file
def allowed_file(filename):
"""
Checks if 'filename' is allowed to be uploaded to the server
Params:
filename - String containing the name of the uploaded file
Returns:
True if the file is allowed, False otherwise
"""
return '.' in filename and filename.rsplit('.',1)[1] in app.config['ALLOWED_EXTENSIONS']
# Wraps for login requirements on certain app.routes
def login_required(f):
"""
Python function wrapper, used on functions that require being logged in to
view. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" not in session or not session["authenticated"] or \
"username" not in session:
session.clear()
return redirect(url_for("login"))
return f(*args, **kwargs)
return decorated_function
def redirect_if_logged_in(f):
"""
Python function wrapper, used on functions to redirect to other pages if
the user is already logged in. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" in session and session["authenticated"]:
return redirect(url_for("profile"))
return f(*args, **kwargs)
return decorated_function
############### APPLICATION SITE ROUTES ###############
@app.route("/")
@app.route("/home")
@app.route("/home/")
@redirect_if_logged_in
def home():
return render_template("home.html")
@app.route("/login", methods=["GET","POST"])
@app.route("/login/", methods=["GET","POST"])
@redirect_if_logged_in
def login():
if request.method == "POST":
REQUIRED = ["username", "pass"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return render_template("login.html")
if module.authenticate(request.form['username'], request.form['pass']):
session["authenticated"] = True
session["username"] = request.form['username']
return redirect(url_for("profile"))
return render_template("login.html")
@app.route("/logout")
@app.route("/logout/")
@login_required
def logout():
session.clear()
return redirect(url_for("login"))
@app.route("/register", methods=["POST"])
@app.route("/register/", methods=["POST"])
@redirect_if_logged_in
def register():
REQUIRED = ["username", "pass", "pass2"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return redirect(url_for("home"))
if request.form["pass"] != request.form["pass2"]:
return redirect(url_for("home"))
if module.newUser(request.form["username"], request.form["pass"]):
session['authenticated'] = True
session['username'] = request.form['username']
return redirect(url_for("profile"))
else:
return redirect(url_for("home"))
@app.route("/about")
@app.route("/about/")
def about():
LOGGED_IN = "authenticated" in session and session["authenticated"]
return render_template("about.html", AUTH=LOGGED_IN)
@app.route("/download", methods=["GET", "POST"])
@app.route("/download/", methods=["GET", "POST"]) | @app.route("/upload", methods=["GET","POST"])
@app.route("/upload/", methods=["GET","POST"])
@login_required
def upload():
if request.method == "POST":
file = request.files["upload_bot"]
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename + session["username"] + "_bot.chessley"))
return render_template("upload.html")
@app.route("/leaderboards", methods=["GET", "POST"])
@app.route("/leaderboards/", methods=["GET", "POST"])
def leaderboards():
LOGGED_IN = "authenticated" in session and session["authenticated"]
table = module.getRankedUsers()
return render_template("leaderboards.html", table=table, AUTH=LOGGED_IN)
@app.route("/profile", methods=["GET","POST"])
@app.route("/profile/", methods=["GET","POST"])
@login_required
def profile():
if 'username' in session and session['username']!=0:
#retrieve user data here
dict = module.getUser(session['username'])
#dict = {"rank":1,"elo":1400,"wins":100,"losses":50,"stalemates":0}
return render_template("profile.html", USERNAME=session['username'], DICT=dict)
return render_template("home.html")
app.secret_key = str(os.urandom(24))
if __name__ == "__main__":
app.debug = True
app.run(host="0.0.0.0", port=5000) | @login_required
def download():
return render_template('download.html', USERNAME=session['username']) # For when the Jinja is configured
| random_line_split |
app.py | import os
import module
from flask import Flask, render_template, request, session, redirect, url_for, send_from_directory
from werkzeug import secure_filename
from functools import wraps
app = Flask(__name__)
# Configure upload locations
app.config['UPLOAD_FOLDER'] = 'uploads/'
app.config['ALLOWED_EXTENSIONS'] = set(['chessley']) # Change this to whatever filetype to accept
# Checks if uploaded file is a valid file
def allowed_file(filename):
"""
Checks if 'filename' is allowed to be uploaded to the server
Params:
filename - String containing the name of the uploaded file
Returns:
True if the file is allowed, False otherwise
"""
return '.' in filename and filename.rsplit('.',1)[1] in app.config['ALLOWED_EXTENSIONS']
# Wraps for login requirements on certain app.routes
def login_required(f):
"""
Python function wrapper, used on functions that require being logged in to
view. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" not in session or not session["authenticated"] or \
"username" not in session:
|
return f(*args, **kwargs)
return decorated_function
def redirect_if_logged_in(f):
"""
Python function wrapper, used on functions to redirect to other pages if
the user is already logged in. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" in session and session["authenticated"]:
return redirect(url_for("profile"))
return f(*args, **kwargs)
return decorated_function
############### APPLICATION SITE ROUTES ###############
@app.route("/")
@app.route("/home")
@app.route("/home/")
@redirect_if_logged_in
def home():
return render_template("home.html")
@app.route("/login", methods=["GET","POST"])
@app.route("/login/", methods=["GET","POST"])
@redirect_if_logged_in
def login():
if request.method == "POST":
REQUIRED = ["username", "pass"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return render_template("login.html")
if module.authenticate(request.form['username'], request.form['pass']):
session["authenticated"] = True
session["username"] = request.form['username']
return redirect(url_for("profile"))
return render_template("login.html")
@app.route("/logout")
@app.route("/logout/")
@login_required
def logout():
session.clear()
return redirect(url_for("login"))
@app.route("/register", methods=["POST"])
@app.route("/register/", methods=["POST"])
@redirect_if_logged_in
def register():
REQUIRED = ["username", "pass", "pass2"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return redirect(url_for("home"))
if request.form["pass"] != request.form["pass2"]:
return redirect(url_for("home"))
if module.newUser(request.form["username"], request.form["pass"]):
session['authenticated'] = True
session['username'] = request.form['username']
return redirect(url_for("profile"))
else:
return redirect(url_for("home"))
@app.route("/about")
@app.route("/about/")
def about():
LOGGED_IN = "authenticated" in session and session["authenticated"]
return render_template("about.html", AUTH=LOGGED_IN)
@app.route("/download", methods=["GET", "POST"])
@app.route("/download/", methods=["GET", "POST"])
@login_required
def download():
return render_template('download.html', USERNAME=session['username']) # For when the Jinja is configured
@app.route("/upload", methods=["GET","POST"])
@app.route("/upload/", methods=["GET","POST"])
@login_required
def upload():
if request.method == "POST":
file = request.files["upload_bot"]
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename + session["username"] + "_bot.chessley"))
return render_template("upload.html")
@app.route("/leaderboards", methods=["GET", "POST"])
@app.route("/leaderboards/", methods=["GET", "POST"])
def leaderboards():
LOGGED_IN = "authenticated" in session and session["authenticated"]
table = module.getRankedUsers()
return render_template("leaderboards.html", table=table, AUTH=LOGGED_IN)
@app.route("/profile", methods=["GET","POST"])
@app.route("/profile/", methods=["GET","POST"])
@login_required
def profile():
if 'username' in session and session['username']!=0:
#retrieve user data here
dict = module.getUser(session['username'])
#dict = {"rank":1,"elo":1400,"wins":100,"losses":50,"stalemates":0}
return render_template("profile.html", USERNAME=session['username'], DICT=dict)
return render_template("home.html")
app.secret_key = str(os.urandom(24))
if __name__ == "__main__":
app.debug = True
app.run(host="0.0.0.0", port=5000)
| session.clear()
return redirect(url_for("login")) | conditional_block |
app.py | import os
import module
from flask import Flask, render_template, request, session, redirect, url_for, send_from_directory
from werkzeug import secure_filename
from functools import wraps
app = Flask(__name__)
# Configure upload locations
app.config['UPLOAD_FOLDER'] = 'uploads/'
app.config['ALLOWED_EXTENSIONS'] = set(['chessley']) # Change this to whatever filetype to accept
# Checks if uploaded file is a valid file
def allowed_file(filename):
"""
Checks if 'filename' is allowed to be uploaded to the server
Params:
filename - String containing the name of the uploaded file
Returns:
True if the file is allowed, False otherwise
"""
return '.' in filename and filename.rsplit('.',1)[1] in app.config['ALLOWED_EXTENSIONS']
# Wraps for login requirements on certain app.routes
def login_required(f):
"""
Python function wrapper, used on functions that require being logged in to
view. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" not in session or not session["authenticated"] or \
"username" not in session:
session.clear()
return redirect(url_for("login"))
return f(*args, **kwargs)
return decorated_function
def redirect_if_logged_in(f):
"""
Python function wrapper, used on functions to redirect to other pages if
the user is already logged in. Run before a function's body is run.
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if "authenticated" in session and session["authenticated"]:
return redirect(url_for("profile"))
return f(*args, **kwargs)
return decorated_function
############### APPLICATION SITE ROUTES ###############
@app.route("/")
@app.route("/home")
@app.route("/home/")
@redirect_if_logged_in
def home():
return render_template("home.html")
@app.route("/login", methods=["GET","POST"])
@app.route("/login/", methods=["GET","POST"])
@redirect_if_logged_in
def login():
if request.method == "POST":
REQUIRED = ["username", "pass"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return render_template("login.html")
if module.authenticate(request.form['username'], request.form['pass']):
session["authenticated"] = True
session["username"] = request.form['username']
return redirect(url_for("profile"))
return render_template("login.html")
@app.route("/logout")
@app.route("/logout/")
@login_required
def logout():
|
@app.route("/register", methods=["POST"])
@app.route("/register/", methods=["POST"])
@redirect_if_logged_in
def register():
REQUIRED = ["username", "pass", "pass2"]
for form_elem in REQUIRED:
if form_elem not in request.form:
return redirect(url_for("home"))
if request.form["pass"] != request.form["pass2"]:
return redirect(url_for("home"))
if module.newUser(request.form["username"], request.form["pass"]):
session['authenticated'] = True
session['username'] = request.form['username']
return redirect(url_for("profile"))
else:
return redirect(url_for("home"))
@app.route("/about")
@app.route("/about/")
def about():
LOGGED_IN = "authenticated" in session and session["authenticated"]
return render_template("about.html", AUTH=LOGGED_IN)
@app.route("/download", methods=["GET", "POST"])
@app.route("/download/", methods=["GET", "POST"])
@login_required
def download():
return render_template('download.html', USERNAME=session['username']) # For when the Jinja is configured
@app.route("/upload", methods=["GET","POST"])
@app.route("/upload/", methods=["GET","POST"])
@login_required
def upload():
if request.method == "POST":
file = request.files["upload_bot"]
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename + session["username"] + "_bot.chessley"))
return render_template("upload.html")
@app.route("/leaderboards", methods=["GET", "POST"])
@app.route("/leaderboards/", methods=["GET", "POST"])
def leaderboards():
LOGGED_IN = "authenticated" in session and session["authenticated"]
table = module.getRankedUsers()
return render_template("leaderboards.html", table=table, AUTH=LOGGED_IN)
@app.route("/profile", methods=["GET","POST"])
@app.route("/profile/", methods=["GET","POST"])
@login_required
def profile():
if 'username' in session and session['username']!=0:
#retrieve user data here
dict = module.getUser(session['username'])
#dict = {"rank":1,"elo":1400,"wins":100,"losses":50,"stalemates":0}
return render_template("profile.html", USERNAME=session['username'], DICT=dict)
return render_template("home.html")
app.secret_key = str(os.urandom(24))
if __name__ == "__main__":
app.debug = True
app.run(host="0.0.0.0", port=5000)
| session.clear()
return redirect(url_for("login")) | identifier_body |
ActionsheetContent.tsx | import React, { memo, forwardRef } from 'react';
import { Modal } from '../../composites/Modal';
import type { IActionsheetContentProps } from './types';
import { usePropsResolution } from '../../../hooks';
import { Animated, PanResponder } from 'react-native';
import { ModalContext } from '../Modal/Context';
import Box from '../../primitives/Box';
import { ActionSheetContext } from './ActionSheetContext';
import { useHasResponsiveProps } from '../../../hooks/useHasResponsiveProps';
const Content = memo(
forwardRef(
(
{
hideDragIndicator,
children,
handleClose,
pan,
sheetHeight,
...props
}: any,
ref: any
) => {
const { _dragIndicator, ...resolvedProps } = usePropsResolution(
'ActionsheetContent',
props
);
const panResponder = React.useRef(
PanResponder.create({
onStartShouldSetPanResponder: () => true,
onMoveShouldSetPanResponder: (_evt, gestureState) => {
return gestureState.dy > 15;
},
onPanResponderMove: (e, gestureState) => {
if (gestureState.dy > 0) {
Animated.event([null, { dy: pan.y }], {
useNativeDriver: false,
})(e, gestureState);
}
},
onPanResponderRelease: (_e, gestureState) => {
// If sheet is dragged 1/4th of it's height, close it
if (sheetHeight.current / 4 - gestureState.dy < 0) {
Animated.timing(pan, {
toValue: { x: 0, y: sheetHeight.current },
duration: 150,
useNativeDriver: true,
}).start(handleClose);
setTimeout(() => {
Animated.spring(pan, {
toValue: { x: 0, y: 0 },
overshootClamping: true,
useNativeDriver: true,
}).start();
});
} else |
},
})
).current;
return (
<>
{!hideDragIndicator ? (
<>
{/* To increase the draggable area */}
<Box py={5} {...panResponder.panHandlers} collapsable={false} />
</>
) : null}
<Modal.Content {...resolvedProps} ref={ref} safeAreaBottom>
{!hideDragIndicator ? (
<>
{/* Hack. Fix later. Add -2 negative margin to remove the padding added by ActionSheetContent */}
<Box
pt={3}
pb={3}
mt={-2}
{...panResponder.panHandlers}
width="100%"
alignItems="center"
collapsable={false}
>
<Box {..._dragIndicator} />
</Box>
</>
) : null}
{children}
</Modal.Content>
</>
);
}
)
);
const ActionsheetContent = (
{ children, ...props }: IActionsheetContentProps,
ref?: any
) => {
// return null;
const { handleClose } = React.useContext(ModalContext);
const { hideDragIndicator } = React.useContext(ActionSheetContext);
const pan = React.useRef(new Animated.ValueXY()).current;
const sheetHeight = React.useRef(0);
const handleCloseCallback = React.useCallback(handleClose, [
ModalContext,
handleClose,
]);
// useEffect(() => {
// }, [])
//TODO: refactor for responsive prop
if (useHasResponsiveProps(props)) {
return null;
}
return (
<Animated.View
style={{
transform: [{ translateY: pan.y }],
width: '100%',
}}
onLayout={(event) => {
const { height } = event.nativeEvent.layout;
sheetHeight.current = height;
}}
pointerEvents="box-none"
>
<Content
children={children}
sheetHeight={sheetHeight}
pan={pan}
hideDragIndicator={hideDragIndicator}
handleClose={handleCloseCallback}
ref={ref}
{...props}
/>
</Animated.View>
);
};
export default memo(forwardRef(ActionsheetContent));
| {
Animated.spring(pan, {
toValue: { x: 0, y: 0 },
overshootClamping: true,
useNativeDriver: true,
}).start();
} | conditional_block |
ActionsheetContent.tsx | import React, { memo, forwardRef } from 'react';
import { Modal } from '../../composites/Modal';
import type { IActionsheetContentProps } from './types';
import { usePropsResolution } from '../../../hooks';
import { Animated, PanResponder } from 'react-native';
import { ModalContext } from '../Modal/Context';
import Box from '../../primitives/Box';
import { ActionSheetContext } from './ActionSheetContext';
import { useHasResponsiveProps } from '../../../hooks/useHasResponsiveProps';
const Content = memo(
forwardRef(
(
{
hideDragIndicator,
children,
handleClose,
pan,
sheetHeight,
...props
}: any,
ref: any
) => {
const { _dragIndicator, ...resolvedProps } = usePropsResolution(
'ActionsheetContent',
props
);
const panResponder = React.useRef(
PanResponder.create({
onStartShouldSetPanResponder: () => true,
onMoveShouldSetPanResponder: (_evt, gestureState) => {
return gestureState.dy > 15;
},
onPanResponderMove: (e, gestureState) => {
if (gestureState.dy > 0) {
Animated.event([null, { dy: pan.y }], {
useNativeDriver: false,
})(e, gestureState);
}
},
onPanResponderRelease: (_e, gestureState) => {
// If sheet is dragged 1/4th of it's height, close it
if (sheetHeight.current / 4 - gestureState.dy < 0) {
Animated.timing(pan, {
toValue: { x: 0, y: sheetHeight.current },
duration: 150,
useNativeDriver: true,
}).start(handleClose);
setTimeout(() => {
Animated.spring(pan, {
toValue: { x: 0, y: 0 },
overshootClamping: true,
useNativeDriver: true,
}).start();
});
} else {
Animated.spring(pan, {
toValue: { x: 0, y: 0 },
overshootClamping: true,
useNativeDriver: true,
}).start();
}
},
})
).current;
return (
<>
{!hideDragIndicator ? (
<>
{/* To increase the draggable area */}
<Box py={5} {...panResponder.panHandlers} collapsable={false} />
</>
) : null}
<Modal.Content {...resolvedProps} ref={ref} safeAreaBottom>
{!hideDragIndicator ? (
<>
{/* Hack. Fix later. Add -2 negative margin to remove the padding added by ActionSheetContent */}
<Box
pt={3}
pb={3}
mt={-2}
{...panResponder.panHandlers}
width="100%"
alignItems="center"
collapsable={false}
>
<Box {..._dragIndicator} />
</Box>
</>
) : null}
{children}
</Modal.Content>
</>
);
}
)
);
const ActionsheetContent = (
{ children, ...props }: IActionsheetContentProps,
ref?: any
) => {
// return null;
const { handleClose } = React.useContext(ModalContext);
const { hideDragIndicator } = React.useContext(ActionSheetContext);
const pan = React.useRef(new Animated.ValueXY()).current;
const sheetHeight = React.useRef(0);
const handleCloseCallback = React.useCallback(handleClose, [
ModalContext,
handleClose,
]);
// useEffect(() => {
// }, [])
//TODO: refactor for responsive prop
if (useHasResponsiveProps(props)) { | }
return (
<Animated.View
style={{
transform: [{ translateY: pan.y }],
width: '100%',
}}
onLayout={(event) => {
const { height } = event.nativeEvent.layout;
sheetHeight.current = height;
}}
pointerEvents="box-none"
>
<Content
children={children}
sheetHeight={sheetHeight}
pan={pan}
hideDragIndicator={hideDragIndicator}
handleClose={handleCloseCallback}
ref={ref}
{...props}
/>
</Animated.View>
);
};
export default memo(forwardRef(ActionsheetContent)); | return null; | random_line_split |
google-map.ts | import { Component, OnInit, Input } from '@angular/core';
import { LoadingController, NavController } from 'ionic-angular';
import { Geolocation } from 'ionic-native';
import { Observable } from 'rxjs/Observable';
import { OriginLocationComponent } from '../origin-location/origin-location';
// import { AvailableProvidersComponent } from '../available-providers/available-providers';
@Component({
selector: 'google-map',
templateUrl: 'google-map.html',
entryComponents: [OriginLocationComponent]
})
export class GoogleMapComponent implements OnInit {
@Input() isServiceRequested: boolean;
public location; map;
public isMapIdle: boolean;
constructor(public navCtrl: NavController, public loadingCtrl: LoadingController) {}
ngOnInit(){
this.map = this.createMap();
this.addMapEventListeners();
this.getLocation().subscribe(location => {
this.centerLocation(location)
})
}
addMapEventListeners(){
google.maps.event.addListener(this.map, 'dragstart', ()=>{
this.isMapIdle = false;
})
google.maps.event.addListener(this.map, 'idle', ()=>{
this.isMapIdle = true;
})
}
getLocation() {
let loading = this.loadingCtrl.create({
content: 'Locating...',
spinner: 'bubbles'
});
loading.present()
setTimeout(() => {
loading.dismiss();
}, 5000)
let options = {timeout: 10000, enableHighAccuracy: true};
let locationObs = Observable.create(observable => {
Geolocation.getCurrentPosition(options)
.then(resp => {
let lat = resp.coords.latitude;
let lng = resp.coords.longitude;
let location = new google.maps.LatLng(lat, lng);
console.log(lat, lng)
observable.next(location);
},
(err) => {
console.log('Geolocation err: ' + err);
loading.dismiss();
})
})
return locationObs;
}
createMap(location = new google.maps.LatLng(39.1031, -84.5120)) |
centerLocation(location){
if (location){
this.map.panTo(location)
} else {
this.getLocation().subscribe(currentLocation => {
this.map.panTo(currentLocation)
})
}
}
}
| {
let mapOptions = {
center: location,
zoom: 13,
mapTypeId: google.maps.MapTypeId.ROADMAP,
disableDefaultUI: true
}
let mapEl = document.getElementById('map');
let map = new google.maps.Map(mapEl, mapOptions);
return map;
} | identifier_body |
google-map.ts | import { Component, OnInit, Input } from '@angular/core';
import { LoadingController, NavController } from 'ionic-angular';
import { Geolocation } from 'ionic-native';
import { Observable } from 'rxjs/Observable';
import { OriginLocationComponent } from '../origin-location/origin-location';
// import { AvailableProvidersComponent } from '../available-providers/available-providers';
@Component({
selector: 'google-map',
templateUrl: 'google-map.html',
entryComponents: [OriginLocationComponent]
})
export class GoogleMapComponent implements OnInit {
@Input() isServiceRequested: boolean;
public location; map;
public isMapIdle: boolean;
constructor(public navCtrl: NavController, public loadingCtrl: LoadingController) {}
ngOnInit(){
this.map = this.createMap();
this.addMapEventListeners();
this.getLocation().subscribe(location => {
this.centerLocation(location)
})
}
addMapEventListeners(){
google.maps.event.addListener(this.map, 'dragstart', ()=>{
this.isMapIdle = false;
})
google.maps.event.addListener(this.map, 'idle', ()=>{
this.isMapIdle = true;
})
}
getLocation() {
let loading = this.loadingCtrl.create({
content: 'Locating...',
spinner: 'bubbles'
});
loading.present()
setTimeout(() => {
loading.dismiss();
}, 5000)
let options = {timeout: 10000, enableHighAccuracy: true};
let locationObs = Observable.create(observable => {
Geolocation.getCurrentPosition(options)
.then(resp => {
let lat = resp.coords.latitude;
let lng = resp.coords.longitude;
let location = new google.maps.LatLng(lat, lng);
console.log(lat, lng)
observable.next(location);
},
(err) => {
console.log('Geolocation err: ' + err);
loading.dismiss();
})
})
return locationObs;
}
createMap(location = new google.maps.LatLng(39.1031, -84.5120)){
let mapOptions = {
center: location,
zoom: 13,
mapTypeId: google.maps.MapTypeId.ROADMAP,
disableDefaultUI: true
}
let mapEl = document.getElementById('map');
let map = new google.maps.Map(mapEl, mapOptions);
return map;
}
centerLocation(location){
if (location){
this.map.panTo(location)
} else {
this.getLocation().subscribe(currentLocation => {
this.map.panTo(currentLocation)
})
}
} | } | random_line_split | |
google-map.ts | import { Component, OnInit, Input } from '@angular/core';
import { LoadingController, NavController } from 'ionic-angular';
import { Geolocation } from 'ionic-native';
import { Observable } from 'rxjs/Observable';
import { OriginLocationComponent } from '../origin-location/origin-location';
// import { AvailableProvidersComponent } from '../available-providers/available-providers';
@Component({
selector: 'google-map',
templateUrl: 'google-map.html',
entryComponents: [OriginLocationComponent]
})
export class GoogleMapComponent implements OnInit {
@Input() isServiceRequested: boolean;
public location; map;
public isMapIdle: boolean;
constructor(public navCtrl: NavController, public loadingCtrl: LoadingController) {}
| (){
this.map = this.createMap();
this.addMapEventListeners();
this.getLocation().subscribe(location => {
this.centerLocation(location)
})
}
addMapEventListeners(){
google.maps.event.addListener(this.map, 'dragstart', ()=>{
this.isMapIdle = false;
})
google.maps.event.addListener(this.map, 'idle', ()=>{
this.isMapIdle = true;
})
}
getLocation() {
let loading = this.loadingCtrl.create({
content: 'Locating...',
spinner: 'bubbles'
});
loading.present()
setTimeout(() => {
loading.dismiss();
}, 5000)
let options = {timeout: 10000, enableHighAccuracy: true};
let locationObs = Observable.create(observable => {
Geolocation.getCurrentPosition(options)
.then(resp => {
let lat = resp.coords.latitude;
let lng = resp.coords.longitude;
let location = new google.maps.LatLng(lat, lng);
console.log(lat, lng)
observable.next(location);
},
(err) => {
console.log('Geolocation err: ' + err);
loading.dismiss();
})
})
return locationObs;
}
createMap(location = new google.maps.LatLng(39.1031, -84.5120)){
let mapOptions = {
center: location,
zoom: 13,
mapTypeId: google.maps.MapTypeId.ROADMAP,
disableDefaultUI: true
}
let mapEl = document.getElementById('map');
let map = new google.maps.Map(mapEl, mapOptions);
return map;
}
centerLocation(location){
if (location){
this.map.panTo(location)
} else {
this.getLocation().subscribe(currentLocation => {
this.map.panTo(currentLocation)
})
}
}
}
| ngOnInit | identifier_name |
google-map.ts | import { Component, OnInit, Input } from '@angular/core';
import { LoadingController, NavController } from 'ionic-angular';
import { Geolocation } from 'ionic-native';
import { Observable } from 'rxjs/Observable';
import { OriginLocationComponent } from '../origin-location/origin-location';
// import { AvailableProvidersComponent } from '../available-providers/available-providers';
@Component({
selector: 'google-map',
templateUrl: 'google-map.html',
entryComponents: [OriginLocationComponent]
})
export class GoogleMapComponent implements OnInit {
@Input() isServiceRequested: boolean;
public location; map;
public isMapIdle: boolean;
constructor(public navCtrl: NavController, public loadingCtrl: LoadingController) {}
ngOnInit(){
this.map = this.createMap();
this.addMapEventListeners();
this.getLocation().subscribe(location => {
this.centerLocation(location)
})
}
addMapEventListeners(){
google.maps.event.addListener(this.map, 'dragstart', ()=>{
this.isMapIdle = false;
})
google.maps.event.addListener(this.map, 'idle', ()=>{
this.isMapIdle = true;
})
}
getLocation() {
let loading = this.loadingCtrl.create({
content: 'Locating...',
spinner: 'bubbles'
});
loading.present()
setTimeout(() => {
loading.dismiss();
}, 5000)
let options = {timeout: 10000, enableHighAccuracy: true};
let locationObs = Observable.create(observable => {
Geolocation.getCurrentPosition(options)
.then(resp => {
let lat = resp.coords.latitude;
let lng = resp.coords.longitude;
let location = new google.maps.LatLng(lat, lng);
console.log(lat, lng)
observable.next(location);
},
(err) => {
console.log('Geolocation err: ' + err);
loading.dismiss();
})
})
return locationObs;
}
createMap(location = new google.maps.LatLng(39.1031, -84.5120)){
let mapOptions = {
center: location,
zoom: 13,
mapTypeId: google.maps.MapTypeId.ROADMAP,
disableDefaultUI: true
}
let mapEl = document.getElementById('map');
let map = new google.maps.Map(mapEl, mapOptions);
return map;
}
centerLocation(location){
if (location) | else {
this.getLocation().subscribe(currentLocation => {
this.map.panTo(currentLocation)
})
}
}
}
| {
this.map.panTo(location)
} | conditional_block |
github.ts | var ref = new Firebase("https://ng2-projects.firebaseio.com");
const GIT_API = 'https://api.github.com/repos/angular/angular/';
const TRAVIS_API = 'https://api.travis-ci.org/repos/angular/angular/';
function gitToken() {
if (ref.getAuth()) {
return (<any>ref.getAuth()).github.accessToken
}
return null;
}
function urlGET(url: string, token: string, cb:(statusCode: number, data:any) => void) {
var http = new XMLHttpRequest();
http.open('GET', url);
if (token) {
http.setRequestHeader("Authorization", "token " + token);
}
http.onreadystatechange = () => {
if (http.readyState == 4) {
var status = http.status;
var data = http.responseText;
if (data.length && (data.charAt(0) == '[' || data.charAt(0) == '{')) {
data = JSON.parse(data);
}
cb(status, data);
}
}
http.send();
}
export class Repository {
state: string;
issues: { [s: string]: Issue; } = {};
previousIssues: { [s: string]: Issue; } = {};
prs: { [s: string]: Issue; } = {};
previousPrs: { [s: string]: Issue; } = {};
onNewIssue: (issue: Issue) => void = () => null;
onRemovedIssue: (issue: Issue) => void = () => null;
onNewPR: (issue: Issue) => void = () => null;
onRemovedPR: (issue: Issue) => void = () => null;
constructor(public username: string, public repository: string) {
this.state = '';
}
loadBranches(notify:(name: string, job: string, status: string) => void) {
urlGET(GIT_API + 'branches?per_page=100', gitToken(), (code: number, data: Array<Branch>) => {
if (code !== 200) return;
data.forEach((branch: Branch) => {
if (branch.name.indexOf('presubmit-') == 0) {
urlGET(TRAVIS_API + 'branches/' + branch.name, null, (code: number, travis: TravisBranch) => {
notify(branch.name, travis.branch.id, travis.branch.state);
});
}
});
});
}
refresh() {
this.state = 'refreshing';
this.previousIssues = this.issues;
this.previousPrs = this.prs;
var fetchPage = (page: number) => {
var http = new XMLHttpRequest();
var url = buildUrl('/repos/angular/angular/issues', {
per_page: 100,
page: page
});
urlGET(url, gitToken(), (status, data) => {
if(status == 200) {
var issues: Array<Issue> = data;
issues.forEach(this._processIssues.bind(this));
if (issues.length >= 100) {
fetchPage(page + 1);
} else {
this.state = '';
this._notifyRemoves();
}
} else {
console.error(data);
}
});
}
fetchPage(0);
}
_processIssues(issue: Issue) {
this._parseLabels(issue);
issue.needsTriage = function() {
if (this.pull_request) {
return false;
} else {
return !this.type || !this.priority || !this.comp || !this.effort;
}
}
if (issue.pull_request) {
this.issues[issue.number] = issue;
this.onNewPR(issue);
} else {
this.prs[issue.number] = issue;
this.onNewIssue(issue);
}
}
_notifyRemoves() {
for(var issueNo in this.previousIssues) {
if (!this.issues[issueNo]) {
this.onRemovedIssue(this.previousIssues[issueNo]);
}
}
for(var prNo in this.previousPrs) {
if (!this.prs[prNo]) {
this.onRemovedIssue(this.previousPrs[prNo]);
}
}
}
_parseLabels(issue: Issue) {
var other: Array<string> = issue.labels_other = [];
issue.priority = '';
issue.type = '';
//issue.component = '';
issue.labels.forEach((label: Label) => {
var match = /^([A-Za-z]+)(\d*):\s*(.*)$/.exec(label.name);
var name = match && match[1] || '';
var level = match && match[2] || 0;
var value = match && match[3] || '';
if (value) {
value = value.split(' / ')[0].trim();
}
if (name == 'P') {
name = 'priority';
value = 'P' + level;
}
if (name == 'effort') {
value = level + ': ' + value;
}
if (name == 'state') |
switch (name) {
case 'priority':
case 'effort':
case 'comp':
case 'cla':
case 'pr_state':
case 'pr_action':
case 'cust':
case 'hotlist':
case 'issue_state':
case 'type':
(<any>issue)[name] = ((<any>issue)[name] ? (<any>issue)[name] + '; ' : '') + value;
break;
default:
other.push(label.name);
}
});
}
}
export class Mentions {
list: {title: string, url: string, number: number, state: string}[] = [];
refresh(username: string, org: string, days: any, from: string[]) {
this.list = [];
var xhr = new XMLHttpRequest();
var url = buildUrl('/search/issues', {
q: this._buildQuery(username, org, days, from)
});
xhr.onload = () => {
var status = xhr.status;
if (200 <= status && status <= 300) {
var mentions = JSON.parse(xhr.responseText);
mentions.items.forEach((mention: Mention) => {
this.list.push({
number: mention.number,
title: mention.title,
url: mention.html_url,
state: mention.state
});
});
} else {
console.error(xhr.responseText);
}
}
xhr.open("GET", url);
if (ref.getAuth()) {
xhr.setRequestHeader("Authorization", "token " + (<any>ref.getAuth()).github.accessToken);
}
xhr.send();
}
_buildQuery(username: string, org: string, days: any, from: string[]) {
let date: Date = new Date(Date.now() - days * 24 * 3600 * 1000);
let query = `mentions:${username}+user:${org}+created:>=${date.toISOString().substring(0, 10)}`;
if (from && from.length) {
from.forEach(u => {
query += `+involves:${u}`;
})
}
return query;
}
}
function buildUrl(ep: string, params: any): string {
var strParams: Array<string> = [];
for (let p in params) {
strParams.push(`${p}=${params[p]}`);
}
if (ep[0] == '/') ep = ep.substring(1);
return `https://api.github.com/${ep}?${strParams.join('&')}`;
}
| {
name = 'issue_state';
} | conditional_block |
github.ts | var ref = new Firebase("https://ng2-projects.firebaseio.com");
const GIT_API = 'https://api.github.com/repos/angular/angular/';
const TRAVIS_API = 'https://api.travis-ci.org/repos/angular/angular/';
function gitToken() {
if (ref.getAuth()) {
return (<any>ref.getAuth()).github.accessToken
}
return null;
}
function urlGET(url: string, token: string, cb:(statusCode: number, data:any) => void) {
var http = new XMLHttpRequest();
http.open('GET', url);
if (token) {
http.setRequestHeader("Authorization", "token " + token);
}
http.onreadystatechange = () => {
if (http.readyState == 4) {
var status = http.status;
var data = http.responseText;
if (data.length && (data.charAt(0) == '[' || data.charAt(0) == '{')) {
data = JSON.parse(data);
}
cb(status, data);
}
}
http.send();
}
export class Repository {
state: string;
issues: { [s: string]: Issue; } = {};
previousIssues: { [s: string]: Issue; } = {};
prs: { [s: string]: Issue; } = {};
previousPrs: { [s: string]: Issue; } = {};
onNewIssue: (issue: Issue) => void = () => null;
onRemovedIssue: (issue: Issue) => void = () => null;
onNewPR: (issue: Issue) => void = () => null;
onRemovedPR: (issue: Issue) => void = () => null;
constructor(public username: string, public repository: string) {
this.state = '';
}
loadBranches(notify:(name: string, job: string, status: string) => void) {
urlGET(GIT_API + 'branches?per_page=100', gitToken(), (code: number, data: Array<Branch>) => {
if (code !== 200) return;
data.forEach((branch: Branch) => {
if (branch.name.indexOf('presubmit-') == 0) {
urlGET(TRAVIS_API + 'branches/' + branch.name, null, (code: number, travis: TravisBranch) => {
notify(branch.name, travis.branch.id, travis.branch.state);
});
}
});
});
}
refresh() {
this.state = 'refreshing';
this.previousIssues = this.issues;
this.previousPrs = this.prs; |
var fetchPage = (page: number) => {
var http = new XMLHttpRequest();
var url = buildUrl('/repos/angular/angular/issues', {
per_page: 100,
page: page
});
urlGET(url, gitToken(), (status, data) => {
if(status == 200) {
var issues: Array<Issue> = data;
issues.forEach(this._processIssues.bind(this));
if (issues.length >= 100) {
fetchPage(page + 1);
} else {
this.state = '';
this._notifyRemoves();
}
} else {
console.error(data);
}
});
}
fetchPage(0);
}
_processIssues(issue: Issue) {
this._parseLabels(issue);
issue.needsTriage = function() {
if (this.pull_request) {
return false;
} else {
return !this.type || !this.priority || !this.comp || !this.effort;
}
}
if (issue.pull_request) {
this.issues[issue.number] = issue;
this.onNewPR(issue);
} else {
this.prs[issue.number] = issue;
this.onNewIssue(issue);
}
}
_notifyRemoves() {
for(var issueNo in this.previousIssues) {
if (!this.issues[issueNo]) {
this.onRemovedIssue(this.previousIssues[issueNo]);
}
}
for(var prNo in this.previousPrs) {
if (!this.prs[prNo]) {
this.onRemovedIssue(this.previousPrs[prNo]);
}
}
}
_parseLabels(issue: Issue) {
var other: Array<string> = issue.labels_other = [];
issue.priority = '';
issue.type = '';
//issue.component = '';
issue.labels.forEach((label: Label) => {
var match = /^([A-Za-z]+)(\d*):\s*(.*)$/.exec(label.name);
var name = match && match[1] || '';
var level = match && match[2] || 0;
var value = match && match[3] || '';
if (value) {
value = value.split(' / ')[0].trim();
}
if (name == 'P') {
name = 'priority';
value = 'P' + level;
}
if (name == 'effort') {
value = level + ': ' + value;
}
if (name == 'state') {
name = 'issue_state';
}
switch (name) {
case 'priority':
case 'effort':
case 'comp':
case 'cla':
case 'pr_state':
case 'pr_action':
case 'cust':
case 'hotlist':
case 'issue_state':
case 'type':
(<any>issue)[name] = ((<any>issue)[name] ? (<any>issue)[name] + '; ' : '') + value;
break;
default:
other.push(label.name);
}
});
}
}
export class Mentions {
list: {title: string, url: string, number: number, state: string}[] = [];
refresh(username: string, org: string, days: any, from: string[]) {
this.list = [];
var xhr = new XMLHttpRequest();
var url = buildUrl('/search/issues', {
q: this._buildQuery(username, org, days, from)
});
xhr.onload = () => {
var status = xhr.status;
if (200 <= status && status <= 300) {
var mentions = JSON.parse(xhr.responseText);
mentions.items.forEach((mention: Mention) => {
this.list.push({
number: mention.number,
title: mention.title,
url: mention.html_url,
state: mention.state
});
});
} else {
console.error(xhr.responseText);
}
}
xhr.open("GET", url);
if (ref.getAuth()) {
xhr.setRequestHeader("Authorization", "token " + (<any>ref.getAuth()).github.accessToken);
}
xhr.send();
}
_buildQuery(username: string, org: string, days: any, from: string[]) {
let date: Date = new Date(Date.now() - days * 24 * 3600 * 1000);
let query = `mentions:${username}+user:${org}+created:>=${date.toISOString().substring(0, 10)}`;
if (from && from.length) {
from.forEach(u => {
query += `+involves:${u}`;
})
}
return query;
}
}
function buildUrl(ep: string, params: any): string {
var strParams: Array<string> = [];
for (let p in params) {
strParams.push(`${p}=${params[p]}`);
}
if (ep[0] == '/') ep = ep.substring(1);
return `https://api.github.com/${ep}?${strParams.join('&')}`;
} | random_line_split | |
github.ts | var ref = new Firebase("https://ng2-projects.firebaseio.com");
const GIT_API = 'https://api.github.com/repos/angular/angular/';
const TRAVIS_API = 'https://api.travis-ci.org/repos/angular/angular/';
function gitToken() {
if (ref.getAuth()) {
return (<any>ref.getAuth()).github.accessToken
}
return null;
}
function urlGET(url: string, token: string, cb:(statusCode: number, data:any) => void) {
var http = new XMLHttpRequest();
http.open('GET', url);
if (token) {
http.setRequestHeader("Authorization", "token " + token);
}
http.onreadystatechange = () => {
if (http.readyState == 4) {
var status = http.status;
var data = http.responseText;
if (data.length && (data.charAt(0) == '[' || data.charAt(0) == '{')) {
data = JSON.parse(data);
}
cb(status, data);
}
}
http.send();
}
export class Repository {
state: string;
issues: { [s: string]: Issue; } = {};
previousIssues: { [s: string]: Issue; } = {};
prs: { [s: string]: Issue; } = {};
previousPrs: { [s: string]: Issue; } = {};
onNewIssue: (issue: Issue) => void = () => null;
onRemovedIssue: (issue: Issue) => void = () => null;
onNewPR: (issue: Issue) => void = () => null;
onRemovedPR: (issue: Issue) => void = () => null;
constructor(public username: string, public repository: string) {
this.state = '';
}
loadBranches(notify:(name: string, job: string, status: string) => void) {
urlGET(GIT_API + 'branches?per_page=100', gitToken(), (code: number, data: Array<Branch>) => {
if (code !== 200) return;
data.forEach((branch: Branch) => {
if (branch.name.indexOf('presubmit-') == 0) {
urlGET(TRAVIS_API + 'branches/' + branch.name, null, (code: number, travis: TravisBranch) => {
notify(branch.name, travis.branch.id, travis.branch.state);
});
}
});
});
}
refresh() {
this.state = 'refreshing';
this.previousIssues = this.issues;
this.previousPrs = this.prs;
var fetchPage = (page: number) => {
var http = new XMLHttpRequest();
var url = buildUrl('/repos/angular/angular/issues', {
per_page: 100,
page: page
});
urlGET(url, gitToken(), (status, data) => {
if(status == 200) {
var issues: Array<Issue> = data;
issues.forEach(this._processIssues.bind(this));
if (issues.length >= 100) {
fetchPage(page + 1);
} else {
this.state = '';
this._notifyRemoves();
}
} else {
console.error(data);
}
});
}
fetchPage(0);
}
_processIssues(issue: Issue) {
this._parseLabels(issue);
issue.needsTriage = function() {
if (this.pull_request) {
return false;
} else {
return !this.type || !this.priority || !this.comp || !this.effort;
}
}
if (issue.pull_request) {
this.issues[issue.number] = issue;
this.onNewPR(issue);
} else {
this.prs[issue.number] = issue;
this.onNewIssue(issue);
}
}
| () {
for(var issueNo in this.previousIssues) {
if (!this.issues[issueNo]) {
this.onRemovedIssue(this.previousIssues[issueNo]);
}
}
for(var prNo in this.previousPrs) {
if (!this.prs[prNo]) {
this.onRemovedIssue(this.previousPrs[prNo]);
}
}
}
_parseLabels(issue: Issue) {
var other: Array<string> = issue.labels_other = [];
issue.priority = '';
issue.type = '';
//issue.component = '';
issue.labels.forEach((label: Label) => {
var match = /^([A-Za-z]+)(\d*):\s*(.*)$/.exec(label.name);
var name = match && match[1] || '';
var level = match && match[2] || 0;
var value = match && match[3] || '';
if (value) {
value = value.split(' / ')[0].trim();
}
if (name == 'P') {
name = 'priority';
value = 'P' + level;
}
if (name == 'effort') {
value = level + ': ' + value;
}
if (name == 'state') {
name = 'issue_state';
}
switch (name) {
case 'priority':
case 'effort':
case 'comp':
case 'cla':
case 'pr_state':
case 'pr_action':
case 'cust':
case 'hotlist':
case 'issue_state':
case 'type':
(<any>issue)[name] = ((<any>issue)[name] ? (<any>issue)[name] + '; ' : '') + value;
break;
default:
other.push(label.name);
}
});
}
}
export class Mentions {
list: {title: string, url: string, number: number, state: string}[] = [];
refresh(username: string, org: string, days: any, from: string[]) {
this.list = [];
var xhr = new XMLHttpRequest();
var url = buildUrl('/search/issues', {
q: this._buildQuery(username, org, days, from)
});
xhr.onload = () => {
var status = xhr.status;
if (200 <= status && status <= 300) {
var mentions = JSON.parse(xhr.responseText);
mentions.items.forEach((mention: Mention) => {
this.list.push({
number: mention.number,
title: mention.title,
url: mention.html_url,
state: mention.state
});
});
} else {
console.error(xhr.responseText);
}
}
xhr.open("GET", url);
if (ref.getAuth()) {
xhr.setRequestHeader("Authorization", "token " + (<any>ref.getAuth()).github.accessToken);
}
xhr.send();
}
_buildQuery(username: string, org: string, days: any, from: string[]) {
let date: Date = new Date(Date.now() - days * 24 * 3600 * 1000);
let query = `mentions:${username}+user:${org}+created:>=${date.toISOString().substring(0, 10)}`;
if (from && from.length) {
from.forEach(u => {
query += `+involves:${u}`;
})
}
return query;
}
}
function buildUrl(ep: string, params: any): string {
var strParams: Array<string> = [];
for (let p in params) {
strParams.push(`${p}=${params[p]}`);
}
if (ep[0] == '/') ep = ep.substring(1);
return `https://api.github.com/${ep}?${strParams.join('&')}`;
}
| _notifyRemoves | identifier_name |
cmac.py | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import CMACBackend
from cryptography.hazmat.primitives import ciphers, mac
@utils.register_interface(mac.MACContext)
class CMAC(object):
| def __init__(self, algorithm, backend, ctx=None):
if not isinstance(backend, CMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
raise TypeError(
"Expected instance of BlockCipherAlgorithm."
)
self._algorithm = algorithm
self._backend = backend
if ctx is None:
self._ctx = self._backend.create_cmac_ctx(self._algorithm)
else:
self._ctx = ctx
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
self._ctx.update(data)
def finalize(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
digest = self._ctx.finalize()
self._ctx = None
return digest
def verify(self, signature):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
ctx, self._ctx = self._ctx, None
ctx.verify(signature)
def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return CMAC(
self._algorithm,
backend=self._backend,
ctx=self._ctx.copy()
) | identifier_body | |
cmac.py | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import CMACBackend
from cryptography.hazmat.primitives import ciphers, mac
@utils.register_interface(mac.MACContext)
class CMAC(object):
def __init__(self, algorithm, backend, ctx=None):
if not isinstance(backend, CMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
raise TypeError(
"Expected instance of BlockCipherAlgorithm."
)
self._algorithm = algorithm
self._backend = backend
if ctx is None:
self._ctx = self._backend.create_cmac_ctx(self._algorithm)
else:
self._ctx = ctx
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
self._ctx.update(data)
def finalize(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
digest = self._ctx.finalize()
self._ctx = None
return digest
def verify(self, signature):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if self._ctx is None:
|
ctx, self._ctx = self._ctx, None
ctx.verify(signature)
def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return CMAC(
self._algorithm,
backend=self._backend,
ctx=self._ctx.copy()
)
| raise AlreadyFinalized("Context was already finalized.") | conditional_block |
cmac.py | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import CMACBackend
from cryptography.hazmat.primitives import ciphers, mac
@utils.register_interface(mac.MACContext)
class CMAC(object):
def __init__(self, algorithm, backend, ctx=None):
if not isinstance(backend, CMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
raise TypeError(
"Expected instance of BlockCipherAlgorithm."
)
self._algorithm = algorithm
self._backend = backend
if ctx is None:
self._ctx = self._backend.create_cmac_ctx(self._algorithm)
else:
self._ctx = ctx
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
self._ctx.update(data)
def | (self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
digest = self._ctx.finalize()
self._ctx = None
return digest
def verify(self, signature):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
ctx, self._ctx = self._ctx, None
ctx.verify(signature)
def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return CMAC(
self._algorithm,
backend=self._backend,
ctx=self._ctx.copy()
)
| finalize | identifier_name |
cmac.py | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import CMACBackend
from cryptography.hazmat.primitives import ciphers, mac
@utils.register_interface(mac.MACContext)
class CMAC(object):
def __init__(self, algorithm, backend, ctx=None):
if not isinstance(backend, CMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement CMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
raise TypeError(
"Expected instance of BlockCipherAlgorithm."
)
self._algorithm = algorithm
self._backend = backend
if ctx is None:
self._ctx = self._backend.create_cmac_ctx(self._algorithm)
else:
self._ctx = ctx
def update(self, data):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
if not isinstance(data, bytes):
raise TypeError("data must be bytes.")
self._ctx.update(data) | if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
digest = self._ctx.finalize()
self._ctx = None
return digest
def verify(self, signature):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
ctx, self._ctx = self._ctx, None
ctx.verify(signature)
def copy(self):
if self._ctx is None:
raise AlreadyFinalized("Context was already finalized.")
return CMAC(
self._algorithm,
backend=self._backend,
ctx=self._ctx.copy()
) |
def finalize(self): | random_line_split |
group__eth__mac__interface__gr.js | var group__eth__mac__interface__gr =
[
[ "Ethernet MAC Events", "group__ETH__MAC__events.html", "group__ETH__MAC__events" ],
[ "Ethernet MAC Control Codes", "group__eth__mac__control.html", "group__eth__mac__control" ],
[ "Ethernet MAC Timer Control Codes", "group__eth__mac__time__control.html", "group__eth__mac__time__control" ],
[ "Ethernet MAC Frame Transmit Flags", "group__eth__mac__frame__transmit__ctrls.html", "group__eth__mac__frame__transmit__ctrls" ],
[ "ARM_ETH_MAC_CAPABILITIES", "group__eth__mac__interface__gr.html#structARM__ETH__MAC__CAPABILITIES", [
[ "checksum_offload_rx_ip4", "group__eth__mac__interface__gr.html#a0051111be2e389c3161da1c444746216", null ],
[ "checksum_offload_rx_ip6", "group__eth__mac__interface__gr.html#a674b2306c64901e924b3cb7bb882f32f", null ],
[ "checksum_offload_rx_udp", "group__eth__mac__interface__gr.html#a5a447f05a5fbfd35896aad9cd769511c", null ],
[ "checksum_offload_rx_tcp", "group__eth__mac__interface__gr.html#a730d6be6a7b868e0690d9548e77b7aae", null ],
[ "checksum_offload_rx_icmp", "group__eth__mac__interface__gr.html#a142179445bfdbaaaf0d451f277fb0e96", null ],
[ "checksum_offload_tx_ip4", "group__eth__mac__interface__gr.html#ac787d70407ce70e28724932fb32ef0ba", null ],
[ "checksum_offload_tx_ip6", "group__eth__mac__interface__gr.html#a8f7a154565e652d976b9e65bf3516504", null ],
[ "checksum_offload_tx_udp", "group__eth__mac__interface__gr.html#ab3f9560668a087606c40cd81b935396b", null ],
[ "checksum_offload_tx_tcp", "group__eth__mac__interface__gr.html#a6c2b80bbfe520f3e7808cf3d4aaedb45", null ],
[ "checksum_offload_tx_icmp", "group__eth__mac__interface__gr.html#a7b701bac9d66886b5c6964b20c6ca55a", null ],
[ "media_interface", "group__eth__mac__interface__gr.html#a3c5cb74e086417a01d0079f847a3fc8d", null ],
[ "mac_address", "group__eth__mac__interface__gr.html#a7fdea04bacd9c0e12792751055ef6238", null ],
[ "event_rx_frame", "group__eth__mac__interface__gr.html#a8c8f1ac2bf053a9bac98c476646a6018", null ],
[ "event_tx_frame", "group__eth__mac__interface__gr.html#a1b4af3590d59ea4f8e845b4239a4e445", null ],
[ "event_wakeup", "group__eth__mac__interface__gr.html#a7536d9b9818b20b6974a712e0449439b", null ],
[ "precision_timer", "group__eth__mac__interface__gr.html#a881a863974d32f95d7829f768ac47aa2", null ],
[ "reserved", "group__eth__mac__interface__gr.html#aa43c4c21b173ada1b6b7568956f0d650", null ]
] ],
[ "ARM_DRIVER_ETH_MAC", "group__eth__mac__interface__gr.html#structARM__DRIVER__ETH__MAC", [
[ "GetVersion", "group__eth__mac__interface__gr.html#a8834b281da48583845c044a81566c1b3", null ],
[ "GetCapabilities", "group__eth__mac__interface__gr.html#a9fd725bb058c584a9ced9c579561cdf1", null ],
[ "Initialize", "group__eth__mac__interface__gr.html#aa34417c70cb8b43567c59aa530866cc7", null ],
[ "Uninitialize", "group__eth__mac__interface__gr.html#adcf20681a1402869ecb5c6447fada17b", null ],
[ "PowerControl", "group__eth__mac__interface__gr.html#aba8f1c8019af95ffe19c32403e3240ef", null ],
[ "GetMacAddress", "group__eth__mac__interface__gr.html#a02837059933cd04b04bf795a7138f218", null ],
[ "SetMacAddress", "group__eth__mac__interface__gr.html#ac640f929dc4d5bde3e4282c75b25c00d", null ],
[ "SetAddressFilter", "group__eth__mac__interface__gr.html#a45b879a6df608f582d1866daff715798", null ],
[ "SendFrame", "group__eth__mac__interface__gr.html#ac095aea379f23e30a0e51b1f3518ad37", null ],
[ "ReadFrame", "group__eth__mac__interface__gr.html#a466b724be2167ea7d9a14569062a8fa8", null ],
[ "GetRxFrameSize", "group__eth__mac__interface__gr.html#a3286cc9c7624168b162aa3ce3cbe135e", null ],
[ "GetRxFrameTime", "group__eth__mac__interface__gr.html#a8ae5a588bf4055bba3de73cfba78f7e8", null ],
[ "GetTxFrameTime", "group__eth__mac__interface__gr.html#acf081f5020f4ef1435bcff7333a70b93", null ],
[ "ControlTimer", "group__eth__mac__interface__gr.html#ab6bdbdc7fdfcc52e027201738b88b431", null ],
[ "Control", "group__eth__mac__interface__gr.html#a6e0f47a92f626a971c5197fca6545505", null ],
[ "PHY_Read", "group__eth__mac__interface__gr.html#a0f2ddb734e4242077275761400b26e35", null ],
[ "PHY_Write", "group__eth__mac__interface__gr.html#ac3efe9bdc31c3b1d7fd8eb82bbfb4c13", null ]
] ],
[ "ARM_ETH_MAC_TIME", "group__eth__mac__interface__gr.html#structARM__ETH__MAC__TIME", [
[ "ns", "group__eth__mac__interface__gr.html#a048317f84621fb38ed0bf8c8255e26f0", null ],
[ "sec", "group__eth__mac__interface__gr.html#aaf5f5a3fa5d596a9136b4331f2b54bfc", null ]
] ],
[ "ARM_ETH_MAC_SignalEvent_t", "group__eth__mac__interface__gr.html#gadfc95cb09c541a29a72da86963668726", null ],
[ "ARM_ETH_MAC_GetVersion", "group__eth__mac__interface__gr.html#ga86b15062c297384ad5842dd57b9d6b1d", null ],
[ "ARM_ETH_MAC_GetCapabilities", "group__eth__mac__interface__gr.html#ga2b13b230502736d8c7679b359dff20d0", null ],
| [ "ARM_ETH_MAC_GetMacAddress", "group__eth__mac__interface__gr.html#ga66308c1e791952047e974bd653037fae", null ],
[ "ARM_ETH_MAC_SetMacAddress", "group__eth__mac__interface__gr.html#ga7cc3d17c7312c5032202dfd9a915f24a", null ],
[ "ARM_ETH_MAC_SetAddressFilter", "group__eth__mac__interface__gr.html#ga150fe30290275a4b32756f94208124e8", null ],
[ "ARM_ETH_MAC_SendFrame", "group__eth__mac__interface__gr.html#ga5bf58defdb239ed7dc948f1da147a1c3", null ],
[ "ARM_ETH_MAC_ReadFrame", "group__eth__mac__interface__gr.html#ga4b79f57d8624bb4410ee12c73a483993", null ],
[ "ARM_ETH_MAC_GetRxFrameSize", "group__eth__mac__interface__gr.html#ga5ee86d6b0efab5329b9bc191c23a466d", null ],
[ "ARM_ETH_MAC_GetRxFrameTime", "group__eth__mac__interface__gr.html#gaa7c6865fb09754be869778142466c5e4", null ],
[ "ARM_ETH_MAC_GetTxFrameTime", "group__eth__mac__interface__gr.html#ga115b5c7e149aec2b181de760f5d83f60", null ],
[ "ARM_ETH_MAC_Control", "group__eth__mac__interface__gr.html#gac3e90c66058d20077f04ac8e8b8d0536", null ],
[ "ARM_ETH_MAC_ControlTimer", "group__eth__mac__interface__gr.html#ga85d9dc865af3702b71a514b18a588643", null ],
[ "ARM_ETH_MAC_PHY_Read", "group__eth__mac__interface__gr.html#gaded29ad58366e9222487db9944373c29", null ],
[ "ARM_ETH_MAC_PHY_Write", "group__eth__mac__interface__gr.html#ga79dd38672749aeebd28f39d9b4f813ce", null ],
[ "ARM_ETH_MAC_SignalEvent", "group__eth__mac__interface__gr.html#gae0697be4c4229601f3bfc17e2978ada6", null ]
]; | [ "ARM_ETH_MAC_Initialize", "group__eth__mac__interface__gr.html#gacf42d11b171cd032f0ec1de6db2b6832", null ],
[ "ARM_ETH_MAC_Uninitialize", "group__eth__mac__interface__gr.html#gacb2c2ae06f32328775bffbdeaaabfb5d", null ],
[ "ARM_ETH_MAC_PowerControl", "group__eth__mac__interface__gr.html#ga346fef040a0e9bac5762a04a306b1be7", null ],
| random_line_split |
autotest.py | #!/usr/bin/env python
# APM automatic test suite
# Andrew Tridgell, October 2011
import pexpect, os, sys, shutil, atexit
import optparse, fnmatch, time, glob, traceback, signal
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), 'pysim'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink', 'generator'))
import util
os.environ['PYTHONUNBUFFERED'] = '1'
os.putenv('TMPDIR', util.reltopdir('tmp'))
def get_default_params(atype):
'''get default parameters'''
sil = util.start_SIL(atype, wipe=True)
mavproxy = util.start_MAVProxy_SIL(atype)
print("Dumping defaults")
idx = mavproxy.expect(['Please Run Setup', 'Saved [0-9]+ parameters to (\S+)'])
if idx == 0:
# we need to restart it after eeprom erase
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sil = util.start_SIL(atype)
mavproxy = util.start_MAVProxy_SIL(atype)
idx = mavproxy.expect('Saved [0-9]+ parameters to (\S+)')
parmfile = mavproxy.match.group(1)
dest = util.reltopdir('../buildlogs/%s.defaults.txt' % atype)
shutil.copy(parmfile, dest)
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
print("Saved defaults for %s to %s" % (atype, dest))
return True
def dump_logs(atype):
'''dump DataFlash logs'''
print("Dumping logs for %s" % atype)
sil = util.start_SIL(atype)
logfile = util.reltopdir('../buildlogs/%s.flashlog' % atype)
log = open(logfile, mode='w')
mavproxy = util.start_MAVProxy_SIL(atype, setup=True, logfile=log)
mavproxy.send('\n\n\n')
print("navigating menus")
mavproxy.expect(']')
mavproxy.send("logs\n")
mavproxy.expect("logs enabled:")
lognums = []
i = mavproxy.expect(["No logs", "(\d+) logs"])
if i == 0:
numlogs = 0
else:
numlogs = int(mavproxy.match.group(1))
for i in range(numlogs):
mavproxy.expect("Log (\d+)")
lognums.append(int(mavproxy.match.group(1)))
mavproxy.expect("Log]")
for i in range(numlogs):
print("Dumping log %u (i=%u)" % (lognums[i], i))
mavproxy.send("dump %u\n" % lognums[i])
mavproxy.expect("logs enabled:", timeout=120)
mavproxy.expect("Log]")
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
log.close()
print("Saved log for %s to %s" % (atype, logfile))
return True
def build_all():
'''run the build_all.sh script'''
print("Running build_all.sh")
if util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh'), dir=util.reltopdir('.')) != 0:
print("Failed build_all.sh")
return False
return True
def build_binaries():
'''run the build_binaries.sh script'''
print("Running build_binaries.sh")
import shutil
# copy the script as it changes git branch, which can change the script while running
orig=util.reltopdir('Tools/scripts/build_binaries.sh')
copy=util.reltopdir('./build_binaries.sh')
shutil.copyfile(orig, copy)
shutil.copymode(orig, copy)
if util.run_cmd(copy, dir=util.reltopdir('.')) != 0:
print("Failed build_binaries.sh")
return False
return True
def build_examples():
'''run the build_examples.sh script'''
print("Running build_examples.sh")
if util.run_cmd(util.reltopdir('Tools/scripts/build_examples.sh'), dir=util.reltopdir('.')) != 0:
print("Failed build_examples.sh")
return False
return True
def convert_gpx():
'''convert any tlog files to GPX and KML'''
import glob
mavlog = glob.glob(util.reltopdir("../buildlogs/*.tlog"))
for m in mavlog:
util.run_cmd(util.reltopdir("../mavlink/pymavlink/examples/mavtogpx.py") + " --nofixcheck " + m)
gpx = m + '.gpx'
kml = m + '.kml'
util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False)
util.run_cmd(util.reltopdir("../MAVProxy/tools/mavflightview.py") + " --imagefile=%s.png %s" % (m,m))
return True
def test_prerequesites():
'''check we have the right directories and tools to run tests'''
print("Testing prerequesites")
util.mkdir_p(util.reltopdir('../buildlogs'))
return True
def alarm_handler(signum, frame):
'''handle test timeout'''
global results, opts
try:
results.add('TIMEOUT', '<span class="failed-text">FAILED</span>', opts.timeout)
util.pexpect_close_all()
convert_gpx()
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*.flashlog')
results.addglob("MAVLink log", '*.tlog')
results.addfile('ArduPlane build log', 'ArduPlane.txt')
results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
results.addfile('ArduCopter build log', 'ArduCopter.txt')
results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
results.addfile('APMrover2 build log', 'APMrover2.txt')
results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
write_webresults(results)
os.killpg(0, signal.SIGKILL)
except Exception:
pass
sys.exit(1)
############## main program #############
parser = optparse.OptionParser("autotest")
parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)')
parser.add_option("--list", action='store_true', default=False, help='list the available steps')
parser.add_option("--viewerip", default=None, help='IP address to send MAVLink and fg packets to')
parser.add_option("--map", action='store_true', default=False, help='show map')
parser.add_option("--experimental", default=False, action='store_true', help='enable experimental tests')
parser.add_option("--timeout", default=3000, type='int', help='maximum runtime in seconds')
opts, args = parser.parse_args()
import arducopter, arduplane, apmrover2
steps = [
'prerequesites',
'build.All',
'build.Binaries',
'build.Examples',
'build1280.ArduPlane',
'build2560.ArduPlane',
'build.ArduPlane',
'defaults.ArduPlane',
'fly.ArduPlane',
'logs.ArduPlane',
'build1280.APMrover2',
'build2560.APMrover2',
'build.APMrover2',
'defaults.APMrover2',
'drive.APMrover2',
'logs.APMrover2',
'build2560.ArduCopter',
'build.ArduCopter',
'defaults.ArduCopter',
'fly.ArduCopter',
'logs.ArduCopter',
'convertgpx',
]
skipsteps = opts.skip.split(',')
# ensure we catch timeouts
signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(opts.timeout)
if opts.list:
for step in steps:
print(step)
sys.exit(0)
def skip_step(step):
'''see if a step should be skipped'''
for skip in skipsteps:
if fnmatch.fnmatch(step.lower(), skip.lower()):
return True
return False
def run_step(step):
'''run one step'''
if step == "prerequesites":
return test_prerequesites()
if step == 'build.ArduPlane':
return util.build_SIL('ArduPlane')
if step == 'build.APMrover2':
return util.build_SIL('APMrover2')
if step == 'build.ArduCopter':
return util.build_SIL('ArduCopter')
if step == 'build1280.ArduCopter':
return util.build_AVR('ArduCopter', board='mega')
if step == 'build2560.ArduCopter':
return util.build_AVR('ArduCopter', board='mega2560')
if step == 'build1280.ArduPlane':
return util.build_AVR('ArduPlane', board='mega')
if step == 'build2560.ArduPlane':
return util.build_AVR('ArduPlane', board='mega2560')
if step == 'build1280.APMrover2':
return util.build_AVR('APMrover2', board='mega')
if step == 'build2560.APMrover2':
return util.build_AVR('APMrover2', board='mega2560')
if step == 'defaults.ArduPlane':
return get_default_params('ArduPlane')
if step == 'defaults.ArduCopter':
return get_default_params('ArduCopter')
if step == 'defaults.APMrover2':
return get_default_params('APMrover2')
if step == 'logs.ArduPlane':
return dump_logs('ArduPlane')
if step == 'logs.ArduCopter':
return dump_logs('ArduCopter')
if step == 'logs.APMrover2':
return dump_logs('APMrover2')
if step == 'fly.ArduCopter':
return arducopter.fly_ArduCopter(viewerip=opts.viewerip, map=opts.map)
if step == 'fly.ArduPlane':
return arduplane.fly_ArduPlane(viewerip=opts.viewerip, map=opts.map)
if step == 'drive.APMrover2':
return apmrover2.drive_APMrover2(viewerip=opts.viewerip, map=opts.map)
if step == 'build.All':
return build_all()
if step == 'build.Binaries':
return build_binaries()
if step == 'build.Examples':
return build_examples()
if step == 'convertgpx':
return convert_gpx()
raise RuntimeError("Unknown step %s" % step)
class TestResult(object):
'''test result class'''
def __init__(self, name, result, elapsed):
self.name = name
self.result = result
self.elapsed = "%.1f" % elapsed
class TestFile(object):
'''test result file'''
def __init__(self, name, fname):
self.name = name
self.fname = fname
class TestResults(object):
'''test results class'''
def __init__(self):
|
def add(self, name, result, elapsed):
'''add a result'''
self.tests.append(TestResult(name, result, elapsed))
def addfile(self, name, fname):
'''add a result file'''
self.files.append(TestFile(name, fname))
def addimage(self, name, fname):
'''add a result image'''
self.images.append(TestFile(name, fname))
def addglob(self, name, pattern):
'''add a set of files'''
import glob
for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
self.addfile(name, os.path.basename(f))
def addglobimage(self, name, pattern):
'''add a set of images'''
import glob
for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
self.addimage(name, os.path.basename(f))
def write_webresults(results):
'''write webpage results'''
sys.path.insert(0, os.path.join(util.reltopdir("../mavlink/pymavlink/generator")))
import mavtemplate
t = mavtemplate.MAVTemplate()
for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
html = util.loadfile(h)
f = open(util.reltopdir("../buildlogs/%s" % os.path.basename(h)), mode='w')
t.write(f, html, results)
f.close()
for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
shutil.copy(f, util.reltopdir('../buildlogs/%s' % os.path.basename(f)))
results = TestResults()
def run_tests(steps):
'''run a list of steps'''
global results
passed = True
failed = []
for step in steps:
util.pexpect_close_all()
if skip_step(step):
continue
t1 = time.time()
print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
try:
if not run_step(step):
print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
passed = False
failed.append(step)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
except Exception, msg:
passed = False
failed.append(step)
print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg))
traceback.print_exc(file=sys.stdout)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1)
print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
if not passed:
print("FAILED %u tests: %s" % (len(failed), failed))
util.pexpect_close_all()
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*.flashlog')
results.addglob("MAVLink log", '*.tlog')
results.addglob("GPX track", '*.gpx')
results.addfile('ArduPlane build log', 'ArduPlane.txt')
results.addfile('ArduPlane code size', 'ArduPlane.sizes.txt')
results.addfile('ArduPlane stack sizes', 'ArduPlane.framesizes.txt')
results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
results.addfile('ArduCopter build log', 'ArduCopter.txt')
results.addfile('ArduCopter code size', 'ArduCopter.sizes.txt')
results.addfile('ArduCopter stack sizes', 'ArduCopter.framesizes.txt')
results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
results.addfile('APMrover2 build log', 'APMrover2.txt')
results.addfile('APMrover2 code size', 'APMrover2.sizes.txt')
results.addfile('APMrover2 stack sizes', 'APMrover2.framesizes.txt')
results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
results.addglobimage("Flight Track", '*.png')
write_webresults(results)
return passed
util.mkdir_p(util.reltopdir('../buildlogs'))
lck = util.lock_file(util.reltopdir('../buildlogs/autotest.lck'))
if lck is None:
print("autotest is locked - exiting")
sys.exit(0)
atexit.register(util.pexpect_close_all)
if len(args) > 0:
# allow a wildcard list of steps
matched = []
for a in args:
for s in steps:
if fnmatch.fnmatch(s.lower(), a.lower()):
matched.append(s)
steps = matched
try:
if not run_tests(steps):
sys.exit(1)
except KeyboardInterrupt:
util.pexpect_close_all()
sys.exit(1)
except Exception:
# make sure we kill off any children
util.pexpect_close_all()
raise
| self.date = time.asctime()
self.githash = util.run_cmd('git rev-parse HEAD', output=True, dir=util.reltopdir('.')).strip()
self.tests = []
self.files = []
self.images = [] | identifier_body |
autotest.py | #!/usr/bin/env python
# APM automatic test suite
# Andrew Tridgell, October 2011
import pexpect, os, sys, shutil, atexit
import optparse, fnmatch, time, glob, traceback, signal
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), 'pysim'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink', 'generator'))
import util
os.environ['PYTHONUNBUFFERED'] = '1'
os.putenv('TMPDIR', util.reltopdir('tmp'))
def get_default_params(atype):
'''get default parameters'''
sil = util.start_SIL(atype, wipe=True)
mavproxy = util.start_MAVProxy_SIL(atype)
print("Dumping defaults")
idx = mavproxy.expect(['Please Run Setup', 'Saved [0-9]+ parameters to (\S+)'])
if idx == 0:
# we need to restart it after eeprom erase
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sil = util.start_SIL(atype)
mavproxy = util.start_MAVProxy_SIL(atype)
idx = mavproxy.expect('Saved [0-9]+ parameters to (\S+)')
parmfile = mavproxy.match.group(1)
dest = util.reltopdir('../buildlogs/%s.defaults.txt' % atype)
shutil.copy(parmfile, dest)
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
print("Saved defaults for %s to %s" % (atype, dest))
return True
def dump_logs(atype):
'''dump DataFlash logs'''
print("Dumping logs for %s" % atype)
sil = util.start_SIL(atype)
logfile = util.reltopdir('../buildlogs/%s.flashlog' % atype)
log = open(logfile, mode='w')
mavproxy = util.start_MAVProxy_SIL(atype, setup=True, logfile=log)
mavproxy.send('\n\n\n')
print("navigating menus")
mavproxy.expect(']')
mavproxy.send("logs\n")
mavproxy.expect("logs enabled:")
lognums = []
i = mavproxy.expect(["No logs", "(\d+) logs"])
if i == 0:
numlogs = 0
else:
numlogs = int(mavproxy.match.group(1))
for i in range(numlogs):
mavproxy.expect("Log (\d+)")
lognums.append(int(mavproxy.match.group(1)))
mavproxy.expect("Log]")
for i in range(numlogs):
print("Dumping log %u (i=%u)" % (lognums[i], i))
mavproxy.send("dump %u\n" % lognums[i])
mavproxy.expect("logs enabled:", timeout=120)
mavproxy.expect("Log]")
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
log.close()
print("Saved log for %s to %s" % (atype, logfile))
return True
def build_all():
'''run the build_all.sh script'''
print("Running build_all.sh")
if util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh'), dir=util.reltopdir('.')) != 0:
print("Failed build_all.sh")
return False
return True
def build_binaries():
'''run the build_binaries.sh script'''
print("Running build_binaries.sh")
import shutil
# copy the script as it changes git branch, which can change the script while running
orig=util.reltopdir('Tools/scripts/build_binaries.sh')
copy=util.reltopdir('./build_binaries.sh')
shutil.copyfile(orig, copy)
shutil.copymode(orig, copy)
if util.run_cmd(copy, dir=util.reltopdir('.')) != 0:
print("Failed build_binaries.sh")
return False
return True
def build_examples():
'''run the build_examples.sh script'''
print("Running build_examples.sh")
if util.run_cmd(util.reltopdir('Tools/scripts/build_examples.sh'), dir=util.reltopdir('.')) != 0:
print("Failed build_examples.sh")
return False
return True
def convert_gpx():
'''convert any tlog files to GPX and KML'''
import glob
mavlog = glob.glob(util.reltopdir("../buildlogs/*.tlog"))
for m in mavlog:
util.run_cmd(util.reltopdir("../mavlink/pymavlink/examples/mavtogpx.py") + " --nofixcheck " + m)
gpx = m + '.gpx'
kml = m + '.kml'
util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False)
util.run_cmd(util.reltopdir("../MAVProxy/tools/mavflightview.py") + " --imagefile=%s.png %s" % (m,m))
return True
def test_prerequesites():
'''check we have the right directories and tools to run tests'''
print("Testing prerequesites")
util.mkdir_p(util.reltopdir('../buildlogs'))
return True
def alarm_handler(signum, frame):
'''handle test timeout'''
global results, opts
try:
results.add('TIMEOUT', '<span class="failed-text">FAILED</span>', opts.timeout)
util.pexpect_close_all()
convert_gpx()
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*.flashlog')
results.addglob("MAVLink log", '*.tlog')
results.addfile('ArduPlane build log', 'ArduPlane.txt')
results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
results.addfile('ArduCopter build log', 'ArduCopter.txt')
results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
results.addfile('APMrover2 build log', 'APMrover2.txt')
results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
write_webresults(results)
os.killpg(0, signal.SIGKILL)
except Exception:
pass
sys.exit(1)
############## main program #############
parser = optparse.OptionParser("autotest")
parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)')
parser.add_option("--list", action='store_true', default=False, help='list the available steps')
parser.add_option("--viewerip", default=None, help='IP address to send MAVLink and fg packets to')
parser.add_option("--map", action='store_true', default=False, help='show map')
parser.add_option("--experimental", default=False, action='store_true', help='enable experimental tests')
parser.add_option("--timeout", default=3000, type='int', help='maximum runtime in seconds')
opts, args = parser.parse_args()
import arducopter, arduplane, apmrover2
steps = [
'prerequesites',
'build.All',
'build.Binaries',
'build.Examples',
'build1280.ArduPlane',
'build2560.ArduPlane',
'build.ArduPlane',
'defaults.ArduPlane',
'fly.ArduPlane',
'logs.ArduPlane',
'build1280.APMrover2',
'build2560.APMrover2',
'build.APMrover2',
'defaults.APMrover2',
'drive.APMrover2',
'logs.APMrover2',
'build2560.ArduCopter',
'build.ArduCopter',
'defaults.ArduCopter',
'fly.ArduCopter',
'logs.ArduCopter',
'convertgpx',
]
skipsteps = opts.skip.split(',')
# ensure we catch timeouts
signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(opts.timeout)
if opts.list:
for step in steps:
print(step)
sys.exit(0)
def skip_step(step):
'''see if a step should be skipped'''
for skip in skipsteps:
if fnmatch.fnmatch(step.lower(), skip.lower()):
return True
return False
def run_step(step):
'''run one step'''
if step == "prerequesites":
return test_prerequesites()
if step == 'build.ArduPlane':
return util.build_SIL('ArduPlane')
if step == 'build.APMrover2':
return util.build_SIL('APMrover2')
if step == 'build.ArduCopter':
return util.build_SIL('ArduCopter')
if step == 'build1280.ArduCopter':
return util.build_AVR('ArduCopter', board='mega')
if step == 'build2560.ArduCopter':
return util.build_AVR('ArduCopter', board='mega2560')
if step == 'build1280.ArduPlane':
return util.build_AVR('ArduPlane', board='mega')
if step == 'build2560.ArduPlane':
return util.build_AVR('ArduPlane', board='mega2560')
if step == 'build1280.APMrover2':
return util.build_AVR('APMrover2', board='mega')
if step == 'build2560.APMrover2':
return util.build_AVR('APMrover2', board='mega2560')
if step == 'defaults.ArduPlane':
return get_default_params('ArduPlane')
if step == 'defaults.ArduCopter':
return get_default_params('ArduCopter')
if step == 'defaults.APMrover2':
return get_default_params('APMrover2')
if step == 'logs.ArduPlane':
return dump_logs('ArduPlane')
if step == 'logs.ArduCopter':
return dump_logs('ArduCopter')
if step == 'logs.APMrover2':
return dump_logs('APMrover2')
if step == 'fly.ArduCopter':
return arducopter.fly_ArduCopter(viewerip=opts.viewerip, map=opts.map)
if step == 'fly.ArduPlane':
return arduplane.fly_ArduPlane(viewerip=opts.viewerip, map=opts.map)
if step == 'drive.APMrover2':
return apmrover2.drive_APMrover2(viewerip=opts.viewerip, map=opts.map)
if step == 'build.All':
return build_all()
if step == 'build.Binaries':
return build_binaries()
if step == 'build.Examples':
return build_examples()
if step == 'convertgpx':
return convert_gpx()
raise RuntimeError("Unknown step %s" % step)
class TestResult(object):
'''test result class'''
def __init__(self, name, result, elapsed):
self.name = name
self.result = result
self.elapsed = "%.1f" % elapsed
class TestFile(object):
'''test result file'''
def __init__(self, name, fname):
self.name = name
self.fname = fname
class TestResults(object):
'''test results class'''
def __init__(self):
self.date = time.asctime()
self.githash = util.run_cmd('git rev-parse HEAD', output=True, dir=util.reltopdir('.')).strip()
self.tests = []
self.files = []
self.images = []
def add(self, name, result, elapsed):
'''add a result'''
self.tests.append(TestResult(name, result, elapsed))
def addfile(self, name, fname):
'''add a result file'''
self.files.append(TestFile(name, fname))
def addimage(self, name, fname):
'''add a result image'''
self.images.append(TestFile(name, fname))
def addglob(self, name, pattern):
'''add a set of files''' | '''add a set of images'''
import glob
for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
self.addimage(name, os.path.basename(f))
def write_webresults(results):
'''write webpage results'''
sys.path.insert(0, os.path.join(util.reltopdir("../mavlink/pymavlink/generator")))
import mavtemplate
t = mavtemplate.MAVTemplate()
for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
html = util.loadfile(h)
f = open(util.reltopdir("../buildlogs/%s" % os.path.basename(h)), mode='w')
t.write(f, html, results)
f.close()
for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
shutil.copy(f, util.reltopdir('../buildlogs/%s' % os.path.basename(f)))
results = TestResults()
def run_tests(steps):
'''run a list of steps'''
global results
passed = True
failed = []
for step in steps:
util.pexpect_close_all()
if skip_step(step):
continue
t1 = time.time()
print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
try:
if not run_step(step):
print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
passed = False
failed.append(step)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
except Exception, msg:
passed = False
failed.append(step)
print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg))
traceback.print_exc(file=sys.stdout)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1)
print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
if not passed:
print("FAILED %u tests: %s" % (len(failed), failed))
util.pexpect_close_all()
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*.flashlog')
results.addglob("MAVLink log", '*.tlog')
results.addglob("GPX track", '*.gpx')
results.addfile('ArduPlane build log', 'ArduPlane.txt')
results.addfile('ArduPlane code size', 'ArduPlane.sizes.txt')
results.addfile('ArduPlane stack sizes', 'ArduPlane.framesizes.txt')
results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
results.addfile('ArduCopter build log', 'ArduCopter.txt')
results.addfile('ArduCopter code size', 'ArduCopter.sizes.txt')
results.addfile('ArduCopter stack sizes', 'ArduCopter.framesizes.txt')
results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
results.addfile('APMrover2 build log', 'APMrover2.txt')
results.addfile('APMrover2 code size', 'APMrover2.sizes.txt')
results.addfile('APMrover2 stack sizes', 'APMrover2.framesizes.txt')
results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
results.addglobimage("Flight Track", '*.png')
write_webresults(results)
return passed
util.mkdir_p(util.reltopdir('../buildlogs'))
lck = util.lock_file(util.reltopdir('../buildlogs/autotest.lck'))
if lck is None:
print("autotest is locked - exiting")
sys.exit(0)
atexit.register(util.pexpect_close_all)
if len(args) > 0:
# allow a wildcard list of steps
matched = []
for a in args:
for s in steps:
if fnmatch.fnmatch(s.lower(), a.lower()):
matched.append(s)
steps = matched
try:
if not run_tests(steps):
sys.exit(1)
except KeyboardInterrupt:
util.pexpect_close_all()
sys.exit(1)
except Exception:
# make sure we kill off any children
util.pexpect_close_all()
raise | import glob
for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
self.addfile(name, os.path.basename(f))
def addglobimage(self, name, pattern): | random_line_split |
autotest.py | #!/usr/bin/env python
# APM automatic test suite
# Andrew Tridgell, October 2011
import pexpect, os, sys, shutil, atexit
import optparse, fnmatch, time, glob, traceback, signal
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), 'pysim'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink', 'generator'))
import util
os.environ['PYTHONUNBUFFERED'] = '1'
os.putenv('TMPDIR', util.reltopdir('tmp'))
def get_default_params(atype):
    '''get default parameters

    Boots a wiped SIL instance of vehicle `atype`, waits for MAVProxy to
    report the saved parameter file, and copies that file to
    ../buildlogs/<atype>.defaults.txt.  Returns True on success.
    '''
    # wipe=True erases the simulated eeprom so the vehicle boots with factory defaults
    sil = util.start_SIL(atype, wipe=True)
    mavproxy = util.start_MAVProxy_SIL(atype)
    print("Dumping defaults")
    idx = mavproxy.expect(['Please Run Setup', 'Saved [0-9]+ parameters to (\S+)'])
    if idx == 0:
        # we need to restart it after eeprom erase
        util.pexpect_close(mavproxy)
        util.pexpect_close(sil)
        sil = util.start_SIL(atype)
        mavproxy = util.start_MAVProxy_SIL(atype)
        idx = mavproxy.expect('Saved [0-9]+ parameters to (\S+)')
    # mavproxy.match holds the last expect() match; group(1) is the parameter file path
    parmfile = mavproxy.match.group(1)
    dest = util.reltopdir('../buildlogs/%s.defaults.txt' % atype)
    shutil.copy(parmfile, dest)
    util.pexpect_close(mavproxy)
    util.pexpect_close(sil)
    print("Saved defaults for %s to %s" % (atype, dest))
    return True
def dump_logs(atype):
    '''dump DataFlash logs

    Starts a SIL instance for vehicle `atype`, drives the MAVProxy setup
    menu to list every stored DataFlash log, dumps each one, and captures
    the whole session to ../buildlogs/<atype>.flashlog.  Returns True.
    '''
    print("Dumping logs for %s" % atype)
    sil = util.start_SIL(atype)
    logfile = util.reltopdir('../buildlogs/%s.flashlog' % atype)
    log = open(logfile, mode='w')
    # setup=True drops MAVProxy into the CLI setup menu; everything printed
    # by the session (including the dumped logs) is captured into `log`
    mavproxy = util.start_MAVProxy_SIL(atype, setup=True, logfile=log)
    mavproxy.send('\n\n\n')
    print("navigating menus")
    mavproxy.expect(']')
    mavproxy.send("logs\n")
    mavproxy.expect("logs enabled:")
    lognums = []
    i = mavproxy.expect(["No logs", "(\d+) logs"])
    if i == 0:
        numlogs = 0
    else:
        numlogs = int(mavproxy.match.group(1))
    # collect the log numbers from the "Log N" lines of the menu listing
    for i in range(numlogs):
        mavproxy.expect("Log (\d+)")
        lognums.append(int(mavproxy.match.group(1)))
    mavproxy.expect("Log]")
    for i in range(numlogs):
        print("Dumping log %u (i=%u)" % (lognums[i], i))
        mavproxy.send("dump %u\n" % lognums[i])
        # dumping can be slow; allow up to two minutes per log
        mavproxy.expect("logs enabled:", timeout=120)
        mavproxy.expect("Log]")
    util.pexpect_close(mavproxy)
    util.pexpect_close(sil)
    log.close()
    print("Saved log for %s to %s" % (atype, logfile))
    return True
def build_all():
    '''run the build_all.sh script'''
    print("Running build_all.sh")
    # non-zero exit status from the script means the build failed
    rc = util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh'), dir=util.reltopdir('.'))
    if rc != 0:
        print("Failed build_all.sh")
        return False
    return True
def build_binaries():
    '''run the build_binaries.sh script'''
    print("Running build_binaries.sh")
    import shutil
    # run from a copy: the script switches git branches, which could rewrite
    # the original script file while it is still executing
    src = util.reltopdir('Tools/scripts/build_binaries.sh')
    dst = util.reltopdir('./build_binaries.sh')
    shutil.copyfile(src, dst)
    shutil.copymode(src, dst)
    if util.run_cmd(dst, dir=util.reltopdir('.')) != 0:
        print("Failed build_binaries.sh")
        return False
    return True
def build_examples():
    '''run the build_examples.sh script'''
    print("Running build_examples.sh")
    script = util.reltopdir('Tools/scripts/build_examples.sh')
    if util.run_cmd(script, dir=util.reltopdir('.')) != 0:
        print("Failed build_examples.sh")
        return False
    return True
def convert_gpx():
    '''convert any tlog files to GPX and KML'''
    import glob
    # for every telemetry log: tlog -> gpx -> kml -> kmz, plus a PNG track image
    for tlog in glob.glob(util.reltopdir("../buildlogs/*.tlog")):
        util.run_cmd(util.reltopdir("../mavlink/pymavlink/examples/mavtogpx.py") + " --nofixcheck " + tlog)
        gpx = tlog + '.gpx'
        kml = tlog + '.kml'
        # gpsbabel/zip may be missing on some hosts; checkfail=False keeps going
        util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
        util.run_cmd('zip %s.kmz %s.kml' % (tlog, tlog), checkfail=False)
        util.run_cmd(util.reltopdir("../MAVProxy/tools/mavflightview.py") + " --imagefile=%s.png %s" % (tlog, tlog))
    return True
def test_prerequesites():
    '''check we have the right directories and tools to run tests'''
    print("Testing prerequesites")
    # the web report and all logs land in ../buildlogs; create it up front
    util.mkdir_p(util.reltopdir('../buildlogs'))
    return True
def alarm_handler(signum, frame):
    '''handle test timeout

    SIGALRM handler armed in the main program: records a TIMEOUT failure,
    salvages whatever artifacts exist, writes the web report, then kills
    the whole process group so no simulator children are left behind.
    '''
    global results, opts
    try:
        results.add('TIMEOUT', '<span class="failed-text">FAILED</span>', opts.timeout)
        util.pexpect_close_all()
        convert_gpx()
        results.addglob("Google Earth track", '*.kmz')
        results.addfile('Full Logs', 'autotest-output.txt')
        results.addglob('DataFlash Log', '*.flashlog')
        results.addglob("MAVLink log", '*.tlog')
        results.addfile('ArduPlane build log', 'ArduPlane.txt')
        results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
        results.addfile('ArduCopter build log', 'ArduCopter.txt')
        results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
        results.addfile('APMrover2 build log', 'APMrover2.txt')
        results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
        write_webresults(results)
        # SIGKILL the whole process group (pgid 0 == our own group)
        os.killpg(0, signal.SIGKILL)
    except Exception:
        # best-effort cleanup: never let a reporting error mask the timeout exit
        pass
    sys.exit(1)
############## main program #############
# command-line options for the autotest driver
parser = optparse.OptionParser("autotest")
parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)')
parser.add_option("--list", action='store_true', default=False, help='list the available steps')
parser.add_option("--viewerip", default=None, help='IP address to send MAVLink and fg packets to')
parser.add_option("--map", action='store_true', default=False, help='show map')
parser.add_option("--experimental", default=False, action='store_true', help='enable experimental tests')
parser.add_option("--timeout", default=3000, type='int', help='maximum runtime in seconds')
opts, args = parser.parse_args()
# vehicle test modules (imported late so sys.path tweaks above take effect)
import arducopter, arduplane, apmrover2
# the full ordered list of test steps; names are matched by run_step()
steps = [
    'prerequesites',
    'build.All',
    'build.Binaries',
    'build.Examples',
    'build1280.ArduPlane',
    'build2560.ArduPlane',
    'build.ArduPlane',
    'defaults.ArduPlane',
    'fly.ArduPlane',
    'logs.ArduPlane',
    'build1280.APMrover2',
    'build2560.APMrover2',
    'build.APMrover2',
    'defaults.APMrover2',
    'drive.APMrover2',
    'logs.APMrover2',
    'build2560.ArduCopter',
    'build.ArduCopter',
    'defaults.ArduCopter',
    'fly.ArduCopter',
    'logs.ArduCopter',
    'convertgpx',
    ]
# patterns from --skip, consumed by skip_step()
skipsteps = opts.skip.split(',')
# ensure we catch timeouts
signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(opts.timeout)
if opts.list:
    # --list: print the step names and exit without running anything
    for step in steps:
        print(step)
    sys.exit(0)
def skip_step(step):
    '''see if a step should be skipped'''
    # skipsteps comes from the --skip option; patterns are fnmatch-style,
    # compared case-insensitively
    return any(fnmatch.fnmatch(step.lower(), skip.lower()) for skip in skipsteps)
def run_step(step):
    '''run one step

    Maps a step name to its action and runs it; raises RuntimeError for
    an unrecognised step name.  The lambdas keep every action lazy, so
    only the selected step executes.
    '''
    dispatch = {
        "prerequesites": lambda: test_prerequesites(),
        'build.ArduPlane': lambda: util.build_SIL('ArduPlane'),
        'build.APMrover2': lambda: util.build_SIL('APMrover2'),
        'build.ArduCopter': lambda: util.build_SIL('ArduCopter'),
        'build1280.ArduCopter': lambda: util.build_AVR('ArduCopter', board='mega'),
        'build2560.ArduCopter': lambda: util.build_AVR('ArduCopter', board='mega2560'),
        'build1280.ArduPlane': lambda: util.build_AVR('ArduPlane', board='mega'),
        'build2560.ArduPlane': lambda: util.build_AVR('ArduPlane', board='mega2560'),
        'build1280.APMrover2': lambda: util.build_AVR('APMrover2', board='mega'),
        'build2560.APMrover2': lambda: util.build_AVR('APMrover2', board='mega2560'),
        'defaults.ArduPlane': lambda: get_default_params('ArduPlane'),
        'defaults.ArduCopter': lambda: get_default_params('ArduCopter'),
        'defaults.APMrover2': lambda: get_default_params('APMrover2'),
        'logs.ArduPlane': lambda: dump_logs('ArduPlane'),
        'logs.ArduCopter': lambda: dump_logs('ArduCopter'),
        'logs.APMrover2': lambda: dump_logs('APMrover2'),
        'fly.ArduCopter': lambda: arducopter.fly_ArduCopter(viewerip=opts.viewerip, map=opts.map),
        'fly.ArduPlane': lambda: arduplane.fly_ArduPlane(viewerip=opts.viewerip, map=opts.map),
        'drive.APMrover2': lambda: apmrover2.drive_APMrover2(viewerip=opts.viewerip, map=opts.map),
        'build.All': lambda: build_all(),
        'build.Binaries': lambda: build_binaries(),
        'build.Examples': lambda: build_examples(),
        'convertgpx': lambda: convert_gpx(),
    }
    if step not in dispatch:
        raise RuntimeError("Unknown step %s" % step)
    return dispatch[step]()
class TestResult(object):
    '''Outcome of a single test step.

    Holds the step name, the pass/fail markup string shown in the web
    report, and the elapsed time pre-formatted to one decimal place.
    '''
    def __init__(self, name, result, elapsed):
        # elapsed is stored as a string so the report template can use it directly
        self.elapsed = "%.1f" % elapsed
        self.name = name
        self.result = result
class TestFile(object):
    '''A named artifact file attached to the results web page.'''
    def __init__(self, name, fname):
        self.fname = fname
        self.name = name
class TestResults(object):
    '''Accumulates test outcomes and artifact files for the web report.

    Records the run date and current git hash at construction time; the
    tests/files/images lists are consumed by the HTML templates.
    '''
    def __init__(self):
        self.date = time.asctime()
        self.githash = util.run_cmd('git rev-parse HEAD', output=True, dir=util.reltopdir('.')).strip()
        self.tests = []
        self.files = []
        self.images = []
    def add(self, name, result, elapsed):
        '''record the outcome of one test step'''
        self.tests.append(TestResult(name, result, elapsed))
    def addfile(self, name, fname):
        '''attach one artifact file'''
        self.files.append(TestFile(name, fname))
    def addimage(self, name, fname):
        '''attach one artifact image'''
        self.images.append(TestFile(name, fname))
    def addglob(self, name, pattern):
        '''attach every file under ../buildlogs matching pattern'''
        import glob
        for path in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
            self.addfile(name, os.path.basename(path))
    def addglobimage(self, name, pattern):
        '''attach every image under ../buildlogs matching pattern'''
        import glob
        for path in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
            self.addimage(name, os.path.basename(path))
def write_webresults(results):
    '''write webpage results'''
    # mavtemplate lives in the pymavlink generator tree
    sys.path.insert(0, os.path.join(util.reltopdir("../mavlink/pymavlink/generator")))
    import mavtemplate
    template = mavtemplate.MAVTemplate()
    # render every HTML template against the results object into ../buildlogs
    for page in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
        html = util.loadfile(page)
        out = open(util.reltopdir("../buildlogs/%s" % os.path.basename(page)), mode='w')
        template.write(out, html, results)
        out.close()
    # copy the static images alongside the generated pages
    for image in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
        shutil.copy(image, util.reltopdir('../buildlogs/%s' % os.path.basename(image)))
# global results accumulator, shared with alarm_handler() and run_tests()
results = TestResults()
def run_tests(steps):
'''run a list of steps'''
global results
passed = True
failed = []
for step in steps:
util.pexpect_close_all()
if skip_step(step):
continue
t1 = time.time()
print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
try:
if not run_step(step):
print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
passed = False
failed.append(step)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
except Exception, msg:
passed = False
failed.append(step)
print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg))
traceback.print_exc(file=sys.stdout)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1)
print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
if not passed:
print("FAILED %u tests: %s" % (len(failed), failed))
util.pexpect_close_all()
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*.flashlog')
results.addglob("MAVLink log", '*.tlog')
results.addglob("GPX track", '*.gpx')
results.addfile('ArduPlane build log', 'ArduPlane.txt')
results.addfile('ArduPlane code size', 'ArduPlane.sizes.txt')
results.addfile('ArduPlane stack sizes', 'ArduPlane.framesizes.txt')
results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
results.addfile('ArduCopter build log', 'ArduCopter.txt')
results.addfile('ArduCopter code size', 'ArduCopter.sizes.txt')
results.addfile('ArduCopter stack sizes', 'ArduCopter.framesizes.txt')
results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
results.addfile('APMrover2 build log', 'APMrover2.txt')
results.addfile('APMrover2 code size', 'APMrover2.sizes.txt')
results.addfile('APMrover2 stack sizes', 'APMrover2.framesizes.txt')
results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
results.addglobimage("Flight Track", '*.png')
write_webresults(results)
return passed
util.mkdir_p(util.reltopdir('../buildlogs'))
lck = util.lock_file(util.reltopdir('../buildlogs/autotest.lck'))
if lck is None:
print("autotest is locked - exiting")
sys.exit(0)
atexit.register(util.pexpect_close_all)
if len(args) > 0:
# allow a wildcard list of steps
matched = []
for a in args:
for s in steps:
|
steps = matched
try:
if not run_tests(steps):
sys.exit(1)
except KeyboardInterrupt:
util.pexpect_close_all()
sys.exit(1)
except Exception:
# make sure we kill off any children
util.pexpect_close_all()
raise
| if fnmatch.fnmatch(s.lower(), a.lower()):
matched.append(s) | conditional_block |
autotest.py | #!/usr/bin/env python
# APM automatic test suite
# Andrew Tridgell, October 2011
import pexpect, os, sys, shutil, atexit
import optparse, fnmatch, time, glob, traceback, signal
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), 'pysim'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', '..', 'mavlink', 'pymavlink', 'generator'))
import util
os.environ['PYTHONUNBUFFERED'] = '1'
os.putenv('TMPDIR', util.reltopdir('tmp'))
def get_default_params(atype):
'''get default parameters'''
sil = util.start_SIL(atype, wipe=True)
mavproxy = util.start_MAVProxy_SIL(atype)
print("Dumping defaults")
idx = mavproxy.expect(['Please Run Setup', 'Saved [0-9]+ parameters to (\S+)'])
if idx == 0:
# we need to restart it after eeprom erase
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sil = util.start_SIL(atype)
mavproxy = util.start_MAVProxy_SIL(atype)
idx = mavproxy.expect('Saved [0-9]+ parameters to (\S+)')
parmfile = mavproxy.match.group(1)
dest = util.reltopdir('../buildlogs/%s.defaults.txt' % atype)
shutil.copy(parmfile, dest)
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
print("Saved defaults for %s to %s" % (atype, dest))
return True
def dump_logs(atype):
'''dump DataFlash logs'''
print("Dumping logs for %s" % atype)
sil = util.start_SIL(atype)
logfile = util.reltopdir('../buildlogs/%s.flashlog' % atype)
log = open(logfile, mode='w')
mavproxy = util.start_MAVProxy_SIL(atype, setup=True, logfile=log)
mavproxy.send('\n\n\n')
print("navigating menus")
mavproxy.expect(']')
mavproxy.send("logs\n")
mavproxy.expect("logs enabled:")
lognums = []
i = mavproxy.expect(["No logs", "(\d+) logs"])
if i == 0:
numlogs = 0
else:
numlogs = int(mavproxy.match.group(1))
for i in range(numlogs):
mavproxy.expect("Log (\d+)")
lognums.append(int(mavproxy.match.group(1)))
mavproxy.expect("Log]")
for i in range(numlogs):
print("Dumping log %u (i=%u)" % (lognums[i], i))
mavproxy.send("dump %u\n" % lognums[i])
mavproxy.expect("logs enabled:", timeout=120)
mavproxy.expect("Log]")
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
log.close()
print("Saved log for %s to %s" % (atype, logfile))
return True
def build_all():
'''run the build_all.sh script'''
print("Running build_all.sh")
if util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh'), dir=util.reltopdir('.')) != 0:
print("Failed build_all.sh")
return False
return True
def build_binaries():
'''run the build_binaries.sh script'''
print("Running build_binaries.sh")
import shutil
# copy the script as it changes git branch, which can change the script while running
orig=util.reltopdir('Tools/scripts/build_binaries.sh')
copy=util.reltopdir('./build_binaries.sh')
shutil.copyfile(orig, copy)
shutil.copymode(orig, copy)
if util.run_cmd(copy, dir=util.reltopdir('.')) != 0:
print("Failed build_binaries.sh")
return False
return True
def build_examples():
'''run the build_examples.sh script'''
print("Running build_examples.sh")
if util.run_cmd(util.reltopdir('Tools/scripts/build_examples.sh'), dir=util.reltopdir('.')) != 0:
print("Failed build_examples.sh")
return False
return True
def convert_gpx():
'''convert any tlog files to GPX and KML'''
import glob
mavlog = glob.glob(util.reltopdir("../buildlogs/*.tlog"))
for m in mavlog:
util.run_cmd(util.reltopdir("../mavlink/pymavlink/examples/mavtogpx.py") + " --nofixcheck " + m)
gpx = m + '.gpx'
kml = m + '.kml'
util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False)
util.run_cmd(util.reltopdir("../MAVProxy/tools/mavflightview.py") + " --imagefile=%s.png %s" % (m,m))
return True
def test_prerequesites():
'''check we have the right directories and tools to run tests'''
print("Testing prerequesites")
util.mkdir_p(util.reltopdir('../buildlogs'))
return True
def alarm_handler(signum, frame):
'''handle test timeout'''
global results, opts
try:
results.add('TIMEOUT', '<span class="failed-text">FAILED</span>', opts.timeout)
util.pexpect_close_all()
convert_gpx()
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*.flashlog')
results.addglob("MAVLink log", '*.tlog')
results.addfile('ArduPlane build log', 'ArduPlane.txt')
results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
results.addfile('ArduCopter build log', 'ArduCopter.txt')
results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
results.addfile('APMrover2 build log', 'APMrover2.txt')
results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
write_webresults(results)
os.killpg(0, signal.SIGKILL)
except Exception:
pass
sys.exit(1)
############## main program #############
parser = optparse.OptionParser("autotest")
parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)')
parser.add_option("--list", action='store_true', default=False, help='list the available steps')
parser.add_option("--viewerip", default=None, help='IP address to send MAVLink and fg packets to')
parser.add_option("--map", action='store_true', default=False, help='show map')
parser.add_option("--experimental", default=False, action='store_true', help='enable experimental tests')
parser.add_option("--timeout", default=3000, type='int', help='maximum runtime in seconds')
opts, args = parser.parse_args()
import arducopter, arduplane, apmrover2
steps = [
'prerequesites',
'build.All',
'build.Binaries',
'build.Examples',
'build1280.ArduPlane',
'build2560.ArduPlane',
'build.ArduPlane',
'defaults.ArduPlane',
'fly.ArduPlane',
'logs.ArduPlane',
'build1280.APMrover2',
'build2560.APMrover2',
'build.APMrover2',
'defaults.APMrover2',
'drive.APMrover2',
'logs.APMrover2',
'build2560.ArduCopter',
'build.ArduCopter',
'defaults.ArduCopter',
'fly.ArduCopter',
'logs.ArduCopter',
'convertgpx',
]
skipsteps = opts.skip.split(',')
# ensure we catch timeouts
signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(opts.timeout)
if opts.list:
for step in steps:
print(step)
sys.exit(0)
def skip_step(step):
'''see if a step should be skipped'''
for skip in skipsteps:
if fnmatch.fnmatch(step.lower(), skip.lower()):
return True
return False
def run_step(step):
'''run one step'''
if step == "prerequesites":
return test_prerequesites()
if step == 'build.ArduPlane':
return util.build_SIL('ArduPlane')
if step == 'build.APMrover2':
return util.build_SIL('APMrover2')
if step == 'build.ArduCopter':
return util.build_SIL('ArduCopter')
if step == 'build1280.ArduCopter':
return util.build_AVR('ArduCopter', board='mega')
if step == 'build2560.ArduCopter':
return util.build_AVR('ArduCopter', board='mega2560')
if step == 'build1280.ArduPlane':
return util.build_AVR('ArduPlane', board='mega')
if step == 'build2560.ArduPlane':
return util.build_AVR('ArduPlane', board='mega2560')
if step == 'build1280.APMrover2':
return util.build_AVR('APMrover2', board='mega')
if step == 'build2560.APMrover2':
return util.build_AVR('APMrover2', board='mega2560')
if step == 'defaults.ArduPlane':
return get_default_params('ArduPlane')
if step == 'defaults.ArduCopter':
return get_default_params('ArduCopter')
if step == 'defaults.APMrover2':
return get_default_params('APMrover2')
if step == 'logs.ArduPlane':
return dump_logs('ArduPlane')
if step == 'logs.ArduCopter':
return dump_logs('ArduCopter')
if step == 'logs.APMrover2':
return dump_logs('APMrover2')
if step == 'fly.ArduCopter':
return arducopter.fly_ArduCopter(viewerip=opts.viewerip, map=opts.map)
if step == 'fly.ArduPlane':
return arduplane.fly_ArduPlane(viewerip=opts.viewerip, map=opts.map)
if step == 'drive.APMrover2':
return apmrover2.drive_APMrover2(viewerip=opts.viewerip, map=opts.map)
if step == 'build.All':
return build_all()
if step == 'build.Binaries':
return build_binaries()
if step == 'build.Examples':
return build_examples()
if step == 'convertgpx':
return convert_gpx()
raise RuntimeError("Unknown step %s" % step)
class TestResult(object):
'''test result class'''
def | (self, name, result, elapsed):
self.name = name
self.result = result
self.elapsed = "%.1f" % elapsed
class TestFile(object):
'''test result file'''
def __init__(self, name, fname):
self.name = name
self.fname = fname
class TestResults(object):
'''test results class'''
def __init__(self):
self.date = time.asctime()
self.githash = util.run_cmd('git rev-parse HEAD', output=True, dir=util.reltopdir('.')).strip()
self.tests = []
self.files = []
self.images = []
def add(self, name, result, elapsed):
'''add a result'''
self.tests.append(TestResult(name, result, elapsed))
def addfile(self, name, fname):
'''add a result file'''
self.files.append(TestFile(name, fname))
def addimage(self, name, fname):
'''add a result image'''
self.images.append(TestFile(name, fname))
def addglob(self, name, pattern):
'''add a set of files'''
import glob
for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
self.addfile(name, os.path.basename(f))
def addglobimage(self, name, pattern):
'''add a set of images'''
import glob
for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
self.addimage(name, os.path.basename(f))
def write_webresults(results):
'''write webpage results'''
sys.path.insert(0, os.path.join(util.reltopdir("../mavlink/pymavlink/generator")))
import mavtemplate
t = mavtemplate.MAVTemplate()
for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
html = util.loadfile(h)
f = open(util.reltopdir("../buildlogs/%s" % os.path.basename(h)), mode='w')
t.write(f, html, results)
f.close()
for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
shutil.copy(f, util.reltopdir('../buildlogs/%s' % os.path.basename(f)))
results = TestResults()
def run_tests(steps):
'''run a list of steps'''
global results
passed = True
failed = []
for step in steps:
util.pexpect_close_all()
if skip_step(step):
continue
t1 = time.time()
print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
try:
if not run_step(step):
print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
passed = False
failed.append(step)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
except Exception, msg:
passed = False
failed.append(step)
print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg))
traceback.print_exc(file=sys.stdout)
results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
continue
results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1)
print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
if not passed:
print("FAILED %u tests: %s" % (len(failed), failed))
util.pexpect_close_all()
results.addglob("Google Earth track", '*.kmz')
results.addfile('Full Logs', 'autotest-output.txt')
results.addglob('DataFlash Log', '*.flashlog')
results.addglob("MAVLink log", '*.tlog')
results.addglob("GPX track", '*.gpx')
results.addfile('ArduPlane build log', 'ArduPlane.txt')
results.addfile('ArduPlane code size', 'ArduPlane.sizes.txt')
results.addfile('ArduPlane stack sizes', 'ArduPlane.framesizes.txt')
results.addfile('ArduPlane defaults', 'ArduPlane.defaults.txt')
results.addfile('ArduCopter build log', 'ArduCopter.txt')
results.addfile('ArduCopter code size', 'ArduCopter.sizes.txt')
results.addfile('ArduCopter stack sizes', 'ArduCopter.framesizes.txt')
results.addfile('ArduCopter defaults', 'ArduCopter.defaults.txt')
results.addfile('APMrover2 build log', 'APMrover2.txt')
results.addfile('APMrover2 code size', 'APMrover2.sizes.txt')
results.addfile('APMrover2 stack sizes', 'APMrover2.framesizes.txt')
results.addfile('APMrover2 defaults', 'APMrover2.defaults.txt')
results.addglobimage("Flight Track", '*.png')
write_webresults(results)
return passed
util.mkdir_p(util.reltopdir('../buildlogs'))
lck = util.lock_file(util.reltopdir('../buildlogs/autotest.lck'))
if lck is None:
print("autotest is locked - exiting")
sys.exit(0)
atexit.register(util.pexpect_close_all)
if len(args) > 0:
# allow a wildcard list of steps
matched = []
for a in args:
for s in steps:
if fnmatch.fnmatch(s.lower(), a.lower()):
matched.append(s)
steps = matched
try:
if not run_tests(steps):
sys.exit(1)
except KeyboardInterrupt:
util.pexpect_close_all()
sys.exit(1)
except Exception:
# make sure we kill off any children
util.pexpect_close_all()
raise
| __init__ | identifier_name |
index.js | "use strict";
const helper = require("../../../helper.js");
const mw = helper.requireModule('./mw/cors/index');
const assert = require('assert');
describe("Unit test for: mw - cors", function () {
let req = {
"soajs": {
"registry": {
"serviceConfig": {
"cors": {
"enabled": true,
"origin": "*",
"credentials": "true",
"methods": "GET,HEAD,PUT,PATCH,POST,DELETE",
"headers": "key,soajsauth,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type",
"maxage": 1728000
}
}
}
}
};
let res = {
"setHeader": (msg, data) => {
res.header[msg] = data;
},
"header": {},
"end": () => {
}
};
it("Install & Use the MW", function (done) {
let mw_use = mw({});
mw_use(req, res, () => {
assert.ok(true);
done();
});
});
it("Run MW with OPTION as method", function (done) {
req.method = "OPTIONS";
res.end = () => {
done();
};
let mw_use = mw({});
mw_use(req, res, () => { | }); | });
}); | random_line_split |
vs.py | # -*- coding: utf-8 -*-
"""
pygments.styles.vs
~~~~~~~~~~~~~~~~~~
Simple style with MS Visual Studio colors.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
""" | from pygments.token import Keyword, Name, Comment, String, Error, \
Operator, Generic
class VisualStudioStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "#008000",
Comment.Preproc: "#0000ff",
Keyword: "#0000ff",
Operator.Word: "#0000ff",
Keyword.Type: "#2b91af",
Name.Class: "#2b91af",
String: "#a31515",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
} |
from pygments.style import Style | random_line_split |
vs.py | # -*- coding: utf-8 -*-
"""
pygments.styles.vs
~~~~~~~~~~~~~~~~~~
Simple style with MS Visual Studio colors.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Operator, Generic
class | (Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "#008000",
Comment.Preproc: "#0000ff",
Keyword: "#0000ff",
Operator.Word: "#0000ff",
Keyword.Type: "#2b91af",
Name.Class: "#2b91af",
String: "#a31515",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
}
| VisualStudioStyle | identifier_name |
vs.py | # -*- coding: utf-8 -*-
"""
pygments.styles.vs
~~~~~~~~~~~~~~~~~~
Simple style with MS Visual Studio colors.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Operator, Generic
class VisualStudioStyle(Style):
| background_color = "#ffffff"
default_style = ""
styles = {
Comment: "#008000",
Comment.Preproc: "#0000ff",
Keyword: "#0000ff",
Operator.Word: "#0000ff",
Keyword.Type: "#2b91af",
Name.Class: "#2b91af",
String: "#a31515",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
} | identifier_body | |
wunderground.py | import urllib2, json, time, sys
from datetime import date, datetime
from dateutil.rrule import rrule, DAILY
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-f", dest="fahrenheit", action="store", default=False, type="string", help="Convert to FAHRENHEIT")
parser.add_option("-e", dest="end", action="store", default=False, type="string", help="START date")
parser.add_option("-s", dest="start", action="store", default=False, type="string", help="END date")
parser.add_option("-t", dest="token", action="store", default=False, type="string", help="Weather Underground TOKEN")
(options, args) = parser.parse_args()
if options.token:
token = options.token
else:
parser.print_help()
sys.exit()
if options.start:
start = options.start
else:
parser.print_help()
sys.exit()
if options.end:
end = options.end
else:
parser.print_help()
sys.exit()
if options.fahrenheit:
fahrenheit = True
else:
fahrenheit = False
start = datetime.strptime(start,'%Y-%m-%d')
end = datetime.strptime(end,'%Y-%m-%d')
url = ""
if end < start:
print "Error: end date " + str(end) + " occurs before start date " + str(start)
sys.exit()
for dt in rrule(DAILY, dtstart=start, until=end):
total = 0.0
temp = 0.0
count = 0
wunderground_url ="http://api.wunderground.com/api/" + token + "/history_" + dt.strftime("%Y%m%d") +"/q/NY/New_York_City.json"
try:
url = urllib2.urlopen(wunderground_url)
parsed_json = json.loads(url.read())
except: | print "Is your token correct?"
url.close()
sys.exit()
try:
for mean in parsed_json['history']['observations']:
if fahrenheit:
total += float(mean['tempi'])
else:
total += float(mean['tempm'])
count += 1
temp = (total / count)
print dt.strftime("%Y-%m-%d") + "," + str(temp)
except:
print "Error retrieving temperature records for start date " + str(start) + " end date " + str(end)
url.close()
time.sleep(10) | print "Error reading URL " + wunderground_url | random_line_split |
wunderground.py | import urllib2, json, time, sys
from datetime import date, datetime
from dateutil.rrule import rrule, DAILY
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-f", dest="fahrenheit", action="store", default=False, type="string", help="Convert to FAHRENHEIT")
parser.add_option("-e", dest="end", action="store", default=False, type="string", help="START date")
parser.add_option("-s", dest="start", action="store", default=False, type="string", help="END date")
parser.add_option("-t", dest="token", action="store", default=False, type="string", help="Weather Underground TOKEN")
(options, args) = parser.parse_args()
if options.token:
token = options.token
else:
parser.print_help()
sys.exit()
if options.start:
start = options.start
else:
parser.print_help()
sys.exit()
if options.end:
end = options.end
else:
parser.print_help()
sys.exit()
if options.fahrenheit:
fahrenheit = True
else:
fahrenheit = False
start = datetime.strptime(start,'%Y-%m-%d')
end = datetime.strptime(end,'%Y-%m-%d')
url = ""
if end < start:
print "Error: end date " + str(end) + " occurs before start date " + str(start)
sys.exit()
for dt in rrule(DAILY, dtstart=start, until=end):
total = 0.0
temp = 0.0
count = 0
wunderground_url ="http://api.wunderground.com/api/" + token + "/history_" + dt.strftime("%Y%m%d") +"/q/NY/New_York_City.json"
try:
url = urllib2.urlopen(wunderground_url)
parsed_json = json.loads(url.read())
except:
print "Error reading URL " + wunderground_url
print "Is your token correct?"
url.close()
sys.exit()
try:
for mean in parsed_json['history']['observations']:
if fahrenheit:
total += float(mean['tempi'])
else:
|
count += 1
temp = (total / count)
print dt.strftime("%Y-%m-%d") + "," + str(temp)
except:
print "Error retrieving temperature records for start date " + str(start) + " end date " + str(end)
url.close()
time.sleep(10)
| total += float(mean['tempm']) | conditional_block |
youtube.py | # s-*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para Youtube
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import re
import urllib
import urlparse
from core import config
from core import httptools
from core import logger
from core import scrapertools
from core import jsontools as json
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("(page_url='%s')" % page_url)
if not page_url.startswith("http"):
page_url = "http://www.youtube.com/watch?v=%s" % page_url
logger.info(" page_url->'%s'" % page_url)
video_id = scrapertools.find_single_match(page_url, 'v=([A-z0-9_-]{11})')
video_urls = extract_videos(video_id)
video_urls.reverse()
for video_url in video_urls:
logger.info(str(video_url))
return video_urls
def remove_additional_ending_delimiter(data):
pos = data.find("};")
if pos != -1:
data = data[:pos + 1]
return data
def normalize_url(url):
if url[0:2] == "//":
url = "http:" + url
return url
def extract_flashvars(data):
assets = 0
flashvars = {}
found = False
for line in data.split("\n"):
if line.strip().find(";ytplayer.config = ") > 0:
found = True
p1 = line.find(";ytplayer.config = ") + len(";ytplayer.config = ") - 1
p2 = line.rfind(";")
if p1 <= 0 or p2 <= 0:
continue
data = line[p1 + 1:p2]
break
data = remove_additional_ending_delimiter(data)
if found:
data = json.load_json(data)
if assets:
flashvars = data["assets"]
else:
flashvars = data["args"]
for k in ["html", "css", "js"]:
if k in flashvars:
flashvars[k] = normalize_url(flashvars[k])
return flashvars
def extract_videos(video_id):
fmt_value = {
5: "240p h263 flv",
6: "240p h263 flv",
18: "360p h264 mp4",
22: "720p h264 mp4",
26: "???",
33: "???",
34: "360p h264 flv",
35: "480p h264 flv",
36: "3gpp",
37: "1080p h264 mp4",
38: "4K h264 mp4",
43: "360p vp8 webm",
44: "480p vp8 webm",
45: "720p vp8 webm",
46: "1080p vp8 webm",
59: "480p h264 mp4",
78: "480p h264 mp4",
82: "360p h264 3D",
83: "480p h264 3D",
84: "720p h264 3D",
85: "1080p h264 3D",
100: "360p vp8 3D",
101: "480p vp8 3D",
102: "720p vp8 3D"
}
url = 'http://www.youtube.com/get_video_info?video_id=%s&eurl=https://youtube.googleapis.com/v/%s&ssl_stream=1' % \
(video_id, video_id)
data = httptools.downloadpage(url).data
video_urls = []
params = dict(urlparse.parse_qsl(data))
if params.get('hlsvp'):
video_urls.append(["(LIVE .m3u8) [youtube]", params['hlsvp']])
return video_urls
if config.is_xbmc():
import xbmc
xbmc_version = config.get_platform(True)['num_version']
if xbmc_version >= 17 and xbmc.getCondVisibility('System.HasAddon(inputstream.adaptive)') \
and params.get('dashmpd'):
if params.get('use_cipher_signature', '') != 'True':
video_urls.append(['mpd HD [youtube]', params['dashmpd'], 0, '', True])
js_signature = ""
youtube_page_data = httptools.downloadpage("http://www.youtube.com/watch?v=%s" % video_id).data
params = extract_flashvars(youtube_page_data)
if params.get('url_encoded_fmt_stream_map'):
data_flashvars = params["url_encoded_fmt_stream_map"].split(",")
for url_desc in data_flashvars:
url_desc_map = dict(urlparse.parse_qsl(url_desc))
if not url_desc_map.get("url") and not url_desc_map.get("stream"):
continue
try:
key = int(url_desc_map["itag"])
if not fmt_value.get(key):
continue
if url_desc_map.get("url"):
url = urllib.unquote(url_desc_map["url"])
elif url_desc_map.get("conn") and url_desc_map.get("stream"):
url = urllib.unquote(url_desc_map["conn"])
if url.rfind("/") < len(url) - 1:
url += "/"
url += urllib.unquote(url_desc_map["stream"])
elif url_desc_map.get("stream") and not url_desc_map.get("conn"):
url = urllib.unquote(url_desc_map["stream"])
if url_desc_map.get("sig"):
url += "&signature=" + url_desc_map["sig"]
elif url_desc_map.get("s"):
sig = url_desc_map["s"]
if not js_signature:
|
signature = js_signature([sig])
url += "&signature=" + signature
url = url.replace(",", "%2C")
video_urls.append(["("+fmt_value[key]+") [youtube]", url])
except:
import traceback
logger.info(traceback.format_exc())
return video_urls
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'youtube(?:-nocookie)?\.com/(?:(?:(?:v/|embed/))|(?:(?:watch(?:_popup)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v=))?([0-9A-Za-z_-]{11})'#'"http://www.youtube.com/v/([^"]+)"'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url!='':
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
patronvideos = 'www.youtube.*?v(?:=|%3D)([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
#http://www.youtube.com/v/AcbsMOMg2fQ
patronvideos = 'youtube.com/v/([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
| urljs = scrapertools.find_single_match(youtube_page_data, '"assets":.*?"js":\s*"([^"]+)"')
urljs = urljs.replace("\\", "")
if urljs:
if not re.search(r'https?://', urljs):
urljs = urlparse.urljoin("https://www.youtube.com", urljs)
data_js = httptools.downloadpage(urljs).data
from jsinterpreter import JSInterpreter
funcname = scrapertools.find_single_match(data_js, '\.sig\|\|([A-z0-9$]+)\(')
if not funcname:
funcname = scrapertools.find_single_match(data_js, '["\']signature["\']\s*,\s*'
'([A-z0-9$]+)\(')
jsi = JSInterpreter(data_js)
js_signature = jsi.extract_function(funcname) | conditional_block |
youtube.py | # s-*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para Youtube
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import re
import urllib
import urlparse
from core import config
from core import httptools
from core import logger
from core import scrapertools
from core import jsontools as json
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("(page_url='%s')" % page_url)
if not page_url.startswith("http"):
page_url = "http://www.youtube.com/watch?v=%s" % page_url
logger.info(" page_url->'%s'" % page_url) | video_urls.reverse()
for video_url in video_urls:
logger.info(str(video_url))
return video_urls
def remove_additional_ending_delimiter(data):
pos = data.find("};")
if pos != -1:
data = data[:pos + 1]
return data
def normalize_url(url):
if url[0:2] == "//":
url = "http:" + url
return url
def extract_flashvars(data):
assets = 0
flashvars = {}
found = False
for line in data.split("\n"):
if line.strip().find(";ytplayer.config = ") > 0:
found = True
p1 = line.find(";ytplayer.config = ") + len(";ytplayer.config = ") - 1
p2 = line.rfind(";")
if p1 <= 0 or p2 <= 0:
continue
data = line[p1 + 1:p2]
break
data = remove_additional_ending_delimiter(data)
if found:
data = json.load_json(data)
if assets:
flashvars = data["assets"]
else:
flashvars = data["args"]
for k in ["html", "css", "js"]:
if k in flashvars:
flashvars[k] = normalize_url(flashvars[k])
return flashvars
def extract_videos(video_id):
fmt_value = {
5: "240p h263 flv",
6: "240p h263 flv",
18: "360p h264 mp4",
22: "720p h264 mp4",
26: "???",
33: "???",
34: "360p h264 flv",
35: "480p h264 flv",
36: "3gpp",
37: "1080p h264 mp4",
38: "4K h264 mp4",
43: "360p vp8 webm",
44: "480p vp8 webm",
45: "720p vp8 webm",
46: "1080p vp8 webm",
59: "480p h264 mp4",
78: "480p h264 mp4",
82: "360p h264 3D",
83: "480p h264 3D",
84: "720p h264 3D",
85: "1080p h264 3D",
100: "360p vp8 3D",
101: "480p vp8 3D",
102: "720p vp8 3D"
}
url = 'http://www.youtube.com/get_video_info?video_id=%s&eurl=https://youtube.googleapis.com/v/%s&ssl_stream=1' % \
(video_id, video_id)
data = httptools.downloadpage(url).data
video_urls = []
params = dict(urlparse.parse_qsl(data))
if params.get('hlsvp'):
video_urls.append(["(LIVE .m3u8) [youtube]", params['hlsvp']])
return video_urls
if config.is_xbmc():
import xbmc
xbmc_version = config.get_platform(True)['num_version']
if xbmc_version >= 17 and xbmc.getCondVisibility('System.HasAddon(inputstream.adaptive)') \
and params.get('dashmpd'):
if params.get('use_cipher_signature', '') != 'True':
video_urls.append(['mpd HD [youtube]', params['dashmpd'], 0, '', True])
js_signature = ""
youtube_page_data = httptools.downloadpage("http://www.youtube.com/watch?v=%s" % video_id).data
params = extract_flashvars(youtube_page_data)
if params.get('url_encoded_fmt_stream_map'):
data_flashvars = params["url_encoded_fmt_stream_map"].split(",")
for url_desc in data_flashvars:
url_desc_map = dict(urlparse.parse_qsl(url_desc))
if not url_desc_map.get("url") and not url_desc_map.get("stream"):
continue
try:
key = int(url_desc_map["itag"])
if not fmt_value.get(key):
continue
if url_desc_map.get("url"):
url = urllib.unquote(url_desc_map["url"])
elif url_desc_map.get("conn") and url_desc_map.get("stream"):
url = urllib.unquote(url_desc_map["conn"])
if url.rfind("/") < len(url) - 1:
url += "/"
url += urllib.unquote(url_desc_map["stream"])
elif url_desc_map.get("stream") and not url_desc_map.get("conn"):
url = urllib.unquote(url_desc_map["stream"])
if url_desc_map.get("sig"):
url += "&signature=" + url_desc_map["sig"]
elif url_desc_map.get("s"):
sig = url_desc_map["s"]
if not js_signature:
urljs = scrapertools.find_single_match(youtube_page_data, '"assets":.*?"js":\s*"([^"]+)"')
urljs = urljs.replace("\\", "")
if urljs:
if not re.search(r'https?://', urljs):
urljs = urlparse.urljoin("https://www.youtube.com", urljs)
data_js = httptools.downloadpage(urljs).data
from jsinterpreter import JSInterpreter
funcname = scrapertools.find_single_match(data_js, '\.sig\|\|([A-z0-9$]+)\(')
if not funcname:
funcname = scrapertools.find_single_match(data_js, '["\']signature["\']\s*,\s*'
'([A-z0-9$]+)\(')
jsi = JSInterpreter(data_js)
js_signature = jsi.extract_function(funcname)
signature = js_signature([sig])
url += "&signature=" + signature
url = url.replace(",", "%2C")
video_urls.append(["("+fmt_value[key]+") [youtube]", url])
except:
import traceback
logger.info(traceback.format_exc())
return video_urls
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'youtube(?:-nocookie)?\.com/(?:(?:(?:v/|embed/))|(?:(?:watch(?:_popup)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v=))?([0-9A-Za-z_-]{11})'#'"http://www.youtube.com/v/([^"]+)"'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url!='':
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
patronvideos = 'www.youtube.*?v(?:=|%3D)([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
#http://www.youtube.com/v/AcbsMOMg2fQ
patronvideos = 'youtube.com/v/([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve |
video_id = scrapertools.find_single_match(page_url, 'v=([A-z0-9_-]{11})')
video_urls = extract_videos(video_id) | random_line_split |
youtube.py | # s-*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para Youtube
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import re
import urllib
import urlparse
from core import config
from core import httptools
from core import logger
from core import scrapertools
from core import jsontools as json
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
|
def remove_additional_ending_delimiter(data):
pos = data.find("};")
if pos != -1:
data = data[:pos + 1]
return data
def normalize_url(url):
if url[0:2] == "//":
url = "http:" + url
return url
def extract_flashvars(data):
assets = 0
flashvars = {}
found = False
for line in data.split("\n"):
if line.strip().find(";ytplayer.config = ") > 0:
found = True
p1 = line.find(";ytplayer.config = ") + len(";ytplayer.config = ") - 1
p2 = line.rfind(";")
if p1 <= 0 or p2 <= 0:
continue
data = line[p1 + 1:p2]
break
data = remove_additional_ending_delimiter(data)
if found:
data = json.load_json(data)
if assets:
flashvars = data["assets"]
else:
flashvars = data["args"]
for k in ["html", "css", "js"]:
if k in flashvars:
flashvars[k] = normalize_url(flashvars[k])
return flashvars
def extract_videos(video_id):
fmt_value = {
5: "240p h263 flv",
6: "240p h263 flv",
18: "360p h264 mp4",
22: "720p h264 mp4",
26: "???",
33: "???",
34: "360p h264 flv",
35: "480p h264 flv",
36: "3gpp",
37: "1080p h264 mp4",
38: "4K h264 mp4",
43: "360p vp8 webm",
44: "480p vp8 webm",
45: "720p vp8 webm",
46: "1080p vp8 webm",
59: "480p h264 mp4",
78: "480p h264 mp4",
82: "360p h264 3D",
83: "480p h264 3D",
84: "720p h264 3D",
85: "1080p h264 3D",
100: "360p vp8 3D",
101: "480p vp8 3D",
102: "720p vp8 3D"
}
url = 'http://www.youtube.com/get_video_info?video_id=%s&eurl=https://youtube.googleapis.com/v/%s&ssl_stream=1' % \
(video_id, video_id)
data = httptools.downloadpage(url).data
video_urls = []
params = dict(urlparse.parse_qsl(data))
if params.get('hlsvp'):
video_urls.append(["(LIVE .m3u8) [youtube]", params['hlsvp']])
return video_urls
if config.is_xbmc():
import xbmc
xbmc_version = config.get_platform(True)['num_version']
if xbmc_version >= 17 and xbmc.getCondVisibility('System.HasAddon(inputstream.adaptive)') \
and params.get('dashmpd'):
if params.get('use_cipher_signature', '') != 'True':
video_urls.append(['mpd HD [youtube]', params['dashmpd'], 0, '', True])
js_signature = ""
youtube_page_data = httptools.downloadpage("http://www.youtube.com/watch?v=%s" % video_id).data
params = extract_flashvars(youtube_page_data)
if params.get('url_encoded_fmt_stream_map'):
data_flashvars = params["url_encoded_fmt_stream_map"].split(",")
for url_desc in data_flashvars:
url_desc_map = dict(urlparse.parse_qsl(url_desc))
if not url_desc_map.get("url") and not url_desc_map.get("stream"):
continue
try:
key = int(url_desc_map["itag"])
if not fmt_value.get(key):
continue
if url_desc_map.get("url"):
url = urllib.unquote(url_desc_map["url"])
elif url_desc_map.get("conn") and url_desc_map.get("stream"):
url = urllib.unquote(url_desc_map["conn"])
if url.rfind("/") < len(url) - 1:
url += "/"
url += urllib.unquote(url_desc_map["stream"])
elif url_desc_map.get("stream") and not url_desc_map.get("conn"):
url = urllib.unquote(url_desc_map["stream"])
if url_desc_map.get("sig"):
url += "&signature=" + url_desc_map["sig"]
elif url_desc_map.get("s"):
sig = url_desc_map["s"]
if not js_signature:
urljs = scrapertools.find_single_match(youtube_page_data, '"assets":.*?"js":\s*"([^"]+)"')
urljs = urljs.replace("\\", "")
if urljs:
if not re.search(r'https?://', urljs):
urljs = urlparse.urljoin("https://www.youtube.com", urljs)
data_js = httptools.downloadpage(urljs).data
from jsinterpreter import JSInterpreter
funcname = scrapertools.find_single_match(data_js, '\.sig\|\|([A-z0-9$]+)\(')
if not funcname:
funcname = scrapertools.find_single_match(data_js, '["\']signature["\']\s*,\s*'
'([A-z0-9$]+)\(')
jsi = JSInterpreter(data_js)
js_signature = jsi.extract_function(funcname)
signature = js_signature([sig])
url += "&signature=" + signature
url = url.replace(",", "%2C")
video_urls.append(["("+fmt_value[key]+") [youtube]", url])
except:
import traceback
logger.info(traceback.format_exc())
return video_urls
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'youtube(?:-nocookie)?\.com/(?:(?:(?:v/|embed/))|(?:(?:watch(?:_popup)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v=))?([0-9A-Za-z_-]{11})'#'"http://www.youtube.com/v/([^"]+)"'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url!='':
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
patronvideos = 'www.youtube.*?v(?:=|%3D)([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
#http://www.youtube.com/v/AcbsMOMg2fQ
patronvideos = 'youtube.com/v/([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
| logger.info("(page_url='%s')" % page_url)
if not page_url.startswith("http"):
page_url = "http://www.youtube.com/watch?v=%s" % page_url
logger.info(" page_url->'%s'" % page_url)
video_id = scrapertools.find_single_match(page_url, 'v=([A-z0-9_-]{11})')
video_urls = extract_videos(video_id)
video_urls.reverse()
for video_url in video_urls:
logger.info(str(video_url))
return video_urls | identifier_body |
youtube.py | # s-*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para Youtube
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import re
import urllib
import urlparse
from core import config
from core import httptools
from core import logger
from core import scrapertools
from core import jsontools as json
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("(page_url='%s')" % page_url)
if not page_url.startswith("http"):
page_url = "http://www.youtube.com/watch?v=%s" % page_url
logger.info(" page_url->'%s'" % page_url)
video_id = scrapertools.find_single_match(page_url, 'v=([A-z0-9_-]{11})')
video_urls = extract_videos(video_id)
video_urls.reverse()
for video_url in video_urls:
logger.info(str(video_url))
return video_urls
def | (data):
pos = data.find("};")
if pos != -1:
data = data[:pos + 1]
return data
def normalize_url(url):
if url[0:2] == "//":
url = "http:" + url
return url
def extract_flashvars(data):
assets = 0
flashvars = {}
found = False
for line in data.split("\n"):
if line.strip().find(";ytplayer.config = ") > 0:
found = True
p1 = line.find(";ytplayer.config = ") + len(";ytplayer.config = ") - 1
p2 = line.rfind(";")
if p1 <= 0 or p2 <= 0:
continue
data = line[p1 + 1:p2]
break
data = remove_additional_ending_delimiter(data)
if found:
data = json.load_json(data)
if assets:
flashvars = data["assets"]
else:
flashvars = data["args"]
for k in ["html", "css", "js"]:
if k in flashvars:
flashvars[k] = normalize_url(flashvars[k])
return flashvars
def extract_videos(video_id):
fmt_value = {
5: "240p h263 flv",
6: "240p h263 flv",
18: "360p h264 mp4",
22: "720p h264 mp4",
26: "???",
33: "???",
34: "360p h264 flv",
35: "480p h264 flv",
36: "3gpp",
37: "1080p h264 mp4",
38: "4K h264 mp4",
43: "360p vp8 webm",
44: "480p vp8 webm",
45: "720p vp8 webm",
46: "1080p vp8 webm",
59: "480p h264 mp4",
78: "480p h264 mp4",
82: "360p h264 3D",
83: "480p h264 3D",
84: "720p h264 3D",
85: "1080p h264 3D",
100: "360p vp8 3D",
101: "480p vp8 3D",
102: "720p vp8 3D"
}
url = 'http://www.youtube.com/get_video_info?video_id=%s&eurl=https://youtube.googleapis.com/v/%s&ssl_stream=1' % \
(video_id, video_id)
data = httptools.downloadpage(url).data
video_urls = []
params = dict(urlparse.parse_qsl(data))
if params.get('hlsvp'):
video_urls.append(["(LIVE .m3u8) [youtube]", params['hlsvp']])
return video_urls
if config.is_xbmc():
import xbmc
xbmc_version = config.get_platform(True)['num_version']
if xbmc_version >= 17 and xbmc.getCondVisibility('System.HasAddon(inputstream.adaptive)') \
and params.get('dashmpd'):
if params.get('use_cipher_signature', '') != 'True':
video_urls.append(['mpd HD [youtube]', params['dashmpd'], 0, '', True])
js_signature = ""
youtube_page_data = httptools.downloadpage("http://www.youtube.com/watch?v=%s" % video_id).data
params = extract_flashvars(youtube_page_data)
if params.get('url_encoded_fmt_stream_map'):
data_flashvars = params["url_encoded_fmt_stream_map"].split(",")
for url_desc in data_flashvars:
url_desc_map = dict(urlparse.parse_qsl(url_desc))
if not url_desc_map.get("url") and not url_desc_map.get("stream"):
continue
try:
key = int(url_desc_map["itag"])
if not fmt_value.get(key):
continue
if url_desc_map.get("url"):
url = urllib.unquote(url_desc_map["url"])
elif url_desc_map.get("conn") and url_desc_map.get("stream"):
url = urllib.unquote(url_desc_map["conn"])
if url.rfind("/") < len(url) - 1:
url += "/"
url += urllib.unquote(url_desc_map["stream"])
elif url_desc_map.get("stream") and not url_desc_map.get("conn"):
url = urllib.unquote(url_desc_map["stream"])
if url_desc_map.get("sig"):
url += "&signature=" + url_desc_map["sig"]
elif url_desc_map.get("s"):
sig = url_desc_map["s"]
if not js_signature:
urljs = scrapertools.find_single_match(youtube_page_data, '"assets":.*?"js":\s*"([^"]+)"')
urljs = urljs.replace("\\", "")
if urljs:
if not re.search(r'https?://', urljs):
urljs = urlparse.urljoin("https://www.youtube.com", urljs)
data_js = httptools.downloadpage(urljs).data
from jsinterpreter import JSInterpreter
funcname = scrapertools.find_single_match(data_js, '\.sig\|\|([A-z0-9$]+)\(')
if not funcname:
funcname = scrapertools.find_single_match(data_js, '["\']signature["\']\s*,\s*'
'([A-z0-9$]+)\(')
jsi = JSInterpreter(data_js)
js_signature = jsi.extract_function(funcname)
signature = js_signature([sig])
url += "&signature=" + signature
url = url.replace(",", "%2C")
video_urls.append(["("+fmt_value[key]+") [youtube]", url])
except:
import traceback
logger.info(traceback.format_exc())
return video_urls
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'youtube(?:-nocookie)?\.com/(?:(?:(?:v/|embed/))|(?:(?:watch(?:_popup)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v=))?([0-9A-Za-z_-]{11})'#'"http://www.youtube.com/v/([^"]+)"'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url!='':
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
patronvideos = 'www.youtube.*?v(?:=|%3D)([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
#http://www.youtube.com/v/AcbsMOMg2fQ
patronvideos = 'youtube.com/v/([0-9A-Za-z_-]{11})'
logger.info(" #"+patronvideos+"#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[YouTube]"
url = "http://www.youtube.com/watch?v="+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append([titulo, url, 'youtube'])
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
| remove_additional_ending_delimiter | identifier_name |
mlp.py | import sys
import numpy as np
from sklearn.model_selection import StratifiedKFold
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import f1_score,accuracy_score, recall_score, precision_score
import scipy
from random import shuffle
def | (filename):
f = open(filename)
x = []
y = []
for line in f:
v = line.rstrip('\n').split(',')
vf = [float(i) for i in v[:-1]]
x.append(vf)
y.append(float(v[-1]))
return x,y
def inductor(x,y):
clf = MLPClassifier(solver='lbfgs', alpha=1e-5,hidden_layer_sizes=(20, 8), max_iter=1000,random_state=1)
clf.fit(x,y)
return clf
if __name__ == '__main__':
fname = sys.argv[1]
print("loading data ..")
x,y = load_dataset(fname)
x = np.array(x)
y = np.array(y)
n = len(x)
kf = StratifiedKFold(n_splits=3, shuffle=True)
for train_index, test_index in kf.split(x,y):
shuffle(train_index)
shuffle(test_index)
xtrain = x[train_index]
ytrain = y[train_index]
xtest = x[test_index]
ytest = y[test_index]
print("training ...")
clf = inductor(xtrain,ytrain)
print("predicting ...")
ypred = clf.predict(xtest)
print "(accuracy : %4.3f) "%(accuracy_score(ytest,ypred))
print "(f1 : %4.3f) "%(f1_score(ytest,ypred, average='weighted'))
print "(recall : %4.3f) "%(recall_score(ytest,ypred,average='weighted'))
print "(precision : %4.3f) "%(precision_score(ytest,ypred,average='weighted'))
| load_dataset | identifier_name |
mlp.py | import sys
import numpy as np
from sklearn.model_selection import StratifiedKFold
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import f1_score,accuracy_score, recall_score, precision_score
import scipy
from random import shuffle
def load_dataset(filename):
f = open(filename)
x = []
y = []
for line in f:
v = line.rstrip('\n').split(',')
vf = [float(i) for i in v[:-1]]
x.append(vf)
y.append(float(v[-1]))
return x,y
def inductor(x,y):
clf = MLPClassifier(solver='lbfgs', alpha=1e-5,hidden_layer_sizes=(20, 8), max_iter=1000,random_state=1)
clf.fit(x,y)
return clf
if __name__ == '__main__':
| fname = sys.argv[1]
print("loading data ..")
x,y = load_dataset(fname)
x = np.array(x)
y = np.array(y)
n = len(x)
kf = StratifiedKFold(n_splits=3, shuffle=True)
for train_index, test_index in kf.split(x,y):
shuffle(train_index)
shuffle(test_index)
xtrain = x[train_index]
ytrain = y[train_index]
xtest = x[test_index]
ytest = y[test_index]
print("training ...")
clf = inductor(xtrain,ytrain)
print("predicting ...")
ypred = clf.predict(xtest)
print "(accuracy : %4.3f) "%(accuracy_score(ytest,ypred))
print "(f1 : %4.3f) "%(f1_score(ytest,ypred, average='weighted'))
print "(recall : %4.3f) "%(recall_score(ytest,ypred,average='weighted'))
print "(precision : %4.3f) "%(precision_score(ytest,ypred,average='weighted')) | conditional_block | |
mlp.py | import sys
import numpy as np
from sklearn.model_selection import StratifiedKFold
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import f1_score,accuracy_score, recall_score, precision_score
import scipy
from random import shuffle
def load_dataset(filename):
f = open(filename)
x = []
y = []
for line in f:
v = line.rstrip('\n').split(',')
vf = [float(i) for i in v[:-1]]
x.append(vf)
y.append(float(v[-1]))
return x,y
def inductor(x,y):
clf = MLPClassifier(solver='lbfgs', alpha=1e-5,hidden_layer_sizes=(20, 8), max_iter=1000,random_state=1)
clf.fit(x,y)
return clf
if __name__ == '__main__':
fname = sys.argv[1]
print("loading data ..") | kf = StratifiedKFold(n_splits=3, shuffle=True)
for train_index, test_index in kf.split(x,y):
shuffle(train_index)
shuffle(test_index)
xtrain = x[train_index]
ytrain = y[train_index]
xtest = x[test_index]
ytest = y[test_index]
print("training ...")
clf = inductor(xtrain,ytrain)
print("predicting ...")
ypred = clf.predict(xtest)
print "(accuracy : %4.3f) "%(accuracy_score(ytest,ypred))
print "(f1 : %4.3f) "%(f1_score(ytest,ypred, average='weighted'))
print "(recall : %4.3f) "%(recall_score(ytest,ypred,average='weighted'))
print "(precision : %4.3f) "%(precision_score(ytest,ypred,average='weighted')) | x,y = load_dataset(fname)
x = np.array(x)
y = np.array(y)
n = len(x) | random_line_split |
mlp.py | import sys
import numpy as np
from sklearn.model_selection import StratifiedKFold
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import f1_score,accuracy_score, recall_score, precision_score
import scipy
from random import shuffle
def load_dataset(filename):
f = open(filename)
x = []
y = []
for line in f:
v = line.rstrip('\n').split(',')
vf = [float(i) for i in v[:-1]]
x.append(vf)
y.append(float(v[-1]))
return x,y
def inductor(x,y):
|
if __name__ == '__main__':
fname = sys.argv[1]
print("loading data ..")
x,y = load_dataset(fname)
x = np.array(x)
y = np.array(y)
n = len(x)
kf = StratifiedKFold(n_splits=3, shuffle=True)
for train_index, test_index in kf.split(x,y):
shuffle(train_index)
shuffle(test_index)
xtrain = x[train_index]
ytrain = y[train_index]
xtest = x[test_index]
ytest = y[test_index]
print("training ...")
clf = inductor(xtrain,ytrain)
print("predicting ...")
ypred = clf.predict(xtest)
print "(accuracy : %4.3f) "%(accuracy_score(ytest,ypred))
print "(f1 : %4.3f) "%(f1_score(ytest,ypred, average='weighted'))
print "(recall : %4.3f) "%(recall_score(ytest,ypred,average='weighted'))
print "(precision : %4.3f) "%(precision_score(ytest,ypred,average='weighted'))
| clf = MLPClassifier(solver='lbfgs', alpha=1e-5,hidden_layer_sizes=(20, 8), max_iter=1000,random_state=1)
clf.fit(x,y)
return clf | identifier_body |
__init__.py | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.utils.translation import ugettext_noop as _
from geonode.notifications_helper import NotificationsAppConfigBase
class PeopleAppConfig(NotificationsAppConfigBase):
name = 'geonode.people'
NOTIFICATIONS = (("user_follow", _("User following you"), _("Another user has started following you"),),
("account_approve", _("User requested access"),
_("A new user has requested access to the site"),),
("account_active", _("Account activated"),
_("This account is now active and can log in the site"),),
)
def ready(self):
|
default_app_config = 'geonode.people.PeopleAppConfig'
| super(PeopleAppConfig, self).ready() | identifier_body |
__init__.py | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.utils.translation import ugettext_noop as _
from geonode.notifications_helper import NotificationsAppConfigBase
class PeopleAppConfig(NotificationsAppConfigBase):
name = 'geonode.people'
NOTIFICATIONS = (("user_follow", _("User following you"), _("Another user has started following you"),),
("account_approve", _("User requested access"),
_("A new user has requested access to the site"),),
("account_active", _("Account activated"),
_("This account is now active and can log in the site"),),
)
def | (self):
super(PeopleAppConfig, self).ready()
default_app_config = 'geonode.people.PeopleAppConfig'
| ready | identifier_name |
__init__.py | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. | #
#########################################################################
from django.utils.translation import ugettext_noop as _
from geonode.notifications_helper import NotificationsAppConfigBase
class PeopleAppConfig(NotificationsAppConfigBase):
name = 'geonode.people'
NOTIFICATIONS = (("user_follow", _("User following you"), _("Another user has started following you"),),
("account_approve", _("User requested access"),
_("A new user has requested access to the site"),),
("account_active", _("Account activated"),
_("This account is now active and can log in the site"),),
)
def ready(self):
super(PeopleAppConfig, self).ready()
default_app_config = 'geonode.people.PeopleAppConfig' | random_line_split | |
mod.rs | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use super::module::IRModule;
use super::span::*;
use crate::runtime::function::Result;
use crate::runtime::object::{Object, ObjectPtr};
use crate::runtime::{
array::Array,
function::{self, Function, ToFunction},
string::String as TString,
};
/// The diagnostic interface to TVM, used for reporting and rendering
/// diagnostic information by the compiler. This module exposes
/// three key abstractions: a Diagnostic, the DiagnosticContext,
/// and the DiagnosticRenderer.
use tvm_macros::{external, Object};
pub mod codespan;
external! {
#[name("runtime.ArrayGetItem")]
fn get_renderer() -> DiagnosticRenderer;
#[name("diagnostics.DiagnosticRenderer")]
fn diagnostic_renderer(func: Function) -> DiagnosticRenderer;
#[name("diagnostics.Emit")]
fn emit(ctx: DiagnosticContext, diagnostic: Diagnostic) -> ();
#[name("diagnostics.DiagnosticContextDefault")]
fn diagnostic_context_default(module: IRModule) -> DiagnosticContext;
#[name("diagnostics.DiagnosticContextRender")]
fn diagnostic_context_render(ctx: DiagnosticContext) -> ();
#[name("diagnostics.DiagnosticRendererRender")]
fn diagnositc_renderer_render(renderer: DiagnosticRenderer, ctx: DiagnosticContext) -> ();
#[name("diagnostics.ClearRenderer")]
fn clear_renderer() -> ();
}
/// The diagnostic level, controls the printing of the message.
#[repr(C)]
#[derive(PartialEq, Eq, Debug)]
pub enum DiagnosticLevel {
Bug = 10,
Error = 20,
Warning = 30,
Note = 40,
Help = 50,
}
/// A compiler diagnostic.
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "Diagnostic"]
#[type_key = "Diagnostic"]
pub struct DiagnosticNode {
pub base: Object,
/// The level.
pub level: DiagnosticLevel,
/// The span at which to report an error.
pub span: Span,
/// The diagnostic message.
pub message: TString,
}
impl Diagnostic {
pub fn new(level: DiagnosticLevel, span: Span, message: TString) -> Diagnostic {
let node = DiagnosticNode {
base: Object::base::<DiagnosticNode>(),
level,
span,
message,
};
ObjectPtr::new(node).into()
}
pub fn bug(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Bug, span)
}
pub fn error(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Error, span)
}
pub fn warning(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Warning, span)
}
pub fn note(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Note, span)
}
pub fn help(span: Span) -> DiagnosticBuilder |
}
/// A wrapper around std::stringstream to build a diagnostic.
pub struct DiagnosticBuilder {
/// The level.
pub level: DiagnosticLevel,
/// The span of the diagnostic.
pub span: Span,
/// The in progress message.
pub message: String,
}
impl DiagnosticBuilder {
pub fn new(level: DiagnosticLevel, span: Span) -> DiagnosticBuilder {
DiagnosticBuilder {
level,
span,
message: "".into(),
}
}
}
/// Display diagnostics in a given display format.
///
/// A diagnostic renderer is responsible for converting the
/// raw diagnostics into consumable output.
///
/// For example the terminal renderer will render a sequence
/// of compiler diagnostics to std::out and std::err in
/// a human readable form.
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "DiagnosticRenderer"]
#[type_key = "DiagnosticRenderer"]
/// A diagnostic renderer, which given a diagnostic context produces a "rendered"
/// form of the diagnostics for either human or computer consumption.
pub struct DiagnosticRendererNode {
/// The base type.
pub base: Object,
// TODO(@jroesch): we can't easily exposed packed functions due to
// memory layout
// missing field here
}
impl DiagnosticRenderer {
/// Render the provided context.
pub fn render(&self, ctx: DiagnosticContext) -> Result<()> {
diagnositc_renderer_render(self.clone(), ctx)
}
}
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "DiagnosticContext"]
#[type_key = "DiagnosticContext"]
/// A diagnostic context for recording errors against a source file.
pub struct DiagnosticContextNode {
// The base type.
pub base: Object,
/// The Module to report against.
pub module: IRModule,
/// The set of diagnostics to report.
pub diagnostics: Array<Diagnostic>,
/// The renderer set for the context.
pub renderer: DiagnosticRenderer,
}
/// A diagnostic context which records active errors
/// and contains a renderer.
impl DiagnosticContext {
pub fn new<F>(module: IRModule, render_func: F) -> DiagnosticContext
where
F: Fn(DiagnosticContext) -> () + 'static,
{
let renderer = diagnostic_renderer(render_func.to_function()).unwrap();
let node = DiagnosticContextNode {
base: Object::base::<DiagnosticContextNode>(),
module,
diagnostics: Array::from_vec(vec![]).unwrap(),
renderer,
};
DiagnosticContext(Some(ObjectPtr::new(node)))
}
pub fn default(module: IRModule) -> DiagnosticContext {
diagnostic_context_default(module).unwrap()
}
/// Emit a diagnostic.
pub fn emit(&mut self, diagnostic: Diagnostic) -> Result<()> {
emit(self.clone(), diagnostic)
}
/// Render the errors and raise a DiagnosticError exception.
pub fn render(&mut self) -> Result<()> {
diagnostic_context_render(self.clone())
}
/// Emit a diagnostic and then immediately attempt to render all errors.
pub fn emit_fatal(&mut self, diagnostic: Diagnostic) -> Result<()> {
self.emit(diagnostic)?;
self.render()?;
Ok(())
}
}
/// Override the global diagnostics renderer.
// render_func: Option[Callable[[DiagnosticContext], None]]
// If the render_func is None it will remove the current custom renderer
// and return to default behavior.
fn override_renderer<F>(opt_func: Option<F>) -> Result<()>
where
F: Fn(DiagnosticContext) -> () + 'static,
{
match opt_func {
None => clear_renderer(),
Some(func) => {
let func = func.to_function();
let render_factory = move || diagnostic_renderer(func.clone()).unwrap();
function::register_override(render_factory, "diagnostics.OverrideRenderer", true)?;
Ok(())
}
}
}
| {
DiagnosticBuilder::new(DiagnosticLevel::Help, span)
} | identifier_body |
mod.rs | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use super::module::IRModule;
use super::span::*;
use crate::runtime::function::Result;
use crate::runtime::object::{Object, ObjectPtr};
use crate::runtime::{
array::Array,
function::{self, Function, ToFunction},
string::String as TString,
};
/// The diagnostic interface to TVM, used for reporting and rendering
/// diagnostic information by the compiler. This module exposes
/// three key abstractions: a Diagnostic, the DiagnosticContext,
/// and the DiagnosticRenderer.
use tvm_macros::{external, Object};
pub mod codespan;
external! {
#[name("runtime.ArrayGetItem")]
fn get_renderer() -> DiagnosticRenderer;
#[name("diagnostics.DiagnosticRenderer")]
fn diagnostic_renderer(func: Function) -> DiagnosticRenderer;
#[name("diagnostics.Emit")]
fn emit(ctx: DiagnosticContext, diagnostic: Diagnostic) -> ();
#[name("diagnostics.DiagnosticContextDefault")]
fn diagnostic_context_default(module: IRModule) -> DiagnosticContext;
#[name("diagnostics.DiagnosticContextRender")]
fn diagnostic_context_render(ctx: DiagnosticContext) -> ();
#[name("diagnostics.DiagnosticRendererRender")]
fn diagnositc_renderer_render(renderer: DiagnosticRenderer, ctx: DiagnosticContext) -> ();
#[name("diagnostics.ClearRenderer")]
fn clear_renderer() -> ();
}
/// The diagnostic level, controls the printing of the message.
#[repr(C)]
#[derive(PartialEq, Eq, Debug)]
pub enum DiagnosticLevel {
Bug = 10,
Error = 20,
Warning = 30,
Note = 40,
Help = 50,
}
/// A compiler diagnostic.
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "Diagnostic"]
#[type_key = "Diagnostic"]
pub struct DiagnosticNode {
pub base: Object,
/// The level.
pub level: DiagnosticLevel,
/// The span at which to report an error.
pub span: Span,
/// The diagnostic message.
pub message: TString,
}
impl Diagnostic {
pub fn new(level: DiagnosticLevel, span: Span, message: TString) -> Diagnostic {
let node = DiagnosticNode {
base: Object::base::<DiagnosticNode>(),
level,
span,
message,
};
ObjectPtr::new(node).into()
}
pub fn bug(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Bug, span)
}
pub fn error(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Error, span)
}
pub fn warning(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Warning, span)
}
pub fn note(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Note, span)
}
pub fn help(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Help, span)
}
}
/// A wrapper around std::stringstream to build a diagnostic.
pub struct | {
/// The level.
pub level: DiagnosticLevel,
/// The span of the diagnostic.
pub span: Span,
/// The in progress message.
pub message: String,
}
impl DiagnosticBuilder {
pub fn new(level: DiagnosticLevel, span: Span) -> DiagnosticBuilder {
DiagnosticBuilder {
level,
span,
message: "".into(),
}
}
}
/// Display diagnostics in a given display format.
///
/// A diagnostic renderer is responsible for converting the
/// raw diagnostics into consumable output.
///
/// For example the terminal renderer will render a sequence
/// of compiler diagnostics to std::out and std::err in
/// a human readable form.
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "DiagnosticRenderer"]
#[type_key = "DiagnosticRenderer"]
/// A diagnostic renderer, which given a diagnostic context produces a "rendered"
/// form of the diagnostics for either human or computer consumption.
pub struct DiagnosticRendererNode {
/// The base type.
pub base: Object,
// TODO(@jroesch): we can't easily exposed packed functions due to
// memory layout
// missing field here
}
impl DiagnosticRenderer {
/// Render the provided context.
pub fn render(&self, ctx: DiagnosticContext) -> Result<()> {
diagnositc_renderer_render(self.clone(), ctx)
}
}
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "DiagnosticContext"]
#[type_key = "DiagnosticContext"]
/// A diagnostic context for recording errors against a source file.
pub struct DiagnosticContextNode {
// The base type.
pub base: Object,
/// The Module to report against.
pub module: IRModule,
/// The set of diagnostics to report.
pub diagnostics: Array<Diagnostic>,
/// The renderer set for the context.
pub renderer: DiagnosticRenderer,
}
/// A diagnostic context which records active errors
/// and contains a renderer.
impl DiagnosticContext {
pub fn new<F>(module: IRModule, render_func: F) -> DiagnosticContext
where
F: Fn(DiagnosticContext) -> () + 'static,
{
let renderer = diagnostic_renderer(render_func.to_function()).unwrap();
let node = DiagnosticContextNode {
base: Object::base::<DiagnosticContextNode>(),
module,
diagnostics: Array::from_vec(vec![]).unwrap(),
renderer,
};
DiagnosticContext(Some(ObjectPtr::new(node)))
}
pub fn default(module: IRModule) -> DiagnosticContext {
diagnostic_context_default(module).unwrap()
}
/// Emit a diagnostic.
pub fn emit(&mut self, diagnostic: Diagnostic) -> Result<()> {
emit(self.clone(), diagnostic)
}
/// Render the errors and raise a DiagnosticError exception.
pub fn render(&mut self) -> Result<()> {
diagnostic_context_render(self.clone())
}
/// Emit a diagnostic and then immediately attempt to render all errors.
pub fn emit_fatal(&mut self, diagnostic: Diagnostic) -> Result<()> {
self.emit(diagnostic)?;
self.render()?;
Ok(())
}
}
/// Override the global diagnostics renderer.
// render_func: Option[Callable[[DiagnosticContext], None]]
// If the render_func is None it will remove the current custom renderer
// and return to default behavior.
fn override_renderer<F>(opt_func: Option<F>) -> Result<()>
where
F: Fn(DiagnosticContext) -> () + 'static,
{
match opt_func {
None => clear_renderer(),
Some(func) => {
let func = func.to_function();
let render_factory = move || diagnostic_renderer(func.clone()).unwrap();
function::register_override(render_factory, "diagnostics.OverrideRenderer", true)?;
Ok(())
}
}
}
| DiagnosticBuilder | identifier_name |
mod.rs | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use super::module::IRModule;
use super::span::*;
use crate::runtime::function::Result;
use crate::runtime::object::{Object, ObjectPtr};
use crate::runtime::{
array::Array,
function::{self, Function, ToFunction},
string::String as TString,
};
/// The diagnostic interface to TVM, used for reporting and rendering
/// diagnostic information by the compiler. This module exposes
/// three key abstractions: a Diagnostic, the DiagnosticContext,
/// and the DiagnosticRenderer.
use tvm_macros::{external, Object};
pub mod codespan;
external! {
#[name("runtime.ArrayGetItem")]
fn get_renderer() -> DiagnosticRenderer;
#[name("diagnostics.DiagnosticRenderer")]
fn diagnostic_renderer(func: Function) -> DiagnosticRenderer;
#[name("diagnostics.Emit")]
fn emit(ctx: DiagnosticContext, diagnostic: Diagnostic) -> ();
#[name("diagnostics.DiagnosticContextDefault")]
fn diagnostic_context_default(module: IRModule) -> DiagnosticContext;
#[name("diagnostics.DiagnosticContextRender")]
fn diagnostic_context_render(ctx: DiagnosticContext) -> ();
#[name("diagnostics.DiagnosticRendererRender")]
fn diagnositc_renderer_render(renderer: DiagnosticRenderer, ctx: DiagnosticContext) -> ();
#[name("diagnostics.ClearRenderer")]
fn clear_renderer() -> ();
}
/// The diagnostic level, controls the printing of the message.
#[repr(C)]
#[derive(PartialEq, Eq, Debug)] | Warning = 30,
Note = 40,
Help = 50,
}
/// A compiler diagnostic.
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "Diagnostic"]
#[type_key = "Diagnostic"]
pub struct DiagnosticNode {
pub base: Object,
/// The level.
pub level: DiagnosticLevel,
/// The span at which to report an error.
pub span: Span,
/// The diagnostic message.
pub message: TString,
}
impl Diagnostic {
pub fn new(level: DiagnosticLevel, span: Span, message: TString) -> Diagnostic {
let node = DiagnosticNode {
base: Object::base::<DiagnosticNode>(),
level,
span,
message,
};
ObjectPtr::new(node).into()
}
pub fn bug(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Bug, span)
}
pub fn error(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Error, span)
}
pub fn warning(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Warning, span)
}
pub fn note(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Note, span)
}
pub fn help(span: Span) -> DiagnosticBuilder {
DiagnosticBuilder::new(DiagnosticLevel::Help, span)
}
}
/// A wrapper around std::stringstream to build a diagnostic.
pub struct DiagnosticBuilder {
/// The level.
pub level: DiagnosticLevel,
/// The span of the diagnostic.
pub span: Span,
/// The in progress message.
pub message: String,
}
impl DiagnosticBuilder {
pub fn new(level: DiagnosticLevel, span: Span) -> DiagnosticBuilder {
DiagnosticBuilder {
level,
span,
message: "".into(),
}
}
}
/// Display diagnostics in a given display format.
///
/// A diagnostic renderer is responsible for converting the
/// raw diagnostics into consumable output.
///
/// For example the terminal renderer will render a sequence
/// of compiler diagnostics to std::out and std::err in
/// a human readable form.
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "DiagnosticRenderer"]
#[type_key = "DiagnosticRenderer"]
/// A diagnostic renderer, which given a diagnostic context produces a "rendered"
/// form of the diagnostics for either human or computer consumption.
pub struct DiagnosticRendererNode {
/// The base type.
pub base: Object,
// TODO(@jroesch): we can't easily exposed packed functions due to
// memory layout
// missing field here
}
impl DiagnosticRenderer {
/// Render the provided context.
pub fn render(&self, ctx: DiagnosticContext) -> Result<()> {
diagnositc_renderer_render(self.clone(), ctx)
}
}
#[repr(C)]
#[derive(Object, Debug)]
#[ref_name = "DiagnosticContext"]
#[type_key = "DiagnosticContext"]
/// A diagnostic context for recording errors against a source file.
pub struct DiagnosticContextNode {
// The base type.
pub base: Object,
/// The Module to report against.
pub module: IRModule,
/// The set of diagnostics to report.
pub diagnostics: Array<Diagnostic>,
/// The renderer set for the context.
pub renderer: DiagnosticRenderer,
}
/// A diagnostic context which records active errors
/// and contains a renderer.
impl DiagnosticContext {
pub fn new<F>(module: IRModule, render_func: F) -> DiagnosticContext
where
F: Fn(DiagnosticContext) -> () + 'static,
{
let renderer = diagnostic_renderer(render_func.to_function()).unwrap();
let node = DiagnosticContextNode {
base: Object::base::<DiagnosticContextNode>(),
module,
diagnostics: Array::from_vec(vec![]).unwrap(),
renderer,
};
DiagnosticContext(Some(ObjectPtr::new(node)))
}
pub fn default(module: IRModule) -> DiagnosticContext {
diagnostic_context_default(module).unwrap()
}
/// Emit a diagnostic.
pub fn emit(&mut self, diagnostic: Diagnostic) -> Result<()> {
emit(self.clone(), diagnostic)
}
/// Render the errors and raise a DiagnosticError exception.
pub fn render(&mut self) -> Result<()> {
diagnostic_context_render(self.clone())
}
/// Emit a diagnostic and then immediately attempt to render all errors.
pub fn emit_fatal(&mut self, diagnostic: Diagnostic) -> Result<()> {
self.emit(diagnostic)?;
self.render()?;
Ok(())
}
}
/// Override the global diagnostics renderer.
// render_func: Option[Callable[[DiagnosticContext], None]]
// If the render_func is None it will remove the current custom renderer
// and return to default behavior.
fn override_renderer<F>(opt_func: Option<F>) -> Result<()>
where
F: Fn(DiagnosticContext) -> () + 'static,
{
match opt_func {
None => clear_renderer(),
Some(func) => {
let func = func.to_function();
let render_factory = move || diagnostic_renderer(func.clone()).unwrap();
function::register_override(render_factory, "diagnostics.OverrideRenderer", true)?;
Ok(())
}
}
} | pub enum DiagnosticLevel {
Bug = 10,
Error = 20, | random_line_split |
Constants.ts | export const enum CardType {
FaceUp,
FaceDown,
Hand
};
export const enum CardState {
Default,
Playable,
Invalid
};
export const SUIT: any = {
SPADE: 0,
DIAMOND: 1,
CLOVER: 2,
HEART: 3
};
export const FACE: any = {
JACK: 11,
QUEEN: 12,
KING: 13,
ACE: 14
};
export const SPECIAL: any = {
RESET: 2,
INVISIBLE: 3,
BURN: 10,
REVERSE: 7
};
export const CARD: any = {
HEIGHT: 100, | },
HOVER: {
COLOR: 'rgb(0,200,0)'
},
SELECTED: {
COLOR: 'rgb(139,0,0)'
}
};
export const DECK: any = {
MAX_RENDER: 3,
X: -90,
Y: -35
};
export const PILE: any = {
MAX_RENDER: 3,
X: 10,
Y: -35
};
export const PLAYER: any = {
X: -80,
Y: 300,
FACEUP_DIST: 120,
CARD_SPREAD: 85,
FACEUP_X_OFF: 3,
FACEUP_Y_OFF: 4
};
export const GAME: any = {
PLAYERS: 4,
DELAY: 500,
DELAY2: 800
};
export const BOARD: any = {
COLOR: 'rgb(8, 132, 36)'
};
export const MESSAGE: any = {
ZONE1: {
x: -120,
y: 90
},
ZONE2: {
x: -250,
y: 270
},
FONT: '20px serif',
COLOR: 'black'
};
export const DEBUG = false;
export const LOG = true; | WIDTH: 80,
BORDER_COLOR: 'black',
INVISIBLE: {
COLOR: 'red',
OPACITY: 0.3 | random_line_split |
ete_extract.py | # #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import
from __future__ import print_function
from .common import src_tree_iterator
DESC = ""
def populate_args(extract_args_p):
extract_args = extract_args_p.add_argument_group('TREE EDIT OPTIONS')
extract_args.add_argument("--orthologs", dest="orthologs",
nargs="*",
help="")
extract_args.add_argument("--duplications", dest="duplications",
action="store_true",
help="")
def run(args):
from .. import Tree, PhyloTree
for nw in src_tree_iterator(args):
if args.orthologs is not None:
t = PhyloTree(nw)
for e in t.get_descendant_evol_events():
| print(e.in_seqs, e.out_seqs) | conditional_block | |
ete_extract.py | # #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import
from __future__ import print_function
from .common import src_tree_iterator
DESC = ""
def | (extract_args_p):
extract_args = extract_args_p.add_argument_group('TREE EDIT OPTIONS')
extract_args.add_argument("--orthologs", dest="orthologs",
nargs="*",
help="")
extract_args.add_argument("--duplications", dest="duplications",
action="store_true",
help="")
def run(args):
from .. import Tree, PhyloTree
for nw in src_tree_iterator(args):
if args.orthologs is not None:
t = PhyloTree(nw)
for e in t.get_descendant_evol_events():
print(e.in_seqs, e.out_seqs)
| populate_args | identifier_name |
ete_extract.py | # #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import
from __future__ import print_function
from .common import src_tree_iterator
DESC = ""
def populate_args(extract_args_p):
extract_args = extract_args_p.add_argument_group('TREE EDIT OPTIONS')
extract_args.add_argument("--orthologs", dest="orthologs",
nargs="*",
help="")
extract_args.add_argument("--duplications", dest="duplications",
action="store_true",
help="")
def run(args):
| from .. import Tree, PhyloTree
for nw in src_tree_iterator(args):
if args.orthologs is not None:
t = PhyloTree(nw)
for e in t.get_descendant_evol_events():
print(e.in_seqs, e.out_seqs) | identifier_body | |
ete_extract.py | # #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import |
from .common import src_tree_iterator
DESC = ""
def populate_args(extract_args_p):
extract_args = extract_args_p.add_argument_group('TREE EDIT OPTIONS')
extract_args.add_argument("--orthologs", dest="orthologs",
nargs="*",
help="")
extract_args.add_argument("--duplications", dest="duplications",
action="store_true",
help="")
def run(args):
from .. import Tree, PhyloTree
for nw in src_tree_iterator(args):
if args.orthologs is not None:
t = PhyloTree(nw)
for e in t.get_descendant_evol_events():
print(e.in_seqs, e.out_seqs) | from __future__ import print_function | random_line_split |
index.ts | // Copyright 2018, Google, LLC.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*! THIS FILE IS AUTO-GENERATED */
import {getAPI, GoogleConfigurable} from 'googleapis-common';
import {ml_v1} from './v1';
export const VERSIONS = {
'v1': ml_v1.Ml, | export function ml(options: ml_v1.Options): ml_v1.Ml;
export function ml<T = ml_v1.Ml>(
this: GoogleConfigurable, versionOrOptions: 'v1'|ml_v1.Options) {
return getAPI<T>('ml', versionOrOptions, VERSIONS, this);
} | };
export function ml(version: 'v1'): ml_v1.Ml; | random_line_split |
index.ts | // Copyright 2018, Google, LLC.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*! THIS FILE IS AUTO-GENERATED */
import {getAPI, GoogleConfigurable} from 'googleapis-common';
import {ml_v1} from './v1';
export const VERSIONS = {
'v1': ml_v1.Ml,
};
export function ml(version: 'v1'): ml_v1.Ml;
export function ml(options: ml_v1.Options): ml_v1.Ml;
export function ml<T = ml_v1.Ml>(
this: GoogleConfigurable, versionOrOptions: 'v1'|ml_v1.Options) | {
return getAPI<T>('ml', versionOrOptions, VERSIONS, this);
} | identifier_body | |
index.ts | // Copyright 2018, Google, LLC.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*! THIS FILE IS AUTO-GENERATED */
import {getAPI, GoogleConfigurable} from 'googleapis-common';
import {ml_v1} from './v1';
export const VERSIONS = {
'v1': ml_v1.Ml,
};
export function ml(version: 'v1'): ml_v1.Ml;
export function ml(options: ml_v1.Options): ml_v1.Ml;
export function | <T = ml_v1.Ml>(
this: GoogleConfigurable, versionOrOptions: 'v1'|ml_v1.Options) {
return getAPI<T>('ml', versionOrOptions, VERSIONS, this);
}
| ml | identifier_name |
index.js | import React from 'react';
import ReactDOM from 'react-dom';
import _ from 'underscore';
import babel from 'babel-core/browser';
import esprima from 'esprima';
import escodegen from 'escodegen';
import estraverse from 'estraverse';
import Codemirror from 'react-codemirror';
import classNames from 'classnames';
import { iff, default as globalUtils } from 'app/utils/globalUtils';
import './styles/app.less';
import 'react-codemirror/node_modules/codemirror/lib/codemirror.css';
import 'react-codemirror/node_modules/codemirror/theme/material.css';
import 'app/modules/JsxMode';
const localStorage = window.localStorage;
const TAB_SOURCE = 'SOURCE';
const TAB_TRANSCODE = 'TRANSCODE';
const LiveDemoApp = React.createClass({
getInitialState() {
return {
sourceCode: '',
transCode: '',
transError: '',
tab: TAB_SOURCE,
func: function() { }
};
},
componentWillMount() {
this._setSource(localStorage.getItem('sourceCode') || '');
},
componentDidMount() {
this._renderPreview();
},
| () {
this._renderPreview();
},
render() {
const {
sourceCode,
transCode,
tab,
transError
} = this.state;
const showSource = (tab === TAB_SOURCE);
const cmOptions = {
lineNumbers: true,
readOnly: !showSource,
mode: 'jsx',
theme: 'material',
tabSize: 2,
smartIndent: true,
indentWithTabs: false
};
const srcTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': showSource
});
const transTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': !showSource
});
console.log((transCode || transError));
return (
<div className='otsLiveDemoApp'>
<div className='otsLiveDemoApp-tabs'>
<button className={srcTabClassName} onClick={this._onSrcClick}>Source</button>
<button className={transTabClassName} onClick={this._onTransClick}>Transcode</button>
</div>
<div className='otsLiveDemoApp-src'>
<Codemirror
value={showSource ? sourceCode : (transCode || transError)}
onChange={this._onChangeEditor}
options={cmOptions}
/>
</div>
</div>
);
},
_onChangeEditor(value) {
const { tab } = this.state;
if (tab === TAB_SOURCE) {
this._setSource(value);
}
},
_onSrcClick() {
this.setState({
tab: TAB_SOURCE
});
},
_onTransClick() {
this.setState({
tab: TAB_TRANSCODE
});
},
_setSource(sourceCode) {
localStorage.setItem('sourceCode', sourceCode);
const dependencies = [];
let transCode;
let transError;
try {
const es5trans = babel.transform(sourceCode);
let uniqueId = 0;
estraverse.replace(es5trans.ast.program, {
enter(node, parent) {
if (
node.type === 'CallExpression' &&
node.callee.type === 'Identifier' &&
node.callee.name === 'require' &&
node.arguments.length === 1 &&
node.arguments[0].type === 'Literal'
) {
const dep = {
identifier: '__DEPENDENCY_'+ (uniqueId++) ,
depName: node.arguments[0].value
};
dependencies.push(dep);
return {
name: dep.identifier,
type: 'Identifier'
};
}
else if (
node.type === 'AssignmentExpression' &&
node.left.type === 'MemberExpression' &&
node.left.object.type === 'Identifier' &&
node.left.object.name === 'module' &&
node.left.property.type === 'Identifier' &&
node.left.property.name === 'exports'
) {
return {
type: 'ReturnStatement',
argument: node.right
}
}
}
});
transCode = escodegen.generate(es5trans.ast.program);
}
catch (e) {
const msg = 'Error transpiling source code: ';
transError = msg + e.toString();
globalUtils.error(msg, e);
}
this.setState({
sourceCode,
transCode,
transError
});
if (transCode) {
try {
const fnConstArgs = [{ what: 'aaa'}].concat(dependencies.map((dep) => {
return dep.identifier;
}));
fnConstArgs.push('exports');
fnConstArgs.push(transCode);
this.setState({
func: new (Function.prototype.bind.apply(Function, fnConstArgs))
});
}
catch(e) {
console.error('Runtime Error', e);
}
}
},
_renderPreview() {
const { func } = this.state;
const { Component, error } = (() => {
try {
return {
Component: func(React, {})
};
}
catch(e) {
return {
error: e
};
}
})();
try {
if (Component) {
ReactDOM.render(<Component />, document.getElementById('preview'));
}
else if (error) {
ReactDOM.render(<div className='otsLiveDemoApp-error'>{error.toString()}</div>, document.getElementById('preview'));
}
}
catch (e) {
globalUtils.error('Fatal error rendering preview: ', e);
}
}
});
ReactDOM.render(<LiveDemoApp />, document.getElementById('editor'));
// const newProgram = {
// type: 'Program',
// body: [
// {
// type: 'CallExpression',
// callee: {
// type: 'FunctionExpression',
// id: null,
// params: dependencies.map((dep) => {
// return {
// type: 'Identifier',
// name: dep.identifier
// }
// }),
// body: {
// type: 'BlockStatement',
// body: es5trans.ast.program.body
// }
// },
// arguments: []
// }
// ]
// }; | componentDidUpdate | identifier_name |
index.js | import React from 'react';
import ReactDOM from 'react-dom';
import _ from 'underscore';
import babel from 'babel-core/browser';
import esprima from 'esprima';
import escodegen from 'escodegen';
import estraverse from 'estraverse';
import Codemirror from 'react-codemirror';
import classNames from 'classnames';
import { iff, default as globalUtils } from 'app/utils/globalUtils';
import './styles/app.less';
import 'react-codemirror/node_modules/codemirror/lib/codemirror.css';
import 'react-codemirror/node_modules/codemirror/theme/material.css';
import 'app/modules/JsxMode';
const localStorage = window.localStorage;
const TAB_SOURCE = 'SOURCE';
const TAB_TRANSCODE = 'TRANSCODE';
const LiveDemoApp = React.createClass({
getInitialState() {
return {
sourceCode: '',
transCode: '',
transError: '',
tab: TAB_SOURCE,
func: function() { }
};
},
componentWillMount() {
this._setSource(localStorage.getItem('sourceCode') || '');
},
componentDidMount() {
this._renderPreview();
},
componentDidUpdate() {
this._renderPreview();
},
render() {
const {
sourceCode,
transCode,
tab,
transError
} = this.state;
const showSource = (tab === TAB_SOURCE);
const cmOptions = {
lineNumbers: true,
readOnly: !showSource,
mode: 'jsx',
theme: 'material',
tabSize: 2,
smartIndent: true,
indentWithTabs: false
};
const srcTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': showSource
});
const transTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': !showSource
});
console.log((transCode || transError));
return (
<div className='otsLiveDemoApp'>
<div className='otsLiveDemoApp-tabs'>
<button className={srcTabClassName} onClick={this._onSrcClick}>Source</button>
<button className={transTabClassName} onClick={this._onTransClick}>Transcode</button>
</div>
<div className='otsLiveDemoApp-src'>
<Codemirror
value={showSource ? sourceCode : (transCode || transError)}
onChange={this._onChangeEditor}
options={cmOptions}
/>
</div>
</div>
);
},
_onChangeEditor(value) {
const { tab } = this.state;
if (tab === TAB_SOURCE) {
this._setSource(value);
}
},
_onSrcClick() | ,
_onTransClick() {
this.setState({
tab: TAB_TRANSCODE
});
},
_setSource(sourceCode) {
localStorage.setItem('sourceCode', sourceCode);
const dependencies = [];
let transCode;
let transError;
try {
const es5trans = babel.transform(sourceCode);
let uniqueId = 0;
estraverse.replace(es5trans.ast.program, {
enter(node, parent) {
if (
node.type === 'CallExpression' &&
node.callee.type === 'Identifier' &&
node.callee.name === 'require' &&
node.arguments.length === 1 &&
node.arguments[0].type === 'Literal'
) {
const dep = {
identifier: '__DEPENDENCY_'+ (uniqueId++) ,
depName: node.arguments[0].value
};
dependencies.push(dep);
return {
name: dep.identifier,
type: 'Identifier'
};
}
else if (
node.type === 'AssignmentExpression' &&
node.left.type === 'MemberExpression' &&
node.left.object.type === 'Identifier' &&
node.left.object.name === 'module' &&
node.left.property.type === 'Identifier' &&
node.left.property.name === 'exports'
) {
return {
type: 'ReturnStatement',
argument: node.right
}
}
}
});
transCode = escodegen.generate(es5trans.ast.program);
}
catch (e) {
const msg = 'Error transpiling source code: ';
transError = msg + e.toString();
globalUtils.error(msg, e);
}
this.setState({
sourceCode,
transCode,
transError
});
if (transCode) {
try {
const fnConstArgs = [{ what: 'aaa'}].concat(dependencies.map((dep) => {
return dep.identifier;
}));
fnConstArgs.push('exports');
fnConstArgs.push(transCode);
this.setState({
func: new (Function.prototype.bind.apply(Function, fnConstArgs))
});
}
catch(e) {
console.error('Runtime Error', e);
}
}
},
_renderPreview() {
const { func } = this.state;
const { Component, error } = (() => {
try {
return {
Component: func(React, {})
};
}
catch(e) {
return {
error: e
};
}
})();
try {
if (Component) {
ReactDOM.render(<Component />, document.getElementById('preview'));
}
else if (error) {
ReactDOM.render(<div className='otsLiveDemoApp-error'>{error.toString()}</div>, document.getElementById('preview'));
}
}
catch (e) {
globalUtils.error('Fatal error rendering preview: ', e);
}
}
});
ReactDOM.render(<LiveDemoApp />, document.getElementById('editor'));
// const newProgram = {
// type: 'Program',
// body: [
// {
// type: 'CallExpression',
// callee: {
// type: 'FunctionExpression',
// id: null,
// params: dependencies.map((dep) => {
// return {
// type: 'Identifier',
// name: dep.identifier
// }
// }),
// body: {
// type: 'BlockStatement',
// body: es5trans.ast.program.body
// }
// },
// arguments: []
// }
// ]
// }; | {
this.setState({
tab: TAB_SOURCE
});
} | identifier_body |
index.js | import React from 'react';
import ReactDOM from 'react-dom';
import _ from 'underscore';
import babel from 'babel-core/browser';
import esprima from 'esprima';
import escodegen from 'escodegen';
import estraverse from 'estraverse';
import Codemirror from 'react-codemirror';
import classNames from 'classnames';
import { iff, default as globalUtils } from 'app/utils/globalUtils';
import './styles/app.less';
import 'react-codemirror/node_modules/codemirror/lib/codemirror.css';
import 'react-codemirror/node_modules/codemirror/theme/material.css';
import 'app/modules/JsxMode';
const localStorage = window.localStorage;
const TAB_SOURCE = 'SOURCE';
const TAB_TRANSCODE = 'TRANSCODE';
const LiveDemoApp = React.createClass({
getInitialState() {
return {
sourceCode: '',
transCode: '',
transError: '',
tab: TAB_SOURCE,
func: function() { }
};
},
componentWillMount() {
this._setSource(localStorage.getItem('sourceCode') || '');
},
componentDidMount() {
this._renderPreview();
},
componentDidUpdate() {
this._renderPreview();
},
render() {
const {
sourceCode,
transCode,
tab,
transError
} = this.state;
const showSource = (tab === TAB_SOURCE);
const cmOptions = {
lineNumbers: true,
readOnly: !showSource,
mode: 'jsx',
theme: 'material',
tabSize: 2,
smartIndent: true,
indentWithTabs: false
};
const srcTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': showSource
});
const transTabClassName = classNames({
'otsLiveDemoApp-tab': true,
'otsLiveDemoApp-active': !showSource
});
console.log((transCode || transError));
return (
<div className='otsLiveDemoApp'>
<div className='otsLiveDemoApp-tabs'>
<button className={srcTabClassName} onClick={this._onSrcClick}>Source</button>
<button className={transTabClassName} onClick={this._onTransClick}>Transcode</button>
</div>
<div className='otsLiveDemoApp-src'>
<Codemirror
value={showSource ? sourceCode : (transCode || transError)}
onChange={this._onChangeEditor}
options={cmOptions}
/>
</div>
</div>
);
},
_onChangeEditor(value) {
const { tab } = this.state;
if (tab === TAB_SOURCE) {
this._setSource(value);
}
},
_onSrcClick() {
this.setState({
tab: TAB_SOURCE
});
},
_onTransClick() {
this.setState({
tab: TAB_TRANSCODE
});
},
_setSource(sourceCode) {
localStorage.setItem('sourceCode', sourceCode);
const dependencies = [];
let transCode;
let transError;
try {
const es5trans = babel.transform(sourceCode);
let uniqueId = 0;
estraverse.replace(es5trans.ast.program, {
enter(node, parent) {
if (
node.type === 'CallExpression' &&
node.callee.type === 'Identifier' &&
node.callee.name === 'require' &&
node.arguments.length === 1 &&
node.arguments[0].type === 'Literal'
) {
const dep = {
identifier: '__DEPENDENCY_'+ (uniqueId++) ,
depName: node.arguments[0].value
};
dependencies.push(dep);
return {
name: dep.identifier,
type: 'Identifier'
};
}
else if (
node.type === 'AssignmentExpression' &&
node.left.type === 'MemberExpression' &&
node.left.object.type === 'Identifier' &&
node.left.object.name === 'module' &&
node.left.property.type === 'Identifier' &&
node.left.property.name === 'exports'
) {
return {
type: 'ReturnStatement',
argument: node.right
}
}
}
});
transCode = escodegen.generate(es5trans.ast.program);
}
catch (e) {
const msg = 'Error transpiling source code: ';
transError = msg + e.toString();
globalUtils.error(msg, e);
}
this.setState({
sourceCode,
transCode,
transError
});
if (transCode) {
try {
const fnConstArgs = [{ what: 'aaa'}].concat(dependencies.map((dep) => {
return dep.identifier;
}));
fnConstArgs.push('exports');
fnConstArgs.push(transCode);
this.setState({
func: new (Function.prototype.bind.apply(Function, fnConstArgs))
});
}
catch(e) {
console.error('Runtime Error', e);
}
}
},
_renderPreview() {
const { func } = this.state;
const { Component, error } = (() => {
try {
return {
Component: func(React, {})
};
}
catch(e) {
return {
error: e
};
}
})();
try {
if (Component) {
ReactDOM.render(<Component />, document.getElementById('preview'));
}
else if (error) |
}
catch (e) {
globalUtils.error('Fatal error rendering preview: ', e);
}
}
});
ReactDOM.render(<LiveDemoApp />, document.getElementById('editor'));
// const newProgram = {
// type: 'Program',
// body: [
// {
// type: 'CallExpression',
// callee: {
// type: 'FunctionExpression',
// id: null,
// params: dependencies.map((dep) => {
// return {
// type: 'Identifier',
// name: dep.identifier
// }
// }),
// body: {
// type: 'BlockStatement',
// body: es5trans.ast.program.body
// }
// },
// arguments: []
// }
// ]
// }; | {
ReactDOM.render(<div className='otsLiveDemoApp-error'>{error.toString()}</div>, document.getElementById('preview'));
} | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.