hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a0e3fa6cd9a1adff0cb564e3ccc11f5d4ae339b
| 3,327
|
py
|
Python
|
examples/v2/sqladmin/python/sqladmin.py
|
cdmello-g/deploymentmanager-samples
|
9931f1c8881f353e0156a809da1b909604053fa0
|
[
"Apache-2.0"
] | 930
|
2015-07-30T19:02:32.000Z
|
2022-03-09T18:02:11.000Z
|
examples/v2/sqladmin/python/sqladmin.py
|
cdmello-g/deploymentmanager-samples
|
9931f1c8881f353e0156a809da1b909604053fa0
|
[
"Apache-2.0"
] | 426
|
2015-05-06T17:25:41.000Z
|
2022-03-09T16:13:41.000Z
|
examples/v2/sqladmin/python/sqladmin.py
|
cdmello-g/deploymentmanager-samples
|
9931f1c8881f353e0156a809da1b909604053fa0
|
[
"Apache-2.0"
] | 812
|
2015-05-06T15:43:05.000Z
|
2022-03-24T18:35:37.000Z
|
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates a Cloud SQL instance and database."""
import json
def GenerateConfig(context):
    """Generate YAML resource configuration for a Cloud SQL deployment.

    Creates a master Cloud SQL instance, a database on it, an optional
    chain of read replicas, and an optional failover replica.

    Args:
      context: the Deployment Manager context object; must expose
          ``env['deployment']`` and the properties ``region``, ``tier``,
          ``readReplicas``, ``replicationType`` and ``failOver``.

    Returns:
      dict: a ``{'resources': [...]}`` mapping consumable by Deployment
      Manager.
    """
    deployment_name = context.env['deployment']
    instance_name = deployment_name + '-instance'
    replica_name = deployment_name + '-replica'
    database_name = deployment_name + '-db'
    failover_name = deployment_name + '-failover'

    # Reference to the master instance name, resolved by Deployment Manager
    # at deploy time; built once instead of re-joining it per resource.
    master_ref = '$(ref.' + instance_name + '.name)'

    resources = [{
        'name': instance_name,
        'type': 'gcp-types/sqladmin-v1beta4:instances',
        'properties': {
            'region': context.properties['region'],
            'settings': {
                'tier': context.properties['tier'],
                # Binary logging must be enabled for replication/failover.
                'backupConfiguration': {
                    'binaryLogEnabled': True,
                    'enabled': True
                }
            }
        }
    }, {
        'name': database_name,
        'type': 'gcp-types/sqladmin-v1beta4:databases',
        'properties': {
            'name': database_name,
            'instance': master_ref,
            'charset': 'utf8'
        }
    }]

    # Chain each replica onto the previously created resource so the SQL
    # Admin API never receives concurrent operations on the same master.
    dependency = database_name
    for n in range(context.properties['readReplicas']):
        name = '{}-{}'.format(replica_name, n)
        resources.append({
            'name': name,
            'type': 'gcp-types/sqladmin-v1beta4:instances',
            'metadata': {
                'dependsOn': [dependency]
            },
            'properties': {
                'region': context.properties['region'],
                'masterInstanceName': master_ref,
                'settings': {
                    'tier': context.properties['tier'],
                    'replicationType': context.properties['replicationType']
                }
            }
        })
        dependency = name

    if context.properties['failOver']:
        # The failover target depends on the last replica (or the database
        # when no replicas were requested).
        resources.append({
            'name': failover_name,
            'type': 'gcp-types/sqladmin-v1beta4:instances',
            'metadata': {
                'dependsOn': [dependency]
            },
            'properties': {
                'replicaConfiguration': {'failoverTarget': True},
                'region': context.properties['region'],
                'masterInstanceName': master_ref,
                'settings': {
                    'tier': context.properties['tier'],
                    'replicationType': context.properties['replicationType']
                }
            }
        })

    return {'resources': resources}
| 39.141176
| 92
| 0.526601
|
4a0e40b29fe4d69ea1824340bddb117dc9a78dec
| 155
|
py
|
Python
|
venv/lib/python3.8/site-packages/crispy_forms/templates/bootstrap4/layout/field_errors_block.html.py
|
Solurix/Flashcards-Django
|
03c863f6722936093927785a2b20b6b668bb743d
|
[
"MIT"
] | 1
|
2021-05-16T03:20:23.000Z
|
2021-05-16T03:20:23.000Z
|
venv/lib/python3.8/site-packages/crispy_forms/templates/bootstrap4/layout/field_errors_block.html.py
|
Solurix/Flashcards-Django
|
03c863f6722936093927785a2b20b6b668bb743d
|
[
"MIT"
] | 4
|
2021-03-30T14:06:09.000Z
|
2021-09-22T19:26:31.000Z
|
venv/lib/python3.8/site-packages/crispy_forms/templates/bootstrap4/layout/field_errors_block.html.py
|
Solurix/Flashcards-Django
|
03c863f6722936093927785a2b20b6b668bb743d
|
[
"MIT"
] | null | null | null |
BB BBBBBBBBBBBBBBBB BBB BBBBBBBBBBBB
BBB BBBBB BB BBBBBBBBBBBB
XX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
BBBBBB
BBBBB
| 25.833333
| 70
| 0.812903
|
4a0e41348af36c5f41c60e6be48882ddb2971da8
| 41,393
|
py
|
Python
|
discord/ext/menus/__init__.py
|
Tari-dev/discord-ext-menus
|
881b4dbe8aeb3ca70a7cba5757210b03764f589b
|
[
"MIT"
] | 1
|
2022-02-04T10:20:31.000Z
|
2022-02-04T10:20:31.000Z
|
discord/ext/menus/__init__.py
|
Tari-dev/discord-ext-menus
|
881b4dbe8aeb3ca70a7cba5757210b03764f589b
|
[
"MIT"
] | null | null | null |
discord/ext/menus/__init__.py
|
Tari-dev/discord-ext-menus
|
881b4dbe8aeb3ca70a7cba5757210b03764f589b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2019 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import asyncio
import discord
import itertools
import inspect
import bisect
import logging
import re
from collections import OrderedDict, namedtuple
# Needed for the setup.py script
__version__ = '1.0.0-a'
# consistency with the `discord` namespaced logging
log = logging.getLogger(__name__)
class MenuError(Exception):
    """Base exception for every error raised by the menus extension."""
    pass
class CannotEmbedLinks(MenuError):
    """Raised when the bot lacks the Embed Links permission in the channel."""

    def __init__(self):
        super().__init__('Bot does not have embed links permission in this channel.')
class CannotSendMessages(MenuError):
    """Raised when the bot cannot send messages in the target channel."""

    def __init__(self):
        super().__init__('Bot cannot send messages in this channel.')
class CannotAddReactions(MenuError):
    """Raised when the bot lacks the Add Reactions permission in the channel."""

    def __init__(self):
        super().__init__('Bot cannot add reactions in this channel.')
class CannotReadMessageHistory(MenuError):
    """Raised when the bot lacks the Read Message History permission."""

    def __init__(self):
        super().__init__('Bot does not have Read Message History permissions in this channel.')
class Position:
    """Orders buttons for the initial reaction placement.

    Ordering is by ``(bucket, number)``: the bucket groups buttons
    (:class:`First` uses bucket 0, plain positions bucket 1 and
    :class:`Last` bucket 2) and ``number`` orders within a bucket.

    Attributes
    -----------
    number: :class:`int`
        The ordering number within the bucket.
    bucket: :class:`int`
        The bucket this position sorts into. Defaults to ``1``.
    """

    __slots__ = ('number', 'bucket')

    def __init__(self, number, *, bucket=1):
        self.bucket = bucket
        self.number = number

    def __lt__(self, other):
        # Both operands are checked because the other comparison methods
        # call this one unbound with swapped arguments, so ``self`` here
        # may actually be the foreign operand.
        if not isinstance(other, Position) or not isinstance(self, Position):
            return NotImplemented
        return (self.bucket, self.number) < (other.bucket, other.number)

    def __eq__(self, other):
        return isinstance(other, Position) and other.bucket == self.bucket and other.number == self.number

    def __hash__(self):
        # Defining __eq__ alone implicitly sets __hash__ to None and makes
        # instances unhashable; restore a hash consistent with equality so
        # positions can live in sets and dict keys.
        return hash((self.bucket, self.number))

    def __le__(self, other):
        r = Position.__lt__(other, self)
        if r is NotImplemented:
            return NotImplemented
        return not r

    def __gt__(self, other):
        return Position.__lt__(other, self)

    def __ge__(self, other):
        r = Position.__lt__(self, other)
        if r is NotImplemented:
            return NotImplemented
        return not r

    def __repr__(self):
        return '<{0.__class__.__name__}: {0.number}>'.format(self)
class Last(Position):
    """A :class:`Position` that sorts after all regular positions (bucket 2)."""

    __slots__ = ()

    def __init__(self, number=0):
        super().__init__(number, bucket=2)
class First(Position):
    """A :class:`Position` that sorts before all regular positions (bucket 0)."""

    __slots__ = ()

    def __init__(self, number=0):
        super().__init__(number, bucket=0)
# Matches a custom emoji in either raw (``name:id``) or full
# (``<a:name:id>``) form, capturing the animated flag, name and ID.
_custom_emoji = re.compile(r'<?(?P<animated>a)?:?(?P<name>[A-Za-z0-9\_]+):(?P<id>[0-9]{13,20})>?')

def _cast_emoji(obj, *, _custom_emoji=_custom_emoji):
    """Coerce *obj* into a :class:`discord.PartialEmoji`.

    Accepts an existing partial emoji (returned untouched), a custom-emoji
    string, or any other string treated as a unicode emoji name.
    """
    if isinstance(obj, discord.PartialEmoji):
        return obj

    text = str(obj)
    match = _custom_emoji.match(text)
    if match is None:
        # Not a custom emoji — assume plain unicode, which has no ID.
        return discord.PartialEmoji(name=text, id=None, animated=False)

    info = match.groupdict()
    return discord.PartialEmoji(
        name=info['name'],
        animated=bool(info['animated']),
        id=int(info['id']),
    )
class Button:
    """Represents a reaction-style button for the :class:`Menu`.

    There are two ways to create this, the first being through explicitly
    creating this class and the second being through the decorator interface,
    :func:`button`.

    The action must have both a ``self`` and a ``payload`` parameter
    of type :class:`discord.RawReactionActionEvent`.

    Attributes
    ------------
    emoji: :class:`discord.PartialEmoji`
        The emoji to use as the button. Note that passing a string will
        transform it into a :class:`discord.PartialEmoji`.
    action
        A coroutine that is called when the button is pressed.
    skip_if: Optional[Callable[[:class:`Menu`], :class:`bool`]]
        A callable that detects whether it should be skipped.
        A skipped button does not show up in the reaction list
        and will not be processed.
    position: :class:`Position`
        The position the button should have in the initial order.
        Note that since Discord does not actually maintain reaction
        order, this is a best effort attempt to have an order until
        the user restarts their client. Defaults to ``Position(0)``.
    lock: :class:`bool`
        Whether the button should lock all other buttons from being processed
        until this button is done. Defaults to ``True``.
    """

    __slots__ = ('emoji', '_action', '_skip_if', 'position', 'lock')

    def __init__(self, emoji, action, *, skip_if=None, position=None, lock=True):
        # ``action`` and ``skip_if`` assignments route through the property
        # setters below, which validate and unwrap bound methods.
        self.emoji = _cast_emoji(emoji)
        self.action = action
        self.skip_if = skip_if
        self.position = position or Position(0)
        self.lock = lock

    @property
    def skip_if(self):
        return self._skip_if

    @skip_if.setter
    def skip_if(self, value):
        if value is None:
            # Default predicate: never skip the button.
            self._skip_if = lambda x: False
            return

        try:
            menu_self = value.__self__
        except AttributeError:
            self._skip_if = value
        else:
            # Unfurl the method to not be bound
            if not isinstance(menu_self, Menu):
                raise TypeError('skip_if bound method must be from Menu not %r' % menu_self)

            self._skip_if = value.__func__

    @property
    def action(self):
        return self._action

    @action.setter
    def action(self, value):
        try:
            menu_self = value.__self__
        except AttributeError:
            pass
        else:
            # Unfurl the method to not be bound
            if not isinstance(menu_self, Menu):
                raise TypeError('action bound method must be from Menu not %r' % menu_self)

            value = value.__func__

        if not inspect.iscoroutinefunction(value):
            raise TypeError('action must be a coroutine not %r' % value)

        self._action = value

    def __call__(self, menu, payload):
        # Silently no-op when the button is currently skipped for this menu.
        if self.skip_if(menu):
            return
        return self._action(menu, payload)

    def __str__(self):
        return str(self.emoji)

    def is_valid(self, menu):
        # A button is "valid" (shown and processed) when not skipped.
        return not self.skip_if(menu)
def button(emoji, **kwargs):
    """Denotes a method to be a button for the :class:`Menu`.

    The methods being wrapped must have both a ``self`` and a ``payload``
    parameter of type :class:`discord.RawReactionActionEvent`.

    The keyword arguments are forwarded to the :class:`Button` constructor.

    Example
    ---------
    .. code-block:: python3

        class MyMenu(Menu):
            async def send_initial_message(self, ctx, channel):
                return await channel.send(f'Hello {ctx.author}')

            @button('\\N{THUMBS UP SIGN}')
            async def on_thumbs_up(self, payload):
                await self.message.edit(content=f'Thanks {self.ctx.author}!')

            @button('\\N{THUMBS DOWN SIGN}')
            async def on_thumbs_down(self, payload):
                await self.message.edit(content=f"That's not nice {self.ctx.author}...")

    Parameters
    ------------
    emoji: Union[:class:`str`, :class:`discord.PartialEmoji`]
        The emoji to use for the button.
    """
    def decorator(func):
        # Tag the method; _MenuMeta later collects anything carrying these
        # attributes and turns it into a Button.
        setattr(func, '__menu_button__', _cast_emoji(emoji))
        setattr(func, '__menu_button_kwargs__', kwargs)
        return func

    return decorator
class _MenuMeta(type):
    """Metaclass that collects :func:`button`-decorated methods.

    Collected callables are stored, in definition order, on the class as
    ``__menu_buttons__``. Pass ``inherit_buttons=False`` as a class keyword
    argument to ignore buttons defined on base classes.
    """

    @classmethod
    def __prepare__(cls, name, bases, **kwargs):
        # This is needed to maintain member order for the buttons
        return OrderedDict()

    def __new__(cls, name, bases, attrs, **kwargs):
        buttons = []
        new_cls = super().__new__(cls, name, bases, attrs)

        inherit_buttons = kwargs.pop('inherit_buttons', True)
        if inherit_buttons:
            # walk MRO to get all buttons even in subclasses
            # (reversed so base-class buttons come before subclass ones)
            for base in reversed(new_cls.__mro__):
                for elem, value in base.__dict__.items():
                    try:
                        value.__menu_button__
                    except AttributeError:
                        continue
                    else:
                        buttons.append(value)
        else:
            # Only consider buttons declared directly on this class body.
            for elem, value in attrs.items():
                try:
                    value.__menu_button__
                except AttributeError:
                    continue
                else:
                    buttons.append(value)

        new_cls.__menu_buttons__ = buttons
        return new_cls

    def get_buttons(cls):
        # Materialise the collected callables into Button instances,
        # keyed by emoji and preserving definition order.
        buttons = OrderedDict()
        for func in cls.__menu_buttons__:
            emoji = func.__menu_button__
            buttons[emoji] = Button(emoji, func, **func.__menu_button_kwargs__)
        return buttons
class Menu(metaclass=_MenuMeta):
    r"""An interface that allows handling menus by using reactions as buttons.

    Buttons should be marked with the :func:`button` decorator. Please note that
    this expects the methods to have a single parameter, the ``payload``. This
    ``payload`` is of type :class:`discord.RawReactionActionEvent`.

    Attributes
    ------------
    timeout: :class:`float`
        The timeout to wait between button inputs.
    delete_message_after: :class:`bool`
        Whether to delete the message after the menu interaction is done.
    clear_reactions_after: :class:`bool`
        Whether to clear reactions after the menu interaction is done.
        Note that :attr:`delete_message_after` takes priority over this attribute.
        If the bot does not have permissions to clear the reactions then it will
        delete the reactions one by one.
    check_embeds: :class:`bool`
        Whether to verify embed permissions as well.
    ctx: Optional[:class:`commands.Context`]
        The context that started this pagination session or ``None`` if it hasn't
        been started yet.
    bot: Optional[:class:`commands.Bot`]
        The bot that is running this pagination session or ``None`` if it hasn't
        been started yet.
    message: Optional[:class:`discord.Message`]
        The message that has been sent for handling the menu. This is the returned
        message of :meth:`send_initial_message`. You can set it in order to avoid
        calling :meth:`send_initial_message`\, if for example you have a pre-existing
        message you want to attach a menu to.
    """

    def __init__(self, *, timeout=180.0, delete_message_after=False,
                 clear_reactions_after=False, check_embeds=False, message=None):
        self.timeout = timeout
        self.delete_message_after = delete_message_after
        self.clear_reactions_after = clear_reactions_after
        self.check_embeds = check_embeds
        # Set by _verify_permissions once the target channel is known.
        self._can_remove_reactions = False
        # Background tasks owned by this session (internal loop + reaction adder).
        self.__tasks = []
        self._running = True
        self.message = message
        self.ctx = None
        self.bot = None
        self._author_id = None
        # Emoji -> Button mapping built by the metaclass from @button methods.
        self._buttons = self.__class__.get_buttons()
        # Serialises button actions when Button.lock is True.
        self._lock = asyncio.Lock()
        # Signalled when the internal loop finishes; awaited by start(wait=True).
        self._event = asyncio.Event()

    @discord.utils.cached_property
    def buttons(self):
        """Retrieves the buttons that are to be used for this menu session.

        Skipped buttons are not in the resulting dictionary.

        Returns
        ---------
        Mapping[:class:`str`, :class:`Button`]
            A mapping of button emoji to the actual button class.
        """
        # Cached; ``del self.buttons`` elsewhere invalidates the cache.
        buttons = sorted(self._buttons.values(), key=lambda b: b.position)
        return {
            button.emoji: button
            for button in buttons
            if button.is_valid(self)
        }

    def add_button(self, button, *, react=False):
        """|maybecoro|

        Adds a button to the list of buttons.

        If the menu has already been started then the button will
        not be added unless the ``react`` keyword-only argument is
        set to ``True``. Note that when this happens this function
        will need to be awaited.

        If a button with the same emoji is added then it is overridden.

        .. warning::

            If the menu has started and the reaction is added, the order
            property of the newly added button is ignored due to an API
            limitation with Discord and the fact that reaction ordering
            is not guaranteed.

        Parameters
        ------------
        button: :class:`Button`
            The button to add.
        react: :class:`bool`
            Whether to add a reaction if the menu has been started.
            Note this turns the method into a coroutine.

        Raises
        ---------
        MenuError
            Tried to use ``react`` when the menu had not been started.
        discord.HTTPException
            Adding the reaction failed.
        """
        self._buttons[button.emoji] = button

        if react:
            # A non-empty task list means the menu session is running.
            if self.__tasks:
                async def wrapped():
                    # Add the reaction
                    try:
                        await self.message.add_reaction(button.emoji)
                    except discord.HTTPException:
                        raise
                    else:
                        # Update the cache to have the value
                        self.buttons[button.emoji] = button

                return wrapped()

            async def dummy():
                raise MenuError('Menu has not been started yet')

            return dummy()

    def remove_button(self, emoji, *, react=False):
        """|maybecoro|

        Removes a button from the list of buttons.

        This operates similar to :meth:`add_button`.

        Parameters
        ------------
        emoji: Union[:class:`Button`, :class:`str`]
            The emoji or the button to remove.
        react: :class:`bool`
            Whether to remove the reaction if the menu has been started.
            Note this turns the method into a coroutine.

        Raises
        ---------
        MenuError
            Tried to use ``react`` when the menu had not been started.
        discord.HTTPException
            Removing the reaction failed.
        """
        if isinstance(emoji, Button):
            emoji = emoji.emoji
        else:
            emoji = _cast_emoji(emoji)

        self._buttons.pop(emoji, None)

        if react:
            if self.__tasks:
                async def wrapped():
                    # Remove the reaction from being processable
                    # Removing it from the cache first makes it so the check
                    # doesn't get triggered.
                    self.buttons.pop(emoji, None)
                    await self.message.remove_reaction(emoji, self.__me)

                return wrapped()

            async def dummy():
                raise MenuError('Menu has not been started yet')

            return dummy()

    def clear_buttons(self, *, react=False):
        """|maybecoro|

        Removes all buttons from the list of buttons.

        If the menu has already been started then the buttons will
        not be removed unless the ``react`` keyword-only argument is
        set to ``True``. Note that when this happens this function
        will need to be awaited.

        Parameters
        ------------
        react: :class:`bool`
            Whether to clear the reactions if the menu has been started.
            Note this turns the method into a coroutine.

        Raises
        ---------
        MenuError
            Tried to use ``react`` when the menu had not been started.
        discord.HTTPException
            Clearing the reactions failed.
        """
        self._buttons.clear()

        if react:
            if self.__tasks:
                async def wrapped():
                    # A fast path if we have permissions
                    if self._can_remove_reactions:
                        try:
                            del self.buttons
                        except AttributeError:
                            pass
                        finally:
                            await self.message.clear_reactions()
                        return

                    # Remove the cache (the next call will have the updated buttons)
                    reactions = list(self.buttons.keys())
                    try:
                        del self.buttons
                    except AttributeError:
                        pass

                    for reaction in reactions:
                        await self.message.remove_reaction(reaction, self.__me)

                return wrapped()

            async def dummy():
                raise MenuError('Menu has not been started yet')

            return dummy()

    def should_add_reactions(self):
        """:class:`bool`: Whether to add reactions to this menu session."""
        # Truthy when at least one non-skipped button exists.
        return len(self.buttons)

    def _verify_permissions(self, ctx, channel, permissions):
        # Raises the appropriate MenuError subclass when the bot lacks a
        # permission required to run this session in *channel*.
        if not permissions.send_messages:
            raise CannotSendMessages()

        if self.check_embeds and not permissions.embed_links:
            raise CannotEmbedLinks()

        self._can_remove_reactions = permissions.manage_messages
        if self.should_add_reactions():
            if not permissions.add_reactions:
                raise CannotAddReactions()
            if not permissions.read_message_history:
                raise CannotReadMessageHistory()

    def reaction_check(self, payload):
        """The function that is used to check whether the payload should be processed.
        This is passed to :meth:`discord.ext.commands.Bot.wait_for <Bot.wait_for>`.

        There should be no reason to override this function for most users.

        Parameters
        ------------
        payload: :class:`discord.RawReactionActionEvent`
            The payload to check.

        Returns
        ---------
        :class:`bool`
            Whether the payload should be processed.
        """
        if payload.message_id != self.message.id:
            return False
        # Only the invoking author and the bot owner(s) may drive the menu.
        if payload.user_id not in {self.bot.owner_id, self._author_id, *self.bot.owner_ids}:
            return False

        return payload.emoji in self.buttons

    async def _internal_loop(self):
        # Core event loop: waits for reaction add/remove events that pass
        # reaction_check and dispatches them to update() until stopped,
        # timed out, or cancelled.
        try:
            self.__timed_out = False
            loop = self.bot.loop
            # Ensure the name exists for the cancellation handling
            tasks = []
            while self._running:
                tasks = [
                    asyncio.ensure_future(self.bot.wait_for('raw_reaction_add', check=self.reaction_check)),
                    asyncio.ensure_future(self.bot.wait_for('raw_reaction_remove', check=self.reaction_check))
                ]
                done, pending = await asyncio.wait(tasks, timeout=self.timeout, return_when=asyncio.FIRST_COMPLETED)
                for task in pending:
                    task.cancel()

                if len(done) == 0:
                    raise asyncio.TimeoutError()

                # Exception will propagate if e.g. cancelled or timed out
                payload = done.pop().result()
                loop.create_task(self.update(payload))

                # NOTE: Removing the reaction ourselves after it's been done when
                # mixed with the checks above is incredibly racy.
                # There is no guarantee when the MESSAGE_REACTION_REMOVE event will
                # be called, and chances are when it does happen it'll always be
                # after the remove_reaction HTTP call has returned back to the caller
                # which means that the stuff above will catch the reaction that we
                # just removed.

                # For the future sake of myself and to save myself the hours in the future
                # consider this my warning.
        except asyncio.TimeoutError:
            self.__timed_out = True
        finally:
            self._event.set()

            # Cancel any outstanding tasks (if any)
            for task in tasks:
                task.cancel()

            try:
                await self.finalize(self.__timed_out)
            except Exception:
                pass
            finally:
                self.__timed_out = False

            # Can't do any requests if the bot is closed
            if self.bot.is_closed():
                return

            # Wrap it in another block anyway just to ensure
            # nothing leaks out during clean-up
            try:
                if self.delete_message_after:
                    return await self.message.delete()

                if self.clear_reactions_after:
                    if self._can_remove_reactions:
                        return await self.message.clear_reactions()

                    for button_emoji in self.buttons:
                        try:
                            await self.message.remove_reaction(button_emoji, self.__me)
                        except discord.HTTPException:
                            continue
            except Exception:
                pass

    async def update(self, payload):
        """|coro|

        Updates the menu after an event has been received.

        Parameters
        -----------
        payload: :class:`discord.RawReactionActionEvent`
            The reaction event that triggered this update.
        """
        button = self.buttons[payload.emoji]
        if not self._running:
            return

        try:
            if button.lock:
                # Serialise locked buttons so only one runs at a time.
                async with self._lock:
                    if self._running:
                        await button(self, payload)
            else:
                await button(self, payload)
        except Exception as exc:
            await self.on_menu_button_error(exc)

    async def on_menu_button_error(self, exc):
        """|coro|

        Handles reporting of errors while updating the menu from events.
        The default behaviour is to log the exception.

        This may be overriden by subclasses.

        Parameters
        ----------
        exc: :class:`Exception`
            The exception which was raised during a menu update.
        """
        # some users may wish to take other actions during or beyond logging
        # which would require awaiting, such as stopping an erroring menu.
        log.exception("Unhandled exception during menu update.", exc_info=exc)

    async def start(self, ctx, *, channel=None, wait=False):
        """|coro|

        Starts the interactive menu session.

        Parameters
        -----------
        ctx: :class:`Context`
            The invocation context to use.
        channel: :class:`discord.abc.Messageable`
            The messageable to send the message to. If not given
            then it defaults to the channel in the context.
        wait: :class:`bool`
            Whether to wait until the menu is completed before
            returning back to the caller.

        Raises
        -------
        MenuError
            An error happened when verifying permissions.
        discord.HTTPException
            Adding a reaction failed.
        """
        # Clear the buttons cache and re-compute if possible.
        try:
            del self.buttons
        except AttributeError:
            pass

        self.bot = bot = ctx.bot
        self.ctx = ctx
        self._author_id = ctx.author.id
        channel = channel or ctx.channel
        # DMs have no guild; fall back to the bot user for permission checks.
        me = channel.guild.me if hasattr(channel, 'guild') else ctx.bot.user
        permissions = channel.permissions_for(me)
        self.__me = discord.Object(id=me.id)
        self._verify_permissions(ctx, channel, permissions)
        self._event.clear()
        msg = self.message
        if msg is None:
            self.message = msg = await self.send_initial_message(ctx, channel)

        if self.should_add_reactions():
            # Start the task first so we can listen to reactions before doing anything
            for task in self.__tasks:
                task.cancel()
            self.__tasks.clear()

            self._running = True
            self.__tasks.append(bot.loop.create_task(self._internal_loop()))

            async def add_reactions_task():
                for emoji in self.buttons:
                    await msg.add_reaction(emoji)

            self.__tasks.append(bot.loop.create_task(add_reactions_task()))

            if wait:
                await self._event.wait()

    async def finalize(self, timed_out):
        """|coro|

        A coroutine that is called when the menu loop has completed
        its run. This is useful if some asynchronous clean-up is
        required after the fact.

        Parameters
        --------------
        timed_out: :class:`bool`
            Whether the menu completed due to timing out.
        """
        return

    async def send_initial_message(self, ctx, channel):
        """|coro|

        Sends the initial message for the menu session.

        This is internally assigned to the :attr:`message` attribute.

        Subclasses must implement this if they don't set the
        :attr:`message` attribute themselves before starting the
        menu via :meth:`start`.

        Parameters
        ------------
        ctx: :class:`Context`
            The invocation context to use.
        channel: :class:`discord.abc.Messageable`
            The messageable to send the message to.

        Returns
        --------
        :class:`discord.Message`
            The message that has been sent.
        """
        raise NotImplementedError

    def stop(self):
        """Stops the internal loop."""
        self._running = False
        for task in self.__tasks:
            task.cancel()
        self.__tasks.clear()
class PageSource:
    """An interface representing a menu page's data source for the actual menu page.

    Subclasses must implement the backing resource along with the following methods:

    - :meth:`get_page`
    - :meth:`is_paginating`
    - :meth:`format_page`
    """

    async def _prepare_once(self):
        # Runs prepare() exactly once per instance, using a name-mangled
        # sentinel attribute so subclasses cannot clash with it.
        try:
            # Don't feel like formatting hasattr with
            # the proper mangling
            # read this as follows:
            # if hasattr(self, '__prepare')
            # except that it works as you expect
            self.__prepare
        except AttributeError:
            await self.prepare()
            self.__prepare = True

    async def prepare(self):
        """|coro|

        A coroutine that is called after initialisation
        but before anything else to do some asynchronous set up
        as well as the one provided in ``__init__``.

        By default this does nothing.

        This coroutine will only be called once.
        """
        return

    def is_paginating(self):
        """An abstract method that notifies the :class:`MenuPages` whether or not
        to start paginating. This signals whether to add reactions or not.

        Subclasses must implement this.

        Returns
        --------
        :class:`bool`
            Whether to trigger pagination.
        """
        raise NotImplementedError

    def get_max_pages(self):
        """An optional abstract method that retrieves the maximum number of pages
        this page source has. Useful for UX purposes.

        The default implementation returns ``None``.

        Returns
        --------
        Optional[:class:`int`]
            The maximum number of pages required to properly
            paginate the elements, if given.
        """
        return None

    async def get_page(self, page_number):
        """|coro|

        An abstract method that retrieves an object representing the object to format.

        Subclasses must implement this.

        .. note::

            The page_number is zero-indexed between [0, :meth:`get_max_pages`),
            if there is a maximum number of pages.

        Parameters
        -----------
        page_number: :class:`int`
            The page number to access.

        Returns
        ---------
        Any
            The object represented by that page.
            This is passed into :meth:`format_page`.
        """
        raise NotImplementedError

    async def format_page(self, menu, page):
        """|maybecoro|

        An abstract method to format the page.

        This method must return one of the following types.

        If this method returns a ``str`` then it is interpreted as returning
        the ``content`` keyword argument in :meth:`discord.Message.edit`
        and :meth:`discord.abc.Messageable.send`.

        If this method returns a :class:`discord.Embed` then it is interpreted
        as returning the ``embed`` keyword argument in :meth:`discord.Message.edit`
        and :meth:`discord.abc.Messageable.send`.

        If this method returns a ``dict`` then it is interpreted as the
        keyword-arguments that are used in both :meth:`discord.Message.edit`
        and :meth:`discord.abc.Messageable.send`. The two of interest are
        ``embed`` and ``content``.

        Parameters
        ------------
        menu: :class:`Menu`
            The menu that wants to format this page.
        page: Any
            The page returned by :meth:`PageSource.get_page`.

        Returns
        ---------
        Union[:class:`str`, :class:`discord.Embed`, :class:`dict`]
            See above.
        """
        raise NotImplementedError
class MenuPages(Menu):
"""A special type of Menu dedicated to pagination.
Attributes
------------
current_page: :class:`int`
The current page that we are in. Zero-indexed
between [0, :attr:`PageSource.max_pages`).
"""
def __init__(self, source, **kwargs):
self._source = source
self.current_page = 0
super().__init__(**kwargs)
@property
def source(self):
""":class:`PageSource`: The source where the data comes from."""
return self._source
async def change_source(self, source):
"""|coro|
Changes the :class:`PageSource` to a different one at runtime.
Once the change has been set, the menu is moved to the first
page of the new source if it was started. This effectively
changes the :attr:`current_page` to 0.
Raises
--------
TypeError
A :class:`PageSource` was not passed.
"""
if not isinstance(source, PageSource):
raise TypeError('Expected {0!r} not {1.__class__!r}.'.format(PageSource, source))
self._source = source
self.current_page = 0
if self.message is not None:
await source._prepare_once()
await self.show_page(0)
def should_add_reactions(self):
return self._source.is_paginating()
async def _get_kwargs_from_page(self, page):
value = await discord.utils.maybe_coroutine(self._source.format_page, self, page)
if isinstance(value, dict):
return value
elif isinstance(value, str):
return { 'content': value, 'embed': None }
elif isinstance(value, discord.Embed):
return { 'embed': value, 'content': None }
async def show_page(self, page_number):
page = await self._source.get_page(page_number)
self.current_page = page_number
kwargs = await self._get_kwargs_from_page(page)
await self.message.edit(**kwargs)
async def send_initial_message(self, ctx, channel):
"""|coro|
The default implementation of :meth:`Menu.send_initial_message`
for the interactive pagination session.
This implementation shows the first page of the source.
"""
page = await self._source.get_page(0)
kwargs = await self._get_kwargs_from_page(page)
return await channel.send(**kwargs)
async def start(self, ctx, *, channel=None, wait=False):
await self._source._prepare_once()
await super().start(ctx, channel=channel, wait=wait)
async def show_checked_page(self, page_number):
max_pages = self._source.get_max_pages()
try:
if max_pages is None:
# If it doesn't give maximum pages, it cannot be checked
await self.show_page(page_number)
elif max_pages > page_number >= 0:
await self.show_page(page_number)
except IndexError:
# An error happened that can be handled, so ignore it.
pass
async def show_current_page(self):
if self._source.is_paginating():
await self.show_page(self.current_page)
def _skip_double_triangle_buttons(self):
max_pages = self._source.get_max_pages()
if max_pages is None:
return True
return max_pages <= 2
    @button('<:ro_fast_backward:938115672339263508>',
            position=First(0), skip_if=_skip_double_triangle_buttons)
    async def go_to_first_page(self, payload):
        """Go to the first page (hidden when the source has <= 2 pages)."""
        await self.show_page(0)
    @button('<:ro_arrow_backword:938118714237272094>', position=First(1))
    async def go_to_previous_page(self, payload):
        """Go to the previous page (ignored when already at the first page)."""
        await self.show_checked_page(self.current_page - 1)
    @button('<:ro_arrow_forward:938115620359274526>', position=Last(0))
    async def go_to_next_page(self, payload):
        """Go to the next page (ignored when already at the last page)."""
        await self.show_checked_page(self.current_page + 1)
    @button('<:ro_fast_forward:938113099272175626>',
            position=Last(1), skip_if=_skip_double_triangle_buttons)
    async def go_to_last_page(self, payload):
        """Go to the last page (hidden when the source has <= 2 pages)."""
        # The call here is safe because it's guarded by skip_if
        await self.show_page(self._source.get_max_pages() - 1)
    @button('<:ro_stop:938117135597703208>', position=Last(2))
    async def stop_pages(self, payload):
        """Stop the pagination session."""
        self.stop()
class ListPageSource(PageSource):
    """A data source backed by an in-memory sequence.

    Formatting is left entirely to the user: implement :meth:`format_page`.

    Attributes
    ------------
    entries: Sequence[Any]
        The sequence of items to paginate.
    per_page: :class:`int`
        How many elements are in a page.
    """
    def __init__(self, entries, *, per_page):
        self.entries = entries
        self.per_page = per_page
        # Round the page count up when the last page is only partly filled.
        full_pages, remainder = divmod(len(entries), per_page)
        self._max_pages = full_pages + (1 if remainder else 0)
    def is_paginating(self):
        """:class:`bool`: Whether pagination is required."""
        return len(self.entries) > self.per_page
    def get_max_pages(self):
        """:class:`int`: The maximum number of pages required to paginate this sequence."""
        return self._max_pages
    async def get_page(self, page_number):
        """Return a single entry when ``per_page == 1``, else a page-sized slice.

        Returns
        ---------
        Union[Any, List[Any]]
            The data returned.
        """
        if self.per_page == 1:
            return self.entries[page_number]
        start = page_number * self.per_page
        return self.entries[start:start + self.per_page]
# Lightweight record pairing a group key with one chunk of that group's items.
_GroupByEntry = namedtuple('_GroupByEntry', 'key items')
class GroupByPageSource(ListPageSource):
    """A data source for grouped by sequence of items.
    This inherits from :class:`ListPageSource`.
    This page source does not handle any sort of formatting, leaving it up
    to the user. To do so, implement the :meth:`format_page` method.
    Parameters
    ------------
    entries: Sequence[Any]
        The sequence of items to paginate and group.
    key: Callable[[Any], Any]
        A key function to do the grouping with.
    sort: :class:`bool`
        Whether to sort the sequence before grouping it.
        The elements are sorted according to the ``key`` function passed.
    per_page: :class:`int`
        How many elements to have per page of the group.
    """
    def __init__(self, entries, *, key, per_page, sort=True):
        # itertools.groupby only merges *adjacent* equal keys, hence the pre-sort.
        self.__entries = entries if not sort else sorted(entries, key=key)
        nested = []
        self.nested_per_page = per_page
        for k, g in itertools.groupby(self.__entries, key=key):
            g = list(g)
            if not g:
                continue
            size = len(g)
            # Chunk each group into per_page-sized _GroupByEntry records.
            nested.extend(_GroupByEntry(key=k, items=g[i:i+per_page]) for i in range(0, size, per_page))
        # Each pre-chunked record is exactly one page of the outer source.
        super().__init__(nested, per_page=1)
    async def get_page(self, page_number):
        # Pages were chunked in __init__; one _GroupByEntry per page.
        return self.entries[page_number]
    async def format_page(self, menu, entry):
        """An abstract method to format the page.
        This works similar to the :meth:`ListPageSource.format_page` except
        the return type of the ``entry`` parameter is documented.
        Parameters
        ------------
        menu: :class:`Menu`
            The menu that wants to format this page.
        entry
            A namedtuple with ``(key, items)`` representing the key of the
            group by function and a sequence of paginated items within that
            group.
        Returns
        ---------
        :class:`dict`
            A dictionary representing keyword-arguments to pass to
            the message related calls.
        """
        raise NotImplementedError
def _aiter(obj, *, _isasync=inspect.iscoroutinefunction):
cls = obj.__class__
try:
async_iter = cls.__aiter__
except AttributeError:
raise TypeError('{0.__name__!r} object is not an async iterable'.format(cls))
async_iter = async_iter(obj)
if _isasync(async_iter):
raise TypeError('{0.__name__!r} object is not an async iterable'.format(cls))
return async_iter
class AsyncIteratorPageSource(PageSource):
    """A data source for data backed by an asynchronous iterator.
    This page source does not handle any sort of formatting, leaving it up
    to the user. To do so, implement the :meth:`format_page` method.
    Parameters
    ------------
    iter: AsyncIterator[Any]
        The asynchronous iterator to paginate.
    per_page: :class:`int`
        How many elements to have per page.
    """
    def __init__(self, iterator, *, per_page):
        self.iterator = _aiter(iterator)
        self.per_page = per_page
        # _cache grows lazily as pages are requested; _exhausted flips once
        # the underlying iterator raises StopAsyncIteration.
        self._exhausted = False
        self._cache = []
    async def _iterate(self, n):
        # Pull at most n more elements from the iterator into the cache.
        it = self.iterator
        cache = self._cache
        for i in range(0, n):
            try:
                elem = await it.__anext__()
            except StopAsyncIteration:
                self._exhausted = True
                break
            else:
                cache.append(elem)
    async def prepare(self, *, _aiter=_aiter):
        # Iterate until we have at least a bit more single page
        # NOTE(review): the `_aiter` keyword-only default is unused in this
        # body -- confirm whether it can be dropped.
        await self._iterate(self.per_page + 1)
    def is_paginating(self):
        """:class:`bool`: Whether pagination is required."""
        return len(self._cache) > self.per_page
    async def _get_single_page(self, page_number):
        # per_page == 1 path: page_number indexes the cache directly.
        if page_number < 0:
            raise IndexError('Negative page number.')
        if not self._exhausted and len(self._cache) <= page_number:
            await self._iterate((page_number + 1) - len(self._cache))
        return self._cache[page_number]
    async def _get_page_range(self, page_number):
        # per_page > 1 path: return the slice [base, base + per_page).
        if page_number < 0:
            raise IndexError('Negative page number.')
        base = page_number * self.per_page
        max_base = base + self.per_page
        if not self._exhausted and len(self._cache) <= max_base:
            await self._iterate((max_base + 1) - len(self._cache))
        entries = self._cache[base:max_base]
        if not entries and max_base > len(self._cache):
            # Requested a page fully beyond the (now exhausted) data.
            raise IndexError('Went too far')
        return entries
    async def get_page(self, page_number):
        """Returns either a single element of the sequence or
        a slice of the sequence.
        If :attr:`per_page` is set to ``1`` then this returns a single
        element. Otherwise it returns at most :attr:`per_page` elements.
        Returns
        ---------
        Union[Any, List[Any]]
            The data returned.
        """
        if self.per_page == 1:
            return await self._get_single_page(page_number)
        else:
            return await self._get_page_range(page_number)
| 33.900901
| 116
| 0.601865
|
4a0e415749c832725ca2a111d666c1495a7aacd2
| 827
|
py
|
Python
|
Mission_to_Mars/app.py
|
emerette/web-scraping-challenge
|
117b59155726ac376cbbdbac295f20009ef80a88
|
[
"ADSL"
] | null | null | null |
Mission_to_Mars/app.py
|
emerette/web-scraping-challenge
|
117b59155726ac376cbbdbac295f20009ef80a88
|
[
"ADSL"
] | null | null | null |
Mission_to_Mars/app.py
|
emerette/web-scraping-challenge
|
117b59155726ac376cbbdbac295f20009ef80a88
|
[
"ADSL"
] | null | null | null |
# Import Libraries
from flask import Flask, render_template, redirect
from flask_pymongo import PyMongo
import scrape_mars
# Flask App
app = Flask(__name__)
# Mongo connection; expects a local mongod serving database "mars_DB".
mongo = PyMongo(app, uri="mongodb://localhost:27017/mars_DB")
# Route that renders index.html
@app.route("/")
def index():
    """Render the home page with the most recently scraped Mars data."""
    scraped_data = mongo.db.scraped_data.find_one()
    return render_template("index.html", scraped_data = scraped_data)
@app.route("/scrape")
def scrape():
    """Run the scraper, upsert the result into Mongo, then redirect home.

    Bug fix: the scraper module is imported as ``scrape_mars``; the previous
    ``scrape_to_mars`` name raised NameError on every request.
    """
    collection = mongo.db.scraped_data
    fresh_data = scrape_mars.scrape()
    # upsert=True creates the document on the first run.
    collection.update({}, fresh_data, upsert=True)
    return redirect("/", code=302)
@app.route("/img")
def img():
    """Render the page with the featured-image data.

    Bug fix: the template previously received the undefined name
    ``hemisphere`` instead of the ``img_data`` just fetched.
    """
    img_data = scrape_mars.featured_image()
    return render_template("index.html", img_data=img_data)
if __name__ == "__main__":
    # Debug server for local development only.
    app.run(debug=True)
| 25.060606
| 69
| 0.721886
|
4a0e4185eaf82e6a02dca62c091613886e408d84
| 1,470
|
py
|
Python
|
tests/__init__.py
|
csaranbalaji/kedro
|
8968f95bf22322c19f8ddc8838589a2d1384b01c
|
[
"Apache-2.0"
] | 2
|
2020-07-27T12:20:55.000Z
|
2020-08-15T17:06:15.000Z
|
tests/__init__.py
|
csaranbalaji/kedro
|
8968f95bf22322c19f8ddc8838589a2d1384b01c
|
[
"Apache-2.0"
] | 1
|
2021-05-11T19:22:42.000Z
|
2021-05-11T19:22:42.000Z
|
tests/__init__.py
|
csaranbalaji/kedro
|
8968f95bf22322c19f8ddc8838589a2d1384b01c
|
[
"Apache-2.0"
] | 1
|
2021-08-22T08:16:22.000Z
|
2021-08-22T08:16:22.000Z
|
# Copyright 2020 QuantumBlack Visual Analytics Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND
# NONINFRINGEMENT. IN NO EVENT WILL THE LICENSOR OR OTHER CONTRIBUTORS
# BE LIABLE FOR ANY CLAIM, DAMAGES, OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF, OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# The QuantumBlack Visual Analytics Limited ("QuantumBlack") name and logo
# (either separately or in combination, "QuantumBlack Trademarks") are
# trademarks of QuantumBlack. The License does not grant you any right or
# license to the QuantumBlack Trademarks. You may not use the QuantumBlack
# Trademarks or any confusingly similar mark as a trademark for your product,
# or use the QuantumBlack Trademarks in any other manner that might cause
# confusion in the marketplace, including but not limited to in advertising,
# on websites, or on software.
#
# See the License for the specific language governing permissions and
# limitations under the License.
import kedro.config.default_logger # noqa
| 49
| 77
| 0.779592
|
4a0e419f44ca1a1bb3aced94e757853aef800ce3
| 832
|
py
|
Python
|
util/chplenv/chpl_make.py
|
vasslitvinov/chapel
|
53feadfc838e7f36ef863b1cd8cd5200d2d92ec8
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-01-22T07:51:04.000Z
|
2019-01-22T07:51:04.000Z
|
util/chplenv/chpl_make.py
|
vasslitvinov/chapel
|
53feadfc838e7f36ef863b1cd8cd5200d2d92ec8
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2016-11-29T18:21:05.000Z
|
2016-12-07T15:59:31.000Z
|
util/chplenv/chpl_make.py
|
vasslitvinov/chapel
|
53feadfc838e7f36ef863b1cd8cd5200d2d92ec8
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-03T22:17:08.000Z
|
2020-05-03T22:17:08.000Z
|
#!/usr/bin/env python
from distutils.spawn import find_executable
import os
import sys
chplenv_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.abspath(chplenv_dir))
import chpl_platform, overrides
from utils import memoize
@memoize
def get():
    """Return the make utility to use: CHPL_MAKE override if set, otherwise
    a per-platform default (gmake preferred on generic platforms)."""
    override = overrides.get('CHPL_MAKE')
    if override:
        return override
    platform_val = chpl_platform.get()
    if platform_val.startswith('cygwin') or platform_val == 'darwin':
        return 'make'
    if platform_val.startswith('linux'):
        # Prefer gmake when available, fall back to plain make.
        return 'gmake' if find_executable('gmake') else 'make'
    return 'gmake'
def _main():
    """CLI entry point: print the chosen make utility on stdout."""
    sys.stdout.write("{0}\n".format(get()))
if __name__ == '__main__':
    # Allow running this module directly as a script.
    _main()
| 22.486486
| 73
| 0.626202
|
4a0e41a04f29075988d34977962fdb3424983ba2
| 1,280
|
py
|
Python
|
compile.py
|
TOMODOcom/TOMODO-ChromeExtension
|
fcc44aa79b23fe4ddc6e23c98483b3ee8b8d1593
|
[
"BSD-3-Clause"
] | 1
|
2019-02-05T15:18:37.000Z
|
2019-02-05T15:18:37.000Z
|
compile.py
|
TOMODOcom/TOMODO-ChromeExtension
|
fcc44aa79b23fe4ddc6e23c98483b3ee8b8d1593
|
[
"BSD-3-Clause"
] | null | null | null |
compile.py
|
TOMODOcom/TOMODO-ChromeExtension
|
fcc44aa79b23fe4ddc6e23c98483b3ee8b8d1593
|
[
"BSD-3-Clause"
] | null | null | null |
#! /usr/bin/python
from json import load, dump
from os import chdir, remove, getcwd, popen, system
from os.path import dirname, abspath
from shutil import copytree, copy2, rmtree
from subprocess import call
from sys import argv
def increment_version(version):
    """Increment a dotted version string with per-field carry.

    Each field counts 0-99; incrementing 99 resets it to 0 and carries into
    the field to its left (e.g. "1.2.99" -> "1.3.0"). If every field
    overflows, the result wraps to all zeros.

    Fix: the original relied on Python 2-only behaviour (``map`` returning a
    list and the ``reduce`` builtin); this version behaves identically on
    Python 2 and 3.
    """
    digits = [int(d) for d in version.split(".")]
    for index in range(len(digits) - 1, -1, -1):
        digits[index] += 1
        if digits[index] == 100:
            digits[index] = 0  # carry into the next field to the left
        else:
            break
    return ".".join(str(d) for d in digits)
# Bump the manifest version, rebuild ./compiled_extension from ./extension
# with the appropriate settings.js, then zip it. (Python 2 script.)
chdir(dirname(abspath(__file__)))
with open('./extension/manifest.json') as fp:
    manifest = load(fp)
manifest['version'] = increment_version(manifest['version'])
# NOTE(review): this `with` opens the manifest for *reading* while the dump
# writes through a separate handle that is never explicitly closed -- the
# read handle serves no purpose here; confirm and simplify.
with open('./extension/manifest.json') as fp:
    dump(manifest, open('./extension/manifest.json', 'w'), indent = 4)
print 'compiling version: %s' % manifest['version']
# Best-effort cleanup of previous build artefacts.
try:
    rmtree('./compiled_extension')
except:
    pass
try:
    remove('./extension.zip')
except:
    pass
copytree('./extension', './compiled_extension')
# Pick settings based on the 'dev' CLI flag.
if 'dev' in argv:
    settings_file = './dev_settings.js'
else:
    settings_file = './production_settings.js'
copy2(settings_file, './compiled_extension/settings.js')
print(getcwd()+'/zipit')
system(getcwd()+'/zipit')
| 21.333333
| 70
| 0.648438
|
4a0e42ed0a14898842cda5180e3f08dec3fbb342
| 625
|
py
|
Python
|
post/api/urls.py
|
agiledesign2/drf-blog-post
|
a0dc7457e9e85e6b1a1f3aa81036b81f7054734f
|
[
"MIT"
] | null | null | null |
post/api/urls.py
|
agiledesign2/drf-blog-post
|
a0dc7457e9e85e6b1a1f3aa81036b81f7054734f
|
[
"MIT"
] | null | null | null |
post/api/urls.py
|
agiledesign2/drf-blog-post
|
a0dc7457e9e85e6b1a1f3aa81036b81f7054734f
|
[
"MIT"
] | null | null | null |
from django.urls import path, include
#from rest_framework.routers import DefaultRouter
from post.api import views
# Create a router and register viewsets with it.
#router = DefaultRouter()
#router.register(r"posts", PostViewSet)
#router.register(r"users", UserViewSet)
# The API URLs are now determined automatically by the router.
#urlpatterns = [path("", include(router.urls))]
app_name = 'post'  # URL namespace: reverse as 'post:<name>'
urlpatterns = [
    # List/create endpoint for posts.
    path('post/',
         views.PostList.as_view(),
         name=views.PostList.name
    ),
    # Detail endpoint keyed by slug.
    path('post-detail/<str:slug>/',
         views.PostDetail.as_view(),
         name=views.PostDetail.name
    ),
]
| 27.173913
| 62
| 0.6976
|
4a0e4385feee5b082d039782fa2c1d0dd7885247
| 17,745
|
py
|
Python
|
voicetools/ttslab_make_halfphones.py
|
jkleczar/ttslabdev
|
52a7515734fd59e1a16dece8e2d567a33c435a27
|
[
"BSD-3-Clause"
] | null | null | null |
voicetools/ttslab_make_halfphones.py
|
jkleczar/ttslabdev
|
52a7515734fd59e1a16dece8e2d567a33c435a27
|
[
"BSD-3-Clause"
] | null | null | null |
voicetools/ttslab_make_halfphones.py
|
jkleczar/ttslabdev
|
52a7515734fd59e1a16dece8e2d567a33c435a27
|
[
"BSD-3-Clause"
] | 1
|
2019-02-25T10:27:34.000Z
|
2019-02-25T10:27:34.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Creates a ttslab halfphone catalogue using aligned utterances and
corresponding wave files...
DEMITASSE: THIS NEEDS A SERIOUS REWRITE...
"""
from __future__ import unicode_literals, division, print_function #Py2
__author__ = "Daniel van Niekerk"
__email__ = "dvn.demitasse@gmail.com"
import os
import sys
from collections import defaultdict
from glob import glob
import copy
from tempfile import mkstemp
from ConfigParser import ConfigParser
import numpy as np
from wav2psmfcc import PMExtractor
from make_f0_praat_script import script_writer as F0_PSCWriter
from make_f0_praat import f0filler as F0Filler
import ttslab
import ttslab.hrg as hrg
ttslab.extend(hrg.Utterance, "ufuncs_analysis")
from ttslab.trackfile import Track
SAVE_COMPLETE_UTTS = True
#sometimes the limit needs to be increased to pickle large utts...
BIGGER_RECURSION_LIMIT = 20000 #default is generally 1000
# Working sub-directories, all relative to the CWD the script is run from.
WAV_DIR = "wavs"
PM_DIR = "pm"
LPC_DIR = "lpc"
F0_DIR = "f0"
MCEP_DIR = "mcep"
JOIN_DIR = "joincoef"
UTT_DIR = "utts"
COMPLETE_UTT_DIR = "complete_utts"
# File extensions for each artefact type.
WAV_EXT = "wav"
RES_EXT = "wav"
UTT_EXT = "utt.pickle"
LPC_EXT = "lpc"
MCEP_EXT = "mcep"
# NOTE(review): RES_EXT is assigned twice ("wav" above, "res" here); all
# readers see the final value "res" -- confirm the first assignment is dead.
RES_EXT = "res"
JOIN_EXT = "join"
PM_EXT = "pm"
F0_EXT = "f0"
NAME = "ttslab_make_halfphones.py"
# External Edinburgh Speech Tools binaries invoked via os.system.
SIG2FV_BIN = "sig2fv"
SIGFILTER_BIN = "sigfilter"
WINDOWFACTOR = 1
########################################
## FUNCTIONS
def make_units(voice, utt_dir):
    """Run the 'targetunits' synthesizer process on every saved Utterance in
    *utt_dir* to create the Unit relation that acoustic features attach to.
    Returns the list of processed Utterances.
    """
    print("MAKING UNITS..")
    utts = []
    for uttfilename in sorted(glob(os.path.join(utt_dir, ".".join(["*", UTT_EXT])))):
        print(uttfilename)
        utt = ttslab.fromfile(uttfilename)
        utt = voice.synthesizer(utt, "targetunits") #DEMITASSE voice needs resynth method..
        utts.append(utt)
    return utts
########## ADD_FEATS
def add_feats_to_utt(args):
    """Attach per-halfphone acoustic features (LPC slices, residuals, join
    coefficients, durations) to the Unit items of one utterance.

    *args* is a tuple ``(utt, lpc_dir, joincoef_dir, f0_dir)`` so the
    function can be used with (multiprocessing) map. Returns the utterance.
    """
    u, lpc_dir, joincoef_dir, f0_dir = args
    file_id = u["file_id"]
    print("Processing:", file_id)
    u.fill_startendtimes()
    # Load the per-utterance acoustic artefacts produced by make_features.
    lpctrack = Track()
    lpctrack.load_track(".".join([os.path.join(lpc_dir, file_id), LPC_EXT]))
    restrack = Track()
    restrack.load_wave(".".join([os.path.join(lpc_dir, file_id), RES_EXT]))
    jointrack = ttslab.fromfile(".".join([os.path.join(joincoef_dir, file_id), JOIN_EXT]))
    f0track = Track()
    f0track.load_track(".".join([os.path.join(f0_dir, file_id), F0_EXT]))
    #get boundarytimes: each segment contributes [start, split, end], i.e.
    #two halfphones; split is the closure end if annotated, else the midpoint.
    boundarytimes = []
    durations = []
    starttime = 0.0
    for seg in u.get_relation("Segment"):
        endtime = float(seg["end"])
        if "cl_end" in seg:
            splittime = float(seg["cl_end"])
        else:
            splittime = (endtime + starttime) / 2
            #TODO: should still add 25% split if diphthong...
        boundarytimes.append([starttime, splittime, endtime])
        durations.extend([splittime - starttime, endtime - splittime])
        starttime = endtime
    #convert boundtimes into sample ranges (and flatten):
    lpcsampleranges = []
    f0sampleranges = []
    joinsamples = []
    #DEMITASSE: If not pruning pau halfphones:
    # for bounds in boundarytimes:
    #     lpcsampleranges.extend([lpctrack.get_index_at(bounds[0]),
    #                             lpctrack.get_index_at(bounds[1])])
    #     joinsamples.extend([jointrack.get_sample_at(bounds[0]),
    #                         jointrack.get_sample_at(bounds[1])])
    # lpcsampleranges.append(len(lpctrack))
    # joinsamples.append(jointrack.get_sample_at(len(jointrack)))
    #DEMITASSE: If pruning pau halfphones: drop the first and last halfphone.
    durations = durations[1:-1]
    for i, bounds in enumerate(boundarytimes):
        if i == 0:
            lpcsampleranges.append(lpctrack.index_at(bounds[1]))
            f0sampleranges.append(f0track.index_at(bounds[1]))
            # NOTE(review): bounds are *times* (floats) used directly as an
            # index into jointrack.values -- confirm index_at was intended.
            joinsamples.append(jointrack.values[bounds[1]])
        else:
            lpcsampleranges.extend([lpctrack.index_at(bounds[0]),
                                    lpctrack.index_at(bounds[1])])
            f0sampleranges.extend([f0track.index_at(bounds[0]),
                                   f0track.index_at(bounds[1])])
            joinsamples.extend([jointrack.values[bounds[0]],
                                jointrack.values[bounds[1]]])
    #get pitchperiods at lpc indices
    lpctimes = np.concatenate(([0.0], lpctrack.times))
    pitchperiod = np.diff(lpctimes)
    units = u.get_relation("Unit").as_list()
    assert len(units) == len(lpcsampleranges) - 1
    # NOTE(review): fti0/fti1 (f0 sample indices) are unpacked but unused in
    # the loop body -- confirm whether f0 features were meant to be attached.
    for jc0, jc1, lti0, lti1, fti0, fti1, dur, i in zip(joinsamples[:-1], joinsamples[1:],
                                                        lpcsampleranges[:-1], lpcsampleranges[1:],
                                                        f0sampleranges[:-1], f0sampleranges[1:],
                                                        durations,
                                                        units):
        # print(i["name"], "lpctrack[%s:%s]" % (lti0, lti1), "len(lpctrack)=%s" % len(lpctrack))
        i["left-joincoef"] = jc0
        i["right-joincoef"] = jc1
        i["lpc-coefs"] = lpctrack.slice(lti0, lti1, copy=True) #like python indexing/slicing
        if lti0 == 0:
            i["lpc-coefs"].starttime = 0.0
        else:
            i["lpc-coefs"].starttime = lpctrack.times[lti0 - 1]
        i["lpc-coefs"].zero_starttime()
        i["dur"] = dur
        #For windowfactor=2 (save only samples and assume 16kHz)
        i["residuals"] = restrack.slice(restrack.index_at(lpctrack.times[lti0] - pitchperiod[lti0]),
                                        restrack.index_at(lpctrack.times[lti1] + pitchperiod[lti0])).values
    return u
def add_feats_to_units(utts):
    """Populate the Unit items of every utterance with acoustic features
    loaded from the lpc/joincoef/f0 working directories. Returns *utts*.
    """
    print("ADDING FEATS...")
    lpc_dir = os.path.join(os.getcwd(), LPC_DIR)
    joincoef_dir = os.path.join(os.getcwd(), JOIN_DIR)
    f0_dir = os.path.join(os.getcwd(), F0_DIR)
    # NOTE(review): relies on the eager builtin map (Python 2 -- this file
    # imports ConfigParser); under Python 3 this would build an unconsumed
    # iterator and no work would happen.
    map(add_feats_to_utt,
        [(utt, lpc_dir, joincoef_dir, f0_dir) for utt in utts])
    return utts
########## ADD_FEATS
########## PITCHMARKS
def extract_pitchmarks(args):
    """Extract pitchmarks for one wave file and save them as an EST file.

    *args* is ``(wavfilename, minpitch, maxpitch, defstep, pm_dir)`` so the
    function can be driven by (multiprocessing) map.
    """
    wavfilename, minpitch, maxpitch, defstep, pm_dir = args
    basename = os.path.splitext(os.path.basename(wavfilename))[0]
    print(basename)
    extractor = PMExtractor(minpitch, maxpitch, defstep)
    extractor.get_pmarks(wavfilename)
    extractor.write_est_file(os.path.join(pm_dir, basename + "." + PM_EXT))
def make_pitchmarks(featconfig, wav_dir):
    """Create the pm/ directory and extract 'filled' pitchmarks for every
    wave file, for later pitch-synchronous feature extraction."""
    minpitch = int(featconfig.get("PITCH", "MIN"))
    maxpitch = int(featconfig.get("PITCH", "MAX"))
    defstep = 1 / float(featconfig.get("PITCH", "DEFAULT"))
    pm_dir = os.path.join(os.getcwd(), PM_DIR)
    os.mkdir(pm_dir)
    print("MAKING PITCHMARKS...")
    wavfiles = sorted(glob(os.path.join(wav_dir, "*." + WAV_EXT)))
    map(extract_pitchmarks,
        [(w, minpitch, maxpitch, defstep, pm_dir) for w in wavfiles])
########## PITCHMARKS
########## LPCs
def extract_lpcs(args):
    """Extract LPC coefficients (EST track) and the inverse-filtered residual
    for one wave file, pitch-synchronously, via sig2fv/sigfilter."""
    (wavfilename, lpc_order, preemph_coef, window_factor,
     window_type, lpc_dir, pm_dir) = args
    basename = os.path.splitext(os.path.basename(wavfilename))[0]
    pmfile = os.path.join(pm_dir, basename + "." + PM_EXT)
    lpcfile = os.path.join(lpc_dir, basename + "." + LPC_EXT)
    resfile = os.path.join(lpc_dir, basename + "." + RES_EXT)
    # LPC analysis with Edinburgh Speech Tools' sig2fv.
    cmdstring = " ".join([SIG2FV_BIN,
                          wavfilename,
                          "-o", lpcfile,
                          "-otype est",
                          "-lpc_order", lpc_order,
                          "-coefs lpc",
                          "-pm", pmfile,
                          "-preemph", preemph_coef,
                          "-factor", window_factor,
                          "-window_type", window_type])
    print(cmdstring)
    os.system(cmdstring)
    # Residual via inverse LPC filtering with sigfilter.
    cmdstring = " ".join([SIGFILTER_BIN,
                          wavfilename,
                          "-o", resfile,
                          "-otype riff",
                          "-lpcfilter", lpcfile,
                          "-inv_filter"])
    print(cmdstring)
    os.system(cmdstring)
def make_lpcs(featconfig, wav_dir):
    """Create the lpc/ directory and extract LPC coefficients plus residuals
    for every wave file (the raw material for synthesis units)."""
    lpc_order = featconfig.get("SIG2FV_LPC", "LPC_ORDER")
    preemph_coef = featconfig.get("SIG2FV_LPC", "PREEMPH_COEF")
    window_factor = featconfig.get("SIG2FV_LPC", "WINDOW_FACTOR")
    window_type = featconfig.get("SIG2FV_LPC", "WINDOW_TYPE")
    lpc_dir = os.path.join(os.getcwd(), LPC_DIR)
    pm_dir = os.path.join(os.getcwd(), PM_DIR)
    os.mkdir(lpc_dir)
    print("MAKING LPCS...")
    wavfiles = sorted(glob(os.path.join(wav_dir, "*." + WAV_EXT)))
    map(extract_lpcs,
        [(w, lpc_order, preemph_coef, window_factor, window_type,
          lpc_dir, pm_dir) for w in wavfiles])
########## LPCs
########## F0s
def extract_f0s(args):
    """Extract an F0 contour for one wave file using Praat, aligned to the
    previously extracted pitchmarks, and write it in Festival format."""
    wavfilename, praatscript, pm_dir, f0_dir = args
    basename = os.path.splitext(os.path.basename(wavfilename))[0]
    print(basename)
    pmfile = os.path.join(pm_dir, basename + "." + PM_EXT)
    f0file = os.path.join(f0_dir, basename + "." + F0_EXT)
    filler = F0Filler()
    filler.load_pitchmarks(pmfile)
    filler.get_praat_f0(praatscript, wavfilename)
    filler.make_festival_f0(f0file)
def make_f0s(featconfig, wav_dir):
    """Create the f0/ directory and extract F0 contours for every wave file
    (later folded into the join coefficients)."""
    f0_dir = os.path.join(os.getcwd(), F0_DIR)
    os.mkdir(f0_dir)
    pm_dir = os.path.join(os.getcwd(), PM_DIR)
    psc_writer = F0_PSCWriter()
    psc_writer.min_pitch = int(featconfig.get("PITCH", "MIN"))
    psc_writer.max_pitch = int(featconfig.get("PITCH", "MAX"))
    psc_writer.default_pitch = int(featconfig.get("PITCH", "DEFAULT"))
    # Write the Praat extraction script to a temporary file.
    fd, praatscript = mkstemp()
    psc_writer.create_praat_script(praatscript)
    print("MAKING F0s...")
    wavfiles = sorted(glob(os.path.join(wav_dir, "*." + WAV_EXT)))
    map(extract_f0s,
        [(w, praatscript, pm_dir, f0_dir) for w in wavfiles])
    # Clean up the temporary script.
    os.close(fd)
    os.remove(praatscript)
########## F0s
########## MCEPs
def extract_mceps(args):
    """Extract mel-cepstral coefficients for one wave file via sig2fv,
    pitch-synchronously against the saved pitchmarks."""
    (wavfilename, fbank_order, window_factor, preemph_coef, melcep_order,
     window_type, melcep_coefs, mcep_dir, pm_dir) = args
    basename = os.path.splitext(os.path.basename(wavfilename))[0]
    mcepfile = os.path.join(mcep_dir, basename + "." + MCEP_EXT)
    pmfile = os.path.join(pm_dir, basename + "." + PM_EXT)
    cmdstring = " ".join([SIG2FV_BIN,
                          "-fbank_order", fbank_order,
                          "-factor", window_factor,
                          "-preemph", preemph_coef,
                          "-melcep_order", melcep_order,
                          "-window_type", window_type,
                          wavfilename,
                          "-otype est",
                          "-coefs", melcep_coefs,
                          "-o", mcepfile,
                          "-pm", pmfile])
    print(cmdstring)
    os.system(cmdstring)
def make_joincoefs(featconfig, wav_dir):
    """Build join-coefficient tracks: extract MCEPs, then normalise the MCEP
    and F0 tracks and concatenate F0 as an extra column per utterance."""
    mcep_dir = os.path.join(os.getcwd(), MCEP_DIR)
    os.mkdir(mcep_dir)
    join_dir = os.path.join(os.getcwd(), JOIN_DIR)
    os.mkdir(join_dir)
    pm_dir = os.path.join(os.getcwd(), PM_DIR)
    f0_dir = os.path.join(os.getcwd(), F0_DIR)
    fbank_order = featconfig.get("SIG2FV_MCEP", "FBANK_ORDER")
    melcep_order = featconfig.get("SIG2FV_MCEP", "MELCEP_ORDER")
    melcep_coefs = featconfig.get("SIG2FV_MCEP", "MELCEP_COEFS")
    preemph_coef = featconfig.get("SIG2FV_MCEP", "PREEMPH_COEF")
    window_factor = featconfig.get("SIG2FV_MCEP", "WINDOW_FACTOR")
    window_type = featconfig.get("SIG2FV_MCEP", "WINDOW_TYPE")
    print("MAKING JOINCOEFS...")
    wavfiles = sorted(glob(os.path.join(wav_dir, "*." + WAV_EXT)))
    map(extract_mceps,
        [(w, fbank_order, window_factor, preemph_coef, melcep_order,
          window_type, melcep_coefs, mcep_dir, pm_dir) for w in wavfiles])
    print("NORMALISING AND JOINING F0 AND MCEPS...")
    # Scale both feature streams as (x - mean) / (4 * std) * (upper - lower).
    upper = +1.0
    lower = -1.0
    mceptracks = {}
    for fn in glob(os.path.join(mcep_dir, "*." + MCEP_EXT)):
        t = Track()
        t.load_track(fn)
        mceptracks[os.path.basename(fn)] = t
    allmcepvecs = np.concatenate([mceptracks[tn].values for tn in sorted(mceptracks)])
    mcepmean = allmcepvecs.mean(0)
    mcepstd = allmcepvecs.std(0)
    for k in mceptracks:
        mceptracks[k].values = (mceptracks[k].values - mcepmean) / (4 * mcepstd) * (upper - lower)
    f0tracks = {}
    for fn in glob(os.path.join(f0_dir, "*." + F0_EXT)):
        t = Track()
        t.load_track(fn)
        f0tracks[os.path.basename(fn)] = t
    # F0 statistics are computed over voiced (non-zero) samples only.
    allf0vecs = np.concatenate([f0tracks[tn].values[f0tracks[tn].values.nonzero()] for tn in sorted(f0tracks)])
    f0mean = allf0vecs.mean(0)
    f0std = allf0vecs.std(0)
    for k in f0tracks:
        f0tracks[k].values = (f0tracks[k].values - f0mean) / (4 * f0std) * (upper - lower)
    # Append the normalised F0 column to each MCEP track (paired by sort order).
    for k1, k2 in zip(sorted(mceptracks), sorted(f0tracks)):
        mceptracks[k1].values = np.concatenate((mceptracks[k1].values, f0tracks[k2].values), 1)
    for fn in mceptracks:
        basename = os.path.splitext(os.path.basename(fn))[0]
        ttslab.tofile(mceptracks[fn], os.path.join(join_dir, basename + "." + JOIN_EXT))
########## MCEPs
def save_complete_utts(utts):
    """Pickle every fully-featured Utterance into COMPLETE_UTT_DIR."""
    complete_utt_dir = os.path.join(os.getcwd(), COMPLETE_UTT_DIR)
    try:
        print("SAVING COMPLETE UTTS...")
        try:
            os.makedirs(complete_utt_dir)
        except OSError:
            # directory may already exist
            pass
        for utt in utts:
            print(utt["file_id"])
            ttslab.tofile(utt, os.path.join(complete_utt_dir, ".".join([utt["file_id"], UTT_EXT])))
    except RuntimeError:
        #check what kind of monster utt caused the recursion limit to be exceeded...
        #UTTERANCE CHUNKING IS IMPORTANT...
        # NOTE(review): `utt` is unbound here if `utts` is empty.
        print(utt)
def make_unit_catalogue(utts):
    """Collect the feature dicts of all usable units across *utts*, grouped
    by unit name. Units without extracted 'lpc-coefs' are dropped."""
    print("MAKING UNITCATALOGUE...")
    catalogue = defaultdict(list)
    for utt in utts:
        print(utt["file_id"])
        item = utt.get_relation("Unit").head_item
        while item is not None:
            feats = item.content.features
            if "lpc-coefs" in feats:  # only units with extracted acoustics
                catalogue[item["name"]].append(feats)
            item = item.next_item
    return dict(catalogue)
########################################
## MAIN PROCEDURES
def make_features(featconfig):
    """pitchmark extraction, f0 extraction, lpc and residual
    calculation as well as mcep extraction and adding of f0 to mcep
    tracks to form joincoefs.
    """
    try:
        import multiprocessing
        POOL = multiprocessing.Pool(processes=multiprocessing.cpu_count())
        def map(f, i):
            return POOL.map(f, i, chunksize=1)
        # NOTE(review): this `map` is *local* to make_features; the sub-steps
        # below resolve `map` in their own (global/builtin) scope, so the
        # pool appears never to be used -- confirm intended behaviour.
    except ImportError:
        pass
    wav_dir = os.path.join(os.getcwd(), WAV_DIR)
    make_pitchmarks(featconfig, wav_dir)
    make_lpcs(featconfig, wav_dir)
    make_f0s(featconfig, wav_dir)
    make_joincoefs(featconfig, wav_dir)
def make_catalogue(voice):
    """Build the halfphone unit catalogue from aligned utterances and save
    it as unitcatalogue.pickle in the current directory."""
    utt_dir = os.path.join(os.getcwd(), UTT_DIR)
    utts = make_units(voice, utt_dir)
    ##
    defaultrecursionlimit = sys.getrecursionlimit()
    # Deep HRG structures can exceed the default pickle recursion limit.
    sys.setrecursionlimit(BIGGER_RECURSION_LIMIT)
    utts = add_feats_to_units(utts)
    if SAVE_COMPLETE_UTTS:
        save_complete_utts(utts)
    sys.setrecursionlimit(defaultrecursionlimit)
    ##
    unitcatalogue = make_unit_catalogue(utts)
    print("SAVING UNITCATALOGUE...")
    ttslab.tofile(unitcatalogue, "unitcatalogue.pickle")
def auto(featconfig, voice):
    """Run the full build pipeline (features, then catalogue) without any
    user interaction."""
    make_features(featconfig)
    make_catalogue(voice)
class CLIException(Exception):
    """Internal signal raised when the command-line switch is unrecognised."""
def main():
    """Command-line driver: load the voice and feature config, then dispatch
    on the switch (auto | make_features | make_catalogue)."""
    try:
        voicefile = sys.argv[1]
        featconfpath = sys.argv[2]
        switch = sys.argv[3]
    except IndexError:
        print("USAGE: ttslab_make_halfphones.py VOICEFILE FEATSCONF [auto | make_features | make_catalogue]")
        sys.exit()
    voice = ttslab.fromfile(voicefile)
    with open(featconfpath) as conffh:
        featconfig = ConfigParser()
        featconfig.readfp(conffh)
    try:
        if switch == "auto":
            auto(featconfig, voice)
        elif switch == "make_features":
            make_features(featconfig)
        elif switch == "make_catalogue":
            make_catalogue(voice)
        else:
            # Unknown switch: fall through to the usage message below.
            raise CLIException
    except CLIException:
        print("USAGE: ttslab_make_halfphones.py VOICEFILE FEATSCONF [auto | make_features | make_catalogue]")
if __name__ == "__main__":
    # Script entry point.
    main()
| 33.168224
| 123
| 0.601691
|
4a0e44a24314921e37a5895eee40735865cde1de
| 882
|
py
|
Python
|
Labs/AI and Machine Learning/Cognitive Toolkit/resources/images2cntk.py
|
varunjha089/computerscience
|
bd90079e4a8701e92c9f88f598bfa86203b6cbb7
|
[
"MIT"
] | 24
|
2017-06-08T01:16:28.000Z
|
2017-08-24T06:49:52.000Z
|
Labs/AI and Machine Learning/Cognitive Toolkit/resources/images2cntk.py
|
varunjha089/computerscience
|
bd90079e4a8701e92c9f88f598bfa86203b6cbb7
|
[
"MIT"
] | null | null | null |
Labs/AI and Machine Learning/Cognitive Toolkit/resources/images2cntk.py
|
varunjha089/computerscience
|
bd90079e4a8701e92c9f88f598bfa86203b6cbb7
|
[
"MIT"
] | 6
|
2017-06-08T05:32:27.000Z
|
2019-03-12T02:47:10.000Z
|
#This script converts the MNIST Data files into PNG files for each respective image in the file set.
from os import walk
from PIL import Image
# Convert every image in input-images/ into one CNTK text-format line:
# "|labels <one-hot 10> |features <inverted grayscale pixels row-major>".
imagePath = "input-images"
files = []
for (dirpath, dirnames, filenames) in walk(imagePath):
    files.extend(filenames)
    break  # top-level directory only, no recursion
cntkfile = open('Custom-Test-28x28_cntk_text.txt', 'w')
for filename in files:
    im = Image.open(imagePath + "/" + filename)
    im_grey = im.convert('LA') # convert to grayscale
    width,height = im.size
    cntkline = "|labels "
    # Filenames are assumed to start with the digit label, e.g. "7-xyz.png".
    fileparts = filename.split("-")
    digit = int(fileparts[0])
    # One-hot encode the label over classes 0-9.
    for i in range(0,10):
        label = "0 "
        if i == digit:
            label = "1 "
        cntkline += label
    cntkline += "|features"
    # Invert so ink is high-valued (255 - luminance channel of 'LA').
    for i in range(0,height):
        for j in range(0,width):
            digit = 255 - im_grey.getpixel((j,i))[0]
            cntkline += " " + str(digit)
    cntkfile.write(cntkline + "\n")
cntkfile.close()
| 19.173913
| 100
| 0.653061
|
4a0e451da6406f82110644e823f7c993e63a624e
| 5,569
|
py
|
Python
|
pdict/special.py
|
thorwhalen/ut
|
353a4629c35a2cca76ef91a4d5209afe766433b4
|
[
"MIT"
] | 4
|
2016-12-17T20:06:10.000Z
|
2021-11-19T04:45:29.000Z
|
pdict/special.py
|
thorwhalen/ut
|
353a4629c35a2cca76ef91a4d5209afe766433b4
|
[
"MIT"
] | 11
|
2021-01-06T05:35:11.000Z
|
2022-03-11T23:28:31.000Z
|
pdict/special.py
|
thorwhalen/ut
|
353a4629c35a2cca76ef91a4d5209afe766433b4
|
[
"MIT"
] | 3
|
2015-06-12T10:44:16.000Z
|
2021-07-26T18:39:47.000Z
|
"""Special dicts"""
__author__ = 'thor'
from collections import defaultdict, UserDict
from ut.pdict.get import set_value_in_nested_key_path
val_unlikely_to_be_value_of_dict = (1987654321, 8239080923)
class keydefaultdict(defaultdict):
    """A defaultdict whose factory receives the missing key as argument."""
    def __missing__(self, key):
        value = self.default_factory(key)
        self[key] = value
        return value
class DictDefaultDict(dict):
    """
    Acts similarly to collections.defaultdict, except
    (1) the defaults depend on the key (given by a dict of key-->default_val at construction)
    (2) it is not a function that is called to create the default value (so careful with referenced variables)
    """
    def __init__(self, default_dict):
        dict.__init__(self)
        # Lookups that miss in self fall back to this mapping.
        self.default_dict = default_dict
    def __getitem__(self, item):
        if dict.__contains__(self, item):
            return dict.__getitem__(self, item)
        # KeyError propagates from default_dict for truly unknown keys.
        return self.default_dict[item]
class KeyPathDict(dict):
    """
    NOTE: Might want to check out key_path.py (in https://github.com/i2mint/py2mint/) instead.
    A dict where you can get and set values from key_paths (i.e. dot-separated strings or lists of nested keys).
    Use with care.
    Some functionalities that would be expected from such a subclass of dict aren't implemented yet, or only partially.
    Further, operating with KeyPathDict is slower. One test showed that getting a value was 80 times slower
    But, to be fair, it was in micro-seconds instead of nano-seconds, so this class can still be useful for
    convenience when it is not in a bottle neck of a process.
    >>> input_dict = {
    ...     'a': {
    ...         'b': 1,
    ...         'c': 'val of a.c',
    ...         'd': [1, 2]
    ...     },
    ...     'b': {
    ...         'A': 'val of b.A',
    ...         'B': {
    ...             'AA': 'val of b.B.AA'
    ...         }
    ...     },
    ...     10: 'val for 10',
    ...     '10': 10
    ... }
    >>>
    >>> d = KeyPathDict(input_dict)
    >>> d.get('a.c')
    'val of a.c'
    >>> d.get(['a', 'c']) == d['a.c']
    True
    >>> d[['a', 'c']] == d['a.c']
    True
    >>> d.get('non.existent.key', 'default')
    'default'
    >>> 'non.existent.key' in d
    False
    >>> d['b.B.AA']
    'val of b.B.AA'
    >>> d['b.B.AA'] = 3  # assigning another value to EXISTING key path
    >>> d['b.B.AA']
    3
    >>> d['10'] = 0  # assigning another value to EXISTING key path
    >>> d['10']
    0
    >>> d['new_key'] = 7  # assigning another value to new SINGLE key
    >>> d['new_key']
    7
    >>> d['new.key.path'] = 8  # assigning a value to new key path
    >>> d['new.key']
    {'path': 8}
    >>> d['new.key.old.path'] = 9  # assigning a value to new key path, intersecting with another
    >>> d['new.key']
    {'path': 8, 'old': {'path': 9}}
    >>> d['new.key'] = 'something new'  # assigning a value to a key (sub-)path that already exists
    >>> d['new.key']
    'something new'
    """
    # Private sentinel: compared by identity (`is`), so it can never collide
    # with any value actually stored in the dict (the old module-level tuple
    # was compared with `==`, which a stored equal tuple could defeat).
    _MISSING = object()

    def get(self, key_path, d=None):
        """Return the value at ``key_path`` (dot-string, list of keys, or a
        plain non-string key), or ``d`` when any step of the path is absent.
        """
        if isinstance(key_path, str):
            key_path = key_path.split('.')
        if not isinstance(key_path, list):
            # Plain (non-path) key: behave exactly like dict.get.
            return super(KeyPathDict, self).get(key_path, d)
        if not key_path:
            # An empty path addresses nothing (the old code raised
            # IndexError here); fall back to the default.
            return d
        val = super(KeyPathDict, self).get(key_path[0], self._MISSING)
        for key in key_path[1:]:
            if not isinstance(val, dict):
                # Either a previous step was missing or we hit a leaf value
                # before exhausting the path.
                return d
            val = val.get(key, self._MISSING)
        return d if val is self._MISSING else val

    def __getitem__(self, val):
        # NOTE: returns None (not KeyError) for missing paths, matching the
        # original interface.
        return self.get(val, None)

    def __setitem__(self, key_path, val):
        """Set ``val`` at ``key_path``, creating intermediate dicts for new
        multi-key paths via ``set_value_in_nested_key_path``.
        """
        if isinstance(key_path, str):
            key_path = key_path.split('.')
        if isinstance(key_path, list):
            first_key = key_path[0]
            if len(key_path) == 1:
                super(KeyPathDict, self).__setitem__(first_key, val)
            else:
                if first_key not in self:
                    # Seed an empty dict so the nested setter has a root.
                    super(KeyPathDict, self).__setitem__(first_key, {})
                set_value_in_nested_key_path(self[first_key], key_path[1:], val)
        else:
            super(KeyPathDict, self).__setitem__(key_path, val)

    def __contains__(self, key_path):
        if isinstance(key_path, str):
            key_path = key_path.split('.')
        if not isinstance(key_path, list):
            return super(KeyPathDict, self).__contains__(key_path)
        if not key_path:
            return False
        # Guard the first key too: the old code did a raw __getitem__ here,
        # so `'missing.path' in d` raised KeyError instead of returning False.
        if not super(KeyPathDict, self).__contains__(key_path[0]):
            return False
        node = super(KeyPathDict, self).__getitem__(key_path[0])
        for k in key_path[1:]:
            if not isinstance(node, dict) or k not in node:
                return False
            node = node[k]
        return True
| 35.698718
| 135
| 0.543006
|
4a0e45a30481a2e3fa4a663d257495f6a431ea4e
| 36,240
|
py
|
Python
|
mmdet/models/dense_heads/vfnet_head.py
|
zimoqingfeng/UMOP
|
16af670ee10b95015296d4a8da56a10fb7b89f72
|
[
"Apache-2.0"
] | 27
|
2021-09-16T11:24:43.000Z
|
2022-03-29T06:52:20.000Z
|
mmdet/models/dense_heads/vfnet_head.py
|
zimoqingfeng/UMOP
|
16af670ee10b95015296d4a8da56a10fb7b89f72
|
[
"Apache-2.0"
] | 9
|
2021-09-16T08:51:01.000Z
|
2022-01-05T10:37:47.000Z
|
mmdet/models/dense_heads/vfnet_head.py
|
zimoqingfeng/UMOP
|
16af670ee10b95015296d4a8da56a10fb7b89f72
|
[
"Apache-2.0"
] | 4
|
2021-09-16T11:24:58.000Z
|
2021-12-18T01:13:30.000Z
|
import numpy as np
import torch
import torch.nn as nn
from mmcv.cnn import ConvModule, Scale
from mmcv.ops import DeformConv2d
from mmcv.runner import force_fp32
from mmdet.core import (bbox2distance, bbox_overlaps, build_anchor_generator,
build_assigner, build_sampler, distance2bbox,
multi_apply, multiclass_nms, reduce_mean)
from ..builder import HEADS, build_loss
from .atss_head import ATSSHead
from .fcos_head import FCOSHead
INF = 1e8
@HEADS.register_module()
class VFNetHead(ATSSHead, FCOSHead):
    """Head of `VarifocalNet (VFNet): An IoU-aware Dense Object
    Detector.<https://arxiv.org/abs/2008.13367>`_.
    The VFNet predicts IoU-aware classification scores which mix the
    object presence confidence and object localization accuracy as the
    detection score. It is built on the FCOS architecture and uses ATSS
    for defining positive/negative training examples. The VFNet is trained
    with Varifocal Loss and empolys star-shaped deformable convolution to
    extract features for a bbox.
    Args:
        num_classes (int): Number of categories excluding the background
            category.
        in_channels (int): Number of channels in the input feature map.
        regress_ranges (tuple[tuple[int, int]]): Regress range of multiple
            level points.
        center_sampling (bool): If true, use center sampling. Default: False.
        center_sample_radius (float): Radius of center sampling. Default: 1.5.
        sync_num_pos (bool): If true, synchronize the number of positive
            examples across GPUs. Default: True
        gradient_mul (float): The multiplier to gradients from bbox refinement
            and recognition. Default: 0.1.
        bbox_norm_type (str): The bbox normalization type, 'reg_denom' or
            'stride'. Default: reg_denom
        loss_cls_fl (dict): Config of focal loss.
        use_vfl (bool): If true, use varifocal loss for training.
            Default: True.
        loss_cls (dict): Config of varifocal loss.
        loss_bbox (dict): Config of localization loss, GIoU Loss.
        loss_bbox (dict): Config of localization refinement loss, GIoU Loss.
        norm_cfg (dict): dictionary to construct and config norm layer.
            Default: norm_cfg=dict(type='GN', num_groups=32,
            requires_grad=True).
        use_atss (bool): If true, use ATSS to define positive/negative
            examples. Default: True.
        anchor_generator (dict): Config of anchor generator for ATSS.
        init_cfg (dict or list[dict], optional): Initialization config dict.
    Example:
        >>> self = VFNetHead(11, 7)
        >>> feats = [torch.rand(1, 7, s, s) for s in [4, 8, 16, 32, 64]]
        >>> cls_score, bbox_pred, bbox_pred_refine= self.forward(feats)
        >>> assert len(cls_score) == len(self.scales)
    """ # noqa: E501
    def __init__(self,
                 num_classes,
                 in_channels,
                 regress_ranges=((-1, 64), (64, 128), (128, 256), (256, 512),
                                 (512, INF)),
                 center_sampling=False,
                 center_sample_radius=1.5,
                 sync_num_pos=True,
                 gradient_mul=0.1,
                 bbox_norm_type='reg_denom',
                 loss_cls_fl=dict(
                     type='FocalLoss',
                     use_sigmoid=True,
                     gamma=2.0,
                     alpha=0.25,
                     loss_weight=1.0),
                 use_vfl=True,
                 loss_cls=dict(
                     type='VarifocalLoss',
                     use_sigmoid=True,
                     alpha=0.75,
                     gamma=2.0,
                     iou_weighted=True,
                     loss_weight=1.0),
                 loss_bbox=dict(type='GIoULoss', loss_weight=1.5),
                 loss_bbox_refine=dict(type='GIoULoss', loss_weight=2.0),
                 norm_cfg=dict(type='GN', num_groups=32, requires_grad=True),
                 use_atss=True,
                 anchor_generator=dict(
                     type='AnchorGenerator',
                     ratios=[1.0],
                     octave_base_scale=8,
                     scales_per_octave=1,
                     center_offset=0.0,
                     strides=[8, 16, 32, 64, 128]),
                 init_cfg=dict(
                     type='Normal',
                     layer='Conv2d',
                     std=0.01,
                     override=dict(
                         type='Normal',
                         name='vfnet_cls',
                         std=0.01,
                         bias_prob=0.01)),
                 **kwargs):
        # dcn base offsets, adapted from reppoints_head.py
        # 9 sampling points arranged on a 3x3 grid centered on the location.
        self.num_dconv_points = 9
        self.dcn_kernel = int(np.sqrt(self.num_dconv_points))
        self.dcn_pad = int((self.dcn_kernel - 1) / 2)
        dcn_base = np.arange(-self.dcn_pad,
                             self.dcn_pad + 1).astype(np.float64)
        dcn_base_y = np.repeat(dcn_base, self.dcn_kernel)
        dcn_base_x = np.tile(dcn_base, self.dcn_kernel)
        dcn_base_offset = np.stack([dcn_base_y, dcn_base_x], axis=1).reshape(
            (-1))
        # Shape (1, 2 * num_dconv_points, 1, 1): interleaved (y, x) offsets of
        # the default (undeformed) 3x3 kernel, subtracted in star_dcn_offset.
        self.dcn_base_offset = torch.tensor(dcn_base_offset).view(1, -1, 1, 1)
        # NOTE: super(FCOSHead, self) deliberately skips the ATSSHead and
        # FCOSHead initializers and dispatches to the next base in the MRO
        # (presumably AnchorFreeHead — confirm against the mmdet version).
        super(FCOSHead, self).__init__(
            num_classes,
            in_channels,
            norm_cfg=norm_cfg,
            init_cfg=init_cfg,
            **kwargs)
        self.regress_ranges = regress_ranges
        self.reg_denoms = [
            regress_range[-1] for regress_range in regress_ranges
        ]
        # The top level's regress range ends at INF, which is unusable as a
        # normalizer; use twice the previous level's upper bound instead.
        self.reg_denoms[-1] = self.reg_denoms[-2] * 2
        self.center_sampling = center_sampling
        self.center_sample_radius = center_sample_radius
        self.sync_num_pos = sync_num_pos
        self.bbox_norm_type = bbox_norm_type
        self.gradient_mul = gradient_mul
        self.use_vfl = use_vfl
        # Varifocal loss replaces focal loss when enabled (the default).
        if self.use_vfl:
            self.loss_cls = build_loss(loss_cls)
        else:
            self.loss_cls = build_loss(loss_cls_fl)
        self.loss_bbox = build_loss(loss_bbox)
        self.loss_bbox_refine = build_loss(loss_bbox_refine)
        # for getting ATSS targets
        self.use_atss = use_atss
        self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False)
        self.anchor_generator = build_anchor_generator(anchor_generator)
        self.anchor_center_offset = anchor_generator['center_offset']
        self.num_anchors = self.anchor_generator.num_base_anchors[0]
        self.sampling = False
        if self.train_cfg:
            self.assigner = build_assigner(self.train_cfg.assigner)
            sampler_cfg = dict(type='PseudoSampler')
            self.sampler = build_sampler(sampler_cfg, context=self)
    def _init_layers(self):
        """Initialize layers of the head."""
        # Reuse the shared cls/reg conv towers from the anchor-free base,
        # then add VFNet-specific prediction and refinement layers.
        super(FCOSHead, self)._init_cls_convs()
        super(FCOSHead, self)._init_reg_convs()
        self.relu = nn.ReLU(inplace=True)
        self.vfnet_reg_conv = ConvModule(
            self.feat_channels,
            self.feat_channels,
            3,
            stride=1,
            padding=1,
            conv_cfg=self.conv_cfg,
            norm_cfg=self.norm_cfg,
            bias=self.conv_bias)
        # Initial (coarse) bbox prediction: 4 channels = (l, t, r, b).
        self.vfnet_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1)
        self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides])
        # Star-shaped deformable conv + refinement branch.
        self.vfnet_reg_refine_dconv = DeformConv2d(
            self.feat_channels,
            self.feat_channels,
            self.dcn_kernel,
            1,
            padding=self.dcn_pad)
        self.vfnet_reg_refine = nn.Conv2d(self.feat_channels, 4, 3, padding=1)
        self.scales_refine = nn.ModuleList([Scale(1.0) for _ in self.strides])
        # Classification branch shares the same deformable sampling offsets.
        self.vfnet_cls_dconv = DeformConv2d(
            self.feat_channels,
            self.feat_channels,
            self.dcn_kernel,
            1,
            padding=self.dcn_pad)
        self.vfnet_cls = nn.Conv2d(
            self.feat_channels, self.cls_out_channels, 3, padding=1)
    def forward(self, feats):
        """Forward features from the upstream network.
        Args:
            feats (tuple[Tensor]): Features from the upstream network, each is
                a 4D-tensor.
        Returns:
            tuple:
                cls_scores (list[Tensor]): Box iou-aware scores for each scale
                    level, each is a 4D-tensor, the channel number is
                    num_points * num_classes.
                bbox_preds (list[Tensor]): Box offsets for each
                    scale level, each is a 4D-tensor, the channel number is
                    num_points * 4.
                bbox_preds_refine (list[Tensor]): Refined Box offsets for
                    each scale level, each is a 4D-tensor, the channel
                    number is num_points * 4.
        """
        return multi_apply(self.forward_single, feats, self.scales,
                           self.scales_refine, self.strides, self.reg_denoms)
    def forward_single(self, x, scale, scale_refine, stride, reg_denom):
        """Forward features of a single scale level.
        Args:
            x (Tensor): FPN feature maps of the specified stride.
            scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize
                the bbox prediction.
            scale_refine (:obj: `mmcv.cnn.Scale`): Learnable scale module to
                resize the refined bbox prediction.
            stride (int): The corresponding stride for feature maps,
                used to normalize the bbox prediction when
                bbox_norm_type = 'stride'.
            reg_denom (int): The corresponding regression range for feature
                maps, only used to normalize the bbox prediction when
                bbox_norm_type = 'reg_denom'.
        Returns:
            tuple: iou-aware cls scores for each box, bbox predictions and
                refined bbox predictions of input feature maps.
        """
        cls_feat = x
        reg_feat = x
        for cls_layer in self.cls_convs:
            cls_feat = cls_layer(cls_feat)
        for reg_layer in self.reg_convs:
            reg_feat = reg_layer(reg_feat)
        # predict the bbox_pred of different level
        reg_feat_init = self.vfnet_reg_conv(reg_feat)
        # exp() keeps distances positive; the learnable Scale plus the
        # per-level normalizer (reg_denom or stride) maps them to image units.
        if self.bbox_norm_type == 'reg_denom':
            bbox_pred = scale(
                self.vfnet_reg(reg_feat_init)).float().exp() * reg_denom
        elif self.bbox_norm_type == 'stride':
            bbox_pred = scale(
                self.vfnet_reg(reg_feat_init)).float().exp() * stride
        else:
            raise NotImplementedError
        # compute star deformable convolution offsets
        # converting dcn_offset to reg_feat.dtype thus VFNet can be
        # trained with FP16
        dcn_offset = self.star_dcn_offset(bbox_pred, self.gradient_mul,
                                          stride).to(reg_feat.dtype)
        # refine the bbox_pred
        # The refinement is predicted as a multiplicative factor on top of
        # the (detached) initial prediction.
        reg_feat = self.relu(self.vfnet_reg_refine_dconv(reg_feat, dcn_offset))
        bbox_pred_refine = scale_refine(
            self.vfnet_reg_refine(reg_feat)).float().exp()
        bbox_pred_refine = bbox_pred_refine * bbox_pred.detach()
        # predict the iou-aware cls score
        cls_feat = self.relu(self.vfnet_cls_dconv(cls_feat, dcn_offset))
        cls_score = self.vfnet_cls(cls_feat)
        return cls_score, bbox_pred, bbox_pred_refine
    def star_dcn_offset(self, bbox_pred, gradient_mul, stride):
        """Compute the star deformable conv offsets.
        Args:
            bbox_pred (Tensor): Predicted bbox distance offsets (l, r, t, b).
            gradient_mul (float): Gradient multiplier.
            stride (int): The corresponding stride for feature maps,
                used to project the bbox onto the feature map.
        Returns:
            dcn_offsets (Tensor): The offsets for deformable convolution.
        """
        dcn_base_offset = self.dcn_base_offset.type_as(bbox_pred)
        # Partially detach the bbox branch: only `gradient_mul` of the
        # gradient from the offsets flows back into bbox_pred.
        bbox_pred_grad_mul = (1 - gradient_mul) * bbox_pred.detach() + \
            gradient_mul * bbox_pred
        # map to the feature map scale
        bbox_pred_grad_mul = bbox_pred_grad_mul / stride
        N, C, H, W = bbox_pred.size()
        x1 = bbox_pred_grad_mul[:, 0, :, :]
        y1 = bbox_pred_grad_mul[:, 1, :, :]
        x2 = bbox_pred_grad_mul[:, 2, :, :]
        y2 = bbox_pred_grad_mul[:, 3, :, :]
        # Channels are interleaved (y, x) pairs for the 9 star points; pairs
        # left at zero keep that point on the box center line/column (the
        # base 3x3 grid offset is subtracted below).
        bbox_pred_grad_mul_offset = bbox_pred.new_zeros(
            N, 2 * self.num_dconv_points, H, W)
        bbox_pred_grad_mul_offset[:, 0, :, :] = -1.0 * y1  # -y1
        bbox_pred_grad_mul_offset[:, 1, :, :] = -1.0 * x1  # -x1
        bbox_pred_grad_mul_offset[:, 2, :, :] = -1.0 * y1  # -y1
        bbox_pred_grad_mul_offset[:, 4, :, :] = -1.0 * y1  # -y1
        bbox_pred_grad_mul_offset[:, 5, :, :] = x2  # x2
        bbox_pred_grad_mul_offset[:, 7, :, :] = -1.0 * x1  # -x1
        bbox_pred_grad_mul_offset[:, 11, :, :] = x2  # x2
        bbox_pred_grad_mul_offset[:, 12, :, :] = y2  # y2
        bbox_pred_grad_mul_offset[:, 13, :, :] = -1.0 * x1  # -x1
        bbox_pred_grad_mul_offset[:, 14, :, :] = y2  # y2
        bbox_pred_grad_mul_offset[:, 16, :, :] = y2  # y2
        bbox_pred_grad_mul_offset[:, 17, :, :] = x2  # x2
        dcn_offset = bbox_pred_grad_mul_offset - dcn_base_offset
        return dcn_offset
    @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'bbox_preds_refine'))
    def loss(self,
             cls_scores,
             bbox_preds,
             bbox_preds_refine,
             gt_bboxes,
             gt_labels,
             img_metas,
             gt_bboxes_ignore=None):
        """Compute loss of the head.
        Args:
            cls_scores (list[Tensor]): Box iou-aware scores for each scale
                level, each is a 4D-tensor, the channel number is
                num_points * num_classes.
            bbox_preds (list[Tensor]): Box offsets for each
                scale level, each is a 4D-tensor, the channel number is
                num_points * 4.
            bbox_preds_refine (list[Tensor]): Refined Box offsets for
                each scale level, each is a 4D-tensor, the channel
                number is num_points * 4.
            gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
                shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
            gt_labels (list[Tensor]): class indices corresponding to each box
            img_metas (list[dict]): Meta information of each image, e.g.,
                image size, scaling factor, etc.
            gt_bboxes_ignore (None | list[Tensor]): specify which bounding
                boxes can be ignored when computing the loss.
                Default: None.
        Returns:
            dict[str, Tensor]: A dictionary of loss components.
        """
        assert len(cls_scores) == len(bbox_preds) == len(bbox_preds_refine)
        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
        all_level_points = self.get_points(featmap_sizes, bbox_preds[0].dtype,
                                           bbox_preds[0].device)
        labels, label_weights, bbox_targets, bbox_weights = self.get_targets(
            cls_scores, all_level_points, gt_bboxes, gt_labels, img_metas,
            gt_bboxes_ignore)
        num_imgs = cls_scores[0].size(0)
        # flatten cls_scores, bbox_preds and bbox_preds_refine
        flatten_cls_scores = [
            cls_score.permute(0, 2, 3,
                              1).reshape(-1,
                                         self.cls_out_channels).contiguous()
            for cls_score in cls_scores
        ]
        flatten_bbox_preds = [
            bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4).contiguous()
            for bbox_pred in bbox_preds
        ]
        flatten_bbox_preds_refine = [
            bbox_pred_refine.permute(0, 2, 3, 1).reshape(-1, 4).contiguous()
            for bbox_pred_refine in bbox_preds_refine
        ]
        ###### Insert hook for distribution
        # Instrumentation only (no effect on the loss): for every FPN level,
        # reduce-mean across GPUs the number of valid label slots and of
        # positive samples, and append them to ./vfnet_sum.log on GPU 0.
        # `label_weights` is the flat concatenation over levels (see
        # get_atss_targets), hence the manual slicing by level length.
        # NOTE(review): `!= 80` hard-codes the COCO background index; should
        # presumably be `!= self.num_classes` — confirm. Also multiplying
        # labels by weights maps weight-0 entries onto class 0.
        start_idx = 0
        for idx in range(len(labels)):
            if start_idx == 0:
                lvl_label_weights = label_weights[ : (start_idx + labels[idx].shape[0])]
            else:
                lvl_label_weights = label_weights[start_idx : (start_idx + labels[idx].shape[0])]
            start_idx += labels[idx].shape[0]
            lvl_valid_labels = labels[idx] * lvl_label_weights
            num_tot_valid_labels = torch.tensor((len(lvl_valid_labels) * self.num_classes),
                                                dtype=torch.float,
                                                device=lvl_valid_labels.device)
            num_tot_valid_labels = reduce_mean(num_tot_valid_labels).item()
            num_pos_samples = torch.tensor(len(lvl_valid_labels[lvl_valid_labels != 80]),
                                           dtype=torch.float,
                                           device=lvl_valid_labels.device)
            num_pos_samples = reduce_mean(num_pos_samples).item()
            if lvl_valid_labels.device.index == 0:
                with open('./vfnet_sum.log', 'a') as f:
                    f.write('\t'.join([str(idx),
                                       str(num_tot_valid_labels),
                                       str(num_pos_samples)]) + '\n')
        ###### Insert hook for distribution
        flatten_cls_scores = torch.cat(flatten_cls_scores)
        flatten_bbox_preds = torch.cat(flatten_bbox_preds)
        flatten_bbox_preds_refine = torch.cat(flatten_bbox_preds_refine)
        flatten_labels = torch.cat(labels)
        flatten_bbox_targets = torch.cat(bbox_targets)
        # repeat points to align with bbox_preds
        flatten_points = torch.cat(
            [points.repeat(num_imgs, 1) for points in all_level_points])
        # FG cat_id: [0, num_classes - 1], BG cat_id: num_classes
        bg_class_ind = self.num_classes
        pos_inds = torch.where(
            ((flatten_labels >= 0) & (flatten_labels < bg_class_ind)) > 0)[0]
        num_pos = len(pos_inds)
        pos_bbox_preds = flatten_bbox_preds[pos_inds]
        pos_bbox_preds_refine = flatten_bbox_preds_refine[pos_inds]
        pos_labels = flatten_labels[pos_inds]
        # sync num_pos across all gpus
        if self.sync_num_pos:
            num_pos_avg_per_gpu = reduce_mean(
                pos_inds.new_tensor(num_pos).float()).item()
            num_pos_avg_per_gpu = max(num_pos_avg_per_gpu, 1.0)
        else:
            num_pos_avg_per_gpu = num_pos
        pos_bbox_targets = flatten_bbox_targets[pos_inds]
        pos_points = flatten_points[pos_inds]
        pos_decoded_bbox_preds = distance2bbox(pos_points, pos_bbox_preds)
        pos_decoded_target_preds = distance2bbox(pos_points, pos_bbox_targets)
        # IoU of the initial prediction vs target doubles as the per-sample
        # weight of the initial bbox loss.
        iou_targets_ini = bbox_overlaps(
            pos_decoded_bbox_preds,
            pos_decoded_target_preds.detach(),
            is_aligned=True).clamp(min=1e-6)
        bbox_weights_ini = iou_targets_ini.clone().detach()
        bbox_avg_factor_ini = reduce_mean(
            bbox_weights_ini.sum()).clamp_(min=1).item()
        pos_decoded_bbox_preds_refine = \
            distance2bbox(pos_points, pos_bbox_preds_refine)
        iou_targets_rf = bbox_overlaps(
            pos_decoded_bbox_preds_refine,
            pos_decoded_target_preds.detach(),
            is_aligned=True).clamp(min=1e-6)
        bbox_weights_rf = iou_targets_rf.clone().detach()
        bbox_avg_factor_rf = reduce_mean(
            bbox_weights_rf.sum()).clamp_(min=1).item()
        if num_pos > 0:
            loss_bbox = self.loss_bbox(
                pos_decoded_bbox_preds,
                pos_decoded_target_preds.detach(),
                weight=bbox_weights_ini,
                avg_factor=bbox_avg_factor_ini)
            loss_bbox_refine = self.loss_bbox_refine(
                pos_decoded_bbox_preds_refine,
                pos_decoded_target_preds.detach(),
                weight=bbox_weights_rf,
                avg_factor=bbox_avg_factor_rf)
            # build IoU-aware cls_score targets
            if self.use_vfl:
                pos_ious = iou_targets_rf.clone().detach()
                cls_iou_targets = torch.zeros_like(flatten_cls_scores)
                cls_iou_targets[pos_inds, pos_labels] = pos_ious
        else:
            # No positives: keep zero-valued losses connected to the graph.
            loss_bbox = pos_bbox_preds.sum() * 0
            loss_bbox_refine = pos_bbox_preds_refine.sum() * 0
            if self.use_vfl:
                cls_iou_targets = torch.zeros_like(flatten_cls_scores)
        if self.use_vfl:
            loss_cls = self.loss_cls(flatten_cls_scores, cls_iou_targets, avg_factor=num_pos_avg_per_gpu)
        else:
            loss_cls = self.loss_cls(
                flatten_cls_scores,
                flatten_labels,
                weight=label_weights,
                avg_factor=num_pos_avg_per_gpu)
        return dict(
            loss_cls=loss_cls,
            loss_bbox=loss_bbox,
            loss_bbox_rf=loss_bbox_refine)
    @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'bbox_preds_refine'))
    def get_bboxes(self,
                   cls_scores,
                   bbox_preds,
                   bbox_preds_refine,
                   img_metas,
                   cfg=None,
                   rescale=None,
                   with_nms=True):
        """Transform network outputs for a batch into bbox predictions.
        Args:
            cls_scores (list[Tensor]): Box iou-aware scores for each scale
                level with shape (N, num_points * num_classes, H, W).
            bbox_preds (list[Tensor]): Box offsets for each scale
                level with shape (N, num_points * 4, H, W).
            bbox_preds_refine (list[Tensor]): Refined Box offsets for
                each scale level with shape (N, num_points * 4, H, W).
            img_metas (list[dict]): Meta information of each image, e.g.,
                image size, scaling factor, etc.
            cfg (mmcv.Config): Test / postprocessing configuration,
                if None, test_cfg would be used. Default: None.
            rescale (bool): If True, return boxes in original image space.
                Default: False.
            with_nms (bool): If True, do nms before returning boxes.
                Default: True.
        Returns:
            list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple.
                The first item is an (n, 5) tensor, where the first 4 columns
                are bounding box positions (tl_x, tl_y, br_x, br_y) and the
                5-th column is a score between 0 and 1. The second item is a
                (n,) tensor where each item is the predicted class label of
                the corresponding box.
        """
        assert len(cls_scores) == len(bbox_preds) == len(bbox_preds_refine)
        num_levels = len(cls_scores)
        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
        mlvl_points = self.get_points(featmap_sizes, bbox_preds[0].dtype,
                                      bbox_preds[0].device)
        result_list = []
        for img_id in range(len(img_metas)):
            cls_score_list = [
                cls_scores[i][img_id].detach() for i in range(num_levels)
            ]
            # Inference decodes from the *refined* bbox predictions only;
            # the initial bbox_preds are unused here.
            bbox_pred_list = [
                bbox_preds_refine[i][img_id].detach()
                for i in range(num_levels)
            ]
            img_shape = img_metas[img_id]['img_shape']
            scale_factor = img_metas[img_id]['scale_factor']
            det_bboxes = self._get_bboxes_single(cls_score_list,
                                                 bbox_pred_list, mlvl_points,
                                                 img_shape, scale_factor, cfg,
                                                 rescale, with_nms)
            result_list.append(det_bboxes)
        return result_list
    def _get_bboxes_single(self,
                           cls_scores,
                           bbox_preds,
                           mlvl_points,
                           img_shape,
                           scale_factor,
                           cfg,
                           rescale=False,
                           with_nms=True):
        """Transform outputs for a single batch item into bbox predictions.
        Args:
            cls_scores (list[Tensor]): Box iou-aware scores for a single scale
                level with shape (num_points * num_classes, H, W).
            bbox_preds (list[Tensor]): Box offsets for a single scale
                level with shape (num_points * 4, H, W).
            mlvl_points (list[Tensor]): Box reference for a single scale level
                with shape (num_total_points, 4).
            img_shape (tuple[int]): Shape of the input image,
                (height, width, 3).
            scale_factor (ndarray): Scale factor of the image arrange as
                (w_scale, h_scale, w_scale, h_scale).
            cfg (mmcv.Config | None): Test / postprocessing configuration,
                if None, test_cfg would be used.
            rescale (bool): If True, return boxes in original image space.
                Default: False.
            with_nms (bool): If True, do nms before returning boxes.
                Default: True.
        Returns:
            tuple(Tensor):
                det_bboxes (Tensor): BBox predictions in shape (n, 5), where
                    the first 4 columns are bounding box positions
                    (tl_x, tl_y, br_x, br_y) and the 5-th column is a score
                    between 0 and 1.
                det_labels (Tensor): A (n,) tensor where each item is the
                    predicted class label of the corresponding box.
        """
        cfg = self.test_cfg if cfg is None else cfg
        assert len(cls_scores) == len(bbox_preds) == len(mlvl_points)
        mlvl_bboxes = []
        mlvl_scores = []
        for cls_score, bbox_pred, points in zip(cls_scores, bbox_preds,
                                                mlvl_points):
            assert cls_score.size()[-2:] == bbox_pred.size()[-2:]
            scores = cls_score.permute(1, 2, 0).reshape(
                -1, self.cls_out_channels).contiguous().sigmoid()
            bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4).contiguous()
            # Keep only the top nms_pre candidates per level (by best class
            # score) before decoding, to bound the NMS workload.
            nms_pre = cfg.get('nms_pre', -1)
            if 0 < nms_pre < scores.shape[0]:
                max_scores, _ = scores.max(dim=1)
                _, topk_inds = max_scores.topk(nms_pre)
                points = points[topk_inds, :]
                bbox_pred = bbox_pred[topk_inds, :]
                scores = scores[topk_inds, :]
            bboxes = distance2bbox(points, bbox_pred, max_shape=img_shape)
            mlvl_bboxes.append(bboxes)
            mlvl_scores.append(scores)
        mlvl_bboxes = torch.cat(mlvl_bboxes)
        if rescale:
            mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor)
        mlvl_scores = torch.cat(mlvl_scores)
        padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1)
        # remind that we set FG labels to [0, num_class-1] since mmdet v2.0
        # BG cat_id: num_class
        mlvl_scores = torch.cat([mlvl_scores, padding], dim=1)
        if with_nms:
            det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores,
                                                    cfg.score_thr, cfg.nms,
                                                    cfg.max_per_img)
            return det_bboxes, det_labels
        else:
            return mlvl_bboxes, mlvl_scores
    def _get_points_single(self,
                           featmap_size,
                           stride,
                           dtype,
                           device,
                           flatten=False):
        """Get points according to feature map sizes."""
        h, w = featmap_size
        x_range = torch.arange(
            0, w * stride, stride, dtype=dtype, device=device)
        y_range = torch.arange(
            0, h * stride, stride, dtype=dtype, device=device)
        y, x = torch.meshgrid(y_range, x_range)
        # to be compatible with anchor points in ATSS
        if self.use_atss:
            points = torch.stack(
                (x.reshape(-1), y.reshape(-1)), dim=-1) + \
                stride * self.anchor_center_offset
        else:
            points = torch.stack(
                (x.reshape(-1), y.reshape(-1)), dim=-1) + stride // 2
        return points
    def get_targets(self, cls_scores, mlvl_points, gt_bboxes, gt_labels,
                    img_metas, gt_bboxes_ignore):
        """A wrapper for computing ATSS and FCOS targets for points in multiple
        images.
        Args:
            cls_scores (list[Tensor]): Box iou-aware scores for each scale
                level with shape (N, num_points * num_classes, H, W).
            mlvl_points (list[Tensor]): Points of each fpn level, each has
                shape (num_points, 2).
            gt_bboxes (list[Tensor]): Ground truth bboxes of each image,
                each has shape (num_gt, 4).
            gt_labels (list[Tensor]): Ground truth labels of each box,
                each has shape (num_gt,).
            img_metas (list[dict]): Meta information of each image, e.g.,
                image size, scaling factor, etc.
            gt_bboxes_ignore (None | Tensor): Ground truth bboxes to be
                ignored, shape (num_ignored_gts, 4).
        Returns:
            tuple:
                labels_list (list[Tensor]): Labels of each level.
                label_weights (Tensor/None): Label weights of all levels.
                bbox_targets_list (list[Tensor]): Regression targets of each
                    level, (l, t, r, b).
                bbox_weights (Tensor/None): Bbox weights of all levels.
        """
        if self.use_atss:
            return self.get_atss_targets(cls_scores, mlvl_points, gt_bboxes,
                                         gt_labels, img_metas,
                                         gt_bboxes_ignore)
        else:
            self.norm_on_bbox = False
            return self.get_fcos_targets(mlvl_points, gt_bboxes, gt_labels)
    def _get_target_single(self, *args, **kwargs):
        """Avoid ambiguity in multiple inheritance."""
        # Both parents define _get_target_single; dispatch explicitly.
        if self.use_atss:
            return ATSSHead._get_target_single(self, *args, **kwargs)
        else:
            return FCOSHead._get_target_single(self, *args, **kwargs)
    def get_fcos_targets(self, points, gt_bboxes_list, gt_labels_list):
        """Compute FCOS regression and classification targets for points in
        multiple images.
        Args:
            points (list[Tensor]): Points of each fpn level, each has shape
                (num_points, 2).
            gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image,
                each has shape (num_gt, 4).
            gt_labels_list (list[Tensor]): Ground truth labels of each box,
                each has shape (num_gt,).
        Returns:
            tuple:
                labels (list[Tensor]): Labels of each level.
                label_weights: None, to be compatible with ATSS targets.
                bbox_targets (list[Tensor]): BBox targets of each level.
                bbox_weights: None, to be compatible with ATSS targets.
        """
        labels, bbox_targets = FCOSHead.get_targets(self, points,
                                                    gt_bboxes_list,
                                                    gt_labels_list)
        label_weights = None
        bbox_weights = None
        return labels, label_weights, bbox_targets, bbox_weights
    def get_atss_targets(self,
                         cls_scores,
                         mlvl_points,
                         gt_bboxes,
                         gt_labels,
                         img_metas,
                         gt_bboxes_ignore=None):
        """A wrapper for computing ATSS targets for points in multiple images.
        Args:
            cls_scores (list[Tensor]): Box iou-aware scores for each scale
                level with shape (N, num_points * num_classes, H, W).
            mlvl_points (list[Tensor]): Points of each fpn level, each has
                shape (num_points, 2).
            gt_bboxes (list[Tensor]): Ground truth bboxes of each image,
                each has shape (num_gt, 4).
            gt_labels (list[Tensor]): Ground truth labels of each box,
                each has shape (num_gt,).
            img_metas (list[dict]): Meta information of each image, e.g.,
                image size, scaling factor, etc.
            gt_bboxes_ignore (None | Tensor): Ground truth bboxes to be
                ignored, shape (num_ignored_gts, 4). Default: None.
        Returns:
            tuple:
                labels_list (list[Tensor]): Labels of each level.
                label_weights (Tensor): Label weights of all levels.
                bbox_targets_list (list[Tensor]): Regression targets of each
                    level, (l, t, r, b).
                bbox_weights (Tensor): Bbox weights of all levels.
        """
        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
        assert len(featmap_sizes) == self.anchor_generator.num_levels
        device = cls_scores[0].device
        anchor_list, valid_flag_list = self.get_anchors(
            featmap_sizes, img_metas, device=device)
        label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1
        cls_reg_targets = ATSSHead.get_targets(
            self,
            anchor_list,
            valid_flag_list,
            gt_bboxes,
            img_metas,
            gt_bboxes_ignore_list=gt_bboxes_ignore,
            gt_labels_list=gt_labels,
            label_channels=label_channels,
            unmap_outputs=True)
        if cls_reg_targets is None:
            return None
        (anchor_list, labels_list, label_weights_list, bbox_targets_list,
         bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets
        bbox_targets_list = [
            bbox_targets.reshape(-1, 4) for bbox_targets in bbox_targets_list
        ]
        num_imgs = len(img_metas)
        # transform bbox_targets (x1, y1, x2, y2) into (l, t, r, b) format
        bbox_targets_list = self.transform_bbox_targets(
            bbox_targets_list, mlvl_points, num_imgs)
        labels_list = [labels.reshape(-1) for labels in labels_list]
        label_weights_list = [
            label_weights.reshape(-1) for label_weights in label_weights_list
        ]
        bbox_weights_list = [
            bbox_weights.reshape(-1) for bbox_weights in bbox_weights_list
        ]
        # Weights are returned flattened across levels (labels/targets stay
        # per-level) — the loss() hook relies on this layout when slicing.
        label_weights = torch.cat(label_weights_list)
        bbox_weights = torch.cat(bbox_weights_list)
        return labels_list, label_weights, bbox_targets_list, bbox_weights
    def transform_bbox_targets(self, decoded_bboxes, mlvl_points, num_imgs):
        """Transform bbox_targets (x1, y1, x2, y2) into (l, t, r, b) format.
        Args:
            decoded_bboxes (list[Tensor]): Regression targets of each level,
                in the form of (x1, y1, x2, y2).
            mlvl_points (list[Tensor]): Points of each fpn level, each has
                shape (num_points, 2).
            num_imgs (int): the number of images in a batch.
        Returns:
            bbox_targets (list[Tensor]): Regression targets of each level in
                the form of (l, t, r, b).
        """
        # TODO: Re-implemented in Class PointCoder
        assert len(decoded_bboxes) == len(mlvl_points)
        num_levels = len(decoded_bboxes)
        # Tile points per image so each level's points align with its targets.
        mlvl_points = [points.repeat(num_imgs, 1) for points in mlvl_points]
        bbox_targets = []
        for i in range(num_levels):
            bbox_target = bbox2distance(mlvl_points[i], decoded_bboxes[i])
            bbox_targets.append(bbox_target)
        return bbox_targets
    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        """Override the method in the parent class to avoid changing para's
        name."""
        # Intentionally a no-op: skip the parent's key remapping so checkpoint
        # parameter names are loaded unchanged.
        pass
| 44.249084
| 105
| 0.57489
|
4a0e46f2597b03f623f94d34fbae84f0ddc9ff76
| 7,284
|
py
|
Python
|
test/scripts/run_test.py
|
ZhiguoZh/oms-client-android
|
c278e866408cfe08fae068645b7bb83478646822
|
[
"Apache-2.0"
] | 1
|
2020-08-03T16:03:53.000Z
|
2020-08-03T16:03:53.000Z
|
test/scripts/run_test.py
|
ZhiguoZh/oms-client-android
|
c278e866408cfe08fae068645b7bb83478646822
|
[
"Apache-2.0"
] | null | null | null |
test/scripts/run_test.py
|
ZhiguoZh/oms-client-android
|
c278e866408cfe08fae068645b7bb83478646822
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import json
import os
import shutil
import subprocess
import sys
import time
HOME_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
TEST_PATH = os.path.join(HOME_PATH, 'test')
BASE_TEST_PATH = os.path.join(TEST_PATH, 'base')
CONF_TEST_PATH = os.path.join(TEST_PATH, 'conference/apiTest')
P2P_TEST_PATH = os.path.join(TEST_PATH, 'p2p/apiTest')
DEPS_PATH = os.path.join(HOME_PATH, 'dependencies')
CONF_TARGET_PACKAGE = 'oms.test.conference.apitest'
P2P_TARGET_PACKAGE = 'oms.test.p2p.apitest'
BASE_TARGET_PACKAGE = 'oms.test.base'
TEST_MODULES = ["':test:util'", "':test:base'", "':test:p2p:util'", "':test:p2p:apiTest'",
"':test:conference:util'", "':test:conference:apiTest'"]
LOGCAT_SUFFIX = str(int(time.time())) + '.log'
def analyse_result(result):
    """Count the succeeded cases recorded in the result file *result*.

    Each case that finished successfully leaves a line containing
    'OK (1 test)' in the instrumentation output, so the number of
    such lines equals the number of succeeded cases.
    """
    with open(result, 'r') as report:
        return sum(1 for line in report if 'OK (1 test)' in line)
def run_cases(module, cases, log_dir, device):
    # Run each instrumentation case of `module` over adb, appending the
    # runner output to a result file and the device logcat to a log file.
    # Returns the number of succeeded cases (via analyse_result).
    # `device` may be None, meaning "whatever single device adb sees".
    print '\n> running cases on device', device
    result_file = os.path.join(log_dir, module + '-result-' + LOGCAT_SUFFIX)
    logcat_file = os.path.join(log_dir, module + '-logcat-' + LOGCAT_SUFFIX)
    for case in cases:
        # NOTE(review): `device == None` would conventionally be
        # `device is None`; behavior is the same for None.
        adb = ['adb'] if device == None else ['adb', '-s', device]
        if module == 'base':
            target_package = BASE_TARGET_PACKAGE
        elif module == 'conference':
            target_package = CONF_TARGET_PACKAGE
        elif module == 'p2p':
            target_package = P2P_TARGET_PACKAGE
        # Clear the device log so the captured logcat covers only this case.
        clean_logcat = ['logcat', '-c']
        subprocess.call(adb + clean_logcat)
        am_cmd = ['shell', 'am', 'instrument', '-w', '-r', '-e', 'debug', 'false', '-e',
                  'class', target_package + '.' + case,
                  target_package + '.test/android.test.InstrumentationTestRunner']
        with open(result_file, 'a+') as rf:
            subprocess.call(adb + am_cmd, stdout=rf)
        # Dump this case's log lines, filtered by the target package tag.
        logcat_cmd = ['logcat', '-d', target_package]
        with open(logcat_file, 'a+') as lf:
            subprocess.call(adb + logcat_cmd, stdout=lf)
    print '> done.'
    print ' Result file: <LOG_DIR>/' + module + '-result-' + LOGCAT_SUFFIX
    print ' Log file: <LOG_DIR>/' + module + '-logcat-' + LOGCAT_SUFFIX
    return analyse_result(result_file)
def install_test(module, device):
    # Build and (re)install the app APK and the androidTest APK for the
    # given test module by invoking the repo's gradlew in that module's dir.
    # NOTE(review): `device` is accepted but unused here; gradle install
    # targets whatever device adb selects — TODO confirm intended.
    print '\n> building and installing test module', module
    if module == 'base':
        test_path = BASE_TEST_PATH
    elif module == 'conference':
        test_path = CONF_TEST_PATH
    elif module == 'p2p':
        test_path = P2P_TEST_PATH
    cmd = [HOME_PATH + '/gradlew', '-q', 'assembleDebug']
    subprocess.call(cmd, cwd=test_path)
    cmd = [HOME_PATH + '/gradlew', '-q', 'assembleDebugAndroidTest']
    subprocess.call(cmd, cwd=test_path)
    # Uninstall first so stale signatures/data cannot break the install.
    cmd = [HOME_PATH + '/gradlew', '-q', 'uninstallAll']
    subprocess.call(cmd, cwd=test_path)
    cmd = [HOME_PATH + '/gradlew', '-q', 'installDebug']
    subprocess.call(cmd, cwd=test_path)
    cmd = [HOME_PATH + '/gradlew', '-q', 'installDebugAndroidTest']
    subprocess.call(cmd, cwd=test_path)
    print '> done.'
def run_test(case_list, log_dir, device):
    # Drive the whole test run from a JSON case list and return True only
    # if every case in every module succeeded.
    # load test cases.
    # [ {'module': '<module>', 'cases': ['case']} ]
    with open(case_list, 'r') as case_file:
        objs = json.loads(case_file.read())
    result = True
    for obj in objs:
        install_test(obj['module'], device)
        succeed = run_cases(obj['module'], obj['cases'], log_dir, device)
        total = len(obj['cases'])
        # A single failed case in any module flips the overall result.
        result = result and (succeed == total)
        print '\n>', obj['module'] + ' result: All:', total, \
            'Succeed:', succeed, 'Failed:', total - succeed
    return result
def change_config():
    """Back up settings.gradle, then append the test modules to it.

    The pristine file is copied to ``settings.gradle.bk`` so that
    recover_config() can restore it after the run.
    """
    gradle_settings = os.path.join(HOME_PATH, 'settings.gradle')
    shutil.copyfile(gradle_settings, gradle_settings + '.bk')
    includes = ''.join('\ninclude ' + module for module in TEST_MODULES)
    with open(gradle_settings, 'a') as settings_file:
        settings_file.write(includes)
def recover_config():
    """Restore the original settings.gradle from the backup made by change_config()."""
    backup = os.path.join(HOME_PATH, 'settings.gradle.bk')
    original = os.path.join(HOME_PATH, 'settings.gradle')
    shutil.move(backup, original)
def build_libs(dependencies_dir):
    # Swap the user-supplied libwebrtc into dependencies/ (backing up the
    # checked-in one as libwebrtc.bk) and run the SDK packaging script.
    # Exits the process with status 1 if packaging fails.
    print '> building sdk libraries...'
    cmd = ['mv', os.path.join(DEPS_PATH, 'libwebrtc'), os.path.join(DEPS_PATH, 'libwebrtc.bk')]
    subprocess.call(cmd, cwd=DEPS_PATH)
    shutil.copytree(dependencies_dir, os.path.join(DEPS_PATH, 'libwebrtc'))
    cmd = ['python', HOME_PATH + '/tools/pack.py', '--skip-zip']
    # pack.py returns non-zero on failure; propagate as a hard exit.
    if subprocess.call(cmd):
        sys.exit(1)
def copy_libs():
    # Refresh test/libs from the freshly packed dist/libs, then hoist the
    # webrtc jar up into test/libs where the test projects expect it.
    print '> copying libs to dependency dirs...'
    testLibs = os.path.join(HOME_PATH, 'test/libs')
    # Remove any stale copy before copytree (which requires a fresh dest).
    if os.path.exists(testLibs):
        shutil.rmtree(testLibs)
    shutil.copytree(os.path.join(HOME_PATH, 'dist/libs'), testLibs)
    shutil.move(os.path.join(testLibs, 'webrtc/libwebrtc.jar'), testLibs)
    print '> done.'
def validate_caselist(case_list):
    # Return True when `case_list` names an existing file containing valid
    # JSON; print a diagnostic and return False otherwise.
    # check the existence of case list file.
    if not os.path.exists(case_list):
        print 'No case list file found:', case_list
        return False
    # check the format of case list file.
    try:
        with open(case_list, 'r') as case_file:
            json.load(case_file)
    except ValueError as e:
        # json.load raises ValueError on malformed JSON (Python 2).
        print 'Failed to load json:', e
        return False
    return True
def recover_deps():
    """Remove the generated libwebrtc tree and restore the backed-up original."""
    libwebrtc = os.path.join(DEPS_PATH, 'libwebrtc')
    shutil.rmtree(libwebrtc)
    subprocess.call(['mv', libwebrtc + '.bk', libwebrtc])
if __name__ == '__main__':
    # Command-line entry point: parse options, optionally rebuild the SDK
    # libs, patch settings.gradle, run all cases, then undo the patches.
    parser = argparse.ArgumentParser(description='Run android instrumentation tests.')
    parser.add_argument('--build-deps', dest='build', action='store_true', default=False,
                        help='Indicates if to build sdk libraries.')
    parser.add_argument('--caselist', dest='caselist',
                        default=os.path.join(TEST_PATH, 'case_list.json'),
                        help='Location of the case list json file.')
    parser.add_argument('--device', dest='device',
                        help='Id of the android device on which the test will run.'
                             'If there are multiple devices on the test host machine,'
                             'please indicate the device using this parameter.')
    parser.add_argument('--log-dir', dest='log_dir', default=TEST_PATH,
                        help='Location of the directory where logs for this test will output to.')
    parser.add_argument('--dependencies-dir', dest='dependencies_dir', required=True,
                        help='Location of the dependency libraries.')
    args = parser.parse_args()
    # Bail out early on a missing or malformed case list.
    if not validate_caselist(args.caselist):
        sys.exit(1)
    # generate sdk libraries.
    if args.build:
        build_libs(args.dependencies_dir)
        copy_libs()
    # change settings.gradle to include test modules.
    change_config()
    result = run_test(args.caselist, args.log_dir, args.device)
    # recover the settings.gradle
    recover_config()
    # recover deps_path
    recover_deps()
    # collect test results
    # Exit status 0 on overall success (result True -> not result == False -> 0).
    sys.exit(not result)
| 36.42
| 98
| 0.630972
|
4a0e46fb7232c6ef10c8de5d01f12524b86cb16d
| 5,207
|
py
|
Python
|
src/m3r_accumulating_sequences.py
|
mccormtj/MoreSequences
|
8460701671afaa5d7620c8bbb41f88ff85291189
|
[
"MIT"
] | null | null | null |
src/m3r_accumulating_sequences.py
|
mccormtj/MoreSequences
|
8460701671afaa5d7620c8bbb41f88ff85291189
|
[
"MIT"
] | null | null | null |
src/m3r_accumulating_sequences.py
|
mccormtj/MoreSequences
|
8460701671afaa5d7620c8bbb41f88ff85291189
|
[
"MIT"
] | null | null | null |
"""
This module demonstrates BUILDING-UP a new SEQUENCE,
one item at a time, using the ACCUMULATOR pattern.
-- We will later see a more efficient way to build-up and/or modify
sequences, namely by MUTATING their elements.
Authors: David Mutchler, Dave Fisher, Valerie Galluzzi, Amanda Stouder,
their colleagues and Tyler McCormick.
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
# ----------------------------------------------------------------------
# DONE: 2. READ the program below and RUN it.
#
# When you have read it, asking questions as needed,
# and you feel that you understand,
# for each of LISTS, STRINGS and TUPLES:
# -- HOW to BUILD UP them, using the ACCUMULATOR pattern.
# then:
# change the above TO DO to DONE.
# ----------------------------------------------------------------------
def main():
    """
    Demonstrates building sequences by using the Accumulator pattern.
    """
    separator = '-----------------------------------------------------------'
    demos = [('LIST', build_list), ('TUPLE', build_tuple), ('STRING', build_string)]
    for kind, demo in demos:
        print()
        print(separator)
        print('Build and then print a {}:'.format(kind))
        print(separator)
        demo()
def build_list():
    """
    Demonstrates building a new LIST by using the Accumulator pattern.
    We will later see a more efficient way to build/modify lists,
    namely, by mutating the elements of the list.
    """
    # Accumulator pattern for LISTs:
    #   1. Initialize the accumulator to the empty list [] BEFORE the loop.
    #   2. Each time through the loop, concatenate (with +) the current
    #      list and a one-element list holding the new item, then
    #      re-assign the accumulator to that new list.
    #   3. After the loop, the accumulator holds the entire built-up list.
    # This builds and prints [0, 1, 4, 9, 16, 25, 36, 49, 64, 81].
    squares = []
    for number in range(10):
        squares = squares + [number ** 2]
    print(squares)
def build_tuple():
    """
    Demonstrates building a TUPLE by using the Accumulator pattern.
    -- A tuple is just like a list except:
         1. It is IMMUTABLE: its elements cannot be changed in place.
         2. Its notation uses ()s instead of []s, and a one-element
            tuple requires a COMMA after the item.
    """
    # Same accumulator pattern as for lists, but:
    #   -- start from the empty TUPLE (), and
    #   -- concatenate one-element TUPLEs: (item,)  <- note the comma.
    # This builds and prints (0, 1, 4, 9, 16, 25, 36, 49, 64, 81).
    accumulated = ()
    for number in range(10):
        accumulated += (number ** 2,)
    print(accumulated)
def build_string():
    """
    Demonstrates building a STRING by using the Accumulator pattern.
    We will later see a more efficient way to build/modify strings,
    namely, by using the split/join methods.
    """
    # Same accumulator pattern as for lists, but:
    #   -- start from the empty STRING '', and
    #   -- concatenate string pieces; str(...) converts each number.
    # Each item is followed by a space, so the printed line is
    # "0 1 4 9 16 25 36 49 64 81 " (with a trailing space).
    text = ''
    for number in range(10):
        text += str(number ** 2) + ' '
    print(text)
# ----------------------------------------------------------------------
# Calls main to start the ball rolling.
# NOTE(review): this runs on import as well; the conventional
# `if __name__ == '__main__':` guard is omitted, presumably
# intentionally in this course template.
# ----------------------------------------------------------------------
main()
| 38.007299
| 72
| 0.48262
|
4a0e47af2b0afd3e04a62df700cf850c3576a93e
| 419
|
py
|
Python
|
models/model_configs.py
|
AkibMashrur/AI-Finbot
|
ba2a6f876d5a8dcbc19b713219ddc5574e4032de
|
[
"Apache-2.0"
] | null | null | null |
models/model_configs.py
|
AkibMashrur/AI-Finbot
|
ba2a6f876d5a8dcbc19b713219ddc5574e4032de
|
[
"Apache-2.0"
] | null | null | null |
models/model_configs.py
|
AkibMashrur/AI-Finbot
|
ba2a6f876d5a8dcbc19b713219ddc5574e4032de
|
[
"Apache-2.0"
] | 1
|
2021-11-09T13:01:51.000Z
|
2021-11-09T13:01:51.000Z
|
class SDEnet_configs:
    """Hyper-parameter container for the SDE-Net model.

    All settings are plain class attributes; read them as
    ``SDEnet_configs.<name>`` — the class is never instantiated.
    """
    # -- network structure --
    drift_depth = 8
    diffusion_depth = 8
    in_nodes = 20
    latent_nodes = 64
    diffusion_nodes = 128
    # -- optimizer / compilation --
    lr_1 = 1e-6
    momentum_1 = 0.9
    lr_2 = 0.01
    momentum_2 = 0.9
    weight_decay = 5e-4
    # -- training --
    noise_scale = 2
    # -- evaluation --
    eval_iters = 100
    # -- prediction --
    pred_iters = 1000
| 17.458333
| 25
| 0.622912
|
4a0e4813b944ca5e9624035d311108a4792bb240
| 2,950
|
py
|
Python
|
backend/news/settings/base.py
|
FreelanceDev217/news
|
c6ab00622883a1b6b4cdc7034fbabb62d2a66c5e
|
[
"MIT"
] | null | null | null |
backend/news/settings/base.py
|
FreelanceDev217/news
|
c6ab00622883a1b6b4cdc7034fbabb62d2a66c5e
|
[
"MIT"
] | 7
|
2020-06-06T01:29:43.000Z
|
2022-02-10T09:54:45.000Z
|
backend/news/settings/base.py
|
FreelanceDev217/news
|
c6ab00622883a1b6b4cdc7034fbabb62d2a66c5e
|
[
"MIT"
] | null | null | null |
# https://docs.djangoproject.com/en/1.10/ref/settings/
import os
from decouple import config # noqa
# Repository root: three directory levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


def base_dir_join(*args):
    """Join *args* onto the repository root directory (BASE_DIR)."""
    return os.path.join(BASE_DIR, *args)
SITE_ID = 1
SECURE_HSTS_PRELOAD = True
# NOTE(review): DEBUG is True in this *base* settings module; a
# production settings module presumably overrides it — verify.
DEBUG = True
ADMINS = (
    ('Admin', 'foo@example.com'),
)
# Custom user model lives in the local `users` app.
AUTH_USER_MODEL = 'users.User'
ALLOWED_HOSTS = []
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_js_reverse',
    'webpack_loader',
    'import_export',
    # Project-local apps.
    'common',
    'users',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    # WhiteNoise serves static files; must come right after SecurityMiddleware.
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'news.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [base_dir_join('templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # Project-specific processors exposing Sentry DSN and the
                # deployed commit SHA to templates.
                'common.context_processors.sentry_dsn',
                'common.context_processors.commit_sha',
            ],
        },
    },
]
WSGI_APPLICATION = 'news.wsgi.application'
# Django's default stack of password validators.
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization / time zone settings.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Frontend assets live one level above the backend package.
STATICFILES_DIRS = (
    base_dir_join('../frontend'),
)
# Webpack
WEBPACK_LOADER = {
    'DEFAULT': {
        'CACHE': False, # on DEBUG should be False
        'STATS_FILE': base_dir_join('../webpack-stats.json'),
        'POLL_INTERVAL': 0.1,
        # Fix: '\.' in a plain string is an invalid escape sequence
        # (DeprecationWarning, SyntaxWarning on newer Pythons); raw
        # strings keep the regex text byte-identical.
        # NOTE(review): the dots in '.hot-update.js' are unescaped regex
        # '.' (match any char) — presumably meant as literal dots, but
        # left as-is to preserve the matching behavior.
        'IGNORE': [r'.+\.hot-update.js', r'.+\.map']
    }
}
# Celery
# Only accept/emit JSON-serialized tasks and results.
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
# Acknowledge tasks after completion so a crashed worker's task is redelivered.
CELERY_ACKS_LATE = True
# Sentry
# Read from the environment via python-decouple; empty string disables Sentry.
SENTRY_DSN = config('SENTRY_DSN', default='')
COMMIT_SHA = config('HEROKU_SLUG_COMMIT', default='')
| 23.6
| 91
| 0.665763
|
4a0e486daa851f7ad2b8b79ec7634739b69b33c3
| 1,737
|
py
|
Python
|
aliyun-python-sdk-devops-rdc/aliyunsdkdevops_rdc/request/v20200303/GetPipelineInstanceBuildNumberStatusRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-devops-rdc/aliyunsdkdevops_rdc/request/v20200303/GetPipelineInstanceBuildNumberStatusRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-devops-rdc/aliyunsdkdevops_rdc/request/v20200303/GetPipelineInstanceBuildNumberStatusRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class GetPipelineInstanceBuildNumberStatusRequest(RpcRequest):
	"""RPC request for the devops-rdc ``GetPipelineInstanceBuildNumberStatus`` API (2020-03-03).

	BuildNum and UserPk are sent as POST body parameters; OrgId and
	PipelineId are sent as query parameters.
	"""

	def __init__(self):
		RpcRequest.__init__(self, 'devops-rdc', '2020-03-03', 'GetPipelineInstanceBuildNumberStatus')
		self.set_method('POST')

	def get_BuildNum(self): # Long
		"""Return the BuildNum body parameter."""
		body = self.get_body_params()
		return body.get('BuildNum')

	def set_BuildNum(self, BuildNum): # Long
		"""Set the BuildNum body parameter."""
		self.add_body_params('BuildNum', BuildNum)

	def get_UserPk(self): # String
		"""Return the UserPk body parameter."""
		body = self.get_body_params()
		return body.get('UserPk')

	def set_UserPk(self, UserPk): # String
		"""Set the UserPk body parameter."""
		self.add_body_params('UserPk', UserPk)

	def get_OrgId(self): # String
		"""Return the OrgId query parameter."""
		query = self.get_query_params()
		return query.get('OrgId')

	def set_OrgId(self, OrgId): # String
		"""Set the OrgId query parameter."""
		self.add_query_param('OrgId', OrgId)

	def get_PipelineId(self): # Long
		"""Return the PipelineId query parameter."""
		query = self.get_query_params()
		return query.get('PipelineId')

	def set_PipelineId(self, PipelineId): # Long
		"""Set the PipelineId query parameter."""
		self.add_query_param('PipelineId', PipelineId)
| 36.1875
| 96
| 0.751295
|
4a0e4a63b3bb32e2ff62485d3d8dabc33406befd
| 872
|
py
|
Python
|
catalyst/utils/tests/test_image.py
|
sergeyshilin/catalyst
|
f4dfaac7bc3fe98b2a0a9cf0b4347b100750f82f
|
[
"Apache-2.0"
] | 3
|
2019-11-02T05:37:06.000Z
|
2020-01-13T02:26:07.000Z
|
catalyst/utils/tests/test_image.py
|
sergeyshilin/catalyst
|
f4dfaac7bc3fe98b2a0a9cf0b4347b100750f82f
|
[
"Apache-2.0"
] | null | null | null |
catalyst/utils/tests/test_image.py
|
sergeyshilin/catalyst
|
f4dfaac7bc3fe98b2a0a9cf0b4347b100750f82f
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import torch
from torchvision.transforms.functional import normalize, to_tensor
from catalyst.utils.image import tensor_to_ndimage, \
_IMAGENET_MEAN, _IMAGENET_STD
def test_tensor_to_ndimage():
    """Round-trip check: ImageNet-normalized tensors converted back with
    tensor_to_ndimage must recover the original images (in uint8 exactly,
    in float32 within tolerance), for both batched and single inputs."""
    orig_images = np.random.randint(0, 255, (2, 20, 10, 3), np.uint8)
    normalized = [
        normalize(to_tensor(image), _IMAGENET_MEAN, _IMAGENET_STD)
        for image in orig_images
    ]
    torch_images = torch.stack(normalized, dim=0)
    byte_images = tensor_to_ndimage(torch_images, dtype=np.uint8)
    assert np.allclose(byte_images, orig_images)
    float_images = tensor_to_ndimage(torch_images, dtype=np.float32)
    expected_floats = orig_images / 255
    assert np.allclose(float_images, expected_floats, atol=1e-3, rtol=1e-3)
    # A single (unbatched) tensor must convert the same way.
    single = tensor_to_ndimage(torch_images[0])
    assert np.allclose(single, expected_floats[0], atol=1e-3, rtol=1e-3)
| 27.25
| 77
| 0.678899
|
4a0e4ad9791c16168237c8c61eee456a06753b75
| 1,003
|
py
|
Python
|
test/crawler/user_info.py
|
coding-voyage/insta-crawler
|
c5a50af867f3ebbcfaf96379dd615393f4908b0c
|
[
"MIT"
] | null | null | null |
test/crawler/user_info.py
|
coding-voyage/insta-crawler
|
c5a50af867f3ebbcfaf96379dd615393f4908b0c
|
[
"MIT"
] | null | null | null |
test/crawler/user_info.py
|
coding-voyage/insta-crawler
|
c5a50af867f3ebbcfaf96379dd615393f4908b0c
|
[
"MIT"
] | null | null | null |
import logging
import unittest
from crawler.user_info import get_user_info_html_doc, get_followers, search_related_to, get_related_users_by
# NOTE(review): getLogger(__file__) keys the logger by file path;
# getLogger(__name__) is the usual convention — confirm intended.
logger = logging.getLogger(__file__)
# Public Instagram account used as a stable fixture for these tests.
TEST_USER_ID = "zuck"
class UserInfoTest(unittest.TestCase):
    """Integration tests for crawler.user_info.

    NOTE(review): these tests hit the live profile of TEST_USER_ID, so
    they depend on network access and on the account's current state
    (e.g. the follower-count floor below).
    """
    def test_get_user_info_html_doc(self):
        # The fetched profile page should be a complete HTML document.
        content = get_user_info_html_doc(TEST_USER_ID)
        self.assertTrue(content.startswith('<!DOCTYPE html>'))
    def test_get_followers(self):
        # The fixture account is known to exceed this follower floor.
        self.assertGreater(get_followers(TEST_USER_ID), 5335000)
    def test_search_related_to(self):
        # Fix: the original `assertTrue(result, list)` passed `list` as the
        # failure *message*, so no type check ever happened. The obvious
        # intent is an isinstance check.
        self.assertIsInstance(search_related_to(TEST_USER_ID), list)
    def test_get_related_users_by_position(self):
        # Every related-user record must carry these three keys.
        dict_position_user = get_related_users_by(TEST_USER_ID)
        for i in dict_position_user:
            self.assertIn("pk", dict_position_user[i])
            self.assertIn("full_name", dict_position_user[i])
            self.assertIn("follower_count", dict_position_user[i])
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 31.34375
| 108
| 0.743769
|
4a0e4b0f5b4adf9975b07fd27de07f6eaaf71be8
| 3,690
|
py
|
Python
|
parlai/tasks/opensubtitles_ko/build.py
|
hwaranlee/ParlAI
|
5efebdd889851cf80e496d45f0ebc324e0fde001
|
[
"BSD-3-Clause"
] | 1
|
2017-11-05T13:16:53.000Z
|
2017-11-05T13:16:53.000Z
|
parlai/tasks/opensubtitles_ko/build.py
|
hwaranlee/ParlAI
|
5efebdd889851cf80e496d45f0ebc324e0fde001
|
[
"BSD-3-Clause"
] | null | null | null |
parlai/tasks/opensubtitles_ko/build.py
|
hwaranlee/ParlAI
|
5efebdd889851cf80e496d45f0ebc324e0fde001
|
[
"BSD-3-Clause"
] | 1
|
2019-01-12T03:37:12.000Z
|
2019-01-12T03:37:12.000Z
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
# Download and build the data if it does not exist.
import parlai.core.build_data as build_data
import gzip
import os
import re
from konlpy.tag import Komoran
from examples.bot import Bot
# Module-level singletons created at import time: the Komoran Korean
# morphological analyzer and the pretrained NLG bot loaded from the
# experiment checkpoint/dictionary paths below.
komoran = Komoran()
nlg = Bot('exp/exp-emb200-hs1024-lr0.0001-oknlg/exp-emb200-hs1024-lr0.0001-oknlg'
          ,'exp-opensub_ko_nlg/dict_file_100000.dict', True)
def preprocess(sent):
    """Tokenize *sent* into Korean morphemes and rejoin them with spaces."""
    morphemes = komoran.morphs(sent)
    return ' '.join(morphemes)
def postprocess(sent):
    # Strip the generator's end-of-sequence marker.
    sent = sent.replace(' __END__', '')
    # Glue the final single character back onto the preceding token
    # (drops the space before the last character, e.g. sentence-final
    # punctuation split off by the tokenizer).
    sent = re.sub(' (.)$', '\\1', sent)
    # NOTE(review): debug print left in — presumably intentional for tracing.
    print(sent)
    return nlg.reply(sent)
def create_fb_format(inpath, outpath):
    # Convert the OpenSubtitles XML dumps under `inpath` into FB-dialog
    # format: one conversation per .gz file, alternating turns paired as
    # "<id> <query>\t<reply>". Conversations are split ~80/10/10 into
    # train/test/valid by conversation id modulo 10.
    print('[building fbformat]')
    ftrain = open(os.path.join(outpath, 'train.txt'), 'w')
    fvalid = open(os.path.join(outpath, 'valid.txt'), 'w')
    ftest = open(os.path.join(outpath, 'test.txt'), 'w')
    conv_id = 0
    # find all the files.
    for root, _subfolder, files in os.walk(inpath):
        for f in files:
            # Skip one specific known-bad archive in the 2016 dump.
            if root.endswith('ko/2016/5833686') and f == '6769905.xml.gz':
                continue
            if f.endswith('.gz'):
                dialog = ''
                conv_id = conv_id + 1
                with gzip.open(os.path.join(root, f), 'r') as f1:
                    # print(str(conv_id) + ': ' + f)
                    words = ''
                    line_id = 1
                    turn_id = 1
                    for line in f1:
                        line=line.decode('utf-8')
                        # <s ...> marks a new subtitle sentence: flush the
                        # words collected for the previous one.
                        if re.search('<s .*id="', line):
                            # new sentence
                            if len(words) > 0:
                                # Even turns start a new exchange line;
                                # odd turns complete it as the reply.
                                if (turn_id % 2) == 0:
                                    dialog += str(line_id) + ' ' + preprocess(words)
                                else:
                                    dialog += '\t' + preprocess(words) + '\n'
                                    line_id += 1
                                turn_id = turn_id + 1
                            words = ''
                        else:
                            # <w ...> holds one token of the current sentence.
                            if re.search('<w .*id="', line):
                                word = line[line.find('>')+1:line.find('</w')]
                                words = words + ' ' + word.replace('\t', ' ')
                # Route this conversation to a split file: every 10th to
                # test, every 10th+1 to valid, the rest to train.
                handle = ftrain
                if (conv_id % 10) == 0:
                    handle = ftest
                if (conv_id % 10) == 1:
                    handle = fvalid
                handle.write(dialog + '\n')
    ftrain.close()
    fvalid.close()
    ftest.close()
def build(opt):
    # Build the OpenSubtitlesKo dataset under opt['datapath'] unless a
    # built marker for this version already exists.
    dpath = os.path.join(opt['datapath'], 'OpenSubtitlesKo')
    version = None
    if not build_data.built(dpath, version_string=version):
        print('[building data: ' + dpath + ']')
        if build_data.built(dpath):
            # An older version exists, so remove these outdated files.
            build_data.remove_dir(dpath)
        build_data.make_dir(dpath)
        # Download the data.
        # NOTE(review): the download/untar steps are commented out — the
        # raw xml/ko dump is expected to already exist under dpath.
        # url = ('http://opus.lingfil.uu.se/download.php?f=OpenSubtitles/en.tar.gz')
        # build_data.download(url, dpath, 'OpenSubtitles.tar.gz')
        # build_data.untar(dpath, 'OpenSubtitles.tar.gz', deleteTar=False)
        create_fb_format(os.path.join(dpath, 'xml', 'ko'), dpath)
        # Mark the data as built.
        build_data.mark_done(dpath, version_string=version)
| 36.176471
| 84
| 0.513008
|
4a0e4b12ad0efe6ce52590fbf9eda61fe9fbc6b0
| 6,681
|
py
|
Python
|
anaconda_project/project_file.py
|
nbublikov/depcheck
|
2b232382c86f26b7de4d9c50312a647b185f373b
|
[
"Apache-2.0"
] | null | null | null |
anaconda_project/project_file.py
|
nbublikov/depcheck
|
2b232382c86f26b7de4d9c50312a647b185f373b
|
[
"Apache-2.0"
] | null | null | null |
anaconda_project/project_file.py
|
nbublikov/depcheck
|
2b232382c86f26b7de4d9c50312a647b185f373b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2016, Anaconda, Inc. All rights reserved.
#
# Licensed under the terms of the BSD 3-Clause License.
# The full license is in the file LICENSE.txt, distributed with this software.
# -----------------------------------------------------------------------------
"""Project file loading and manipulation."""
from __future__ import absolute_import
import os
from anaconda_project.yaml_file import YamlFile
from anaconda_project.env_spec import EnvSpec
import anaconda_project.internal.conda_api as conda_api
# these are in the order we'll use them if multiple are present
possible_project_file_names = ("anaconda-project.yml", "anaconda-project.yaml", "kapsel.yml", "kapsel.yaml")
# Name used when creating a brand-new project file.
DEFAULT_PROJECT_FILENAME = possible_project_file_names[0]
def _empty_default_env_spec():
    """Return the fallback env-spec list: a single empty spec named "default"."""
    spec = EnvSpec(name="default", channels=[], conda_packages=())
    return (spec, )
class ProjectFile(YamlFile):
    """Represents the ``anaconda-project.yml`` file which describes the project across machines/users.
    State that's specific to a machine/user/checkout/deployment
    should instead be in ``LocalStateFile``. ``ProjectFile``
    would normally be checked in to source control or otherwise
    act as a shared resource.
    Be careful with creating your own instance of this class,
    because you have to think about when other code might load or
    save in a way that conflicts with your loads and saves.
    """
    # Skeleton written to disk when a brand-new project file is created.
    template = '''
# This is an Anaconda project file.
#
# Here you can describe your project and how to run it.
# Use `anaconda-project run` to run the project.
# The file is in YAML format, please see http://www.yaml.org/start.html for more.
#
#
# Set the 'name' key to name your project
#
name:
#
# Set the 'icon' key to give your project an icon
#
icon:
#
# Set a one-sentence-or-so 'description' key with project details
#
description:
#
# In the commands section, list your runnable scripts, notebooks, and other code.
# Use `anaconda-project add-command` to add commands.
#
commands: {}
#
# In the variables section, list any environment variables your code depends on.
# Use `anaconda-project add-variable` to add variables.
#
variables: {}
#
# In the services section, list any services that should be
# available before your code runs.
# Use `anaconda-project add-service` to add services.
#
services: {}
#
# In the downloads section, list any URLs to download to local files
# before your code runs.
# Use `anaconda-project add-download` to add downloads.
#
downloads: {}
#
# In the packages section, list any packages that must be installed
# before your code runs.
# Use `anaconda-project add-packages` to add packages.
#
packages: []
#
# In the channels section, list any Conda channel URLs to be searched
# for packages.
#
# For example,
#
# channels:
#    - mychannel
#
channels: []
#
# In the platforms section, list platforms the project should work on
# Examples: "linux-64", "osx-64", "win-64"
# Use `anaconda-project add-platforms` to add platforms.
#
platforms: []
#
# You can define multiple, named environment specs.
# Each inherits any global packages or channels,
# but can have its own unique ones also.
# Use `anaconda-project add-env-spec` to add environment specs.
#
env_specs: {}
'''
    @classmethod
    def load_for_directory(cls, directory, default_env_specs_func=_empty_default_env_spec):
        """Load the project file from the given directory, even if it doesn't exist.
        If the directory has no project file, the loaded
        ``ProjectFile`` will be empty. It won't actually be
        created on disk unless you call ``save()``.
        If the file has syntax problems, this sets the
        ``corrupted`` and ``corrupted_error_message`` properties,
        and attempts to modify the file will raise an
        exception. If the project file has semantic problems, they
        are not detected by this class but are reported by the
        ``Project`` class.
        Args:
            directory (str): path to the project directory
            default_env_specs_func (function makes list of EnvSpec): if file is created, use these
        Returns:
            a new ``ProjectFile``
        """
        # Prefer the first existing candidate filename, in priority order.
        for name in possible_project_file_names:
            path = os.path.join(directory, name)
            if os.path.isfile(path):
                return ProjectFile(path)
        # No file exists yet: target the default name (created on save()).
        return ProjectFile(os.path.join(directory, DEFAULT_PROJECT_FILENAME), default_env_specs_func)
    def __init__(self, filename, default_env_specs_func=_empty_default_env_spec):
        """Construct a ``ProjectFile`` with the given filename and requirement registry.
        It's easier to use ``ProjectFile.load_for_directory()`` in most cases.
        If the file has syntax problems, this sets the
        ``corrupted`` and ``corrupted_error_message`` properties,
        and attempts to modify the file will raise an
        exception. If the project file has semantic problems, they
        are not detected by this class but are reported by the
        ``Project`` class.
        Args:
            filename (str): path to the project file
        """
        # Stored before super().__init__ because the base constructor may
        # trigger _fill_default_content(), which reads this attribute.
        self._default_env_specs_func = default_env_specs_func
        super(ProjectFile, self).__init__(filename)
    def _fill_default_content(self, as_json):
        """Populate the default JSON content for a newly created file.

        Names the project after its directory, records the default
        platforms, and writes the default env specs. When there is a
        single env spec, its packages/channels/platforms are hoisted to
        the file's global sections.
        """
        as_json['name'] = os.path.basename(os.path.dirname(self.filename))
        as_json['platforms'].extend(conda_api.default_platforms_with_current())
        assert self._default_env_specs_func is not None
        default_env_specs = self._default_env_specs_func()
        assert default_env_specs is not None
        for env_spec in default_env_specs:
            as_json['env_specs'][env_spec.name] = env_spec.to_json()
        if len(default_env_specs) == 1:
            # if there's only one env spec, keep it for name/description
            # and put the packages and channels up in the global sections
            spec_name = next(iter(as_json['env_specs']))
            spec_json = as_json['env_specs'][spec_name]
            def move_list_elements(src, dest):
                # we want to preserve the dest list object with comments
                del dest[:]
                dest.extend(src)
                del src[:]
            if 'packages' in spec_json:
                move_list_elements(spec_json['packages'], as_json['packages'])
            if 'channels' in spec_json:
                move_list_elements(spec_json['channels'], as_json['channels'])
            if 'platforms' in spec_json:
                move_list_elements(spec_json['platforms'], as_json['platforms'])
| 33.572864
| 108
| 0.673402
|
4a0e4bfe6cd4ec16dea8b7daced9d8738ad3bc6a
| 8,970
|
py
|
Python
|
userbot/modules/offline.py
|
ClownSkyz/Man-Userbot
|
f85add4707dab2af1dcf232737ed8ab98f32ceed
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/offline.py
|
ClownSkyz/Man-Userbot
|
f85add4707dab2af1dcf232737ed8ab98f32ceed
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/offline.py
|
ClownSkyz/Man-Userbot
|
f85add4707dab2af1dcf232737ed8ab98f32ceed
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
import time
from datetime import datetime
from random import choice, randint
from telethon.events import NewMessage, StopPropagation
from telethon.tl.functions.account import UpdateProfileRequest
from userbot import AFKREASON, BOTLOG_CHATID, PM_AUTO_BAN, bot, owner
from userbot.events import man_cmd
# ========================= CONSTANTS ============================
# Canned away-messages (Indonesian); `owner` comes from the userbot config.
AFKSTR = [
    f"**Maaf {owner} Lagi Sibuk!**",
    f"**Maaf {owner} Lagi Sibuk Tunggu Sampai Online!**",
    f"**{owner} Lagi Sibuk Tunggulah Sampai Online**",
    f"**Maaf {owner} Lagi Sibuk!**",
]
# Mutable module state shared by the handlers below via `global`.
ISAFK = False       # whether busy/AFK mode is currently active
USER_AFK = {}
afk_time = None     # datetime when AFK mode was switched on
afk_start = {}
# =================================================================
@bot.on(man_cmd(outgoing=True, pattern=r"off(?: |$)(.*)"))
async def set_afk(afk_e):
    # Turn on busy/AFK mode. Optional text after `.off` is stored as the
    # reason, a "【 Sibuk 】" marker is appended to the profile last name,
    # and the activation is logged to the bot-log chat.
    string = afk_e.pattern_match.group(1)
    global ISAFK
    global AFKREASON
    global USER_AFK
    global afk_time
    global afk_start
    global afk_end
    user = await afk_e.client.get_me()
    USER_AFK = {}
    afk_time = None
    afk_end = {}
    # Remember when AFK started (second precision).
    start_1 = datetime.now()
    afk_start = start_1.replace(microsecond=0)
    if string:
        AFKREASON = string
        await afk_e.edit(
            f"**۩۩ {owner} Sedang Sibuk ۩۩**\
            \n✦҈͜͡➳ **Karena :** `{string}`"
        )
    else:
        await afk_e.edit(f"**۩۩ {owner} Sedang Sibuk ۩۩**")
    # Append the busy marker to the last name (or set it as the whole
    # last name when the account has none).
    if user.last_name:
        await afk_e.client(
            UpdateProfileRequest(
                first_name=user.first_name, last_name=user.last_name + "【 Sibuk 】"
            )
        )
    else:
        await afk_e.client(
            UpdateProfileRequest(first_name=user.first_name, last_name="【 Sibuk 】")
        )
    if BOTLOG_CHATID:
        await afk_e.client.send_message(BOTLOG_CHATID, f"#OFF\n**{owner} Sedang Sibuk!**")
    ISAFK = True
    afk_time = datetime.now()
    raise StopPropagation
@bot.on(NewMessage(outgoing=True))
async def type_afk_is_not_true(notafk):
    """Clear the AFK state on the owner's first outgoing message.

    Restores the original last name, posts a short self-deleting
    "back online" note and, when a bot-log chat is configured, reports
    how many messages arrived from how many chats while AFK.
    """
    import asyncio  # local import: only needed for the non-blocking delay

    global ISAFK
    global COUNT_MSG
    global USERS
    global AFKREASON
    global USER_AFK
    global afk_time
    global afk_start
    global afk_end
    # The counters are first assigned by the incoming-message handlers;
    # guard against returning from AFK before any message was received.
    if "COUNT_MSG" not in globals():
        COUNT_MSG = 0
    if "USERS" not in globals():
        USERS = {}
    user = await notafk.client.get_me()
    last = user.last_name
    # set_afk() appends exactly this suffix; strip it by length.  The old
    # check compared against "【 OFF 】" and sliced [:-12], which never
    # matched, so the tag was never removed and any real last name was
    # wiped by the fallback below.
    afk_tag = "【 Sibuk 】"
    if last and last.endswith(afk_tag):
        last1 = last[: -len(afk_tag)]
    else:
        # No tag present: keep the current last name instead of erasing it.
        last1 = last or ""
    back_alive = datetime.now()
    afk_end = back_alive.replace(microsecond=0)
    if ISAFK:
        ISAFK = False
        msg = await notafk.respond(f"**{owner} Aktif Kembali!**")
        # asyncio.sleep keeps the event loop responsive; the previous
        # time.sleep(7) froze every other handler for the duration.
        await asyncio.sleep(7)
        await msg.delete()
        await notafk.client(
            UpdateProfileRequest(first_name=user.first_name, last_name=last1)
        )
        if BOTLOG_CHATID:
            await notafk.client.send_message(
                BOTLOG_CHATID,
                "Anda Mendapatkan "
                + str(COUNT_MSG)
                + " Pesan Dari "
                + str(len(USERS))
                + " Obrolan Saat Anda OFFLINE",
            )
            # One log line per chat that messaged while AFK.
            for i in USERS:
                name = await notafk.client.get_entity(i)
                name0 = str(name.first_name)
                await notafk.client.send_message(
                    BOTLOG_CHATID,
                    "["
                    + name0
                    + "](tg://user?id="
                    + str(i)
                    + ")"
                    + " Mengirim Mu "
                    + "`"
                    + str(USERS[i])
                    + " Pesan`",
                )
        COUNT_MSG = 0
        USERS = {}
        AFKREASON = None
@bot.on(NewMessage(incoming=True))
async def mention_afk(mention):
    """Auto-reply when the owner is mentioned in a group while AFK.

    Replies on the first mention per sender, then randomly every 2-4
    messages, including a human-readable "away since" duration.
    """
    from datetime import timedelta  # local: module-level `datetime` is the class

    global COUNT_MSG
    global USERS
    global ISAFK
    global USER_AFK
    global afk_time
    global afk_start
    global afk_end
    # These counters are created lazily; make sure they exist before use
    # (the old code raised NameError on the very first mention).
    if "COUNT_MSG" not in globals():
        COUNT_MSG = 0
    if "USERS" not in globals():
        USERS = {}
    await mention.client.get_me()
    back_alivee = datetime.now()
    afk_end = back_alivee.replace(microsecond=0)
    afk_since = "**Terakhir Online**"
    if mention.message.mentioned and not (await mention.get_sender()).bot and ISAFK:
        now = datetime.now()
        datime_since_afk = now - afk_time
        # total_seconds() includes whole days; `.seconds` alone wraps at 24h,
        # so multi-day AFK durations previously reported as hours/minutes.
        elapsed = datime_since_afk.total_seconds()
        days = elapsed // (24 * 3600)
        elapsed %= 24 * 3600
        hours = elapsed // 3600
        elapsed %= 3600
        minutes = elapsed // 60
        elapsed %= 60
        seconds = elapsed
        if days == 1:
            afk_since = "**Kemarin**"
        elif days > 1:
            if days > 6:
                # `datetime.timedelta` raised AttributeError here because the
                # module imports the datetime *class*; use timedelta directly.
                date = now + timedelta(days=-days, hours=-hours, minutes=-minutes)
                afk_since = date.strftime("%A, %Y %B %m, %H:%I")
            else:
                wday = now + timedelta(days=-days)
                afk_since = wday.strftime("%A")
        elif hours > 1:
            afk_since = f"`{int(hours)} Jam {int(minutes)} Menit`"
        elif minutes > 0:
            afk_since = f"`{int(minutes)} Menit {int(seconds)} Detik`"
        else:
            afk_since = f"`{int(seconds)} Detik`"
        if mention.sender_id not in USERS:
            if AFKREASON:
                await mention.reply(
                    f"**۩۩ {owner} Sedang Sibuk ۩۩** {afk_since} **Yang Lalu.**\
                    \n✦҈͜͡➳ **Karena :** `{AFKREASON}`"
                )
            else:
                await mention.reply(str(choice(AFKSTR)))
            USERS.update({mention.sender_id: 1})
        else:
            # Re-reply only occasionally to avoid spamming the chat.
            if USERS[mention.sender_id] % randint(2, 4) == 0:
                if AFKREASON:
                    await mention.reply(
                        f"**۩۩ {owner} Masih Off sibuk ۩۩** {afk_since} **Yang Lalu.**\
                        \n✦҈͜͡➳ **Karena :** `{AFKREASON}`"
                    )
                else:
                    await mention.reply(str(choice(AFKSTR)))
            USERS[mention.sender_id] = USERS[mention.sender_id] + 1
        COUNT_MSG = COUNT_MSG + 1
@bot.on(NewMessage(incoming=True, func=lambda e: e.is_private))
async def afk_on_pm(sender):
    """Auto-reply to private messages while the owner is AFK.

    Skips Telegram service messages (777000) and bots; when PM_AUTO_BAN
    is on, only approved senders get a reply.  Replies on first contact,
    then randomly every 2-4 messages.
    """
    from datetime import timedelta  # local: module-level `datetime` is the class

    # Deduplicated globals (the old code declared ISAFK/USERS/COUNT_MSG twice).
    global ISAFK
    global USERS
    global COUNT_MSG
    global USER_AFK
    global afk_time
    global afk_start
    global afk_end
    # Counters are created lazily; guard the first PM against NameError.
    if "COUNT_MSG" not in globals():
        COUNT_MSG = 0
    if "USERS" not in globals():
        USERS = {}
    back_alivee = datetime.now()
    afk_end = back_alivee.replace(microsecond=0)
    afk_since = "**Belum Lama**"
    if (
        sender.is_private
        and sender.sender_id != 777000
        and not (await sender.get_sender()).bot
    ):
        if PM_AUTO_BAN:
            try:
                from userbot.modules.sql_helper.pm_permit_sql import is_approved
                apprv = is_approved(sender.sender_id)
            # ImportError added: a missing sql helper used to crash the
            # handler instead of falling back to "approved".
            except (AttributeError, ImportError):
                apprv = True
        else:
            apprv = True
        if apprv and ISAFK:
            now = datetime.now()
            datime_since_afk = now - afk_time
            # total_seconds() includes whole days; `.seconds` wraps at 24h.
            elapsed = datime_since_afk.total_seconds()
            days = elapsed // (24 * 3600)
            elapsed %= 24 * 3600
            hours = elapsed // 3600
            elapsed %= 3600
            minutes = elapsed // 60
            elapsed %= 60
            seconds = elapsed
            if days == 1:
                afk_since = "**Kemarin**"
            elif days > 1:
                if days > 6:
                    # `datetime.timedelta` raised AttributeError because the
                    # module imports the datetime *class*; use timedelta.
                    date = now + timedelta(days=-days, hours=-hours, minutes=-minutes)
                    afk_since = date.strftime("%A, %Y %B %m, %H:%I")
                else:
                    wday = now + timedelta(days=-days)
                    afk_since = wday.strftime("%A")
            elif hours > 1:
                afk_since = f"`{int(hours)} Jam {int(minutes)} Menit`"
            elif minutes > 0:
                afk_since = f"`{int(minutes)} Menit {int(seconds)} Detik`"
            else:
                afk_since = f"`{int(seconds)} Detik`"
            if sender.sender_id not in USERS:
                if AFKREASON:
                    await sender.reply(
                        f"۩۩ **{owner} Sedang Sibuk ۩۩** {afk_since} **Yang Lalu** ۩۩.\
                        \n✦҈͜͡➳ **Karena :** `{AFKREASON}`"
                    )
                else:
                    await sender.reply(str(choice(AFKSTR)))
                USERS.update({sender.sender_id: 1})
                COUNT_MSG = COUNT_MSG + 1
            elif apprv:
                # Re-reply only occasionally to avoid spamming the sender.
                if USERS[sender.sender_id] % randint(2, 4) == 0:
                    if AFKREASON:
                        await sender.reply(
                            f"۩۩ **{owner} Sedang Sibuk ۩۩** {afk_since} **Yang Lalu. ۩۩**\
                            \n✦҈͜͡➳ **Karena :** `{AFKREASON}`"
                        )
                    else:
                        await sender.reply(str(choice(AFKSTR)))
                USERS[sender.sender_id] = USERS[sender.sender_id] + 1
                COUNT_MSG = COUNT_MSG + 1
| 33.470149
| 91
| 0.508919
|
4a0e4f5c733b4cfa84e960bfc196a92a17fb75ea
| 5,939
|
py
|
Python
|
cli/src/rnpfind/postar_data_load.py
|
mnahinkhan/RNPFind
|
8b561e087f943421c847dcb708ee386ee6439fa5
|
[
"MIT"
] | 1
|
2020-03-07T18:58:06.000Z
|
2020-03-07T18:58:06.000Z
|
cli/src/rnpfind/postar_data_load.py
|
mnahinkhan/RNPFind
|
8b561e087f943421c847dcb708ee386ee6439fa5
|
[
"MIT"
] | null | null | null |
cli/src/rnpfind/postar_data_load.py
|
mnahinkhan/RNPFind
|
8b561e087f943421c847dcb708ee386ee6439fa5
|
[
"MIT"
] | null | null | null |
"""
This module contains functions for loading data from the postar database.
"""
import bisect
import os
from .config import ANNOTATION_COLUMN_DELIMITER, POSTAR_PATH
# Column layout of POSTAR's genome-wide binding-site file (BED-like order).
postar_all_column_names = [
    "chrom",
    "chromStart",
    "chromEnd",
    "postarID",
    "nil",
    "strand",
    "rbpName",
    "dataSource",
    "cellType",
    "expSource",
    "postarScore",
]
# Human-readable description for each column above (parallel list).
postar_all_column_descriptions = [
    "chromosome number",
    "start coordinate",
    "end coordinate",
    "POSTAR database ID",
    "not sure",
    "strand",
    "RBP Name",
    "Data Source",
    "Cell type",
    "experimental source",
    "score",
]
# Indices (into the lists above) of the columns kept in output annotations.
postar_columns_of_interest = [3, 7, 8, 9, 10]
# Default column used for labels — index 8 is "cellType" in the full list.
postar_default_label_index = [8]
# Default column shown on mouse-over — index 9 is "expSource".
POSTAR_DEFAULT_MOUSE_OVER_INDEX = 9
# Projections of the full name/description lists onto the columns of interest.
postar_column_names = [
    postar_all_column_names[i] for i in postar_columns_of_interest
]
postar_column_descriptions = [
    postar_all_column_descriptions[i] for i in postar_columns_of_interest
]
class Query:
    """A genomic region of interest, comparable against data-file lines.

    Holds a (chromosome number, start, end) tuple and implements ``<``
    against a whitespace-separated binding-site line whose first three
    fields are "chr<N>", start, end.  Because the data file is sorted,
    this lets ``bisect`` binary-search the file using an instance of this
    class as the search key.
    """

    def __init__(self, query):
        self.query = query

    def __lt__(self, line):
        # Key for the region we are looking for...
        chr_no, start_coord, end_coord = self.query
        target_key = ("chr" + str(chr_no), start_coord, end_coord)
        # ...and the key of the file line we are compared against.
        fields = line.split()
        line_key = (fields[0], int(fields[1]), int(fields[2]))
        # self < line  <=>  the line sorts after the query region.
        return line_key > target_key

    def __str__(self):
        return str(self.query)
class FileSearcher:
    """Expose an open file as a byte-indexable sequence of lines.

    ``__getitem__(i)`` seeks to byte offset *i*, discards the (possibly
    partial) line containing that offset, and returns the next full line.
    Combined with ``__len__`` this is just enough sequence protocol for
    ``bisect.bisect`` to binary-search a sorted text file in place.
    """

    def __init__(self, file_pointer):
        self.file_pointer = file_pointer
        file_pointer.seek(0, os.SEEK_END)
        self.num_bytes = file_pointer.tell() - 1

    def __len__(self):
        return self.num_bytes

    def __getitem__(self, i):
        # TODO: Fix the three corner case bugs inherent in a binary search
        # algorithm like this
        # See: (http://pts.github.io/pts-line-bisect/line_bisect_evolution.html)
        fp = self.file_pointer
        fp.seek(i)
        fp.readline()  # discard the partial line containing offset i
        return fp.readline()
def binary_search_populate(file_path, rna_info, debug=False):
    """
    Searches a file containing sorted binding sites for a region of interest.
    Returns the subset of binding sites required as a generator / iterator
    object, yielding (rbp, start, end, annotation) tuples whose coordinates
    are made relative to the start of the query region.

    :param file_path: a file path containing sorted binding sites on the genome.
    :param rna_info: a dictionary containing chromosome number ("chr_n"),
        start ("start_coord") and end coordinate ("end_coord") to slice out
        of the genome-wide binding sites file.
    :param debug: prints useful information if set to True, for debugging.
        (Default value = False)
    """
    # TODO: Fix a bug here that causes genes without any data to start
    # collecting the whole genome!!
    rna_chr_no = rna_info["chr_n"]
    rna_start_chr_coord = rna_info["start_coord"]
    rna_end_chr_coord = rna_info["end_coord"]
    query = Query((rna_chr_no, rna_start_chr_coord, rna_end_chr_coord))
    with open(file_path) as postar_data_file:
        search_file = FileSearcher(postar_data_file)
        # Binary-search the sorted file for a byte offset near the region.
        to_seek = bisect.bisect(search_file, query)
        postar_data_file.seek(to_seek)
        postar_line_parts = postar_data_file.readline().split()
        is_found = False
        seen = []
        not_found_counter = 0
        while postar_line_parts:
            # Keep only lines fully inside the query region on the chromosome.
            if (
                postar_line_parts[0] == "chr" + str(rna_chr_no)
                and int(postar_line_parts[1]) > rna_start_chr_coord
                and int(postar_line_parts[2]) < rna_end_chr_coord
            ):
                is_found = True
                if debug:
                    # Print each newly-seen data source (column 7) once.
                    if (postar_line_parts[7]) not in seen:
                        print(";".join(postar_line_parts))
                        seen += [postar_line_parts[7]]
                rbp = postar_line_parts[6]
                start, end = postar_line_parts[1], postar_line_parts[2]
                # Assumption: POSTAR coordinates are 0-based, half-open
                # Fact: Input RNA coordinates are 1-based, fully-closed
                # Fact: the output is expected to be 0-based, half-open
                start = int(start) - rna_start_chr_coord + 1
                end = int(end) - rna_start_chr_coord + 1
                # TODO: Consider reformatting the annotation for visual appeal
                annotation = ANNOTATION_COLUMN_DELIMITER.join(
                    [postar_line_parts[i] for i in postar_columns_of_interest]
                )
                yield rbp, start, end, annotation
            elif is_found:
                # Input is sorted: once past the region, no more matches follow.
                break
            if not is_found:
                # Give up after a few non-matching lines near the seek point so
                # empty regions do not end up scanning the rest of the file.
                not_found_counter += 1
                if not_found_counter >= 4:
                    break
            postar_line_parts = postar_data_file.readline().split()
def postar_data_load(rna_info):
    """Yield POSTAR binding sites overlapping the given RNA region.

    :param rna_info: dictionary with the query region, containing the keys
        "chr_n", "start_coord" and "end_coord".
    :return: generator of (rbp, start, end, annotation) tuples.
    """
    sites_file = "{}/postar-human-RBP-binding-sites-sorted.txt".format(POSTAR_PATH)
    return binary_search_populate(sites_file, rna_info)
if __name__ == "__main__":
    # binary_search_populate() reads rna_info["chr_n"] / ["start_coord"] /
    # ["end_coord"], so the smoke-test input must be a mapping — the old
    # positional list raised TypeError as soon as the generator ran.
    test_rna_info = {
        "chr_n": 11,           # MALAT1 region
        "start_coord": 65497688,
        "end_coord": 65506516,
    }
    # The loader returns a lazy generator; iterate it to actually search.
    for binding_site in postar_data_load(test_rna_info):
        print(binding_site)
| 32.102703
| 80
| 0.646573
|
4a0e4f8f8184de0cb0c0300099b9fcd9cdce07ee
| 4,704
|
py
|
Python
|
scripts/azurerm_function_app.py
|
x280852/Testing123
|
afbf0b08a1a45afcfa5022270e31c7d146b7f467
|
[
"MIT"
] | null | null | null |
scripts/azurerm_function_app.py
|
x280852/Testing123
|
afbf0b08a1a45afcfa5022270e31c7d146b7f467
|
[
"MIT"
] | null | null | null |
scripts/azurerm_function_app.py
|
x280852/Testing123
|
afbf0b08a1a45afcfa5022270e31c7d146b7f467
|
[
"MIT"
] | null | null | null |
# azurerm_function_app
def azurerm_function_app(crf, cde, crg, headers, requests, sub, json, az2tfmess, cldurl):
    """Emit Terraform config and state scripts for Azure Function Apps.

    Lists Microsoft.Web/sites in subscription *sub* via the ARM REST API,
    keeps only resources of kind "functionapp" (optionally filtered to one
    resource group), writes one ``azurerm_function_app`` .tf file per app
    plus shell scripts to remove/import the Terraform state.

    crf       -- resource-type filter; runs only when crf is a substring of
                 "azurerm_function_app"
    cde       -- debug flag: dump raw JSON and generated files to stdout
    crg       -- optional resource-group filter (case-insensitive) or None
    headers   -- auth headers for the ARM REST calls
    requests  -- the `requests` module, injected by the driver
    sub       -- subscription id
    json      -- the `json` module, injected by the driver
    az2tfmess -- header comment written at the top of every .tf file
    cldurl    -- ARM endpoint host (management.azure.com or a
                 sovereign-cloud equivalent)
    """
    tfp = "azurerm_function_app"
    tcode = "620-"
    azr = ""
    if crf in tfp:
        # REST call: list every web site in the subscription.
        url = "https://" + cldurl + "/subscriptions/" + sub + "/providers/Microsoft.Web/sites"
        params = {'api-version': '2018-02-01'}
        r = requests.get(url, headers=headers, params=params)
        azr = r.json()["value"]
        # State-maintenance scripts are appended to across resource types.
        tfrmf = tcode + tfp + "-staterm.sh"
        tfimf = tcode + tfp + "-stateimp.sh"
        tfrm = open(tfrmf, 'a')
        tfim = open(tfimf, 'a')
        print("# " + tfp)
        count = len(azr)
        print(count)
        for i in range(0, count):
            kind = azr[i]["kind"]
            # Microsoft.Web/sites also returns web apps etc.; keep function apps.
            if kind != "functionapp":
                continue
            name = azr[i]["name"]
            loc = azr[i]["location"]
            res_id = azr[i]["id"]  # renamed from `id` (shadowed the builtin)
            # Terraform labels must avoid dots and a leading digit.
            rg = res_id.split("/")[4].replace(".", "-").lower()
            if rg[0].isdigit():
                rg = "rg_" + rg
            rgs = res_id.split("/")[4]
            if crg is not None:
                if rgs.lower() != crg.lower():
                    continue  # back to for
            if cde:
                print(json.dumps(azr[i], indent=4, separators=(',', ': ')))
            rname = name.replace(".", "-")
            prefix = tfp + "." + rg + '__' + rname
            rfilename = prefix + ".tf"
            fr = open(rfilename, 'w')
            fr.write(az2tfmess)
            fr.write('resource ' + tfp + ' ' + rg + '__' + rname + ' {\n')
            fr.write('\tname = "' + name + '"\n')
            fr.write('\tlocation = "' + loc + '"\n')
            fr.write('\tresource_group_name = "' + rgs + '"\n')
            https = azr[i]["properties"]["httpsOnly"]
            appplid = azr[i]["properties"]["serverFarmId"]
            # case issues - so use resource id directly
            fr.write('\tapp_service_plan_id = "' + appplid + '"\n')
            fr.write('\thttps_only = ' + str(https).lower() + '\n')
            blog = False
            strcon = ""
            # Fix: use the configured cloud endpoint instead of a hard-coded
            # management.azure.com, so sovereign clouds work like the listing
            # call above.
            url = "https://" + cldurl + "/" + res_id + "/config/appsettings/list"
            params = {'api-version': '2018-02-01'}
            r = requests.post(url, headers=headers, params=params)
            appset = r.json()
            fr.write('\tapp_settings = { \n')
            # app settings
            try:
                for setting in appset["properties"]:
                    value = appset["properties"][setting]
                    fr.write('\t\t' + setting + ' = "' + value + '"\n')
            except KeyError:
                pass
            # A populated dashboard connection implies built-in logging.
            try:
                aval = appset["properties"]["AzureWebJobsDashboard"]
                if len(aval) > 3:
                    blog = True
            except KeyError:
                pass
            fr.write('\t }' + '\n')
            try:
                strcon = appset["properties"]["AzureWebJobsStorage"]
            except KeyError:
                pass
            if len(strcon) >= 3:
                fr.write('\tstorage_connection_string = "' + strcon + '" \n')
            else:
                fr.write('\tstorage_connection_string = ""\n')
            try:
                vers = appset["properties"]["FUNCTIONS_EXTENSION_VERSION"]
                fr.write('\tversion = "' + vers + '"\n')
            except KeyError:
                pass
            fr.write('\tenable_builtin_logging = ' + str(blog).lower() + '\n')
            # tags block
            try:
                mtags = azr[i]["tags"]
                fr.write('\ttags = { \n')
                for key in mtags.keys():
                    tval = mtags[key]
                    fr.write(('\t "' + key + '"="' + tval + '"\n'))
                fr.write('\t}\n')
            except KeyError:
                pass
            fr.write('}\n')
            fr.close()  # close .tf file
            if cde:
                with open(rfilename) as f:
                    print(f.read())
            tfrm.write('terraform state rm ' + tfp + '.' + rg + '__' + rname + '\n')
            tfim.write('echo "importing ' + str(i) + ' of ' + str(count - 1) + '"' + '\n')
            tfcomm = 'terraform import ' + tfp + '.' + rg + '__' + rname + ' ' + res_id + '\n'
            tfim.write(tfcomm)
        # end for i loop
        tfrm.close()
        tfim.close()
# end stub
| 35.908397
| 113
| 0.429422
|
4a0e503f2bc0cfdc7db01a3485aecab82a290793
| 1,678
|
py
|
Python
|
tensorflow_model_analysis/evaluators/counter_util_test.py
|
BioGeek/model-analysis
|
03db02c21e21b092bc409c8bf263174b90c4e2ae
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_model_analysis/evaluators/counter_util_test.py
|
BioGeek/model-analysis
|
03db02c21e21b092bc409c8bf263174b90c4e2ae
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_model_analysis/evaluators/counter_util_test.py
|
BioGeek/model-analysis
|
03db02c21e21b092bc409c8bf263174b90c4e2ae
|
[
"Apache-2.0"
] | null | null | null |
# Lint as: python3
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for counter utility to count all the metrics computed."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import apache_beam as beam
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tensorflow_model_analysis import constants
from tensorflow_model_analysis.evaluators import counter_util
from tensorflow_model_analysis.post_export_metrics import post_export_metrics
class CounterUtilTest(tf.test.TestCase):
  """Checks that metric-computation Beam counters are incremented."""

  def testMetricComputedBeamCounter(self):
    # Build a pipeline whose only step increments the per-metric
    # computation counter for the AUC post-export metric.
    with beam.Pipeline() as pipeline:
      auc = post_export_metrics.auc()
      _ = pipeline | counter_util.IncrementMetricsComputationCounters([auc])

    # NOTE(review): the `with` block above already executes the pipeline on
    # exit; this second run() re-executes it — confirm that is intended.
    result = pipeline.run()
    # Query the committed value of the "metric_computed_auc" counter.
    metric_filter = beam.metrics.metric.MetricsFilter().with_namespace(
        constants.METRICS_NAMESPACE).with_name('metric_computed_auc')
    actual_metrics_count = result.metrics().query(
        filter=metric_filter)['counters'][0].committed
    self.assertEqual(actual_metrics_count, 1)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
  tf.test.main()
| 35.702128
| 79
| 0.779499
|
4a0e51066c7de0885d2671df4db0433583a553b3
| 9,133
|
py
|
Python
|
built-in/PyTorch/Official/cv/image_classification/DenseNet169_ID0454_for_Pytorch/torchvision/datasets/folder.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 12
|
2020-12-13T08:34:24.000Z
|
2022-03-20T15:17:17.000Z
|
built-in/PyTorch/Official/cv/image_classification/DenseNet161_ID0455_for_Pytorch/torchvision/datasets/folder.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 1
|
2022-01-20T03:11:05.000Z
|
2022-01-20T06:53:39.000Z
|
built-in/PyTorch/Official/cv/image_classification/DenseNet169_ID0454_for_Pytorch/torchvision/datasets/folder.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 2
|
2021-07-10T12:40:46.000Z
|
2021-12-17T07:55:15.000Z
|
#
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
import os
import os.path
import sys

from PIL import Image

# Restored: DatasetFolder subclasses VisionDataset below, so with this
# import commented out the class definition raised NameError at import time.
from .vision import VisionDataset
def has_file_allowed_extension(filename, extensions):
    """Checks if a file is an allowed extension.

    Args:
        filename (string): path to a file
        extensions (tuple of strings): extensions to consider (lowercase)

    Returns:
        bool: True if the filename ends with one of given extensions
    """
    # Case-insensitive match: lowercase the name, then test every extension
    # at once (str.endswith accepts a tuple of suffixes).
    lowered = filename.lower()
    return lowered.endswith(extensions)
def is_image_file(filename):
    """Checks if a file is an allowed image extension.

    Args:
        filename (string): path to a file

    Returns:
        bool: True if the filename ends with a known image extension
    """
    # Delegates to the generic check with the module-wide IMG_EXTENSIONS tuple.
    return has_file_allowed_extension(filename, IMG_EXTENSIONS)
def make_dataset(dir, class_to_idx, extensions=None, is_valid_file=None):
    """Collect (path, class_index) samples from a class-per-folder tree.

    Args:
        dir (string): root directory; each immediate subfolder named in
            ``class_to_idx`` is scanned recursively.
        class_to_idx (dict): maps class folder name -> integer label.
        extensions (tuple, optional): allowed file extensions; mutually
            exclusive with ``is_valid_file``.
        is_valid_file (callable, optional): predicate deciding whether a
            path is a valid sample; mutually exclusive with ``extensions``.

    Returns:
        list of (file path, class_index) tuples, in sorted traversal order.

    Raises:
        ValueError: if neither or both of extensions / is_valid_file are given.
    """
    root_dir = os.path.expanduser(dir)
    # Exactly one of the two filtering mechanisms must be supplied.
    if not ((extensions is None) ^ (is_valid_file is None)):
        raise ValueError("Both extensions and is_valid_file cannot be None or not None at the same time")
    if extensions is not None:
        def is_valid_file(x):
            return has_file_allowed_extension(x, extensions)
    instances = []
    for target_class in sorted(class_to_idx):
        class_dir = os.path.join(root_dir, target_class)
        if not os.path.isdir(class_dir):
            continue
        # Sorted walk + sorted filenames keep the sample order deterministic.
        for walk_root, _, filenames in sorted(os.walk(class_dir)):
            for filename in sorted(filenames):
                candidate = os.path.join(walk_root, filename)
                if is_valid_file(candidate):
                    instances.append((candidate, class_to_idx[target_class]))
    return instances
class DatasetFolder(VisionDataset):
    """A generic data loader where the samples are arranged in this way: ::

        root/class_x/xxx.ext
        root/class_x/xxy.ext
        root/class_x/xxz.ext

        root/class_y/123.ext
        root/class_y/nsdf3.ext
        root/class_y/asd932_.ext

    Args:
        root (string): Root directory path.
        loader (callable): A function to load a sample given its path.
        extensions (tuple[string]): A list of allowed extensions.
            both extensions and is_valid_file should not be passed.
        transform (callable, optional): A function/transform that takes in
            a sample and returns a transformed version.
            E.g, ``transforms.RandomCrop`` for images.
        target_transform (callable, optional): A function/transform that takes
            in the target and transforms it.
        is_valid_file (callable, optional): A function that takes path of an Image file
            and check if the file is a valid_file (used to check of corrupt files)
            both extensions and is_valid_file should not be passed.

    Attributes:
        classes (list): List of the class names.
        class_to_idx (dict): Dict with items (class_name, class_index).
        samples (list): List of (sample path, class_index) tuples
        targets (list): The class_index value for each image in the dataset
    """

    def __init__(self, root, loader, extensions=None, transform=None,
                 target_transform=None, is_valid_file=None):
        super(DatasetFolder, self).__init__(root, transform=transform,
                                            target_transform=target_transform)
        # Discover class folders, then index every valid file beneath them.
        classes, class_to_idx = self._find_classes(self.root)
        samples = make_dataset(self.root, class_to_idx, extensions, is_valid_file)
        if len(samples) == 0:
            raise (RuntimeError("Found 0 files in subfolders of: " + self.root + "\n"
                                "Supported extensions are: " + ",".join(extensions)))

        self.loader = loader
        self.extensions = extensions

        self.classes = classes
        self.class_to_idx = class_to_idx
        self.samples = samples
        # Labels-only view of samples for cheap access (e.g. samplers).
        self.targets = [s[1] for s in samples]

    def _find_classes(self, dir):
        """
        Finds the class folders in a dataset.

        Args:
            dir (string): Root directory path.

        Returns:
            tuple: (classes, class_to_idx) where classes are relative to (dir), and class_to_idx is a dictionary.

        Ensures:
            No class is a subdirectory of another.
        """
        if sys.version_info >= (3, 5):
            # Faster and available in Python 3.5 and above
            classes = [d.name for d in os.scandir(dir) if d.is_dir()]
        else:
            classes = [d for d in os.listdir(dir) if os.path.isdir(os.path.join(dir, d))]
        # Sorting makes the class -> index mapping deterministic across runs.
        classes.sort()
        class_to_idx = {classes[i]: i for i in range(len(classes))}
        return classes, class_to_idx

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (sample, target) where target is class_index of the target class.
        """
        path, target = self.samples[index]
        sample = self.loader(path)
        # Transforms are applied lazily, per item, at access time.
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            target = self.target_transform(target)

        return sample, target

    def __len__(self):
        return len(self.samples)
# File extensions treated as loadable images (lowercase, with leading dot);
# compared case-insensitively by has_file_allowed_extension().
IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff', '.webp')
def pil_loader(path):
    """Load the image at *path* with PIL and convert it to RGB."""
    # Open through an explicit file handle so the descriptor is released
    # promptly (https://github.com/python-pillow/Pillow/issues/835).
    with open(path, 'rb') as img_file:
        image = Image.open(img_file)
        return image.convert('RGB')
def accimage_loader(path):
    """Load the image at *path* with the accimage backend, falling back
    to PIL when accimage cannot decode it."""
    # Imported lazily: accimage is an optional dependency.
    import accimage
    try:
        return accimage.Image(path)
    except IOError:
        # Potentially a decoding problem, fall back to PIL.Image
        return pil_loader(path)
def default_loader(path):
    """Load *path* with whichever image backend torchvision is configured
    to use: accimage when selected, otherwise PIL."""
    from torchvision import get_image_backend
    backend = get_image_backend()
    if backend == 'accimage':
        return accimage_loader(path)
    return pil_loader(path)
class ImageFolder(DatasetFolder):
    """A generic data loader where the images are arranged in this way: ::

        root/dog/xxx.png
        root/dog/xxy.png
        root/dog/xxz.png

        root/cat/123.png
        root/cat/nsdf3.png
        root/cat/asd932_.png

    Args:
        root (string): Root directory path.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        loader (callable, optional): A function to load an image given its path.
        is_valid_file (callable, optional): A function that takes path of an Image file
            and check if the file is a valid_file (used to check of corrupt files)

    Attributes:
        classes (list): List of the class names.
        class_to_idx (dict): Dict with items (class_name, class_index).
        imgs (list): List of (image path, class_index) tuples
    """

    def __init__(self, root, transform=None, target_transform=None,
                 loader=default_loader, is_valid_file=None):
        # Image specialisation of DatasetFolder: filter by IMG_EXTENSIONS
        # unless a custom validity predicate is supplied instead.
        super(ImageFolder, self).__init__(root, loader, IMG_EXTENSIONS if is_valid_file is None else None,
                                          transform=transform,
                                          target_transform=target_transform,
                                          is_valid_file=is_valid_file)
        # Historical alias kept for backward compatibility.
        self.imgs = self.samples
| 37.584362
| 113
| 0.647323
|
4a0e5222158a61e769b331e2821bdc5075f1d52d
| 32,492
|
py
|
Python
|
htf/test-py/test_tensorflow.py
|
whitead/hoomd-tf
|
d0fae8aac17c5affa2a415f66fa99bb7cd2fc9b9
|
[
"MIT"
] | 28
|
2019-04-02T11:05:07.000Z
|
2021-09-30T23:51:35.000Z
|
htf/test-py/test_tensorflow.py
|
whitead/hoomd-tf
|
d0fae8aac17c5affa2a415f66fa99bb7cd2fc9b9
|
[
"MIT"
] | 263
|
2019-04-01T05:32:27.000Z
|
2022-01-26T13:38:18.000Z
|
htf/test-py/test_tensorflow.py
|
whitead/hoomd-tf
|
d0fae8aac17c5affa2a415f66fa99bb7cd2fc9b9
|
[
"MIT"
] | 7
|
2019-07-22T22:57:12.000Z
|
2022-01-09T11:17:54.000Z
|
# Copyright (c) 2020 HOOMD-TF Developers
import hoomd
import hoomd.md
import hoomd.htf as htf
import unittest
import os
import tempfile
import shutil
import pickle
import glob
import numpy as np
import math
import tensorflow as tf
import build_examples
from hoomd.htf.simmodel import _make_reverse_indices
def compute_forces(system, rcut):
    """Reference pairwise forces for comparison against tfcompute output.

    Every particle pair within ``rcut`` contributes a constant-magnitude
    (unit) repulsive force along the minimum-image separation vector.
    (The original docstring said "1 / r^2", but ``-r / |r|`` always has
    magnitude 1 regardless of distance.)
    """
    snapshot = system.take_snapshot()
    positions = snapshot.particles.position
    n_particles = len(positions)
    forces = np.zeros((n_particles, 3))
    # Accumulate equal-and-opposite contributions over unique pairs.
    for i in range(n_particles):
        for j in range(i + 1, n_particles):
            rij = np.array(snapshot.box.min_image(positions[j] - positions[i]))
            dist = np.sqrt(np.sum(rij ** 2))
            if dist <= rcut:
                pair_force = -rij / dist
                forces[i, :] += pair_force
                forces[j, :] -= pair_force
    return forces
class test_access(unittest.TestCase):
    """Smoke test: arrays exposed by tfcompute are readable and well-typed."""

    def setUp(self):
        # Fresh HOOMD execution context plus a scratch directory per test.
        hoomd.context.initialize('')
        self.tmp = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp)

    def test_access(self):
        model = build_examples.SimplePotential(32)
        tfcompute = htf.tfcompute(model)
        rcut = 3
        # create a system with a few types
        cell = hoomd.lattice.unitcell(
            N=3,
            a1=[6, 0, 0],
            a2=[0, 6, 0],
            a3=[0, 0, 6],
            position=[[2, 2, 2], [1, 3, 1], [3, 1, 1]],
            type_name=['A', 'B', 'C'])
        system = hoomd.init.create_lattice(unitcell=cell, n=5)
        nlist = hoomd.md.nlist.cell(check_period=1)
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nve(group=hoomd.group.all())
        tfcompute.attach(nlist, r_cut=rcut)
        hoomd.run(1)
        tfcompute.get_virial_array()
        tfcompute.get_forces_array()
        pa = tfcompute.get_positions_array()
        nl = tfcompute.get_nlist_array()
        # make sure we get the 3 types; plain int replaces np.int, which
        # was deprecated in NumPy 1.20 and removed in NumPy 1.24.
        self.assertEqual(len(np.unique(nl[:, :, 3].astype(int))), 3)
        self.assertEqual(len(np.unique(pa[:, 3].astype(int))), 3)
class test_compute(unittest.TestCase):
def setUp(self):
hoomd.context.initialize('')
self.tmp = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmp)
def test_force_overwrite(self):
N = 3 * 3
NN = N - 1
rcut = 5.0
model = build_examples.SimplePotential(NN)
tfcompute = htf.tfcompute(model)
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0),
n=[3, 3])
nlist = hoomd.md.nlist.cell(check_period=1)
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach(nlist, r_cut=rcut)
# use these to throw off timesteps
hoomd.run(1)
hoomd.run(1)
for i in range(3):
py_forces = compute_forces(system, rcut)
for j in range(N):
np.testing.assert_allclose(system.particles[j].net_force,
py_forces[j, :], atol=1e-5)
hoomd.run(100)
def test_force_overwrite_batched(self):
N = 3 * 3
NN = N - 1
rcut = 5.0
model = build_examples.SimplePotential(NN)
tfcompute = htf.tfcompute(model)
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0),
n=[3, 3])
nlist = hoomd.md.nlist.cell(check_period=1)
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach(nlist, r_cut=rcut, batch_size=4)
# use these to throw off timesteps
hoomd.run(1)
hoomd.run(1)
for i in range(3):
py_forces = compute_forces(system, rcut)
for j in range(N):
np.testing.assert_allclose(system.particles[j].net_force,
py_forces[j, :], atol=1e-5)
hoomd.run(100)
def test_nonlist(self):
model = build_examples.BenchmarkNonlistModel(0)
tfcompute = htf.tfcompute(model)
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0),
n=[32, 32])
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach()
hoomd.run(10)
def test_full_batch(self):
model = build_examples.BenchmarkNonlistModel(0)
tfcompute = htf.tfcompute(model)
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0),
n=[32, 32])
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach(batch_size=None)
hoomd.run(10)
def test_trainable(self):
model = build_examples.TrainableGraph(16, output_forces=False)
model.compile(
optimizer=tf.keras.optimizers.Nadam(0.01),
loss='MeanSquaredError')
start = model.get_layer('lj').trainable_weights[0].numpy()
tfcompute = htf.tfcompute(model)
rcut = 5.0
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0), n=[3, 3])
nlist = hoomd.md.nlist.cell(check_period=1)
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach(nlist, r_cut=rcut, batch_size=4, train=True)
lj = hoomd.md.pair.lj(r_cut=5.0, nlist=nlist)
lj.pair_coeff.set('A', 'A', epsilon=1.1, sigma=0.9)
hoomd.run(25)
end = model.get_layer('lj').trainable_weights[0].numpy()
assert np.sum((start - end)**2) > 0.01**2, 'No training observed'
def test_model_save(self):
'''Saves model after training
'''
model = build_examples.TrainableGraph(16, output_forces=False)
model.compile(
optimizer=tf.keras.optimizers.Nadam(0.01),
loss='MeanSquaredError')
tfcompute = htf.tfcompute(model)
rcut = 5.0
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0),
n=[3, 3])
nlist = hoomd.md.nlist.cell(check_period=1)
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach(nlist, train=True, r_cut=rcut)
hoomd.run(5)
model.save(os.path.join(self.tmp, 'test-model'))
def test_model_load(self):
''' Saves model after training and then uses
if for inference
'''
model = build_examples.TrainableGraph(16, output_forces=False)
model.compile(
optimizer=tf.keras.optimizers.Nadam(0.01),
loss='MeanSquaredError')
tfcompute = htf.tfcompute(model)
rcut = 5.0
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0),
n=[3, 3])
nlist = hoomd.md.nlist.cell(check_period=1)
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach(nlist, train=True, r_cut=rcut)
hoomd.run(5)
model.save(os.path.join(self.tmp, 'test-model'))
model = tf.keras.models.load_model(
os.path.join(self.tmp, 'test-model'))
infer_model = build_examples.TrainableGraph(16, output_forces=True)
infer_model.set_weights(model.get_weights())
tfcompute.disable()
tfcompute = htf.tfcompute(infer_model)
tfcompute.attach(nlist, r_cut=rcut)
hoomd.run(5)
def test_model_load_serial(self):
''' Saves model after training and then uses
if for inference
'''
model = build_examples.TrainableGraph(16, output_forces=False)
model.compile(
optimizer=tf.keras.optimizers.Nadam(0.01),
loss='MeanSquaredError')
tfcompute = htf.tfcompute(model)
rcut = 5.0
system = hoomd.init.create_lattice(
unitcell=hoomd.lattice.sq(a=4.0),
n=[3, 3])
nlist = hoomd.md.nlist.cell(check_period=1)
hoomd.md.integrate.mode_standard(dt=0.005)
hoomd.md.integrate.nve(group=hoomd.group.all(
)).randomize_velocities(kT=2, seed=2)
tfcompute.attach(nlist, train=True, r_cut=rcut)
hoomd.run(5)
model.save(os.path.join(self.tmp, 'test-model'))
del model
return
# We are having trouble
# get_config in SimModel fails if I call super - don't know why
# Because I cannot call super this code doesn't work
# We keep the partial test because it calls the get_config methods,
# checking that they are at least callable.
model = tf.keras.models.load_model(
os.path.join(self.tmp, 'test-model'),
custom_objects={**hoomd.htf.custom_objects,
'TrainableGraph': build_examples.TrainableGraph})
tfcompute.disable()
tfcompute = htf.tfcompute(model)
tfcompute.attach(nlist, r_cut=rcut)
hoomd.run(5)
def test_print(self):
    """Smoke-test a model that prints during compute over several short runs."""
    n_particles = 3 * 3
    max_neighbors = n_particles - 1
    cutoff = 5.0
    tfcompute = htf.tfcompute(build_examples.PrintModel(max_neighbors))
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0), n=[3, 3])
    neighbor_list = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.005)
    integrator = hoomd.md.integrate.nve(group=hoomd.group.all())
    integrator.randomize_velocities(kT=4, seed=1)
    tfcompute.attach(neighbor_list, r_cut=cutoff, batch_size=4)
    for _ in range(3):
        hoomd.run(2)
def test_noforce_graph(self):
    '''A model that outputs no forces must leave every particle's net force at zero.'''
    model = build_examples.NoForceModel(9, output_forces=False)
    tfcompute = htf.tfcompute(model)
    N = 3 * 3
    NN = N - 1
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.005)
    hoomd.md.integrate.nve(group=hoomd.group.all())
    tfcompute.attach(nlist, r_cut=rcut)
    for i in range(3):
        hoomd.run(1)
        # No force output attached -> each particle's net force stays zero.
        for j in range(N):
            np.testing.assert_allclose(
                system.particles[j].net_force, [0, 0, 0], rtol=1e-5)
def test_wrap(self):
    """Run a single step of a coordinate-wrapping model (no force output)."""
    wrap_model = build_examples.WrapModel(0, output_forces=False)
    tfcompute = htf.tfcompute(wrap_model)
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0), n=[3, 3])
    hoomd.md.integrate.mode_standard(dt=0.005)
    hoomd.md.integrate.nve(group=hoomd.group.all())
    tfcompute.attach()
    hoomd.run(1)
def test_skew_fails(self):
    '''A skewed (tilted) box is unsupported: running after attach must raise.'''
    model = build_examples.WrapModel(0, output_forces=False)
    tfcompute = htf.tfcompute(model)
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    # Tilt the box so it is no longer orthorhombic.
    hoomd.update.box_resize(xy=0.5)
    hoomd.run(1)
    hoomd.md.integrate.mode_standard(dt=0.005)
    hoomd.md.integrate.nve(group=hoomd.group.all())
    tfcompute.attach()
    with self.assertRaises(tf.errors.InvalidArgumentError):
        hoomd.run(1)
def test_lj_forces(self):
    '''Compare per-particle forces from the TF LJ model against HOOMD's stock LJ pair.

    Runs identical trajectories (same lattice, integrator, and RNG seed)
    with both implementations and asserts the forces match step-by-step.
    '''
    model = build_examples.LJModel(32)
    tfcompute = htf.tfcompute(model)
    T = 10
    N = 5 * 5
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=3.0),
        n=[5, 5])
    nlist = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.005)
    hoomd.md.integrate.nvt(group=hoomd.group.all(),
                           kT=1, tau=0.2
                           ).randomize_velocities(seed=1)
    tfcompute.attach(nlist, r_cut=rcut)
    hoomd.run(20)
    tf_forces = []
    for i in range(T):
        hoomd.run(1)
        snapshot = system.take_snapshot()
        tf_forces.append([system.particles[j].net_force
                          for j in range(N)])
    tf_forces = np.array(tf_forces)
    # now run with stock lj
    hoomd.context.initialize('')
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=3.0),
        n=[5, 5])
    nlist = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.005)
    hoomd.md.integrate.nvt(group=hoomd.group.all(), kT=1, tau=0.2
                           ).randomize_velocities(seed=1)
    lj = hoomd.md.pair.lj(r_cut=5.0, nlist=nlist)
    lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
    hoomd.run(20)
    lj_forces = []
    for i in range(T):
        hoomd.run(1)
        snapshot = system.take_snapshot()
        lj_forces.append([system.particles[j].net_force for j in range(N)])
    lj_forces = np.array(lj_forces)
    for i in range(T):
        for j in range(N):
            np.testing.assert_allclose(tf_forces[i, j],
                                       lj_forces[i, j], atol=1e-5)
            # make sure we wrote test to have non-zero forces
            assert np.sum(
                lj_forces[i, j]**2) > 1e-4**2, 'Forces are too low to assess!'
def test_running_mean(self):
    '''Checks a model accumulating a running-mean LJ energy yields a negative average.'''
    model = build_examples.LJRunningMeanModel(32)
    tfcompute = htf.tfcompute(model)
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell()
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(group=hoomd.group.all()
                           ).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, r_cut=rcut, batch_size=4)
    hoomd.run(10)
    # LJ energy near lattice spacing should average negative (attractive well).
    result = model.avg_energy.result().numpy()
    assert result < 0
def test_force_output(self):
    '''Trains against reference LJ forces and checks force/energy arrays match.

    Two overlapping LJ pair potentials are attached; the model is trained
    with ``lj`` as the reference, then the training error and the arrays
    returned by ``get_forces_array`` are validated.
    '''
    Ne = 5
    c = hoomd.context.initialize('')
    model = build_examples.LJModel(32, output_forces=False)
    model.compile(loss='MeanSquaredError')
    tfcompute = htf.tfcompute(model)
    rcut = 3.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=2.0),
        n=[Ne, Ne])
    # Disable the sorter so particle indices stay stable for comparisons.
    c.sorter.disable()
    nlist = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.01)
    lj = hoomd.md.pair.lj(r_cut=rcut, nlist=nlist)
    lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
    lj2 = hoomd.md.pair.lj(r_cut=rcut, nlist=nlist)
    lj2.pair_coeff.set('A', 'A', epsilon=4.0, sigma=0.8)
    hoomd.md.integrate.nve(group=hoomd.group.all(
    )).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, train=True, r_cut=rcut, period=100)
    tfcompute.set_reference_forces(lj)
    hoomd.run(300)
    error = model.metrics[0].result().numpy()
    assert abs(error) < 1e-5
    # now check difference between particle forces and
    # forces from htf
    lj_forces = np.array([lj.forces[j].force for j in range(Ne**2)])
    lj_energy = np.array([lj.forces[j].energy for j in range(Ne**2)])
    np.testing.assert_allclose(tfcompute.get_forces_array(
    )[:, :3], lj_forces)
    np.testing.assert_allclose(tfcompute.get_forces_array(
    )[:, 3], lj_energy)
def test_rdf(self):
    '''Checks the model's running-average RDF is populated and non-trivial.'''
    model = build_examples.LJRDF(32)
    tfcompute = htf.tfcompute(model)
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell()
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(group=hoomd.group.all(
    )).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, r_cut=rcut, batch_size=4)
    hoomd.run(10)
    rdf = model.avg_rdf.result().numpy()
    # RDF must have multiple bins and register at least one pair.
    assert len(rdf) > 5
    assert np.sum(rdf) > 0
def test_typed_rdf(self):
    '''Test RDF typing: per-type RDFs on a two-type chain system should agree.'''
    model = build_examples.LJTypedModel(32)
    tfcompute = htf.tfcompute(model)
    rcut = 10.0
    # build system using example from hoomd
    snapshot = hoomd.data.make_snapshot(N=10,
                                        box=hoomd.data.boxdim(Lx=10,
                                                              Ly=10,
                                                              Lz=10),
                                        particle_types=['A', 'B'],
                                        bond_types=['polymer'])
    snapshot.particles.position[:] = [[-4.5, 0, 0], [-3.5, 0, 0],
                                      [-2.5, 0, 0], [-1.5, 0, 0],
                                      [-0.5, 0, 0], [0.5, 0, 0],
                                      [1.5, 0, 0], [2.5, 0, 0],
                                      [3.5, 0, 0], [4.5, 0, 0]]
    # First 7 particles are type A, last 3 type B.
    snapshot.particles.typeid[0:7] = 0
    snapshot.particles.typeid[7:10] = 1
    snapshot.bonds.resize(9)
    snapshot.bonds.group[:] = [[0, 1], [1, 2], [2, 3],
                               [3, 4], [4, 5], [5, 6],
                               [6, 7], [7, 8], [8, 9]]
    snapshot.replicate(3, 3, 3)
    system = hoomd.init.read_snapshot(snapshot)
    nlist = hoomd.md.nlist.cell()
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(group=hoomd.group.all(
    )).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, r_cut=rcut)
    hoomd.run(10)
    rdfa = model.avg_rdfa.result().numpy()
    rdfb = model.avg_rdfb.result().numpy()
    assert np.sum(rdfa) > 0
    # The two typed RDFs are expected to be (nearly) identical here.
    np.testing.assert_array_almost_equal(rdfa, rdfb)
def test_training_flag(self):
    '''Attach the same model with train=True and then train=False; both must run.'''
    model = build_examples.TrainModel(4, dim=1, top_neighs=2)
    model.compile(
        optimizer=tf.keras.optimizers.Nadam(0.01),
        loss='MeanSquaredError')
    tfcompute = htf.tfcompute(model)
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell()
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(group=hoomd.group.all(
    )).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, train=True, r_cut=rcut, batch_size=4)
    hoomd.run(10)
    # Re-attach in inference mode.
    tfcompute.attach(nlist, train=False, r_cut=rcut, batch_size=4)
    hoomd.run(10)
def test_retrace(self):
    '''Changing a model attribute takes effect only after retrace_compute().

    Without a retrace the compiled TF graph keeps its old behavior even
    though ``output_zero`` was flipped; after retracing, output becomes 0.
    '''
    model = build_examples.TrainModel(4, dim=1, top_neighs=2)
    tfcompute = htf.tfcompute(model)
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell()
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(group=hoomd.group.all(
    )).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, r_cut=rcut, save_output_period=1)
    hoomd.run(1)
    assert tfcompute.outputs[0][-1] != 0
    # without retrace
    model.output_zero = True
    hoomd.run(1)
    # Still non-zero: the traced graph has not picked up the change.
    assert tfcompute.outputs[0][-1] != 0
    # with retrace
    model.retrace_compute()
    hoomd.run(1)
    assert tfcompute.outputs[0][-1] == 0
def test_lj_energy(self):
    '''Checks total (potential + kinetic) energy is conserved under NVE with the TF LJ model.'''
    model = build_examples.LJModel(32)
    tfcompute = htf.tfcompute(model)
    N = 3 * 3
    NN = N - 1
    T = 10
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(group=hoomd.group.all(
    )).randomize_velocities(seed=1, kT=0.8)
    log = hoomd.analyze.log(filename=None,
                            quantities=['potential_energy',
                                        'kinetic_energy'], period=1)
    tfcompute.attach(nlist, r_cut=rcut)
    energy = []
    for i in range(T):
        hoomd.run(250)
        energy.append(log.query('potential_energy'
                                ) + log.query('kinetic_energy'))
        if i > 1:
            # Consecutive totals should agree closely if energy is conserved.
            np.testing.assert_allclose(energy[-1],
                                       energy[-2], atol=1e-3)
def test_nlist_count(self):
    '''Make sure nlist is full, not half
    '''
    model = build_examples.LJModel(32)
    tfcompute = htf.tfcompute(model)
    N = 3 * 3
    NN = N - 1
    T = 10
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell()
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(group=hoomd.group.all(
    )).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, r_cut=rcut)
    hoomd.run(1)  # in lattice, should have 4 neighbors
    nl = tfcompute.get_nlist_array()
    # Count non-zero neighbor entries per particle; a full (not half)
    # neighbor list gives every lattice particle at least 4.
    ncount = np.sum(np.sum(nl**2, axis=2) > 0.1, axis=1)
    self.assertEqual(np.min(ncount), 4)
def test_mapped_nlist(self):
    '''Compute mapped nlist
    '''
    N = 3 * 3
    NN = N - 1
    T = 10
    CGN = 2
    rcut = 5.0
    model = build_examples.MappedNlist(NN, output_forces=False)
    tfcompute = htf.tfcompute(model)
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    self.assertEqual(len(system.particles), N)
    aa_group, mapped_group = tfcompute.enable_mapped_nlist(
        system, build_examples.MappedNlist.my_map)
    assert len(aa_group) == N
    assert len(mapped_group) == 2
    # 2 CG sites
    self.assertEqual(len(system.particles), N + CGN)
    nlist = hoomd.md.nlist.cell()
    hoomd.md.integrate.mode_standard(dt=0.001)
    hoomd.md.integrate.nve(
        group=aa_group).randomize_velocities(seed=1, kT=0.8)
    tfcompute.attach(nlist, r_cut=rcut, save_output_period=2)
    hoomd.run(8)
    positions = tfcompute.outputs[0].reshape(-1, N + CGN, 4)
    # check that mapping function was applied
    np.testing.assert_allclose(
        positions[1:, N, :3], np.mean(positions[1:, :-1, :3], axis=1), atol=1e-5)
    # check that there is no mixing between neighbor lists
    aa = set(np.unique(tfcompute.outputs[1][..., -1].astype(int)))
    cg = set(np.unique(tfcompute.outputs[2][..., -1].astype(int)))
    self.assertTrue(aa.intersection(cg) == set([0]))
def test_lj_pressure(self):
    '''Compare per-particle virials from the TF LJ model against HOOMD's stock LJ.'''
    # TODO The virials are off by 1e-6, leading to
    # pressure differences of 1e-3.
    # I can't figure out why, but since PE and forces are
    # matching exactly, I'll leave the tol
    # set that high.
    model = build_examples.LJVirialModel(32, virial=True)
    tfcompute = htf.tfcompute(model)
    N = 3 * 3
    NN = N - 1
    rcut = 5.0
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.005)
    hoomd.md.integrate.nvt(group=hoomd.group.all(),
                           kT=1, tau=0.2).randomize_velocities(seed=1)
    tfcompute.attach(nlist, r_cut=rcut)
    log = hoomd.analyze.log(filename=None, quantities=[
        'potential_energy', 'pressure'], period=1)
    thermo_scalars = []
    tf_virial = []
    for i in range(5):
        hoomd.run(3)
        snapshot = system.take_snapshot()
        tf_virial.append(tfcompute.get_virial_array())
        thermo_scalars.append([log.query('potential_energy'
                                         ), log.query('pressure')])
    # now run with stock lj
    hoomd.context.initialize('')
    system = hoomd.init.create_lattice(
        unitcell=hoomd.lattice.sq(a=4.0),
        n=[3, 3])
    nlist = hoomd.md.nlist.cell(check_period=1)
    hoomd.md.integrate.mode_standard(dt=0.005)
    hoomd.md.integrate.nvt(group=hoomd.group.all(), kT=1,
                           tau=0.2).randomize_velocities(seed=1)
    lj = hoomd.md.pair.lj(r_cut=5.0, nlist=nlist)
    lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
    log = hoomd.analyze.log(filename=None,
                            quantities=['potential_energy', 'pressure'],
                            period=1)
    for i in range(5):
        hoomd.run(3)
        snapshot = system.take_snapshot()
        v = snapshot.particles.velocity
        lj_virial = np.array([lj.forces[j].virial for j in range(N)])
        # Only the first two virial components are compared (see TODO above).
        for j in range(N):
            np.testing.assert_allclose(lj_virial[j][0:2],
                                       tf_virial[i][j][0:2], atol=1e-5)
    # np.testing.assert_allclose([log.query('potential_energy'),
    # log.query('pressure')], thermo_scalars[i], rtol=1e-3)
class test_mol_batching(unittest.TestCase):
    """Tests for molecule-wise batching of the neighbor list in HOOMD-TF."""

    def setUp(self):
        # Fresh temp dir and HOOMD context per test.
        self.tmp = tempfile.mkdtemp()
        self.c = hoomd.context.initialize('')

    def tearDown(self):
        shutil.rmtree(self.tmp)

    def test_single_atom(self):
        """One-atom molecules: attach should work and disable the sorter."""
        N = 3 * 3
        NN = N - 1
        rcut = 5.0
        system = hoomd.init.create_lattice(unitcell=hoomd.lattice.sq(a=4.0),
                                           n=[3, 3])
        mol_indices = htf.find_molecules(system)
        model = build_examples.LJMolModel(
            MN=1, mol_indices=mol_indices, nneighbor_cutoff=NN)
        tfcompute = htf.tfcompute(model)
        nlist = hoomd.md.nlist.cell()
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nvt(group=hoomd.group.all(), kT=1, tau=0.2)
        assert self.c.sorter.enabled
        tfcompute.attach(nlist, r_cut=rcut)
        # make sure tfcompute disabled the sorting
        assert not self.c.sorter.enabled
        hoomd.run(8)

    def test_single_atom_batched(self):
        """Batching is incompatible with molecule models: attach must raise."""
        N = 3 * 3
        NN = N - 1
        rcut = 5.0
        system = hoomd.init.create_lattice(unitcell=hoomd.lattice.sq(a=4.0),
                                           n=[3, 3])
        mol_indices = htf.find_molecules(system)
        model = build_examples.LJMolModel(
            MN=1, mol_indices=mol_indices, nneighbor_cutoff=NN)
        tfcompute = htf.tfcompute(model)
        nlist = hoomd.md.nlist.cell()
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nvt(group=hoomd.group.all(), kT=1, tau=0.2)
        with self.assertRaises(ValueError):
            tfcompute.attach(nlist, r_cut=rcut, batch_size=3)

    def test_single_atom_malformed(self):
        """mol_indices must be a nested list; a flat list raises TypeError."""
        with self.assertRaises(TypeError):
            build_examples.LJMolModel(
                MN=1, mol_indices=[1, 1, 4, 24], nneighbor_cutoff=10)

    def test_multi_atom(self):
        """Molecules of uneven size should run with MN padding."""
        N = 3 * 3
        NN = N - 1
        rcut = 5.0
        model = build_examples.LJMolModel(
            MN=3, mol_indices=[[0, 1, 2], [3, 4], [5, 6, 7], [8]],
            nneighbor_cutoff=NN)
        tfcompute = htf.tfcompute(model)
        system = hoomd.init.create_lattice(unitcell=hoomd.lattice.sq(a=4.0),
                                           n=[3, 3])
        nlist = hoomd.md.nlist.cell()
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nvt(group=hoomd.group.all(), kT=1, tau=0.2)
        tfcompute.attach(nlist, r_cut=rcut)
        hoomd.run(8)

    def test_mol_force_output(self):
        """Molecule model with output_forces=False should still run."""
        N = 3 * 3
        NN = N - 1
        rcut = 5.0
        model = build_examples.LJMolModel(
            MN=3, mol_indices=[[0, 1, 2], [3, 4], [5, 6, 7], [8]],
            nneighbor_cutoff=NN, output_forces=False)
        tfcompute = htf.tfcompute(model)
        system = hoomd.init.create_lattice(unitcell=hoomd.lattice.sq(a=4.0),
                                           n=[3, 3])
        nlist = hoomd.md.nlist.cell()
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nvt(group=hoomd.group.all(), kT=1, tau=0.2)
        tfcompute.attach(nlist, r_cut=rcut)
        hoomd.run(8)

    def test_reverse_mol_index(self):
        """_make_reverse_indices maps atom index -> [molecule, slot] pairs."""
        # each element is the index of atoms in the molecule
        mi = [[1, 2, 0, 0, 0], [3, 0, 0, 0, 0], [4, 5, 7, 8, 9]]
        rmi = _make_reverse_indices(mi)
        # should be
        rmi_ref = [
            [0, 0],
            [0, 1],
            [1, 0],
            [2, 0],
            [2, 1],
            [-1, -1],
            [2, 2],
            [2, 3],
            [2, 4]
        ]
        self.assertEqual(rmi, rmi_ref)
class test_saving(unittest.TestCase):
    """Tests for saving model outputs during a simulation."""

    def setUp(self):
        hoomd.context.initialize('')
        self.tmp = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp)

    def test_tensor_save(self):
        """Outputs saved every 2 steps over 8 steps give 4 rows after unbatching."""
        model = build_examples.TensorSaveModel(0, output_forces=False)
        tfcompute = htf.tfcompute(model)
        system = hoomd.init.create_lattice(unitcell=hoomd.lattice.sq(a=4.0),
                                           n=[3, 3])
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nvt(group=hoomd.group.all(), kT=1, tau=0.2)
        tfcompute.attach(batch_size=3, save_output_period=2)
        hoomd.run(8)
        # reshape to remove batch_size effect
        array = tfcompute.outputs[0].reshape(-1, 9)
        assert array.shape == (4, 9)
class test_bad_models(unittest.TestCase):
    """Model subclasses missing required hooks should fail at construction."""

    def test_no_compute(self):
        """SimModel without a compute() method raises AttributeError."""
        class MyModel(htf.SimModel):
            def call(self, x):
                return x
        with self.assertRaises(AttributeError):
            m = MyModel(0)

    def test_no_molcompute(self):
        """MolSimModel needs mol_compute(), not compute()."""
        class MyModel(htf.MolSimModel):
            def compute(self, nlist):
                return nlist
        with self.assertRaises(AttributeError):
            MyModel(1, [[1]], 0)

    def test_bad_molargs(self):
        """mol_compute() with the wrong signature also raises."""
        class MyModel(htf.MolSimModel):
            def mol_compute(self, nlist):
                return nlist
        with self.assertRaises(AttributeError):
            MyModel(1, [[1]], 0)
class test_nlist(unittest.TestCase):
    """Tests for neighbor-list capacity and sorting behavior."""

    def setUp(self):
        self.tmp = tempfile.mkdtemp()
        hoomd.context.initialize('')

    def tearDown(self):
        shutil.rmtree(self.tmp)

    def test_overflow(self):
        '''Use too small neighbor list and ensure error is thrown
        '''
        N = 8 * 8
        # check_nlist=True makes the model validate neighbor-list capacity.
        model = build_examples.LJModel(4, check_nlist=True)
        tfcompute = htf.tfcompute(model)
        T = 10
        rcut = 10.0
        system = hoomd.init.create_lattice(
            unitcell=hoomd.lattice.sq(a=4.0),
            n=[8, 8])
        nlist = hoomd.md.nlist.cell(check_period=1)
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nvt(group=hoomd.group.all(),
                               kT=1, tau=0.2
                               ).randomize_velocities(seed=1)
        tfcompute.attach(nlist, r_cut=rcut)
        with self.assertRaises(tf.errors.InvalidArgumentError):
            hoomd.run(2)

    def test_sorted(self):
        """A model using sorted top-k neighbors should run without error."""
        N = 8 * 8
        model = build_examples.NlistNN(64, dim=32, top_neighs=8)
        tfcompute = htf.tfcompute(model)
        T = 10
        rcut = 10.0
        system = hoomd.init.create_lattice(
            unitcell=hoomd.lattice.sq(a=4.0),
            n=[8, 8])
        nlist = hoomd.md.nlist.cell(check_period=1)
        hoomd.md.integrate.mode_standard(dt=0.005)
        hoomd.md.integrate.nvt(group=hoomd.group.all(),
                               kT=1, tau=0.2
                               ).randomize_velocities(seed=1)
        tfcompute.attach(nlist, r_cut=rcut)
        hoomd.run(10)
if __name__ == '__main__':
unittest.main()
| 37.347126
| 85
| 0.562939
|
4a0e52fc6d8fe57865d9020d5daab348abb55688
| 495
|
py
|
Python
|
.history/ClassFiles/PythonModulesPackages/NameAttribute_20210107134607.py
|
minefarmer/Comprehensive-Python
|
f97b9b83ec328fc4e4815607e6a65de90bb8de66
|
[
"Unlicense"
] | null | null | null |
.history/ClassFiles/PythonModulesPackages/NameAttribute_20210107134607.py
|
minefarmer/Comprehensive-Python
|
f97b9b83ec328fc4e4815607e6a65de90bb8de66
|
[
"Unlicense"
] | null | null | null |
.history/ClassFiles/PythonModulesPackages/NameAttribute_20210107134607.py
|
minefarmer/Comprehensive-Python
|
f97b9b83ec328fc4e4815607e6a65de90bb8de66
|
[
"Unlicense"
] | null | null | null |
''' Python __name__ attribute
Every module in Python has a special variable called __name__.
The value of the __name__ attribute is set to "__main__" when the module is executed as the main program.
The value of __name__ is set to the module's own name when it is imported by another module.
The if statement prevents certain code within the block from being executed on import.
The ``if __name__ == "__main__"`` guard allows me to run a Python file as a reusable module or as a standalone program.
'''
| 45
| 104
| 0.729293
|
4a0e539126e345ff890a1ac93f73d313321a1590
| 5,678
|
py
|
Python
|
lib_pypy/cffi/setuptools_ext.py
|
pypyjs/pypy
|
2c2c5c73d780ff71f03adc2f1bf2c1c4bb7cc031
|
[
"Apache-2.0",
"OpenSSL"
] | 34
|
2015-07-09T04:53:27.000Z
|
2021-07-19T05:22:27.000Z
|
lib_pypy/cffi/setuptools_ext.py
|
pypyjs/pypy
|
2c2c5c73d780ff71f03adc2f1bf2c1c4bb7cc031
|
[
"Apache-2.0",
"OpenSSL"
] | 6
|
2015-05-30T17:20:45.000Z
|
2017-06-12T14:29:23.000Z
|
lib_pypy/cffi/setuptools_ext.py
|
pypyjs/pypy
|
2c2c5c73d780ff71f03adc2f1bf2c1c4bb7cc031
|
[
"Apache-2.0",
"OpenSSL"
] | 11
|
2015-09-07T14:26:08.000Z
|
2020-04-10T07:20:41.000Z
|
import os
try:
basestring
except NameError:
# Python 3.x
basestring = str
def error(msg):
    """Raise a DistutilsSetupError with *msg*.

    Imported lazily so this module can load without distutils present.
    """
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(msg)
def execfile(filename, glob):
    """Execute *filename* inside the namespace dict *glob*.

    A Python-3 rewrite of the old execfile() builtin. We use it instead
    of __import__() to load the build script, because in some packages
    the intermediate __init__.py files may already try to import the
    file that we are generating.
    """
    with open(filename) as handle:
        source = handle.read()
    code = compile(source, filename, 'exec')
    exec(code, glob, glob)
def add_cffi_module(dist, mod_spec):
    """Process one ``cffi_modules`` spec of the form 'path/build.py:ffi_var'.

    Loads the build script, extracts the FFI object named by *ffi_var*,
    validates it, and registers either a C extension or a pure-Python
    module on the distribution *dist*.
    """
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        # Common mistake: dotted module path instead of a file path.
        ext = ''
        rewritten = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(rewritten):
            ext = ' (rewrite cffi_modules to [%r])' % (
                rewritten + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, ext))

    # Execute the build script in a fresh namespace (see execfile()).
    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)

    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()  # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        # Copy before mutating: kwds may be shared with the build script.
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)

    # source=None means out-of-line "ABI mode" -> pure-Python module.
    if source is None:
        _add_py_module(dist, ffi, module_name)
    else:
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    """Register a cffi C extension module on the distribution.

    The generated C source file does not exist yet, so a '$PLACEHOLDER'
    entry is used and replaced by a custom build_ext at build time.
    """
    from distutils.core import Extension
    from distutils.command.build_ext import build_ext
    from distutils.dir_util import mkpath
    from distutils import log
    from cffi import recompiler

    allsources = ['$PLACEHOLDER']
    allsources.extend(kwds.pop('sources', []))
    ext = Extension(name=module_name, sources=allsources, **kwds)

    def make_mod(tmpdir):
        # Generate (or refresh) the C source under the build temp dir.
        c_file = os.path.join(tmpdir, module_name + source_extension)
        log.info("generating cffi module %r" % c_file)
        mkpath(tmpdir)
        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
        if not updated:
            log.info("already up-to-date")
        return c_file

    if dist.ext_modules is None:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    # Subclass whatever build_ext the distribution already uses so we
    # compose with other customizations.
    base_class = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class):
        def run(self):
            if ext.sources[0] == '$PLACEHOLDER':
                ext.sources[0] = make_mod(self.build_temp)
            base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_make_mod
    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    # classes.  Even in this case the 'build_ext' command should be
    # run once; but just in case, the logic above does nothing if
    # called again.
def _add_py_module(dist, ffi, module_name):
    """Register a pure-Python (ABI mode) cffi module on the distribution.

    Hooks both build_py (normal builds) and build_ext (for
    ``build_ext --inplace``) so the generated .py lands in the right place.
    """
    from distutils.dir_util import mkpath
    from distutils.command.build_py import build_py
    from distutils.command.build_ext import build_ext
    from distutils import log
    from cffi import recompiler

    def generate_mod(py_file):
        # Write (or refresh) the generated Python module.
        log.info("generating cffi module %r" % py_file)
        mkpath(os.path.dirname(py_file))
        updated = recompiler.make_py_source(ffi, module_name, py_file)
        if not updated:
            log.info("already up-to-date")

    base_class = dist.cmdclass.get('build_py', build_py)
    class build_py_make_mod(base_class):
        def run(self):
            base_class.run(self)
            module_path = module_name.split('.')
            module_path[-1] += '.py'
            generate_mod(os.path.join(self.build_lib, *module_path))
    dist.cmdclass['build_py'] = build_py_make_mod

    # the following is only for "build_ext -i"
    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class_2):
        def run(self):
            base_class_2.run(self)
            if self.inplace:
                # from get_ext_fullpath() in distutils/command/build_ext.py
                module_path = module_name.split('.')
                package = '.'.join(module_path[:-1])
                build_py = self.get_finalized_command('build_py')
                package_dir = build_py.get_package_dir(package)
                file_name = module_path[-1] + '.py'
                generate_mod(os.path.join(package_dir, file_name))
    dist.cmdclass['build_ext'] = build_ext_make_mod
def cffi_modules(dist, attr, value):
    """Setuptools entry point for the ``cffi_modules=...`` keyword.

    Accepts a single spec string or a list of them; each spec is handed
    to add_cffi_module().
    """
    assert attr == 'cffi_modules'
    specs = [value] if isinstance(value, basestring) else value
    for spec in specs:
        add_cffi_module(dist, spec)
| 37.111111
| 78
| 0.637548
|
4a0e5479654c8bb457218e99ce5facb786eab588
| 1,237
|
py
|
Python
|
website/addons/box/views.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | null | null | null |
website/addons/box/views.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | null | null | null |
website/addons/box/views.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | null | null | null |
"""Views for the node settings page."""
# -*- coding: utf-8 -*-
from flask import request
from website.addons.base import generic_views
from website.addons.box.serializer import BoxSerializer
from website.project.decorators import must_have_addon, must_be_addon_authorizer
SHORT_NAME = 'box'
FULL_NAME = 'Box'
box_account_list = generic_views.account_list(
SHORT_NAME,
BoxSerializer
)
box_import_auth = generic_views.import_auth(
SHORT_NAME,
BoxSerializer
)
@must_have_addon(SHORT_NAME, 'node')
@must_be_addon_authorizer(SHORT_NAME)
def box_folder_list(node_addon, **kwargs):
    """ Returns all the subsequent folders under the folder id passed.

    Reads ``folder_id`` from the request query string; ``None`` when absent.
    """
    folder_id = request.args.get('folder_id')
    return node_addon.get_folders(folder_id=folder_id)
box_get_config = generic_views.get_config(
SHORT_NAME,
BoxSerializer
)
def _set_folder(node_addon, folder, auth):
    """Persist *folder* (a dict carrying an ``id``) as the node's linked Box folder."""
    folder_id = folder['id']
    node_addon.set_folder(folder_id, auth=auth)
    node_addon.save()
box_set_config = generic_views.set_config(
SHORT_NAME,
FULL_NAME,
BoxSerializer,
_set_folder
)
box_deauthorize_node = generic_views.deauthorize_node(
SHORT_NAME
)
box_root_folder = generic_views.root_folder(
SHORT_NAME
)
| 22.490909
| 80
| 0.756669
|
4a0e54aaf82ac6afebe684406233be898902f103
| 5,792
|
py
|
Python
|
jacket/objects/compute/notification.py
|
bopopescu/jacket
|
d7ad3147fcb43131098c2a5210847634ff5fb325
|
[
"Apache-2.0"
] | null | null | null |
jacket/objects/compute/notification.py
|
bopopescu/jacket
|
d7ad3147fcb43131098c2a5210847634ff5fb325
|
[
"Apache-2.0"
] | null | null | null |
jacket/objects/compute/notification.py
|
bopopescu/jacket
|
d7ad3147fcb43131098c2a5210847634ff5fb325
|
[
"Apache-2.0"
] | 2
|
2016-08-10T02:21:49.000Z
|
2020-07-24T01:57:21.000Z
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from jacket.objects.compute import base
from jacket.objects.compute import fields
from jacket import rpc
@base.NovaObjectRegistry.register
class EventType(base.NovaObject):
    """Versioned object describing a notification's event type."""
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'object': fields.StringField(nullable=False),
        'action': fields.NotificationActionField(nullable=False),
        'phase': fields.NotificationPhaseField(nullable=True),
    }

    def to_notification_event_type_field(self):
        """Serialize the object to the wire format."""
        # Wire format is "<object>.<action>" plus ".<phase>" when phase is set.
        s = '%s.%s' % (self.object, self.action)
        if self.obj_attr_is_set('phase'):
            s += '.%s' % self.phase
        return s
# Note(gibi): It is explicitly not registered as this class shall not be used
# directly, it is just a base class for notification payloads.
@base.NovaObjectRegistry.register_if(False)
class NotificationPayloadBase(base.NovaObject):
    """Base class for the payload of versioned notifications.

    Not registered directly: subclasses define concrete payloads.
    """
    # SCHEMA defines how to populate the payload fields. It is a dictionary
    # where every key value pair has the following format:
    # <payload_field_name>: (<data_source_name>,
    #                        <field_of_the_data_source>)
    # The <payload_field_name> is the name where the data will be stored in the
    # payload object, this field has to be defined as a field of the payload.
    # The <data_source_name> shall refer to name of the parameter passed as
    # kwarg to the payload's populate_schema() call and this object will be
    # used as the source of the data. The <field_of_the_data_source> shall be
    # a valid field of the passed argument.
    # The SCHEMA needs to be applied with the populate_schema() call before the
    # notification can be emitted.
    # The value of the payload.<payload_field_name> field will be set by the
    # <data_source_name>.<field_of_the_data_source> field. The
    # <data_source_name> will not be part of the payload object internal or
    # external representation.
    # Payload fields that are not set by the SCHEMA can be filled in the same
    # way as in any versioned object.
    SCHEMA = {}
    # Version 1.0: Initial version
    VERSION = '1.0'

    def __init__(self, *args, **kwargs):
        super(NotificationPayloadBase, self).__init__(*args, **kwargs)
        # Payloads with an empty SCHEMA are considered populated immediately.
        self.populated = not self.SCHEMA

    def populate_schema(self, **kwargs):
        """Populate the object based on the SCHEMA and the source objects

        :param kwargs: A dict contains the source object at the key defined in
                       the SCHEMA
        """
        for key, (obj, field) in self.SCHEMA.items():
            source = kwargs[obj]
            # Only copy fields the source object actually has set.
            if source.obj_attr_is_set(field):
                setattr(self, key, getattr(source, field))
        self.populated = True
@base.NovaObjectRegistry.register
class NotificationPublisher(base.NovaObject):
    """Versioned object identifying the service that emits a notification."""
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'host': fields.StringField(nullable=False),
        'binary': fields.StringField(nullable=False),
    }

    @classmethod
    def from_service_obj(cls, service):
        """Alternate constructor from a service object's host and binary."""
        return cls(host=service.host, binary=service.binary)
# Note(gibi): It is explicitly not registered as this class shall not be used
# directly, it is just a base class for notification.
@base.NovaObjectRegistry.register_if(False)
class NotificationBase(base.NovaObject):
    """Base class for versioned notifications.

    Every subclass shall define a 'payload' field.
    """
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'priority': fields.NotificationPriorityField(),
        'event_type': fields.ObjectField('EventType'),
        'publisher': fields.ObjectField('NotificationPublisher'),
    }

    def _emit(self, context, event_type, publisher_id, payload):
        # Dispatch through the notifier method matching our priority
        # (e.g. notifier.info / notifier.error).
        notifier = rpc.get_versioned_notifier(publisher_id)
        notify = getattr(notifier, self.priority)
        notify(context, event_type=event_type, payload=payload)

    def emit(self, context):
        """Send the notification."""
        assert self.payload.populated

        # Note(gibi): notification payload will be a newly populated object
        # therefore every field of it will look changed so this does not carry
        # any extra information so we drop this from the payload.
        self.payload.obj_reset_changes(recursive=False)
        self._emit(context,
                   event_type=
                   self.event_type.to_notification_event_type_field(),
                   publisher_id='%s:%s' %
                                (self.publisher.binary,
                                 self.publisher.host),
                   payload=self.payload.obj_to_primitive())
def notification_sample(sample):
    """Class decorator to attach the notification sample information
    to the notification object for documentation generation purposes.

    :param sample: the path of the sample json file relative to the
                   doc/notification_samples/ directory in the compute
                   repository root.
    """
    def decorator(cls):
        cls.sample = sample
        return cls
    return decorator
| 38.357616
| 80
| 0.672997
|
4a0e55be85e971c3f6a32baa1002042dc5e38e4c
| 1,531
|
py
|
Python
|
deeplearning/chainer/chainer_trainer_example.py
|
terasakisatoshi/pythonCodes
|
baee095ecee96f6b5ec6431267cdc6c40512a542
|
[
"MIT"
] | null | null | null |
deeplearning/chainer/chainer_trainer_example.py
|
terasakisatoshi/pythonCodes
|
baee095ecee96f6b5ec6431267cdc6c40512a542
|
[
"MIT"
] | null | null | null |
deeplearning/chainer/chainer_trainer_example.py
|
terasakisatoshi/pythonCodes
|
baee095ecee96f6b5ec6431267cdc6c40512a542
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import unicode_literals
import chainer
import chainer.datasets
from chainer import training
from chainer.training import extensions
import chainer.links as L
import chainer.functions as F
class MLP(chainer.Chain):
    """Three-layer fully connected network with ReLU hidden activations.

    Input size is inferred (None) on first forward pass.
    """
    def __init__(self, n_units, n_out):
        super(MLP, self).__init__(
            l1=L.Linear(None, n_units),
            l2=L.Linear(None, n_units),
            l3=L.Linear(None, n_out),
        )

    def __call__(self, x):
        """Forward pass: two ReLU hidden layers, linear output (logits)."""
        h1 = F.relu(self.l1(x))
        h2 = F.relu(self.l2(h1))
        return self.l3(h2)
def main():
    """Train a small MLP classifier on MNIST using the chainer Trainer API.

    Runs 500 epochs on CPU (device=-1), snapshotting every 100 iterations
    and logging train/validation loss and accuracy to ``result_``.
    """
    train, test = chainer.datasets.get_mnist()
    train_iter = chainer.iterators.SerialIterator(train, 100)
    test_iter = chainer.iterators.SerialIterator(
        test, 100, repeat=False, shuffle=False)
    model = L.Classifier(MLP(784, 10))
    optimizer = chainer.optimizers.SGD()
    optimizer.setup(model)
    updater = training.StandardUpdater(train_iter, optimizer, device=-1)
    trainer = training.Trainer(updater, (500, 'epoch'), out='result_')
    trainer.extend(extensions.Evaluator(test_iter, model, device=-1))
    trainer.extend(extensions.dump_graph('main/loss'))
    trainer.extend(extensions.snapshot(), trigger=(100, 'iteration'))
    trainer.extend(extensions.LogReport())
    # BUG FIX: 'validation/main/acuracy' was misspelled, so the validation
    # accuracy column of the printed report was always empty.
    trainer.extend(extensions.PrintReport(
        ['epoch', 'main/loss', 'validation/main/loss',
         'main/accuracy', 'validation/main/accuracy']))
    trainer.extend(extensions.ProgressBar())
    trainer.run()


if __name__ == '__main__':
    main()
| 34.022222
| 96
| 0.706728
|
4a0e55c5cac64b8df4f9147b92e766fd9436a38f
| 3,053
|
py
|
Python
|
cafebot/bot.py
|
saccho/my_linebot
|
a606e2f2b333a8d678c8b6cc4c4ce7ee5e60ce62
|
[
"MIT"
] | null | null | null |
cafebot/bot.py
|
saccho/my_linebot
|
a606e2f2b333a8d678c8b6cc4c4ce7ee5e60ce62
|
[
"MIT"
] | null | null | null |
cafebot/bot.py
|
saccho/my_linebot
|
a606e2f2b333a8d678c8b6cc4c4ce7ee5e60ce62
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# coding: utf-8
import os
import sys
import urllib3
from cafebot import settings
from cafebot.gnavi import Gnavi
from flask import Flask, request, abort
from linebot.models import (
MessageEvent, TextMessage, TextSendMessage,
SourceUser, SourceGroup, SourceRoom,
TemplateSendMessage, ConfirmTemplate, MessageTemplateAction,
ButtonsTemplate, ImageCarouselTemplate, ImageCarouselColumn, URITemplateAction,
CarouselTemplate, CarouselColumn, LocationMessage, LocationSendMessage,
)
from linebot import (
LineBotApi, WebhookHandler
)
app = Flask(__name__)
line_bot_api = settings.line_bot_api
handler = settings.handler
static_tmp_path = settings.static_tmp_path
gnavi_key = settings.gnavi_key
def run():
    """Create the tmp download dir, then start the Flask app."""
    listen_port = int(os.getenv("PORT", 5000))
    # create tmp dir for download content before serving any request
    make_static_tmp_dir()
    app.run(host="0.0.0.0", port=listen_port)
# function for create tmp dir for download content
def make_static_tmp_dir():
    """Create the tmp dir for downloaded content if it does not already exist.

    BUG FIX: the original caught OSError and compared ``exc.errno`` against
    ``errno.EEXIST``, but ``errno`` was never imported, so any real failure
    raised NameError instead of propagating. ``exist_ok=True`` implements the
    intended "ignore only already-exists-as-a-directory" behavior directly.
    """
    os.makedirs(static_tmp_path, exist_ok=True)
@app.route("/callback", methods=['POST'])
def callback():
    """LINE webhook endpoint: verify the request signature and dispatch events.

    Returns 'OK' on success; aborts with 400 when the signature is invalid.
    """
    # BUG FIX: InvalidSignatureError was never imported at module level, so
    # the except clause below raised NameError on any bad signature.
    from linebot.exceptions import InvalidSignatureError

    # get X-Line-Signature header value
    signature = request.headers['X-Line-Signature']
    # get request body as text
    body = request.get_data(as_text=True)
    app.logger.info("Request body: " + body)
    # handle webhook body
    try:
        handler.handle(body, signature)
    except InvalidSignatureError:
        abort(400)
    return 'OK'
@handler.add(MessageEvent, message=LocationMessage)
def handle_location_message(event):
    """Reply to a shared location with a Google Maps walking route to the
    cafe found via the Gurunavi lookup, or echo the lookup's error message.
    """
    gnavi = Gnavi(gnavi_key, event)
    gnavi_data = gnavi.gnavi()
    # `is None` (identity) instead of the original `== None` comparison.
    if gnavi_data['error_message'] is None:
        shop_name = gnavi_data['name']
        # shop_image = gnavi_data['shop_image1']
        src_latitude = str(event.message.latitude)
        src_longitude = str(event.message.longitude)
        # (misspelled locals `destatitude`/`deestLongitude` renamed; the
        # unused `shop_address` local was dropped)
        dest_latitude = str(gnavi_data['latitude'])
        dest_longitude = str(gnavi_data['longitude'])
        # dirflg=w requests a walking route.
        route_url = 'http://maps.google.com/maps'\
            + '?saddr=' + src_latitude + ',' + src_longitude\
            + '&daddr=' + dest_latitude + ',' + dest_longitude\
            + '&dirflg=w'
        buttons_template = ButtonsTemplate(
            title=shop_name, text='ここからのルートを表示します', actions=[
                URITemplateAction(
                    label='ルートを表示', uri=route_url),
            ])
        template_message = TemplateSendMessage(
            alt_text='Buttons', template=buttons_template)
        line_bot_api.reply_message(event.reply_token, template_message)
    else:
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text=gnavi_data['error_message'])
        )
| 33.184783
| 84
| 0.649198
|
4a0e568d1d9769308a5e82825d077e49b697c11b
| 36,719
|
py
|
Python
|
src/transformers/models/openai/modeling_openai.py
|
studytutorials/transformers
|
27b1516d32b691533fc497e7ee4ceb88c39cdfdf
|
[
"Apache-2.0"
] | 2
|
2022-01-11T19:17:40.000Z
|
2022-01-11T19:49:48.000Z
|
src/transformers/models/openai/modeling_openai.py
|
feifeivv/transformers
|
08a5f57567d8a975d900b66658bfd3c28c9dbec5
|
[
"Apache-2.0"
] | 1
|
2021-11-11T16:16:49.000Z
|
2021-11-12T17:49:08.000Z
|
src/transformers/models/openai/modeling_openai.py
|
feifeivv/transformers
|
08a5f57567d8a975d900b66658bfd3c28c9dbec5
|
[
"Apache-2.0"
] | 2
|
2021-02-18T03:12:51.000Z
|
2021-04-16T13:16:58.000Z
|
# coding=utf-8
# Copyright 2018 The OpenAI Team Authors and HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch OpenAI GPT model."""
import json
import math
import os
from dataclasses import dataclass
from typing import Optional, Tuple
import torch
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
from ...activations import gelu_new, silu
from ...file_utils import (
ModelOutput,
add_code_sample_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import BaseModelOutput, CausalLMOutput, SequenceClassifierOutput
from ...modeling_utils import (
Conv1D,
PreTrainedModel,
SequenceSummary,
find_pruneable_heads_and_indices,
prune_conv1d_layer,
)
from ...utils import logging
from .configuration_openai import OpenAIGPTConfig
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "openai-gpt"
_CONFIG_FOR_DOC = "OpenAIGPTConfig"
_TOKENIZER_FOR_DOC = "OpenAIGPTTokenizer"
OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = [
"openai-gpt",
# See all OpenAI GPT models at https://huggingface.co/models?filter=openai-gpt
]
def load_tf_weights_in_openai_gpt(model, config, openai_checkpoint_folder_path):
    """Load tf pre-trained weights in a pytorch model (from NumPy arrays here).

    The checkpoint folder is expected to contain ``parameters_names.json``,
    ``params_shapes.json`` and ten ``params_{0..9}.npy`` shards holding all
    parameters flattened and concatenated.
    """
    import re

    import numpy as np

    if ".ckpt" in openai_checkpoint_folder_path:
        openai_checkpoint_folder_path = os.path.dirname(openai_checkpoint_folder_path)
    logger.info(f"Loading weights from {openai_checkpoint_folder_path}")
    with open(openai_checkpoint_folder_path + "/parameters_names.json", "r", encoding="utf-8") as names_handle:
        names = json.load(names_handle)
    with open(openai_checkpoint_folder_path + "/params_shapes.json", "r", encoding="utf-8") as shapes_handle:
        shapes = json.load(shapes_handle)
    # Split the concatenated shards back into individual parameters and
    # restore each one's original shape.
    offsets = np.cumsum([np.prod(shape) for shape in shapes])
    init_params = [np.load(openai_checkpoint_folder_path + f"/params_{n}.npy") for n in range(10)]
    init_params = np.split(np.concatenate(init_params, 0), offsets)[:-1]
    init_params = [param.reshape(shape) for param, shape in zip(init_params, shapes)]
    # This was used when we had a single embedding matrix for positions and tokens
    # init_params[0] = np.concatenate([init_params[1], init_params[0]], 0)
    # del init_params[1]
    init_params = [arr.squeeze() for arr in init_params]
    try:
        assert model.tokens_embed.weight.shape == init_params[1].shape
        assert model.positions_embed.weight.shape == init_params[0].shape
    except AssertionError as e:
        e.args += (model.tokens_embed.weight.shape, init_params[1].shape)
        e.args += (model.positions_embed.weight.shape, init_params[0].shape)
        raise
    model.tokens_embed.weight.data = torch.from_numpy(init_params[1])
    model.positions_embed.weight.data = torch.from_numpy(init_params[0])
    names.pop(0)
    # Pop position and token embedding arrays
    init_params.pop(0)
    init_params.pop(0)
    for name, array in zip(names, init_params):  # names[1:n_transfer], init_params[1:n_transfer]):
        name = name[6:]  # skip "model/"
        assert name[-2:] == ":0"
        name = name[:-2]
        name = name.split("/")
        # Walk the pointer down the module tree following the TF scope path.
        pointer = model
        for m_name in name:
            if re.fullmatch(r"[A-Za-z]+\d+", m_name):
                scope_names = re.split(r"(\d+)", m_name)
            else:
                scope_names = [m_name]
            if scope_names[0] == "g":
                pointer = getattr(pointer, "weight")
            elif scope_names[0] == "b":
                pointer = getattr(pointer, "bias")
            elif scope_names[0] == "w":
                pointer = getattr(pointer, "weight")
            else:
                pointer = getattr(pointer, scope_names[0])
            if len(scope_names) >= 2:
                num = int(scope_names[1])
                pointer = pointer[num]
        # FIX: this shape check was duplicated verbatim; one copy removed.
        try:
            assert (
                pointer.shape == array.shape
            ), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched"
        except AssertionError as e:
            e.args += (pointer.shape, array.shape)
            raise
        logger.info(f"Initialize PyTorch weight {name}")
        pointer.data = torch.from_numpy(array)
    return model
# Activation lookup keyed by config.afn. NOTE(review): "relu" maps to the
# nn.ReLU *class* while the other entries are plain functions — MLP assigns
# the looked-up value directly, so afn="relu" would not behave like the
# function entries; TODO confirm before relying on afn="relu".
ACT_FNS = {"relu": nn.ReLU, "silu": silu, "gelu": gelu_new, "swish": silu}
class Attention(nn.Module):
    """Causal multi-head self-attention layer used inside each GPT block."""

    def __init__(self, nx, n_positions, config, scale=False):
        super().__init__()
        n_state = nx  # in Attention: n_state=768 (nx=n_embd)
        # [switch nx => n_state from Block to Attention to keep identical to TF implementation]
        assert n_state % config.n_head == 0
        # Lower-triangular (1, 1, n_positions, n_positions) buffer used as the
        # causal mask in _attn.
        self.register_buffer(
            "bias", torch.tril(torch.ones(n_positions, n_positions)).view(1, 1, n_positions, n_positions)
        )
        self.n_head = config.n_head
        self.split_size = n_state
        self.scale = scale
        self.c_attn = Conv1D(n_state * 3, nx)  # joint query/key/value projection
        self.c_proj = Conv1D(n_state, nx)  # output projection
        self.attn_dropout = nn.Dropout(config.attn_pdrop)
        self.resid_dropout = nn.Dropout(config.resid_pdrop)
        self.pruned_heads = set()

    def prune_heads(self, heads):
        """Remove the given attention heads, shrinking c_attn/c_proj in place."""
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.n_head, self.split_size // self.n_head, self.pruned_heads
        )
        # c_attn packs q, k, v side by side, so the same column index must be
        # pruned in each of the three sections.
        index_attn = torch.cat([index, index + self.split_size, index + (2 * self.split_size)])
        # Prune conv1d layers
        self.c_attn = prune_conv1d_layer(self.c_attn, index_attn, dim=1)
        self.c_proj = prune_conv1d_layer(self.c_proj, index, dim=0)
        # Update hyper params
        self.split_size = (self.split_size // self.n_head) * (self.n_head - len(heads))
        self.n_head = self.n_head - len(heads)
        self.pruned_heads = self.pruned_heads.union(heads)

    def _attn(self, q, k, v, attention_mask=None, head_mask=None, output_attentions=False):
        """Scaled dot-product attention with causal and optional padding masks."""
        w = torch.matmul(q, k)
        if self.scale:
            w = w / math.sqrt(v.size(-1))
        # w = w * self.bias + -1e9 * (1 - self.bias)  # TF implementation method: mask_attn_weights
        # XD: self.b may be larger than w, so we need to crop it
        b = self.bias[:, :, : w.size(-2), : w.size(-1)]
        # Additive masking: masked positions get -1e4 before the softmax.
        w = w * b + -1e4 * (1 - b)
        if attention_mask is not None:
            # Apply the attention mask
            w = w + attention_mask
        w = nn.Softmax(dim=-1)(w)
        w = self.attn_dropout(w)
        # Mask heads if we want to
        if head_mask is not None:
            w = w * head_mask
        outputs = [torch.matmul(w, v)]
        if output_attentions:
            outputs.append(w)
        return outputs

    def merge_heads(self, x):
        """Inverse of split_heads: fold the head dimension back into features."""
        x = x.permute(0, 2, 1, 3).contiguous()
        new_x_shape = x.size()[:-2] + (x.size(-2) * x.size(-1),)
        return x.view(*new_x_shape)  # in Tensorflow implementation: fct merge_states

    def split_heads(self, x, k=False):
        """Reshape features into per-head slices; keys are transposed so that
        matmul(q, k) in _attn contracts the feature dimension directly."""
        new_x_shape = x.size()[:-1] + (self.n_head, x.size(-1) // self.n_head)
        x = x.view(*new_x_shape)  # in Tensorflow implementation: fct split_states
        if k:
            return x.permute(0, 2, 3, 1)
        else:
            return x.permute(0, 2, 1, 3)

    def forward(self, x, attention_mask=None, head_mask=None, output_attentions=False):
        """Project to q/k/v, attend, merge heads, project out, apply dropout.

        Returns ``[output]`` plus the attention weights when requested.
        """
        x = self.c_attn(x)
        query, key, value = x.split(self.split_size, dim=2)
        query = self.split_heads(query)
        key = self.split_heads(key, k=True)
        value = self.split_heads(value)
        attn_outputs = self._attn(query, key, value, attention_mask, head_mask, output_attentions)
        a = attn_outputs[0]
        a = self.merge_heads(a)
        a = self.c_proj(a)
        a = self.resid_dropout(a)
        outputs = [a] + attn_outputs[1:]
        return outputs  # a, (attentions)
class MLP(nn.Module):
    """Position-wise feed-forward block: Conv1D up-projection, activation,
    Conv1D down-projection, residual dropout."""

    def __init__(self, n_state, config):  # in MLP: n_state=3072 (4 * n_embd)
        super().__init__()
        embed_dim = config.n_embd
        self.c_fc = Conv1D(n_state, embed_dim)
        self.c_proj = Conv1D(embed_dim, n_state)
        self.act = ACT_FNS[config.afn]
        self.dropout = nn.Dropout(config.resid_pdrop)

    def forward(self, x):
        hidden = self.act(self.c_fc(x))
        projected = self.c_proj(hidden)
        return self.dropout(projected)
class Block(nn.Module):
    """One transformer layer: attention then MLP, each followed by a residual
    connection and a post-LayerNorm (GPT-1 ordering)."""

    def __init__(self, n_positions, config, scale=False):
        super().__init__()
        embed_dim = config.n_embd
        self.attn = Attention(embed_dim, n_positions, config, scale)
        self.ln_1 = nn.LayerNorm(embed_dim, eps=config.layer_norm_epsilon)
        self.mlp = MLP(4 * embed_dim, config)
        self.ln_2 = nn.LayerNorm(embed_dim, eps=config.layer_norm_epsilon)

    def forward(self, x, attention_mask=None, head_mask=None, output_attentions=False):
        attn_outputs = self.attn(
            x,
            attention_mask=attention_mask,
            head_mask=head_mask,
            output_attentions=output_attentions,
        )
        attn_out = attn_outputs[0]
        normed = self.ln_1(x + attn_out)
        hidden = self.ln_2(normed + self.mlp(normed))
        # [hidden] plus the attention weights when they were requested.
        return [hidden] + attn_outputs[1:]
class OpenAIGPTPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = OpenAIGPTConfig
    load_tf_weights = load_tf_weights_in_openai_gpt
    base_model_prefix = "transformer"
    _keys_to_ignore_on_load_missing = [r"position_ids"]

    def _init_weights(self, module):
        """Initialize the weights of a single submodule."""
        std = self.config.initializer_range
        if isinstance(module, (nn.Linear, Conv1D)):
            # Slightly different from the TF version, which uses
            # truncated_normal (cf https://github.com/pytorch/pytorch/pull/5617).
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            # LayerNorm starts as the identity transform.
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
@dataclass
class OpenAIGPTDoubleHeadsModelOutput(ModelOutput):
    """
    Base class for outputs of models predicting if two sentences are consecutive or not.
    Args:
        loss (:obj:`torch.FloatTensor` of shape :obj:`(1,)`, `optional`, returned when ``labels`` is provided):
            Language modeling loss.
        mc_loss (:obj:`torch.FloatTensor` of shape :obj:`(1,)`, `optional`, returned when :obj:`mc_labels` is provided):
            Multiple choice classification loss.
        logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, num_choices, sequence_length, config.vocab_size)`):
            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
        mc_logits (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, num_choices)`):
            Prediction scores of the multiple choice classification head (scores for each choice before SoftMax).
        hidden_states (:obj:`tuple(torch.FloatTensor)`, `optional`, returned when ``output_hidden_states=True`` is passed or when ``config.output_hidden_states=True``):
            Tuple of :obj:`torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer)
            of shape :obj:`(batch_size, sequence_length, hidden_size)`.
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        attentions (:obj:`tuple(torch.FloatTensor)`, `optional`, returned when ``output_attentions=True`` is passed or when ``config.output_attentions=True``):
            Tuple of :obj:`torch.FloatTensor` (one for each layer) of shape :obj:`(batch_size, num_heads,
            sequence_length, sequence_length)`.
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention
            heads.
    """

    # Fields default to None so that ModelOutput omits absent entries.
    loss: Optional[torch.FloatTensor] = None
    mc_loss: Optional[torch.FloatTensor] = None
    logits: torch.FloatTensor = None
    mc_logits: torch.FloatTensor = None
    hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    attentions: Optional[Tuple[torch.FloatTensor]] = None
OPENAI_GPT_START_DOCSTRING = r"""
This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic
methods the library implements for all its model (such as downloading or saving, resizing the input embeddings,
pruning heads etc.)
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to
general usage and behavior.
Parameters:
config (:class:`~transformers.OpenAIGPTConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
OPENAI_GPT_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using :class:`~transformers.OpenAIGPTTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
token_type_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
1]``:
- 0 corresponds to a `sentence A` token,
- 1 corresponds to a `sentence B` token.
`What are token type IDs? <../glossary.html#token-type-ids>`_
position_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
config.max_position_embeddings - 1]``.
`What are position IDs? <../glossary.html#position-ids>`__
head_mask (:obj:`torch.FloatTensor` of shape :obj:`(num_heads,)` or :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the self-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
    "The bare OpenAI GPT transformer model outputting raw hidden-states without any specific head on top.",
    OPENAI_GPT_START_DOCSTRING,
)
class OpenAIGPTModel(OpenAIGPTPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        # Separate token and position embedding tables (historically a single
        # matrix in the original checkpoint — see load_tf_weights above).
        self.tokens_embed = nn.Embedding(config.vocab_size, config.n_embd)
        self.positions_embed = nn.Embedding(config.n_positions, config.n_embd)
        self.drop = nn.Dropout(config.embd_pdrop)
        self.h = nn.ModuleList([Block(config.n_positions, config, scale=True) for _ in range(config.n_layer)])
        self.register_buffer("position_ids", torch.arange(config.n_positions))
        self.init_weights()

    def get_input_embeddings(self):
        """Return the token embedding table."""
        return self.tokens_embed

    def set_input_embeddings(self, new_embeddings):
        """Replace the token embedding table."""
        self.tokens_embed = new_embeddings

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
        """
        for layer, heads in heads_to_prune.items():
            self.h[layer].attn.prune_heads(heads)

    @add_start_docstrings_to_model_forward(OPENAI_GPT_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=BaseModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        # Fall back to the config defaults for unspecified output options.
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
            input_ids = input_ids.view(-1, input_shape[-1])
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")
        if position_ids is None:
            # Code is different from when we had a single embedding matrix from position and token embeddings
            position_ids = self.position_ids[None, : input_shape[-1]]
        # Attention mask.
        if attention_mask is not None:
            # We create a 3D attention mask from a 2D tensor mask.
            # Sizes are [batch_size, 1, 1, to_seq_length]
            # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
            # this attention mask is more simple than the triangular masking of causal attention
            # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
            attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
            # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
            # masked positions, this operation will create a tensor which is 0.0 for
            # positions we want to attend and -10000.0 for masked positions.
            # Since we are adding it to the raw scores before the softmax, this is
            # effectively the same as removing these entirely.
            attention_mask = attention_mask.to(dtype=next(self.parameters()).dtype)  # fp16 compatibility
            attention_mask = (1.0 - attention_mask) * -10000.0
        # Prepare head mask if needed
        head_mask = self.get_head_mask(head_mask, self.config.n_layer)
        if inputs_embeds is None:
            inputs_embeds = self.tokens_embed(input_ids)
        position_embeds = self.positions_embed(position_ids)
        if token_type_ids is not None:
            # Token type ids are embedded with the *token* table, not a
            # dedicated one.
            token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1))
            token_type_embeds = self.tokens_embed(token_type_ids)
        else:
            token_type_embeds = 0
        hidden_states = inputs_embeds + position_embeds + token_type_embeds
        hidden_states = self.drop(hidden_states)
        output_shape = input_shape + (hidden_states.size(-1),)
        all_attentions = () if output_attentions else None
        all_hidden_states = () if output_hidden_states else None
        for i, block in enumerate(self.h):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)
            outputs = block(hidden_states, attention_mask, head_mask[i], output_attentions=output_attentions)
            hidden_states = outputs[0]
            if output_attentions:
                all_attentions = all_attentions + (outputs[1],)
        hidden_states = hidden_states.view(*output_shape)
        # Add last layer
        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)
        if not return_dict:
            return tuple(v for v in [hidden_states, all_hidden_states, all_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_attentions,
        )
@add_start_docstrings(
    """
    OpenAI GPT Model transformer with a language modeling head on top (linear layer with weights tied to the input
    embeddings).
    """,
    OPENAI_GPT_START_DOCSTRING,
)
class OpenAIGPTLMHeadModel(OpenAIGPTPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.transformer = OpenAIGPTModel(config)
        self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
        self.init_weights()

    def get_output_embeddings(self):
        """Return the LM head (its weights are tied to the input embeddings)."""
        return self.lm_head

    def set_output_embeddings(self, new_embeddings):
        """Replace the LM head."""
        self.lm_head = new_embeddings

    @add_start_docstrings_to_model_forward(OPENAI_GPT_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=CausalLMOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
            ``labels = input_ids`` Indices are selected in ``[-100, 0, ..., config.vocab_size]`` All labels set to
            ``-100`` are ignored (masked), the loss is only computed for labels in ``[0, ..., config.vocab_size]``
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        transformer_outputs = self.transformer(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        hidden_states = transformer_outputs[0]
        lm_logits = self.lm_head(hidden_states)
        loss = None
        if labels is not None:
            # Shift so that tokens < n predict n
            shift_logits = lm_logits[..., :-1, :].contiguous()
            shift_labels = labels[..., 1:].contiguous()
            # Flatten the tokens
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
        if not return_dict:
            # Tuple output: (loss?, lm_logits, hidden_states?, attentions?)
            output = (lm_logits,) + transformer_outputs[1:]
            return ((loss,) + output) if loss is not None else output
        return CausalLMOutput(
            loss=loss,
            logits=lm_logits,
            hidden_states=transformer_outputs.hidden_states,
            attentions=transformer_outputs.attentions,
        )
@add_start_docstrings(
    """
    OpenAI GPT Model transformer with a language modeling and a multiple-choice classification head on top e.g. for
    RocStories/SWAG tasks. The two heads are two linear layers. The language modeling head has its weights tied to the
    input embeddings, the classification head takes as input the input of a specified classification token index in the
    input sequence).
    """,
    OPENAI_GPT_START_DOCSTRING,
)
class OpenAIGPTDoubleHeadsModel(OpenAIGPTPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        # The multiple-choice head always produces a single score per choice.
        config.num_labels = 1
        self.transformer = OpenAIGPTModel(config)
        self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
        self.multiple_choice_head = SequenceSummary(config)
        self.init_weights()

    def get_output_embeddings(self):
        """Return the LM head (its weights are tied to the input embeddings)."""
        return self.lm_head

    def set_output_embeddings(self, new_embeddings):
        """Replace the LM head."""
        self.lm_head = new_embeddings

    @add_start_docstrings_to_model_forward(OPENAI_GPT_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=OpenAIGPTDoubleHeadsModelOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        mc_token_ids=None,
        labels=None,
        mc_labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        mc_token_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, num_choices)`, `optional`, default to index of the last token of the input):
            Index of the classification token in each input sequence. Selected in the range ``[0, input_ids.size(-1) -
            1]``.
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
            ``labels = input_ids`` Indices are selected in ``[-1, 0, ..., config.vocab_size]`` All labels set to
            ``-100`` are ignored (masked), the loss is only computed for labels in ``[0, ..., config.vocab_size]``
        mc_labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size)`, `optional`):
            Labels for computing the multiple choice classification loss. Indices should be in ``[0, ...,
            num_choices]`` where `num_choices` is the size of the second dimension of the input tensors. (see
            `input_ids` above)
        Return:
        Examples::
            >>> from transformers import OpenAIGPTTokenizer, OpenAIGPTDoubleHeadsModel
            >>> import torch
            >>> tokenizer = OpenAIGPTTokenizer.from_pretrained('openai-gpt')
            >>> model = OpenAIGPTDoubleHeadsModel.from_pretrained('openai-gpt')
            >>> tokenizer.add_special_tokens({'cls_token': '[CLS]'})  # Add a [CLS] to the vocabulary (we should train it also!)
            >>> model.resize_token_embeddings(len(tokenizer))
            >>> choices = ["Hello, my dog is cute [CLS]", "Hello, my cat is cute [CLS]"]
            >>> input_ids = torch.tensor([tokenizer.encode(s) for s in choices]).unsqueeze(0)  # Batch size 1, 2 choices
            >>> mc_token_ids = torch.tensor([input_ids.size(-1)-1, input_ids.size(-1)-1]).unsqueeze(0)  # Batch size 1
            >>> outputs = model(input_ids, mc_token_ids=mc_token_ids)
            >>> lm_logits = outputs.lm_logits
            >>> mc_logits = outputs.mc_logits
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        transformer_outputs = self.transformer(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        hidden_states = transformer_outputs[0]
        lm_logits = self.lm_head(hidden_states)
        mc_logits = self.multiple_choice_head(hidden_states, mc_token_ids).squeeze(-1)
        lm_loss, mc_loss = None, None
        if mc_labels is not None:
            loss_fct = CrossEntropyLoss()
            mc_loss = loss_fct(mc_logits.view(-1, mc_logits.size(-1)), mc_labels.view(-1))
        if labels is not None:
            # Shift so that tokens < n predict n, as in the LM head model.
            shift_logits = lm_logits[..., :-1, :].contiguous()
            shift_labels = labels[..., 1:].contiguous()
            loss_fct = CrossEntropyLoss()
            lm_loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
        if not return_dict:
            # Tuple output: (lm_loss?, mc_loss?, lm_logits, mc_logits, ...)
            output = (lm_logits, mc_logits) + transformer_outputs[1:]
            if mc_loss is not None:
                output = (mc_loss,) + output
            return ((lm_loss,) + output) if lm_loss is not None else output
        return OpenAIGPTDoubleHeadsModelOutput(
            loss=lm_loss,
            mc_loss=mc_loss,
            logits=lm_logits,
            mc_logits=mc_logits,
            hidden_states=transformer_outputs.hidden_states,
            attentions=transformer_outputs.attentions,
        )
@add_start_docstrings(
    """
    The Original OpenAI GPT Model transformer with a sequence classification head on top (linear layer).
    :class:`~transformers.OpenAIGPTForSequenceClassification` uses the last token in order to do the classification, as
    other causal models (e.g. GPT-2) do. Since it does classification on the last token, it requires to know the
    position of the last token. If a :obj:`pad_token_id` is defined in the configuration, it finds the last token that
    is not a padding token in each row. If no :obj:`pad_token_id` is defined, it simply takes the last value in each
    row of the batch. Since it cannot guess the padding tokens when :obj:`inputs_embeds` are passed instead of
    :obj:`input_ids`, it does the same (take the last value in each row of the batch).
    """,
    OPENAI_GPT_START_DOCSTRING,
)
class OpenAIGPTForSequenceClassification(OpenAIGPTPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.transformer = OpenAIGPTModel(config)
        # Classification head: bias-free linear projection from the hidden
        # size (n_embd) to num_labels scores, applied per token in forward().
        self.score = nn.Linear(config.n_embd, self.num_labels, bias=False)
        self.init_weights()
    @add_start_docstrings_to_model_forward(OPENAI_GPT_INPUTS_DOCSTRING)
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=SequenceClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
            config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
            If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        transformer_outputs = self.transformer(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        # Score every token; one token's logit row per sequence is selected
        # below ("pooled") for the classification decision.
        hidden_states = transformer_outputs[0]
        logits = self.score(hidden_states)
        if input_ids is not None:
            batch_size, sequence_length = input_ids.shape[:2]
        else:
            batch_size, sequence_length = inputs_embeds.shape[:2]
        # Without a pad token we cannot locate the last "real" token of each
        # row, so batched input is rejected outright.
        assert (
            self.config.pad_token_id is not None or batch_size == 1
        ), "Cannot handle batch sizes > 1 if no padding token is defined."
        if self.config.pad_token_id is None:
            sequence_lengths = -1
        else:
            if input_ids is not None:
                # Index of the last non-padding token in each row.
                sequence_lengths = torch.ne(input_ids, self.config.pad_token_id).sum(-1) - 1
            else:
                # Padding cannot be detected in inputs_embeds; fall back to the
                # last position and warn the caller.
                sequence_lengths = -1
                logger.warning(
                    f"{self.__class__.__name__} will not detect padding tokens in `inputs_embeds`. Results may be "
                    f"unexpected if using padding tokens in conjunction with `inputs_embeds.`"
                )
        # Pool: pick, for each sequence in the batch, the logits at the chosen
        # (last non-pad) token position.
        pooled_logits = logits[range(batch_size), sequence_lengths]
        loss = None
        if labels is not None:
            # Infer the problem type once and cache it on the config.
            # NOTE: this mutates self.config, so the choice sticks for all
            # subsequent forward calls.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"
            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(pooled_logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(pooled_logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(pooled_logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(pooled_logits, labels)
        if not return_dict:
            # Legacy tuple output: (loss?, pooled_logits, *extra transformer outputs).
            output = (pooled_logits,) + transformer_outputs[1:]
            return ((loss,) + output) if loss is not None else output
        return SequenceClassifierOutput(
            loss=loss,
            logits=pooled_logits,
            hidden_states=transformer_outputs.hidden_states,
            attentions=transformer_outputs.attentions,
        )
| 42.896028
| 168
| 0.652169
|
4a0e56ad34608c756cd26053499bbb524c9a3bf6
| 1,319
|
py
|
Python
|
semkov/apps/pages/models.py
|
manti-by/Semkov
|
c48d3c21d1b39aa7e02b6c31c2cf5ad385a62723
|
[
"BSD-3-Clause"
] | null | null | null |
semkov/apps/pages/models.py
|
manti-by/Semkov
|
c48d3c21d1b39aa7e02b6c31c2cf5ad385a62723
|
[
"BSD-3-Clause"
] | 2
|
2021-04-13T21:30:21.000Z
|
2021-04-13T21:32:45.000Z
|
semkov/apps/pages/models.py
|
manti-by/Semkov
|
c48d3c21d1b39aa7e02b6c31c2cf5ad385a62723
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
from taggit.models import TaggedItemBase
from modelcluster.fields import ParentalKey
from modelcluster.contrib.taggit import ClusterTaggableManager
from wagtail.core.models import Page
from wagtail.admin.edit_handlers import FieldPanel
from wagtail.documents.edit_handlers import DocumentChooserPanel
from semkov.apps.core.mixins import ImagesMixin, ArticleMixin, MenuMixin
class PageTag(TaggedItemBase):
    """Through model connecting taggit tags to :class:`PageModel`."""
    # Reverse accessor on PageModel is "page_tags".
    content_object = ParentalKey("pages.PageModel", related_name="page_tags")
class PageModel(MenuMixin, ImagesMixin, ArticleMixin, Page):
    """Generic site page combining article content, images, menu placement,
    an optional attached document, a free-form map field and tags."""
    # Optional downloadable document; the reference is nulled (SET_NULL) when
    # the document is deleted, and "+" disables the reverse accessor.
    document = models.ForeignKey(
        "wagtaildocs.Document",
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="+",
    )
    # Free-form text. NOTE(review): presumably a map embed snippet — confirm;
    # the attribute name also shadows the builtin map() inside the class body.
    map = models.TextField(blank=True)
    # Tags attached through the PageTag through-model above.
    tags = ClusterTaggableManager(through="pages.PageTag", blank=True)
    # Wagtail admin edit panels: page-specific fields first, then the panels
    # inherited from Page and the content mixins.
    content_panels = (
        [DocumentChooserPanel("document"), FieldPanel("map"), FieldPanel("tags")]
        + Page.content_panels
        + ArticleMixin.content_panels
        + ImagesMixin.content_panels
    )
    promote_panels = Page.promote_panels + MenuMixin.promote_panels
    def tag_slugs(self):
        """Return a flat queryset of this page's tag slugs."""
        return self.tags.values_list("slug", flat=True)
    class Meta:
        verbose_name = "Page"
        verbose_name_plural = "Pages"
| 27.479167
| 81
| 0.727824
|
4a0e57ff0839725f00f92a18c3f177bba9ec5f02
| 563
|
py
|
Python
|
bot_links.py
|
Eduardo-Barreto/Marselo-Bot
|
7f8faf372b06419c0f8731156fd6c64bbcb6755f
|
[
"MIT"
] | 2
|
2021-07-11T21:50:01.000Z
|
2021-10-01T11:04:17.000Z
|
bot_links.py
|
Eduardo-Barreto/Marselo-Bot
|
7f8faf372b06419c0f8731156fd6c64bbcb6755f
|
[
"MIT"
] | null | null | null |
bot_links.py
|
Eduardo-Barreto/Marselo-Bot
|
7f8faf372b06419c0f8731156fd6c64bbcb6755f
|
[
"MIT"
] | null | null | null |
# Link constants referenced by the bot's commands/responses.
# Tenor GIF links for ban/unban reactions.
banido = 'https://tenor.com/view/ednaldo-pereira-banido-desbanido-meme-meme-banido-gif-19429642'
desbanido = 'https://tenor.com/view/desbanido-ednaldo-pereira-ednaldo-pereira-banido-gif-19443137'
# Project links (source repository and its README).
repositorio = 'https://github.com/Eduardo-Barreto/Marselo-Bot'
readme = 'https://github.com/Eduardo-Barreto/Marselo-Bot/blob/master/README.md'
# Miscellaneous links.
rickrolling = 'https://youtu.be/dQw4w9WgXcQ'
playlist = 'https://open.spotify.com/playlist/0TWxX4f5AiAfE4FVFj8Vcq?si=d79e763e368743f2'
conexao_digital = 'https://conexaodigital2em.sesisp.org.br/calendar/view.php?view=month'
| 70.375
| 98
| 0.797513
|
4a0e5859941055924180004569e4fd20c451f01c
| 181
|
py
|
Python
|
oface/__init__.py
|
007gzs/oface
|
9e4ca8ee572783cff4417c58217cdcb8c7af07f4
|
[
"MIT"
] | 2
|
2021-06-01T01:33:32.000Z
|
2021-11-23T10:39:35.000Z
|
oface/__init__.py
|
007gzs/oface
|
9e4ca8ee572783cff4417c58217cdcb8c7af07f4
|
[
"MIT"
] | null | null | null |
oface/__init__.py
|
007gzs/oface
|
9e4ca8ee572783cff4417c58217cdcb8c7af07f4
|
[
"MIT"
] | 1
|
2021-07-16T22:42:55.000Z
|
2021-07-16T22:42:55.000Z
|
# encoding: utf-8
# Package entry point: re-export the public API and declare the version.
from __future__ import absolute_import, unicode_literals
from .face_analysis import FaceAnalysis, Face
# Names exported by `from oface import *`.
__all__ = ['FaceAnalysis', 'Face']
__version__ = '0.1.12'
| 22.625
| 56
| 0.773481
|
4a0e59a92ea1eafe46ee66c57c01043972053585
| 3,705
|
py
|
Python
|
kisa.py
|
Kagami/kisa
|
2597f67e519b8d66fec2684ff5a7726436bb029b
|
[
"CC0-1.0"
] | 7
|
2015-04-28T13:26:11.000Z
|
2020-02-09T17:01:04.000Z
|
kisa.py
|
Kagami/kisa
|
2597f67e519b8d66fec2684ff5a7726436bb029b
|
[
"CC0-1.0"
] | null | null | null |
kisa.py
|
Kagami/kisa
|
2597f67e519b8d66fec2684ff5a7726436bb029b
|
[
"CC0-1.0"
] | 3
|
2015-03-10T20:56:17.000Z
|
2021-08-21T02:44:24.000Z
|
#!/usr/bin/env python
# Command-line setup for the kisa bot runner: imports, option parsing and
# validation. Python 2 / Twisted (the mode functions below use `print`
# statements and inlineCallbacks).
import sys
import os.path
# If Twisted is not installed system-wide, fall back to a bundled copy in ./lib.
try:
    import twisted.words
except ImportError:
    path = os.path.join(os.path.dirname(__file__), "lib")
    sys.path.insert(0, path)
import random
import optparse
from twisted.python import log
# Prefer the epoll reactor when available; any failure (already-installed
# reactor, unsupported platform) is deliberately ignored.
# NOTE(review): the bare `except` also swallows KeyboardInterrupt/SystemExit.
try:
    from twisted.internet import epollreactor
    epollreactor.install()
except:
    pass
from twisted.internet import defer, reactor
from database import get_db
import modes.chat
import modes.register
import utils
program_name = os.path.basename(__file__)
parser = optparse.OptionParser()
# Optional config.py supplies option defaults; a dummy object is used when absent.
try:
    import config
except ImportError:
    class DummyConfig(object): pass
    config = DummyConfig()
# Set up defaults.
if not hasattr(config, "verbose"): config.verbose = 0
parser.set_defaults(verbose=config.verbose)
if not hasattr(config, "bot_count"): config.bot_count = 300
parser.set_defaults(bot_count=config.bot_count)
if not hasattr(config, "interval"): config.interval = 0.01
parser.set_defaults(interval=config.interval)
if hasattr(config, "mode"): parser.set_defaults(mode=config.mode)
if hasattr(config, "jid"): parser.set_defaults(jid=config.jid.encode("utf-8"))
if hasattr(config, "text"):
    parser.set_defaults(text=config.text.encode("utf-8"))
# Set up options.
parser.add_option("-v", "--verbose", action="count",
    help="print debug info; -vv prints more")
parser.add_option("-q", "--quiet", dest="verbose",
    action="store_const", const=0, help="be quiet")
parser.add_option("-m", "--mode", choices=("chat", "register"),
    help="set mode; supported modes: chat, register")
group = optparse.OptionGroup(parser, "chat mode options")
group.add_option("-c", "--bot-count", type="int",
    help="number of bots running in parallel")
group.add_option("-n", "--interval", type="float",
    help="number of seconds between message sends")
group.add_option("-j", "--jid", help="destination jid")
group.add_option("-t", "--text")
parser.add_option_group(group)
# Parse args.
(options, args) = parser.parse_args()
if args:
    parser.error("unknown options; see `%s --help' "
        "for details" % program_name)
if options.mode is None:
    parser.error("you should set up working mode (--mode)")
# chat mode additionally requires a destination jid and a message text.
if options.mode == "chat":
    if options.jid is None:
        parser.error("you should set up jid (--jid)")
    if options.text is None:
        parser.error("you should set up text (--text)")
# -vv (or higher) enables Twisted's log output on stdout.
if options.verbose > 1:
    log.startLogging(sys.stdout)
@defer.inlineCallbacks
def chat_mode():
    # Load-test mode: log in with stored accounts and message the target JID.
    db = yield get_db()
    accounts = yield db.get_all_accounts()
    if not accounts:
        print "No accounts in the database, exiting."
        reactor.stop()
        return
    # Cap concurrency at --bot-count; pick a random subset if we have more.
    if len(accounts) > options.bot_count:
        accounts = random.sample(accounts, options.bot_count)
    print "Starting test using %d accounts." % len(accounts)
    for jid, password in accounts:
        # Each ChatBot presumably connects as (jid, password) and repeatedly
        # sends --text to --jid every --interval seconds — confirm in modes.chat.
        modes.chat.ChatBot(
            jid, password,
            options.jid.decode("utf-8"), options.text.decode("utf-8"),
            options.interval, db, options.verbose)
@defer.inlineCallbacks
def register_mode():
    # Account-farming mode: loop over known-good servers forever, registering
    # one account per server per pass and storing successes in the database.
    db = yield get_db()
    path = os.path.join(os.path.dirname(__file__), "data", "good_servers.txt")
    # NOTE(review): file handle is never closed (acceptable for a one-shot read
    # in a script, but `with` would be cleaner).
    servers = open(path).read().split()
    while True:
        for server in servers:
            bot = modes.register.RegisterBot(options.verbose)
            try:
                account = yield bot.register_account(server)
            except modes.register.RegisterError:
                # Registration failures are expected; skip and try the next server.
                pass
            else:
                yield db.add_account(*account)
            # Throttle: one registration attempt per second.
            yield utils.sleep(1)
# Look up the function named "<mode>_mode" (chat_mode / register_mode) in the
# module namespace and schedule it once the reactor starts; then run forever.
reactor.callWhenRunning(locals()[options.mode + "_mode"])
reactor.run()
| 33.378378
| 78
| 0.666937
|
4a0e5aa1a5cdddc2f8433300afd067c32145aeba
| 1,651
|
py
|
Python
|
vpc_hyp2/hyp2_vm.py
|
dhanraj-vedanth/IaaS_VPC_CDN
|
262dbc7db63d5e76398dadc8015256fb37986e36
|
[
"MIT"
] | null | null | null |
vpc_hyp2/hyp2_vm.py
|
dhanraj-vedanth/IaaS_VPC_CDN
|
262dbc7db63d5e76398dadc8015256fb37986e36
|
[
"MIT"
] | null | null | null |
vpc_hyp2/hyp2_vm.py
|
dhanraj-vedanth/IaaS_VPC_CDN
|
262dbc7db63d5e76398dadc8015256fb37986e36
|
[
"MIT"
] | null | null | null |
import re
import commands
import json
import sys
import os
# CLI: hyp2_vm.py <tenant_id> <domain_name> <vm_index>
# NOTE(review): `re`, `commands` and `json` are unused in this script, and
# `commands` is a Python-2-only module.
tenant_check = int(sys.argv[1])
domain_name = sys.argv[2]
each = int(sys.argv[3])
print("GETTING EXECUTED DA DEI BAADE")
def spin_vms(tenant_check,domain_name,each):
    """Clone a base image, write the gen_vm Ansible vars file, and define/start
    the libvirt domain for tenant `tenant_check`, VM index `each`.

    NOTE(review): the `domain_name` argument is immediately overwritten below,
    so the caller's value is ignored — confirm whether the parameter should be
    honoured or dropped.
    """
    print("Am I even getting called??")
    # Derived name, e.g. "tenant3vm1"; clobbers the parameter of the same name.
    domain_name = "tenant" + str(tenant_check) + "vm" + str(each)
    try:
        # Clone the base image for this VM.
        # NOTE(review): os.system with concatenated argv-derived strings runs
        # through the shell and is shell-injectable; subprocess.run with a
        # list would be safer.
        os.system("sudo cp ./img_to_cpy.img /var/lib/libvirt/images/" + str(domain_name) + ".img")
        print("Yo, copied successfully")
    except Exception as e:
        # Best-effort: log and continue even if the copy failed.
        print(e)
    # Write the vars consumed by the gen_vm Ansible role.
    with open("./ansible/roles/gen_vm/vars/main.yml", "w+") as w:
        # NOTE(review): these debug prints do not match what is written below:
        # "network_to_attach" prints "...vmN" but the file gets "...br", and
        # the printed "image:" line differs from the file's "image: ..." value.
        print("---\n")
        print("domain_name: tenant" + str(tenant_check) + "vm" + str(each) + "\n")
        print("network_to_attach: tenant" + str(tenant_check) + "vm" + str(each) + "\n")
        print("image:/var/lib/libvirt/images/tenant" + str(tenant_check) + "vm" + str(each) + ".img\n")
        w.write("---\n")
        w.write("# vars file for gen_vm\n")
        w.write("domain_name: tenant" + str(tenant_check) + "vm" + str(each) + "\n")
        w.write("network_to_attach: tenant" + str(tenant_check) + "br" + "\n")
        w.write("vcpu: 3\n")
        w.write("image: tenant" + str(tenant_check) + "vm" + str(each) + ".img\n")
    print("Running the play for " + str(domain_name) + " creation!")
    opx = os.system("pwd")  # debug only; exit status is unused
    # Generate the domain XML via Ansible, then define and start the domain.
    op1 = os.system("sudo ansible-playbook /home/ece792/vpc/ansible/vm_create.yml")
    print(op1)
    op2 = os.system("sudo virsh define /etc/libvirt/qemu/" + str(domain_name) + ".xml")
    print(op2)
    op3 = os.system("sudo virsh start " + str(domain_name))
    print(op3)
spin_vms(tenant_check,domain_name,each)
| 39.309524
| 103
| 0.619624
|
4a0e5ae6841d6b90f5ec8d10824ab511984f2a30
| 1,578
|
py
|
Python
|
2018/day10.py
|
tcbegley/advent-of-code
|
e293d06e9cd994b26c0d10619672a6d8d2d65377
|
[
"MIT"
] | 6
|
2021-12-05T11:21:17.000Z
|
2021-12-07T03:04:24.000Z
|
2018/day10.py
|
tcbegley/advent-of-code
|
e293d06e9cd994b26c0d10619672a6d8d2d65377
|
[
"MIT"
] | null | null | null |
2018/day10.py
|
tcbegley/advent-of-code
|
e293d06e9cd994b26c0d10619672a6d8d2d65377
|
[
"MIT"
] | null | null | null |
import re
import sys
class Particle:
    """A point of light with a fixed starting position and constant velocity."""

    def __init__(self, px, py, vx, vy):
        """Record the start position (px, py) and the per-step velocity (vx, vy)."""
        self.px, self.py = px, py
        self.vx, self.vy = vx, vy

    def pos(self, time):
        """Return the (x, y) position after `time` whole steps of movement."""
        x = self.px + time * self.vx
        y = self.py + time * self.vy
        return (x, y)
def answer(path):
    """Print the message the particles spell once they converge.

    Reads puzzle input from `path` (one `position=<x, y> velocity=<dx, dy>`
    line per particle), advances time until the bounding-box width stops
    shrinking, then renders the tightest configuration as '#'/'.' rows.

    Fixes over the original: the search loop no longer computes unused
    miny/maxy each step, and rendering uses a set for O(1) membership tests
    instead of scanning the position list for every grid cell.
    """
    with open(path) as f:
        data = f.read().strip().split("\n")

    # Each input line contains two "<...>" groups: position then velocity.
    particles = []
    pattern = re.compile(r"<([^<>]*)>")
    for line in data:
        m = pattern.findall(line)
        px, py = [int(i) for i in m[0].split(",")]
        vx, vy = [int(i) for i in m[1].split(",")]
        particles.append(Particle(px, py, vx, vy))

    # Find the time step with the minimum bounding-box width.  The width
    # shrinks while the particles converge and grows afterwards; stop once it
    # has been increasing for more than 10 consecutive steps.
    minw = float("inf")
    best_time = None
    increasing = 0
    t = 0
    while True:
        t += 1
        pos = [p.pos(t) for p in particles]
        xs = [x for x, _ in pos]
        width = max(xs) - min(xs)
        if width < minw:
            minw = width
            best_time = t
            increasing = 0
        else:
            increasing += 1
            if increasing > 10:
                break

    # Render the grid at the best time step.
    pos = [p.pos(best_time) for p in particles]
    occupied = set(pos)
    minx = min(p[0] for p in pos)
    miny = min(p[1] for p in pos)
    maxx = max(p[0] for p in pos)
    maxy = max(p[1] for p in pos)
    for y in range(miny, maxy + 1):
        row = "".join(
            "#" if (x, y) in occupied else "." for x in range(minx, maxx + 1)
        )
        print(row)
# CLI entry point: `python day10.py <input-file>`.
if __name__ == "__main__":
    answer(sys.argv[1])
| 22.869565
| 67
| 0.473384
|
4a0e5afe84e2a25f5b780fadb06e66edcb0c799b
| 39,604
|
py
|
Python
|
cinder/tests/unit/backup/drivers/test_backup_swift.py
|
wzhou007/stx-cinder
|
bdc6cc8ae5466f218de5af835e9ec040d537c541
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/backup/drivers/test_backup_swift.py
|
wzhou007/stx-cinder
|
bdc6cc8ae5466f218de5af835e9ec040d537c541
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/backup/drivers/test_backup_swift.py
|
wzhou007/stx-cinder
|
bdc6cc8ae5466f218de5af835e9ec040d537c541
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for Backup swift code.
"""
import bz2
import ddt
import filecmp
import hashlib
import os
import shutil
import tempfile
import threading
import zlib
import mock
from oslo_config import cfg
from swiftclient import client as swift
from cinder.backup.drivers import swift as swift_dr
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder import test
from cinder.tests.unit.backup import fake_swift_client
from cinder.tests.unit.backup import fake_swift_client2
from cinder.tests.unit import fake_constants as fake
# Global oslo.config registry used to read/override the backup_* options below.
CONF = cfg.CONF
# Short alias for "match any argument" in mock call assertions.
ANY = mock.ANY
def fake_md5(arg):
    """Drop-in replacement for hashlib.md5 used by these tests.

    Ignores its input and returns an object whose hexdigest() is always the
    fixed string 'fake-md5-sum', giving the backups deterministic checksums.
    """
    class result(object):
        def hexdigest(self):
            return 'fake-md5-sum'
    return result()
@ddt.ddt
class BackupSwiftTestCase(test.TestCase):
"""Test Case for swift."""
_DEFAULT_VOLUME_ID = 'c7eb81f4-bec6-4730-a60f-8888885874df'
def _create_volume_db_entry(self, volume_id=_DEFAULT_VOLUME_ID):
vol = {'id': volume_id,
'size': 1,
'status': 'available'}
return db.volume_create(self.ctxt, vol)['id']
def _create_backup_db_entry(self,
volume_id=_DEFAULT_VOLUME_ID,
container='test-container',
backup_id=fake.BACKUP_ID, parent_id=None,
service_metadata=None):
try:
db.volume_get(self.ctxt, volume_id)
except exception.NotFound:
self._create_volume_db_entry(volume_id=volume_id)
backup = {'id': backup_id,
'size': 1,
'container': container,
'volume_id': volume_id,
'parent_id': parent_id,
'user_id': fake.USER_ID,
'project_id': fake.PROJECT_ID,
'service_metadata': service_metadata,
}
return db.backup_create(self.ctxt, backup)['id']
def setUp(self):
super(BackupSwiftTestCase, self).setUp()
service_catalog = [{u'type': u'object-store', u'name': u'swift',
u'endpoints': [{
u'publicURL': u'http://example.com'}]},
{u'type': u'identity', u'name': u'keystone',
u'endpoints': [{
u'publicURL': u'http://example.com'}]}]
self.ctxt = context.get_admin_context()
self.ctxt.service_catalog = service_catalog
self.mock_object(swift, 'Connection',
fake_swift_client.FakeSwiftClient.Connection)
self.mock_object(hashlib, 'md5', fake_md5)
self.volume_file = tempfile.NamedTemporaryFile()
self.temp_dir = tempfile.mkdtemp()
self.addCleanup(self.volume_file.close)
# Remove tempdir.
self.addCleanup(shutil.rmtree, self.temp_dir)
for _i in range(0, 64):
self.volume_file.write(os.urandom(1024))
notify_patcher = mock.patch(
'cinder.volume.utils.notify_about_backup_usage')
notify_patcher.start()
self.addCleanup(notify_patcher.stop)
def test_backup_swift_url(self):
self.ctxt.service_catalog = [{u'type': u'object-store',
u'name': u'swift',
u'endpoints': [{
u'adminURL':
u'http://example.com'}]},
{u'type': u'identity',
u'name': u'keystone',
u'endpoints': [{
u'publicURL':
u'http://example.com'}]}]
self.assertRaises(exception.BackupDriverException,
swift_dr.SwiftBackupDriver,
self.ctxt)
def test_backup_swift_auth_url(self):
self.ctxt.service_catalog = [{u'type': u'object-store',
u'name': u'swift',
u'endpoints': [{
u'publicURL':
u'http://example.com'}]},
{u'type': u'identity',
u'name': u'keystone',
u'endpoints': [{
u'adminURL':
u'http://example.com'}]}]
self.override_config("backup_swift_auth",
"single_user")
self.override_config("backup_swift_user",
"fake_user")
self.assertRaises(exception.BackupDriverException,
swift_dr.SwiftBackupDriver,
self.ctxt)
def test_backup_swift_url_conf(self):
self.ctxt.service_catalog = [{u'type': u'object-store',
u'name': u'swift',
u'endpoints': [{
u'adminURL':
u'http://example.com'}]},
{u'type': u'identity',
u'name': u'keystone',
u'endpoints': [{
u'publicURL':
u'http://example.com'}]}]
self.ctxt.project_id = fake.PROJECT_ID
self.override_config("backup_swift_url",
"http://public.example.com/")
backup = swift_dr.SwiftBackupDriver(self.ctxt)
self.assertEqual("%s%s" % (CONF.backup_swift_url,
self.ctxt.project_id),
backup.swift_url)
def test_backup_swift_url_conf_nocatalog(self):
self.ctxt.service_catalog = []
self.ctxt.project_id = fake.PROJECT_ID
self.override_config("backup_swift_url",
"http://public.example.com/")
backup = swift_dr.SwiftBackupDriver(self.ctxt)
self.assertEqual("%s%s" % (CONF.backup_swift_url,
self.ctxt.project_id),
backup.swift_url)
def test_backup_swift_auth_url_conf(self):
self.ctxt.service_catalog = [{u'type': u'object-store',
u'name': u'swift',
u'endpoints': [{
u'publicURL':
u'http://example.com'}]},
{u'type': u'identity',
u'name': u'keystone',
u'endpoints': [{
u'adminURL':
u'http://example.com'}]}]
self.ctxt.project_id = fake.PROJECT_ID
self.override_config("backup_swift_auth_url",
"http://public.example.com")
self.override_config("backup_swift_auth",
"single_user")
self.override_config("backup_swift_user",
"fake_user")
backup = swift_dr.SwiftBackupDriver(self.ctxt)
self.assertEqual(CONF.backup_swift_auth_url, backup.auth_url)
def test_backup_swift_info(self):
self.override_config("swift_catalog_info", "dummy")
self.assertRaises(exception.BackupDriverException,
swift_dr.SwiftBackupDriver,
self.ctxt)
@ddt.data(
{'auth': 'single_user', 'insecure': True},
{'auth': 'single_user', 'insecure': False},
{'auth': 'per_user', 'insecure': True},
{'auth': 'per_user', 'insecure': False},
)
@ddt.unpack
def test_backup_swift_auth_insecure(self, auth, insecure):
self.override_config("backup_swift_auth_insecure", insecure)
self.override_config('backup_swift_auth', auth)
if auth == 'single_user':
self.override_config('backup_swift_user', 'swift-user')
mock_connection = self.mock_object(swift, 'Connection')
swift_dr.SwiftBackupDriver(self.ctxt)
if auth == 'single_user':
mock_connection.assert_called_once_with(insecure=insecure,
authurl=ANY,
auth_version=ANY,
tenant_name=ANY,
user=ANY,
key=ANY,
os_options={},
retries=ANY,
starting_backoff=ANY,
cacert=ANY)
else:
mock_connection.assert_called_once_with(insecure=insecure,
retries=ANY,
preauthurl=ANY,
preauthtoken=ANY,
starting_backoff=ANY,
cacert=ANY)
@ddt.data(
{'auth_version': '3', 'user_domain': 'UserDomain',
'project': 'Project', 'project_domain': 'ProjectDomain'},
{'auth_version': '3', 'user_domain': None,
'project': 'Project', 'project_domain': 'ProjectDomain'},
{'auth_version': '3', 'user_domain': 'UserDomain',
'project': None, 'project_domain': 'ProjectDomain'},
{'auth_version': '3', 'user_domain': 'UserDomain',
'project': 'Project', 'project_domain': None},
{'auth_version': '3', 'user_domain': None,
'project': None, 'project_domain': None},
)
@ddt.unpack
def test_backup_swift_auth_v3_single_user(self, auth_version, user_domain,
project, project_domain):
self.override_config('backup_swift_auth', 'single_user')
self.override_config('backup_swift_user', 'swift-user')
self.override_config('backup_swift_auth_version', auth_version)
self.override_config('backup_swift_user_domain', user_domain)
self.override_config('backup_swift_project', project)
self.override_config('backup_swift_project_domain', project_domain)
os_options = {}
if user_domain is not None:
os_options['user_domain_name'] = user_domain
if project is not None:
os_options['project_name'] = project
if project_domain is not None:
os_options['project_domain_name'] = project_domain
mock_connection = self.mock_object(swift, 'Connection')
swift_dr.SwiftBackupDriver(self.ctxt)
mock_connection.assert_called_once_with(insecure=ANY,
authurl=ANY,
auth_version=auth_version,
tenant_name=ANY,
user=ANY,
key=ANY,
os_options=os_options,
retries=ANY,
starting_backoff=ANY,
cacert=ANY)
def test_backup_uncompressed(self):
volume_id = '2b9f10a3-42b4-4fdf-b316-000000ceb039'
self._create_backup_db_entry(volume_id=volume_id)
self.flags(backup_compression_algorithm='none')
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
def test_backup_bz2(self):
volume_id = 'dc0fee35-b44e-4f13-80d6-000000e1b50c'
self._create_backup_db_entry(volume_id=volume_id)
self.flags(backup_compression_algorithm='bz2')
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
def test_backup_zlib(self):
volume_id = '5cea0535-b6fb-4531-9a38-000000bea094'
self._create_backup_db_entry(volume_id=volume_id)
self.flags(backup_compression_algorithm='zlib')
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
@mock.patch.object(db, 'backup_update', wraps=db.backup_update)
def test_backup_default_container(self, backup_update_mock):
volume_id = '9552017f-c8b9-4e4e-a876-00000053349c'
self._create_backup_db_entry(volume_id=volume_id,
container=None)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertEqual('volumebackups', backup['container'])
self.assertEqual(3, backup_update_mock.call_count)
@mock.patch.object(db, 'backup_update', wraps=db.backup_update)
def test_backup_db_container(self, backup_update_mock):
volume_id = '9552017f-c8b9-4e4e-a876-00000053349c'
self._create_backup_db_entry(volume_id=volume_id,
container='existing_name')
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertEqual('existing_name', backup['container'])
# Make sure we are not making a DB update when we are using the same
# value that's already in the DB.
self.assertEqual(2, backup_update_mock.call_count)
@mock.patch.object(db, 'backup_update', wraps=db.backup_update)
def test_backup_driver_container(self, backup_update_mock):
volume_id = '9552017f-c8b9-4e4e-a876-00000053349c'
self._create_backup_db_entry(volume_id=volume_id,
container=None)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
with mock.patch.object(service, 'update_container_name',
return_value='driver_name'):
service.backup(backup, self.volume_file)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertEqual('driver_name', backup['container'])
self.assertEqual(3, backup_update_mock.call_count)
@mock.patch('cinder.backup.drivers.swift.SwiftBackupDriver.'
'_send_progress_end')
@mock.patch('cinder.backup.drivers.swift.SwiftBackupDriver.'
'_send_progress_notification')
def test_backup_default_container_notify(self, _send_progress,
_send_progress_end):
volume_id = '87dd0eed-2598-4ebd-8ebb-000000ac578a'
self._create_backup_db_entry(volume_id=volume_id,
container=None)
# If the backup_object_number_per_notification is set to 1,
# the _send_progress method will be called for sure.
CONF.set_override("backup_object_number_per_notification", 1)
CONF.set_override("backup_swift_enable_progress_timer", False)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
self.assertTrue(_send_progress.called)
self.assertTrue(_send_progress_end.called)
# If the backup_object_number_per_notification is increased to
# another value, the _send_progress method will not be called.
_send_progress.reset_mock()
_send_progress_end.reset_mock()
CONF.set_override("backup_object_number_per_notification", 10)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
self.assertFalse(_send_progress.called)
self.assertTrue(_send_progress_end.called)
# If the timer is enabled, the _send_progress will be called,
# since the timer can trigger the progress notification.
_send_progress.reset_mock()
_send_progress_end.reset_mock()
CONF.set_override("backup_object_number_per_notification", 10)
CONF.set_override("backup_swift_enable_progress_timer", True)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
self.assertTrue(_send_progress.called)
self.assertTrue(_send_progress_end.called)
def test_backup_custom_container(self):
volume_id = '1da9859e-77e5-4731-bd58-000000ca119e'
container_name = 'fake99'
self._create_backup_db_entry(volume_id=volume_id,
container=container_name)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertEqual(container_name, backup['container'])
def test_backup_shafile(self):
volume_id = '6465dad4-22af-48f7-8a1a-000000218907'
def _fake_generate_object_name_prefix(self, backup):
az = 'az_fake'
backup_name = '%s_backup_%s' % (az, backup['id'])
volume = 'volume_%s' % (backup['volume_id'])
prefix = volume + '_' + backup_name
return prefix
self.mock_object(swift_dr.SwiftBackupDriver,
'_generate_object_name_prefix',
_fake_generate_object_name_prefix)
container_name = self.temp_dir.replace(tempfile.gettempdir() + '/',
'', 1)
self._create_backup_db_entry(volume_id=volume_id,
container=container_name)
self.mock_object(swift, 'Connection',
fake_swift_client2.FakeSwiftClient2.Connection)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertEqual(container_name, backup['container'])
# Verify sha contents
content1 = service._read_sha256file(backup)
self.assertEqual(64 * 1024 / content1['chunk_size'],
len(content1['sha256s']))
def test_backup_cmp_shafiles(self):
volume_id = '1a99ac67-c534-4fe3-b472-0000001785e2'
def _fake_generate_object_name_prefix(self, backup):
az = 'az_fake'
backup_name = '%s_backup_%s' % (az, backup['id'])
volume = 'volume_%s' % (backup['volume_id'])
prefix = volume + '_' + backup_name
return prefix
self.mock_object(swift_dr.SwiftBackupDriver,
'_generate_object_name_prefix',
_fake_generate_object_name_prefix)
container_name = self.temp_dir.replace(tempfile.gettempdir() + '/',
'', 1)
self._create_backup_db_entry(volume_id=volume_id,
container=container_name,
backup_id=fake.BACKUP_ID)
self.mock_object(swift, 'Connection',
fake_swift_client2.FakeSwiftClient2.Connection)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertEqual(container_name, backup['container'])
# Create incremental backup with no change to contents
self._create_backup_db_entry(volume_id=volume_id,
container=container_name,
backup_id=fake.BACKUP2_ID,
parent_id=fake.BACKUP_ID)
self.mock_object(swift, 'Connection',
fake_swift_client2.FakeSwiftClient2.Connection)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
service.backup(deltabackup, self.volume_file)
deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
self.assertEqual(container_name, deltabackup['container'])
# Compare shas from both files
content1 = service._read_sha256file(backup)
content2 = service._read_sha256file(deltabackup)
self.assertEqual(len(content1['sha256s']), len(content2['sha256s']))
self.assertEqual(set(content1['sha256s']), set(content2['sha256s']))
def test_backup_delta_two_objects_change(self):
volume_id = '30dab288-265a-4583-9abe-000000d42c67'
def _fake_generate_object_name_prefix(self, backup):
az = 'az_fake'
backup_name = '%s_backup_%s' % (az, backup['id'])
volume = 'volume_%s' % (backup['volume_id'])
prefix = volume + '_' + backup_name
return prefix
self.mock_object(swift_dr.SwiftBackupDriver,
'_generate_object_name_prefix',
_fake_generate_object_name_prefix)
self.flags(backup_swift_object_size=8 * 1024)
self.flags(backup_swift_block_size=1024)
container_name = self.temp_dir.replace(tempfile.gettempdir() + '/',
'', 1)
self._create_backup_db_entry(volume_id=volume_id,
container=container_name,
backup_id=fake.BACKUP_ID)
self.mock_object(swift, 'Connection',
fake_swift_client2.FakeSwiftClient2.Connection)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.backup(backup, self.volume_file)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertEqual(container_name, backup['container'])
# Create incremental backup with no change to contents
self.volume_file.seek(2 * 8 * 1024)
self.volume_file.write(os.urandom(1024))
self.volume_file.seek(4 * 8 * 1024)
self.volume_file.write(os.urandom(1024))
self._create_backup_db_entry(volume_id=volume_id,
container=container_name,
backup_id=fake.BACKUP2_ID,
parent_id=fake.BACKUP_ID)
self.mock_object(swift, 'Connection',
fake_swift_client2.FakeSwiftClient2.Connection)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
service.backup(deltabackup, self.volume_file)
deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
self.assertEqual(container_name, deltabackup['container'])
content1 = service._read_sha256file(backup)
content2 = service._read_sha256file(deltabackup)
# Verify that two shas are changed at index 16 and 32
self.assertNotEqual(content1['sha256s'][16], content2['sha256s'][16])
self.assertNotEqual(content1['sha256s'][32], content2['sha256s'][32])
    def test_backup_delta_two_blocks_in_object_change(self):
        """Incremental backup when two blocks inside one object change.

        Takes a full backup, overwrites two 1KiB hash blocks that fall
        inside the same 8KiB swift object, takes an incremental backup,
        and verifies that exactly those block hashes differ between the
        two backups' sha256 files.
        """
        volume_id = 'b943e84f-aa67-4331-9ab2-000000cf19ba'
        # Deterministic object prefix so both backups land in a known place.
        def _fake_generate_object_name_prefix(self, backup):
            az = 'az_fake'
            backup_name = '%s_backup_%s' % (az, backup['id'])
            volume = 'volume_%s' % (backup['volume_id'])
            prefix = volume + '_' + backup_name
            return prefix
        self.mock_object(swift_dr.SwiftBackupDriver,
                         '_generate_object_name_prefix',
                         _fake_generate_object_name_prefix)
        # 8KiB objects split into 1KiB hash blocks -> 8 sha entries/object.
        self.flags(backup_swift_object_size=8 * 1024)
        self.flags(backup_swift_block_size=1024)
        container_name = self.temp_dir.replace(tempfile.gettempdir() + '/',
                                               '', 1)
        self._create_backup_db_entry(volume_id=volume_id,
                                     container=container_name,
                                     backup_id=fake.BACKUP_ID)
        self.mock_object(swift, 'Connection',
                         fake_swift_client2.FakeSwiftClient2.Connection)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        self.volume_file.seek(0)
        backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
        service.backup(backup, self.volume_file)
        backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
        self.assertEqual(container_name, backup['container'])
        # Overwrite 1KiB at block offsets 16 and 20, then back up again
        # incrementally (parent_id links the delta to the full backup).
        self.volume_file.seek(16 * 1024)
        self.volume_file.write(os.urandom(1024))
        self.volume_file.seek(20 * 1024)
        self.volume_file.write(os.urandom(1024))
        self._create_backup_db_entry(volume_id=volume_id,
                                     container=container_name,
                                     backup_id=fake.BACKUP2_ID,
                                     parent_id=fake.BACKUP_ID)
        self.mock_object(swift, 'Connection',
                         fake_swift_client2.FakeSwiftClient2.Connection)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        self.volume_file.seek(0)
        deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
        service.backup(deltabackup, self.volume_file)
        deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
        self.assertEqual(container_name, deltabackup['container'])
        # Verify that two shas are changed at index 16 and 20
        content1 = service._read_sha256file(backup)
        content2 = service._read_sha256file(deltabackup)
        self.assertNotEqual(content1['sha256s'][16], content2['sha256s'][16])
        self.assertNotEqual(content1['sha256s'][20], content2['sha256s'][20])
def test_create_backup_put_object_wraps_socket_error(self):
volume_id = 'c09b1ad4-5f0e-4d3f-8b9e-0000004caec8'
container_name = 'socket_error_on_put'
self._create_backup_db_entry(volume_id=volume_id,
container=container_name)
service = swift_dr.SwiftBackupDriver(self.ctxt)
self.volume_file.seek(0)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
self.assertRaises(exception.SwiftConnectionFailed,
service.backup,
backup, self.volume_file)
    def test_backup_backup_metadata_fail(self):
        """Test of when an exception occurs in backup().

        In backup(), after an exception occurs in
        self._backup_metadata(), we want to check the process of an
        exception handler.
        """
        volume_id = '020d9142-339c-4876-a445-000000f1520c'
        self._create_backup_db_entry(volume_id=volume_id)
        self.flags(backup_compression_algorithm='none')
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        self.volume_file.seek(0)
        backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
        # Stub that simulates a metadata-backup failure inside backup().
        def fake_backup_metadata(self, backup, object_meta):
            raise exception.BackupDriverException(message=_('fake'))
        # Raise a pseudo exception.BackupDriverException.
        self.mock_object(swift_dr.SwiftBackupDriver, '_backup_metadata',
                         fake_backup_metadata)
        # We expect that an exception be notified directly.
        self.assertRaises(exception.BackupDriverException,
                          service.backup,
                          backup, self.volume_file)
    def test_backup_backup_metadata_fail2(self):
        """Test of when an exception occurs in an exception handler.

        In backup(), after an exception occurs in
        self._backup_metadata(), we want to check the process when the
        second exception occurs in self.delete_backup().
        """
        volume_id = '2164421d-f181-4db7-b9bd-000000eeb628'
        self._create_backup_db_entry(volume_id=volume_id)
        self.flags(backup_compression_algorithm='none')
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        self.volume_file.seek(0)
        backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
        # First failure: metadata backup raises inside backup().
        def fake_backup_metadata(self, backup, object_meta):
            raise exception.BackupDriverException(message=_('fake'))
        # Raise a pseudo exception.BackupDriverException.
        self.mock_object(swift_dr.SwiftBackupDriver, '_backup_metadata',
                         fake_backup_metadata)
        # Second failure: the cleanup path (delete_backup) also raises;
        # that later error is the one callers should see.
        def fake_delete(self, backup):
            raise exception.BackupOperationError()
        # Raise a pseudo exception.BackupOperationError.
        self.mock_object(swift_dr.SwiftBackupDriver, 'delete_backup',
                         fake_delete)
        # We expect that the second exception is notified.
        self.assertRaises(exception.BackupOperationError,
                          service.backup,
                          backup, self.volume_file)
def test_restore(self):
volume_id = 'c2a81f09-f480-4325-8424-00000071685b'
self._create_backup_db_entry(volume_id=volume_id)
service = swift_dr.SwiftBackupDriver(self.ctxt)
with tempfile.NamedTemporaryFile() as volume_file:
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.restore(backup, volume_id, volume_file)
    def test_restore_delta(self):
        """Restoring from an incremental backup reproduces the volume.

        Takes a full backup, modifies two blocks, takes an incremental
        backup, restores from the incremental, and byte-compares the
        restored file with the current volume contents.
        """
        volume_id = '04d83506-bcf7-4ff5-9c65-00000051bd2e'
        # Deterministic object prefix shared by both backups.
        def _fake_generate_object_name_prefix(self, backup):
            az = 'az_fake'
            backup_name = '%s_backup_%s' % (az, backup['id'])
            volume = 'volume_%s' % (backup['volume_id'])
            prefix = volume + '_' + backup_name
            return prefix
        self.mock_object(swift_dr.SwiftBackupDriver,
                         '_generate_object_name_prefix',
                         _fake_generate_object_name_prefix)
        self.flags(backup_swift_object_size=8 * 1024)
        self.flags(backup_swift_block_size=1024)
        container_name = self.temp_dir.replace(tempfile.gettempdir() + '/',
                                               '', 1)
        self._create_backup_db_entry(volume_id=volume_id,
                                     container=container_name,
                                     backup_id=fake.BACKUP_ID)
        self.mock_object(swift, 'Connection',
                         fake_swift_client2.FakeSwiftClient2.Connection)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        self.volume_file.seek(0)
        backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
        service.backup(backup, self.volume_file)
        # Modify two blocks, then take an incremental backup of the changes.
        self.volume_file.seek(16 * 1024)
        self.volume_file.write(os.urandom(1024))
        self.volume_file.seek(20 * 1024)
        self.volume_file.write(os.urandom(1024))
        self._create_backup_db_entry(volume_id=volume_id,
                                     container=container_name,
                                     backup_id=fake.BACKUP2_ID,
                                     parent_id=fake.BACKUP_ID)
        self.volume_file.seek(0)
        deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
        service.backup(deltabackup, self.volume_file, True)
        deltabackup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
        with tempfile.NamedTemporaryFile() as restored_file:
            backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP2_ID)
            service.restore(backup, volume_id,
                            restored_file)
            self.assertTrue(filecmp.cmp(self.volume_file.name,
                                        restored_file.name))
    def test_restore_wraps_socket_error(self):
        """restore() surfaces socket failures as SwiftConnectionFailed.

        The container name 'socket_error_on_get' presumably triggers a
        socket error in the fake swift client used by this test class --
        verify against the fake client implementation.
        """
        volume_id = 'c1160de7-2774-4f20-bf14-0000001ac139'
        container_name = 'socket_error_on_get'
        self._create_backup_db_entry(volume_id=volume_id,
                                     container=container_name)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        with tempfile.NamedTemporaryFile() as volume_file:
            backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
            self.assertRaises(exception.SwiftConnectionFailed,
                              service.restore,
                              backup, volume_id, volume_file)
    def test_restore_unsupported_version(self):
        """restore() rejects backups with an unsupported metadata version.

        The container name 'unsupported_version' presumably makes the
        fake swift client return metadata with an unknown version --
        the driver must raise InvalidBackup rather than restore garbage.
        """
        volume_id = '390db8c1-32d3-42ca-82c9-00000010c703'
        container_name = 'unsupported_version'
        self._create_backup_db_entry(volume_id=volume_id,
                                     container=container_name)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        with tempfile.NamedTemporaryFile() as volume_file:
            backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
            self.assertRaises(exception.InvalidBackup,
                              service.restore,
                              backup, volume_id, volume_file)
def test_delete(self):
volume_id = '9ab256c8-3175-4ad8-baa1-0000007f9d31'
object_prefix = 'test_prefix'
self._create_backup_db_entry(volume_id=volume_id,
service_metadata=object_prefix)
service = swift_dr.SwiftBackupDriver(self.ctxt)
backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
service.delete_backup(backup)
    def test_delete_wraps_socket_error(self):
        """delete_backup() surfaces socket failures as SwiftConnectionFailed.

        The container name 'socket_error_on_delete' presumably triggers a
        socket error in the fake swift client -- verify against the fake.
        """
        volume_id = 'f74cb6fa-2900-40df-87ac-0000000f72ea'
        container_name = 'socket_error_on_delete'
        object_prefix = 'test_prefix'
        self._create_backup_db_entry(volume_id=volume_id,
                                     container=container_name,
                                     service_metadata=object_prefix)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
        self.assertRaises(exception.SwiftConnectionFailed,
                          service.delete_backup,
                          backup)
    def test_delete_without_object_prefix(self):
        """delete_backup() must not touch swift when there is no prefix.

        The backup row has no service_metadata (object prefix), so any
        call to delete_object would be wrong; the stub fails the test if
        it is invoked.
        """
        volume_id = 'ee30d649-72a6-49a5-b78d-000000edb6b1'
        # Tripwire: invoking delete_object at all is a failure.
        def _fake_delete_object(self, container, object_name):
            raise AssertionError('delete_object method should not be called.')
        self.mock_object(swift_dr.SwiftBackupDriver,
                         'delete_object',
                         _fake_delete_object)
        self._create_backup_db_entry(volume_id=volume_id)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        backup = objects.Backup.get_by_id(self.ctxt, fake.BACKUP_ID)
        service.delete_backup(backup)
    def test_get_compressor(self):
        """_get_compressor maps algorithm names to compression modules.

        'None' (string) yields no compressor, 'zlib'/'bz2' yield the
        corresponding stdlib modules, and unknown names raise ValueError.
        """
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        compressor = service._get_compressor('None')
        self.assertIsNone(compressor)
        compressor = service._get_compressor('zlib')
        self.assertEqual(zlib, compressor)
        compressor = service._get_compressor('bz2')
        self.assertEqual(bz2, compressor)
        self.assertRaises(ValueError, service._get_compressor, 'fake')
    def test_prepare_output_data_effective_compression(self):
        """Test compression works on a native thread.

        The compress call records which thread ran it; the assertion at
        the end checks it was NOT the test (eventlet) thread, i.e. the
        driver offloaded compression to a native thread.
        """
        # Use dictionary to share data between threads
        thread_dict = {}
        original_compress = zlib.compress
        def my_compress(data, *args, **kwargs):
            thread_dict['compress'] = threading.current_thread()
            return original_compress(data)
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        # Set up buffer of 128 zeroed bytes
        fake_data = b'\0' * 128
        with mock.patch.object(service.compressor, 'compress',
                               side_effect=my_compress):
            result = service._prepare_output_data(fake_data)
        self.assertEqual('zlib', result[0])
        # Zeroed bytes compress well, so output must be smaller than input.
        self.assertGreater(len(fake_data), len(result[1]))
        self.assertNotEqual(threading.current_thread(),
                            thread_dict['compress'])
    def test_prepare_output_data_no_compresssion(self):
        """With algorithm 'none', data passes through unchanged."""
        self.flags(backup_compression_algorithm='none')
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        # Set up buffer of 128 zeroed bytes
        fake_data = b'\0' * 128
        result = service._prepare_output_data(fake_data)
        self.assertEqual('none', result[0])
        self.assertEqual(fake_data, result[1])
    def test_prepare_output_data_ineffective_compression(self):
        """Incompressible input falls back to 'none' (no size win)."""
        service = swift_dr.SwiftBackupDriver(self.ctxt)
        # Set up buffer of 128 zeroed bytes
        fake_data = b'\0' * 128
        # Pre-compress so that compression in the driver will be ineffective.
        already_compressed_data = service.compressor.compress(fake_data)
        result = service._prepare_output_data(already_compressed_data)
        self.assertEqual('none', result[0])
        self.assertEqual(already_compressed_data, result[1])
| 45.784971
| 78
| 0.594056
|
4a0e5b1b4e71ec75e8f72bbd6cb251fd6c90fc3c
| 2,065
|
py
|
Python
|
hbkit/config.py
|
saintdoo/hbkit
|
7acfe1a825aa5c2a07e3c92e31d5c36d75cb789c
|
[
"MIT"
] | 1
|
2021-07-22T05:25:35.000Z
|
2021-07-22T05:25:35.000Z
|
hbkit/config.py
|
saintdoo/hbkit
|
7acfe1a825aa5c2a07e3c92e31d5c36d75cb789c
|
[
"MIT"
] | 37
|
2017-07-27T06:07:25.000Z
|
2020-12-11T12:57:31.000Z
|
hbkit/config.py
|
saintdoo/hbkit
|
7acfe1a825aa5c2a07e3c92e31d5c36d75cb789c
|
[
"MIT"
] | 1
|
2019-04-02T08:36:32.000Z
|
2019-04-02T08:36:32.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from builtins import * # noqa
import click
# Parent group; the cli_* subcommands below register themselves on it.
@click.group('config')
def cli():
    """Commands about configuration management."""
@cli.command('list')
@click.option('--local', is_flag=True, help='Only local configurations.')
@click.option('--default', is_flag=True, help='Show default values.')
@click.pass_obj
def cli_list(g, local, default):
    """List current settings."""
    items = g.config.list()
    if default:
        # Show every option's built-in default.
        header_line = 'Default settings:'
        options = [(item.key, item.default) for item in items]
    elif local:
        # Only options explicitly set in the local config file.
        header_line = 'Local settings:'
        options = [(item.key, item.value) for item in items
                   if item.value is not None]
    else:
        # Effective values: local value when set, default otherwise.
        header_line = 'Current settings:'
        options = [(item.key,
                    item.value if item.value is not None else item.default)
                   for item in items]
    click.echo(header_line)
    for key, value in options:
        click.echo('- [{key}]: {value}'.format(key=key, value=value))
@cli.command('set')
@click.argument('key')
@click.argument('value', required=False)
@click.pass_obj
def cli_set(g, key, value):
    """Set an option and save to local config file."""
    # Read the current value first; unknown keys abort with a usage error.
    try:
        current_value = str(g.config.get(key))
    except g.config.OptionNotFound:
        raise click.UsageError('Unknown key: ' + key)
    # Prompt interactively when the value was not given on the command line.
    if value is None:
        value = click.prompt('Value for "{}"'.format(key))
    g.config.set(key, value)
    click.echo('Option [{}] changed'.format(key))
    click.echo(click.style('  From: ' + current_value, fg='red'))
    click.echo(click.style('  TO  : ' + value, fg='green'))
    g.config.save_to_file()
@cli.command('unset')
@click.argument('key')
@click.pass_obj
def cli_unset(g, key):
    """Unset a local option.

    Clears the locally stored value for KEY (the default still applies)
    and persists the change to the local config file. Unknown keys abort
    with a usage error.
    """
    try:
        g.config.set(key, None)
    except g.config.OptionNotFound:
        raise click.UsageError('Unknown key: ' + key)
    # Fixed user-facing typo: "unseted" -> "unset".
    click.echo('Option [{}] unset'.format(key))
    g.config.save_to_file()
| 30.367647
| 73
| 0.620823
|
4a0e5b4e22ca803d3bf911bf494c30fcac33c59a
| 3,535
|
py
|
Python
|
hood/settings.py
|
AbdimulhinYussuf3675/home-watch
|
d0009d2adcd6fa112fc6a775e72a41ea12c619fc
|
[
"MIT"
] | 1
|
2021-08-06T04:29:38.000Z
|
2021-08-06T04:29:38.000Z
|
hood/settings.py
|
AbdimulhinYussuf3675/home-watch
|
d0009d2adcd6fa112fc6a775e72a41ea12c619fc
|
[
"MIT"
] | null | null | null |
hood/settings.py
|
AbdimulhinYussuf3675/home-watch
|
d0009d2adcd6fa112fc6a775e72a41ea12c619fc
|
[
"MIT"
] | null | null | null |
"""Django settings for the hood project.

Configuration is read from the environment via python-decouple; the
MODE variable switches between a local Postgres database ("dev") and a
Heroku DATABASE_URL (anything else).
"""
import os
import django_heroku
import dj_database_url
from decouple import config,Csv
# MODE selects the database block below ("dev" vs anything else).
MODE=config("MODE", default="dev")
SECRET_KEY = config('SECRET_KEY')
DEBUG = config('DEBUG', default=False, cast=bool)
# development
if config('MODE')=="dev":
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': config('DB_NAME'),
            'USER': config('DB_USER'),
            'PASSWORD': config('DB_PASSWORD'),
            'HOST': config('DB_HOST'),
            'PORT': '',
        }
    }
# production
else:
    DATABASES = {
        'default': dj_database_url.config(
            default=config('DATABASE_URL')
        )
    }
# Merge Heroku-style database settings (connection pooling via
# conn_max_age) into whichever default database was chosen above.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'home',
    'bootstrap3',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'hood.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'hood.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# configuring the location for media
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Configure Django App for Heroku.
django_heroku.settings(locals())
| 25.431655
| 91
| 0.680905
|
4a0e5bc5c9f97ecf82398a9852158f401744623e
| 1,700
|
py
|
Python
|
plugins/virtualkeyboard/qt.py
|
lazyboozer/electrum-desire
|
42d204d9e7deef17b18bf9d7f43ce5c45cda5fc8
|
[
"MIT"
] | null | null | null |
plugins/virtualkeyboard/qt.py
|
lazyboozer/electrum-desire
|
42d204d9e7deef17b18bf9d7f43ce5c45cda5fc8
|
[
"MIT"
] | null | null | null |
plugins/virtualkeyboard/qt.py
|
lazyboozer/electrum-desire
|
42d204d9e7deef17b18bf9d7f43ce5c45cda5fc8
|
[
"MIT"
] | null | null | null |
from PyQt4.QtGui import *
from electrum_desire.plugins import BasePlugin, hook
from electrum_desire.i18n import _
import random
class Plugin(BasePlugin):
    """Virtual-keyboard plugin for the password dialog (PyQt4, Python 2).

    Adds a '+' button next to the password field; each click (re)draws an
    on-screen keyboard whose keys are shuffled, cycling through lowercase,
    uppercase and digit/punctuation layouts.
    """
    # Layout holding the currently displayed keyboard (None when hidden).
    vkb = None
    # Click counter; selects the character set modulo 3.
    vkb_index = 0
    @hook
    def password_dialog(self, pw, grid, pos):
        """Insert the '+' toggle button into the password dialog grid."""
        vkb_button = QPushButton(_("+"))
        vkb_button.setFixedWidth(20)
        vkb_button.clicked.connect(lambda: self.toggle_vkb(grid, pw))
        grid.addWidget(vkb_button, pos, 2)
        self.kb_pos = 2
        self.vkb = None
    def toggle_vkb(self, grid, pw):
        """Replace the current keyboard with a freshly shuffled one."""
        if self.vkb:
            grid.removeItem(self.vkb)
        self.vkb = self.virtual_keyboard(self.vkb_index, pw)
        grid.addLayout(self.vkb, self.kb_pos, 0, 1, 3)
        self.vkb_index += 1
    def virtual_keyboard(self, i, pw):
        """Build a QVBoxLayout of shuffled character buttons.

        i selects the character set (mod 3: lower, upper, digits/symbols);
        pw is the password field the buttons append to. Python 2 only:
        relies on xrange and integer division (i/6) for grid placement.
        """
        i = i%3
        if i == 0:
            chars = 'abcdefghijklmnopqrstuvwxyz '
        elif i == 1:
            # NOTE(review): 'QRTS' has S and T swapped -- harmless since
            # the keys are shuffled, but confirm before "fixing".
            chars = 'ABCDEFGHIJKLMNOPQRTSUVWXYZ '
        elif i == 2:
            chars = '1234567890!?.,;:/%&()[]{}+-'
        n = len(chars)
        # Build a random permutation of 0..n-1 by rejection sampling.
        s = []
        for i in xrange(n):
            while True:
                k = random.randint(0,n-1)
                if k not in s:
                    s.append(k)
                    break
        # Late-binding fix: capture t per button via a closure factory.
        def add_target(t):
            return lambda: pw.setText(str(pw.text()) + t)
        vbox = QVBoxLayout()
        grid = QGridLayout()
        grid.setSpacing(2)
        for i in range(n):
            l_button = QPushButton(chars[s[i]])
            l_button.setFixedWidth(25)
            l_button.setFixedHeight(25)
            l_button.clicked.connect(add_target(chars[s[i]]))
            grid.addWidget(l_button, i/6, i%6)
        vbox.addLayout(grid)
        return vbox
| 27.868852
| 69
| 0.545882
|
4a0e5bc9b76124085534879cf5f1264900c99a26
| 1,012
|
py
|
Python
|
python-datastructure-exercise/python-ds-practice/31_truncate/truncate.py
|
ryankrdh/Springboard-Assignments
|
9c9b132a814fc818810978dce1f33c4052028353
|
[
"MIT"
] | null | null | null |
python-datastructure-exercise/python-ds-practice/31_truncate/truncate.py
|
ryankrdh/Springboard-Assignments
|
9c9b132a814fc818810978dce1f33c4052028353
|
[
"MIT"
] | null | null | null |
python-datastructure-exercise/python-ds-practice/31_truncate/truncate.py
|
ryankrdh/Springboard-Assignments
|
9c9b132a814fc818810978dce1f33c4052028353
|
[
"MIT"
] | null | null | null |
def truncate(phrase, n):
    """Return truncated-at-n-chars version of phrase.

    If the phrase is longer than, or the same size as, n make sure it ends
    with '...' and is no longer than n.

    >>> truncate("Hello World", 6)
    'Hel...'

    >>> truncate("Problem solving is the best!", 10)
    'Problem...'

    >>> truncate("Yo", 100)
    'Yo'

    The smallest legal value of n is 3; if less, return a message:

    >>> truncate('Cool', 1)
    'Truncation must be at least 3 characters.'

    >>> truncate("Woah", 4)
    'W...'

    >>> truncate("Woah", 3)
    '...'
    """
    if n < 3:
        return "Truncation must be at least 3 characters."
    # Phrases strictly shorter than n fit as-is, no ellipsis needed.
    # (Bug fix: the old test `n > len(phrase) + 2` truncated phrases that
    # already fit, e.g. truncate("Hello", 6) returned 'Hel...' not 'Hello'.)
    if len(phrase) < n:
        return phrase
    # Keep n-3 characters and spend the last three on the ellipsis.
    return phrase[:n - 3] + "..."
# print(truncate("Hello World", 6))
# print(truncate("Hello World", 13))
| 27.351351
| 97
| 0.52668
|
4a0e5beb987d9ffa64fa51d1849ee6a04ff403fa
| 1,229
|
py
|
Python
|
model_zoo/inception_v3/load_model_save_graph.py
|
KonduitAI/ImportTests
|
1b05adac04d1b04fe4492d3fd35f3c4573774ceb
|
[
"Apache-2.0"
] | 6
|
2019-11-24T01:48:22.000Z
|
2021-09-26T12:49:00.000Z
|
model_zoo/inception_v3/load_model_save_graph.py
|
KonduitAI/ImportTests
|
1b05adac04d1b04fe4492d3fd35f3c4573774ceb
|
[
"Apache-2.0"
] | 19
|
2019-11-19T23:12:53.000Z
|
2022-02-10T00:27:44.000Z
|
model_zoo/inception_v3/load_model_save_graph.py
|
KonduitAI/ImportTests
|
1b05adac04d1b04fe4492d3fd35f3c4573774ceb
|
[
"Apache-2.0"
] | 1
|
2019-11-19T08:27:51.000Z
|
2019-11-19T08:27:51.000Z
|
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.contrib.slim.python.slim.nets import inception_v3
import numpy as np
from tfoptests import persistor
from model_zoo.inception_v3 import save_dir, get_input
# Inception v3 expects 299x299 RGB inputs.
height = 299
width = 299
channels = 3
# Create graph
X = tf.placeholder(tf.float32, shape=[None, height, width, channels],name="input")
my_feed_dict = {}
my_feed_dict[X] = get_input("input")
# 1001 classes = 1000 ImageNet classes + slim's background class.
with slim.arg_scope(inception_v3.inception_v3_arg_scope()):
    net, end_points = inception_v3.inception_v3(X,num_classes=1001)
logits = end_points['Logits']
output = tf.nn.softmax(logits,name="output")
all_saver = tf.train.Saver()
# Execute graph
# NOTE(review): Python 2 script (bare print statements) with hard-coded
# local checkpoint/output paths -- parameterize before reuse.
with tf.Session() as sess:
    all_saver.restore(sess, "/Users/susaneraly/SKYMIND/TFImport/TF_SOURCE_CODE/downloads_from_slim/inception_v3/inception_v3.ckpt")
    prediction = output.eval(feed_dict=my_feed_dict)
    print prediction
    print prediction.shape
    print(np.sort(prediction.ravel()))
    # Persist the graph definition and the reference prediction for import tests.
    tf.train.write_graph(sess.graph_def, '/Users/susaneraly/SKYMIND/TFImport/TF_SOURCE_CODE/downloads_from_slim/inception_v3', 'inception_v3.pbtxt')
    persistor.save_graph(sess, all_saver, save_dir)
    persistor.save_prediction(save_dir, prediction)
| 38.40625
| 148
| 0.786005
|
4a0e5d2b3f07b2545c2c72811134dafe9c142f43
| 1,363
|
py
|
Python
|
tests/test_json.py
|
ysegorov/jukoro
|
8be1d01b471c6091056df77ffacaee5cc8c470d0
|
[
"MIT"
] | null | null | null |
tests/test_json.py
|
ysegorov/jukoro
|
8be1d01b471c6091056df77ffacaee5cc8c470d0
|
[
"MIT"
] | null | null | null |
tests/test_json.py
|
ysegorov/jukoro
|
8be1d01b471c6091056df77ffacaee5cc8c470d0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
import decimal
from unittest import TestCase
from jukoro import arrow
from jukoro import json
from jukoro import pg
A = arrow.JuArrow
D = decimal.Decimal
class TestJson(TestCase):
    """Round-trip tests for jukoro's custom JSON encoder/decoder."""
    def test_arrow(self):
        """Arrow datetimes survive a dumps/loads round trip."""
        utcnow = arrow.utcnow()
        now = arrow.now()
        a = {
            'a': utcnow,
            'b': now,
        }
        jsoned = json.dumps(a)
        b = json.loads(jsoned)
        # Serialized values parse back to equal instants regardless of tz.
        self.assertEqual(utcnow, arrow.get(b['a']))
        self.assertEqual(utcnow.to('local'), arrow.get(b['a']))
        self.assertEqual(now, arrow.get(b['b']))
        self.assertEqual(now.to('UTC'), arrow.get(b['b']))
    def test_dict(self):
        """Ints, Decimals and datetimes serialize as expected."""
        now = datetime.datetime.now()
        utcnow = datetime.datetime.utcnow()
        a = {
            'a': 12,
            'b': D('1.2'),
            'c': now,
            'd': utcnow,
        }
        jsoned = json.dumps(a)
        b = json.loads(jsoned)
        self.assertEqual(a['a'], b['a'])
        # Decimals round-trip as Decimal, not float.
        self.assertIsInstance(b['b'], D)
        self.assertEqual(a['b'], b['b'])
        # Plain datetimes come back as ISO-8601 strings.
        self.assertEqual(a['c'].isoformat(), b['c'])
        self.assertEqual(a['d'].isoformat(), b['d'])
    def test_pg(self):
        """pg entities serialize to their entity_id."""
        c = {
            'e': pg.AbstractUser(123)
        }
        d = json.loads(json.dumps(c))
        self.assertEqual(c['e'].entity_id, d['e'])
| 23.912281
| 63
| 0.517241
|
4a0e5fd39b7ab5342c73af15a91785a3a6b5f534
| 7,341
|
py
|
Python
|
examples/deep_dream.py
|
mokaruu/moka
|
72f1ce4ed4396a308f831c717d8c2856a7a4e86b
|
[
"MIT"
] | 7
|
2017-01-16T23:12:00.000Z
|
2021-03-11T04:39:14.000Z
|
examples/deep_dream.py
|
mokaruu/moka
|
72f1ce4ed4396a308f831c717d8c2856a7a4e86b
|
[
"MIT"
] | null | null | null |
examples/deep_dream.py
|
mokaruu/moka
|
72f1ce4ed4396a308f831c717d8c2856a7a4e86b
|
[
"MIT"
] | 1
|
2017-08-02T01:47:54.000Z
|
2017-08-02T01:47:54.000Z
|
'''Deep Dreaming in Keras.
Run the script with:
```
python deep_dream.py path_to_your_base_image.jpg prefix_for_results
```
e.g.:
```
python deep_dream.py img/mypic.jpg results/dream
```
It is preferable to run this script on GPU, for speed.
If running on CPU, prefer the TensorFlow backend (much faster).
Example results: http://i.imgur.com/FX6ROg9.jpg
'''
from __future__ import print_function
from keras.preprocessing.image import load_img, img_to_array
import numpy as np
from scipy.misc import imsave
from scipy.optimize import fmin_l_bfgs_b
import time
import argparse
from keras.applications import vgg16
from keras import backend as K
from keras.layers import Input
parser = argparse.ArgumentParser(description='Deep Dreams with Keras.')
parser.add_argument('base_image_path', metavar='base', type=str,
                    help='Path to the image to transform.')
parser.add_argument('result_prefix', metavar='res_prefix', type=str,
                    help='Prefix for the saved results.')
args = parser.parse_args()
base_image_path = args.base_image_path
result_prefix = args.result_prefix
# dimensions of the generated picture.
img_width = 600
img_height = 600
# path to the model weights file.
# NOTE(review): weights_path is never referenced below -- the model loads
# 'imagenet' weights directly; confirm before removing.
weights_path = 'vgg16_weights.h5'
# some settings we found interesting
# Each preset names the VGG16 layers to maximize (with weights) plus the
# continuity/L2 regularization strengths and a pixel jitter amplitude.
saved_settings = {
    'bad_trip': {'features': {'block4_conv1': 0.05,
                              'block4_conv2': 0.01,
                              'block4_conv3': 0.01},
                 'continuity': 0.1,
                 'dream_l2': 0.8,
                 'jitter': 5},
    'dreamy': {'features': {'block5_conv1': 0.05,
                            'block5_conv2': 0.02},
               'continuity': 0.1,
               'dream_l2': 0.02,
               'jitter': 0},
}
# the settings we will use in this experiment
settings = saved_settings['dreamy']
# util function to open, resize and format pictures into appropriate tensors
def preprocess_image(image_path):
    """Load an image and return a VGG16-preprocessed batch of shape (1, ...).

    Resizes to (img_width, img_height), converts to a float array, adds a
    batch axis, and applies vgg16.preprocess_input (mean subtraction).
    """
    img = load_img(image_path, target_size=(img_width, img_height))
    img = img_to_array(img)
    img = np.expand_dims(img, axis=0)
    img = vgg16.preprocess_input(img)
    return img
# util function to convert a tensor into a valid image
def deprocess_image(x):
    """Undo VGG16 preprocessing and return a displayable uint8 image."""
    if K.image_dim_ordering() == 'th':
        # Channels-first layout: reshape then move channels to the last axis.
        x = x.reshape((3, img_width, img_height)).transpose((1, 2, 0))
    else:
        x = x.reshape((img_width, img_height, 3))
    # Add back the ImageNet mean pixel (BGR order) that preprocessing removed.
    x[:, :, 0] += 103.939
    x[:, :, 1] += 116.779
    x[:, :, 2] += 123.68
    # Reverse the channel order: 'BGR' -> 'RGB'.
    x = x[:, :, ::-1]
    return np.clip(x, 0, 255).astype('uint8')
# Pick the tensor layout matching the backend's dim ordering
# ('th' = channels-first Theano style, otherwise channels-last).
if K.image_dim_ordering() == 'th':
    img_size = (3, img_width, img_height)
else:
    img_size = (img_width, img_height, 3)
# this will contain our generated image
dream = Input(batch_shape=(1,) + img_size)
# build the VGG16 network with our placeholder
# the model will be loaded with pre-trained ImageNet weights
model = vgg16.VGG16(input_tensor=dream,
                    weights='imagenet', include_top=False)
print('Model loaded.')
# get the symbolic outputs of each "key" layer (we gave them unique names).
layer_dict = dict([(layer.name, layer) for layer in model.layers])
# continuity loss util function
def continuity_loss(x):
    """Total-variation-style penalty encouraging locally coherent images.

    Sums squared differences between each pixel and its right/down
    neighbours (cropping one row/column to keep shapes aligned), raised
    to the 1.25 power. Branches on the backend's dim ordering.
    """
    assert K.ndim(x) == 4
    if K.image_dim_ordering() == 'th':
        a = K.square(x[:, :, :img_width - 1, :img_height - 1] -
                     x[:, :, 1:, :img_height - 1])
        b = K.square(x[:, :, :img_width - 1, :img_height - 1] -
                     x[:, :, :img_width - 1, 1:])
    else:
        a = K.square(x[:, :img_width - 1, :img_height - 1, :] -
                     x[:, 1:, :img_height - 1, :])
        b = K.square(x[:, :img_width - 1, :img_height - 1, :] -
                     x[:, :img_width - 1, 1:, :])
    return K.sum(K.pow(a + b, 1.25))
# define the loss
loss = K.variable(0.)
for layer_name in settings['features']:
    # add the L2 norm of the features of a layer to the loss
    # (negative coefficient: we MAXIMIZE these activations while the
    # optimizer minimizes the total loss)
    assert layer_name in layer_dict.keys(), 'Layer ' + layer_name + ' not found in model.'
    coeff = settings['features'][layer_name]
    x = layer_dict[layer_name].output
    shape = layer_dict[layer_name].output_shape
    # we avoid border artifacts by only involving non-border pixels in the loss
    if K.image_dim_ordering() == 'th':
        loss -= coeff * K.sum(K.square(x[:, :, 2: shape[2] - 2, 2: shape[3] - 2])) / np.prod(shape[1:])
    else:
        loss -= coeff * K.sum(K.square(x[:, 2: shape[1] - 2, 2: shape[2] - 2, :])) / np.prod(shape[1:])
# add continuity loss (gives image local coherence, can result in an artful blur)
loss += settings['continuity'] * continuity_loss(dream) / np.prod(img_size)
# add image L2 norm to loss (prevents pixels from taking very high values, makes image darker)
loss += settings['dream_l2'] * K.sum(K.square(dream)) / np.prod(img_size)
# feel free to further modify the loss as you see fit, to achieve new effects...
# compute the gradients of the dream wrt the loss
grads = K.gradients(loss, dream)
# Bundle loss and gradients into one backend function so both come out
# of a single forward/backward pass.
outputs = [loss]
if isinstance(grads, (list, tuple)):
    outputs += grads
else:
    outputs.append(grads)
f_outputs = K.function([dream], outputs)
def eval_loss_and_grads(x):
    """Evaluate loss and flattened float64 gradients for a flat image x.

    Reshapes the flat vector scipy hands us back into an image batch,
    runs the combined backend function, and returns (loss, grads) in the
    dtypes fmin_l_bfgs_b expects.
    """
    x = x.reshape((1,) + img_size)
    outs = f_outputs([x])
    loss_value = outs[0]
    if len(outs[1:]) == 1:
        grad_values = outs[1].flatten().astype('float64')
    else:
        grad_values = np.array(outs[1:]).flatten().astype('float64')
    return loss_value, grad_values
# this Evaluator class makes it possible
# to compute loss and gradients in one pass
# while retrieving them via two separate functions,
# "loss" and "grads". This is done because scipy.optimize
# requires separate functions for loss and gradients,
# but computing them separately would be inefficient.
class Evaluator(object):
    """Caches one combined (loss, gradients) evaluation.

    scipy.optimize wants separate callbacks for loss and gradients, but
    computing them separately would double the work: loss() runs the
    single combined pass and stores the gradients, which grads() then
    hands out and clears.
    """

    def __init__(self):
        self.loss_value = None
        self.grad_values = None

    def loss(self, x):
        # Must not be called twice in a row without an intervening grads().
        assert self.loss_value is None
        self.loss_value, self.grad_values = eval_loss_and_grads(x)
        return self.loss_value

    def grads(self, x):
        # loss() must have populated the cache first.
        assert self.loss_value is not None
        result = np.copy(self.grad_values)
        self.loss_value = None
        self.grad_values = None
        return result
evaluator = Evaluator()
# run scipy-based optimization (L-BFGS) over the pixels of the generated image
# so as to minimize the loss
x = preprocess_image(base_image_path)
for i in range(5):
    print('Start of iteration', i)
    start_time = time.time()
    # add a random jitter to the initial image. This will be reverted at decoding time
    random_jitter = (settings['jitter'] * 2) * (np.random.random(img_size) - 0.5)
    x += random_jitter
    # run L-BFGS for 7 steps
    x, min_val, info = fmin_l_bfgs_b(evaluator.loss, x.flatten(),
                                     fprime=evaluator.grads, maxfun=7)
    print('Current loss value:', min_val)
    # decode the dream and save it
    x = x.reshape(img_size)
    # undo the jitter added above before converting to an image
    x -= random_jitter
    img = deprocess_image(np.copy(x))
    fname = result_prefix + '_at_iteration_%d.png' % i
    imsave(fname, img)
    end_time = time.time()
    print('Image saved as', fname)
    print('Iteration %d completed in %ds' % (i, end_time - start_time))
| 33.217195
| 103
| 0.643509
|
4a0e619cb589227bfdb5b638bbc4fcf0743dfcd3
| 1,257
|
py
|
Python
|
app/crud.py
|
ridhanf/python-fastapi-challenge
|
041a2156c222dbd84805d6b6ee1d9b88b8227db3
|
[
"MIT"
] | null | null | null |
app/crud.py
|
ridhanf/python-fastapi-challenge
|
041a2156c222dbd84805d6b6ee1d9b88b8227db3
|
[
"MIT"
] | null | null | null |
app/crud.py
|
ridhanf/python-fastapi-challenge
|
041a2156c222dbd84805d6b6ee1d9b88b8227db3
|
[
"MIT"
] | null | null | null |
from sqlalchemy.orm import Session
from app import models, schemas
from app.constants import Role
def get_users(db: Session, skip: int = 0, limit: int = 100):
    """Return up to *limit* users, skipping the first *skip* rows."""
    query = db.query(models.User)
    return query.offset(skip).limit(limit).all()
def get_user(db: Session, user_id: int):
    """Fetch a single user by id; returns None when no row matches."""
    matching = db.query(models.User).filter(models.User.id == user_id)
    return matching.first()
def get_user_by_email(db: Session, email: str):
    """Return the first user with the given email, or None if none match."""
    return db.query(models.User).filter(models.User.email == email).first()
def create_user(db: Session, user: schemas.UserCreate):
    """Create and persist a new user with the default USER role.

    Returns the refreshed ORM instance (including its generated id).
    """
    # SECURITY: this is a placeholder, not real hashing -- the password is
    # stored essentially in clear text. Replace with a proper password
    # hashing scheme (e.g. bcrypt via passlib) before production use.
    fake_hashed_password = user.password + "notreallyhashed"
    db_user = models.User(
        username=user.username,
        email=user.email,
        fullname=user.fullname,
        hashed_password=fake_hashed_password,
        gender=user.gender,
        roles=[Role.USER]
    )
    db.add(db_user)
    db.commit()
    db.refresh(db_user)
    return db_user
def create_user_course(db: Session, course: schemas.CourseCreate, user_id: int):
    """Persist a new course owned by the given user and return it."""
    record = models.Course(owner_id=user_id, **course.dict())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def get_courses(db: Session, skip: int = 0, limit: int = 100):
    """Return a page of Course rows, skipping *skip* and capping at *limit*."""
    course_query = db.query(models.Course)
    return course_query.offset(skip).limit(limit).all()
| 27.933333
| 80
| 0.694511
|
4a0e61c22b82753627ea864e6e0fe50ee65208dd
| 5,104
|
py
|
Python
|
deepchem/models/multitask.py
|
cjgalvin/deepchem
|
64993a129e7f0f78fed9500298b1828ac8a0757a
|
[
"MIT"
] | 3
|
2019-05-29T19:18:25.000Z
|
2021-01-25T05:44:05.000Z
|
deepchem/models/multitask.py
|
cjgalvin/deepchem
|
64993a129e7f0f78fed9500298b1828ac8a0757a
|
[
"MIT"
] | 10
|
2017-02-23T19:39:22.000Z
|
2017-08-31T22:21:18.000Z
|
deepchem/models/multitask.py
|
cjgalvin/deepchem
|
64993a129e7f0f78fed9500298b1828ac8a0757a
|
[
"MIT"
] | 1
|
2018-09-22T00:53:53.000Z
|
2018-09-22T00:53:53.000Z
|
"""
Convenience class that lets singletask models fit on multitask data.
"""
import os
import sklearn
import tempfile
import numpy as np
import shutil
import logging
from deepchem.models import Model
from deepchem.data import DiskDataset
from deepchem.trans import undo_transforms
logger = logging.getLogger(__name__)
class SingletaskToMultitask(Model):
    """Convenience class to let singletask models be fit on multitask data.

    This wrapper class groups a set of singletask `SklearnModel` objects to
    create a multitask model. This class exists primarily to facilitate
    benchmarking.

    Note
    ----
    This current implementation is only functional for sklearn models.
    """

    def __init__(self, tasks, model_builder, model_dir=None):
        """Create one model directory per task under the model root.

        Parameters
        ----------
        tasks: iterable
            Task names; one singletask model is managed per entry.
        model_builder: callable
            Called with a directory path; must return a fresh model object.
        model_dir: str, optional
            Root directory holding all per-task model directories.
        """
        # NOTE(review): `self` is also passed as the first positional
        # argument to Model.__init__ (in addition to being the bound
        # instance) — confirm this matches Model's signature.
        super(SingletaskToMultitask, self).__init__(self, model_dir=model_dir)
        self.tasks = tasks
        self.task_model_dirs = {}  # task name -> directory for that task's model
        self.model_builder = model_builder
        logger.info("About to initialize singletask to multitask model")
        for task in self.tasks:
            task_model_dir = os.path.join(self.model_dir, str(task))
            if not os.path.exists(task_model_dir):
                os.makedirs(task_model_dir)
            logger.info("Initializing directory for task %s" % task)
            self.task_model_dirs[task] = task_model_dir

    def _create_task_datasets(self, dataset):
        """Make directories to hold data for tasks"""
        task_data_dirs = []
        for task in self.tasks:
            # Fresh "<task>_data" directory per task; stale copies removed.
            task_data_dir = os.path.join(self.model_dir, str(task) + "_data")
            if os.path.exists(task_data_dir):
                shutil.rmtree(task_data_dir)
            os.makedirs(task_data_dir)
            task_data_dirs.append(task_data_dir)
        task_datasets = self._to_singletask(dataset, task_data_dirs)
        for task, task_dataset in zip(self.tasks, task_datasets):
            logger.info("Dataset for task %s has shape %s" %
                        (task, str(task_dataset.get_shape())))
        return task_datasets

    @staticmethod
    def _to_singletask(dataset, task_dirs):
        """Transforms a multitask dataset to a collection of singletask datasets."""
        tasks = dataset.get_task_names()
        assert len(tasks) == len(task_dirs)
        logger.info("Splitting multitask dataset into singletask datasets")
        # One empty DiskDataset per task; shards are appended below.
        task_datasets = [
            DiskDataset.create_dataset([], task_dirs[task_num], [task])
            for (task_num, task) in enumerate(tasks)
        ]
        for shard_num, (X, y, w, ids) in enumerate(dataset.itershards()):
            logger.info("Processing shard %d" % shard_num)
            basename = "dataset-%d" % shard_num  # NOTE(review): unused
            for task_num, task in enumerate(tasks):
                logger.info("\tTask %s" % task)
                # Accept weights as a vector, a single column, or one
                # column per task.
                if len(w.shape) == 1:
                    w_task = w
                elif w.shape[1] == 1:
                    w_task = w[:, 0]
                else:
                    w_task = w[:, task_num]
                y_task = y[:, task_num]

                # Extract those datapoints which are present for this task
                X_nonzero = X[w_task != 0]
                num_datapoints = X_nonzero.shape[0]
                y_nonzero = np.reshape(y_task[w_task != 0], (num_datapoints, 1))
                w_nonzero = np.reshape(w_task[w_task != 0], (num_datapoints, 1))
                ids_nonzero = ids[w_task != 0]
                task_datasets[task_num].add_shard(X_nonzero, y_nonzero, w_nonzero,
                                                  ids_nonzero)
        return task_datasets

    def fit(self, dataset, **kwargs):
        """
        Updates all singletask models with new information.

        Note
        ----
        This current implementation is only functional for sklearn models.
        """
        if not isinstance(dataset, DiskDataset):
            raise ValueError('SingletaskToMultitask only works with DiskDatasets')
        logger.info("About to create task-specific datasets")
        task_datasets = self._create_task_datasets(dataset)
        for ind, task in enumerate(self.tasks):
            logger.info("Fitting model for task %s" % task)
            # Build a fresh model per task, fit on that task's slice, persist.
            task_model = self.model_builder(self.task_model_dirs[task])
            task_model.fit(task_datasets[ind], **kwargs)
            task_model.save()

    def predict_on_batch(self, X):
        """
        Concatenates results from all singletask models.
        """
        n_tasks = len(self.tasks)  # NOTE(review): unused
        n_samples = X.shape[0]  # NOTE(review): unused
        y_preds = []
        for ind, task in enumerate(self.tasks):
            # Reload each saved singletask model and predict with it.
            task_model = self.model_builder(self.task_model_dirs[task])
            task_model.reload()
            y_preds.append(task_model.predict_on_batch(X))
        # Stack per-task predictions along a new task axis.
        y_pred = np.stack(y_preds, axis=1)
        return y_pred

    def predict(self, dataset, transformers=[]):
        """
        Prediction for multitask models.
        """
        # NOTE(review): mutable default `transformers=[]` — harmless here
        # since the list is never mutated in this method.
        n_tasks = len(self.tasks)  # NOTE(review): unused
        n_samples = len(dataset)  # NOTE(review): unused
        y_preds = []
        for ind, task in enumerate(self.tasks):
            task_model = self.model_builder(self.task_model_dirs[task])
            task_model.reload()
            # Predict untransformed, then undo transforms once on the stack.
            y_preds.append(task_model.predict(dataset, []))
        y_pred = np.stack(y_preds, axis=1)
        y_pred = undo_transforms(y_pred, transformers)
        return y_pred

    def save(self):
        """Save all models

        TODO(rbharath): Saving is not yet supported for this model.
        """
        pass

    def reload(self):
        """Load all models"""
        # Loading is done on-the-fly
        pass
| 33.359477
| 80
| 0.676528
|
4a0e62476acf9e9f74d203ad4b97d181f69db729
| 6,864
|
py
|
Python
|
path_tracing/jit/pathtracer.py
|
vincentbonnetcg/Toy-Code1
|
633b37482bc470fd144fceb0d74bc6a89e8309a6
|
[
"MIT"
] | 14
|
2019-05-04T00:42:47.000Z
|
2021-09-07T09:57:44.000Z
|
path_tracing/jit/pathtracer.py
|
vincentbonnetcg/Toy-Code
|
633b37482bc470fd144fceb0d74bc6a89e8309a6
|
[
"MIT"
] | null | null | null |
path_tracing/jit/pathtracer.py
|
vincentbonnetcg/Toy-Code
|
633b37482bc470fd144fceb0d74bc6a89e8309a6
|
[
"MIT"
] | 5
|
2020-12-07T21:44:41.000Z
|
2021-09-13T05:29:54.000Z
|
"""
@author: Vincent Bonnet
@description : basic render routines
"""
import time
import math
import random
import numba
import numpy as np
from . import core as jit_core
from .maths import dot, copy, axpy, gamma_correction, clamp, tri_interpolation, normalize
from . import intersect
# pathtracer settings
BLACK = np.zeros(3)  # radiance returned when a path escapes or is cut off
WHITE = np.ones(3)  # initial path throughput before any bounce
MAX_DEPTH = 1  # max hit
NUM_SAMPLES = 1  # number of sample per pixel
RANDOM_SEED = 10  # fixed seed per render thread for reproducible output
INV_PDF = 2.0 * math.pi;  # inverse of probability density function
INV_PI = 1.0 / math.pi  # Lambertian BRDF normalization
SUPERSAMPLING = 2  # supersampling 2x2
CPU_COUNT = 4  # number of cpu
LIGHT_MATERIAL_ID = 1  # material type id treated as an emitter
@numba.njit(inline='always')
def update_ray_from_uniform_distribution(mempool):
    """Re-aim the pool's ray: origin at the latest hit point, direction
    drawn uniformly from the hemisphere around the hit's normal frame."""
    i = mempool.depth
    copy(mempool.ray_o, mempool.hit_p[i])
    # Find ray direction from uniform around hemisphere
    # Unit hemisphere from spherical coordinates
    # the unit hemisphere is at origin and y is the up vector
    # theta [0, 2*PI) and phi [0, PI/2]
    # px = cos(theta)*sin(phi)
    # py = sin(theta)*sin(phi)
    # pz = cos(phi)
    # A uniform distribution (avoid more samples at the pole)
    # theta = 2*PI*rand()
    # phi = acos(rand()) not phi = PI/2*rand() !
    # Optimization
    # cos(phi) = cos(acos(rand())) = rand()
    # sin(phi) = sin(acos(rand())) = sqrt(1 - rand()^2)
    theta = 2*math.pi*random.random()
    cos_phi = random.random()
    sin_phi = math.sqrt(1.0 - cos_phi**2)
    v0 = math.cos(theta)*sin_phi
    v1 = cos_phi
    v2 = math.sin(theta)*sin_phi
    # Transform the local hemisphere sample into world space via the hit's
    # (binormal, normal, tangent) basis: world = v0*B + v1*N + v2*T.
    mempool.ray_d[0] = v0*mempool.hit_bn[i][0] + v1*mempool.hit_n[i][0] + v2*mempool.hit_tn[i][0]
    mempool.ray_d[1] = v0*mempool.hit_bn[i][1] + v1*mempool.hit_n[i][1] + v2*mempool.hit_tn[i][1]
    mempool.ray_d[2] = v0*mempool.hit_bn[i][2] + v1*mempool.hit_n[i][2] + v2*mempool.hit_tn[i][2]
@numba.njit
def ray_tri_details(details, mempool):
    """Intersect the pool's current ray against every triangle and record
    the nearest hit (position, frame, shading normal, material) in the
    next hit slot of the memory pool."""
    # details from Scene.tri_details()
    skip_face_id = -1
    if mempool.depth >= 0:  # skip face based on previous hit
        skip_face_id = mempool.hit_face_id[mempool.depth]
    mempool.next_hit()  # use the next allocated hit
    nearest_t = np.finfo(numba.float64).max
    nearest_u = 0.0
    nearest_v = 0.0
    data = details[0]
    tri_vertices = data.tri_vertices
    hit_id = -1
    # intersection test with triangles
    num_triangles = len(tri_vertices)
    for i in range(num_triangles):
        if i == skip_face_id:
            continue
        #if intersect.ray_aabb(mempool, tri_vertices[i])==False:
        #    continue
        uvt = intersect.ray_triangle(mempool, tri_vertices[i])
        mempool.total_intersection += 1
        # uvt = (barycentric u, barycentric v, ray parameter t)
        if uvt[2] > 0.0 and uvt[2] < nearest_t:
            nearest_t = uvt[2]
            nearest_u = uvt[0]
            nearest_v = uvt[1]
            hit_id = i
    if hit_id >= 0:
        i = mempool.depth
        # store distance
        mempool.hit_t[i] = nearest_t
        # store hit point
        axpy(nearest_t, mempool.ray_d, mempool.ray_o, mempool.hit_p[i])
        # store face normals/tangents/binormals
        copy(mempool.hit_n[i], data.face_normals[hit_id])
        copy(mempool.hit_tn[i], data.face_tangents[hit_id])
        copy(mempool.hit_bn[i], data.face_binormals[hit_id])
        # compute interpolated normal for shading
        tri_interpolation(data.tri_normals[hit_id], nearest_u, nearest_v, mempool.hit_in[i])
        normalize(mempool.hit_in[i])
        # store faceid and material
        mempool.hit_face_id[i] = hit_id
        copy(mempool.hit_material[i], data.face_materials[hit_id])
        mempool.hit_materialtype[i] = data.face_materialtype[hit_id]
        # two-sided intersection
        if dot(mempool.ray_d, mempool.hit_n[i]) > 0:
            mempool.hit_n[i] *= -1.0
        if dot(mempool.ray_d, mempool.hit_in[i]) > 0:
            mempool.hit_in[i] *= -1.0
@numba.njit
def rendering_equation(details, mempool):
    """Apply one diffuse-bounce term of the rendering equation to the
    accumulated throughput, then continue the path."""
    # update ray and compute weakening factor
    update_ray_from_uniform_distribution(mempool)
    weakening_factor = dot(mempool.ray_d, mempool.hit_in[mempool.depth])
    # rendering equation : emittance + (BRDF * incoming * cos_theta / pdf);
    mempool.result *= mempool.hit_material[mempool.depth]
    mempool.result *= INV_PI * weakening_factor * INV_PDF
    recursive_trace(details, mempool)
@numba.njit
def recursive_trace(details, mempool):
    """Trace one more bounce; terminate on depth limit, miss, or emitter."""
    if mempool.depth + 1 >= MAX_DEPTH:  # can another hit be allocated ?
        copy(mempool.result, BLACK)
        return
    ray_tri_details(details, mempool)
    if not mempool.valid_hit():
        # Ray escaped the scene: no light contribution.
        copy(mempool.result, BLACK)
        return
    if mempool.hit_materialtype[mempool.depth]==LIGHT_MATERIAL_ID:
        # Emitter reached: modulate the accumulated throughput by emission.
        mempool.result *= mempool.hit_material[mempool.depth]
        return
    rendering_equation(details, mempool)
@numba.njit
def start_trace(details, mempool):
    """Trace the primary (camera) ray for one sample, leaving the sample's
    radiance in mempool.result."""
    ray_tri_details(details, mempool)
    if not mempool.valid_hit():
        copy(mempool.result, BLACK)
        return
    if MAX_DEPTH == 0:
        # Depth 0: direct visualization — albedo scaled by view cosine.
        copy(mempool.result, mempool.hit_material[0])
        mempool.result *= abs(dot(mempool.hit_in[0], mempool.ray_d))
        return
    if mempool.hit_materialtype[0]==LIGHT_MATERIAL_ID:
        # Camera looks straight at an emitter.
        copy(mempool.result, mempool.hit_material[0])
        return
    copy(mempool.result, WHITE)
    rendering_equation(details, mempool)
@numba.njit(nogil=True)
def render(image, camera, details, start_time, thread_id=0):
    """Render every CPU_COUNT-th scanline (interleaved across threads)
    into `image`, returning this thread's total intersection count."""
    mempool = jit_core.MemoryPool(NUM_SAMPLES)
    random.seed(RANDOM_SEED)
    row_step = CPU_COUNT
    row_start = thread_id
    for j in range(row_start, camera.height, row_step):
        for i in range(camera.width):
            # Pixels are written flipped in both axes vs camera coordinates.
            jj = camera.height-1-j
            ii = camera.width-1-i
            for sx in range(SUPERSAMPLING):
                for sy in range(SUPERSAMPLING):
                    # compute shade
                    c = np.zeros(3)
                    for _ in range(NUM_SAMPLES):
                        mempool.result[0:3] = 0.0
                        camera.get_ray(i, j, sx, sy, mempool)
                        start_trace(details, mempool)
                        mempool.result /= NUM_SAMPLES
                        c += mempool.result
                    clamp(c)
                    # Average the sub-pixel samples into the pixel.
                    c /= (SUPERSAMPLING * SUPERSAMPLING)
                    image[jj, ii] += c
            gamma_correction(image[jj, ii])
        with numba.objmode():
            # Progress report runs in object mode (print/time need Python).
            p = (j+1) / camera.height
            print('. completed : %.2f' % (p * 100.0), ' %')
            if time.time() != start_time:
                t = time.time() - start_time
                estimated_time_left = (1.0 - p) / p * t
                print(' estimated time left: %.2f sec (threadId %d)' % (estimated_time_left, thread_id))
    with numba.objmode():
        print('Total intersections : %d (threadId %d)' % (mempool.total_intersection, thread_id))
    return mempool.total_intersection
| 35.020408
| 107
| 0.629808
|
4a0e62918756b321b31485f3d9ee74a339c5d541
| 15,816
|
py
|
Python
|
solc/install.py
|
circleclick-labs/py-solc
|
fc76877f5b9bb577b2cd66abcb6d1df443156d0d
|
[
"MIT"
] | 153
|
2017-11-23T20:33:35.000Z
|
2022-03-20T22:26:11.000Z
|
solc/install.py
|
circleclick-labs/py-solc
|
fc76877f5b9bb577b2cd66abcb6d1df443156d0d
|
[
"MIT"
] | 31
|
2017-11-29T01:01:28.000Z
|
2022-02-20T05:13:48.000Z
|
solc/install.py
|
circleclick-labs/py-solc
|
fc76877f5b9bb577b2cd66abcb6d1df443156d0d
|
[
"MIT"
] | 57
|
2017-11-23T00:50:09.000Z
|
2022-03-28T18:55:52.000Z
|
"""
Install solc
"""
import functools
import os
import stat
import subprocess
import sys
import contextlib
import zipfile
# Release tags of the solidity versions this installer knows how to
# fetch or build.
V0_4_1 = 'v0.4.1'
V0_4_2 = 'v0.4.2'
V0_4_6 = 'v0.4.6'
V0_4_7 = 'v0.4.7'
V0_4_8 = 'v0.4.8'
V0_4_9 = 'v0.4.9'
V0_4_11 = 'v0.4.11'
V0_4_12 = 'v0.4.12'
V0_4_13 = 'v0.4.13'
V0_4_14 = 'v0.4.14'
V0_4_15 = 'v0.4.15'
V0_4_16 = 'v0.4.16'
V0_4_17 = 'v0.4.17'
V0_4_18 = 'v0.4.18'
V0_4_19 = 'v0.4.19'
V0_4_20 = 'v0.4.20'
V0_4_21 = 'v0.4.21'
V0_4_22 = 'v0.4.22'
V0_4_23 = 'v0.4.23'
V0_4_24 = 'v0.4.24'
V0_4_25 = 'v0.4.25'

# Canonical platform identifiers (compared against sys.platform).
LINUX = 'linux'
OSX = 'darwin'
WINDOWS = 'win32'
#
# System utilities.
#
@contextlib.contextmanager
def chdir(path):
    """Context manager: run the body with *path* as the working directory,
    restoring the previous directory afterwards."""
    previous = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(previous)
def get_platform():
    """Map sys.platform onto one of the LINUX/OSX/WINDOWS constants."""
    plat = sys.platform
    if plat.startswith('linux'):
        return LINUX
    if plat == OSX:
        return OSX
    if plat == WINDOWS:
        return WINDOWS
    raise KeyError("Unknown platform: {0}".format(plat))
def is_executable_available(program):
    """Return True if *program* names an executable file, either directly
    (when it contains a directory part) or somewhere on PATH."""
    def _is_exe(candidate):
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    if os.path.dirname(program):
        # Explicit path given: check it directly.
        return _is_exe(program)
    for raw_dir in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(raw_dir.strip('"'), program)
        if _is_exe(candidate):
            return True
    return False
def ensure_path_exists(dir_path):
    """
    Make sure that a path exists.

    Returns True if this call created the directory, False if the path
    already existed.
    """
    try:
        # Create in one step instead of check-then-create: the original
        # `exists()` test raced with concurrent creators (TOCTOU).
        os.makedirs(dir_path)
    except OSError:
        if os.path.exists(dir_path):
            # Already present (directory or file) — matches the original
            # "exists -> False" behavior.
            return False
        raise
    return True
def ensure_parent_dir_exists(path):
    """Create the directory containing *path* if it is missing."""
    parent = os.path.dirname(path)
    ensure_path_exists(parent)
def check_subprocess_call(command, message=None, stderr=subprocess.STDOUT, **proc_kwargs):
    """Echo *command* (and optional *message*) then run it with
    subprocess.check_call, returning 0 on success.

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    if message:
        print(message)
    print("Executing: {0}".format(" ".join(command)))
    return subprocess.check_call(
        command,
        # Bug fix: the `stderr` parameter was accepted but ignored — the
        # call previously hard-coded subprocess.STDOUT. Default unchanged.
        stderr=stderr,
        **proc_kwargs
    )
def check_subprocess_output(command, message=None, stderr=subprocess.STDOUT, **proc_kwargs):
    """Echo *command* (and optional *message*) then run it with
    subprocess.check_output, returning the captured stdout bytes.

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    if message:
        print(message)
    print("Executing: {0}".format(" ".join(command)))
    return subprocess.check_output(
        command,
        # Bug fix: the `stderr` parameter was accepted but ignored — the
        # call previously hard-coded subprocess.STDOUT. Default unchanged.
        stderr=stderr,
        **proc_kwargs
    )
def chmod_plus_x(executable_path):
    """Add the owner-execute bit to *executable_path* (like `chmod u+x`)."""
    mode = os.stat(executable_path).st_mode
    os.chmod(executable_path, mode | stat.S_IEXEC)
SOLIDITY_GIT_URI = "https://github.com/ethereum/solidity.git"
def is_git_repository(path):
    """Return True when *path* contains a '.git' entry."""
    return os.path.exists(os.path.join(path, '.git'))
#
# Installation filesystem path utilities
#
def get_base_install_path(identifier):
    """Base directory for a given solc release, honoring the
    SOLC_BASE_INSTALL_PATH environment variable when set."""
    leaf = 'solc-{0}'.format(identifier)
    base = os.environ.get('SOLC_BASE_INSTALL_PATH')
    if base is not None:
        return os.path.join(base, leaf)
    return os.path.expanduser(os.path.join('~', '.py-solc', leaf))
def get_repository_path(identifier):
    """Directory holding the solidity source checkout for this release."""
    base = get_base_install_path(identifier)
    return os.path.join(base, 'source')
def get_release_zipfile_path(identifier):
    """Location of the downloaded release archive for this release."""
    base = get_base_install_path(identifier)
    return os.path.join(base, 'release.zip')
def get_extract_path(identifier):
    """Directory the release archive is unpacked into."""
    base = get_base_install_path(identifier)
    return os.path.join(base, 'bin')
def get_executable_path(identifier):
    """Path of the installed `solc` binary for this release."""
    return os.path.join(get_extract_path(identifier), 'solc')
def get_build_dir(identifier):
    """Out-of-tree cmake build directory inside the source checkout."""
    return os.path.join(get_repository_path(identifier), 'build')
def get_built_executable_path(identifier):
    """Path of the freshly compiled `solc` inside the build tree."""
    return os.path.join(get_build_dir(identifier), 'solc', 'solc')
#
# Installation primitives.
#
def clone_solidity_repository(identifier):
    """Shallow-clone the solidity repository at tag *identifier*.

    Raises OSError if `git` is not on PATH.
    """
    if not is_executable_available('git'):
        raise OSError("The `git` is required but was not found")
    destination = get_repository_path(identifier)
    ensure_parent_dir_exists(destination)
    clone_cmd = ["git", "clone", "--recurse-submodules",
                 "--branch", identifier, "--depth", "10",
                 SOLIDITY_GIT_URI, destination]
    note = "Checking out solidity repository @ {0}".format(identifier)
    return check_subprocess_call(clone_cmd, message=note)
def initialize_repository_submodules(identifier):
    """Run `git submodule update --init --recursive` for the checkout.

    NOTE(review): no working directory is set here — the command runs in
    the caller's cwd; the repository path is only used in the log message.
    """
    if not is_executable_available('git'):
        raise OSError("The `git` is required but was not found")
    repo = get_repository_path(identifier)
    submodule_cmd = ["git", "submodule", "update", "--init", "--recursive"]
    check_subprocess_call(
        submodule_cmd,
        "Initializing repository submodules @ {0}".format(repo),
    )
DOWNLOAD_UBUNTU_RELEASE_URI_TEMPLATE = "https://github.com/ethereum/solidity/releases/download/{0}/solidity-ubuntu-trusty.zip" # noqa: E501
def download_ubuntu_release(identifier):
    """Fetch the prebuilt ubuntu-trusty zip for *identifier* via wget."""
    download_uri = DOWNLOAD_UBUNTU_RELEASE_URI_TEMPLATE.format(identifier)
    destination = get_release_zipfile_path(identifier)
    ensure_parent_dir_exists(destination)
    wget_cmd = [
        "wget", download_uri,
        '-c',  # resume previously incomplete download.
        '-O', destination,
    ]
    note = "Downloading ubuntu release from {0}".format(download_uri)
    return check_subprocess_call(wget_cmd, message=note)
DOWNLOAD_STATIC_RELEASE_URI_TEMPLATE = "https://github.com/ethereum/solidity/releases/download/{0}/solc-static-linux" # noqa: E501
def download_static_release(identifier):
    """Fetch the standalone static linux binary for *identifier* via wget."""
    download_uri = DOWNLOAD_STATIC_RELEASE_URI_TEMPLATE.format(identifier)
    destination = get_executable_path(identifier)
    ensure_parent_dir_exists(destination)
    wget_cmd = [
        "wget", download_uri,
        '-c',  # resume previously incomplete download.
        '-O', destination,
    ]
    note = "Downloading static linux binary from {0}".format(download_uri)
    return check_subprocess_call(wget_cmd, message=note)
def extract_release(identifier):
    """Unzip the downloaded release and mark the solc binary executable."""
    archive = get_release_zipfile_path(identifier)
    destination = get_extract_path(identifier)
    ensure_path_exists(destination)
    print("Extracting zipfile: {0} -> {1}".format(archive, destination))
    with zipfile.ZipFile(archive) as zf:
        zf.extractall(destination)
    binary = get_executable_path(identifier)
    print("Making `solc` binary executable: `chmod +x {0}`".format(binary))
    chmod_plus_x(binary)
def install_solc_dependencies(identifier):
    """Run the checkout's scripts/install_deps.sh from inside the repo.

    Raises OSError when the source checkout is missing.
    """
    repository_path = get_repository_path(identifier)
    if not is_git_repository(repository_path):
        raise OSError("Git repository not found @ {0}".format(repository_path))
    script = os.path.join(repository_path, 'scripts', 'install_deps.sh')
    with chdir(repository_path):
        return check_subprocess_call(
            command=["sh", script],
            message="Running dependency installation script `install_deps.sh` @ {0}".format(script),
        )
def install_solc_from_ubuntu_release_zip(identifier):
    """Download and extract the ubuntu release zip, then smoke-test the
    installed binary by running `solc --version`."""
    download_ubuntu_release(identifier)
    extract_release(identifier)
    extract_path = get_extract_path(identifier)
    executable_path = get_executable_path(identifier)
    # Bug fix: the assertion message was "Executable not found @".format(...)
    # — missing its {0} placeholder, so the path never appeared in failures.
    assert os.path.exists(executable_path), "Executable not found @ {0}".format(executable_path)
    check_version_command = [executable_path, '--version']
    check_subprocess_output(
        check_version_command,
        message="Checking installed executable version @ {0}".format(executable_path),
        # The bundled shared libraries live next to the binary.
        env={'LD_LIBRARY_PATH': extract_path},
    )
    print("solc successfully installed at: {0}".format(executable_path))
def install_solc_from_static_linux(identifier):
    """Download the static linux binary, mark it executable, smoke-test it."""
    download_static_release(identifier)
    executable_path = get_executable_path(identifier)
    chmod_plus_x(executable_path)
    check_subprocess_output(
        [executable_path, '--version'],
        message="Checking installed executable version @ {0}".format(executable_path),
    )
    print("solc successfully installed at: {0}".format(executable_path))
def build_solc_from_source(identifier):
    """Compile solc from a source checkout (cmake + make) and symlink the
    built binary into the install location."""
    if not is_git_repository(get_repository_path(identifier)):
        clone_solidity_repository(identifier)
    build_dir = get_build_dir(identifier)
    ensure_path_exists(build_dir)
    with chdir(build_dir):
        check_subprocess_call(["cmake", ".."],
                              message="Running cmake build command")
        check_subprocess_call(["make"],
                              message="Running make command")
        built = get_built_executable_path(identifier)
        chmod_plus_x(built)
        target = get_executable_path(identifier)
        ensure_parent_dir_exists(target)
        # All paths here are absolute, so cwd does not affect the link.
        os.symlink(built, target)
        chmod_plus_x(target)
def install_from_ubuntu_release(identifier):
    """Complete ubuntu-zip install flow: clone, deps, download, extract."""
    if not is_git_repository(get_repository_path(identifier)):
        clone_solidity_repository(identifier)
    install_solc_dependencies(identifier)
    install_solc_from_ubuntu_release_zip(identifier)
    solc_path = get_executable_path(identifier)
    print("Succesfully installed solc @ `{0}`".format(solc_path))
# Linux installers for the early releases that were distributed as
# ubuntu-trusty zip archives (before v0.4.11).
install_v0_4_1_linux = functools.partial(install_from_ubuntu_release, V0_4_1)
install_v0_4_2_linux = functools.partial(install_from_ubuntu_release, V0_4_2)
install_v0_4_6_linux = functools.partial(install_from_ubuntu_release, V0_4_6)
install_v0_4_7_linux = functools.partial(install_from_ubuntu_release, V0_4_7)
install_v0_4_8_linux = functools.partial(install_from_ubuntu_release, V0_4_8)
install_v0_4_9_linux = functools.partial(install_from_ubuntu_release, V0_4_9)
def install_from_static_linux(identifier):
    """Install via the static linux binary and report the final path.

    NOTE(review): the per-version partials below bind
    install_solc_from_static_linux directly, so this wrapper appears
    unused within this file.
    """
    install_solc_from_static_linux(identifier)
    solc_path = get_executable_path(identifier)
    print("Succesfully installed solc @ `{0}`".format(solc_path))
# Linux installers for releases shipped as standalone static binaries
# (v0.4.11 onward).
install_v0_4_11_linux = functools.partial(install_solc_from_static_linux, V0_4_11)
install_v0_4_12_linux = functools.partial(install_solc_from_static_linux, V0_4_12)
install_v0_4_13_linux = functools.partial(install_solc_from_static_linux, V0_4_13)
install_v0_4_14_linux = functools.partial(install_solc_from_static_linux, V0_4_14)
install_v0_4_15_linux = functools.partial(install_solc_from_static_linux, V0_4_15)
install_v0_4_16_linux = functools.partial(install_solc_from_static_linux, V0_4_16)
install_v0_4_17_linux = functools.partial(install_solc_from_static_linux, V0_4_17)
install_v0_4_18_linux = functools.partial(install_solc_from_static_linux, V0_4_18)
install_v0_4_19_linux = functools.partial(install_solc_from_static_linux, V0_4_19)
install_v0_4_20_linux = functools.partial(install_solc_from_static_linux, V0_4_20)
install_v0_4_21_linux = functools.partial(install_solc_from_static_linux, V0_4_21)
install_v0_4_22_linux = functools.partial(install_solc_from_static_linux, V0_4_22)
install_v0_4_23_linux = functools.partial(install_solc_from_static_linux, V0_4_23)
install_v0_4_24_linux = functools.partial(install_solc_from_static_linux, V0_4_24)
install_v0_4_25_linux = functools.partial(install_solc_from_static_linux, V0_4_25)
def install_from_source(identifier):
    """Source-build install flow: clone (if needed), deps, build, report."""
    if not is_git_repository(get_repository_path(identifier)):
        clone_solidity_repository(identifier)
    install_solc_dependencies(identifier)
    build_solc_from_source(identifier)
    solc_path = get_executable_path(identifier)
    print("Succesfully installed solc @ `{0}`".format(solc_path))
# OSX has no prebuilt artifacts here: every version builds from source.
install_v0_4_8_osx = functools.partial(install_from_source, V0_4_8)
install_v0_4_11_osx = functools.partial(install_from_source, V0_4_11)
install_v0_4_12_osx = functools.partial(install_from_source, V0_4_12)
install_v0_4_13_osx = functools.partial(install_from_source, V0_4_13)
install_v0_4_14_osx = functools.partial(install_from_source, V0_4_14)
install_v0_4_15_osx = functools.partial(install_from_source, V0_4_15)
install_v0_4_16_osx = functools.partial(install_from_source, V0_4_16)
install_v0_4_17_osx = functools.partial(install_from_source, V0_4_17)
install_v0_4_18_osx = functools.partial(install_from_source, V0_4_18)
install_v0_4_19_osx = functools.partial(install_from_source, V0_4_19)
install_v0_4_20_osx = functools.partial(install_from_source, V0_4_20)
install_v0_4_21_osx = functools.partial(install_from_source, V0_4_21)
install_v0_4_22_osx = functools.partial(install_from_source, V0_4_22)
install_v0_4_23_osx = functools.partial(install_from_source, V0_4_23)
install_v0_4_24_osx = functools.partial(install_from_source, V0_4_24)
install_v0_4_25_osx = functools.partial(install_from_source, V0_4_25)
# platform -> {release tag -> installer callable}; consulted by install_solc().
INSTALL_FUNCTIONS = {
    LINUX: {
        V0_4_1: install_v0_4_1_linux,
        V0_4_2: install_v0_4_2_linux,
        V0_4_6: install_v0_4_6_linux,
        V0_4_7: install_v0_4_7_linux,
        V0_4_8: install_v0_4_8_linux,
        V0_4_9: install_v0_4_9_linux,
        V0_4_11: install_v0_4_11_linux,
        V0_4_12: install_v0_4_12_linux,
        V0_4_13: install_v0_4_13_linux,
        V0_4_14: install_v0_4_14_linux,
        V0_4_15: install_v0_4_15_linux,
        V0_4_16: install_v0_4_16_linux,
        V0_4_17: install_v0_4_17_linux,
        V0_4_18: install_v0_4_18_linux,
        V0_4_19: install_v0_4_19_linux,
        V0_4_20: install_v0_4_20_linux,
        V0_4_21: install_v0_4_21_linux,
        V0_4_22: install_v0_4_22_linux,
        V0_4_23: install_v0_4_23_linux,
        V0_4_24: install_v0_4_24_linux,
        V0_4_25: install_v0_4_25_linux,
    },
    OSX: {
        V0_4_8: install_v0_4_8_osx,
        V0_4_11: install_v0_4_11_osx,
        V0_4_12: install_v0_4_12_osx,
        V0_4_13: install_v0_4_13_osx,
        V0_4_14: install_v0_4_14_osx,
        V0_4_15: install_v0_4_15_osx,
        V0_4_16: install_v0_4_16_osx,
        V0_4_17: install_v0_4_17_osx,
        V0_4_18: install_v0_4_18_osx,
        V0_4_19: install_v0_4_19_osx,
        V0_4_20: install_v0_4_20_osx,
        V0_4_21: install_v0_4_21_osx,
        V0_4_22: install_v0_4_22_osx,
        V0_4_23: install_v0_4_23_osx,
        V0_4_24: install_v0_4_24_osx,
        V0_4_25: install_v0_4_25_osx,
    }
}
def install_solc(identifier, platform=None):
    """Install solidity release *identifier* for *platform* (auto-detected
    from sys.platform when omitted).

    Raises ValueError for an unsupported platform or release tag.
    """
    if platform is None:
        platform = get_platform()
    try:
        per_version = INSTALL_FUNCTIONS[platform]
    except KeyError:
        raise ValueError(
            "Installation of solidity is not supported on your platform ({0}). "
            "Supported platforms are: {1}".format(
                platform,
                ', '.join(sorted(INSTALL_FUNCTIONS.keys())),
            )
        )
    try:
        install_fn = per_version[identifier]
    except KeyError:
        raise ValueError(
            "Installation of solidity=={0} is not supported. Must be one of {1}".format(
                identifier,
                ', '.join(sorted(INSTALL_FUNCTIONS[platform].keys())),
            )
        )
    install_fn()
if __name__ == "__main__":
    # CLI entry point: `./install_solc.py <release-tag>`.
    try:
        identifier = sys.argv[1]
    except IndexError:
        print("Invocation error. Should be invoked as `./install_solc.py <release-tag>`")
        sys.exit(1)
    install_solc(identifier)
| 30.183206
| 140
| 0.714719
|
4a0e630efb5c991d2b0dc11279f0fd81235d517c
| 8,152
|
py
|
Python
|
options/base_options.py
|
natsala13/FinalProject
|
10e3e3550e574f07f9e2eec0b5891fa09a822f4d
|
[
"BSD-3-Clause"
] | null | null | null |
options/base_options.py
|
natsala13/FinalProject
|
10e3e3550e574f07f9e2eec0b5891fa09a822f4d
|
[
"BSD-3-Clause"
] | null | null | null |
options/base_options.py
|
natsala13/FinalProject
|
10e3e3550e574f07f9e2eec0b5891fa09a822f4d
|
[
"BSD-3-Clause"
] | null | null | null |
import argparse
import os
from util import util
import torch
import models
import data
class BaseOptions():
    """This class defines options used during both training and test time.

    It also implements several helper functions such as parsing, printing, and saving the options.
    It also gathers additional options defined in <modify_commandline_options> functions in both dataset class and model class.

    NOTE(review): `self.isTrain` is read in gather_options()/parse() but is
    never set here — presumably train/test subclasses define it; confirm.
    """

    def __init__(self):
        """Reset the class; indicates the class hasn't been initialized."""
        self.initialized = False

    def initialize(self, parser):
        """Define the common options that are used in both training and test."""
        # basic parameters
        parser.add_argument('--dataroot', required=True, help='path to images (should have subfolders trainA, trainB, valA, valB, etc)')
        parser.add_argument('--name', type=str, default='experiment_name', help='name of the experiment. It decides where to store samples and models')
        parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0  0,1,2, 0,2. use -1 for CPU')
        parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here')
        # model parameters
        parser.add_argument('--model', type=str, default='cycle_gan', help='chooses which model to use. [cycle_gan | pix2pix | test | colorization]')
        parser.add_argument('--input_nc', type=int, default=3, help='# of input image channels: 3 for RGB and 1 for grayscale')
        parser.add_argument('--output_nc', type=int, default=3, help='# of output image channels: 3 for RGB and 1 for grayscale')
        parser.add_argument('--ngf', type=int, default=64, help='# of gen filters in the last conv layer')
        parser.add_argument('--ndf', type=int, default=64, help='# of discrim filters in the first conv layer')
        parser.add_argument('--netD', type=str, default='basic', help='specify discriminator architecture [basic | n_layers | pixel]. The basic model is a 70x70 PatchGAN. n_layers allows you to specify the layers in the discriminator')
        parser.add_argument('--netG', type=str, default='resnet_9blocks', help='specify generator architecture [resnet_9blocks | resnet_6blocks | unet_256 | unet_128]')
        parser.add_argument('--n_layers_D', type=int, default=3, help='only used if netD==n_layers')
        parser.add_argument('--norm', type=str, default='instance', help='instance normalization or batch normalization [instance | batch | none]')
        parser.add_argument('--init_type', type=str, default='normal', help='network initialization [normal | xavier | kaiming | orthogonal]')
        parser.add_argument('--init_gain', type=float, default=0.02, help='scaling factor for normal, xavier and orthogonal.')
        parser.add_argument('--no_dropout', action='store_true', help='no dropout for the generator')
        # dataset parameters
        parser.add_argument('--dataset_mode', type=str, default='unaligned', help='chooses how datasets are loaded. [unaligned | aligned | single | colorization]')
        parser.add_argument('--direction', type=str, default='AtoB', help='AtoB or BtoA')
        parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly')
        parser.add_argument('--num_threads', default=4, type=int, help='# threads for loading data')
        parser.add_argument('--batch_size', type=int, default=1, help='input batch size')
        parser.add_argument('--load_size', type=int, default=286, help='scale images to this size')
        parser.add_argument('--crop_size', type=int, default=256, help='then crop to this size')
        parser.add_argument('--max_dataset_size', type=int, default=float("inf"), help='Maximum number of samples allowed per dataset. If the dataset directory contains more than max_dataset_size, only a subset is loaded.')
        parser.add_argument('--preprocess', type=str, default='resize_and_crop', help='scaling and cropping of images at load time [resize_and_crop | crop | scale_width | scale_width_and_crop | none]')
        parser.add_argument('--no_flip', action='store_true', help='if specified, do not flip the images for data augmentation')
        parser.add_argument('--display_winsize', type=int, default=256, help='display window size for both visdom and HTML')
        # additional parameters
        parser.add_argument('--epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model')
        parser.add_argument('--load_iter', type=int, default='0', help='which iteration to load? if load_iter > 0, the code will load models by iter_[load_iter]; otherwise, the code will load models by [epoch]')
        parser.add_argument('--verbose', action='store_true', help='if specified, print more debugging information')
        parser.add_argument('--suffix', default='', type=str, help='customized suffix: opt.name = opt.name + suffix: e.g., {model}_{netG}_size{load_size}')
        self.initialized = True
        return parser

    def gather_options(self):
        """Initialize our parser with basic options(only once).
        Add additional model-specific and dataset-specific options.
        These options are defined in the <modify_commandline_options> function
        in model and dataset classes.
        """
        # NOTE(review): `parser` is only created when not yet initialized,
        # yet it is used unconditionally below — a second call with
        # self.initialized == True would raise NameError; presumably this
        # method is only called once per process.
        if not self.initialized:  # check if it has been initialized
            parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
            parser = self.initialize(parser)

        # get the basic options
        opt, _ = parser.parse_known_args()

        # modify model-related parser options
        model_name = opt.model
        model_option_setter = models.get_option_setter(model_name)
        parser = model_option_setter(parser, self.isTrain)
        opt, _ = parser.parse_known_args()  # parse again with new defaults

        # modify dataset-related parser options
        dataset_name = opt.dataset_mode
        dataset_option_setter = data.get_option_setter(dataset_name)
        parser = dataset_option_setter(parser, self.isTrain)

        # save and return the parser
        self.parser = parser
        return parser.parse_args()

    def print_options(self, opt):
        """Print and save options

        It will print both current options and default values(if different).
        It will save options into a text file / [checkpoints_dir] / opt.txt
        """
        message = ''
        message += '----------------- Options ---------------\n'
        for k, v in sorted(vars(opt).items()):
            comment = ''
            default = self.parser.get_default(k)
            if v != default:
                comment = '\t[default: %s]' % str(default)
            message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment)
        message += '----------------- End -------------------'
        print(message)

        # save to the disk
        expr_dir = os.path.join(opt.checkpoints_dir, opt.name)
        util.mkdirs(expr_dir)
        file_name = os.path.join(expr_dir, 'opt.txt')
        with open(file_name, 'wt') as opt_file:
            opt_file.write(message)
            opt_file.write('\n')

    def parse(self):
        """Parse our options, create checkpoints directory suffix, and set up gpu device."""
        opt = self.gather_options()
        opt.isTrain = self.isTrain  # train or test

        # process opt.suffix
        if opt.suffix:
            suffix = ('_' + opt.suffix.format(**vars(opt))) if opt.suffix != '' else ''
            opt.name = opt.name + suffix

        self.print_options(opt)

        # set gpu ids
        str_ids = opt.gpu_ids.split(',')
        opt.gpu_ids = []
        for str_id in str_ids:
            id = int(str_id)
            if id >= 0:
                opt.gpu_ids.append(id)
        if len(opt.gpu_ids) > 0:
            print('is cuda availlable - ', torch.cuda.is_available())
            torch.cuda.set_device(opt.gpu_ids[0])
            # torch.cuda.set_device('gpu')

        self.opt = opt
        return self.opt
| 58.647482
| 235
| 0.660329
|
4a0e6339dfa009ee53402336cb76b5b58a7b2e17
| 1,472
|
py
|
Python
|
sdk/datalake/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/create_trusted_id_provider_with_account_parameters.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/datalake/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/create_trusted_id_provider_with_account_parameters.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 226
|
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
sdk/datalake/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/create_trusted_id_provider_with_account_parameters.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
# NOTE: this model is generated by AutoRest — manual edits will be lost on
# regeneration (see the file header).
class CreateTrustedIdProviderWithAccountParameters(Model):
    """The parameters used to create a new trusted identity provider while
    creating a new Data Lake Store account.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. The unique name of the trusted identity provider to
     create.
    :type name: str
    :param id_provider: Required. The URL of this trusted identity provider.
    :type id_provider: str
    """

    # Constraints checked by msrest client-side before the request is sent.
    _validation = {
        'name': {'required': True},
        'id_provider': {'required': True},
    }

    # Maps Python attribute names to their wire-format JSON paths.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'id_provider': {'key': 'properties.idProvider', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CreateTrustedIdProviderWithAccountParameters, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.id_provider = kwargs.get('id_provider', None)
| 35.047619
| 84
| 0.616848
|
4a0e643fabd0bde2efea8265fc35d6f0a91757b3
| 1,076
|
py
|
Python
|
dimka/test/core/test_config.py
|
madmis/dimka-binance
|
8a9173ac923d2fb122afeb68f85e4aba07d08c5b
|
[
"MIT"
] | null | null | null |
dimka/test/core/test_config.py
|
madmis/dimka-binance
|
8a9173ac923d2fb122afeb68f85e4aba07d08c5b
|
[
"MIT"
] | null | null | null |
dimka/test/core/test_config.py
|
madmis/dimka-binance
|
8a9173ac923d2fb122afeb68f85e4aba07d08c5b
|
[
"MIT"
] | null | null | null |
import unittest
import os.path
import tempfile
import argparse
import dimka.core.config as config
class TestConfig(unittest.TestCase):
    """Tests for dimka.core.config.Config.parse_config."""

    def test_correct_config(self):
        """A well-formed YAML config file parses into the expected mapping."""
        path = os.path.join(tempfile.gettempdir(), "conf.yaml")
        if os.path.isfile(path):
            os.remove(path)

        # Write a minimal, valid YAML config to a temp location.
        handle = open(path, "a+")
        handle.writelines([
            "db_path: /var/www/data/test_app.sqlite3\n",
            "key: api_key\n",
            "secret: api_secret\n",
            "pair: ETHBTC\n",
        ])
        handle.close()

        parser = argparse.ArgumentParser(
            formatter_class=argparse.RawTextHelpFormatter
        )
        parser.add_argument("--config")
        cli_args = parser.parse_args(["--config", path])

        result = config.Config().parse_config(cli_args)

        self.assertEqual(result.get('db_path'), '/var/www/data/test_app.sqlite3')
        self.assertEqual(result.get('key'), 'api_key')
        self.assertEqual(result.get('secret'), 'api_secret')
        self.assertEqual(result.get('pair'), 'ETHBTC')


if __name__ == '__main__':
    unittest.main()
| 27.589744
| 81
| 0.627323
|
4a0e65237a51569d0111fbfbc885bd1d8461852c
| 325
|
py
|
Python
|
docs/src/tutorial/path/code010.py
|
eadwinCode/django-ninja
|
4033b70399f3f58db747fa1a41075488306accb7
|
[
"MIT"
] | 2,809
|
2020-06-21T08:48:40.000Z
|
2022-03-30T16:42:11.000Z
|
docs/src/tutorial/path/code010.py
|
eadwinCode/django-ninja
|
4033b70399f3f58db747fa1a41075488306accb7
|
[
"MIT"
] | 311
|
2020-06-22T07:59:27.000Z
|
2022-03-31T18:01:23.000Z
|
docs/src/tutorial/path/code010.py
|
eadwinCode/django-ninja
|
4033b70399f3f58db747fa1a41075488306accb7
|
[
"MIT"
] | 178
|
2020-07-08T00:40:43.000Z
|
2022-03-29T02:05:20.000Z
|
import datetime
from ninja import Schema, Path
class PathDate(Schema):
    """Groups the year/month/day path parameters into one schema (django-ninja docs example)."""

    year: int
    month: int
    day: int

    def value(self) -> datetime.date:
        """Combine the three path components into a ``datetime.date``."""
        return datetime.date(self.year, self.month, self.day)
# `api` (a ninja.NinjaAPI instance) is defined elsewhere in the docs example.
@api.get("/events/{year}/{month}/{day}")
def events(request, date: PathDate = Path(...)):
    # Path(...) tells ninja to populate every PathDate field from the URL path.
    return {"date": date.value()}
| 19.117647
| 61
| 0.64
|
4a0e6644c5a29485562e8ba7598a6739ddbce649
| 6,616
|
py
|
Python
|
configs/cascade_mask_rcnn_r50_fpn_1x.py
|
minhnvl/Worms_Detection
|
a809e3926538b9b777538994fc32fde475dc2796
|
[
"Apache-2.0"
] | null | null | null |
configs/cascade_mask_rcnn_r50_fpn_1x.py
|
minhnvl/Worms_Detection
|
a809e3926538b9b777538994fc32fde475dc2796
|
[
"Apache-2.0"
] | null | null | null |
configs/cascade_mask_rcnn_r50_fpn_1x.py
|
minhnvl/Worms_Detection
|
a809e3926538b9b777538994fc32fde475dc2796
|
[
"Apache-2.0"
] | null | null | null |
# model settings
# Cascade Mask R-CNN with a ResNet-50 + FPN backbone, mmdetection v1.x-style
# config (plain module-level dicts consumed by the framework).
model = dict(
    type='CascadeRCNN',
    num_stages=3,
    pretrained='modelzoo://resnet50',
    backbone=dict(
        type='ResNet',
        depth=50,
        num_stages=4,
        out_indices=(0, 1, 2, 3),
        frozen_stages=1,
        style='pytorch'),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        num_outs=5),
    rpn_head=dict(
        type='RPNHead',
        in_channels=256,
        feat_channels=256,
        anchor_scales=[8],
        anchor_ratios=[0.5, 1.0, 2.0],
        anchor_strides=[4, 8, 16, 32, 64],
        target_means=[.0, .0, .0, .0],
        target_stds=[1.0, 1.0, 1.0, 1.0],
        use_sigmoid_cls=True),
    bbox_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    # Three cascade stages with progressively tighter box-regression stds.
    # num_classes=13 everywhere — presumably 12 foreground classes plus
    # background (mmdet v1 convention); TODO confirm against the dataset.
    bbox_head=[
        dict(
            type='SharedFCBBoxHead',
            num_fcs=2,
            in_channels=256,
            fc_out_channels=1024,
            roi_feat_size=7,
            num_classes=13,
            target_means=[0., 0., 0., 0.],
            target_stds=[0.1, 0.1, 0.2, 0.2],
            reg_class_agnostic=True),
        dict(
            type='SharedFCBBoxHead',
            num_fcs=2,
            in_channels=256,
            fc_out_channels=1024,
            roi_feat_size=7,
            num_classes=13,
            target_means=[0., 0., 0., 0.],
            target_stds=[0.05, 0.05, 0.1, 0.1],
            reg_class_agnostic=True),
        dict(
            type='SharedFCBBoxHead',
            num_fcs=2,
            in_channels=256,
            fc_out_channels=1024,
            roi_feat_size=7,
            num_classes=13,
            target_means=[0., 0., 0., 0.],
            target_stds=[0.033, 0.033, 0.067, 0.067],
            reg_class_agnostic=True)
    ],
    mask_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    mask_head=dict(
        type='FCNMaskHead',
        num_convs=4,
        in_channels=256,
        conv_out_channels=256,
        num_classes=13))
# model training and testing settings
train_cfg = dict(
    rpn=dict(
        assigner=dict(
            type='MaxIoUAssigner',
            pos_iou_thr=0.7,
            neg_iou_thr=0.3,
            min_pos_iou=0.3,
            ignore_iof_thr=-1),
        sampler=dict(
            type='RandomSampler',
            num=256,
            pos_fraction=0.5,
            neg_pos_ub=-1,
            add_gt_as_proposals=False),
        allowed_border=0,
        pos_weight=-1,
        smoothl1_beta=1 / 9.0,
        debug=False),
    # One R-CNN training config per cascade stage; IoU thresholds increase
    # (0.5 -> 0.6 -> 0.7) so later stages train on better proposals.
    rcnn=[
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.5,
                neg_iou_thr=0.5,
                min_pos_iou=0.5,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=512,
                pos_fraction=0.25,
                neg_pos_ub=-1,
                add_gt_as_proposals=True),
            mask_size=28,
            pos_weight=-1,
            debug=False),
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.6,
                neg_iou_thr=0.6,
                min_pos_iou=0.6,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=512,
                pos_fraction=0.25,
                neg_pos_ub=-1,
                add_gt_as_proposals=True),
            mask_size=28,
            pos_weight=-1,
            debug=False),
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.7,
                neg_iou_thr=0.7,
                min_pos_iou=0.7,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=512,
                pos_fraction=0.25,
                neg_pos_ub=-1,
                add_gt_as_proposals=True),
            mask_size=28,
            pos_weight=-1,
            debug=False)
    ],
    # Loss weight per cascade stage (stage 1 weighted highest).
    stage_loss_weights=[1, 0.5, 0.25])
test_cfg = dict(
    rpn=dict(
        nms_across_levels=False,
        nms_pre=2000,
        nms_post=2000,
        max_num=2000,
        nms_thr=0.7,
        min_bbox_size=0),
    rcnn=dict(
        score_thr=0.05,
        nms=dict(type='nms', iou_thr=0.5),
        max_per_img=100,
        mask_thr_binary=0.5),
    keep_all_stages=False)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
data = dict(
    imgs_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_train2017.json',
        img_prefix=data_root + 'train2017/',
        img_scale=(1280,720),
        img_norm_cfg=img_norm_cfg,
        size_divisor=32,
        flip_ratio=0.5,
        with_mask=True,
        with_crowd=True,
        with_label=True),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        img_scale=(1280,720),
        img_norm_cfg=img_norm_cfg,
        size_divisor=32,
        flip_ratio=0,
        with_mask=True,
        with_crowd=True,
        with_label=True),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        img_scale=(1280,720),
        img_norm_cfg=img_norm_cfg,
        size_divisor=32,
        flip_ratio=0,
        with_mask=True,
        with_label=False,
        test_mode=True))
# optimizer
# lr=0.0025 looks linearly scaled for 1 GPU x 2 imgs (from the 8x2 baseline
# of 0.02) — TODO confirm against the actual training setup.
optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 3,
    step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 100
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/cascade_mask_rcnn_r50_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| 29.017544
| 77
| 0.535973
|
4a0e66d5a7f64efb5fd6cf787d9e85a448730118
| 9,455
|
py
|
Python
|
airport/utils/cache/heap.py
|
zen-xu/airport
|
fb5cfdd885f3f78b9be0abc90c04199b24d5560e
|
[
"Apache-2.0"
] | null | null | null |
airport/utils/cache/heap.py
|
zen-xu/airport
|
fb5cfdd885f3f78b9be0abc90c04199b24d5560e
|
[
"Apache-2.0"
] | null | null | null |
airport/utils/cache/heap.py
|
zen-xu/airport
|
fb5cfdd885f3f78b9be0abc90c04199b24d5560e
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass
from dataclasses import field
from threading import Condition
from threading import Lock
from threading import RLock
from typing import Callable
from typing import Dict
from typing import Generic
from typing import List
from typing import Optional
from typing import TypeVar
from typing_extensions import Protocol
class HeapError(Exception):
    """Base class for all heap-related errors.

    Subclasses set ``message`` as their default error text; an explicit
    message passed to the constructor takes precedence.
    """

    message: str = ""

    def __init__(self, msg=""):
        super().__init__(msg if msg else self.message)
class HeapClosed(HeapError):
    """Raised when an operation is attempted on a closed Heap."""

    message = "heap is closed"


class HeapObjectNotFound(HeapError):
    """Raised when the requested object is not present in the heap."""

    message = "object not found"


class HeapObjectAlreadyRemoved(HeapError):
    """Raised when an object was already removed from the heap data."""

    message = "object was removed from heap data"


class HeapKeyFuncError(HeapError):
    """Raised when the user-supplied key function fails (see Heap docstrings)."""

    ...


class HeapLessFuncError(HeapError):
    """Raised when the user-supplied ordering function fails (see HeapData.less)."""

    ...
T = TypeVar("T")


@dataclass
class HeapItem(Generic[T]):
    """A heap entry: the stored object plus its current index in the queue."""

    obj: T
    index: int


@dataclass
class ItemKeyValue(Generic[T]):
    """A (key, object) pair used when pushing into :class:`HeapData`."""

    key: str
    obj: T


# Derives the identifying key of an object.
KeyFunc = Callable[[T], str]
# Strict ordering predicate: True when the first object sorts first.
LessFunc = Callable[[T, T], bool]


@dataclass
class HeapData(Generic[T]):
    """Internal state of a keyed binary heap.

    ``queue`` holds keys in heap order; ``items`` maps each key to its object
    and current queue index, keeping lookup-by-key O(1).
    """

    items: Dict[str, HeapItem[T]] = field(init=False)
    queue: List[str] = field(init=False)

    def __init__(self, key_func: KeyFunc, less_func: LessFunc):
        self._key_func = key_func
        self._less_func = less_func
        self.items = dict()
        self.queue = list()

    @property
    def key_func(self) -> KeyFunc:
        return self._key_func

    @property
    def less_func(self) -> LessFunc:
        return self._less_func

    def less(self, i: int, j: int) -> bool:
        """Return True when queue[i] orders before queue[j].

        Out-of-range indices or keys missing from ``items`` compare as
        "not less".

        :raises HeapLessFuncError
        """
        # Bug fix: the original used ``i > len(self.queue)``, which let an
        # index equal to len(self.queue) fall through and raise IndexError
        # on the ``self.queue[i]`` access below.
        if i >= len(self.queue) or j >= len(self.queue):
            return False
        if (item_i := self.items.get(self.queue[i])) is None:
            return False
        if (item_j := self.items.get(self.queue[j])) is None:
            return False
        return self.less_func(item_i.obj, item_j.obj)

    def swap(self, i: int, j: int):
        """Exchange queue positions i and j, keeping item indices in sync."""
        self.queue[i], self.queue[j] = self.queue[j], self.queue[i]
        self.items[self.queue[i]].index = i
        self.items[self.queue[j]].index = j

    def push(self, kv: ItemKeyValue[T]):
        """Append *kv* at the end of the queue (heap invariant not restored here)."""
        self.items[kv.key] = HeapItem(obj=kv.obj, index=len(self.queue))
        self.queue.append(kv.key)

    def pop(self) -> Optional[T]:
        """Remove and return the object at the *end* of the queue.

        Returns None when the key is no longer present in ``items``.
        """
        # O(1) in-place pop instead of the original's O(n) slice copy.
        key = self.queue.pop()
        try:
            return self.items.pop(key).obj
        except KeyError:
            return None

    def __len__(self):
        return len(self.queue)
class HeapInterface(Protocol[T]):
    """Structural interface required by the module-level heap algorithms.

    Analogous to Go's ``container/heap`` Interface: a sized, orderable,
    swappable sequence whose push/pop act on the *end* of the sequence.
    """

    def __len__(self) -> int:
        ...

    def push(self, obj: T):
        """Append obj at the end of the sequence."""
        ...

    def pop(self) -> Optional[T]:
        """Remove and return the last element of the sequence."""
        ...

    def less(self, i: int, j: int) -> bool:
        """Return True when element i orders before element j."""
        ...

    def swap(self, i: int, j: int):
        """Exchange elements i and j."""
        ...
def init(h: HeapInterface):
    """Establish the heap invariant over all elements of *h* in O(n).

    Sifts down every internal node, starting from the last parent.
    """
    n = len(h)
    # Bug fix: the last parent node lives at index n // 2 - 1, so the loop
    # must cover reversed(range(n // 2)). The original iterated
    # reversed(range(n // 2 - 1)), skipping that node and leaving the heap
    # invariant potentially violated.
    for i in reversed(range(n // 2)):
        down(h, i, n)


def fix(h: HeapInterface, index: int):
    """Re-establish the heap invariant after the element at *index* changed."""
    if not down(h, index, len(h)):
        up(h, index)


def down(h: HeapInterface, i0: int, n: int) -> bool:
    """Sift the element at *i0* down until both children order after it.

    Only the first *n* elements are considered part of the heap.
    Returns True when the element moved.
    """
    i = i0
    while True:
        j1 = 2 * i + 1  # left child
        if j1 >= n or j1 < 0:  # no children (j1 < 0 kept from the original for parity)
            break
        j = j1
        if (j2 := j1 + 1) < n and h.less(j2, j1):
            j = j2  # right child orders first
        if not h.less(j, i):
            break
        h.swap(i, j)
        i = j
    return i > i0


def up(h: HeapInterface, j: int):
    """Sift the element at *j* up until its parent orders before it."""
    while True:
        i = (j - 1) // 2  # parent
        if i < 0:
            break
        if i == j or not h.less(j, i):
            break
        h.swap(i, j)
        j = i


def push(h: HeapInterface, x: T):
    """Add *x* to the heap, preserving the invariant; O(log n)."""
    h.push(x)
    up(h, len(h) - 1)


def pop(h: HeapInterface) -> Optional[T]:
    """Remove and return the minimum element (per ``less``); O(log n)."""
    n = len(h) - 1
    h.swap(0, n)
    down(h, 0, n)
    return h.pop()


def remove(h: HeapInterface, i: int):
    """Remove and return the element at index *i*; O(log n)."""
    n = len(h) - 1
    if n != i:
        h.swap(i, n)
        if not down(h, i, n):
            up(h, i)
    return h.pop()
@dataclass
class Heap(Generic[T]):
    """A thread-safe, keyed min-heap (ordering given by ``data.less_func``).

    Producers add/update items by key; consumers block in :meth:`pop` until
    an item is available or the heap is closed.

    NOTE(review): ``__post_init__`` discards the ``lock``/``rlock``/``cond``
    passed to the constructor and builds fresh ones, so the lock arguments
    built in :meth:`new` are effectively ignored — confirm this is intended.
    """

    lock: Lock          # guards data + closed; paired with cond
    rlock: RLock        # guards read-only accessors
    cond: Condition     # signalled whenever items are added or the heap closes
    data: HeapData[T]   # the underlying keyed heap storage
    closed: bool = False

    def __post_init__(self):
        # Replace whatever was passed in with a fresh, consistent lock set.
        self.lock = Lock()
        self.rlock = RLock()
        self.cond = Condition(self.lock)

    @classmethod
    def new(cls, key_fn: KeyFunc, less_fn: LessFunc) -> "Heap":
        """Alternate constructor: build a Heap from key/ordering functions."""
        heap_data: HeapData[T] = HeapData(key_func=key_fn, less_func=less_fn)
        lock = Lock()
        rlock = RLock()
        cond = Condition(lock)
        return Heap(lock=lock, rlock=rlock, cond=cond, data=heap_data)

    def close(self):
        """
        Close the Heap and signals condition variables that may be waiting to pop
        items from the heap.
        """
        with self.lock:
            self.closed = True
            self.cond.notify_all()

    def add(self, obj: T):
        """
        Inserts an item, and puts it in the queue. The item is updated if it
        already exists.

        :raises HeapClosed
        :raises HeapKeyFuncError
        """
        key = self.data.key_func(obj)
        with self.lock:
            if self.closed:
                raise HeapClosed
            if key in self.data.items:
                # Key already present: replace the object and re-heapify
                # from its current position.
                self.data.items[key].obj = obj
                fix(self.data, self.data.items[key].index)
            else:
                self.add_if_not_present_locked(key, obj)
            self.cond.notify_all()

    def bulk_add(self, objs: List[T]):
        """
        Adds all the items in the list to the queue and then signals the condition
        variable. It is useful when the caller would like to add all of the items
        to the queue before consumer starts processing them.

        :raises HeapClosed
        :raises HeapKeyFuncError
        """
        with self.lock:
            if self.closed:
                raise HeapClosed
            for obj in objs:
                key = self.data.key_func(obj)
                if key in self.data.items:
                    self.data.items[key].obj = obj
                    fix(self.data, self.data.items[key].index)
                else:
                    self.add_if_not_present_locked(key, obj)
            # Single notification after the whole batch is in place.
            self.cond.notify_all()

    def add_if_not_present(self, obj: T):
        """
        Inserts an item, and puts it in the queue. If an item with
        the key is present in the map, no changes is made to the item.

        This is useful in a single producer/consumer scenario so that the consumer can
        safely retry items without contending with the producer and potentially enqueueing
        stale items.

        :raises HeapClosed
        :raises HeapKeyFuncError
        """
        key = self.data.key_func(obj)
        with self.lock:
            if self.closed:
                raise HeapClosed
            self.add_if_not_present_locked(key, obj)
            self.cond.notify_all()

    def add_if_not_present_locked(self, key: str, obj: T):
        """
        Assumes the lock is already held and adds the provided
        item to the queue if it does not already exist.
        """
        if key in self.data.items:
            return
        push(self.data, ItemKeyValue(key=key, obj=obj))

    def update(self, obj: T):
        """
        Update is the same as Add in this implementation. When the item does not
        exist, it is added.

        :raises HeapClosed
        :raises HeapKeyFuncError
        """
        self.add(obj)

    def delete(self, obj: T):
        """
        Removes an item.

        :raises HeapKeyFuncError
        :raises HeapObjectNotFound
        """
        key = self.data.key_func(obj)
        with self.lock:
            if (item := self.data.items.get(key)) is None:
                raise HeapObjectNotFound
            remove(self.data, item.index)

    def pop(self) -> T:
        """
        Pop waits until an item is ready. If multiple items are
        ready, they are returned in the order given by `Heap.data.less_func`.

        :raises HeapClosed
        :raises HeapObjectAlreadyRemoved
        """
        with self.lock:
            # Block until an item arrives; close() wakes all waiters.
            while len(self.data.queue) == 0:
                if self.closed:
                    raise HeapClosed
                self.cond.wait()
            obj: Optional[T] = pop(self.data)
            if obj is None:
                raise HeapObjectAlreadyRemoved
            else:
                return obj

    def list(self) -> List[T]:
        """
        :return: a list of all the items.
        """
        with self.rlock:
            return [item.obj for item in self.data.items.values()]

    def list_keys(self) -> List[str]:
        """
        :return: a list of all the keys of the objects currently in the Heap.
        """
        with self.rlock:
            return list(self.data.items.keys())

    def get(self, obj: T) -> Optional[T]:
        """
        :raises HeapKeyFuncError
        :return: the requested item.
        """
        key = self.data.key_func(obj)
        return self.get_by_key(key)

    def get_by_key(self, key: str) -> Optional[T]:
        """
        :return: the requested item
        """
        with self.rlock:
            if (item := self.data.items.get(key)) is not None:
                return item.obj
            return None

    def is_closed(self) -> bool:
        """
        :return:true if the queue is closed
        """
        with self.rlock:
            return self.closed
| 23.578554
| 90
| 0.553464
|
4a0e69437e143c877d4893d3d34ccdb63f050c04
| 8,090
|
py
|
Python
|
test.py
|
MOONJOOYOUNG/CutMix-PyTorch
|
e54b8387ad6f63d2b9cb2c1f9dc332aad2d185e1
|
[
"MIT"
] | 1
|
2021-07-09T21:01:08.000Z
|
2021-07-09T21:01:08.000Z
|
test.py
|
MOONJOOYOUNG/CutMix-PyTorch
|
e54b8387ad6f63d2b9cb2c1f9dc332aad2d185e1
|
[
"MIT"
] | null | null | null |
test.py
|
MOONJOOYOUNG/CutMix-PyTorch
|
e54b8387ad6f63d2b9cb2c1f9dc332aad2d185e1
|
[
"MIT"
] | null | null | null |
# original code: https://github.com/dyhan0920/PyramidNet-PyTorch/blob/master/train.py
import argparse
import os
import shutil
import time
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import resnet as RN
import pyramidnet as PYRM
import warnings
warnings.filterwarnings("ignore")
# Names of torchvision model builders (lowercase callables); presumably kept
# for --help/choices reference — not obviously used below, TODO confirm.
model_names = sorted(name for name in models.__dict__
                     if name.islower() and not name.startswith("__")
                     and callable(models.__dict__[name]))

# Command-line interface for the evaluation script.
parser = argparse.ArgumentParser(description='Cutmix PyTorch CIFAR-10, CIFAR-100 and ImageNet-1k Test')
parser.add_argument('--net_type', default='pyramidnet', type=str,
                    help='networktype: resnet, and pyamidnet')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
                    help='number of data loading workers (default: 4)')
parser.add_argument('--epochs', default=90, type=int, metavar='N',
                    help='number of total epochs to run')
parser.add_argument('-b', '--batch_size', default=128, type=int,
                    metavar='N', help='mini-batch size (default: 256)')
parser.add_argument('--print-freq', '-p', default=1, type=int,
                    metavar='N', help='print frequency (default: 10)')
parser.add_argument('--depth', default=32, type=int,
                    help='depth of the network (default: 32)')
parser.add_argument('--no-bottleneck', dest='bottleneck', action='store_false',
                    help='to use basicblock for CIFAR datasets (default: bottleneck)')
parser.add_argument('--dataset', dest='dataset', default='imagenet', type=str,
                    help='dataset (options: cifar10, cifar100, and imagenet)')
parser.add_argument('--alpha', default=300, type=float,
                    help='number of new channel increases per depth (default: 300)')
parser.add_argument('--no-verbose', dest='verbose', action='store_false',
                    help='to print the status at every iteration')
parser.add_argument('--pretrained', default='/set/your/model/path', type=str, metavar='PATH')
parser.set_defaults(bottleneck=True)
parser.set_defaults(verbose=True)

# Module-level bests, expressed as error percentages (100 = worst case).
best_err1 = 100
best_err5 = 100
def main():
    """Load a pretrained checkpoint and evaluate it on the chosen dataset.

    Builds the validation loader for cifar10/cifar100/imagenet, constructs
    the requested network (resnet or pyramidnet), restores weights from
    ``--pretrained``, and prints top-1/top-5 error on the validation set.
    """
    global args, best_err1, best_err5
    args = parser.parse_args()

    if args.dataset.startswith('cifar'):
        # CIFAR per-channel statistics (values are in 0-255 scale, hence /255).
        normalize = transforms.Normalize(mean=[x / 255.0 for x in [125.3, 123.0, 113.9]],
                                         std=[x / 255.0 for x in [63.0, 62.1, 66.7]])

        # NOTE: transform_train is built but unused in this evaluation script.
        transform_train = transforms.Compose([
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ])

        transform_test = transforms.Compose([
            transforms.ToTensor(),
            normalize
        ])

        if args.dataset == 'cifar100':
            val_loader = torch.utils.data.DataLoader(
                datasets.CIFAR100('../data', train=False, transform=transform_test),
                batch_size=args.batch_size, shuffle=True, num_workers=args.workers, pin_memory=True)
            numberofclass = 100
        elif args.dataset == 'cifar10':
            val_loader = torch.utils.data.DataLoader(
                datasets.CIFAR10('../data', train=False, transform=transform_test),
                batch_size=args.batch_size, shuffle=True, num_workers=args.workers, pin_memory=True)
            numberofclass = 10
        else:
            raise Exception('unknown dataset: {}'.format(args.dataset))
    elif args.dataset == 'imagenet':
        # Hard-coded ImageNet validation path; standard 256-resize/224-crop eval.
        valdir = os.path.join('/home/data/ILSVRC/val')
        normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                         std=[0.229, 0.224, 0.225])

        val_loader = torch.utils.data.DataLoader(
            datasets.ImageFolder(valdir, transforms.Compose([
                transforms.Resize(256),
                transforms.CenterCrop(224),
                transforms.ToTensor(),
                normalize,
            ])),
            batch_size=args.batch_size, shuffle=False,
            num_workers=args.workers, pin_memory=True)
        numberofclass = 1000
    else:
        raise Exception('unknown dataset: {}'.format(args.dataset))

    print("=> creating model '{}'".format(args.net_type))
    if args.net_type == 'resnet':
        model = RN.ResNet(args.dataset, args.depth, numberofclass, args.bottleneck)  # for ResNet
    elif args.net_type == 'pyramidnet':
        model = PYRM.PyramidNet(args.dataset, args.depth, args.alpha, numberofclass,
                                args.bottleneck)
    else:
        raise Exception('unknown network architecture: {}'.format(args.net_type))

    model = torch.nn.DataParallel(model).cuda()

    # Restore weights; the checkpoint must exist or evaluation is meaningless.
    if os.path.isfile(args.pretrained):
        print("=> loading checkpoint '{}'".format(args.pretrained))
        checkpoint = torch.load(args.pretrained)
        model.load_state_dict(checkpoint['state_dict'])
        print("=> loaded checkpoint '{}'".format(args.pretrained))
    else:
        raise Exception("=> no checkpoint found at '{}'".format(args.pretrained))

    print(model)
    print('the number of model parameters: {}'.format(sum([p.data.nelement() for p in model.parameters()])))

    # define loss function (criterion) and optimizer
    criterion = nn.CrossEntropyLoss().cuda()

    cudnn.benchmark = True

    # evaluate on validation set
    err1, err5, val_loss = validate(val_loader, model, criterion)

    print('Accuracy (top-1 and 5 error):', err1, err5)
def validate(val_loader, model, criterion):
    """Evaluate *model* over *val_loader* and report error statistics.

    Relies on the module-level ``args`` namespace for ``print_freq`` and
    ``verbose``.

    Returns:
        tuple: (avg top-1 error %, avg top-5 error %, avg loss).
    """
    batch_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()

    # switch to evaluate mode
    model.eval()

    end = time.time()
    # Bug fix: evaluation previously ran with autograd enabled (via the
    # deprecated Variable wrappers), building computation graphs and wasting
    # GPU memory; torch.no_grad() disables gradient tracking for the loop.
    with torch.no_grad():
        for i, (batch_input, target) in enumerate(val_loader):
            target = target.cuda()

            output = model(batch_input)
            loss = criterion(output, target)

            # measure accuracy and record loss
            err1, err5 = accuracy(output.data, target, topk=(1, 5))

            losses.update(loss.item(), batch_input.size(0))
            top1.update(err1.item(), batch_input.size(0))
            top5.update(err5.item(), batch_input.size(0))

            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()

            if i % args.print_freq == 0 and args.verbose == True:
                print('Test (on val set): [{0}/{1}]\t'
                      'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                      'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                      'Top 1-err {top1.val:.4f} ({top1.avg:.4f})\t'
                      'Top 5-err {top5.val:.4f} ({top5.avg:.4f})'.format(
                          i, len(val_loader), batch_time=batch_time, loss=losses,
                          top1=top1, top5=top5))

    return top1.avg, top5.avg, losses.avg
class AverageMeter(object):
    """Tracks the most recent value and a running average of a series."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all statistics back to zero."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
def accuracy(output, target, topk=(1,)):
    """Computes the top-k *error* (percentage) for each k in *topk*.

    Args:
        output: (batch, num_classes) class scores.
        target: (batch,) ground-truth class indices.
        topk: iterable of k values to evaluate.

    Returns:
        list of 1-element tensors, each the top-k error in percent.
    """
    maxk = max(topk)
    batch_size = target.size(0)

    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        # Bug fix: ``correct[:k]`` is non-contiguous (pred was transposed), so
        # ``.view(-1)`` raises RuntimeError on recent PyTorch versions;
        # ``reshape`` handles both contiguous and non-contiguous tensors.
        correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
        wrong_k = batch_size - correct_k
        res.append(wrong_k.mul_(100.0 / batch_size))

    return res
# Entry point when run as a script (not on import).
if __name__ == '__main__':
    main()
| 35.79646
| 108
| 0.617553
|
4a0e696393fe47f38ef6cca7ae376f29c6f83d29
| 13,589
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/util/custom.py
|
major/azure-cli
|
13c46f6e1d94a2eddf31539a0bbf1d5f2f28f63a
|
[
"MIT"
] | 7
|
2020-04-26T09:54:05.000Z
|
2021-07-22T16:54:41.000Z
|
src/azure-cli/azure/cli/command_modules/util/custom.py
|
major/azure-cli
|
13c46f6e1d94a2eddf31539a0bbf1d5f2f28f63a
|
[
"MIT"
] | 2
|
2020-05-04T22:45:39.000Z
|
2021-12-02T22:45:10.000Z
|
src/azure-cli/azure/cli/command_modules/util/custom.py
|
major/azure-cli
|
13c46f6e1d94a2eddf31539a0bbf1d5f2f28f63a
|
[
"MIT"
] | 13
|
2020-06-30T16:23:36.000Z
|
2022-03-29T17:12:05.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.log import get_logger
logger = get_logger(__name__)
UPGRADE_MSG = 'Not able to upgrade automatically. Instructions can be found at https://aka.ms/doc/InstallAzureCli'
def rest_call(cmd, url, method=None, headers=None, uri_parameters=None,
              body=None, skip_authorization_header=False, resource=None, output_file=None):
    """Invoke a raw HTTP request through the CLI's request pipeline.

    When not writing to a file, returns the parsed JSON body; for non-JSON
    responses the raw text is printed to stdout and None is returned.
    """
    from azure.cli.core.util import send_raw_request
    response = send_raw_request(cmd.cli_ctx, method, url, headers, uri_parameters, body,
                                skip_authorization_header, resource, output_file)
    if output_file or not response.content:
        return None
    try:
        return response.json()
    except ValueError:
        logger.warning('Not a json response, outputting to stdout. For binary data '
                       'suggest use "--output-file" to write to a file')
        print(response.text)
        return None
def show_version(cmd):  # pylint: disable=unused-argument
    """Return the installed CLI component versions as a JSON-serializable dict."""
    from azure.cli.core.util import get_az_version_json
    return get_az_version_json()
def upgrade_version(cmd, update_all=None, yes=None):  # pylint: disable=too-many-locals, too-many-statements, too-many-branches, no-member, unused-argument
    """Upgrade the Azure CLI (and optionally all installed extensions).

    Detects how the CLI was installed (DEB/RPM/Homebrew/pip/Docker/MSI via the
    AZ_INSTALLER environment variable) and runs the matching package-manager
    command. ``update_all`` also upgrades every wheel extension; ``yes``
    skips the interactive confirmation.
    """
    import os
    import platform
    import sys
    import subprocess
    import azure.cli.core.telemetry as telemetry
    from azure.cli.core import __version__ as local_version
    from azure.cli.core._environment import _ENV_AZ_INSTALLER
    from azure.cli.core.extension import get_extensions, WheelExtension
    from packaging.version import parse
    from knack.util import CLIError

    # Compare the installed version against the latest GitHub release; a
    # failed lookup is non-fatal and the upgrade proceeds optimistically.
    update_cli = True
    from azure.cli.core.util import get_latest_from_github
    try:
        latest_version = get_latest_from_github()
        if latest_version and parse(latest_version) <= parse(local_version):
            logger.warning("You already have the latest azure-cli version: %s", local_version)
            update_cli = False
            if not update_all:
                return
    except Exception as ex:  # pylint: disable=broad-except
        logger.debug("Failed to get the latest version. %s", str(ex))
    # Extensions are only upgraded when --all was passed.
    exts = [ext.name for ext in get_extensions(ext_type=WheelExtension)] if update_all else []

    exit_code = 0
    installer = os.getenv(_ENV_AZ_INSTALLER) or ''
    installer = installer.upper()
    if update_cli:
        latest_version_msg = 'It will be updated to {}.'.format(latest_version) if yes \
            else 'Latest version available is {}.'.format(latest_version)
        logger.warning("Your current Azure CLI version is %s. %s", local_version, latest_version_msg)
        from knack.prompting import prompt_y_n, NoTTYException
        if not yes:
            logger.warning("Please check the release notes first: https://docs.microsoft.com/"
                           "cli/azure/release-notes-azure-cli")
            try:
                confirmation = prompt_y_n("Do you want to continue?", default='y')
            except NoTTYException:
                from azure.cli.core.azclierror import UnclassifiedUserFault
                raise UnclassifiedUserFault("No tty available.", "Please run command with --yes.")
            if not confirmation:
                telemetry.set_success("Upgrade stopped by user")
                return
        # Dispatch per install method; package-manager commands are prefixed
        # with sudo when not running as root.
        if installer == 'DEB':
            from azure.cli.core.util import in_cloud_console
            if in_cloud_console():
                raise CLIError("az upgrade is not supported in Cloud Shell.")
            apt_update_cmd = 'apt-get update'.split()
            az_update_cmd = 'apt-get install --only-upgrade -y azure-cli'.split()
            if os.geteuid() != 0:  # pylint: disable=no-member
                apt_update_cmd.insert(0, 'sudo')
                az_update_cmd.insert(0, 'sudo')
            exit_code = subprocess.call(apt_update_cmd)
            if exit_code == 0:
                logger.debug("Update azure cli with '%s'", " ".join(az_update_cmd))
                exit_code = subprocess.call(az_update_cmd)
        elif installer == 'RPM':
            from azure.cli.core.util import get_linux_distro
            distname, _ = get_linux_distro()
            if not distname:
                logger.warning(UPGRADE_MSG)
            else:
                distname = distname.lower().strip()
                if any(x in distname for x in ['centos', 'rhel', 'red hat', 'fedora']):
                    update_cmd = 'yum update -y azure-cli'.split()
                    if os.geteuid() != 0:  # pylint: disable=no-member
                        update_cmd.insert(0, 'sudo')
                    logger.debug("Update azure cli with '%s'", " ".join(update_cmd))
                    exit_code = subprocess.call(update_cmd)
                elif any(x in distname for x in ['opensuse', 'suse', 'sles']):
                    zypper_refresh_cmd = ['zypper', 'refresh']
                    az_update_cmd = 'zypper update -y azure-cli'.split()
                    if os.geteuid() != 0:  # pylint: disable=no-member
                        zypper_refresh_cmd.insert(0, 'sudo')
                        az_update_cmd.insert(0, 'sudo')
                    exit_code = subprocess.call(zypper_refresh_cmd)
                    if exit_code == 0:
                        logger.debug("Update azure cli with '%s'", " ".join(az_update_cmd))
                        exit_code = subprocess.call(az_update_cmd)
                else:
                    logger.warning(UPGRADE_MSG)
        elif installer == 'HOMEBREW':
            logger.debug("Update homebrew formulae")
            exit_code = subprocess.call(['brew', 'update'])
            if exit_code == 0:
                update_cmd = ['brew', 'upgrade', 'azure-cli']
                logger.debug("Update azure cli with '%s'", " ".join(update_cmd))
                exit_code = subprocess.call(update_cmd)
        elif installer == 'PIP':
            pip_args = [sys.executable, '-m', 'pip', 'install', '--upgrade', 'azure-cli', '-vv',
                        '--disable-pip-version-check', '--no-cache-dir']
            logger.debug("Update azure cli with '%s'", " ".join(pip_args))
            exit_code = subprocess.call(pip_args, shell=platform.system() == 'Windows')
        elif installer == 'DOCKER':
            logger.warning("Exit the container to pull latest image with 'docker pull mcr.microsoft.com/azure-cli' "
                           "or run 'pip install --upgrade azure-cli' in this container")
        elif installer == 'MSI':
            logger.debug("Update azure cli with MSI from https://aka.ms/installazurecliwindows")
            exit_code = subprocess.call(['powershell.exe', '-NoProfile', "Start-Process msiexec.exe -Wait -ArgumentList '/i https://aka.ms/installazurecliwindows'"])  # pylint: disable=line-too-long
        else:
            logger.warning(UPGRADE_MSG)
    if exit_code:
        err_msg = "CLI upgrade failed."
        logger.warning(err_msg)
        telemetry.set_failure(err_msg)
        sys.exit(exit_code)

    # Avoid using python modules directly as they may have been changed due to upgrade.
    # If you do need to use them, you may need to reload them and their dependent modules.
    # Otherwise you may have such issue https://github.com/Azure/azure-cli/issues/16952
    import importlib
    import json
    importlib.reload(subprocess)
    importlib.reload(json)

    # Verify the upgrade actually happened by asking the (possibly new) az
    # binary for its version in a fresh subprocess.
    version_result = subprocess.check_output(['az', 'version', '-o', 'json'], shell=platform.system() == 'Windows')
    version_json = json.loads(version_result)
    new_version = version_json['azure-cli-core']

    if update_cli and new_version == local_version:
        err_msg = "CLI upgrade failed or aborted."
        logger.warning(err_msg)
        telemetry.set_failure(err_msg)
        sys.exit(1)

    if exts:
        logger.warning("Upgrading extensions")
    for ext_name in exts:
        try:
            logger.warning("Checking update for %s", ext_name)
            subprocess.call(['az', 'extension', 'update', '-n', ext_name],
                            shell=platform.system() == 'Windows')
        except Exception as ex:  # pylint: disable=broad-except
            msg = "Extension {} update failed during az upgrade. {}".format(ext_name, str(ex))
            raise CLIError(msg)
    auto_upgrade_msg = "You can enable auto-upgrade with 'az config set auto-upgrade.enable=yes'. " \
        "More details in https://docs.microsoft.com/cli/azure/update-azure-cli#automatic-update"
    logger.warning("Upgrade finished.%s", "" if cmd.cli_ctx.config.getboolean('auto-upgrade', 'enable', False)
                   else auto_upgrade_msg)
def demo_style(cmd, theme=None):  # pylint: disable=unused-argument
    """Demonstrate the CLI's styled-output facility.

    Prints a sample of every supported ``Style``, then mocked-up interactive
    prompts, progress reports, error output and post-output hints, and finally
    one message per log level, so a theme's rendering can be inspected
    visually.

    :param cmd: The CLI command context (unused, but required by the command
        handler signature).
    :param theme: Optional theme name. When supplied it is installed on
        ``format_styled_text`` and therefore affects all subsequent styled
        output in this process.
    """
    from azure.cli.core.style import Style, print_styled_text, format_styled_text
    if theme:
        # The active theme lives as an attribute on format_styled_text;
        # overriding it here re-themes every later styled call.
        format_styled_text.theme = theme

    print_styled_text("[How to call print_styled_text]")
    # Print an empty line
    print_styled_text()

    # Every accepted argument shape: str(s), tuple(s), and list(s) of tuples.
    print_styled_text("- Print using a str")
    print_styled_text("- Print using multiple", "strs")
    print_styled_text((Style.PRIMARY, "- Print using a tuple"))
    print_styled_text((Style.PRIMARY, "- Print using multiple"), (Style.IMPORTANT, "tuples"))
    print_styled_text([(Style.PRIMARY, "- Print using a "), (Style.IMPORTANT, "list")])
    print_styled_text([(Style.PRIMARY, "- Print using multiple")], [(Style.IMPORTANT, "lists")])
    print_styled_text()

    # One color swatch per Style member, with its intended usage.
    print_styled_text("[Available styles]\n")
    placeholder = '████ {:8s}: {}\n'
    styled_text = [
        (Style.PRIMARY, placeholder.format("White", "Primary text color")),
        (Style.SECONDARY, placeholder.format("Grey", "Secondary text color")),
        (Style.IMPORTANT, placeholder.format("Magenta", "Important text color")),
        (Style.ACTION, placeholder.format(
            "Blue", "Commands, parameters, and system inputs (White in legacy powershell terminal)")),
        (Style.HYPERLINK, placeholder.format("Cyan", "Hyperlink")),
        (Style.ERROR, placeholder.format("Red", "Error message indicator")),
        (Style.SUCCESS, placeholder.format("Green", "Success message indicator")),
        (Style.WARNING, placeholder.format("Yellow", "Warning message indicator")),
    ]
    print_styled_text(styled_text)

    # Mock-up of an interactive single-select prompt.
    print_styled_text("[interactive]\n")
    # NOTE! Unicode character ⦾ ⦿ will most likely not be displayed correctly
    styled_text = [
        (Style.ACTION, "?"),
        (Style.PRIMARY, " Select a SKU for your app:\n"),
        (Style.PRIMARY, "⦾ Free       "),
        (Style.SECONDARY, "Dev/Test workloads: 1 GB memory, 60 minutes/day compute\n"),
        (Style.PRIMARY, "⦾ Basic      "),
        (Style.SECONDARY, "Dev/Test workloads: 1.75 GB memory, monthly charges apply\n"),
        (Style.PRIMARY, "⦾ Standard   "),
        (Style.SECONDARY, "Production workloads: 1.75 GB memory, monthly charges apply\n"),
        (Style.ACTION, "⦿ Premium    "),
        (Style.SECONDARY, "Production workloads: 3.5 GB memory, monthly charges apply\n"),
    ]
    print_styled_text(styled_text)

    # Mock-up of a multi-step progress report.
    print_styled_text("[progress report]\n")
    # NOTE! Unicode character ✓ will most likely not be displayed correctly
    styled_text = [
        (Style.SUCCESS, '(✓) Done: '),
        (Style.PRIMARY, "Creating a resource group for myfancyapp\n"),
        (Style.SUCCESS, '(✓) Done: '),
        (Style.PRIMARY, "Creating an App Service Plan for myfancyappplan on a "),
        (Style.IMPORTANT, "premium instance"),
        (Style.PRIMARY, " that has a "),
        (Style.IMPORTANT, "monthly charge"),
        (Style.PRIMARY, "\n"),
        (Style.SUCCESS, '(✓) Done: '),
        (Style.PRIMARY, "Creating a webapp named myfancyapp\n"),
    ]
    print_styled_text(styled_text)

    # Mock-up of an error message with a suggested recovery command.
    # (Header previously read "[error handing]" — typo fixed.)
    print_styled_text("[error handling]\n")
    styled_text = [
        (Style.ERROR, "ERROR: Command not found: az storage create\n"),
        (Style.PRIMARY, "TRY\n"),
        (Style.ACTION, "az storage account create --name"),
        (Style.PRIMARY, " mystorageaccount "),
        (Style.ACTION, "--resource-group"),
        (Style.PRIMARY, " MyResourceGroup\n"),
        (Style.SECONDARY, "Create a storage account. For more detail, see "),
        (Style.HYPERLINK, "https://docs.microsoft.com/en-us/azure/storage/common/storage-account-create?"
                          "tabs=azure-cli#create-a-storage-account-1"),
        (Style.SECONDARY, "\n"),
    ]
    print_styled_text(styled_text)

    # Mock-up of a hint printed after a command's main output.
    print_styled_text("[post-output hint]\n")
    styled_text = [
        (Style.PRIMARY, "The default subscription is "),
        (Style.IMPORTANT, "AzureSDKTest (0b1f6471-1bf0-4dda-aec3-cb9272f09590)"),
        (Style.PRIMARY, ". To switch to another subscription, run "),
        (Style.ACTION, "az account set --subscription"),
        (Style.PRIMARY, " <subscription ID>\n"),
        (Style.WARNING, "WARNING: The subscription has been disabled!\n")
    ]
    print_styled_text(styled_text)

    # One sample message per log level (which of these are visible depends on
    # the configured verbosity).
    print_styled_text("[logs]\n")
    logger.debug("This is a debug log entry.")
    logger.info("This is a info log entry.")
    logger.warning("This is a warning log entry.")
    logger.error("This is a error log entry.")
    logger.critical("This is a critical log entry.")
| 49.414545
| 198
| 0.618221
|
4a0e6a60e8a99e8eb9c37445f80b55bb3bfcf3b8
| 190,172
|
py
|
Python
|
pysnmp/LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:56:10 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# pysmi-generated MIB module body: executed by pysnmp's MibBuilder with
# `mibBuilder` already injected into the module namespace. Do not edit the
# generated statements by hand.

# Pull the base ASN.1 / refinement / SMI symbols out of the MibBuilder.
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
# Anchor nodes imported from the companion Liebert MIB modules.
lgpFlexConditions, = mibBuilder.importSymbols("LIEBERT-GP-CONDITIONS-MIB", "lgpFlexConditions")
liebertFlexibleConditionsModuleReg, = mibBuilder.importSymbols("LIEBERT-GP-REGISTRATION-MIB", "liebertFlexibleConditionsModuleReg")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, NotificationType, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Integer32, Counter32, Gauge32, Unsigned32, Counter64, TimeTicks, MibIdentifier, ModuleIdentity, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "NotificationType", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Integer32", "Counter32", "Gauge32", "Unsigned32", "Counter64", "TimeTicks", "MibIdentifier", "ModuleIdentity", "ObjectIdentity")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Module identity for LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB; last revision 2016-06-08.
# Descriptive texts are only attached when the builder was asked to load them.
liebertGlobalProductsFlexibleConditionsModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 1, 11, 1))
liebertGlobalProductsFlexibleConditionsModule.setRevisions(('2016-06-08 00:00',))
if mibBuilder.loadTexts: liebertGlobalProductsFlexibleConditionsModule.setLastUpdated('201606080000Z')
if mibBuilder.loadTexts: liebertGlobalProductsFlexibleConditionsModule.setOrganization('Liebert Corporation')
# Subtree under which every well-known flexible condition id below is registered.
lgpFlexConditionsWellKnown = MibIdentifier((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1))
# --- Well-known flexible condition (alarm/event) identities -----------------
# pysmi-generated registrations: one ObjectIdentity per condition under
# ...476.1.42.3.2.7.1. The final OID arc equals the numeric condition id
# embedded in each symbol name. Each identity is marked 'current' only when
# the builder loads descriptive texts.

# System input and bypass conditions.
lgpCondId4122SystemInputPowerProblem = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4122))
if mibBuilder.loadTexts: lgpCondId4122SystemInputPowerProblem.setStatus('current')
lgpCondId4132BypassOverloadPhaseA = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4132))
if mibBuilder.loadTexts: lgpCondId4132BypassOverloadPhaseA.setStatus('current')
lgpCondId4133BypassOverloadPhaseB = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4133))
if mibBuilder.loadTexts: lgpCondId4133BypassOverloadPhaseB.setStatus('current')
lgpCondId4134BypassOverloadPhaseC = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4134))
if mibBuilder.loadTexts: lgpCondId4134BypassOverloadPhaseC.setStatus('current')
lgpCondId4135BypassNotAvailable = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4135))
if mibBuilder.loadTexts: lgpCondId4135BypassNotAvailable.setStatus('current')
lgpCondId4137BypassAutoRetransferPrimed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4137))
if mibBuilder.loadTexts: lgpCondId4137BypassAutoRetransferPrimed.setStatus('current')
lgpCondId4138BypassAutoRetransferFailed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4138))
if mibBuilder.loadTexts: lgpCondId4138BypassAutoRetransferFailed.setStatus('current')
lgpCondId4139BypassExcessAutoRetransfers = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4139))
if mibBuilder.loadTexts: lgpCondId4139BypassExcessAutoRetransfers.setStatus('current')
lgpCondId4140BypassRestartInhibitExternal = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4140))
if mibBuilder.loadTexts: lgpCondId4140BypassRestartInhibitExternal.setStatus('current')
lgpCondId4141BypassBreakerClosed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4141))
if mibBuilder.loadTexts: lgpCondId4141BypassBreakerClosed.setStatus('current')
lgpCondId4142BypassStaticSwitchOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4142))
if mibBuilder.loadTexts: lgpCondId4142BypassStaticSwitchOverload.setStatus('current')
lgpCondId4143BypassStaticSwitchUnavailable = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4143))
if mibBuilder.loadTexts: lgpCondId4143BypassStaticSwitchUnavailable.setStatus('current')
lgpCondId4144BypassExcessivePulseParallel = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4144))
if mibBuilder.loadTexts: lgpCondId4144BypassExcessivePulseParallel.setStatus('current')
lgpCondId4145BypassAutoTransferFailed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4145))
if mibBuilder.loadTexts: lgpCondId4145BypassAutoTransferFailed.setStatus('current')
lgpCondId4146SystemInputPhsRotationError = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4146))
if mibBuilder.loadTexts: lgpCondId4146SystemInputPhsRotationError.setStatus('current')
lgpCondId4147SystemInputCurrentLimit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4147))
if mibBuilder.loadTexts: lgpCondId4147SystemInputCurrentLimit.setStatus('current')
# Battery state and test conditions.
lgpCondId4162BatteryLow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4162))
if mibBuilder.loadTexts: lgpCondId4162BatteryLow.setStatus('current')
lgpCondId4163OutputOffEndofDischarge = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4163))
if mibBuilder.loadTexts: lgpCondId4163OutputOffEndofDischarge.setStatus('current')
lgpCondId4164BatteryChargingError = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4164))
if mibBuilder.loadTexts: lgpCondId4164BatteryChargingError.setStatus('current')
lgpCondId4165BatteryChargingReducedExtrnl = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4165))
if mibBuilder.loadTexts: lgpCondId4165BatteryChargingReducedExtrnl.setStatus('current')
lgpCondId4166BatteryCapacityLow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4166))
if mibBuilder.loadTexts: lgpCondId4166BatteryCapacityLow.setStatus('current')
lgpCondId4167OutputOff = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4167))
if mibBuilder.loadTexts: lgpCondId4167OutputOff.setStatus('current')
lgpCondId4168BatteryDischarging = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4168))
if mibBuilder.loadTexts: lgpCondId4168BatteryDischarging.setStatus('current')
lgpCondId4169BatteryTemperatureImbalance = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4169))
if mibBuilder.loadTexts: lgpCondId4169BatteryTemperatureImbalance.setStatus('current')
lgpCondId4170BatteryEqualize = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4170))
if mibBuilder.loadTexts: lgpCondId4170BatteryEqualize.setStatus('current')
lgpCondId4171BatteryManualTestInProgress = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4171))
if mibBuilder.loadTexts: lgpCondId4171BatteryManualTestInProgress.setStatus('current')
lgpCondId4172BatteryAutoTestInProgress = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4172))
if mibBuilder.loadTexts: lgpCondId4172BatteryAutoTestInProgress.setStatus('current')
lgpCondId4173MainBatteryDisconnectOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4173))
if mibBuilder.loadTexts: lgpCondId4173MainBatteryDisconnectOpen.setStatus('current')
lgpCondId4174BatteryTemperatureSensorFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4174))
if mibBuilder.loadTexts: lgpCondId4174BatteryTemperatureSensorFault.setStatus('current')
lgpCondId4175BypassFrequencyError = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4175))
if mibBuilder.loadTexts: lgpCondId4175BypassFrequencyError.setStatus('current')
# Battery circuit breakers 1-8: one open-state plus open/close-failure triple each.
lgpCondId4176BatteryCircuitBreaker1Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4176))
if mibBuilder.loadTexts: lgpCondId4176BatteryCircuitBreaker1Open.setStatus('current')
lgpCondId4177BatteryBreaker1OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4177))
if mibBuilder.loadTexts: lgpCondId4177BatteryBreaker1OpenFailure.setStatus('current')
lgpCondId4178BatteryBreaker1CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4178))
if mibBuilder.loadTexts: lgpCondId4178BatteryBreaker1CloseFailure.setStatus('current')
lgpCondId4179BatteryCircuitBreaker2Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4179))
if mibBuilder.loadTexts: lgpCondId4179BatteryCircuitBreaker2Open.setStatus('current')
lgpCondId4180BatteryBreaker2OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4180))
if mibBuilder.loadTexts: lgpCondId4180BatteryBreaker2OpenFailure.setStatus('current')
lgpCondId4181BatteryBreaker2CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4181))
if mibBuilder.loadTexts: lgpCondId4181BatteryBreaker2CloseFailure.setStatus('current')
lgpCondId4182BatteryCircuitBreaker3Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4182))
if mibBuilder.loadTexts: lgpCondId4182BatteryCircuitBreaker3Open.setStatus('current')
lgpCondId4183BatteryBreaker3OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4183))
if mibBuilder.loadTexts: lgpCondId4183BatteryBreaker3OpenFailure.setStatus('current')
lgpCondId4184BatteryBreaker3CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4184))
if mibBuilder.loadTexts: lgpCondId4184BatteryBreaker3CloseFailure.setStatus('current')
lgpCondId4185BatteryCircuitBreaker4Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4185))
if mibBuilder.loadTexts: lgpCondId4185BatteryCircuitBreaker4Open.setStatus('current')
lgpCondId4186BatteryBreaker4OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4186))
if mibBuilder.loadTexts: lgpCondId4186BatteryBreaker4OpenFailure.setStatus('current')
lgpCondId4187BatteryBreaker4CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4187))
if mibBuilder.loadTexts: lgpCondId4187BatteryBreaker4CloseFailure.setStatus('current')
lgpCondId4188BatteryCircuitBreaker5Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4188))
if mibBuilder.loadTexts: lgpCondId4188BatteryCircuitBreaker5Open.setStatus('current')
lgpCondId4189BatteryBreaker5OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4189))
if mibBuilder.loadTexts: lgpCondId4189BatteryBreaker5OpenFailure.setStatus('current')
lgpCondId4190BatteryBreaker5CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4190))
if mibBuilder.loadTexts: lgpCondId4190BatteryBreaker5CloseFailure.setStatus('current')
lgpCondId4191BatteryCircuitBreaker6Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4191))
if mibBuilder.loadTexts: lgpCondId4191BatteryCircuitBreaker6Open.setStatus('current')
lgpCondId4192BatteryBreaker6OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4192))
if mibBuilder.loadTexts: lgpCondId4192BatteryBreaker6OpenFailure.setStatus('current')
lgpCondId4193BatteryBreaker6CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4193))
if mibBuilder.loadTexts: lgpCondId4193BatteryBreaker6CloseFailure.setStatus('current')
lgpCondId4194BatteryCircuitBreaker7Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4194))
if mibBuilder.loadTexts: lgpCondId4194BatteryCircuitBreaker7Open.setStatus('current')
lgpCondId4195BatteryBreaker7OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4195))
if mibBuilder.loadTexts: lgpCondId4195BatteryBreaker7OpenFailure.setStatus('current')
lgpCondId4196BatteryBreaker7CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4196))
if mibBuilder.loadTexts: lgpCondId4196BatteryBreaker7CloseFailure.setStatus('current')
lgpCondId4197BatteryCircuitBreaker8Open = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4197))
if mibBuilder.loadTexts: lgpCondId4197BatteryCircuitBreaker8Open.setStatus('current')
lgpCondId4198BatteryBreaker8OpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4198))
if mibBuilder.loadTexts: lgpCondId4198BatteryBreaker8OpenFailure.setStatus('current')
lgpCondId4199BatteryBreaker8CloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4199))
if mibBuilder.loadTexts: lgpCondId4199BatteryBreaker8CloseFailure.setStatus('current')
lgpCondId4200BatteryChargingInhibited = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4200))
if mibBuilder.loadTexts: lgpCondId4200BatteryChargingInhibited.setStatus('current')
# System shutdown, bypass-transfer and battery-monitor conditions.
lgpCondId4213SystemShutdownEPO = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4213))
if mibBuilder.loadTexts: lgpCondId4213SystemShutdownEPO.setStatus('current')
lgpCondId4214SystemShutdownREPO = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4214))
if mibBuilder.loadTexts: lgpCondId4214SystemShutdownREPO.setStatus('current')
lgpCondId4215SystemOutputOff = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4215))
if mibBuilder.loadTexts: lgpCondId4215SystemOutputOff.setStatus('current')
lgpCondId4216BypassBackfeedDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4216))
if mibBuilder.loadTexts: lgpCondId4216BypassBackfeedDetected.setStatus('current')
lgpCondId4217BypassManualXfrInhibited = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4217))
if mibBuilder.loadTexts: lgpCondId4217BypassManualXfrInhibited.setStatus('current')
lgpCondId4218BypassManualRexfrInhibited = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4218))
if mibBuilder.loadTexts: lgpCondId4218BypassManualRexfrInhibited.setStatus('current')
lgpCondId4219BatteryOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4219))
if mibBuilder.loadTexts: lgpCondId4219BatteryOverTemperature.setStatus('current')
lgpCondId4220BatteryExternalMonitor1 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4220))
if mibBuilder.loadTexts: lgpCondId4220BatteryExternalMonitor1.setStatus('current')
lgpCondId4221BatteryExternalMonitor2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4221))
if mibBuilder.loadTexts: lgpCondId4221BatteryExternalMonitor2.setStatus('current')
lgpCondId4222BatteryGroundFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4222))
if mibBuilder.loadTexts: lgpCondId4222BatteryGroundFault.setStatus('current')
lgpCondId4229EmergencyPowerOffLatched = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4229))
if mibBuilder.loadTexts: lgpCondId4229EmergencyPowerOffLatched.setStatus('current')
lgpCondId4230SystemOutputLowPowerFactor = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4230))
if mibBuilder.loadTexts: lgpCondId4230SystemOutputLowPowerFactor.setStatus('current')
lgpCondId4231OutputCurrentExceedsThreshold = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4231))
if mibBuilder.loadTexts: lgpCondId4231OutputCurrentExceedsThreshold.setStatus('current')
# Inverter conditions.
lgpCondId4233InverterFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4233))
if mibBuilder.loadTexts: lgpCondId4233InverterFailure.setStatus('current')
lgpCondId4234InverterOverloadPhaseA = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4234))
if mibBuilder.loadTexts: lgpCondId4234InverterOverloadPhaseA.setStatus('current')
lgpCondId4235InverterOverloadPhaseB = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4235))
if mibBuilder.loadTexts: lgpCondId4235InverterOverloadPhaseB.setStatus('current')
lgpCondId4236InverterOverloadPhaseC = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4236))
if mibBuilder.loadTexts: lgpCondId4236InverterOverloadPhaseC.setStatus('current')
lgpCondId4237InverterInhibitExternal = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4237))
if mibBuilder.loadTexts: lgpCondId4237InverterInhibitExternal.setStatus('current')
lgpCondId4238InverterOutBreakerOpenFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4238))
if mibBuilder.loadTexts: lgpCondId4238InverterOutBreakerOpenFail.setStatus('current')
lgpCondId4239InverterOutBreakerCloseFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4239))
if mibBuilder.loadTexts: lgpCondId4239InverterOutBreakerCloseFail.setStatus('current')
# External input contacts 01-16.
lgpCondId4270InputContact01 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4270))
if mibBuilder.loadTexts: lgpCondId4270InputContact01.setStatus('current')
lgpCondId4271InputContact02 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4271))
if mibBuilder.loadTexts: lgpCondId4271InputContact02.setStatus('current')
lgpCondId4272InputContact03 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4272))
if mibBuilder.loadTexts: lgpCondId4272InputContact03.setStatus('current')
lgpCondId4273InputContact04 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4273))
if mibBuilder.loadTexts: lgpCondId4273InputContact04.setStatus('current')
lgpCondId4274InputContact05 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4274))
if mibBuilder.loadTexts: lgpCondId4274InputContact05.setStatus('current')
lgpCondId4275InputContact06 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4275))
if mibBuilder.loadTexts: lgpCondId4275InputContact06.setStatus('current')
lgpCondId4276InputContact07 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4276))
if mibBuilder.loadTexts: lgpCondId4276InputContact07.setStatus('current')
lgpCondId4277InputContact08 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4277))
if mibBuilder.loadTexts: lgpCondId4277InputContact08.setStatus('current')
lgpCondId4278InputContact09 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4278))
if mibBuilder.loadTexts: lgpCondId4278InputContact09.setStatus('current')
lgpCondId4279InputContact10 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4279))
if mibBuilder.loadTexts: lgpCondId4279InputContact10.setStatus('current')
lgpCondId4280InputContact11 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4280))
if mibBuilder.loadTexts: lgpCondId4280InputContact11.setStatus('current')
lgpCondId4281InputContact12 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4281))
if mibBuilder.loadTexts: lgpCondId4281InputContact12.setStatus('current')
lgpCondId4282InputContact13 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4282))
if mibBuilder.loadTexts: lgpCondId4282InputContact13.setStatus('current')
lgpCondId4283InputContact14 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4283))
if mibBuilder.loadTexts: lgpCondId4283InputContact14.setStatus('current')
lgpCondId4284InputContact15 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4284))
if mibBuilder.loadTexts: lgpCondId4284InputContact15.setStatus('current')
lgpCondId4285InputContact16 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4285))
if mibBuilder.loadTexts: lgpCondId4285InputContact16.setStatus('current')
# Output current limits, rectifier and miscellaneous system conditions.
lgpCondId4286OutputAmpOverUserLimitPhsA = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4286))
if mibBuilder.loadTexts: lgpCondId4286OutputAmpOverUserLimitPhsA.setStatus('current')
lgpCondId4287OutputAmpOverUserLimitPhsB = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4287))
if mibBuilder.loadTexts: lgpCondId4287OutputAmpOverUserLimitPhsB.setStatus('current')
lgpCondId4288OutputAmpOverUserLimitPhsC = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4288))
if mibBuilder.loadTexts: lgpCondId4288OutputAmpOverUserLimitPhsC.setStatus('current')
lgpCondId4289InverterTransferInhibitExt = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4289))
if mibBuilder.loadTexts: lgpCondId4289InverterTransferInhibitExt.setStatus('current')
lgpCondId4290InverterShutdownOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4290))
if mibBuilder.loadTexts: lgpCondId4290InverterShutdownOverload.setStatus('current')
lgpCondId4294InletAirOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4294))
if mibBuilder.loadTexts: lgpCondId4294InletAirOverTemperature.setStatus('current')
lgpCondId4295RectifierFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4295))
if mibBuilder.loadTexts: lgpCondId4295RectifierFailure.setStatus('current')
lgpCondId4296RectifierOperationInhibitExt = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4296))
if mibBuilder.loadTexts: lgpCondId4296RectifierOperationInhibitExt.setStatus('current')
lgpCondId4297UPSOutputonInverter = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4297))
if mibBuilder.loadTexts: lgpCondId4297UPSOutputonInverter.setStatus('current')
lgpCondId4298UPSOutputonBypass = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4298))
if mibBuilder.loadTexts: lgpCondId4298UPSOutputonBypass.setStatus('current')
lgpCondId4299OutputLoadonMaintBypass = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4299))
if mibBuilder.loadTexts: lgpCondId4299OutputLoadonMaintBypass.setStatus('current')
lgpCondId4300InternalCommunicationsFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4300))
if mibBuilder.loadTexts: lgpCondId4300InternalCommunicationsFailure.setStatus('current')
lgpCondId4308DCBusGroundFaultPositive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4308))
if mibBuilder.loadTexts: lgpCondId4308DCBusGroundFaultPositive.setStatus('current')
lgpCondId4309DCBusGroundFaultNegative = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4309))
if mibBuilder.loadTexts: lgpCondId4309DCBusGroundFaultNegative.setStatus('current')
lgpCondId4310EquipmentOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4310))
if mibBuilder.loadTexts: lgpCondId4310EquipmentOverTemperature.setStatus('current')
lgpCondId4311SystemFanFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4311))
if mibBuilder.loadTexts: lgpCondId4311SystemFanFailure.setStatus('current')
lgpCondId4313PasswordChanged = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4313))
if mibBuilder.loadTexts: lgpCondId4313PasswordChanged.setStatus('current')
lgpCondId4314PowerSupplyFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4314))
if mibBuilder.loadTexts: lgpCondId4314PowerSupplyFailure.setStatus('current')
lgpCondId4315OnGenerator = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4315))
if mibBuilder.loadTexts: lgpCondId4315OnGenerator.setStatus('current')
lgpCondId4316AutoRestartInProgress = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4316))
if mibBuilder.loadTexts: lgpCondId4316AutoRestartInProgress.setStatus('current')
lgpCondId4317AutoRestartInhibitedExt = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4317))
if mibBuilder.loadTexts: lgpCondId4317AutoRestartInhibitedExt.setStatus('current')
lgpCondId4320InitiatedTransfertoBypass = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4320))
if mibBuilder.loadTexts: lgpCondId4320InitiatedTransfertoBypass.setStatus('current')
lgpCondId4321InitiatedTransfertoInverter = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4321))
if mibBuilder.loadTexts: lgpCondId4321InitiatedTransfertoInverter.setStatus('current')
lgpCondId4322BatteryTestPassed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4322))
if mibBuilder.loadTexts: lgpCondId4322BatteryTestPassed.setStatus('current')
lgpCondId4323BatteryTestFailed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4323))
if mibBuilder.loadTexts: lgpCondId4323BatteryTestFailed.setStatus('current')
lgpCondId4324BatteryTestManuallyStopped = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4324))
if mibBuilder.loadTexts: lgpCondId4324BatteryTestManuallyStopped.setStatus('current')
lgpCondId4325BackfeedBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4325))
if mibBuilder.loadTexts: lgpCondId4325BackfeedBreakerOpen.setStatus('current')
lgpCondId4341VelocityAuthenticationFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4341))
if mibBuilder.loadTexts: lgpCondId4341VelocityAuthenticationFailure.setStatus('current')
# Receptacle, branch and PDU current conditions.
lgpCondId4360ReceptacleOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4360))
if mibBuilder.loadTexts: lgpCondId4360ReceptacleOverCurrent.setStatus('current')
lgpCondId4361ReceptacleUnderCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4361))
if mibBuilder.loadTexts: lgpCondId4361ReceptacleUnderCurrent.setStatus('current')
lgpCondId4382SystemInputCurrentImbalance = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4382))
if mibBuilder.loadTexts: lgpCondId4382SystemInputCurrentImbalance.setStatus('current')
lgpCondId4383BypassStaticSwitchOffExtrnl = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4383))
if mibBuilder.loadTexts: lgpCondId4383BypassStaticSwitchOffExtrnl.setStatus('current')
lgpCondId4384BatteryEoDDisconnect = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4384))
if mibBuilder.loadTexts: lgpCondId4384BatteryEoDDisconnect.setStatus('current')
lgpCondId4389SystemOutputFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4389))
if mibBuilder.loadTexts: lgpCondId4389SystemOutputFault.setStatus('current')
lgpCondId4390InverterOffExternal = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4390))
if mibBuilder.loadTexts: lgpCondId4390InverterOffExternal.setStatus('current')
lgpCondId4391InverterStaticSwitchSCRShort = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4391))
if mibBuilder.loadTexts: lgpCondId4391InverterStaticSwitchSCRShort.setStatus('current')
lgpCondId4392TemperatureSensorError = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4392))
if mibBuilder.loadTexts: lgpCondId4392TemperatureSensorError.setStatus('current')
lgpCondId4406BranchOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4406))
if mibBuilder.loadTexts: lgpCondId4406BranchOverCurrent.setStatus('current')
lgpCondId4407BranchUnderCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4407))
if mibBuilder.loadTexts: lgpCondId4407BranchUnderCurrent.setStatus('current')
lgpCondId4416BranchOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4416))
if mibBuilder.loadTexts: lgpCondId4416BranchOverCurrent.setStatus('current')
lgpCondId4417BranchUnderCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4417))
if mibBuilder.loadTexts: lgpCondId4417BranchUnderCurrent.setStatus('current')
lgpCondId4421BranchFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4421))
if mibBuilder.loadTexts: lgpCondId4421BranchFailure.setStatus('current')
lgpCondId4436PDUOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4436))
if mibBuilder.loadTexts: lgpCondId4436PDUOverCurrent.setStatus('current')
lgpCondId4437PDUUnderCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4437))
if mibBuilder.loadTexts: lgpCondId4437PDUUnderCurrent.setStatus('current')
lgpCondId4438SystemInternalTemperatureRise = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4438))
if mibBuilder.loadTexts: lgpCondId4438SystemInternalTemperatureRise.setStatus('current')
lgpCondId4439AutomaticRestartFailed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4439))
if mibBuilder.loadTexts: lgpCondId4439AutomaticRestartFailed.setStatus('current')
lgpCondId4440FuseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4440))
if mibBuilder.loadTexts: lgpCondId4440FuseFailure.setStatus('current')
lgpCondId4441SystemControllerError = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4441))
if mibBuilder.loadTexts: lgpCondId4441SystemControllerError.setStatus('current')
lgpCondId4442SystemBreakersOpenFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4442))
if mibBuilder.loadTexts: lgpCondId4442SystemBreakersOpenFailure.setStatus('current')
lgpCondId4448PDUOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4448))
if mibBuilder.loadTexts: lgpCondId4448PDUOverCurrent.setStatus('current')
lgpCondId4449PDUUnderCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4449))
if mibBuilder.loadTexts: lgpCondId4449PDUUnderCurrent.setStatus('current')
lgpCondId4468PDUOverCurrentL1 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4468))
if mibBuilder.loadTexts: lgpCondId4468PDUOverCurrentL1.setStatus('current')
lgpCondId4469PDUOverCurrentL2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4469))
if mibBuilder.loadTexts: lgpCondId4469PDUOverCurrentL2.setStatus('current')
lgpCondId4470PDUOverCurrentL3 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4470))
if mibBuilder.loadTexts: lgpCondId4470PDUOverCurrentL3.setStatus('current')
lgpCondId4471PDUUnderCurrentL1 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4471))
if mibBuilder.loadTexts: lgpCondId4471PDUUnderCurrentL1.setStatus('current')
lgpCondId4472PDUUnderCurrentL2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4472))
if mibBuilder.loadTexts: lgpCondId4472PDUUnderCurrentL2.setStatus('current')
lgpCondId4473PDUUnderCurrentL3 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4473))
if mibBuilder.loadTexts: lgpCondId4473PDUUnderCurrentL3.setStatus('current')
lgpCondId4492ReceptaclePowerStateOn = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4492))
if mibBuilder.loadTexts: lgpCondId4492ReceptaclePowerStateOn.setStatus('current')
lgpCondId4493ReceptaclePowerStateOff = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4493))
if mibBuilder.loadTexts: lgpCondId4493ReceptaclePowerStateOff.setStatus('current')
lgpCondId4494BranchBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4494))
if mibBuilder.loadTexts: lgpCondId4494BranchBreakerOpen.setStatus('current')
lgpCondId4495DeviceConfigurationChange = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4495))
if mibBuilder.loadTexts: lgpCondId4495DeviceConfigurationChange.setStatus('current')
lgpCondId4496BasicDisplayModuleRemoved = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4496))
if mibBuilder.loadTexts: lgpCondId4496BasicDisplayModuleRemoved.setStatus('current')
lgpCondId4497BasicDisplayModuleDiscovered = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4497))
if mibBuilder.loadTexts: lgpCondId4497BasicDisplayModuleDiscovered.setStatus('current')
lgpCondId4500PDUOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4500))
if mibBuilder.loadTexts: lgpCondId4500PDUOverCurrent.setStatus('current')
lgpCondId4501PDUUnderCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4501))
if mibBuilder.loadTexts: lgpCondId4501PDUUnderCurrent.setStatus('current')
lgpCondId4502PDUFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4502))
if mibBuilder.loadTexts: lgpCondId4502PDUFailure.setStatus('current')
lgpCondId4503PDUCommunicationFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4503))
if mibBuilder.loadTexts: lgpCondId4503PDUCommunicationFail.setStatus('current')
lgpCondId4504BranchRemoved = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4504))
if mibBuilder.loadTexts: lgpCondId4504BranchRemoved.setStatus('current')
lgpCondId4505BranchDiscovered = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4505))
if mibBuilder.loadTexts: lgpCondId4505BranchDiscovered.setStatus('current')
lgpCondId4506BranchOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4506))
if mibBuilder.loadTexts: lgpCondId4506BranchOverCurrent.setStatus('current')
lgpCondId4507BranchCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4507))
if mibBuilder.loadTexts: lgpCondId4507BranchCurrent.setStatus('current')
lgpCondId4508ReceptacleLoadRemoved = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4508))
if mibBuilder.loadTexts: lgpCondId4508ReceptacleLoadRemoved.setStatus('current')
lgpCondId4509ReceptacleLoadAdded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4509))
if mibBuilder.loadTexts: lgpCondId4509ReceptacleLoadAdded.setStatus('current')
lgpCondId4523ModuleRemoved = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4523))
if mibBuilder.loadTexts: lgpCondId4523ModuleRemoved.setStatus('current')
lgpCondId4524ModuleAdded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4524))
if mibBuilder.loadTexts: lgpCondId4524ModuleAdded.setStatus('current')
lgpCondId4550FirmwareUpdateRequired = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4550))
if mibBuilder.loadTexts: lgpCondId4550FirmwareUpdateRequired.setStatus('current')
lgpCondId4551GenericTestEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4551))
if mibBuilder.loadTexts: lgpCondId4551GenericTestEvent.setStatus('current')
lgpCondId4580OverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4580))
if mibBuilder.loadTexts: lgpCondId4580OverTemperature.setStatus('current')
lgpCondId4581UnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4581))
if mibBuilder.loadTexts: lgpCondId4581UnderTemperature.setStatus('current')
lgpCondId4588OverRelativeHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4588))
if mibBuilder.loadTexts: lgpCondId4588OverRelativeHumidity.setStatus('current')
lgpCondId4589UnderRelativeHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4589))
if mibBuilder.loadTexts: lgpCondId4589UnderRelativeHumidity.setStatus('current')
lgpCondId4601ExternalAirSensorAOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4601))
if mibBuilder.loadTexts: lgpCondId4601ExternalAirSensorAOverTemperature.setStatus('current')
lgpCondId4604ExternalAirSensorBOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4604))
if mibBuilder.loadTexts: lgpCondId4604ExternalAirSensorBOverTemperature.setStatus('current')
lgpCondId4608ExtAirSensorAUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4608))
if mibBuilder.loadTexts: lgpCondId4608ExtAirSensorAUnderTemperature.setStatus('current')
lgpCondId4611ExtAirSensorBUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4611))
if mibBuilder.loadTexts: lgpCondId4611ExtAirSensorBUnderTemperature.setStatus('current')
lgpCondId4615ExtDewPointOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4615))
if mibBuilder.loadTexts: lgpCondId4615ExtDewPointOverTemperature.setStatus('current')
lgpCondId4618ExternalAirSensorAIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4618))
if mibBuilder.loadTexts: lgpCondId4618ExternalAirSensorAIssue.setStatus('current')
lgpCondId4621ExternalAirSensorBIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4621))
if mibBuilder.loadTexts: lgpCondId4621ExternalAirSensorBIssue.setStatus('current')
lgpCondId4626SupplyChilledWaterOverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4626))
if mibBuilder.loadTexts: lgpCondId4626SupplyChilledWaterOverTemp.setStatus('current')
lgpCondId4629SupplyChilledWaterTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4629))
if mibBuilder.loadTexts: lgpCondId4629SupplyChilledWaterTempSensorIssue.setStatus('current')
lgpCondId4634SupplyRefrigerantOverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4634))
if mibBuilder.loadTexts: lgpCondId4634SupplyRefrigerantOverTemp.setStatus('current')
lgpCondId4637SupplyRefrigerantUnderTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4637))
if mibBuilder.loadTexts: lgpCondId4637SupplyRefrigerantUnderTemp.setStatus('current')
lgpCondId4640SupplyRefrigerantTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4640))
if mibBuilder.loadTexts: lgpCondId4640SupplyRefrigerantTempSensorIssue.setStatus('current')
lgpCondId4645SupplyFluidOverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4645))
if mibBuilder.loadTexts: lgpCondId4645SupplyFluidOverTemp.setStatus('current')
lgpCondId4648SupplyFluidUnderTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4648))
if mibBuilder.loadTexts: lgpCondId4648SupplyFluidUnderTemp.setStatus('current')
lgpCondId4651SupplyFluidTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4651))
if mibBuilder.loadTexts: lgpCondId4651SupplyFluidTempSensorIssue.setStatus('current')
lgpCondId4656Pump1LossofFlow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4656))
if mibBuilder.loadTexts: lgpCondId4656Pump1LossofFlow.setStatus('current')
lgpCondId4659Pump2LossofFlow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4659))
if mibBuilder.loadTexts: lgpCondId4659Pump2LossofFlow.setStatus('current')
lgpCondId4662PumpShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4662))
if mibBuilder.loadTexts: lgpCondId4662PumpShortCycle.setStatus('current')
lgpCondId4669Compressor1AHighHeadPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4669))
if mibBuilder.loadTexts: lgpCondId4669Compressor1AHighHeadPressure.setStatus('current')
lgpCondId4672Compressor1BHighHeadPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4672))
if mibBuilder.loadTexts: lgpCondId4672Compressor1BHighHeadPressure.setStatus('current')
lgpCondId4675Compressor2AHighHeadPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4675))
if mibBuilder.loadTexts: lgpCondId4675Compressor2AHighHeadPressure.setStatus('current')
lgpCondId4678Compressor2BHighHeadPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4678))
if mibBuilder.loadTexts: lgpCondId4678Compressor2BHighHeadPressure.setStatus('current')
lgpCondId4681Compressor1AShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4681))
if mibBuilder.loadTexts: lgpCondId4681Compressor1AShortCycle.setStatus('current')
lgpCondId4684Compressor1BShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4684))
if mibBuilder.loadTexts: lgpCondId4684Compressor1BShortCycle.setStatus('current')
lgpCondId4687Compressor2AShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4687))
if mibBuilder.loadTexts: lgpCondId4687Compressor2AShortCycle.setStatus('current')
lgpCondId4690Compressor2BShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4690))
if mibBuilder.loadTexts: lgpCondId4690Compressor2BShortCycle.setStatus('current')
lgpCondId4693Tandem1LowSuctionPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4693))
if mibBuilder.loadTexts: lgpCondId4693Tandem1LowSuctionPressure.setStatus('current')
lgpCondId4696Tandem2LowSuctionPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4696))
if mibBuilder.loadTexts: lgpCondId4696Tandem2LowSuctionPressure.setStatus('current')
lgpCondId4703ChilledWaterControlValvePosition = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4703))
if mibBuilder.loadTexts: lgpCondId4703ChilledWaterControlValvePosition.setStatus('current')
lgpCondId4711SystemCondensationDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4711))
if mibBuilder.loadTexts: lgpCondId4711SystemCondensationDetected.setStatus('current')
lgpCondId4714ShutdownLossOfPower = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4714))
if mibBuilder.loadTexts: lgpCondId4714ShutdownLossOfPower.setStatus('current')
lgpCondId4720SmokeDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4720))
if mibBuilder.loadTexts: lgpCondId4720SmokeDetected.setStatus('current')
lgpCondId4723WaterUnderFloor = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4723))
if mibBuilder.loadTexts: lgpCondId4723WaterUnderFloor.setStatus('current')
lgpCondId4726ServiceRequired = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4726))
if mibBuilder.loadTexts: lgpCondId4726ServiceRequired.setStatus('current')
lgpCondId4729FanIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4729))
if mibBuilder.loadTexts: lgpCondId4729FanIssue.setStatus('current')
lgpCondId4732ReceptacleLoadDropped = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4732))
if mibBuilder.loadTexts: lgpCondId4732ReceptacleLoadDropped.setStatus('current')
lgpCondId4740BatteryAutomaticTestInhibited = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4740))
if mibBuilder.loadTexts: lgpCondId4740BatteryAutomaticTestInhibited.setStatus('current')
lgpCondId4741BatterySelfTest = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4741))
if mibBuilder.loadTexts: lgpCondId4741BatterySelfTest.setStatus('current')
lgpCondId4742BatteryLowShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4742))
if mibBuilder.loadTexts: lgpCondId4742BatteryLowShutdown.setStatus('current')
lgpCondId4747EquipmentTemperatureSensorFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4747))
if mibBuilder.loadTexts: lgpCondId4747EquipmentTemperatureSensorFail.setStatus('current')
lgpCondId4749SystemFanFailureRedundant = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4749))
if mibBuilder.loadTexts: lgpCondId4749SystemFanFailureRedundant.setStatus('current')
lgpCondId4750MultipleFanFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4750))
if mibBuilder.loadTexts: lgpCondId4750MultipleFanFailure.setStatus('current')
lgpCondId4753MainControllerFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4753))
if mibBuilder.loadTexts: lgpCondId4753MainControllerFault.setStatus('current')
lgpCondId4754SystemBreakersCloseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4754))
if mibBuilder.loadTexts: lgpCondId4754SystemBreakersCloseFailure.setStatus('current')
lgpCondId4755InputFilterCycleLock = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4755))
if mibBuilder.loadTexts: lgpCondId4755InputFilterCycleLock.setStatus('current')
lgpCondId4756ServiceCodeActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4756))
if mibBuilder.loadTexts: lgpCondId4756ServiceCodeActive.setStatus('current')
lgpCondId4757LBSActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4757))
if mibBuilder.loadTexts: lgpCondId4757LBSActive.setStatus('current')
lgpCondId4758LBSInhibited = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4758))
if mibBuilder.loadTexts: lgpCondId4758LBSInhibited.setStatus('current')
lgpCondId4759LeadingPowerFactor = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4759))
if mibBuilder.loadTexts: lgpCondId4759LeadingPowerFactor.setStatus('current')
lgpCondId4760ControlsResetRequired = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4760))
if mibBuilder.loadTexts: lgpCondId4760ControlsResetRequired.setStatus('current')
lgpCondId4823ParallelCommWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4823))
if mibBuilder.loadTexts: lgpCondId4823ParallelCommWarning.setStatus('current')
lgpCondId4824SystemCommFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4824))
if mibBuilder.loadTexts: lgpCondId4824SystemCommFail.setStatus('current')
lgpCondId4825LossofRedundancy = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4825))
if mibBuilder.loadTexts: lgpCondId4825LossofRedundancy.setStatus('current')
lgpCondId4826BPSSStartupInhibit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4826))
if mibBuilder.loadTexts: lgpCondId4826BPSSStartupInhibit.setStatus('current')
lgpCondId4827MMSTransferInhibit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4827))
if mibBuilder.loadTexts: lgpCondId4827MMSTransferInhibit.setStatus('current')
lgpCondId4828MMSRetransferInhibit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4828))
if mibBuilder.loadTexts: lgpCondId4828MMSRetransferInhibit.setStatus('current')
lgpCondId4830MMSLossofSyncPulse = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4830))
if mibBuilder.loadTexts: lgpCondId4830MMSLossofSyncPulse.setStatus('current')
lgpCondId4831MMSOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4831))
if mibBuilder.loadTexts: lgpCondId4831MMSOverload.setStatus('current')
lgpCondId4834MMSOnBattery = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4834))
if mibBuilder.loadTexts: lgpCondId4834MMSOnBattery.setStatus('current')
lgpCondId4835MMSLowBatteryWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4835))
if mibBuilder.loadTexts: lgpCondId4835MMSLowBatteryWarning.setStatus('current')
lgpCondId4906LowAmbientTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4906))
if mibBuilder.loadTexts: lgpCondId4906LowAmbientTemperature.setStatus('current')
lgpCondId4907HighAmbientTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4907))
if mibBuilder.loadTexts: lgpCondId4907HighAmbientTemperature.setStatus('current')
lgpCondId4908LowOverallVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4908))
if mibBuilder.loadTexts: lgpCondId4908LowOverallVoltage.setStatus('current')
lgpCondId4909HighOverallVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4909))
if mibBuilder.loadTexts: lgpCondId4909HighOverallVoltage.setStatus('current')
lgpCondId4910HighBatteryStringCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4910))
if mibBuilder.loadTexts: lgpCondId4910HighBatteryStringCurrent.setStatus('current')
lgpCondId4911LowBatteryStringFloatCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4911))
if mibBuilder.loadTexts: lgpCondId4911LowBatteryStringFloatCurrent.setStatus('current')
lgpCondId4912HighBatteryStringFloatCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4912))
if mibBuilder.loadTexts: lgpCondId4912HighBatteryStringFloatCurrent.setStatus('current')
lgpCondId4913HighBatteryStringRippleCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4913))
if mibBuilder.loadTexts: lgpCondId4913HighBatteryStringRippleCurrent.setStatus('current')
lgpCondId4914BatteryStringDischargeDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4914))
if mibBuilder.loadTexts: lgpCondId4914BatteryStringDischargeDetected.setStatus('current')
lgpCondId4915MaximumDischargeTimeExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4915))
if mibBuilder.loadTexts: lgpCondId4915MaximumDischargeTimeExceeded.setStatus('current')
lgpCondId4916DischargeLowOverallVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4916))
if mibBuilder.loadTexts: lgpCondId4916DischargeLowOverallVoltage.setStatus('current')
lgpCondId4917DischargeLowCellVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4917))
if mibBuilder.loadTexts: lgpCondId4917DischargeLowCellVoltage.setStatus('current')
lgpCondId4918DischargeHighBatteryStringCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4918))
if mibBuilder.loadTexts: lgpCondId4918DischargeHighBatteryStringCurrent.setStatus('current')
lgpCondId4919ExcessiveCelltoCellTemperatureDeviation = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4919))
if mibBuilder.loadTexts: lgpCondId4919ExcessiveCelltoCellTemperatureDeviation.setStatus('current')
lgpCondId4920ExcessiveCelltoAmbientTemperatureDeviation = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4920))
if mibBuilder.loadTexts: lgpCondId4920ExcessiveCelltoAmbientTemperatureDeviation.setStatus('current')
lgpCondId4964LowCellVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4964))
if mibBuilder.loadTexts: lgpCondId4964LowCellVoltage.setStatus('current')
lgpCondId4965HighCellVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4965))
if mibBuilder.loadTexts: lgpCondId4965HighCellVoltage.setStatus('current')
lgpCondId4966LowCellTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4966))
if mibBuilder.loadTexts: lgpCondId4966LowCellTemperature.setStatus('current')
lgpCondId4967HighCellTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4967))
if mibBuilder.loadTexts: lgpCondId4967HighCellTemperature.setStatus('current')
lgpCondId4968LowInternalResistance = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4968))
if mibBuilder.loadTexts: lgpCondId4968LowInternalResistance.setStatus('current')
lgpCondId4969HighInternalResistance = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4969))
if mibBuilder.loadTexts: lgpCondId4969HighInternalResistance.setStatus('current')
lgpCondId4970HighIntercellResistance = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4970))
if mibBuilder.loadTexts: lgpCondId4970HighIntercellResistance.setStatus('current')
lgpCondId4978IntertierResistanceHigh = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4978))
if mibBuilder.loadTexts: lgpCondId4978IntertierResistanceHigh.setStatus('current')
lgpCondId4980SupplyChilledWaterLossofFlow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4980))
if mibBuilder.loadTexts: lgpCondId4980SupplyChilledWaterLossofFlow.setStatus('current')
lgpCondId4983SupplyRefrigOverTempBand1 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4983))
if mibBuilder.loadTexts: lgpCondId4983SupplyRefrigOverTempBand1.setStatus('current')
lgpCondId4986SupplyRefrigUnderTempBand1 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4986))
if mibBuilder.loadTexts: lgpCondId4986SupplyRefrigUnderTempBand1.setStatus('current')
lgpCondId4990SupplyRefrigOverTempBand2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4990))
if mibBuilder.loadTexts: lgpCondId4990SupplyRefrigOverTempBand2.setStatus('current')
lgpCondId4993SupplyRefrigUnderTempBand2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4993))
if mibBuilder.loadTexts: lgpCondId4993SupplyRefrigUnderTempBand2.setStatus('current')
lgpCondId4996Inverter1ShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4996))
if mibBuilder.loadTexts: lgpCondId4996Inverter1ShortCycle.setStatus('current')
lgpCondId4999Inverter2ShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 4999))
if mibBuilder.loadTexts: lgpCondId4999Inverter2ShortCycle.setStatus('current')
lgpCondId5015SupplyAirOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5015))
if mibBuilder.loadTexts: lgpCondId5015SupplyAirOverTemperature.setStatus('current')
lgpCondId5019SupplyAirUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5019))
if mibBuilder.loadTexts: lgpCondId5019SupplyAirUnderTemperature.setStatus('current')
lgpCondId5023ReturnAirOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5023))
if mibBuilder.loadTexts: lgpCondId5023ReturnAirOverTemperature.setStatus('current')
lgpCondId5026SupplyAirSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5026))
if mibBuilder.loadTexts: lgpCondId5026SupplyAirSensorIssue.setStatus('current')
lgpCondId5034HighReturnHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5034))
if mibBuilder.loadTexts: lgpCondId5034HighReturnHumidity.setStatus('current')
lgpCondId5036LowReturnHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5036))
if mibBuilder.loadTexts: lgpCondId5036LowReturnHumidity.setStatus('current')
lgpCondId5037HumidifierHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5037))
if mibBuilder.loadTexts: lgpCondId5037HumidifierHoursExceeded.setStatus('current')
lgpCondId5038DehumidifierHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5038))
if mibBuilder.loadTexts: lgpCondId5038DehumidifierHoursExceeded.setStatus('current')
lgpCondId5039HumidifierUnderCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5039))
if mibBuilder.loadTexts: lgpCondId5039HumidifierUnderCurrent.setStatus('current')
lgpCondId5040HumidifierOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5040))
if mibBuilder.loadTexts: lgpCondId5040HumidifierOverCurrent.setStatus('current')
lgpCondId5041HumidifierLowWater = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5041))
if mibBuilder.loadTexts: lgpCondId5041HumidifierLowWater.setStatus('current')
lgpCondId5042HumidifierCylinderWorn = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5042))
if mibBuilder.loadTexts: lgpCondId5042HumidifierCylinderWorn.setStatus('current')
lgpCondId5043HumidifierIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5043))
if mibBuilder.loadTexts: lgpCondId5043HumidifierIssue.setStatus('current')
lgpCondId5044ExtHumidifierLockout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5044))
if mibBuilder.loadTexts: lgpCondId5044ExtHumidifierLockout.setStatus('current')
lgpCondId5045HumidifierControlBoardNotDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5045))
if mibBuilder.loadTexts: lgpCondId5045HumidifierControlBoardNotDetected.setStatus('current')
lgpCondId5046ReturnHumidityOutOfProportionalBand = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5046))
if mibBuilder.loadTexts: lgpCondId5046ReturnHumidityOutOfProportionalBand.setStatus('current')
lgpCondId5053LossofAirFlow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5053))
if mibBuilder.loadTexts: lgpCondId5053LossofAirFlow.setStatus('current')
lgpCondId5054FanHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5054))
if mibBuilder.loadTexts: lgpCondId5054FanHoursExceeded.setStatus('current')
lgpCondId5055TopFanIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5055))
if mibBuilder.loadTexts: lgpCondId5055TopFanIssue.setStatus('current')
lgpCondId5056BottomFanIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5056))
if mibBuilder.loadTexts: lgpCondId5056BottomFanIssue.setStatus('current')
lgpCondId5060RemoteSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5060))
if mibBuilder.loadTexts: lgpCondId5060RemoteSensorIssue.setStatus('current')
lgpCondId5062Compressor1LowSuctionPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5062))
if mibBuilder.loadTexts: lgpCondId5062Compressor1LowSuctionPressure.setStatus('current')
lgpCondId5063Compressor1HoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5063))
if mibBuilder.loadTexts: lgpCondId5063Compressor1HoursExceeded.setStatus('current')
lgpCondId5064DigScrollComp1TempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5064))
if mibBuilder.loadTexts: lgpCondId5064DigScrollComp1TempSensorIssue.setStatus('current')
lgpCondId5065DigScrollComp1OverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5065))
if mibBuilder.loadTexts: lgpCondId5065DigScrollComp1OverTemp.setStatus('current')
lgpCondId5066Compressor1LowPressureTransducerIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5066))
if mibBuilder.loadTexts: lgpCondId5066Compressor1LowPressureTransducerIssue.setStatus('current')
lgpCondId5067ExtCompressorLockout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5067))
if mibBuilder.loadTexts: lgpCondId5067ExtCompressorLockout.setStatus('current')
lgpCondId5068ReheaterOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5068))
if mibBuilder.loadTexts: lgpCondId5068ReheaterOverTemperature.setStatus('current')
lgpCondId5069ElectricReheater1HoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5069))
if mibBuilder.loadTexts: lgpCondId5069ElectricReheater1HoursExceeded.setStatus('current')
lgpCondId5070ExtReheatLockout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5070))
if mibBuilder.loadTexts: lgpCondId5070ExtReheatLockout.setStatus('current')
lgpCondId5071Condenser1Issue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5071))
if mibBuilder.loadTexts: lgpCondId5071Condenser1Issue.setStatus('current')
lgpCondId5072CondenserVFDIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5072))
if mibBuilder.loadTexts: lgpCondId5072CondenserVFDIssue.setStatus('current')
lgpCondId5073CondenserTVSSIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5073))
if mibBuilder.loadTexts: lgpCondId5073CondenserTVSSIssue.setStatus('current')
lgpCondId5104ExtOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5104))
if mibBuilder.loadTexts: lgpCondId5104ExtOverTemperature.setStatus('current')
lgpCondId5105ExtLossofFlow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5105))
if mibBuilder.loadTexts: lgpCondId5105ExtLossofFlow.setStatus('current')
lgpCondId5106ExtCondenserPumpHighWater = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5106))
if mibBuilder.loadTexts: lgpCondId5106ExtCondenserPumpHighWater.setStatus('current')
lgpCondId5107ExtStandbyGlycolPumpOn = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5107))
if mibBuilder.loadTexts: lgpCondId5107ExtStandbyGlycolPumpOn.setStatus('current')
lgpCondId5108ExternalFireDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5108))
if mibBuilder.loadTexts: lgpCondId5108ExternalFireDetected.setStatus('current')
lgpCondId5109UnitOn = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5109))
if mibBuilder.loadTexts: lgpCondId5109UnitOn.setStatus('current')
lgpCondId5110UnitOff = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5110))
if mibBuilder.loadTexts: lgpCondId5110UnitOff.setStatus('current')
lgpCondId5111UnitStandby = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5111))
if mibBuilder.loadTexts: lgpCondId5111UnitStandby.setStatus('current')
lgpCondId5112UnitPartialShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5112))
if mibBuilder.loadTexts: lgpCondId5112UnitPartialShutdown.setStatus('current')
lgpCondId5113UnitShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5113))
if mibBuilder.loadTexts: lgpCondId5113UnitShutdown.setStatus('current')
lgpCondId5114WaterLeakageDetectorSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5114))
if mibBuilder.loadTexts: lgpCondId5114WaterLeakageDetectorSensorIssue.setStatus('current')
lgpCondId5115BMSCommunicationsTimeout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5115))
if mibBuilder.loadTexts: lgpCondId5115BMSCommunicationsTimeout.setStatus('current')
lgpCondId5116MaintenanceDue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5116))
if mibBuilder.loadTexts: lgpCondId5116MaintenanceDue.setStatus('current')
lgpCondId5117MaintenanceCompleted = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5117))
if mibBuilder.loadTexts: lgpCondId5117MaintenanceCompleted.setStatus('current')
lgpCondId5118CloggedAirFilter = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5118))
if mibBuilder.loadTexts: lgpCondId5118CloggedAirFilter.setStatus('current')
lgpCondId5119RAMBatteryIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5119))
if mibBuilder.loadTexts: lgpCondId5119RAMBatteryIssue.setStatus('current')
lgpCondId5120MasterUnitCommunicationLost = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5120))
if mibBuilder.loadTexts: lgpCondId5120MasterUnitCommunicationLost.setStatus('current')
lgpCondId5121HighPowerShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5121))
if mibBuilder.loadTexts: lgpCondId5121HighPowerShutdown.setStatus('current')
lgpCondId5126DigScrollComp2OverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5126))
if mibBuilder.loadTexts: lgpCondId5126DigScrollComp2OverTemp.setStatus('current')
lgpCondId5144OutputOfUf = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5144))
if mibBuilder.loadTexts: lgpCondId5144OutputOfUf.setStatus('current')
lgpCondId5145MMSModuleAlarmActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5145))
if mibBuilder.loadTexts: lgpCondId5145MMSModuleAlarmActive.setStatus('current')
lgpCondId5146CompressorPumpDownIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5146))
if mibBuilder.loadTexts: lgpCondId5146CompressorPumpDownIssue.setStatus('current')
lgpCondId5147ReturnAirSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5147))
if mibBuilder.loadTexts: lgpCondId5147ReturnAirSensorIssue.setStatus('current')
lgpCondId5148CompressorHighPressureTransducerIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5148))
if mibBuilder.loadTexts: lgpCondId5148CompressorHighPressureTransducerIssue.setStatus('current')
lgpCondId5149BatteryNotQualified = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5149))
if mibBuilder.loadTexts: lgpCondId5149BatteryNotQualified.setStatus('current')
lgpCondId5150BatteryTerminalsReversed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5150))
if mibBuilder.loadTexts: lgpCondId5150BatteryTerminalsReversed.setStatus('current')
lgpCondId5151BatteryConverterFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5151))
if mibBuilder.loadTexts: lgpCondId5151BatteryConverterFailure.setStatus('current')
lgpCondId5152InverterSCROpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5152))
if mibBuilder.loadTexts: lgpCondId5152InverterSCROpen.setStatus('current')
lgpCondId5153LoadSharingFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5153))
if mibBuilder.loadTexts: lgpCondId5153LoadSharingFault.setStatus('current')
lgpCondId5154DCBusAbnormal = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5154))
if mibBuilder.loadTexts: lgpCondId5154DCBusAbnormal.setStatus('current')
lgpCondId5155MainsInputNeutralLost = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5155))
if mibBuilder.loadTexts: lgpCondId5155MainsInputNeutralLost.setStatus('current')
lgpCondId5156LoadImpactTransfer = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5156))
if mibBuilder.loadTexts: lgpCondId5156LoadImpactTransfer.setStatus('current')
lgpCondId5157UserOperationInvalid = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5157))
if mibBuilder.loadTexts: lgpCondId5157UserOperationInvalid.setStatus('current')
lgpCondId5158PowerSubModuleFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5158))
if mibBuilder.loadTexts: lgpCondId5158PowerSubModuleFault.setStatus('current')
lgpCondId5178OutputOvervoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5178))
if mibBuilder.loadTexts: lgpCondId5178OutputOvervoltage.setStatus('current')
lgpCondId5179OutputUndervoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5179))
if mibBuilder.loadTexts: lgpCondId5179OutputUndervoltage.setStatus('current')
lgpCondId5180OutputOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5180))
if mibBuilder.loadTexts: lgpCondId5180OutputOvercurrent.setStatus('current')
lgpCondId5181NeutralOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5181))
if mibBuilder.loadTexts: lgpCondId5181NeutralOvercurrent.setStatus('current')
lgpCondId5182GroundOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5182))
if mibBuilder.loadTexts: lgpCondId5182GroundOvercurrent.setStatus('current')
lgpCondId5183OutputVoltageTHD = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5183))
if mibBuilder.loadTexts: lgpCondId5183OutputVoltageTHD.setStatus('current')
lgpCondId5184OutputFrequencyError = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5184))
if mibBuilder.loadTexts: lgpCondId5184OutputFrequencyError.setStatus('current')
lgpCondId5185TransformerOvertemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5185))
if mibBuilder.loadTexts: lgpCondId5185TransformerOvertemperature.setStatus('current')
lgpCondId5212PanelSummaryStatus = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5212))
if mibBuilder.loadTexts: lgpCondId5212PanelSummaryStatus.setStatus('current')
lgpCondId5213PanelOvervoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5213))
if mibBuilder.loadTexts: lgpCondId5213PanelOvervoltage.setStatus('current')
lgpCondId5214PanelUndervoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5214))
if mibBuilder.loadTexts: lgpCondId5214PanelUndervoltage.setStatus('current')
lgpCondId5215PanelOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5215))
if mibBuilder.loadTexts: lgpCondId5215PanelOvercurrent.setStatus('current')
lgpCondId5216PanelNeutralOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5216))
if mibBuilder.loadTexts: lgpCondId5216PanelNeutralOvercurrent.setStatus('current')
lgpCondId5217PanelGroundOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5217))
if mibBuilder.loadTexts: lgpCondId5217PanelGroundOvercurrent.setStatus('current')
lgpCondId5226BranchOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5226))
if mibBuilder.loadTexts: lgpCondId5226BranchOvercurrent.setStatus('current')
lgpCondId5227BranchUndercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5227))
if mibBuilder.loadTexts: lgpCondId5227BranchUndercurrent.setStatus('current')
lgpCondId5245SubfeedPhaseOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5245))
if mibBuilder.loadTexts: lgpCondId5245SubfeedPhaseOvercurrent.setStatus('current')
lgpCondId5246SubfeedNeutralOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5246))
if mibBuilder.loadTexts: lgpCondId5246SubfeedNeutralOvercurrent.setStatus('current')
lgpCondId5247SubfeedGroundOvercurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5247))
if mibBuilder.loadTexts: lgpCondId5247SubfeedGroundOvercurrent.setStatus('current')
lgpCondId5249EventState = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5249))
if mibBuilder.loadTexts: lgpCondId5249EventState.setStatus('current')
lgpCondId5263CompressorNotStopping = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5263))
if mibBuilder.loadTexts: lgpCondId5263CompressorNotStopping.setStatus('current')
lgpCondId5269CompressorHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5269))
if mibBuilder.loadTexts: lgpCondId5269CompressorHoursExceeded.setStatus('current')
lgpCondId5270CompressorHighHeadPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5270))
if mibBuilder.loadTexts: lgpCondId5270CompressorHighHeadPressure.setStatus('current')
lgpCondId5271CompressorLowSuctionPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5271))
if mibBuilder.loadTexts: lgpCondId5271CompressorLowSuctionPressure.setStatus('current')
lgpCondId5272CompressorThermalOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5272))
if mibBuilder.loadTexts: lgpCondId5272CompressorThermalOverload.setStatus('current')
lgpCondId5273CompressorLowOilPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5273))
if mibBuilder.loadTexts: lgpCondId5273CompressorLowOilPressure.setStatus('current')
lgpCondId5274CompressorHeadPressureOverThreshold = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5274))
if mibBuilder.loadTexts: lgpCondId5274CompressorHeadPressureOverThreshold.setStatus('current')
lgpCondId5275CompressorLossofDifferentialPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5275))
if mibBuilder.loadTexts: lgpCondId5275CompressorLossofDifferentialPressure.setStatus('current')
lgpCondId5277CondenserFanIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5277))
if mibBuilder.loadTexts: lgpCondId5277CondenserFanIssue.setStatus('current')
lgpCondId5278LowCondenserRefrigerantPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5278))
if mibBuilder.loadTexts: lgpCondId5278LowCondenserRefrigerantPressure.setStatus('current')
lgpCondId5280LowFluidPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5280))
if mibBuilder.loadTexts: lgpCondId5280LowFluidPressure.setStatus('current')
lgpCondId5293ReturnFluidOverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5293))
if mibBuilder.loadTexts: lgpCondId5293ReturnFluidOverTemp.setStatus('current')
lgpCondId5294ReturnFluidUnderTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5294))
if mibBuilder.loadTexts: lgpCondId5294ReturnFluidUnderTemp.setStatus('current')
lgpCondId5295ReturnFluidTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5295))
if mibBuilder.loadTexts: lgpCondId5295ReturnFluidTempSensorIssue.setStatus('current')
lgpCondId5296TeamworkReturnFluidTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5296))
if mibBuilder.loadTexts: lgpCondId5296TeamworkReturnFluidTempSensorIssue.setStatus('current')
lgpCondId5297AllPumpsLossofFlow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5297))
if mibBuilder.loadTexts: lgpCondId5297AllPumpsLossofFlow.setStatus('current')
lgpCondId5300PumpHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5300))
if mibBuilder.loadTexts: lgpCondId5300PumpHoursExceeded.setStatus('current')
lgpCondId5306FreeCoolingValveHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5306))
if mibBuilder.loadTexts: lgpCondId5306FreeCoolingValveHoursExceeded.setStatus('current')
lgpCondId5308EvaporatorInletTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5308))
if mibBuilder.loadTexts: lgpCondId5308EvaporatorInletTempSensorIssue.setStatus('current')
lgpCondId5309TeamworkEvaporatorInletTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5309))
if mibBuilder.loadTexts: lgpCondId5309TeamworkEvaporatorInletTempSensorIssue.setStatus('current')
lgpCondId5310EvaporatorFluidFreezeAutoReset = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5310))
if mibBuilder.loadTexts: lgpCondId5310EvaporatorFluidFreezeAutoReset.setStatus('current')
lgpCondId5311EvaporatorFluidFreezeManualResetRequired = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5311))
if mibBuilder.loadTexts: lgpCondId5311EvaporatorFluidFreezeManualResetRequired.setStatus('current')
lgpCondId5315SubgroupEventOccurredDuringCommunicationLoss = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5315))
if mibBuilder.loadTexts: lgpCondId5315SubgroupEventOccurredDuringCommunicationLoss.setStatus('current')
lgpCondId5335ReturnAirUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5335))
if mibBuilder.loadTexts: lgpCondId5335ReturnAirUnderTemperature.setStatus('current')
lgpCondId5349ExtAirSensorAHighHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5349))
if mibBuilder.loadTexts: lgpCondId5349ExtAirSensorAHighHumidity.setStatus('current')
lgpCondId5351ExtAirSensorALowHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5351))
if mibBuilder.loadTexts: lgpCondId5351ExtAirSensorALowHumidity.setStatus('current')
lgpCondId5352CompressorShortCycle = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5352))
if mibBuilder.loadTexts: lgpCondId5352CompressorShortCycle.setStatus('current')
lgpCondId5354DigScrollCompDischargeTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5354))
if mibBuilder.loadTexts: lgpCondId5354DigScrollCompDischargeTempSensorIssue.setStatus('current')
lgpCondId5355DigScrollCompOverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5355))
if mibBuilder.loadTexts: lgpCondId5355DigScrollCompOverTemp.setStatus('current')
lgpCondId5361ExtFreeCoolingLockout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5361))
if mibBuilder.loadTexts: lgpCondId5361ExtFreeCoolingLockout.setStatus('current')
lgpCondId5362FreeCoolingTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5362))
if mibBuilder.loadTexts: lgpCondId5362FreeCoolingTempSensorIssue.setStatus('current')
lgpCondId5365HotWaterHotGasValveHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5365))
if mibBuilder.loadTexts: lgpCondId5365HotWaterHotGasValveHoursExceeded.setStatus('current')
lgpCondId5368ElectricReheaterHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5368))
if mibBuilder.loadTexts: lgpCondId5368ElectricReheaterHoursExceeded.setStatus('current')
lgpCondId5376MainFanOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5376))
if mibBuilder.loadTexts: lgpCondId5376MainFanOverload.setStatus('current')
lgpCondId5377Condenser = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5377))
if mibBuilder.loadTexts: lgpCondId5377Condenser.setStatus('current')
lgpCondId5415ExtLossofAirBlower = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5415))
if mibBuilder.loadTexts: lgpCondId5415ExtLossofAirBlower.setStatus('current')
lgpCondId5416ExtStandbyUnitOn = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5416))
if mibBuilder.loadTexts: lgpCondId5416ExtStandbyUnitOn.setStatus('current')
lgpCondId5417DigitalOutputBoardNotDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5417))
if mibBuilder.loadTexts: lgpCondId5417DigitalOutputBoardNotDetected.setStatus('current')
lgpCondId5418UnitCodeMissing = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5418))
if mibBuilder.loadTexts: lgpCondId5418UnitCodeMissing.setStatus('current')
lgpCondId5419UnitCommunicationLost = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5419))
if mibBuilder.loadTexts: lgpCondId5419UnitCommunicationLost.setStatus('current')
lgpCondId5422OvertemperaturePowerOff = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5422))
if mibBuilder.loadTexts: lgpCondId5422OvertemperaturePowerOff.setStatus('current')
lgpCondId5423TooManySensors = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5423))
if mibBuilder.loadTexts: lgpCondId5423TooManySensors.setStatus('current')
lgpCondId5432TransformerOvertemperaturePowerOff = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5432))
if mibBuilder.loadTexts: lgpCondId5432TransformerOvertemperaturePowerOff.setStatus('current')
lgpCondId5433TransformerOvertemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5433))
if mibBuilder.loadTexts: lgpCondId5433TransformerOvertemperature.setStatus('current')
lgpCondId5434TransformerTemperatureSensorFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5434))
if mibBuilder.loadTexts: lgpCondId5434TransformerTemperatureSensorFail.setStatus('current')
lgpCondId5436LowAmbientTemperatureProbeTwo = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5436))
if mibBuilder.loadTexts: lgpCondId5436LowAmbientTemperatureProbeTwo.setStatus('current')
lgpCondId5437HighAmbientTemperatureProbeTwo = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5437))
if mibBuilder.loadTexts: lgpCondId5437HighAmbientTemperatureProbeTwo.setStatus('current')
lgpCondId5438ThermalRunawayDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5438))
if mibBuilder.loadTexts: lgpCondId5438ThermalRunawayDetected.setStatus('current')
lgpCondId5439BatteryStringEqualize = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5439))
if mibBuilder.loadTexts: lgpCondId5439BatteryStringEqualize.setStatus('current')
lgpCondId5440BatteryStringOffline = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5440))
if mibBuilder.loadTexts: lgpCondId5440BatteryStringOffline.setStatus('current')
lgpCondId5442DischargeLowCellVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5442))
if mibBuilder.loadTexts: lgpCondId5442DischargeLowCellVoltage.setStatus('current')
lgpCondId5447MMSPowerSharing = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5447))
if mibBuilder.loadTexts: lgpCondId5447MMSPowerSharing.setStatus('current')
lgpCondId5453ModuleInStandbyIntelligentParalleling = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5453))
if mibBuilder.loadTexts: lgpCondId5453ModuleInStandbyIntelligentParalleling.setStatus('current')
lgpCondId5456ECOModeActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5456))
if mibBuilder.loadTexts: lgpCondId5456ECOModeActive.setStatus('current')
lgpCondId5457ECOModeSuspended = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5457))
if mibBuilder.loadTexts: lgpCondId5457ECOModeSuspended.setStatus('current')
lgpCondId5458ExcessECOSuspends = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5458))
if mibBuilder.loadTexts: lgpCondId5458ExcessECOSuspends.setStatus('current')
lgpCondId5471DoorOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5471))
if mibBuilder.loadTexts: lgpCondId5471DoorOpen.setStatus('current')
lgpCondId5472DoorSensorDisconnected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5472))
if mibBuilder.loadTexts: lgpCondId5472DoorSensorDisconnected.setStatus('current')
lgpCondId5479ContactClosureOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5479))
if mibBuilder.loadTexts: lgpCondId5479ContactClosureOpen.setStatus('current')
lgpCondId5480ContactClosureClosed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5480))
if mibBuilder.loadTexts: lgpCondId5480ContactClosureClosed.setStatus('current')
lgpCondId5492ExtSystemCondensationDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5492))
if mibBuilder.loadTexts: lgpCondId5492ExtSystemCondensationDetected.setStatus('current')
lgpCondId5495ExtFanIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5495))
if mibBuilder.loadTexts: lgpCondId5495ExtFanIssue.setStatus('current')
lgpCondId5500ExtRemoteShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5500))
if mibBuilder.loadTexts: lgpCondId5500ExtRemoteShutdown.setStatus('current')
lgpCondId5505HotAisleTempOutofRange = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5505))
if mibBuilder.loadTexts: lgpCondId5505HotAisleTempOutofRange.setStatus('current')
lgpCondId5508ColdAisleTempOutofRange = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5508))
if mibBuilder.loadTexts: lgpCondId5508ColdAisleTempOutofRange.setStatus('current')
lgpCondId5512RemoteShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5512))
if mibBuilder.loadTexts: lgpCondId5512RemoteShutdown.setStatus('current')
lgpCondId5513CompressorCapacityReduced = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5513))
if mibBuilder.loadTexts: lgpCondId5513CompressorCapacityReduced.setStatus('current')
lgpCondId5514CompressorLowPressureTransducerIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5514))
if mibBuilder.loadTexts: lgpCondId5514CompressorLowPressureTransducerIssue.setStatus('current')
lgpCondId5524PDUNeutralOverCurrent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5524))
if mibBuilder.loadTexts: lgpCondId5524PDUNeutralOverCurrent.setStatus('current')
lgpCondId5531CondenserCommunicationLost = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5531))
if mibBuilder.loadTexts: lgpCondId5531CondenserCommunicationLost.setStatus('current')
lgpCondId5535CondenserOutsideAirTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5535))
if mibBuilder.loadTexts: lgpCondId5535CondenserOutsideAirTempSensorIssue.setStatus('current')
lgpCondId5536CondenserOutsideAirTempOutofOperatingRange = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5536))
if mibBuilder.loadTexts: lgpCondId5536CondenserOutsideAirTempOutofOperatingRange.setStatus('current')
lgpCondId5537CondenserControlBoardIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5537))
if mibBuilder.loadTexts: lgpCondId5537CondenserControlBoardIssue.setStatus('current')
lgpCondId5539CondenserRefrigerantPressureOverThreshold = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5539))
if mibBuilder.loadTexts: lgpCondId5539CondenserRefrigerantPressureOverThreshold.setStatus('current')
lgpCondId5540CondenserRefrigerantPressureUnderThreshold = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5540))
if mibBuilder.loadTexts: lgpCondId5540CondenserRefrigerantPressureUnderThreshold.setStatus('current')
lgpCondId5541CondenserRefrigerantPressureSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5541))
if mibBuilder.loadTexts: lgpCondId5541CondenserRefrigerantPressureSensorIssue.setStatus('current')
lgpCondId5542CondenserSupplyRefrigerantOverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5542))
if mibBuilder.loadTexts: lgpCondId5542CondenserSupplyRefrigerantOverTemp.setStatus('current')
lgpCondId5543CondenserSupplyRefrigerantUnderTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5543))
if mibBuilder.loadTexts: lgpCondId5543CondenserSupplyRefrigerantUnderTemp.setStatus('current')
lgpCondId5544CondenserSupplyRefrigerantTempSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5544))
if mibBuilder.loadTexts: lgpCondId5544CondenserSupplyRefrigerantTempSensorIssue.setStatus('current')
lgpCondId5545CondenserMaxFanSpeedOverride = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5545))
if mibBuilder.loadTexts: lgpCondId5545CondenserMaxFanSpeedOverride.setStatus('current')
lgpCondId5559EvaporatorReturnFluidOverTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5559))
if mibBuilder.loadTexts: lgpCondId5559EvaporatorReturnFluidOverTemp.setStatus('current')
lgpCondId5560EvaporatorReturnFluidUnderTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5560))
if mibBuilder.loadTexts: lgpCondId5560EvaporatorReturnFluidUnderTemp.setStatus('current')
lgpCondId5561LBSActiveMaster = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5561))
if mibBuilder.loadTexts: lgpCondId5561LBSActiveMaster.setStatus('current')
lgpCondId5562LBSActiveSlave = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5562))
if mibBuilder.loadTexts: lgpCondId5562LBSActiveSlave.setStatus('current')
lgpCondId5563DCBusLowFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5563))
if mibBuilder.loadTexts: lgpCondId5563DCBusLowFault.setStatus('current')
lgpCondId5564FanContactorOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5564))
if mibBuilder.loadTexts: lgpCondId5564FanContactorOpen.setStatus('current')
lgpCondId5565FanContactorOpenFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5565))
if mibBuilder.loadTexts: lgpCondId5565FanContactorOpenFail.setStatus('current')
lgpCondId5566FanContactorCloseFail = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5566))
if mibBuilder.loadTexts: lgpCondId5566FanContactorCloseFail.setStatus('current')
lgpCondId5567IPInhibit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5567))
if mibBuilder.loadTexts: lgpCondId5567IPInhibit.setStatus('current')
lgpCondId5568InputUndervoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5568))
if mibBuilder.loadTexts: lgpCondId5568InputUndervoltage.setStatus('current')
lgpCondId5569InputOvervoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5569))
if mibBuilder.loadTexts: lgpCondId5569InputOvervoltage.setStatus('current')
lgpCondId5573AmbientAirSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5573))
if mibBuilder.loadTexts: lgpCondId5573AmbientAirSensorIssue.setStatus('current')
lgpCondId5577ExtDewPointUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5577))
if mibBuilder.loadTexts: lgpCondId5577ExtDewPointUnderTemperature.setStatus('current')
lgpCondId5578DewPointOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5578))
if mibBuilder.loadTexts: lgpCondId5578DewPointOverTemperature.setStatus('current')
lgpCondId5579DewPointUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5579))
if mibBuilder.loadTexts: lgpCondId5579DewPointUnderTemperature.setStatus('current')
lgpCondId5588UnspecifiedGeneralEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5588))
if mibBuilder.loadTexts: lgpCondId5588UnspecifiedGeneralEvent.setStatus('current')
lgpCondId5593RemoteSensorAverageOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5593))
if mibBuilder.loadTexts: lgpCondId5593RemoteSensorAverageOverTemperature.setStatus('current')
lgpCondId5594RemoteSensorAverageUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5594))
if mibBuilder.loadTexts: lgpCondId5594RemoteSensorAverageUnderTemperature.setStatus('current')
lgpCondId5595RemoteSensorSystemAverageOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5595))
if mibBuilder.loadTexts: lgpCondId5595RemoteSensorSystemAverageOverTemperature.setStatus('current')
lgpCondId5596RemoteSensorSystemAverageUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5596))
if mibBuilder.loadTexts: lgpCondId5596RemoteSensorSystemAverageUnderTemperature.setStatus('current')
lgpCondId5597RemoteSensorOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5597))
if mibBuilder.loadTexts: lgpCondId5597RemoteSensorOverTemperature.setStatus('current')
lgpCondId5598RemoteSensorUnderTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5598))
if mibBuilder.loadTexts: lgpCondId5598RemoteSensorUnderTemperature.setStatus('current')
lgpCondId5600AirEconomizerEmergencyOverride = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5600))
if mibBuilder.loadTexts: lgpCondId5600AirEconomizerEmergencyOverride.setStatus('current')
lgpCondId5601AirEconomizerReducedAirflow = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5601))
if mibBuilder.loadTexts: lgpCondId5601AirEconomizerReducedAirflow.setStatus('current')
lgpCondId5604CompressorSuperheatOverThreshold = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5604))
if mibBuilder.loadTexts: lgpCondId5604CompressorSuperheatOverThreshold.setStatus('current')
lgpCondId5609ThermalRunawayCelltoAmbientTemperatureEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5609))
if mibBuilder.loadTexts: lgpCondId5609ThermalRunawayCelltoAmbientTemperatureEvent.setStatus('current')
lgpCondId5610ThermalRunawayCelltoCellTemperatureEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5610))
if mibBuilder.loadTexts: lgpCondId5610ThermalRunawayCelltoCellTemperatureEvent.setStatus('current')
lgpCondId5611ThermalRunawayChargerCurrentLevelOneEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5611))
if mibBuilder.loadTexts: lgpCondId5611ThermalRunawayChargerCurrentLevelOneEvent.setStatus('current')
lgpCondId5612ThermalRunawayChargerCurrentLevelTwoEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5612))
if mibBuilder.loadTexts: lgpCondId5612ThermalRunawayChargerCurrentLevelTwoEvent.setStatus('current')
lgpCondId5617TemperatureControlSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5617))
if mibBuilder.loadTexts: lgpCondId5617TemperatureControlSensorIssue.setStatus('current')
lgpCondId5621EEVSuperheatBelowThreshold = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5621))
if mibBuilder.loadTexts: lgpCondId5621EEVSuperheatBelowThreshold.setStatus('current')
lgpCondId5622EEVDischargeTempAboveThreshold = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5622))
if mibBuilder.loadTexts: lgpCondId5622EEVDischargeTempAboveThreshold.setStatus('current')
lgpCondId5623EEVBatteryIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5623))
if mibBuilder.loadTexts: lgpCondId5623EEVBatteryIssue.setStatus('current')
lgpCondId5624EEVPowerIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5624))
if mibBuilder.loadTexts: lgpCondId5624EEVPowerIssue.setStatus('current')
lgpCondId5625EEVUnspecifiedGeneralEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5625))
if mibBuilder.loadTexts: lgpCondId5625EEVUnspecifiedGeneralEvent.setStatus('current')
# ---------------------------------------------------------------------------
# Auto-generated MIB condition identities (pysnmp/pysmi output).
#
# Each pair below registers one Liebert Global Products condition (event/alarm)
# identity under the enterprise OID prefix
#   1.3.6.1.4.1.476.1.42.3.2.7.1.<conditionId>
# (iso.org.dod.internet.private.enterprises.liebert...), where the trailing
# arc matches the numeric id embedded in the symbol name
# (e.g. lgpCondId5629... -> ...7.1.5629).  The guarded
# `if mibBuilder.loadTexts:` line marks the node 'current' only when the MIB
# builder was asked to load textual metadata.
#
# NOTE(review): this file is generated — do not hand-edit the statement pairs;
# regenerate from the source MIB instead.  `ObjectIdentity` and `mibBuilder`
# are bound earlier in the file (outside this excerpt) — presumably the
# standard pysnmp SMI imports; verify against the file header.
# ---------------------------------------------------------------------------
# Static pressure / pump / condenser conditions
lgpCondId5629StaticPressureSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5629))
if mibBuilder.loadTexts: lgpCondId5629StaticPressureSensorIssue.setStatus('current')
lgpCondId5630HighStaticPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5630))
if mibBuilder.loadTexts: lgpCondId5630HighStaticPressure.setStatus('current')
lgpCondId5631LowStaticPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5631))
if mibBuilder.loadTexts: lgpCondId5631LowStaticPressure.setStatus('current')
lgpCondId5636PumpUnspecifiedGeneralEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5636))
if mibBuilder.loadTexts: lgpCondId5636PumpUnspecifiedGeneralEvent.setStatus('current')
lgpCondId5637CondenserUnitUnspecifiedGeneralEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5637))
if mibBuilder.loadTexts: lgpCondId5637CondenserUnitUnspecifiedGeneralEvent.setStatus('current')
lgpCondId5638CondenserCircuitUnspecifiedGeneralEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5638))
if mibBuilder.loadTexts: lgpCondId5638CondenserCircuitUnspecifiedGeneralEvent.setStatus('current')
# SFA reserved event placeholders 1-25 (ids 5642-5666)
lgpCondId5642SFAReservedEvent1 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5642))
if mibBuilder.loadTexts: lgpCondId5642SFAReservedEvent1.setStatus('current')
lgpCondId5643SFAReservedEvent2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5643))
if mibBuilder.loadTexts: lgpCondId5643SFAReservedEvent2.setStatus('current')
lgpCondId5644SFAReservedEvent3 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5644))
if mibBuilder.loadTexts: lgpCondId5644SFAReservedEvent3.setStatus('current')
lgpCondId5645SFAReservedEvent4 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5645))
if mibBuilder.loadTexts: lgpCondId5645SFAReservedEvent4.setStatus('current')
lgpCondId5646SFAReservedEvent5 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5646))
if mibBuilder.loadTexts: lgpCondId5646SFAReservedEvent5.setStatus('current')
lgpCondId5647SFAReservedEvent6 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5647))
if mibBuilder.loadTexts: lgpCondId5647SFAReservedEvent6.setStatus('current')
lgpCondId5648SFAReservedEvent7 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5648))
if mibBuilder.loadTexts: lgpCondId5648SFAReservedEvent7.setStatus('current')
lgpCondId5649SFAReservedEvent8 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5649))
if mibBuilder.loadTexts: lgpCondId5649SFAReservedEvent8.setStatus('current')
lgpCondId5650SFAReservedEvent9 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5650))
if mibBuilder.loadTexts: lgpCondId5650SFAReservedEvent9.setStatus('current')
lgpCondId5651SFAReservedEvent10 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5651))
if mibBuilder.loadTexts: lgpCondId5651SFAReservedEvent10.setStatus('current')
lgpCondId5652SFAReservedEvent11 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5652))
if mibBuilder.loadTexts: lgpCondId5652SFAReservedEvent11.setStatus('current')
lgpCondId5653SFAReservedEvent12 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5653))
if mibBuilder.loadTexts: lgpCondId5653SFAReservedEvent12.setStatus('current')
lgpCondId5654SFAReservedEvent13 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5654))
if mibBuilder.loadTexts: lgpCondId5654SFAReservedEvent13.setStatus('current')
lgpCondId5655SFAReservedEvent14 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5655))
if mibBuilder.loadTexts: lgpCondId5655SFAReservedEvent14.setStatus('current')
lgpCondId5656SFAReservedEvent15 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5656))
if mibBuilder.loadTexts: lgpCondId5656SFAReservedEvent15.setStatus('current')
lgpCondId5657SFAReservedEvent16 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5657))
if mibBuilder.loadTexts: lgpCondId5657SFAReservedEvent16.setStatus('current')
lgpCondId5658SFAReservedEvent17 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5658))
if mibBuilder.loadTexts: lgpCondId5658SFAReservedEvent17.setStatus('current')
lgpCondId5659SFAReservedEvent18 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5659))
if mibBuilder.loadTexts: lgpCondId5659SFAReservedEvent18.setStatus('current')
lgpCondId5660SFAReservedEvent19 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5660))
if mibBuilder.loadTexts: lgpCondId5660SFAReservedEvent19.setStatus('current')
lgpCondId5661SFAReservedEvent20 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5661))
if mibBuilder.loadTexts: lgpCondId5661SFAReservedEvent20.setStatus('current')
lgpCondId5662SFAReservedEvent21 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5662))
if mibBuilder.loadTexts: lgpCondId5662SFAReservedEvent21.setStatus('current')
lgpCondId5663SFAReservedEvent22 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5663))
if mibBuilder.loadTexts: lgpCondId5663SFAReservedEvent22.setStatus('current')
lgpCondId5664SFAReservedEvent23 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5664))
if mibBuilder.loadTexts: lgpCondId5664SFAReservedEvent23.setStatus('current')
lgpCondId5665SFAReservedEvent24 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5665))
if mibBuilder.loadTexts: lgpCondId5665SFAReservedEvent24.setStatus('current')
lgpCondId5666SFAReservedEvent25 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5666))
if mibBuilder.loadTexts: lgpCondId5666SFAReservedEvent25.setStatus('current')
# Thermal / fan / compressor and unit shutdown conditions
lgpCondId5768OutletAirOvertemperatureLimit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5768))
if mibBuilder.loadTexts: lgpCondId5768OutletAirOvertemperatureLimit.setStatus('current')
lgpCondId5769EMOShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5769))
if mibBuilder.loadTexts: lgpCondId5769EMOShutdown.setStatus('current')
lgpCondId5770TopOutletFanFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5770))
if mibBuilder.loadTexts: lgpCondId5770TopOutletFanFault.setStatus('current')
lgpCondId5771MMSOverCapacity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5771))
if mibBuilder.loadTexts: lgpCondId5771MMSOverCapacity.setStatus('current')
lgpCondId5773CompressorCapacityNormal = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5773))
if mibBuilder.loadTexts: lgpCondId5773CompressorCapacityNormal.setStatus('current')
lgpCondId5774CompressorContactorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5774))
if mibBuilder.loadTexts: lgpCondId5774CompressorContactorIssue.setStatus('current')
lgpCondId5775UnitShutdownUnspecifiedGeneralEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5775))
if mibBuilder.loadTexts: lgpCondId5775UnitShutdownUnspecifiedGeneralEvent.setStatus('current')
# PDU / branch low-voltage conditions (per line-to-neutral and line-to-line pair)
lgpCondId5776PDULowVoltageLN = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5776))
if mibBuilder.loadTexts: lgpCondId5776PDULowVoltageLN.setStatus('current')
lgpCondId5777PDULowVoltageLL = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5777))
if mibBuilder.loadTexts: lgpCondId5777PDULowVoltageLL.setStatus('current')
lgpCondId5778PDULowVoltageL1L2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5778))
if mibBuilder.loadTexts: lgpCondId5778PDULowVoltageL1L2.setStatus('current')
lgpCondId5779PDULowVoltageL2L3 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5779))
if mibBuilder.loadTexts: lgpCondId5779PDULowVoltageL2L3.setStatus('current')
lgpCondId5780PDULowVoltageL3L1 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5780))
if mibBuilder.loadTexts: lgpCondId5780PDULowVoltageL3L1.setStatus('current')
lgpCondId5781PDULowVoltageL1N = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5781))
if mibBuilder.loadTexts: lgpCondId5781PDULowVoltageL1N.setStatus('current')
lgpCondId5782PDULowVoltageL2N = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5782))
if mibBuilder.loadTexts: lgpCondId5782PDULowVoltageL2N.setStatus('current')
lgpCondId5783PDULowVoltageL3N = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5783))
if mibBuilder.loadTexts: lgpCondId5783PDULowVoltageL3N.setStatus('current')
lgpCondId5784BranchLowVoltageLN = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5784))
if mibBuilder.loadTexts: lgpCondId5784BranchLowVoltageLN.setStatus('current')
lgpCondId5785BranchLowVoltageLL = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5785))
if mibBuilder.loadTexts: lgpCondId5785BranchLowVoltageLL.setStatus('current')
lgpCondId5786BranchLowVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5786))
if mibBuilder.loadTexts: lgpCondId5786BranchLowVoltage.setStatus('current')
lgpCondId5788ContTieActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5788))
if mibBuilder.loadTexts: lgpCondId5788ContTieActive.setStatus('current')
# Energy counter resets and UPS output/shutdown conditions
lgpCondId5792UserkWhReset = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5792))
if mibBuilder.loadTexts: lgpCondId5792UserkWhReset.setStatus('current')
lgpCondId5796PeakkWReset = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5796))
if mibBuilder.loadTexts: lgpCondId5796PeakkWReset.setStatus('current')
lgpCondId5798BypassOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5798))
if mibBuilder.loadTexts: lgpCondId5798BypassOverload.setStatus('current')
lgpCondId5801LowBatteryShutdownImminent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5801))
if mibBuilder.loadTexts: lgpCondId5801LowBatteryShutdownImminent.setStatus('current')
lgpCondId5806OutputOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5806))
if mibBuilder.loadTexts: lgpCondId5806OutputOverload.setStatus('current')
lgpCondId5807OutputOffPending = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5807))
if mibBuilder.loadTexts: lgpCondId5807OutputOffPending.setStatus('current')
lgpCondId5808SystemShutdownOutputShort = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5808))
if mibBuilder.loadTexts: lgpCondId5808SystemShutdownOutputShort.setStatus('current')
lgpCondId5809SystemShutdownLowBattery = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5809))
if mibBuilder.loadTexts: lgpCondId5809SystemShutdownLowBattery.setStatus('current')
lgpCondId5810SystemShutdownRemoteShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5810))
if mibBuilder.loadTexts: lgpCondId5810SystemShutdownRemoteShutdown.setStatus('current')
lgpCondId5811SystemShutdownHardwareFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5811))
if mibBuilder.loadTexts: lgpCondId5811SystemShutdownHardwareFault.setStatus('current')
# Power / charger / battery module conditions
lgpCondId5817LossofRedundancy = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5817))
if mibBuilder.loadTexts: lgpCondId5817LossofRedundancy.setStatus('current')
lgpCondId5818PowerModuleFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5818))
if mibBuilder.loadTexts: lgpCondId5818PowerModuleFailure.setStatus('current')
lgpCondId5819PowerModuleWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5819))
if mibBuilder.loadTexts: lgpCondId5819PowerModuleWarning.setStatus('current')
lgpCondId5838PowerModuleFanFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5838))
if mibBuilder.loadTexts: lgpCondId5838PowerModuleFanFault.setStatus('current')
lgpCondId5839PowerModuleOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5839))
if mibBuilder.loadTexts: lgpCondId5839PowerModuleOverTemperature.setStatus('current')
lgpCondId5840PowerModuleShutdownOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5840))
if mibBuilder.loadTexts: lgpCondId5840PowerModuleShutdownOverTemperature.setStatus('current')
lgpCondId5842ChargerModuleFanFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5842))
if mibBuilder.loadTexts: lgpCondId5842ChargerModuleFanFault.setStatus('current')
lgpCondId5847BatteryModuleTemperatureSensorFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5847))
if mibBuilder.loadTexts: lgpCondId5847BatteryModuleTemperatureSensorFault.setStatus('current')
lgpCondId5848BatteryModuleOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5848))
if mibBuilder.loadTexts: lgpCondId5848BatteryModuleOverTemperature.setStatus('current')
lgpCondId5849ReplaceBatteryModule = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5849))
if mibBuilder.loadTexts: lgpCondId5849ReplaceBatteryModule.setStatus('current')
lgpCondId5850SystemShutdownTransformerOverTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5850))
if mibBuilder.loadTexts: lgpCondId5850SystemShutdownTransformerOverTemperature.setStatus('current')
lgpCondId5851MaximumLoadAlarm = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5851))
if mibBuilder.loadTexts: lgpCondId5851MaximumLoadAlarm.setStatus('current')
lgpCondId5856BatteryModuleFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5856))
if mibBuilder.loadTexts: lgpCondId5856BatteryModuleFault.setStatus('current')
lgpCondId5857BatteryModuleWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5857))
if mibBuilder.loadTexts: lgpCondId5857BatteryModuleWarning.setStatus('current')
lgpCondId5862CheckAirFilter = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5862))
if mibBuilder.loadTexts: lgpCondId5862CheckAirFilter.setStatus('current')
lgpCondId5863TransformerFanFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5863))
if mibBuilder.loadTexts: lgpCondId5863TransformerFanFault.setStatus('current')
lgpCondId5865NoLoadWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5865))
if mibBuilder.loadTexts: lgpCondId5865NoLoadWarning.setStatus('current')
# Battery / rectifier / regeneration conditions
lgpCondId5871BatteryOverTemperatureLimit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5871))
if mibBuilder.loadTexts: lgpCondId5871BatteryOverTemperatureLimit.setStatus('current')
lgpCondId5873UnexpectedMainBatteryDisconnectClosure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5873))
if mibBuilder.loadTexts: lgpCondId5873UnexpectedMainBatteryDisconnectClosure.setStatus('current')
lgpCondId5874BatteryOverVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5874))
if mibBuilder.loadTexts: lgpCondId5874BatteryOverVoltage.setStatus('current')
lgpCondId5875BatteryFuseFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5875))
if mibBuilder.loadTexts: lgpCondId5875BatteryFuseFault.setStatus('current')
lgpCondId5878MainBatteryDisconnectForcedToUnlock = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5878))
if mibBuilder.loadTexts: lgpCondId5878MainBatteryDisconnectForcedToUnlock.setStatus('current')
lgpCondId5879VdcBackfeed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5879))
if mibBuilder.loadTexts: lgpCondId5879VdcBackfeed.setStatus('current')
lgpCondId5880RectifierConfigurationChangeRequest = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5880))
if mibBuilder.loadTexts: lgpCondId5880RectifierConfigurationChangeRequest.setStatus('current')
lgpCondId5881RegenerationActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5881))
if mibBuilder.loadTexts: lgpCondId5881RegenerationActive.setStatus('current')
lgpCondId5882RegenerationOperationTerminated = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5882))
if mibBuilder.loadTexts: lgpCondId5882RegenerationOperationTerminated.setStatus('current')
lgpCondId5883RegenerationOperationFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5883))
if mibBuilder.loadTexts: lgpCondId5883RegenerationOperationFailure.setStatus('current')
# Programmable input contacts 01-12 (ids 5884-5895)
lgpCondId5884ProgramInputContact01 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5884))
if mibBuilder.loadTexts: lgpCondId5884ProgramInputContact01.setStatus('current')
lgpCondId5885ProgramInputContact02 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5885))
if mibBuilder.loadTexts: lgpCondId5885ProgramInputContact02.setStatus('current')
lgpCondId5886ProgramInputContact03 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5886))
if mibBuilder.loadTexts: lgpCondId5886ProgramInputContact03.setStatus('current')
lgpCondId5887ProgramInputContact04 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5887))
if mibBuilder.loadTexts: lgpCondId5887ProgramInputContact04.setStatus('current')
lgpCondId5888ProgramInputContact05 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5888))
if mibBuilder.loadTexts: lgpCondId5888ProgramInputContact05.setStatus('current')
lgpCondId5889ProgramInputContact06 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5889))
if mibBuilder.loadTexts: lgpCondId5889ProgramInputContact06.setStatus('current')
lgpCondId5890ProgramInputContact07 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5890))
if mibBuilder.loadTexts: lgpCondId5890ProgramInputContact07.setStatus('current')
lgpCondId5891ProgramInputContact08 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5891))
if mibBuilder.loadTexts: lgpCondId5891ProgramInputContact08.setStatus('current')
lgpCondId5892ProgramInputContact09 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5892))
if mibBuilder.loadTexts: lgpCondId5892ProgramInputContact09.setStatus('current')
lgpCondId5893ProgramInputContact10 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5893))
if mibBuilder.loadTexts: lgpCondId5893ProgramInputContact10.setStatus('current')
lgpCondId5894ProgramInputContact11 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5894))
if mibBuilder.loadTexts: lgpCondId5894ProgramInputContact11.setStatus('current')
lgpCondId5895ProgramInputContact12 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5895))
if mibBuilder.loadTexts: lgpCondId5895ProgramInputContact12.setStatus('current')
# Sensor / fluid / pressure conditions
lgpCondId5896GroundFaultDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5896))
if mibBuilder.loadTexts: lgpCondId5896GroundFaultDetected.setStatus('current')
lgpCondId5902ReturnHumiditySensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5902))
if mibBuilder.loadTexts: lgpCondId5902ReturnHumiditySensorIssue.setStatus('current')
lgpCondId5903CompressorLowDifferentialPressureLockout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5903))
if mibBuilder.loadTexts: lgpCondId5903CompressorLowDifferentialPressureLockout.setStatus('current')
lgpCondId5906AirflowSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5906))
if mibBuilder.loadTexts: lgpCondId5906AirflowSensorIssue.setStatus('current')
lgpCondId5907ExtAirDamperPositionIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5907))
if mibBuilder.loadTexts: lgpCondId5907ExtAirDamperPositionIssue.setStatus('current')
lgpCondId5908ExtPowerSourceAFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5908))
if mibBuilder.loadTexts: lgpCondId5908ExtPowerSourceAFailure.setStatus('current')
lgpCondId5909ExtPowerSourceBFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5909))
if mibBuilder.loadTexts: lgpCondId5909ExtPowerSourceBFailure.setStatus('current')
lgpCondId5910StaticPressureSensorOutofRange = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5910))
if mibBuilder.loadTexts: lgpCondId5910StaticPressureSensorOutofRange.setStatus('current')
lgpCondId5911FluidTemperatureSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5911))
if mibBuilder.loadTexts: lgpCondId5911FluidTemperatureSensorIssue.setStatus('current')
lgpCondId5912FluidFlowSensorIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5912))
if mibBuilder.loadTexts: lgpCondId5912FluidFlowSensorIssue.setStatus('current')
lgpCondId5914OverDifferentialPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5914))
if mibBuilder.loadTexts: lgpCondId5914OverDifferentialPressure.setStatus('current')
lgpCondId5915UnderDifferentialPressure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5915))
if mibBuilder.loadTexts: lgpCondId5915UnderDifferentialPressure.setStatus('current')
lgpCondId5924MixedModeLockout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5924))
if mibBuilder.loadTexts: lgpCondId5924MixedModeLockout.setStatus('current')
lgpCondId5928UnbalancedLoadCondition = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5928))
if mibBuilder.loadTexts: lgpCondId5928UnbalancedLoadCondition.setStatus('current')
lgpCondId5939BranchOverCurrentProtection = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5939))
if mibBuilder.loadTexts: lgpCondId5939BranchOverCurrentProtection.setStatus('current')
# NOTE(review): symbol name duplicates 5785 ("BranchLowVoltageLL") but with a
# distinct OID arc (5948) — name collision comes from the source MIB labels.
lgpCondId5948BranchLowVoltageLL = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5948))
if mibBuilder.loadTexts: lgpCondId5948BranchLowVoltageLL.setStatus('current')
lgpCondId5957BypassInputVoltageFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5957))
if mibBuilder.loadTexts: lgpCondId5957BypassInputVoltageFault.setStatus('current')
lgpCondId5958BatteryTemperatureOutofRange = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5958))
if mibBuilder.loadTexts: lgpCondId5958BatteryTemperatureOutofRange.setStatus('current')
lgpCondId5960InverterOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5960))
if mibBuilder.loadTexts: lgpCondId5960InverterOverload.setStatus('current')
lgpCondId5966AuxAirTempDeviceCommunicationLost = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5966))
if mibBuilder.loadTexts: lgpCondId5966AuxAirTempDeviceCommunicationLost.setStatus('current')
lgpCondId5967ModbusPowerMeterCommunicationLost = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5967))
if mibBuilder.loadTexts: lgpCondId5967ModbusPowerMeterCommunicationLost.setStatus('current')
lgpCondId5968InverterDesaturation = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5968))
if mibBuilder.loadTexts: lgpCondId5968InverterDesaturation.setStatus('current')
lgpCondId5969GenericDICFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5969))
if mibBuilder.loadTexts: lgpCondId5969GenericDICFault.setStatus('current')
lgpCondId5970GroundFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5970))
if mibBuilder.loadTexts: lgpCondId5970GroundFault.setStatus('current')
# Breaker open/closed state conditions
lgpCondId5973InputBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5973))
if mibBuilder.loadTexts: lgpCondId5973InputBreakerOpen.setStatus('current')
lgpCondId5974NeutralBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5974))
if mibBuilder.loadTexts: lgpCondId5974NeutralBreakerOpen.setStatus('current')
lgpCondId5975OutputBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5975))
if mibBuilder.loadTexts: lgpCondId5975OutputBreakerOpen.setStatus('current')
lgpCondId5976MaintenanceBypassBreakerClosed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5976))
if mibBuilder.loadTexts: lgpCondId5976MaintenanceBypassBreakerClosed.setStatus('current')
lgpCondId5977BatteryBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5977))
if mibBuilder.loadTexts: lgpCondId5977BatteryBreakerOpen.setStatus('current')
lgpCondId5978RectifierIsolationBreakerRFBOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5978))
if mibBuilder.loadTexts: lgpCondId5978RectifierIsolationBreakerRFBOpen.setStatus('current')
lgpCondId5982BypassBreakerSBBOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5982))
if mibBuilder.loadTexts: lgpCondId5982BypassBreakerSBBOpen.setStatus('current')
lgpCondId5983BypassIsolationBreakerBIBOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5983))
if mibBuilder.loadTexts: lgpCondId5983BypassIsolationBreakerBIBOpen.setStatus('current')
lgpCondId5984BypassUndervoltageWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5984))
if mibBuilder.loadTexts: lgpCondId5984BypassUndervoltageWarning.setStatus('current')
lgpCondId5985BypassStaticSwitchBPSSOn = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5985))
if mibBuilder.loadTexts: lgpCondId5985BypassStaticSwitchBPSSOn.setStatus('current')
lgpCondId5998BattOvtempWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 5998))
if mibBuilder.loadTexts: lgpCondId5998BattOvtempWarning.setStatus('current')
lgpCondId6009InverterOutputBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6009))
if mibBuilder.loadTexts: lgpCondId6009InverterOutputBreakerOpen.setStatus('current')
lgpCondId6011EquipmentOverTempWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6011))
if mibBuilder.loadTexts: lgpCondId6011EquipmentOverTempWarning.setStatus('current')
lgpCondId6012EquipmentOvertemperatureLimit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6012))
if mibBuilder.loadTexts: lgpCondId6012EquipmentOvertemperatureLimit.setStatus('current')
# UPS system-level conditions
lgpCondId6045RectifierInputBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6045))
if mibBuilder.loadTexts: lgpCondId6045RectifierInputBreakerOpen.setStatus('current')
lgpCondId6046LoadonUPS = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6046))
if mibBuilder.loadTexts: lgpCondId6046LoadonUPS.setStatus('current')
lgpCondId6047Core2CoreFuseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6047))
if mibBuilder.loadTexts: lgpCondId6047Core2CoreFuseFailure.setStatus('current')
lgpCondId6052SystemOutputBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6052))
if mibBuilder.loadTexts: lgpCondId6052SystemOutputBreakerOpen.setStatus('current')
lgpCondId6059InverterRelayFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6059))
if mibBuilder.loadTexts: lgpCondId6059InverterRelayFault.setStatus('current')
lgpCondId6060TransfertoBypassSystemOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6060))
if mibBuilder.loadTexts: lgpCondId6060TransfertoBypassSystemOverload.setStatus('current')
lgpCondId6061InputSourceBackfeed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6061))
if mibBuilder.loadTexts: lgpCondId6061InputSourceBackfeed.setStatus('current')
lgpCondId6062LossofSynchronization = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6062))
if mibBuilder.loadTexts: lgpCondId6062LossofSynchronization.setStatus('current')
lgpCondId6063BatteryConverterCurrentLimit = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6063))
if mibBuilder.loadTexts: lgpCondId6063BatteryConverterCurrentLimit.setStatus('current')
lgpCondId6064LBSCableFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6064))
if mibBuilder.loadTexts: lgpCondId6064LBSCableFailure.setStatus('current')
lgpCondId6065BatteryChargeEqualizationTimeout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6065))
if mibBuilder.loadTexts: lgpCondId6065BatteryChargeEqualizationTimeout.setStatus('current')
lgpCondId6066ParallelCableFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6066))
if mibBuilder.loadTexts: lgpCondId6066ParallelCableFailure.setStatus('current')
lgpCondId6067BatteryFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6067))
if mibBuilder.loadTexts: lgpCondId6067BatteryFault.setStatus('current')
lgpCondId6068BatteryRoomAlarm = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6068))
if mibBuilder.loadTexts: lgpCondId6068BatteryRoomAlarm.setStatus('current')
lgpCondId6080UPSCCommunicationFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6080))
if mibBuilder.loadTexts: lgpCondId6080UPSCCommunicationFailure.setStatus('current')
# Compressor / condenser conditions
lgpCondId6092Compressor1BThermalOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6092))
if mibBuilder.loadTexts: lgpCondId6092Compressor1BThermalOverload.setStatus('current')
lgpCondId6093Compressor2BThermalOverload = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6093))
if mibBuilder.loadTexts: lgpCondId6093Compressor2BThermalOverload.setStatus('current')
lgpCondId6094Compressor1BHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6094))
if mibBuilder.loadTexts: lgpCondId6094Compressor1BHoursExceeded.setStatus('current')
lgpCondId6095Compressor2BHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6095))
if mibBuilder.loadTexts: lgpCondId6095Compressor2BHoursExceeded.setStatus('current')
lgpCondId6100CondenserRemoteShutdown = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6100))
if mibBuilder.loadTexts: lgpCondId6100CondenserRemoteShutdown.setStatus('current')
lgpCondId6105ExternalCondenserTVSSIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6105))
if mibBuilder.loadTexts: lgpCondId6105ExternalCondenserTVSSIssue.setStatus('current')
lgpCondId6106ExternalCondenserVFDIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6106))
if mibBuilder.loadTexts: lgpCondId6106ExternalCondenserVFDIssue.setStatus('current')
lgpCondId6107ExternalCondenserIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6107))
if mibBuilder.loadTexts: lgpCondId6107ExternalCondenserIssue.setStatus('current')
lgpCondId6119Slotsnotavailable = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6119))
if mibBuilder.loadTexts: lgpCondId6119Slotsnotavailable.setStatus('current')
# Battery / system lifecycle and firmware-update conditions
lgpCondId6180BatteryUnderVoltage = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6180))
if mibBuilder.loadTexts: lgpCondId6180BatteryUnderVoltage.setStatus('current')
lgpCondId6182ReplaceBattery = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6182))
if mibBuilder.loadTexts: lgpCondId6182ReplaceBattery.setStatus('current')
lgpCondId6186InputFrequencyDeviation = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6186))
if mibBuilder.loadTexts: lgpCondId6186InputFrequencyDeviation.setStatus('current')
lgpCondId6187ShutdownPending = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6187))
if mibBuilder.loadTexts: lgpCondId6187ShutdownPending.setStatus('current')
lgpCondId6194SystemRebootCommandIssued = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6194))
if mibBuilder.loadTexts: lgpCondId6194SystemRebootCommandIssued.setStatus('current')
lgpCondId6203SensorAdded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6203))
if mibBuilder.loadTexts: lgpCondId6203SensorAdded.setStatus('current')
lgpCondId6204SensorRemoved = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6204))
if mibBuilder.loadTexts: lgpCondId6204SensorRemoved.setStatus('current')
lgpCondId6205WaterLeakDetected = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6205))
if mibBuilder.loadTexts: lgpCondId6205WaterLeakDetected.setStatus('current')
lgpCondId6210FirmwareUpdateInProgress = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6210))
if mibBuilder.loadTexts: lgpCondId6210FirmwareUpdateInProgress.setStatus('current')
lgpCondId6211FirmwareUpdateCompletedSuccessfully = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6211))
if mibBuilder.loadTexts: lgpCondId6211FirmwareUpdateCompletedSuccessfully.setStatus('current')
lgpCondId6212FirmwareUpdateCompletedUnsuccessfully = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6212))
if mibBuilder.loadTexts: lgpCondId6212FirmwareUpdateCompletedUnsuccessfully.setStatus('current')
lgpCondId6216PrechargeCircuitFailed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6216))
if mibBuilder.loadTexts: lgpCondId6216PrechargeCircuitFailed.setStatus('current')
lgpCondId6217MemoryCardRemoved = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6217))
if mibBuilder.loadTexts: lgpCondId6217MemoryCardRemoved.setStatus('current')
lgpCondId6218AutoCalibrationActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6218))
if mibBuilder.loadTexts: lgpCondId6218AutoCalibrationActive.setStatus('current')
lgpCondId6219AutoCalibrationFailed = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6219))
if mibBuilder.loadTexts: lgpCondId6219AutoCalibrationFailed.setStatus('current')
lgpCondId6220ModuleOutputBreakerOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6220))
if mibBuilder.loadTexts: lgpCondId6220ModuleOutputBreakerOpen.setStatus('current')
lgpCondId6221NeutralVoltageFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6221))
if mibBuilder.loadTexts: lgpCondId6221NeutralVoltageFault.setStatus('current')
lgpCondId6222BranchLoadLoss = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6222))
if mibBuilder.loadTexts: lgpCondId6222BranchLoadLoss.setStatus('current')
# Remote-sensor humidity and miscellaneous conditions
lgpCondId6225RemoteSensorLowHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6225))
if mibBuilder.loadTexts: lgpCondId6225RemoteSensorLowHumidity.setStatus('current')
lgpCondId6226RemoteSensorHighHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6226))
if mibBuilder.loadTexts: lgpCondId6226RemoteSensorHighHumidity.setStatus('current')
lgpCondId6227RemoteSensorAverageLowHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6227))
if mibBuilder.loadTexts: lgpCondId6227RemoteSensorAverageLowHumidity.setStatus('current')
lgpCondId6228RemoteSensorAverageHighHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6228))
if mibBuilder.loadTexts: lgpCondId6228RemoteSensorAverageHighHumidity.setStatus('current')
lgpCondId6229RemoteSensorSystemAverageLowHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6229))
if mibBuilder.loadTexts: lgpCondId6229RemoteSensorSystemAverageLowHumidity.setStatus('current')
lgpCondId6230RemoteSensorSystemAverageHighHumidity = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6230))
if mibBuilder.loadTexts: lgpCondId6230RemoteSensorSystemAverageHighHumidity.setStatus('current')
lgpCondId6231LowCompressorSuperheat = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6231))
if mibBuilder.loadTexts: lgpCondId6231LowCompressorSuperheat.setStatus('current')
lgpCondId6232SECUnspecifiedGeneralEvent = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6232))
if mibBuilder.loadTexts: lgpCondId6232SECUnspecifiedGeneralEvent.setStatus('current')
lgpCondId6233SECCommunicationLost = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6233))
if mibBuilder.loadTexts: lgpCondId6233SECCommunicationLost.setStatus('current')
lgpCondId6236PowerSourceAIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6236))
if mibBuilder.loadTexts: lgpCondId6236PowerSourceAIssue.setStatus('current')
lgpCondId6237PowerSourceBIssue = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6237))
if mibBuilder.loadTexts: lgpCondId6237PowerSourceBIssue.setStatus('current')
lgpCondId6239FluidValveHoursExceeded = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6239))
if mibBuilder.loadTexts: lgpCondId6239FluidValveHoursExceeded.setStatus('current')
lgpCondId6253BoosterFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6253))
if mibBuilder.loadTexts: lgpCondId6253BoosterFailure.setStatus('current')
lgpCondId6254ChargerFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6254))
if mibBuilder.loadTexts: lgpCondId6254ChargerFailure.setStatus('current')
lgpCondId6274UnitTopReturnAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6274))
if mibBuilder.loadTexts: lgpCondId6274UnitTopReturnAirSensorFailure.setStatus('current')
lgpCondId6275UnitMiddleReturnAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6275))
if mibBuilder.loadTexts: lgpCondId6275UnitMiddleReturnAirSensorFailure.setStatus('current')
lgpCondId6276UnitBottomReturnAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6276))
if mibBuilder.loadTexts: lgpCondId6276UnitBottomReturnAirSensorFailure.setStatus('current')
lgpCondId6277UnitTopSupplyAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6277))
if mibBuilder.loadTexts: lgpCondId6277UnitTopSupplyAirSensorFailure.setStatus('current')
lgpCondId6278UnitMiddleFirstSupplyAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6278))
if mibBuilder.loadTexts: lgpCondId6278UnitMiddleFirstSupplyAirSensorFailure.setStatus('current')
lgpCondId6279UnitBottomSupplyAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6279))
if mibBuilder.loadTexts: lgpCondId6279UnitBottomSupplyAirSensorFailure.setStatus('current')
lgpCondId6284UnitMiddleSecondSupplyAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6284))
if mibBuilder.loadTexts: lgpCondId6284UnitMiddleSecondSupplyAirSensorFailure.setStatus('current')
lgpCondId6293ChilledWaterControlActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6293))
if mibBuilder.loadTexts: lgpCondId6293ChilledWaterControlActive.setStatus('current')
lgpCondId6294ChilledWaterFlowTransducerFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6294))
if mibBuilder.loadTexts: lgpCondId6294ChilledWaterFlowTransducerFailure.setStatus('current')
lgpCondId6295ChilledWaterInletTemperatureSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6295))
if mibBuilder.loadTexts: lgpCondId6295ChilledWaterInletTemperatureSensorFailure.setStatus('current')
lgpCondId6296ChilledWaterHighInletTemperature = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6296))
if mibBuilder.loadTexts: lgpCondId6296ChilledWaterHighInletTemperature.setStatus('current')
lgpCondId6297Modbus010VModuleCommunicationFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6297))
if mibBuilder.loadTexts: lgpCondId6297Modbus010VModuleCommunicationFailure.setStatus('current')
lgpCondId6299RackDoorsOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6299))
if mibBuilder.loadTexts: lgpCondId6299RackDoorsOpen.setStatus('current')
lgpCondId6303TeamStaticPressureSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6303))
if mibBuilder.loadTexts: lgpCondId6303TeamStaticPressureSensorFailure.setStatus('current')
lgpCondId6304HeatingLockout = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6304))
if mibBuilder.loadTexts: lgpCondId6304HeatingLockout.setStatus('current')
lgpCondId6305FreeCoolingStoppedHighRoomTemp = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6305))
if mibBuilder.loadTexts: lgpCondId6305FreeCoolingStoppedHighRoomTemp.setStatus('current')
lgpCondId6306ColdAisleTemperatureHumidityTeamSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6306))
if mibBuilder.loadTexts: lgpCondId6306ColdAisleTemperatureHumidityTeamSensorFailure.setStatus('current')
lgpCondId6309ColdAisleAirSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6309))
if mibBuilder.loadTexts: lgpCondId6309ColdAisleAirSensorFailure.setStatus('current')
lgpCondId6310ChilledWaterInletTemperatureControlActive = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6310))
if mibBuilder.loadTexts: lgpCondId6310ChilledWaterInletTemperatureControlActive.setStatus('current')
lgpCondId6313ChilledWaterInletTemperatureSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6313))
if mibBuilder.loadTexts: lgpCondId6313ChilledWaterInletTemperatureSensorFailure.setStatus('current')
lgpCondId6314ChilledWaterOutletTemperatureSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6314))
if mibBuilder.loadTexts: lgpCondId6314ChilledWaterOutletTemperatureSensorFailure.setStatus('current')
lgpCondId6315ChilledWaterFlowMeterSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6315))
if mibBuilder.loadTexts: lgpCondId6315ChilledWaterFlowMeterSensorFailure.setStatus('current')
lgpCondId6333Bypassoutofsync = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6333))
if mibBuilder.loadTexts: lgpCondId6333Bypassoutofsync.setStatus('current')
lgpCondId6348SystemOutputoffasrequested = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6348))
if mibBuilder.loadTexts: lgpCondId6348SystemOutputoffasrequested.setStatus('current')
lgpCondId6349SystemOffasrequested = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6349))
if mibBuilder.loadTexts: lgpCondId6349SystemOffasrequested.setStatus('current')
lgpCondId6350GeneralFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6350))
if mibBuilder.loadTexts: lgpCondId6350GeneralFault.setStatus('current')
lgpCondId6351UPSAwaitingPower = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6351))
if mibBuilder.loadTexts: lgpCondId6351UPSAwaitingPower.setStatus('current')
lgpCondId6352AutonomyCalibration = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6352))
if mibBuilder.loadTexts: lgpCondId6352AutonomyCalibration.setStatus('current')
lgpCondId6353GeneralWarning = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6353))
if mibBuilder.loadTexts: lgpCondId6353GeneralWarning.setStatus('current')
lgpCondId6354BatteryCharging = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6354))
if mibBuilder.loadTexts: lgpCondId6354BatteryCharging.setStatus('current')
lgpCondId6355BackfeedRelayFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6355))
if mibBuilder.loadTexts: lgpCondId6355BackfeedRelayFailure.setStatus('current')
lgpCondId6356BatteryCircuitOpen = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6356))
if mibBuilder.loadTexts: lgpCondId6356BatteryCircuitOpen.setStatus('current')
lgpCondId6357SystemRestartPending = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6357))
if mibBuilder.loadTexts: lgpCondId6357SystemRestartPending.setStatus('current')
lgpCondId6358PipeTemperatureSensorFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6358))
if mibBuilder.loadTexts: lgpCondId6358PipeTemperatureSensorFailure.setStatus('current')
lgpCondId6362SFAReservedEvent26 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6362))
if mibBuilder.loadTexts: lgpCondId6362SFAReservedEvent26.setStatus('current')
lgpCondId6363SFAReservedEvent27 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6363))
if mibBuilder.loadTexts: lgpCondId6363SFAReservedEvent27.setStatus('current')
lgpCondId6364SFAReservedEvent28 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6364))
if mibBuilder.loadTexts: lgpCondId6364SFAReservedEvent28.setStatus('current')
lgpCondId6365SFAReservedEvent29 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6365))
if mibBuilder.loadTexts: lgpCondId6365SFAReservedEvent29.setStatus('current')
lgpCondId6366SFAReservedEvent30 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6366))
if mibBuilder.loadTexts: lgpCondId6366SFAReservedEvent30.setStatus('current')
lgpCondId6367SFAReservedEvent31 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6367))
if mibBuilder.loadTexts: lgpCondId6367SFAReservedEvent31.setStatus('current')
lgpCondId6368SFAReservedEvent32 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6368))
if mibBuilder.loadTexts: lgpCondId6368SFAReservedEvent32.setStatus('current')
lgpCondId6369SFAReservedEvent33 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6369))
if mibBuilder.loadTexts: lgpCondId6369SFAReservedEvent33.setStatus('current')
lgpCondId6370SFAReservedEvent34 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6370))
if mibBuilder.loadTexts: lgpCondId6370SFAReservedEvent34.setStatus('current')
lgpCondId6371SFAReservedEvent35 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6371))
if mibBuilder.loadTexts: lgpCondId6371SFAReservedEvent35.setStatus('current')
lgpCondId6372SFAReservedEvent36 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6372))
if mibBuilder.loadTexts: lgpCondId6372SFAReservedEvent36.setStatus('current')
lgpCondId6373SFAReservedEvent37 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6373))
if mibBuilder.loadTexts: lgpCondId6373SFAReservedEvent37.setStatus('current')
lgpCondId6374SFAReservedEvent38 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6374))
if mibBuilder.loadTexts: lgpCondId6374SFAReservedEvent38.setStatus('current')
lgpCondId6375SFAReservedEvent39 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6375))
if mibBuilder.loadTexts: lgpCondId6375SFAReservedEvent39.setStatus('current')
lgpCondId6376SFAReservedEvent40 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6376))
if mibBuilder.loadTexts: lgpCondId6376SFAReservedEvent40.setStatus('current')
lgpCondId6377SFAReservedEvent41 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6377))
if mibBuilder.loadTexts: lgpCondId6377SFAReservedEvent41.setStatus('current')
lgpCondId6378SFAReservedEvent42 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6378))
if mibBuilder.loadTexts: lgpCondId6378SFAReservedEvent42.setStatus('current')
lgpCondId6379SFAReservedEvent43 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6379))
if mibBuilder.loadTexts: lgpCondId6379SFAReservedEvent43.setStatus('current')
lgpCondId6380SFAReservedEvent44 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6380))
if mibBuilder.loadTexts: lgpCondId6380SFAReservedEvent44.setStatus('current')
lgpCondId6381SFAReservedEvent45 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6381))
if mibBuilder.loadTexts: lgpCondId6381SFAReservedEvent45.setStatus('current')
lgpCondId6382SFAReservedEvent46 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6382))
if mibBuilder.loadTexts: lgpCondId6382SFAReservedEvent46.setStatus('current')
lgpCondId6383SFAReservedEvent47 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6383))
if mibBuilder.loadTexts: lgpCondId6383SFAReservedEvent47.setStatus('current')
lgpCondId6384SFAReservedEvent48 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6384))
if mibBuilder.loadTexts: lgpCondId6384SFAReservedEvent48.setStatus('current')
lgpCondId6385SFAReservedEvent49 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6385))
if mibBuilder.loadTexts: lgpCondId6385SFAReservedEvent49.setStatus('current')
lgpCondId6386SFAReservedEvent50 = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6386))
if mibBuilder.loadTexts: lgpCondId6386SFAReservedEvent50.setStatus('current')
lgpCondId6438PowerModuleInputCurrentAbnormal = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6438))
if mibBuilder.loadTexts: lgpCondId6438PowerModuleInputCurrentAbnormal.setStatus('current')
lgpCondId6439PowerModuleBalancerofDCBusFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6439))
if mibBuilder.loadTexts: lgpCondId6439PowerModuleBalancerofDCBusFailure.setStatus('current')
lgpCondId6440PowerModuleFuseFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6440))
if mibBuilder.loadTexts: lgpCondId6440PowerModuleFuseFailure.setStatus('current')
lgpCondId6441PowerModulePowerSupplyFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6441))
if mibBuilder.loadTexts: lgpCondId6441PowerModulePowerSupplyFailure.setStatus('current')
lgpCondId6450PDUPoweredOn = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6450))
if mibBuilder.loadTexts: lgpCondId6450PDUPoweredOn.setStatus('current')
lgpCondId6453InputWiringFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6453))
if mibBuilder.loadTexts: lgpCondId6453InputWiringFault.setStatus('current')
lgpCondId6454DCtoDCConverterFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6454))
if mibBuilder.loadTexts: lgpCondId6454DCtoDCConverterFault.setStatus('current')
lgpCondId6455LeakSensorCableFault = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6455))
if mibBuilder.loadTexts: lgpCondId6455LeakSensorCableFault.setStatus('current')
lgpCondId6518StandbyUnitActivatedDuetoChillerFailure = ObjectIdentity((1, 3, 6, 1, 4, 1, 476, 1, 42, 3, 2, 7, 1, 6518))
if mibBuilder.loadTexts: lgpCondId6518StandbyUnitActivatedDuetoChillerFailure.setStatus('current')
# Generated export: publishes the module's condition-identity symbols under
# the MIB name "LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB" so other pysnmp modules
# can resolve them via mibBuilder.importSymbols.  The keyword order is
# arbitrary (generator output) and carries no meaning; further exportSymbols
# calls for the remaining symbols follow this statement.
mibBuilder.exportSymbols("LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB", lgpCondId5156LoadImpactTransfer=lgpCondId5156LoadImpactTransfer, lgpCondId4824SystemCommFail=lgpCondId4824SystemCommFail, lgpCondId6277UnitTopSupplyAirSensorFailure=lgpCondId6277UnitTopSupplyAirSensorFailure, lgpCondId5598RemoteSensorUnderTemperature=lgpCondId5598RemoteSensorUnderTemperature, lgpCondId6239FluidValveHoursExceeded=lgpCondId6239FluidValveHoursExceeded, lgpCondId4634SupplyRefrigerantOverTemp=lgpCondId4634SupplyRefrigerantOverTemp, lgpCondId4417BranchUnderCurrent=lgpCondId4417BranchUnderCurrent, lgpCondId5063Compressor1HoursExceeded=lgpCondId5063Compressor1HoursExceeded, lgpCondId4496BasicDisplayModuleRemoved=lgpCondId4496BasicDisplayModuleRemoved, lgpCondId4286OutputAmpOverUserLimitPhsA=lgpCondId4286OutputAmpOverUserLimitPhsA, lgpCondId4139BypassExcessAutoRetransfers=lgpCondId4139BypassExcessAutoRetransfers, lgpCondId4500PDUOverCurrent=lgpCondId4500PDUOverCurrent, lgpCondId5120MasterUnitCommunicationLost=lgpCondId5120MasterUnitCommunicationLost, lgpCondId5601AirEconomizerReducedAirflow=lgpCondId5601AirEconomizerReducedAirflow, lgpCondId5568InputUndervoltage=lgpCondId5568InputUndervoltage, lgpCondId4913HighBatteryStringRippleCurrent=lgpCondId4913HighBatteryStringRippleCurrent, lgpCondId4272InputContact03=lgpCondId4272InputContact03, lgpCondId5492ExtSystemCondensationDetected=lgpCondId5492ExtSystemCondensationDetected, lgpCondId5109UnitOn=lgpCondId5109UnitOn, lgpCondId6066ParallelCableFailure=lgpCondId6066ParallelCableFailure, lgpCondId6229RemoteSensorSystemAverageLowHumidity=lgpCondId6229RemoteSensorSystemAverageLowHumidity, lgpCondId5214PanelUndervoltage=lgpCondId5214PanelUndervoltage, lgpCondId6279UnitBottomSupplyAirSensorFailure=lgpCondId6279UnitBottomSupplyAirSensorFailure, lgpCondId4361ReceptacleUnderCurrent=lgpCondId4361ReceptacleUnderCurrent, lgpCondId6377SFAReservedEvent41=lgpCondId6377SFAReservedEvent41, lgpCondId5112UnitPartialShutdown=lgpCondId5112UnitPartialShutdown, 
lgpCondId4720SmokeDetected=lgpCondId4720SmokeDetected, lgpCondId5597RemoteSensorOverTemperature=lgpCondId5597RemoteSensorOverTemperature, lgpCondId5652SFAReservedEvent11=lgpCondId5652SFAReservedEvent11, lgpCondId5042HumidifierCylinderWorn=lgpCondId5042HumidifierCylinderWorn, lgpCondId5560EvaporatorReturnFluidUnderTemp=lgpCondId5560EvaporatorReturnFluidUnderTemp, lgpCondId5638CondenserCircuitUnspecifiedGeneralEvent=lgpCondId5638CondenserCircuitUnspecifiedGeneralEvent, lgpCondId5154DCBusAbnormal=lgpCondId5154DCBusAbnormal, lgpCondId4588OverRelativeHumidity=lgpCondId4588OverRelativeHumidity, lgpCondId4449PDUUnderCurrent=lgpCondId4449PDUUnderCurrent, lgpCondId5595RemoteSensorSystemAverageOverTemperature=lgpCondId5595RemoteSensorSystemAverageOverTemperature, lgpCondId5801LowBatteryShutdownImminent=lgpCondId5801LowBatteryShutdownImminent, lgpCondId5983BypassIsolationBreakerBIBOpen=lgpCondId5983BypassIsolationBreakerBIBOpen, lgpCondId5544CondenserSupplyRefrigerantTempSensorIssue=lgpCondId5544CondenserSupplyRefrigerantTempSensorIssue, lgpCondId5976MaintenanceBypassBreakerClosed=lgpCondId5976MaintenanceBypassBreakerClosed, lgpCondId4687Compressor2AShortCycle=lgpCondId4687Compressor2AShortCycle, lgpCondId6293ChilledWaterControlActive=lgpCondId6293ChilledWaterControlActive, lgpCondId6064LBSCableFailure=lgpCondId6064LBSCableFailure, lgpCondId4142BypassStaticSwitchOverload=lgpCondId4142BypassStaticSwitchOverload, lgpCondId4175BypassFrequencyError=lgpCondId4175BypassFrequencyError, lgpCondId5457ECOModeSuspended=lgpCondId5457ECOModeSuspended, lgpCondId4978IntertierResistanceHigh=lgpCondId4978IntertierResistanceHigh, lgpCondId5637CondenserUnitUnspecifiedGeneralEvent=lgpCondId5637CondenserUnitUnspecifiedGeneralEvent, lgpCondId6231LowCompressorSuperheat=lgpCondId6231LowCompressorSuperheat, lgpCondId5505HotAisleTempOutofRange=lgpCondId5505HotAisleTempOutofRange, lgpCondId5433TransformerOvertemperature=lgpCondId5433TransformerOvertemperature, 
lgpCondId6363SFAReservedEvent27=lgpCondId6363SFAReservedEvent27, lgpCondId4173MainBatteryDisconnectOpen=lgpCondId4173MainBatteryDisconnectOpen, lgpCondId5458ExcessECOSuspends=lgpCondId5458ExcessECOSuspends, lgpCondId5479ContactClosureOpen=lgpCondId5479ContactClosureOpen, lgpCondId4441SystemControllerError=lgpCondId4441SystemControllerError, lgpCondId6350GeneralFault=lgpCondId6350GeneralFault, lgpCondId5806OutputOverload=lgpCondId5806OutputOverload, lgpCondId5072CondenserVFDIssue=lgpCondId5072CondenserVFDIssue, lgpCondId5856BatteryModuleFault=lgpCondId5856BatteryModuleFault, lgpCondId6374SFAReservedEvent38=lgpCondId6374SFAReservedEvent38, lgpCondId4506BranchOverCurrent=lgpCondId4506BranchOverCurrent, lgpCondId4184BatteryBreaker3CloseFailure=lgpCondId4184BatteryBreaker3CloseFailure, lgpCondId6354BatteryCharging=lgpCondId6354BatteryCharging, lgpCondId6254ChargerFailure=lgpCondId6254ChargerFailure, lgpCondId4284InputContact15=lgpCondId4284InputContact15, lgpCondId5657SFAReservedEvent16=lgpCondId5657SFAReservedEvent16, lgpCondId4492ReceptaclePowerStateOn=lgpCondId4492ReceptaclePowerStateOn, lgpCondId6379SFAReservedEvent43=lgpCondId6379SFAReservedEvent43, lgpCondId4672Compressor1BHighHeadPressure=lgpCondId4672Compressor1BHighHeadPressure, lgpCondId5068ReheaterOverTemperature=lgpCondId5068ReheaterOverTemperature, lgpCondId5798BypassOverload=lgpCondId5798BypassOverload, lgpCondId4471PDUUnderCurrentL1=lgpCondId4471PDUUnderCurrentL1, lgpCondId4753MainControllerFault=lgpCondId4753MainControllerFault, lgpCondId5818PowerModuleFailure=lgpCondId5818PowerModuleFailure, lgpCondId5969GenericDICFault=lgpCondId5969GenericDICFault, lgpCondId5782PDULowVoltageL2N=lgpCondId5782PDULowVoltageL2N, lgpCondId5960InverterOverload=lgpCondId5960InverterOverload, lgpCondId4235InverterOverloadPhaseB=lgpCondId4235InverterOverloadPhaseB, lgpCondId4970HighIntercellResistance=lgpCondId4970HighIntercellResistance, lgpCondId5245SubfeedPhaseOvercurrent=lgpCondId5245SubfeedPhaseOvercurrent, 
lgpCondId4669Compressor1AHighHeadPressure=lgpCondId4669Compressor1AHighHeadPressure, lgpCondId5819PowerModuleWarning=lgpCondId5819PowerModuleWarning, lgpCondId6211FirmwareUpdateCompletedSuccessfully=lgpCondId6211FirmwareUpdateCompletedSuccessfully, lgpCondId4322BatteryTestPassed=lgpCondId4322BatteryTestPassed, lgpCondId4645SupplyFluidOverTemp=lgpCondId4645SupplyFluidOverTemp, lgpCondId6045RectifierInputBreakerOpen=lgpCondId6045RectifierInputBreakerOpen, lgpCondId4754SystemBreakersCloseFailure=lgpCondId4754SystemBreakersCloseFailure, lgpCondId5512RemoteShutdown=lgpCondId5512RemoteShutdown, lgpCondId5666SFAReservedEvent25=lgpCondId5666SFAReservedEvent25, lgpCondId6012EquipmentOvertemperatureLimit=lgpCondId6012EquipmentOvertemperatureLimit, lgpCondId4914BatteryStringDischargeDetected=lgpCondId4914BatteryStringDischargeDetected, lgpCondId5660SFAReservedEvent19=lgpCondId5660SFAReservedEvent19, lgpCondId5118CloggedAirFilter=lgpCondId5118CloggedAirFilter, lgpCondId4494BranchBreakerOpen=lgpCondId4494BranchBreakerOpen, lgpCondId4219BatteryOverTemperature=lgpCondId4219BatteryOverTemperature, lgpCondId5874BatteryOverVoltage=lgpCondId5874BatteryOverVoltage, lgpCondId5226BranchOvercurrent=lgpCondId5226BranchOvercurrent, lgpCondId6314ChilledWaterOutletTemperatureSensorFailure=lgpCondId6314ChilledWaterOutletTemperatureSensorFailure, lgpCondId6094Compressor1BHoursExceeded=lgpCondId6094Compressor1BHoursExceeded, lgpCondId5447MMSPowerSharing=lgpCondId5447MMSPowerSharing, lgpCondId5023ReturnAirOverTemperature=lgpCondId5023ReturnAirOverTemperature, lgpCondId4912HighBatteryStringFloatCurrent=lgpCondId4912HighBatteryStringFloatCurrent, lgpCondId5069ElectricReheater1HoursExceeded=lgpCondId5069ElectricReheater1HoursExceeded, lgpCondId5126DigScrollComp2OverTemp=lgpCondId5126DigScrollComp2OverTemp, lgpCondId6303TeamStaticPressureSensorFailure=lgpCondId6303TeamStaticPressureSensorFailure, lgpCondId4502PDUFailure=lgpCondId4502PDUFailure, 
lgpCondId4907HighAmbientTemperature=lgpCondId4907HighAmbientTemperature, lgpCondId5034HighReturnHumidity=lgpCondId5034HighReturnHumidity, lgpCondId4437PDUUnderCurrent=lgpCondId4437PDUUnderCurrent, lgpCondId5566FanContactorCloseFail=lgpCondId5566FanContactorCloseFail, lgpCondId4233InverterFailure=lgpCondId4233InverterFailure, lgpCondId4983SupplyRefrigOverTempBand1=lgpCondId4983SupplyRefrigOverTempBand1, lgpCondId6370SFAReservedEvent34=lgpCondId6370SFAReservedEvent34, lgpCondId5778PDULowVoltageL1L2=lgpCondId5778PDULowVoltageL1L2, lgpCondId4138BypassAutoRetransferFailed=lgpCondId4138BypassAutoRetransferFailed, lgpCondId4198BatteryBreaker8OpenFailure=lgpCondId4198BatteryBreaker8OpenFailure, lgpCondId4222BatteryGroundFault=lgpCondId4222BatteryGroundFault, lgpCondId5775UnitShutdownUnspecifiedGeneralEvent=lgpCondId5775UnitShutdownUnspecifiedGeneralEvent, lgpCondId4298UPSOutputonBypass=lgpCondId4298UPSOutputonBypass, lgpCondId4965HighCellVoltage=lgpCondId4965HighCellVoltage, lgpCondId4406BranchOverCurrent=lgpCondId4406BranchOverCurrent, lgpCondId5636PumpUnspecifiedGeneralEvent=lgpCondId5636PumpUnspecifiedGeneralEvent, lgpCondId5851MaximumLoadAlarm=lgpCondId5851MaximumLoadAlarm, lgpCondId4651SupplyFluidTempSensorIssue=lgpCondId4651SupplyFluidTempSensorIssue, lgpCondId6386SFAReservedEvent50=lgpCondId6386SFAReservedEvent50, lgpCondId5416ExtStandbyUnitOn=lgpCondId5416ExtStandbyUnitOn, lgpCondId4729FanIssue=lgpCondId4729FanIssue, lgpCondId5040HumidifierOverCurrent=lgpCondId5040HumidifierOverCurrent, lgpCondId4321InitiatedTransfertoInverter=lgpCondId4321InitiatedTransfertoInverter, lgpCondId4551GenericTestEvent=lgpCondId4551GenericTestEvent, lgpCondId5060RemoteSensorIssue=lgpCondId5060RemoteSensorIssue, lgpCondId5183OutputVoltageTHD=lgpCondId5183OutputVoltageTHD, lgpCondId5274CompressorHeadPressureOverThreshold=lgpCondId5274CompressorHeadPressureOverThreshold, lgpCondId6372SFAReservedEvent36=lgpCondId6372SFAReservedEvent36, 
lgpCondId5588UnspecifiedGeneralEvent=lgpCondId5588UnspecifiedGeneralEvent, lgpCondId5038DehumidifierHoursExceeded=lgpCondId5038DehumidifierHoursExceeded, lgpCondId5604CompressorSuperheatOverThreshold=lgpCondId5604CompressorSuperheatOverThreshold, lgpCondId4908LowOverallVoltage=lgpCondId4908LowOverallVoltage, lgpCondId5150BatteryTerminalsReversed=lgpCondId5150BatteryTerminalsReversed, lgpCondId5625EEVUnspecifiedGeneralEvent=lgpCondId5625EEVUnspecifiedGeneralEvent, lgpCondId5875BatteryFuseFault=lgpCondId5875BatteryFuseFault, lgpCondId4741BatterySelfTest=lgpCondId4741BatterySelfTest, lgpCondId5886ProgramInputContact03=lgpCondId5886ProgramInputContact03, lgpCondId5045HumidifierControlBoardNotDetected=lgpCondId5045HumidifierControlBoardNotDetected, lgpCondId5508ColdAisleTempOutofRange=lgpCondId5508ColdAisleTempOutofRange, lgpCondId6233SECCommunicationLost=lgpCondId6233SECCommunicationLost, lgpCondId4986SupplyRefrigUnderTempBand1=lgpCondId4986SupplyRefrigUnderTempBand1, lgpCondId5893ProgramInputContact10=lgpCondId5893ProgramInputContact10, lgpCondId5054FanHoursExceeded=lgpCondId5054FanHoursExceeded, lgpCondId5984BypassUndervoltageWarning=lgpCondId5984BypassUndervoltageWarning, lgpCondId6304HeatingLockout=lgpCondId6304HeatingLockout, lgpCondId4273InputContact04=lgpCondId4273InputContact04, lgpCondId5645SFAReservedEvent4=lgpCondId5645SFAReservedEvent4, lgpCondId6450PDUPoweredOn=lgpCondId6450PDUPoweredOn, lgpCondId4276InputContact07=lgpCondId4276InputContact07, lgpCondId4167OutputOff=lgpCondId4167OutputOff, lgpCondId5155MainsInputNeutralLost=lgpCondId5155MainsInputNeutralLost, lgpFlexConditionsWellKnown=lgpFlexConditionsWellKnown, lgpCondId5915UnderDifferentialPressure=lgpCondId5915UnderDifferentialPressure, lgpCondId4186BatteryBreaker4OpenFailure=lgpCondId4186BatteryBreaker4OpenFailure, lgpCondId4390InverterOffExternal=lgpCondId4390InverterOffExternal, lgpCondId5182GroundOvercurrent=lgpCondId5182GroundOvercurrent, 
lgpCondId4828MMSRetransferInhibit=lgpCondId4828MMSRetransferInhibit, lgpCondId5062Compressor1LowSuctionPressure=lgpCondId5062Compressor1LowSuctionPressure, lgpCondId5111UnitStandby=lgpCondId5111UnitStandby, lgpCondId6107ExternalCondenserIssue=lgpCondId6107ExternalCondenserIssue, lgpCondId4601ExternalAirSensorAOverTemperature=lgpCondId4601ExternalAirSensorAOverTemperature, lgpCondId5973InputBreakerOpen=lgpCondId5973InputBreakerOpen, lgpCondId6095Compressor2BHoursExceeded=lgpCondId6095Compressor2BHoursExceeded, lgpCondId4200BatteryChargingInhibited=lgpCondId4200BatteryChargingInhibited, lgpCondId5596RemoteSensorSystemAverageUnderTemperature=lgpCondId5596RemoteSensorSystemAverageUnderTemperature, lgpCondId5653SFAReservedEvent12=lgpCondId5653SFAReservedEvent12, lgpCondId4604ExternalAirSensorBOverTemperature=lgpCondId4604ExternalAirSensorBOverTemperature, lgpCondId5889ProgramInputContact06=lgpCondId5889ProgramInputContact06, lgpCondId5903CompressorLowDifferentialPressureLockout=lgpCondId5903CompressorLowDifferentialPressureLockout, lgpCondId5354DigScrollCompDischargeTempSensorIssue=lgpCondId5354DigScrollCompDischargeTempSensorIssue, lgpCondId5796PeakkWReset=lgpCondId5796PeakkWReset, lgpCondId6355BackfeedRelayFailure=lgpCondId6355BackfeedRelayFailure, lgpCondId6217MemoryCardRemoved=lgpCondId6217MemoryCardRemoved, lgpCondId5153LoadSharingFault=lgpCondId5153LoadSharingFault, lgpCondId4524ModuleAdded=lgpCondId4524ModuleAdded, lgpCondId4193BatteryBreaker6CloseFailure=lgpCondId4193BatteryBreaker6CloseFailure, lgpCondId5437HighAmbientTemperatureProbeTwo=lgpCondId5437HighAmbientTemperatureProbeTwo, lgpCondId5531CondenserCommunicationLost=lgpCondId5531CondenserCommunicationLost, lgpCondId5562LBSActiveSlave=lgpCondId5562LBSActiveSlave, lgpCondId5849ReplaceBatteryModule=lgpCondId5849ReplaceBatteryModule, lgpCondId6366SFAReservedEvent30=lgpCondId6366SFAReservedEvent30, lgpCondId5439BatteryStringEqualize=lgpCondId5439BatteryStringEqualize, 
lgpCondId5792UserkWhReset=lgpCondId5792UserkWhReset, lgpCondId5811SystemShutdownHardwareFault=lgpCondId5811SystemShutdownHardwareFault, lgpCondId6518StandbyUnitActivatedDuetoChillerFailure=lgpCondId6518StandbyUnitActivatedDuetoChillerFailure, lgpCondId4969HighInternalResistance=lgpCondId4969HighInternalResistance, lgpCondId4916DischargeLowOverallVoltage=lgpCondId4916DischargeLowOverallVoltage, lgpCondId4278InputContact09=lgpCondId4278InputContact09, lgpCondId4723WaterUnderFloor=lgpCondId4723WaterUnderFloor, lgpCondId5300PumpHoursExceeded=lgpCondId5300PumpHoursExceeded, lgpCondId4508ReceptacleLoadRemoved=lgpCondId4508ReceptacleLoadRemoved, lgpCondId5661SFAReservedEvent20=lgpCondId5661SFAReservedEvent20, lgpCondId4835MMSLowBatteryWarning=lgpCondId4835MMSLowBatteryWarning, lgpCondId5147ReturnAirSensorIssue=lgpCondId5147ReturnAirSensorIssue, lgpCondId5892ProgramInputContact09=lgpCondId5892ProgramInputContact09, lgpCondId6385SFAReservedEvent49=lgpCondId6385SFAReservedEvent49, lgpCondId5114WaterLeakageDetectorSensorIssue=lgpCondId5114WaterLeakageDetectorSensorIssue, lgpCondId5998BattOvtempWarning=lgpCondId5998BattOvtempWarning, lgpCondId6380SFAReservedEvent44=lgpCondId6380SFAReservedEvent44, lgpCondId6375SFAReservedEvent39=lgpCondId6375SFAReservedEvent39, lgpCondId5423TooManySensors=lgpCondId5423TooManySensors, lgpCondId4320InitiatedTransfertoBypass=lgpCondId4320InitiatedTransfertoBypass, lgpCondId5564FanContactorOpen=lgpCondId5564FanContactorOpen, lgpCondId5665SFAReservedEvent24=lgpCondId5665SFAReservedEvent24, lgpCondId5809SystemShutdownLowBattery=lgpCondId5809SystemShutdownLowBattery, lgpCondId5896GroundFaultDetected=lgpCondId5896GroundFaultDetected, lgpCondId5119RAMBatteryIssue=lgpCondId5119RAMBatteryIssue, lgpCondId4294InletAirOverTemperature=lgpCondId4294InletAirOverTemperature, lgpCondId4323BatteryTestFailed=lgpCondId4323BatteryTestFailed, lgpCondId4750MultipleFanFailure=lgpCondId4750MultipleFanFailure, lgpCondId4825LossofRedundancy=lgpCondId4825LossofRedundancy, 
lgpCondId5015SupplyAirOverTemperature=lgpCondId5015SupplyAirOverTemperature, lgpCondId6384SFAReservedEvent48=lgpCondId6384SFAReservedEvent48, lgpCondId4714ShutdownLossOfPower=lgpCondId4714ShutdownLossOfPower, lgpCondId5810SystemShutdownRemoteShutdown=lgpCondId5810SystemShutdownRemoteShutdown, lgpCondId5263CompressorNotStopping=lgpCondId5263CompressorNotStopping, lgpCondId5180OutputOvercurrent=lgpCondId5180OutputOvercurrent, lgpCondId4141BypassBreakerClosed=lgpCondId4141BypassBreakerClosed, lgpCondId5563DCBusLowFault=lgpCondId5563DCBusLowFault, lgpCondId5840PowerModuleShutdownOverTemperature=lgpCondId5840PowerModuleShutdownOverTemperature, lgpCondId4472PDUUnderCurrentL2=lgpCondId4472PDUUnderCurrentL2, lgpCondId5650SFAReservedEvent9=lgpCondId5650SFAReservedEvent9, lgpCondId6222BranchLoadLoss=lgpCondId6222BranchLoadLoss, lgpCondId6232SECUnspecifiedGeneralEvent=lgpCondId6232SECUnspecifiedGeneralEvent, lgpCondId4145BypassAutoTransferFailed=lgpCondId4145BypassAutoTransferFailed, lgpCondId4179BatteryCircuitBreaker2Open=lgpCondId4179BatteryCircuitBreaker2Open, lgpCondId4213SystemShutdownEPO=lgpCondId4213SystemShutdownEPO, lgpCondId5037HumidifierHoursExceeded=lgpCondId5037HumidifierHoursExceeded, lgpCondId6180BatteryUnderVoltage=lgpCondId6180BatteryUnderVoltage, lgpCondId5928UnbalancedLoadCondition=lgpCondId5928UnbalancedLoadCondition, lgpCondId6373SFAReservedEvent37=lgpCondId6373SFAReservedEvent37, lgpCondId4314PowerSupplyFailure=lgpCondId4314PowerSupplyFailure, lgpCondId5879VdcBackfeed=lgpCondId5879VdcBackfeed, lgpCondId5863TransformerFanFault=lgpCondId5863TransformerFanFault, lgpCondId5948BranchLowVoltageLL=lgpCondId5948BranchLowVoltageLL, lgpCondId6062LossofSynchronization=lgpCondId6062LossofSynchronization, lgpCondId4608ExtAirSensorAUnderTemperature=lgpCondId4608ExtAirSensorAUnderTemperature, lgpCondId5436LowAmbientTemperatureProbeTwo=lgpCondId5436LowAmbientTemperatureProbeTwo, lgpCondId4192BatteryBreaker6OpenFailure=lgpCondId4192BatteryBreaker6OpenFailure, 
lgpCondId4270InputContact01=lgpCondId4270InputContact01, lgpCondId5269CompressorHoursExceeded=lgpCondId5269CompressorHoursExceeded, lgpCondId6453InputWiringFault=lgpCondId6453InputWiringFault, lgpCondId5907ExtAirDamperPositionIssue=lgpCondId5907ExtAirDamperPositionIssue, lgpCondId4316AutoRestartInProgress=lgpCondId4316AutoRestartInProgress, lgpCondId6371SFAReservedEvent35=lgpCondId6371SFAReservedEvent35, lgpCondId5066Compressor1LowPressureTransducerIssue=lgpCondId5066Compressor1LowPressureTransducerIssue, lgpCondId5910StaticPressureSensorOutofRange=lgpCondId5910StaticPressureSensorOutofRange)
# Auto-generated (presumably by a pysnmp MIB compiler — do not hand-edit): registers
# another batch of Liebert flexible-condition MIB objects (lgpCondIdNNNN...) with the
# MIB builder under the LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB module name. exportSymbols
# accepts a size-limited set of keyword arguments per call, hence the multiple calls.
mibBuilder.exportSymbols("LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB", lgpCondId5786BranchLowVoltage=lgpCondId5786BranchLowVoltage, lgpCondId5227BranchUndercurrent=lgpCondId5227BranchUndercurrent, lgpCondId4999Inverter2ShortCycle=lgpCondId4999Inverter2ShortCycle, lgpCondId4659Pump2LossofFlow=lgpCondId4659Pump2LossofFlow, lgpCondId4134BypassOverloadPhaseC=lgpCondId4134BypassOverloadPhaseC, lgpCondId5418UnitCodeMissing=lgpCondId5418UnitCodeMissing, lgpCondId5144OutputOfUf=lgpCondId5144OutputOfUf, lgpCondId5847BatteryModuleTemperatureSensorFault=lgpCondId5847BatteryModuleTemperatureSensorFault, lgpCondId4178BatteryBreaker1CloseFailure=lgpCondId4178BatteryBreaker1CloseFailure, lgpCondId5609ThermalRunawayCelltoAmbientTemperatureEvent=lgpCondId5609ThermalRunawayCelltoAmbientTemperatureEvent, lgpCondId4439AutomaticRestartFailed=lgpCondId4439AutomaticRestartFailed, lgpCondId4229EmergencyPowerOffLatched=lgpCondId4229EmergencyPowerOffLatched, lgpCondId5149BatteryNotQualified=lgpCondId5149BatteryNotQualified, lgpCondId5850SystemShutdownTransformerOverTemperature=lgpCondId5850SystemShutdownTransformerOverTemperature, lgpCondId4827MMSTransferInhibit=lgpCondId4827MMSTransferInhibit, lgpCondId5542CondenserSupplyRefrigerantOverTemp=lgpCondId5542CondenserSupplyRefrigerantOverTemp, lgpCondId4756ServiceCodeActive=lgpCondId4756ServiceCodeActive, lgpCondId5368ElectricReheaterHoursExceeded=lgpCondId5368ElectricReheaterHoursExceeded, lgpCondId6068BatteryRoomAlarm=lgpCondId6068BatteryRoomAlarm, lgpCondId5036LowReturnHumidity=lgpCondId5036LowReturnHumidity, lgpCondId5115BMSCommunicationsTimeout=lgpCondId5115BMSCommunicationsTimeout, lgpCondId5770TopOutletFanFault=lgpCondId5770TopOutletFanFault, lgpCondId4917DischargeLowCellVoltage=lgpCondId4917DischargeLowCellVoltage, lgpCondId5624EEVPowerIssue=lgpCondId5624EEVPowerIssue, lgpCondId5116MaintenanceDue=lgpCondId5116MaintenanceDue, lgpCondId4230SystemOutputLowPowerFactor=lgpCondId4230SystemOutputLowPowerFactor, 
lgpCondId5157UserOperationInvalid=lgpCondId5157UserOperationInvalid, lgpCondId5838PowerModuleFanFault=lgpCondId5838PowerModuleFanFault, lgpCondId6333Bypassoutofsync=lgpCondId6333Bypassoutofsync, lgpCondId4656Pump1LossofFlow=lgpCondId4656Pump1LossofFlow, lgpCondId5108ExternalFireDetected=lgpCondId5108ExternalFireDetected, lgpCondId5376MainFanOverload=lgpCondId5376MainFanOverload, lgpCondId5882RegenerationOperationTerminated=lgpCondId5882RegenerationOperationTerminated, lgpCondId4495DeviceConfigurationChange=lgpCondId4495DeviceConfigurationChange, lgpCondId4967HighCellTemperature=lgpCondId4967HighCellTemperature, lgpCondId5070ExtReheatLockout=lgpCondId5070ExtReheatLockout, lgpCondId5073CondenserTVSSIssue=lgpCondId5073CondenserTVSSIssue, lgpCondId4911LowBatteryStringFloatCurrent=lgpCondId4911LowBatteryStringFloatCurrent, lgpCondId4690Compressor2BShortCycle=lgpCondId4690Compressor2BShortCycle, lgpCondId4980SupplyChilledWaterLossofFlow=lgpCondId4980SupplyChilledWaterLossofFlow, lgpCondId5540CondenserRefrigerantPressureUnderThreshold=lgpCondId5540CondenserRefrigerantPressureUnderThreshold, lgpCondId5966AuxAirTempDeviceCommunicationLost=lgpCondId5966AuxAirTempDeviceCommunicationLost, lgpCondId4826BPSSStartupInhibit=lgpCondId4826BPSSStartupInhibit, lgpCondId4711SystemCondensationDetected=lgpCondId4711SystemCondensationDetected, lgpCondId4662PumpShortCycle=lgpCondId4662PumpShortCycle, lgpCondId5297AllPumpsLossofFlow=lgpCondId5297AllPumpsLossofFlow, lgpCondId6382SFAReservedEvent46=lgpCondId6382SFAReservedEvent46, lgpCondId6105ExternalCondenserTVSSIssue=lgpCondId6105ExternalCondenserTVSSIssue, lgpCondId5647SFAReservedEvent6=lgpCondId5647SFAReservedEvent6, lgpCondId5539CondenserRefrigerantPressureOverThreshold=lgpCondId5539CondenserRefrigerantPressureOverThreshold, lgpCondId5043HumidifierIssue=lgpCondId5043HumidifierIssue, lgpCondId4143BypassStaticSwitchUnavailable=lgpCondId4143BypassStaticSwitchUnavailable, lgpCondId5974NeutralBreakerOpen=lgpCondId5974NeutralBreakerOpen, 
lgpCondId4440FuseFailure=lgpCondId4440FuseFailure, lgpCondId4760ControlsResetRequired=lgpCondId4760ControlsResetRequired, lgpCondId4990SupplyRefrigOverTempBand2=lgpCondId4990SupplyRefrigOverTempBand2, lgpCondId4918DischargeHighBatteryStringCurrent=lgpCondId4918DischargeHighBatteryStringCurrent, lgpCondId4684Compressor1BShortCycle=lgpCondId4684Compressor1BShortCycle, lgpCondId5578DewPointOverTemperature=lgpCondId5578DewPointOverTemperature, lgpCondId4308DCBusGroundFaultPositive=lgpCondId4308DCBusGroundFaultPositive, lgpCondId5422OvertemperaturePowerOff=lgpCondId5422OvertemperaturePowerOff, lgpCondId4239InverterOutBreakerCloseFail=lgpCondId4239InverterOutBreakerCloseFail, lgpCondId6362SFAReservedEvent26=lgpCondId6362SFAReservedEvent26, lgpCondId4503PDUCommunicationFail=lgpCondId4503PDUCommunicationFail, lgpCondId4194BatteryCircuitBreaker7Open=lgpCondId4194BatteryCircuitBreaker7Open, lgpCondId5472DoorSensorDisconnected=lgpCondId5472DoorSensorDisconnected, lgpCondId4183BatteryBreaker3OpenFailure=lgpCondId4183BatteryBreaker3OpenFailure, lgpCondId5513CompressorCapacityReduced=lgpCondId5513CompressorCapacityReduced, lgpCondId4280InputContact11=lgpCondId4280InputContact11, lgpCondId4218BypassManualRexfrInhibited=lgpCondId4218BypassManualRexfrInhibited, lgpCondId5643SFAReservedEvent2=lgpCondId5643SFAReservedEvent2, lgpCondId4964LowCellVoltage=lgpCondId4964LowCellVoltage, lgpCondId5839PowerModuleOverTemperature=lgpCondId5839PowerModuleOverTemperature, lgpCondId5777PDULowVoltageLL=lgpCondId5777PDULowVoltageLL, lgpCondId5885ProgramInputContact02=lgpCondId5885ProgramInputContact02, lgpCondId6295ChilledWaterInletTemperatureSensorFailure=lgpCondId6295ChilledWaterInletTemperatureSensorFailure, lgpCondId5862CheckAirFilter=lgpCondId5862CheckAirFilter, lgpCondId6060TransfertoBypassSystemOverload=lgpCondId6060TransfertoBypassSystemOverload, lgpCondId5537CondenserControlBoardIssue=lgpCondId5537CondenserControlBoardIssue, lgpCondId5779PDULowVoltageL2L3=lgpCondId5779PDULowVoltageL2L3, 
lgpCondId5309TeamworkEvaporatorInletTempSensorIssue=lgpCondId5309TeamworkEvaporatorInletTempSensorIssue, lgpCondId4382SystemInputCurrentImbalance=lgpCondId4382SystemInputCurrentImbalance, lgpCondId5623EEVBatteryIssue=lgpCondId5623EEVBatteryIssue, lgpCondId4277InputContact08=lgpCondId4277InputContact08, lgpCondId4164BatteryChargingError=lgpCondId4164BatteryChargingError, lgpCondId6296ChilledWaterHighInletTemperature=lgpCondId6296ChilledWaterHighInletTemperature, lgpCondId4757LBSActive=lgpCondId4757LBSActive, lgpCondId5769EMOShutdown=lgpCondId5769EMOShutdown, lgpCondId6194SystemRebootCommandIssued=lgpCondId6194SystemRebootCommandIssued, lgpCondId5432TransformerOvertemperaturePowerOff=lgpCondId5432TransformerOvertemperaturePowerOff, lgpCondId4188BatteryCircuitBreaker5Open=lgpCondId4188BatteryCircuitBreaker5Open, lgpCondId5415ExtLossofAirBlower=lgpCondId5415ExtLossofAirBlower, lgpCondId4360ReceptacleOverCurrent=lgpCondId4360ReceptacleOverCurrent, lgpCondId4236InverterOverloadPhaseC=lgpCondId4236InverterOverloadPhaseC, lgpCondId5543CondenserSupplyRefrigerantUnderTemp=lgpCondId5543CondenserSupplyRefrigerantUnderTemp, lgpCondId4147SystemInputCurrentLimit=lgpCondId4147SystemInputCurrentLimit, lgpCondId4621ExternalAirSensorBIssue=lgpCondId4621ExternalAirSensorBIssue, lgpCondId6210FirmwareUpdateInProgress=lgpCondId6210FirmwareUpdateInProgress, lgpCondId4317AutoRestartInhibitedExt=lgpCondId4317AutoRestartInhibitedExt, lgpCondId4681Compressor1AShortCycle=lgpCondId4681Compressor1AShortCycle, lgpCondId5887ProgramInputContact04=lgpCondId5887ProgramInputContact04, lgpCondId4448PDUOverCurrent=lgpCondId4448PDUOverCurrent, lgpCondId5768OutletAirOvertemperatureLimit=lgpCondId5768OutletAirOvertemperatureLimit, lgpCondId5783PDULowVoltageL3N=lgpCondId5783PDULowVoltageL3N, lgpCondId5349ExtAirSensorAHighHumidity=lgpCondId5349ExtAirSensorAHighHumidity, lgpCondId5280LowFluidPressure=lgpCondId5280LowFluidPressure, lgpCondId4216BypassBackfeedDetected=lgpCondId4216BypassBackfeedDetected, 
lgpCondId5019SupplyAirUnderTemperature=lgpCondId5019SupplyAirUnderTemperature, lgpCondId5662SFAReservedEvent21=lgpCondId5662SFAReservedEvent21, lgpCondId6358PipeTemperatureSensorFailure=lgpCondId6358PipeTemperatureSensorFailure, lgpCondId5610ThermalRunawayCelltoCellTemperatureEvent=lgpCondId5610ThermalRunawayCelltoCellTemperatureEvent, lgpCondId5046ReturnHumidityOutOfProportionalBand=lgpCondId5046ReturnHumidityOutOfProportionalBand, lgpCondId4383BypassStaticSwitchOffExtrnl=lgpCondId4383BypassStaticSwitchOffExtrnl, lgpCondId4197BatteryCircuitBreaker8Open=lgpCondId4197BatteryCircuitBreaker8Open, lgpCondId4470PDUOverCurrentL3=lgpCondId4470PDUOverCurrentL3, lgpCondId5957BypassInputVoltageFault=lgpCondId5957BypassInputVoltageFault, lgpCondId6309ColdAisleAirSensorFailure=lgpCondId6309ColdAisleAirSensorFailure, lgpCondId5781PDULowVoltageL1N=lgpCondId5781PDULowVoltageL1N, lgpCondId4172BatteryAutoTestInProgress=lgpCondId4172BatteryAutoTestInProgress, lgpCondId4416BranchOverCurrent=lgpCondId4416BranchOverCurrent, lgpCondId5663SFAReservedEvent22=lgpCondId5663SFAReservedEvent22, lgpCondId5275CompressorLossofDifferentialPressure=lgpCondId5275CompressorLossofDifferentialPressure, lgpCondId5541CondenserRefrigerantPressureSensorIssue=lgpCondId5541CondenserRefrigerantPressureSensorIssue, lgpCondId5648SFAReservedEvent7=lgpCondId5648SFAReservedEvent7, lgpCondId5535CondenserOutsideAirTempSensorIssue=lgpCondId5535CondenserOutsideAirTempSensorIssue, lgpCondId4906LowAmbientTemperature=lgpCondId4906LowAmbientTemperature, lgpCondId5053LossofAirFlow=lgpCondId5053LossofAirFlow, lgpCondId5064DigScrollComp1TempSensorIssue=lgpCondId5064DigScrollComp1TempSensorIssue, lgpCondId5352CompressorShortCycle=lgpCondId5352CompressorShortCycle, lgpCondId6067BatteryFault=lgpCondId6067BatteryFault, lgpCondId6438PowerModuleInputCurrentAbnormal=lgpCondId6438PowerModuleInputCurrentAbnormal, lgpCondId6187ShutdownPending=lgpCondId6187ShutdownPending, 
lgpCondId5438ThermalRunawayDetected=lgpCondId5438ThermalRunawayDetected, lgpCondId6351UPSAwaitingPower=lgpCondId6351UPSAwaitingPower, lgpCondId4384BatteryEoDDisconnect=lgpCondId4384BatteryEoDDisconnect, lgpCondId4311SystemFanFailure=lgpCondId4311SystemFanFailure, lgpCondId6313ChilledWaterInletTemperatureSensorFailure=lgpCondId6313ChilledWaterInletTemperatureSensorFailure, lgpCondId4580OverTemperature=lgpCondId4580OverTemperature, lgpCondId5480ContactClosureClosed=lgpCondId5480ContactClosureClosed, lgpCondId5246SubfeedNeutralOvercurrent=lgpCondId5246SubfeedNeutralOvercurrent, lgpCondId5249EventState=lgpCondId5249EventState, lgpCondId5158PowerSubModuleFault=lgpCondId5158PowerSubModuleFault, lgpCondId5294ReturnFluidUnderTemp=lgpCondId5294ReturnFluidUnderTemp, lgpCondId4732ReceptacleLoadDropped=lgpCondId4732ReceptacleLoadDropped, lgpCondId6368SFAReservedEvent32=lgpCondId6368SFAReservedEvent32, lgpCondId5434TransformerTemperatureSensorFail=lgpCondId5434TransformerTemperatureSensorFail, lgpCondId4290InverterShutdownOverload=lgpCondId4290InverterShutdownOverload, lgpCondId6065BatteryChargeEqualizationTimeout=lgpCondId6065BatteryChargeEqualizationTimeout, lgpCondId5184OutputFrequencyError=lgpCondId5184OutputFrequencyError, lgpCondId5456ECOModeActive=lgpCondId5456ECOModeActive, lgpCondId4726ServiceRequired=lgpCondId4726ServiceRequired, lgpCondId6009InverterOutputBreakerOpen=lgpCondId6009InverterOutputBreakerOpen, lgpCondId6284UnitMiddleSecondSupplyAirSensorFailure=lgpCondId6284UnitMiddleSecondSupplyAirSensorFailure, lgpCondId6305FreeCoolingStoppedHighRoomTemp=lgpCondId6305FreeCoolingStoppedHighRoomTemp, lgpCondId4168BatteryDischarging=lgpCondId4168BatteryDischarging, lgpCondId4473PDUUnderCurrentL3=lgpCondId4473PDUUnderCurrentL3, lgpCondId4637SupplyRefrigerantUnderTemp=lgpCondId4637SupplyRefrigerantUnderTemp, lgpCondId5113UnitShutdown=lgpCondId5113UnitShutdown, lgpCondId6061InputSourceBackfeed=lgpCondId6061InputSourceBackfeed, 
lgpCondId4297UPSOutputonInverter=lgpCondId4297UPSOutputonInverter, lgpCondId6046LoadonUPS=lgpCondId6046LoadonUPS, lgpCondId4834MMSOnBattery=lgpCondId4834MMSOnBattery, lgpCondId4189BatteryBreaker5OpenFailure=lgpCondId4189BatteryBreaker5OpenFailure, lgpCondId4747EquipmentTemperatureSensorFail=lgpCondId4747EquipmentTemperatureSensorFail, lgpCondId6093Compressor2BThermalOverload=lgpCondId6093Compressor2BThermalOverload, lgpCondId4296RectifierOperationInhibitExt=lgpCondId4296RectifierOperationInhibitExt, lgpCondId4501PDUUnderCurrent=lgpCondId4501PDUUnderCurrent, lgpCondId5593RemoteSensorAverageOverTemperature=lgpCondId5593RemoteSensorAverageOverTemperature, lgpCondId5600AirEconomizerEmergencyOverride=lgpCondId5600AirEconomizerEmergencyOverride, lgpCondId4217BypassManualXfrInhibited=lgpCondId4217BypassManualXfrInhibited, lgpCondId5808SystemShutdownOutputShort=lgpCondId5808SystemShutdownOutputShort, lgpCondId6228RemoteSensorAverageHighHumidity=lgpCondId6228RemoteSensorAverageHighHumidity, lgpCondId5617TemperatureControlSensorIssue=lgpCondId5617TemperatureControlSensorIssue, lgpCondId6203SensorAdded=lgpCondId6203SensorAdded, lgpCondId4581UnderTemperature=lgpCondId4581UnderTemperature, lgpCondId4221BatteryExternalMonitor2=lgpCondId4221BatteryExternalMonitor2, lgpCondId6204SensorRemoved=lgpCondId6204SensorRemoved, lgpCondId5071Condenser1Issue=lgpCondId5071Condenser1Issue, lgpCondId6047Core2CoreFuseFailure=lgpCondId6047Core2CoreFuseFailure, lgpCondId4238InverterOutBreakerOpenFail=lgpCondId4238InverterOutBreakerOpenFail, lgpCondId5817LossofRedundancy=lgpCondId5817LossofRedundancy, lgpCondId6226RemoteSensorHighHumidity=lgpCondId6226RemoteSensorHighHumidity, lgpCondId5611ThermalRunawayChargerCurrentLevelOneEvent=lgpCondId5611ThermalRunawayChargerCurrentLevelOneEvent, lgpCondId4196BatteryBreaker7CloseFailure=lgpCondId4196BatteryBreaker7CloseFailure, lgpCondId6356BatteryCircuitOpen=lgpCondId6356BatteryCircuitOpen, lgpCondId4507BranchCurrent=lgpCondId4507BranchCurrent, 
lgpCondId5107ExtStandbyGlycolPumpOn=lgpCondId5107ExtStandbyGlycolPumpOn, lgpCondId6276UnitBottomReturnAirSensorFailure=lgpCondId6276UnitBottomReturnAirSensorFailure, lgpCondId5273CompressorLowOilPressure=lgpCondId5273CompressorLowOilPressure, lgpCondId5924MixedModeLockout=lgpCondId5924MixedModeLockout, lgpCondId6294ChilledWaterFlowTransducerFailure=lgpCondId6294ChilledWaterFlowTransducerFailure, lgpCondId4442SystemBreakersOpenFailure=lgpCondId4442SystemBreakersOpenFailure, lgpCondId4324BatteryTestManuallyStopped=lgpCondId4324BatteryTestManuallyStopped, lgpCondId5105ExtLossofFlow=lgpCondId5105ExtLossofFlow, lgpCondId4215SystemOutputOff=lgpCondId4215SystemOutputOff, lgpCondId5296TeamworkReturnFluidTempSensorIssue=lgpCondId5296TeamworkReturnFluidTempSensorIssue, lgpCondId5774CompressorContactorIssue=lgpCondId5774CompressorContactorIssue, lgpCondId4523ModuleRemoved=lgpCondId4523ModuleRemoved, lgpCondId4438SystemInternalTemperatureRise=lgpCondId4438SystemInternalTemperatureRise, lgpCondId4504BranchRemoved=lgpCondId4504BranchRemoved, lgpCondId5655SFAReservedEvent14=lgpCondId5655SFAReservedEvent14, lgpCondId4703ChilledWaterControlValvePosition=lgpCondId4703ChilledWaterControlValvePosition, lgpCondId5272CompressorThermalOverload=lgpCondId5272CompressorThermalOverload, lgpCondId6365SFAReservedEvent29=lgpCondId6365SFAReservedEvent29, lgpCondId4140BypassRestartInhibitExternal=lgpCondId4140BypassRestartInhibitExternal, lgpCondId5622EEVDischargeTempAboveThreshold=lgpCondId5622EEVDischargeTempAboveThreshold, lgpCondId5569InputOvervoltage=lgpCondId5569InputOvervoltage, lgpCondId4237InverterInhibitExternal=lgpCondId4237InverterInhibitExternal, lgpCondId4282InputContact13=lgpCondId4282InputContact13, lgpCondId4993SupplyRefrigUnderTempBand2=lgpCondId4993SupplyRefrigUnderTempBand2, lgpCondId5315SubgroupEventOccurredDuringCommunicationLoss=lgpCondId5315SubgroupEventOccurredDuringCommunicationLoss, lgpCondId4678Compressor2BHighHeadPressure=lgpCondId4678Compressor2BHighHeadPressure, 
lgpCondId4626SupplyChilledWaterOverTemp=lgpCondId4626SupplyChilledWaterOverTemp, lgpCondId5967ModbusPowerMeterCommunicationLost=lgpCondId5967ModbusPowerMeterCommunicationLost, lgpCondId4234InverterOverloadPhaseA=lgpCondId4234InverterOverloadPhaseA, lgpCondId6080UPSCCommunicationFailure=lgpCondId6080UPSCCommunicationFailure, lgpCondId4180BatteryBreaker2OpenFailure=lgpCondId4180BatteryBreaker2OpenFailure, lgpCondId4505BranchDiscovered=lgpCondId4505BranchDiscovered, lgpCondId5453ModuleInStandbyIntelligentParalleling=lgpCondId5453ModuleInStandbyIntelligentParalleling, lgpCondId6455LeakSensorCableFault=lgpCondId6455LeakSensorCableFault, lgpCondId4295RectifierFailure=lgpCondId4295RectifierFailure, lgpCondId5567IPInhibit=lgpCondId5567IPInhibit, lgpCondId5784BranchLowVoltageLN=lgpCondId5784BranchLowVoltageLN, lgpCondId4618ExternalAirSensorAIssue=lgpCondId4618ExternalAirSensorAIssue, lgpCondId5644SFAReservedEvent3=lgpCondId5644SFAReservedEvent3, lgpCondId5659SFAReservedEvent18=lgpCondId5659SFAReservedEvent18, lgpCondId4283InputContact14=lgpCondId4283InputContact14, lgpCondId5293ReturnFluidOverTemp=lgpCondId5293ReturnFluidOverTemp, lgpCondId5056BottomFanIssue=lgpCondId5056BottomFanIssue, lgpCondId5881RegenerationActive=lgpCondId5881RegenerationActive, lgpCondId5306FreeCoolingValveHoursExceeded=lgpCondId5306FreeCoolingValveHoursExceeded, lgpCondId5039HumidifierUnderCurrent=lgpCondId5039HumidifierUnderCurrent, lgpCondId5335ReturnAirUnderTemperature=lgpCondId5335ReturnAirUnderTemperature, lgpCondId4910HighBatteryStringCurrent=lgpCondId4910HighBatteryStringCurrent, lgpCondId4190BatteryBreaker5CloseFailure=lgpCondId4190BatteryBreaker5CloseFailure, lgpCondId4174BatteryTemperatureSensorFault=lgpCondId4174BatteryTemperatureSensorFault, lgpCondId4163OutputOffEndofDischarge=lgpCondId4163OutputOffEndofDischarge, lgpCondId5377Condenser=lgpCondId5377Condenser, lgpCondId4919ExcessiveCelltoCellTemperatureDeviation=lgpCondId4919ExcessiveCelltoCellTemperatureDeviation, 
lgpCondId5361ExtFreeCoolingLockout=lgpCondId5361ExtFreeCoolingLockout, lgpCondId6381SFAReservedEvent45=lgpCondId6381SFAReservedEvent45, lgpCondId4146SystemInputPhsRotationError=lgpCondId4146SystemInputPhsRotationError, lgpCondId5310EvaporatorFluidFreezeAutoReset=lgpCondId5310EvaporatorFluidFreezeAutoReset, lgpCondId5975OutputBreakerOpen=lgpCondId5975OutputBreakerOpen, lgpCondId5914OverDifferentialPressure=lgpCondId5914OverDifferentialPressure, lgpCondId4165BatteryChargingReducedExtrnl=lgpCondId4165BatteryChargingReducedExtrnl, lgpCondId5646SFAReservedEvent5=lgpCondId5646SFAReservedEvent5, lgpCondId5104ExtOverTemperature=lgpCondId5104ExtOverTemperature, lgpCondId5906AirflowSensorIssue=lgpCondId5906AirflowSensorIssue, lgpCondId4170BatteryEqualize=lgpCondId4170BatteryEqualize, lgpCondId4182BatteryCircuitBreaker3Open=lgpCondId4182BatteryCircuitBreaker3Open, lgpCondId5594RemoteSensorAverageUnderTemperature=lgpCondId5594RemoteSensorAverageUnderTemperature, lgpCondId5911FluidTemperatureSensorIssue=lgpCondId5911FluidTemperatureSensorIssue, lgpCondId6369SFAReservedEvent33=lgpCondId6369SFAReservedEvent33)
# Auto-generated (presumably by a pysnmp MIB compiler — do not hand-edit): final visible
# export batch for LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB. Note this call also registers
# PYSNMP_MODULE_ID and the module identity object
# (liebertGlobalProductsFlexibleConditionsModule), which pysnmp uses to identify the
# compiled MIB module itself.
mibBuilder.exportSymbols("LIEBERT-GP-FLEXIBLE-CONDITIONS-MIB", lgpCondId5939BranchOverCurrentProtection=lgpCondId5939BranchOverCurrentProtection, lgpCondId5417DigitalOutputBoardNotDetected=lgpCondId5417DigitalOutputBoardNotDetected, lgpCondId4996Inverter1ShortCycle=lgpCondId4996Inverter1ShortCycle, lgpCondId6212FirmwareUpdateCompletedUnsuccessfully=lgpCondId6212FirmwareUpdateCompletedUnsuccessfully, lgpCondId6253BoosterFailure=lgpCondId6253BoosterFailure, lgpCondId5311EvaporatorFluidFreezeManualResetRequired=lgpCondId5311EvaporatorFluidFreezeManualResetRequired, PYSNMP_MODULE_ID=liebertGlobalProductsFlexibleConditionsModule, lgpCondId6278UnitMiddleFirstSupplyAirSensorFailure=lgpCondId6278UnitMiddleFirstSupplyAirSensorFailure, lgpCondId5654SFAReservedEvent13=lgpCondId5654SFAReservedEvent13, lgpCondId5536CondenserOutsideAirTempOutofOperatingRange=lgpCondId5536CondenserOutsideAirTempOutofOperatingRange, lgpCondId4742BatteryLowShutdown=lgpCondId4742BatteryLowShutdown, lgpCondId4759LeadingPowerFactor=lgpCondId4759LeadingPowerFactor, lgpCondId4469PDUOverCurrentL2=lgpCondId4469PDUOverCurrentL2, lgpCondId5573AmbientAirSensorIssue=lgpCondId5573AmbientAirSensorIssue, lgpCondId5579DewPointUnderTemperature=lgpCondId5579DewPointUnderTemperature, lgpCondId4325BackfeedBreakerOpen=lgpCondId4325BackfeedBreakerOpen, lgpCondId5151BatteryConverterFailure=lgpCondId5151BatteryConverterFailure, lgpCondId5067ExtCompressorLockout=lgpCondId5067ExtCompressorLockout, lgpCondId4300InternalCommunicationsFailure=lgpCondId4300InternalCommunicationsFailure, lgpCondId5212PanelSummaryStatus=lgpCondId5212PanelSummaryStatus, lgpCondId4749SystemFanFailureRedundant=lgpCondId4749SystemFanFailureRedundant, lgpCondId6219AutoCalibrationFailed=lgpCondId6219AutoCalibrationFailed, lgpCondId6059InverterRelayFault=lgpCondId6059InverterRelayFault, lgpCondId5871BatteryOverTemperatureLimit=lgpCondId5871BatteryOverTemperatureLimit, lgpCondId6367SFAReservedEvent31=lgpCondId6367SFAReservedEvent31, 
lgpCondId4640SupplyRefrigerantTempSensorIssue=lgpCondId4640SupplyRefrigerantTempSensorIssue, lgpCondId6441PowerModulePowerSupplyFailure=lgpCondId6441PowerModulePowerSupplyFailure, lgpCondId5970GroundFault=lgpCondId5970GroundFault, lgpCondId6297Modbus010VModuleCommunicationFailure=lgpCondId6297Modbus010VModuleCommunicationFailure, lgpCondId6383SFAReservedEvent47=lgpCondId6383SFAReservedEvent47, lgpCondId5545CondenserMaxFanSpeedOverride=lgpCondId5545CondenserMaxFanSpeedOverride, lgpCondId6220ModuleOutputBreakerOpen=lgpCondId6220ModuleOutputBreakerOpen, lgpCondId6439PowerModuleBalancerofDCBusFailure=lgpCondId6439PowerModuleBalancerofDCBusFailure, lgpCondId5514CompressorLowPressureTransducerIssue=lgpCondId5514CompressorLowPressureTransducerIssue, lgpCondId6353GeneralWarning=lgpCondId6353GeneralWarning, lgpCondId5629StaticPressureSensorIssue=lgpCondId5629StaticPressureSensorIssue, lgpCondId5365HotWaterHotGasValveHoursExceeded=lgpCondId5365HotWaterHotGasValveHoursExceeded, lgpCondId6440PowerModuleFuseFailure=lgpCondId6440PowerModuleFuseFailure, lgpCondId5495ExtFanIssue=lgpCondId5495ExtFanIssue, lgpCondId5271CompressorLowSuctionPressure=lgpCondId5271CompressorLowSuctionPressure, lgpCondId5565FanContactorOpenFail=lgpCondId5565FanContactorOpenFail, lgpCondId5880RectifierConfigurationChangeRequest=lgpCondId5880RectifierConfigurationChangeRequest, lgpCondId5664SFAReservedEvent23=lgpCondId5664SFAReservedEvent23, lgpCondId4468PDUOverCurrentL1=lgpCondId4468PDUOverCurrentL1, lgpCondId5440BatteryStringOffline=lgpCondId5440BatteryStringOffline, lgpCondId5110UnitOff=lgpCondId5110UnitOff, lgpCondId5776PDULowVoltageLN=lgpCondId5776PDULowVoltageLN, lgpCondId4648SupplyFluidUnderTemp=lgpCondId4648SupplyFluidUnderTemp, lgpCondId5848BatteryModuleOverTemperature=lgpCondId5848BatteryModuleOverTemperature, lgpCondId4187BatteryBreaker4CloseFailure=lgpCondId4187BatteryBreaker4CloseFailure, lgpCondId5656SFAReservedEvent15=lgpCondId5656SFAReservedEvent15, 
lgpCondId4589UnderRelativeHumidity=lgpCondId4589UnderRelativeHumidity, lgpCondId6052SystemOutputBreakerOpen=lgpCondId6052SystemOutputBreakerOpen, lgpCondId5978RectifierIsolationBreakerRFBOpen=lgpCondId5978RectifierIsolationBreakerRFBOpen, lgpCondId4315OnGenerator=lgpCondId4315OnGenerator, lgpCondId5642SFAReservedEvent1=lgpCondId5642SFAReservedEvent1, lgpCondId6100CondenserRemoteShutdown=lgpCondId6100CondenserRemoteShutdown, lgpCondId6205WaterLeakDetected=lgpCondId6205WaterLeakDetected, lgpCondId5117MaintenanceCompleted=lgpCondId5117MaintenanceCompleted, lgpCondId4195BatteryBreaker7OpenFailure=lgpCondId4195BatteryBreaker7OpenFailure, lgpCondId5908ExtPowerSourceAFailure=lgpCondId5908ExtPowerSourceAFailure, lgpCondId5217PanelGroundOvercurrent=lgpCondId5217PanelGroundOvercurrent, lgpCondId5559EvaporatorReturnFluidOverTemp=lgpCondId5559EvaporatorReturnFluidOverTemp, lgpCondId4274InputContact05=lgpCondId4274InputContact05, lgpCondId5902ReturnHumiditySensorIssue=lgpCondId5902ReturnHumiditySensorIssue, lgpCondId4185BatteryCircuitBreaker4Open=lgpCondId4185BatteryCircuitBreaker4Open, lgpCondId4285InputContact16=lgpCondId4285InputContact16, lgpCondId5651SFAReservedEvent10=lgpCondId5651SFAReservedEvent10, lgpCondId5891ProgramInputContact08=lgpCondId5891ProgramInputContact08, lgpCondId5630HighStaticPressure=lgpCondId5630HighStaticPressure, lgpCondId4909HighOverallVoltage=lgpCondId4909HighOverallVoltage, lgpCondId5295ReturnFluidTempSensorIssue=lgpCondId5295ReturnFluidTempSensorIssue, lgpCondId4831MMSOverload=lgpCondId4831MMSOverload, lgpCondId4287OutputAmpOverUserLimitPhsB=lgpCondId4287OutputAmpOverUserLimitPhsB, lgpCondId5561LBSActiveMaster=lgpCondId5561LBSActiveMaster, lgpCondId5351ExtAirSensorALowHumidity=lgpCondId5351ExtAirSensorALowHumidity, lgpCondId4132BypassOverloadPhaseA=lgpCondId4132BypassOverloadPhaseA, lgpCondId6378SFAReservedEvent42=lgpCondId6378SFAReservedEvent42, lgpCondId6454DCtoDCConverterFault=lgpCondId6454DCtoDCConverterFault, 
lgpCondId4288OutputAmpOverUserLimitPhsC=lgpCondId4288OutputAmpOverUserLimitPhsC, lgpCondId4169BatteryTemperatureImbalance=lgpCondId4169BatteryTemperatureImbalance, lgpCondId6349SystemOffasrequested=lgpCondId6349SystemOffasrequested, lgpCondId5471DoorOpen=lgpCondId5471DoorOpen, lgpCondId5121HighPowerShutdown=lgpCondId5121HighPowerShutdown, lgpCondId4755InputFilterCycleLock=lgpCondId4755InputFilterCycleLock, lgpCondId4920ExcessiveCelltoAmbientTemperatureDeviation=lgpCondId4920ExcessiveCelltoAmbientTemperatureDeviation, lgpCondId5106ExtCondenserPumpHighWater=lgpCondId5106ExtCondenserPumpHighWater, lgpCondId4191BatteryCircuitBreaker6Open=lgpCondId4191BatteryCircuitBreaker6Open, lgpCondId6227RemoteSensorAverageLowHumidity=lgpCondId6227RemoteSensorAverageLowHumidity, lgpCondId6274UnitTopReturnAirSensorFailure=lgpCondId6274UnitTopReturnAirSensorFailure, lgpCondId4392TemperatureSensorError=lgpCondId4392TemperatureSensorError, lgpCondId6186InputFrequencyDeviation=lgpCondId6186InputFrequencyDeviation, lgpCondId6225RemoteSensorLowHumidity=lgpCondId6225RemoteSensorLowHumidity, lgpCondId4966LowCellTemperature=lgpCondId4966LowCellTemperature, lgpCondId5148CompressorHighPressureTransducerIssue=lgpCondId5148CompressorHighPressureTransducerIssue, lgpCondId4137BypassAutoRetransferPrimed=lgpCondId4137BypassAutoRetransferPrimed, lgpCondId4497BasicDisplayModuleDiscovered=lgpCondId4497BasicDisplayModuleDiscovered, lgpCondId4611ExtAirSensorBUnderTemperature=lgpCondId4611ExtAirSensorBUnderTemperature, lgpCondId5247SubfeedGroundOvercurrent=lgpCondId5247SubfeedGroundOvercurrent, lgpCondId5179OutputUndervoltage=lgpCondId5179OutputUndervoltage, lgpCondId4629SupplyChilledWaterTempSensorIssue=lgpCondId4629SupplyChilledWaterTempSensorIssue, lgpCondId4133BypassOverloadPhaseB=lgpCondId4133BypassOverloadPhaseB, lgpCondId5146CompressorPumpDownIssue=lgpCondId5146CompressorPumpDownIssue, lgpCondId5213PanelOvervoltage=lgpCondId5213PanelOvervoltage, 
lgpCondId4389SystemOutputFault=lgpCondId4389SystemOutputFault, lgpCondId5912FluidFlowSensorIssue=lgpCondId5912FluidFlowSensorIssue, lgpCondId4220BatteryExternalMonitor1=lgpCondId4220BatteryExternalMonitor1, lgpCondId4289InverterTransferInhibitExt=lgpCondId4289InverterTransferInhibitExt, lgpCondId6376SFAReservedEvent40=lgpCondId6376SFAReservedEvent40, lgpCondId4693Tandem1LowSuctionPressure=lgpCondId4693Tandem1LowSuctionPressure, lgpCondId5145MMSModuleAlarmActive=lgpCondId5145MMSModuleAlarmActive, lgpCondId5277CondenserFanIssue=lgpCondId5277CondenserFanIssue, lgpCondId4696Tandem2LowSuctionPressure=lgpCondId4696Tandem2LowSuctionPressure, lgpCondId4166BatteryCapacityLow=lgpCondId4166BatteryCapacityLow, lgpCondId5773CompressorCapacityNormal=lgpCondId5773CompressorCapacityNormal, lgpCondId6221NeutralVoltageFault=lgpCondId6221NeutralVoltageFault, lgpCondId6315ChilledWaterFlowMeterSensorFailure=lgpCondId6315ChilledWaterFlowMeterSensorFailure, lgpCondId5771MMSOverCapacity=lgpCondId5771MMSOverCapacity, lgpCondId4271InputContact02=lgpCondId4271InputContact02, lgpCondId5362FreeCoolingTempSensorIssue=lgpCondId5362FreeCoolingTempSensorIssue, lgpCondId5888ProgramInputContact05=lgpCondId5888ProgramInputContact05, lgpCondId4509ReceptacleLoadAdded=lgpCondId4509ReceptacleLoadAdded, lgpCondId5278LowCondenserRefrigerantPressure=lgpCondId5278LowCondenserRefrigerantPressure, lgpCondId5178OutputOvervoltage=lgpCondId5178OutputOvervoltage, lgpCondId5909ExtPowerSourceBFailure=lgpCondId5909ExtPowerSourceBFailure, lgpCondId4162BatteryLow=lgpCondId4162BatteryLow, lgpCondId5152InverterSCROpen=lgpCondId5152InverterSCROpen, lgpCondId5612ThermalRunawayChargerCurrentLevelTwoEvent=lgpCondId5612ThermalRunawayChargerCurrentLevelTwoEvent, lgpCondId6011EquipmentOverTempWarning=lgpCondId6011EquipmentOverTempWarning, lgpCondId5577ExtDewPointUnderTemperature=lgpCondId5577ExtDewPointUnderTemperature, lgpCondId5621EEVSuperheatBelowThreshold=lgpCondId5621EEVSuperheatBelowThreshold, 
lgpCondId6306ColdAisleTemperatureHumidityTeamSensorFailure=lgpCondId6306ColdAisleTemperatureHumidityTeamSensorFailure, lgpCondId4177BatteryBreaker1OpenFailure=lgpCondId4177BatteryBreaker1OpenFailure, lgpCondId6299RackDoorsOpen=lgpCondId6299RackDoorsOpen, lgpCondId4176BatteryCircuitBreaker1Open=lgpCondId4176BatteryCircuitBreaker1Open, lgpCondId4615ExtDewPointOverTemperature=lgpCondId4615ExtDewPointOverTemperature, lgpCondId5041HumidifierLowWater=lgpCondId5041HumidifierLowWater, lgpCondId4421BranchFailure=lgpCondId4421BranchFailure, lgpCondId5842ChargerModuleFanFault=lgpCondId5842ChargerModuleFanFault, lgpCondId6092Compressor1BThermalOverload=lgpCondId6092Compressor1BThermalOverload, lgpCondId5895ProgramInputContact12=lgpCondId5895ProgramInputContact12, lgpCondId5044ExtHumidifierLockout=lgpCondId5044ExtHumidifierLockout, lgpCondId6237PowerSourceBIssue=lgpCondId6237PowerSourceBIssue, lgpCondId5883RegenerationOperationFailure=lgpCondId5883RegenerationOperationFailure, lgpCondId5894ProgramInputContact11=lgpCondId5894ProgramInputContact11, lgpCondId4231OutputCurrentExceedsThreshold=lgpCondId4231OutputCurrentExceedsThreshold, lgpCondId4122SystemInputPowerProblem=lgpCondId4122SystemInputPowerProblem, lgpCondId5968InverterDesaturation=lgpCondId5968InverterDesaturation, lgpCondId6236PowerSourceAIssue=lgpCondId6236PowerSourceAIssue, lgpCondId4309DCBusGroundFaultNegative=lgpCondId4309DCBusGroundFaultNegative, lgpCondId5857BatteryModuleWarning=lgpCondId5857BatteryModuleWarning, lgpCondId5873UnexpectedMainBatteryDisconnectClosure=lgpCondId5873UnexpectedMainBatteryDisconnectClosure, lgpCondId6106ExternalCondenserVFDIssue=lgpCondId6106ExternalCondenserVFDIssue, lgpCondId5185TransformerOvertemperature=lgpCondId5185TransformerOvertemperature, lgpCondId5065DigScrollComp1OverTemp=lgpCondId5065DigScrollComp1OverTemp, lgpCondId6348SystemOutputoffasrequested=lgpCondId6348SystemOutputoffasrequested, lgpCondId5884ProgramInputContact01=lgpCondId5884ProgramInputContact01, 
lgpCondId4407BranchUnderCurrent=lgpCondId4407BranchUnderCurrent, lgpCondId5807OutputOffPending=lgpCondId5807OutputOffPending, lgpCondId5355DigScrollCompOverTemp=lgpCondId5355DigScrollCompOverTemp, lgpCondId6216PrechargeCircuitFailed=lgpCondId6216PrechargeCircuitFailed, lgpCondId4135BypassNotAvailable=lgpCondId4135BypassNotAvailable, lgpCondId5055TopFanIssue=lgpCondId5055TopFanIssue, lgpCondId5785BranchLowVoltageLL=lgpCondId5785BranchLowVoltageLL, lgpCondId4144BypassExcessivePulseParallel=lgpCondId4144BypassExcessivePulseParallel, lgpCondId4968LowInternalResistance=lgpCondId4968LowInternalResistance, lgpCondId6119Slotsnotavailable=lgpCondId6119Slotsnotavailable, lgpCondId5631LowStaticPressure=lgpCondId5631LowStaticPressure, lgpCondId4915MaximumDischargeTimeExceeded=lgpCondId4915MaximumDischargeTimeExceeded, lgpCondId5216PanelNeutralOvercurrent=lgpCondId5216PanelNeutralOvercurrent, lgpCondId5958BatteryTemperatureOutofRange=lgpCondId5958BatteryTemperatureOutofRange, lgpCondId5985BypassStaticSwitchBPSSOn=lgpCondId5985BypassStaticSwitchBPSSOn, lgpCondId6357SystemRestartPending=lgpCondId6357SystemRestartPending, lgpCondId5308EvaporatorInletTempSensorIssue=lgpCondId5308EvaporatorInletTempSensorIssue, lgpCondId5865NoLoadWarning=lgpCondId5865NoLoadWarning, lgpCondId4740BatteryAutomaticTestInhibited=lgpCondId4740BatteryAutomaticTestInhibited, lgpCondId4341VelocityAuthenticationFailure=lgpCondId4341VelocityAuthenticationFailure, lgpCondId4299OutputLoadonMaintBypass=lgpCondId4299OutputLoadonMaintBypass, lgpCondId6230RemoteSensorSystemAverageHighHumidity=lgpCondId6230RemoteSensorSystemAverageHighHumidity, lgpCondId4279InputContact10=lgpCondId4279InputContact10, lgpCondId6352AutonomyCalibration=lgpCondId6352AutonomyCalibration, liebertGlobalProductsFlexibleConditionsModule=liebertGlobalProductsFlexibleConditionsModule, lgpCondId5649SFAReservedEvent8=lgpCondId5649SFAReservedEvent8, lgpCondId5780PDULowVoltageL3L1=lgpCondId5780PDULowVoltageL3L1, 
lgpCondId5890ProgramInputContact07=lgpCondId5890ProgramInputContact07, lgpCondId5788ContTieActive=lgpCondId5788ContTieActive, lgpCondId4830MMSLossofSyncPulse=lgpCondId4830MMSLossofSyncPulse, lgpCondId4758LBSInhibited=lgpCondId4758LBSInhibited, lgpCondId5982BypassBreakerSBBOpen=lgpCondId5982BypassBreakerSBBOpen, lgpCondId4310EquipmentOverTemperature=lgpCondId4310EquipmentOverTemperature, lgpCondId4493ReceptaclePowerStateOff=lgpCondId4493ReceptaclePowerStateOff, lgpCondId6063BatteryConverterCurrentLimit=lgpCondId6063BatteryConverterCurrentLimit, lgpCondId5215PanelOvercurrent=lgpCondId5215PanelOvercurrent, lgpCondId4181BatteryBreaker2CloseFailure=lgpCondId4181BatteryBreaker2CloseFailure, lgpCondId6310ChilledWaterInletTemperatureControlActive=lgpCondId6310ChilledWaterInletTemperatureControlActive, lgpCondId4199BatteryBreaker8CloseFailure=lgpCondId4199BatteryBreaker8CloseFailure, lgpCondId4675Compressor2AHighHeadPressure=lgpCondId4675Compressor2AHighHeadPressure, lgpCondId6364SFAReservedEvent28=lgpCondId6364SFAReservedEvent28, lgpCondId5419UnitCommunicationLost=lgpCondId5419UnitCommunicationLost, lgpCondId6218AutoCalibrationActive=lgpCondId6218AutoCalibrationActive, lgpCondId5524PDUNeutralOverCurrent=lgpCondId5524PDUNeutralOverCurrent, lgpCondId4281InputContact12=lgpCondId4281InputContact12, lgpCondId4550FirmwareUpdateRequired=lgpCondId4550FirmwareUpdateRequired, lgpCondId4171BatteryManualTestInProgress=lgpCondId4171BatteryManualTestInProgress, lgpCondId5658SFAReservedEvent17=lgpCondId5658SFAReservedEvent17, lgpCondId4275InputContact06=lgpCondId4275InputContact06, lgpCondId5270CompressorHighHeadPressure=lgpCondId5270CompressorHighHeadPressure, lgpCondId6182ReplaceBattery=lgpCondId6182ReplaceBattery, lgpCondId4436PDUOverCurrent=lgpCondId4436PDUOverCurrent, lgpCondId4391InverterStaticSwitchSCRShort=lgpCondId4391InverterStaticSwitchSCRShort, lgpCondId5977BatteryBreakerOpen=lgpCondId5977BatteryBreakerOpen, 
lgpCondId5442DischargeLowCellVoltage=lgpCondId5442DischargeLowCellVoltage, lgpCondId4313PasswordChanged=lgpCondId4313PasswordChanged, lgpCondId4214SystemShutdownREPO=lgpCondId4214SystemShutdownREPO, lgpCondId5500ExtRemoteShutdown=lgpCondId5500ExtRemoteShutdown, lgpCondId5181NeutralOvercurrent=lgpCondId5181NeutralOvercurrent, lgpCondId6275UnitMiddleReturnAirSensorFailure=lgpCondId6275UnitMiddleReturnAirSensorFailure, lgpCondId5026SupplyAirSensorIssue=lgpCondId5026SupplyAirSensorIssue, lgpCondId4823ParallelCommWarning=lgpCondId4823ParallelCommWarning, lgpCondId5878MainBatteryDisconnectForcedToUnlock=lgpCondId5878MainBatteryDisconnectForcedToUnlock)
| 129.017639
| 18,904
| 0.818233
|
4a0e6b36b4823312027c99ce9d03c8b9c8956d6c
| 4,130
|
py
|
Python
|
train.py
|
BaoLocPham/hum2song
|
706b7fdf838944e2aabe0ae331c0867cb67f6fbc
|
[
"MIT"
] | null | null | null |
train.py
|
BaoLocPham/hum2song
|
706b7fdf838944e2aabe0ae331c0867cb67f6fbc
|
[
"MIT"
] | null | null | null |
train.py
|
BaoLocPham/hum2song
|
706b7fdf838944e2aabe0ae331c0867cb67f6fbc
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from data.dataset import Dataset
from torch.utils import data
from models.focal_loss import FocalLoss
from models.metrics import *
from utils.visualizer import Visualizer
import time
from config.config import Config
from torch.nn import DataParallel
from torch.optim.lr_scheduler import StepLR
from val import *
from models import model
torch.manual_seed(3407)
def save_model(model, save_path, name, iter_cnt):
    """Serialize ``model.state_dict()`` to ``<save_path>/<name>_<iter_cnt>.pth``.

    Creates ``save_path`` if it does not exist yet.

    Args:
        model: the torch module whose parameters are saved.
        save_path: directory that receives the checkpoint.
        name: base name of the checkpoint file (e.g. the backbone name).
        iter_cnt: iteration tag appended to the file name (int or str).

    Returns:
        The absolute path of the written checkpoint file.
    """
    os.makedirs(save_path, exist_ok=True)
    target = os.path.join(save_path, "{}_{}.pth".format(name, iter_cnt))
    torch.save(model.state_dict(), target)
    return target
if __name__ == '__main__':
    # Entry point: train an embedding backbone with a margin-based
    # classification head on the hum2song data and track MRR on a
    # validation split.
    opt = Config()
    if opt.display:
        visualizer = Visualizer()  # optional training visualisation
    device = torch.device("cuda")  # assumes a CUDA device is available
    train_dataset = Dataset(opt.train_root, opt.train_list, phase='train', input_shape=opt.input_shape,
                            mp3aug_ratio=opt.mp3aug_ratio, npy_aug=opt.npy_aug)
    trainloader = data.DataLoader(train_dataset,
                                  batch_size=opt.train_batch_size,
                                  shuffle=True,
                                  num_workers=opt.num_workers)
    # NOTE(review): val_path is never used below -- the validation list is
    # re-read via read_val() at checkpoint time; confirm this is intentional.
    val_path = os.path.join(opt.train_root, opt.val_list)
    print('{} train iters per epoch:'.format(len(trainloader)))
    # Loss selection: focal loss down-weights easy examples; fall back to
    # plain cross entropy otherwise.
    if opt.loss == 'focal_loss':
        criterion = FocalLoss(gamma=2)
    else:
        criterion = torch.nn.CrossEntropyLoss()
    # NOTE: rebinds the imported `model` module name to the network instance.
    model = model.get_model(config=opt)
    # Metric head: maps 512-d embeddings to class logits, optionally with an
    # additive/angular margin (ArcFace-style) to sharpen the embedding space.
    if opt.metric == 'add_margin':
        metric_fc = AddMarginProduct(512, opt.num_classes, s=30, m=0.35)
    elif opt.metric == 'arc_margin':
        metric_fc = ArcMarginProduct(512, opt.num_classes, s=30, m=0.5, easy_margin=opt.easy_margin)
    elif opt.metric == 'sphere':
        metric_fc = SphereProduct(512, opt.num_classes, m=4)
    else:
        metric_fc = nn.Linear(512, opt.num_classes)
    model.to(device)
    model = DataParallel(model)
    metric_fc.to(device)
    metric_fc = DataParallel(metric_fc)
    # Backbone and metric head are optimised jointly with one optimizer.
    if opt.optimizer == 'sgd':
        optimizer = torch.optim.SGD([{'params': model.parameters()}, {'params': metric_fc.parameters()}],
                                    lr=opt.lr, weight_decay=opt.weight_decay)
    else:
        optimizer = torch.optim.Adam([{'params': model.parameters()}, {'params': metric_fc.parameters()}],
                                     lr=opt.lr, weight_decay=opt.weight_decay)
    scheduler = StepLR(optimizer, step_size=opt.lr_step, gamma=0.1)
    start = time.time()
    mrr_best = 0  # best validation MRR observed so far
    for i in range(1, opt.max_epoch + 1):
        # NOTE(review): scheduler.step() is called at the start of the epoch;
        # PyTorch >= 1.1 recommends stepping after optimizer.step() -- confirm
        # the intended LR schedule.
        scheduler.step()
        model.train()
        # NOTE: the loop variable shadows the imported `torch.utils.data`
        # module name inside this loop body.
        for ii, data in enumerate(trainloader):
            data_input, label = data
            data_input = data_input.to(device)
            label = label.to(device).long()
            feature = model(data_input)          # embedding vector
            output = metric_fc(feature, label)   # (margin-adjusted) class logits
            loss = criterion(output, label)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            iters = i * len(trainloader) + ii
            if iters % opt.print_freq == 0:
                # Report batch accuracy and iteration speed since last report.
                output = output.data.cpu().numpy()
                output = np.argmax(output, axis=1)
                label = label.data.cpu().numpy()
                acc = np.mean((output == label).astype(int))
                speed = opt.print_freq / (time.time() - start)
                time_str = time.asctime(time.localtime(time.time()))
                print('{} train epoch {} iter {} {} iters/s loss {} acc {}'.format(time_str, i, ii, speed, loss.item(), acc))
                start = time.time()
        if i % opt.save_interval == 0 or i == opt.max_epoch:
            # Checkpoint + validation: always refresh 'latest', and keep a
            # separate 'best' snapshot whenever validation MRR improves.
            print('calculating mrr .....')
            save_model(model, opt.checkpoints_path, opt.backbone, 'latest')
            model.eval()
            data_val = read_val(opt.val_list, opt.train_root)
            mrr = mrr_score(model, data_val, opt.input_shape)
            print(f'epoch {i}: MRR= {mrr}')
            if mrr > mrr_best:
                mrr_best = mrr
                save_model(model, opt.checkpoints_path, opt.backbone, 'best')
| 37.889908
| 125
| 0.602179
|
4a0e6b63e5738fc90ac76b998a2d1e69f00bd0d0
| 1,399
|
py
|
Python
|
deletetweets/removefollows.py
|
vitorpy/delete-tweets
|
f8117d47c60f66a899b235d917007e7014ee8a08
|
[
"0BSD"
] | null | null | null |
deletetweets/removefollows.py
|
vitorpy/delete-tweets
|
f8117d47c60f66a899b235d917007e7014ee8a08
|
[
"0BSD"
] | null | null | null |
deletetweets/removefollows.py
|
vitorpy/delete-tweets
|
f8117d47c60f66a899b235d917007e7014ee8a08
|
[
"0BSD"
] | null | null | null |
import io
import os
import sys
import json
import twitter
from datetime import datetime
from dateutil import parser
class TweetUnfollower(object):
    """Removes follows and mutes through a python-twitter API client."""

    def __init__(self, twitter_api, dry_run=False):
        # The API client and the dry-run flag are the only state held.
        self.twitter_api = twitter_api
        self.dry_run = dry_run

    def destroy(self, all, tweet_id):
        """Unmute (and, unless *all* is truthy, also unfollow) one user.

        In dry-run mode only the lookup and the log line happen; nothing
        is actually destroyed. API errors are reported, not raised.
        """
        try:
            user = self.twitter_api.GetUser(user_id=tweet_id)
            print("delete friend %s" % user.screen_name)
            if self.dry_run:
                return
            if not all:
                self.twitter_api.DestroyFriendship(user_id=tweet_id)
            self.twitter_api.DestroyMute(user_id=tweet_id)
        except twitter.TwitterError as err:
            print("Exception: %s\n" % err.message)
def delete(all, dry_run=False):
    """Unmute (and optionally unfollow) Twitter accounts.

    When *all* is falsy, only accounts that are both muted AND currently
    followed are processed; when truthy, every muted account is processed.
    Credentials are read from the TWITTER_* environment variables.
    """
    api = twitter.Api(consumer_key=os.environ["TWITTER_CONSUMER_KEY"],
                      consumer_secret=os.environ["TWITTER_CONSUMER_SECRET"],
                      access_token_key=os.environ["TWITTER_ACCESS_TOKEN"],
                      access_token_secret=os.environ["TWITTER_ACCESS_TOKEN_SECRET"],
                      sleep_on_rate_limit=True)  # back off instead of raising on rate limits
    destroyer = TweetUnfollower(api, dry_run)
    mutes = set(api.GetMutesIDs())
    friends = set(api.GetFriendIDs())
    target = mutes
    if not all:
        # restrict to accounts that are muted and still followed
        target = mutes.intersection(friends)
    for id in target:
        destroyer.destroy(all, id)
    # NOTE(review): sys.exit() terminates the whole interpreter after the
    # loop -- presumably intentional for CLI use; verify callers expect it.
    sys.exit()
| 30.413043
| 84
| 0.639743
|
4a0e6be0911ea0caa75263fd0b9a11fc9780e97a
| 433
|
py
|
Python
|
ex072.py
|
JuuzouMarques/CursoEmVideo-Python
|
8fdeae09f69d4047b6532a4759af115beedb8545
|
[
"MIT"
] | null | null | null |
ex072.py
|
JuuzouMarques/CursoEmVideo-Python
|
8fdeae09f69d4047b6532a4759af115beedb8545
|
[
"MIT"
] | null | null | null |
ex072.py
|
JuuzouMarques/CursoEmVideo-Python
|
8fdeae09f69d4047b6532a4759af115beedb8545
|
[
"MIT"
] | null | null | null |
# Read an integer in [0, 20] and print its name in Portuguese.
nomes = ('zero', 'um', 'dois', 'tres', 'quatro', 'cinco',
         'seis', 'sete', 'oito', 'nove', 'dez',
         'onze', 'doze', 'treze', 'quatorze', 'quinze',
         'dezesseis', 'dezessete', 'dezoito', 'dezenove', 'vinte')
num = int(input('Digite um número entre 0 e 20: '))
while not (0 <= num <= 20):
    # Out of range: complain on the same line and re-prompt.
    print('Tente Novamente.', end=' ')
    num = int(input('Digite um número entre 0 e 20: '))
print(f'Você digitou o número {nomes[num]}')
| 36.083333
| 69
| 0.526559
|
4a0e6c2f0eea35bce47a1b6c86ea76be63a794b8
| 7,764
|
py
|
Python
|
tests/devices/test_open_channel.py
|
timgates42/python-library
|
453ebd674513e934420dde2e35ac99fd2308be82
|
[
"Apache-2.0"
] | 26
|
2015-01-05T21:08:07.000Z
|
2021-05-13T07:27:19.000Z
|
tests/devices/test_open_channel.py
|
timgates42/python-library
|
453ebd674513e934420dde2e35ac99fd2308be82
|
[
"Apache-2.0"
] | 32
|
2015-01-08T23:46:36.000Z
|
2022-02-02T18:17:58.000Z
|
tests/devices/test_open_channel.py
|
timgates42/python-library
|
453ebd674513e934420dde2e35ac99fd2308be82
|
[
"Apache-2.0"
] | 33
|
2015-01-21T08:02:40.000Z
|
2022-03-25T06:02:04.000Z
|
import datetime
import json
import mock
import unittest
import requests
import urbanairship as ua
from tests import TEST_KEY, TEST_SECRET
class TestOpenChannel(unittest.TestCase):
    """Unit tests for ``ua.OpenChannel`` create/lookup/update/uninstall.

    Every test stubs ``ua.Airship._request`` with a canned
    ``requests.Response`` so no network traffic occurs.  ``Response._content``
    must be assigned *bytes* (the type ``Response.content`` exposes), hence
    the ``.encode("utf-8")`` calls.
    """

    def test_create_channel(self):
        """create() stores the channel_id returned by the API."""
        channel_id = "37b4f6e9-8e50-4400-8246-bdfcbf7ed3be"
        address = "some_address"
        platform = "a_platform"
        identifiers = {
            "com.example.external_id": "df6a6b50-9843-7894-1235-12aed4489489",
            "another_example_identifier": "some_hash",
        }
        with mock.patch.object(ua.Airship, "_request") as mock_request:
            response = requests.Response()
            response._content = json.dumps({"channel_id": channel_id}).encode("utf-8")
            response.status_code = 200
            mock_request.return_value = response
            airship = ua.Airship(TEST_KEY, TEST_SECRET)
            channel = ua.OpenChannel(airship)
            channel.address = address
            channel.open_platform = platform
            channel.opt_in = True
            channel.identifiers = identifiers
            channel.create()
            self.assertEqual(channel.channel_id, channel_id)

    def test_create_channel_with_tags(self):
        """create() also succeeds when tags are set on the channel."""
        channel_id = "37b4f6e9-8e50-4400-8246-bdfcbf7ed3be"
        address = "some_address"
        platform = "a_platform"
        with mock.patch.object(ua.Airship, "_request") as mock_request:
            response = requests.Response()
            response._content = json.dumps({"channel_id": channel_id}).encode("utf-8")
            response.status_code = 200
            mock_request.return_value = response
            airship = ua.Airship(TEST_KEY, TEST_SECRET)
            channel = ua.OpenChannel(airship)
            channel.address = address
            channel.open_platform = platform
            channel.opt_in = True
            channel.tags = ["a_tag"]
            channel.create()
            self.assertEqual(channel.channel_id, channel_id)

    def test_create_channel_requires_platform(self):
        """create() raises ValueError when open_platform is missing."""
        address = "some_address"
        airship = ua.Airship(TEST_KEY, TEST_SECRET)
        channel = ua.OpenChannel(airship)
        # Do not set platform
        channel.address = address
        channel.opt_in = True
        self.assertRaises(ValueError, channel.create)

    def test_create_channel_requires_address(self):
        """create() raises ValueError when address is missing."""
        platform = "a_platform"
        airship = ua.Airship(TEST_KEY, TEST_SECRET)
        channel = ua.OpenChannel(airship)
        # Do not set address
        channel.open_platform = platform
        channel.opt_in = True
        self.assertRaises(ValueError, channel.create)

    def test_create_channel_requires_opt_in(self):
        """create() raises ValueError when opt_in is missing."""
        address = "some_address"
        platform = "a_platform"
        airship = ua.Airship(TEST_KEY, TEST_SECRET)
        channel = ua.OpenChannel(airship)
        # Do not set opt in
        channel.address = address
        channel.open_platform = platform
        self.assertRaises(ValueError, channel.create)

    def test_open_channel_lookup(self):
        """lookup() parses every documented field of the channel payload."""
        with mock.patch.object(ua.Airship, "_request") as mock_request:
            response = requests.Response()
            response._content = json.dumps(
                {
                    "ok": "true",
                    "channel": {
                        "channel_id": "b8f9b663-0a3b-cf45-587a-be880946e881",
                        "device_type": "open",
                        "installed": "true",
                        "named_user_id": "john_doe_123",
                        "tags": ["tag_a", "tag_b"],
                        "tag_groups": {
                            "timezone": ["America/Los_Angeles"],
                            "locale_country": ["US"],
                            "locale_language": ["en"],
                            "tag_group_1": ["tag1", "tag2"],
                            "tag_group_2": ["tag1", "tag2"],
                        },
                        "created": "2017-08-08T20:41:06",
                        "address": "example@example.com",
                        "opt_in": "true",
                        "open": {
                            "open_platform_name": "email",
                            "identifiers": {
                                "com.example.external_id": "df6a6b50-9843-7894-1235-12aed4489489",
                                "another_example_identifier": "some_hash",
                            },
                        },
                        "last_registration": "2017-09-01T18:00:27",
                    },
                }
            ).encode("utf-8")
            response.status_code = 200
            mock_request.return_value = response
            airship = ua.Airship(TEST_KEY, TEST_SECRET)
            channel_id = "b8f9b663-0a3b-cf45-587a-be880946e881"
            open_channel_lookup = ua.OpenChannel(airship).lookup(channel_id)
            # Timestamps must be parsed into naive datetime objects.
            date_created = datetime.datetime.strptime(
                "2017-08-08T20:41:06", "%Y-%m-%dT%H:%M:%S"
            )
            date_last_registration = datetime.datetime.strptime(
                "2017-09-01T18:00:27", "%Y-%m-%dT%H:%M:%S"
            )
            self.assertEqual(open_channel_lookup.channel_id, channel_id)
            self.assertEqual(open_channel_lookup.device_type, "open")
            self.assertEqual(open_channel_lookup.installed, "true")
            self.assertEqual(open_channel_lookup.opt_in, "true")
            self.assertEqual(open_channel_lookup.named_user_id, "john_doe_123")
            self.assertEqual(open_channel_lookup.created, date_created)
            self.assertEqual(open_channel_lookup.open_platform, "email")
            self.assertEqual(
                open_channel_lookup.last_registration, date_last_registration
            )
            self.assertEqual(open_channel_lookup.address, "example@example.com")
            self.assertListEqual(open_channel_lookup.tags, ["tag_a", "tag_b"])
            self.assertDictEqual(
                open_channel_lookup.identifiers,
                {
                    "com.example.external_id": "df6a6b50-9843-7894-1235-12aed4489489",
                    "another_example_identifier": "some_hash",
                },
            )

    def test_open_channel_update(self):
        """update() succeeds and leaves channel_id unchanged."""
        channel_id = "b8f9b663-0a3b-cf45-587a-be880946e881"
        with mock.patch.object(ua.Airship, "_request") as mock_request:
            response = requests.Response()
            response._content = json.dumps(
                {"ok": True, "channel_id": channel_id}
            ).encode("utf-8")
            response.status_code = 200
            mock_request.return_value = response
            airship = ua.Airship(TEST_KEY, TEST_SECRET)
            channel_to_update = ua.OpenChannel(airship)
            channel_to_update.channel_id = channel_id
            channel_to_update.open_platform = "email"
            channel_to_update.tags = ["a_new_tag"]
            channel_to_update.opt_in = True
            channel_to_update.address = "example@example.com"
            channel_to_update.update()
            self.assertEqual(channel_to_update.channel_id, channel_id)

    def test_open_channel_uninstall(self):
        """uninstall() returns the raw response whose body decodes to ok=True."""
        with mock.patch.object(ua.Airship, "_request") as mock_request:
            response = requests.Response()
            # BUG FIX: Response._content must hold bytes (as in every other
            # test here); json.dumps returns str, so encode it.
            response._content = json.dumps({"ok": True}).encode("utf-8")
            response.status_code = 200
            mock_request.return_value = response
            airship = ua.Airship(TEST_KEY, TEST_SECRET)
            channel = ua.OpenChannel(airship)
            channel.address = "new_email@example.com"
            channel.open_platform = "email"
            un_res = json.loads(channel.uninstall().content)
            self.assertEqual(un_res["ok"], True)
| 38.82
| 98
| 0.574704
|
4a0e6c92c7981ca8f7ff912ff59593ee1ac840b0
| 1,667
|
py
|
Python
|
game/audio.py
|
alex-marcozzi/Interithmetic
|
861cd85ae82861430d6ed637945af610f4bd2e41
|
[
"MIT"
] | null | null | null |
game/audio.py
|
alex-marcozzi/Interithmetic
|
861cd85ae82861430d6ed637945af610f4bd2e41
|
[
"MIT"
] | null | null | null |
game/audio.py
|
alex-marcozzi/Interithmetic
|
861cd85ae82861430d6ed637945af610f4bd2e41
|
[
"MIT"
] | null | null | null |
# Title: audio.py
# Description: Module file for audio used in Interithmetic.
# Author: Alexander Marcozzi
# Date: 11/19/2021
import pyglet
import random
# sound effects -- streaming=False asks pyglet to pre-load each source, so
# playback can start without hitting the disk first
drums = pyglet.media.load('./assets/sfx/drumroll.mp3', streaming=False)
cheer = pyglet.media.load('./assets/sfx/cheer.mp3', streaming=False)
awwww = pyglet.media.load('./assets/sfx/awwww.mp3', streaming=False)

# music -- background tracks; the inline comments credit each source
music = [
    pyglet.media.load('./assets/music/garden.mp3', streaming=False),  # www.youtube.com/watch?v=SnFqblwmIoA
    pyglet.media.load('./assets/music/jeopardy.mp3', streaming=False),  # Jeopardy song
    pyglet.media.load('./assets/music/rose.mp3', streaming=False),  # Rondo Brothers, Yellow Flower of Berkeley
    pyglet.media.load('./assets/music/thinking.mp3', streaming=False),  # Kevin MacLeod, Thinking Music
    pyglet.media.load('./assets/music/nitro.mp3', streaming=False),  # Nitro Fun, Checkpoint
    pyglet.media.load('./assets/music/last7.mp3', streaming=False),  # oneohkay, Last 7 Letters
    pyglet.media.load('./assets/music/moon.mp3', streaming=False),  # Gecko&Tokage Parade, Moon
    pyglet.media.load('./assets/music/beat.mp3', streaming=False),  # Nujabes, Beat laments the world
    pyglet.media.load('./assets/music/prayer.mp3', streaming=False),  # Nujabes, Prayer
    pyglet.media.load('./assets/music/rush.mp3', streaming=False),  # The Seatbelts, Rush
    pyglet.media.load('./assets/music/dog.mp3', streaming=False),  # The Seatbelts, Bad Dog No Biscuits
]
# convenience functions
def randomSong(playlist=None):
    """Return a randomly selected song.

    Args:
        playlist: optional sequence of songs to pick from; defaults to the
            module-level ``music`` list (backward compatible with the
            original zero-argument call).

    Returns:
        One element of the playlist, chosen uniformly at random.

    Note:
        The previous implementation used ``random.randint(1, len(music) - 1)``,
        which could never return index 0 -- the first song was unreachable.
        ``random.choice`` samples every entry with equal probability.
    """
    songs = music if playlist is None else playlist
    return random.choice(songs)
| 47.628571
| 115
| 0.70186
|
4a0e6ce32a9e8f184b80ed8b370a7982ef7b79c0
| 22,359
|
py
|
Python
|
sep_gui.py
|
cccaaannn/easy-sep
|
b76ed4d3853dd1fa1f75c45873a64f0aae8795d0
|
[
"MIT"
] | null | null | null |
sep_gui.py
|
cccaaannn/easy-sep
|
b76ed4d3853dd1fa1f75c45873a64f0aae8795d0
|
[
"MIT"
] | null | null | null |
sep_gui.py
|
cccaaannn/easy-sep
|
b76ed4d3853dd1fa1f75c45873a64f0aae8795d0
|
[
"MIT"
] | null | null | null |
import os
import sys
import shutil
import datetime
from PIL import Image
from PyQt5 import QtWidgets, QtCore
from PyQt5.QtWidgets import QFileDialog, QFrame
import threading
"""
checks the existence of a file and renames it if it exists on the destination and returns the abs path of destination
"""
def check_existace_and_rename(src_path, dest_path):
i = 1
file_name = os.path.basename(src_path)
name_temp = file_name
is_name_changed = False
while os.path.exists(dest_path + os.sep + name_temp):
is_name_changed = True
name_temp = file_name
name_without_extension = os.path.splitext(name_temp)[0]
extension = os.path.splitext(name_temp)[1]
name_temp = name_without_extension + "(" + i.__str__() + ")" + extension
i += 1
return (dest_path + os.sep + name_temp), is_name_changed, src_path
"""
moves file
"""
def move_file(src, dest):
try:
shutil.move(src, dest)
return 1
except(FileNotFoundError):
print("file not found")
return 0
"""
copies file
"""
def copy_file(src, dest):
try:
shutil.copy(src, dest)
return 1
except(FileNotFoundError):
print("file not found")
return 0
"""
gets date from exif data
"""
def get_date_taken_EXIF(path):
try:
date = Image.open(path)._getexif()[36867]
date = date.replace(":", "-")
return date, True
except:
return 0, False
"""
gets file modify date from os
"""
def get_date_modification_SYSTEM(path):
try:
posix_time = os.path.getmtime(path)
date = datetime.datetime.utcfromtimestamp(posix_time).strftime('%Y-%m-%d %H-%M-%S')
return date, True
except(FileNotFoundError):
return 0, False
"""
creates dir if not exists returns it regardless
"""
def create_dir_if_not_exists(dir_path, dir_name):
dir = dir_path + os.sep + dir_name
try:
os.makedirs(dir)
return dir
except FileExistsError:
return dir
"""
checks the extensions list for selecting files if * is given returns 1 to accept all files
"""
def check_extension(extensions_list, path):
if (extensions_list[0] == "*"):
return 1
else:
for extension in extensions_list:
if (os.path.splitext(path)[1] == extension):
return 1
return 0
"""
prints list line by line
"""
def printlist(list):
for i in range(len(list)):
print((i + 1).__str__() + ": " + list[i])
"""
writes list line by line
"""
def writelist(file, list):
for i in range(len(list)):
file.write((i + 1).__str__() + ": " + list[i] + "\n")
"""
frame class
"""
class sep_frame(QtWidgets.QWidget):
def __init__(self):
super().__init__()
self.init_variables()
self.create_frame()
"""
inits variables
"""
def init_variables(self):
#current dir
#self.old_file_dir = os.getcwd()
#self.new_file_dir = os.getcwd() + os.sep + "new_folder"
#desktop
self.old_file_dir = os.path.abspath(os.path.expanduser("~/Desktop"))
self.new_file_dir = os.path.abspath(os.path.expanduser("~/Desktop") + os.sep + "new_folder")
#create threads veriable
self.thread
#for stopping thread
self.stop_thread = False
"""
creates frame
"""
def create_frame(self):
self.setWindowTitle("Easy Separate")
self.setGeometry(100, 100, 300, 250)
self.setFixedSize(400, 260)
self.labels()
self.line_edits()
self.buttons()
self.radio_butons()
self.checkoxes()
self.show()
"""
label options
"""
def labels(self):
self.info_label = QtWidgets.QLabel(self)
#shows borders
self.info_label.setFrameShape(QFrame.Panel)
self.info_label.setFrameShadow(QFrame.Sunken)
self.info_label.setLineWidth(2)
self.info_label.setText("EASY SEPARATE")
self.info_label.move(10, 130)
self.info_label.setFixedSize(380,30)
self.info_label.setAlignment(QtCore.Qt.AlignCenter)
labbel_font1 = self.info_label.font()
labbel_font1.setPointSize(10)
labbel_font1.setBold(True)
self.info_label.setFont(labbel_font1)
"""
lineedit options
"""
def line_edits(self):
self.old_file_line = QtWidgets.QLineEdit(self)
self.old_file_line.setText(self.old_file_dir)
self.old_file_line.move(120, 10)
self.old_file_line.setFixedSize(270,30)
self.new_file_line = QtWidgets.QLineEdit(self)
self.new_file_line.setText(self.new_file_dir)
self.new_file_line.move(120, 50)
self.new_file_line.setFixedSize(270, 30)
self.extensions_line = QtWidgets.QLineEdit(self)
self.extensions_line.setText(".jpg,.png,.txt")
self.extensions_line.move(120, 90)
self.extensions_line.setFixedSize(270, 30)
self.extensions_line.setEnabled(False)
line_font1 = self.old_file_line.font()
line_font1.setPointSize(10)
line_font1.setBold(True)
self.old_file_line.setFont(line_font1)
self.new_file_line.setFont(line_font1)
self.extensions_line.setFont(line_font1)
"""
opens file selection dialog for old file
"""
def old_file_button_function(self):
file = str(os.path.abspath(QFileDialog.getExistingDirectory(self, "Select Directory")))
self.old_file_line.setText(file)
"""
opens file selection dialog for new file
"""
def new_file_button_function(self):
file = str(os.path.abspath(QFileDialog.getExistingDirectory(self, "Select Directory")))
self.new_file_line.setText(file)
"""
creates info pop up
"""
def info_button_function(self):
QtWidgets.QMessageBox.information(self,'INFO',"""
This program separates and re-folders files according to their system modification or exif dates.
usage:
1-select the folder that you want to re-folder.
2-select new folder that will be used as root folder for separated files.
3-type extensions to be separated or check all extensions box.
4-select exif or system modify date or both (if both is selected exif will be prioritised).
5-select year month or day to select separation dept.
6-select move or copy.
7-press separate button to start process.
""")
"""
stops thread
"""
def stop_button_function(self):
self.stop_thread = True
self.info_label.setText("stopped")
"""
when separate button clicked takes all parameters from gui checks them and creates a thread for separate function
----------------------------THREAD IS USED TO PREVENT FREZING OF THE FRAME------------------------------------------------------------------------
"""
def separate_button_function(self):
self.stop_thread = False
are_parameters_ok, date_source_selection, cp_or_mv_selection, day_monty_year_selection, extensions_list, path_from, path_to = self.value_collector()
if(are_parameters_ok):
self.thread = threading.Thread(target=self.seperate, args=(date_source_selection, cp_or_mv_selection, day_monty_year_selection, extensions_list, path_from, path_to), daemon = True)
self.thread.start()
#self.seperate(date_source_selection, cp_or_mv_selection, day_monty_year_selection, extensions_list, path_from, path_to)
"""
button options
"""
def buttons(self):
self.old_file_button = QtWidgets.QPushButton(self)
self.old_file_button.setText("select old")
self.old_file_button.move(10, 10)
self.old_file_button.clicked.connect(self.old_file_button_function)
self.new_file_button = QtWidgets.QPushButton(self)
self.new_file_button.setText("select new")
self.new_file_button.move(10, 50)
self.new_file_button.clicked.connect(self.new_file_button_function)
self.info_button = QtWidgets.QPushButton(self)
self.info_button.setText("Info")
self.info_button.move(315, 165)
self.info_button.setFixedSize(75, 27)
self.info_button.clicked.connect(self.info_button_function)
self.stop_button = QtWidgets.QPushButton(self)
self.stop_button.setText("stop")
self.stop_button.move(315, 195)
self.stop_button.setFixedSize(75, 27)
self.stop_button.clicked.connect(self.stop_button_function)
self.separate_button = QtWidgets.QPushButton(self)
self.separate_button.setText("Separate")
self.separate_button.move(315, 225)
self.separate_button.setFixedSize(75,27)
self.separate_button.clicked.connect(self.separate_button_function)
"""
radiobutton options
"""
def radio_butons(self):
self.S = QtWidgets.QRadioButton("system modify", self)
self.S.move(10, 170)
self.S.setChecked(True)
self.E = QtWidgets.QRadioButton("EXIF", self)
self.E.move(10, 200)
self.ES = QtWidgets.QRadioButton("EXIF + system", self)
self.ES.move(10, 230)
S_E_ES_button_group = QtWidgets.QButtonGroup(self)
S_E_ES_button_group.addButton(self.S)
S_E_ES_button_group.addButton(self.E)
S_E_ES_button_group.addButton(self.ES)
self.year = QtWidgets.QRadioButton("year", self)
self.year.move(150, 170)
self.month = QtWidgets.QRadioButton("month", self)
self.month.move(150, 200)
self.month.setChecked(True)
self.day = QtWidgets.QRadioButton("day", self)
self.day.move(150, 230)
year_month_day_button_group = QtWidgets.QButtonGroup(self)
year_month_day_button_group.addButton(self.year)
year_month_day_button_group.addButton(self.month)
year_month_day_button_group.addButton(self.day)
self.cp = QtWidgets.QRadioButton("copy", self)
self.cp.move(250, 170)
self.cp.setChecked(True)
self.mv = QtWidgets.QRadioButton("move", self)
self.mv.move(250, 200)
cp_mv_button_group = QtWidgets.QButtonGroup(self)
cp_mv_button_group.addButton(self.cp)
cp_mv_button_group.addButton(self.mv)
"""
toggles all extensions usage checbox
"""
def extensions_checkox_function(self):
if(self.extensions_line.isEnabled()):
self.extensions_line.setEnabled(False)
else:
self.extensions_line.setEnabled(True)
"""
checkox options
"""
def checkoxes(self):
self.extensions_checkox = QtWidgets.QCheckBox("all extensions", self)
self.extensions_checkox.move(10, 95)
self.extensions_checkox.clicked.connect(self.extensions_checkox_function)
self.extensions_checkox.setChecked(True)
"""
gets necessary values from gui converts them for separate function
"""
def value_collector(self):
date_source_selection = ""
cp_or_mv_selection = ""
day_monty_year_selection = ""
extensions_list = []
path_from = ""
path_to = ""
are_parameters_ok = True
if(self.S.isChecked()):
date_source_selection = "-S"
elif(self.E.isChecked()):
date_source_selection = "-E"
elif(self.ES.isChecked()):
date_source_selection = "-ES"
if (self.year.isChecked()):
day_monty_year_selection = "-y"
elif (self.month.isChecked()):
day_monty_year_selection = "-m"
elif (self.day.isChecked()):
day_monty_year_selection = "-d"
if (self.cp.isChecked()):
cp_or_mv_selection = "-cp"
elif (self.mv.isChecked()):
cp_or_mv_selection = "-mv"
#spilts extensions by comma
if(self.extensions_checkox.isChecked()):
extensions_list.append("*")
else:
extensions_list = self.extensions_line.text().split(',')
#checks old directoryes existence
if (not os.path.isdir(self.old_file_line.text())):
are_parameters_ok = False
self.info_label.setText("Directory error! (old)")
else:
path_from = self.old_file_line.text()
#checks if new directory exists but checks only the path if name is not exists it will be created
new_main_dir_path, new_main_dir_name = os.path.split(self.new_file_line.text())
if (not os.path.isdir(new_main_dir_path)):
are_parameters_ok = False
self.info_label.setText("Directory error! (new)")
else:
path_to = self.new_file_line.text()
return are_parameters_ok, date_source_selection, cp_or_mv_selection, day_monty_year_selection, extensions_list, path_from, path_to
"""
writes lists to a file
"""
def write_extra_info_to_file(self, path, file_name, all_files_list, files_with_no_exif_list, changed_file_names_list):
with open(path + os.sep + file_name + ".txt", 'a', encoding='utf-8') as file:
# if exif is not selected we should notify
if (files_with_no_exif_list[0] == "exif_not_selected"):
exif_count = "exif data not scanned (if you want to get exif information, use with exif option selected)"
else:
exif_count = len(files_with_no_exif_list).__str__()
file.write("--- Content of the file --- \n 1-All files and their old and new directories. Total: " + len(
all_files_list).__str__()
+ " \n 2-Files with no exif data. Total: " + exif_count
+ "\n 3-Files that renamed because of the name collision. Total: " + len(
changed_file_names_list).__str__())
file.write("\n")
file.write("\n")
file.write("\n")
file.write("\n")
file.write("----------------------------------------------------")
file.write("\n")
file.write("--- List of all files and directories old -> new ---")
file.write("\n")
file.write("----------------------------------------------------")
file.write("\n")
file.write("\n")
writelist(file, all_files_list)
file.write("\n")
file.write("\n")
file.write("--------------------------------------------------")
file.write("\n")
file.write("--- List of files that has no exif information ---")
file.write("\n")
file.write("--------------------------------------------------")
file.write("\n")
file.write("\n")
writelist(file, files_with_no_exif_list)
file.write("\n")
file.write("\n")
file.write("----------------------------------------------------------------")
file.write("\n")
file.write("--- List of files that renamed because of the name collision ---")
file.write("\n")
file.write("----------------------------------------------------------------")
file.write("\n")
file.write("\n")
writelist(file, changed_file_names_list)
"""
gets all file paths in a directory separetes those files according to the given values and re-folders them
"""
    def seperate(self, date_source_selection, cp_or_mv_selection, day_monty_year_selection, extensions_list, old_main_dir,new_main_dir):
        """Walk old_main_dir recursively, pick files matching extensions_list, and
        copy/move each into a date-based folder hierarchy under new_main_dir.

        Args (all strings/flags chosen by the GUI):
            date_source_selection: "-E" (EXIF only), "-S" (filesystem mtime only),
                or "-ES" (EXIF first, fall back to mtime).
            cp_or_mv_selection: "-cp" to copy files, "-mv" to move them.
            day_monty_year_selection: "-y" / "-m" / "-d" — depth of the date
                folder tree (year / year+month / year+month+day).
            extensions_list: extensions accepted by check_extension().
            old_main_dir: root directory to scan.
            new_main_dir: destination root (created if missing).

        Side effects: updates self.info_label, creates directories, copies/moves
        files, and writes a report via self.write_extra_info_to_file().
        Returns early (None) whenever self.stop_thread becomes True.

        NOTE(review): if day_monty_year_selection is none of -y/-m/-d,
        dir_for_copy is never assigned and the call to
        check_existace_and_rename() below would raise NameError — presumably the
        GUI guarantees one of the three flags is always set; confirm.
        """
        # -------------------------------------------gating all files from all subdirs--------------------------------------
        #---(FOR GUI)--- give info ---(FOR GUI)---
        self.info_label.setText("collecting directories")
        all_subdirs = [] # unused
        file_paths = []
        for sub_dir in os.walk(old_main_dir): # walks on subdirs
            all_subdirs.append(sub_dir[0]) # appends all subdirs (unused)
            file_paths_temp = os.listdir(sub_dir[0]) # gets all files from a subdir and puts them inside file_paths_temp
            for file_path_temp in file_paths_temp: # walks on the files that we get from a single subdir
                abs_path = sub_dir[0] + os.sep + file_path_temp # adds path to the name of the file to get abs path
                if (os.path.isfile(abs_path)): # checks if abs path is file or folder if it is file than ok
                    if (check_extension(extensions_list,abs_path)): # calls check extensions function for checking the extension
                        file_paths.append(abs_path) # if extension is ok than append it to file paths
            #---(FOR GUI)--- this is for stopping thread if stop button pressed ---(FOR GUI)---
            if (self.stop_thread):
                return
        # printlist(file_paths)
        # ------------------------------copy or move files to apropriate destinations-----------------------------------
        #split new path and name
        new_main_dir_path, new_main_dir_name = os.path.split(new_main_dir)
        #create new folder for puting seperated stuff in
        create_dir_if_not_exists(dir_path=new_main_dir_path, dir_name=new_main_dir_name)
        #status for informatin (1-based progress counter shown in the GUI)
        status = 1
        #those lists are for report file they are for collecting and displaying extra information in the report file
        changed_file_names_list = []
        files_with_no_exif_list = []
        all_files_list = []
        for file_path in file_paths:
            # ---(FOR GUI)--- this is for stopping thread if stop button pressed ---(FOR GUI)---
            if (self.stop_thread):
                return
            #---(FOR GUI)--- update info label with percentage ---(FOR GUI)---
            self.info_label.setText(len(file_paths).__str__() + "/" + status.__str__() + " %" + int((status/len(file_paths))*100).__str__())
            #print(len(file_paths).__str__() + "/" + status.__str__() + " -> " + os.path.basename(file_path))
            status += 1
            #these are needed to understand existance of dates
            is_exif_exists = False
            is_modified_date_exists = False
            #this part tries to get exif date if E or ES option selected
            if (date_source_selection == "-E" or date_source_selection == "-ES"):
                #getting exif date
                date, is_exif_exists = get_date_taken_EXIF(path=file_path)
                #if date exists
                if (is_exif_exists):
                    # EXIF-dated files go under a "YYYY-exif" folder (system-dated
                    # files below use plain "YYYY") — intentional distinction, apparently.
                    if (day_monty_year_selection == "-y" or day_monty_year_selection == "-m" or day_monty_year_selection == "-d"):
                        dir_for_copy = create_dir_if_not_exists(dir_path=new_main_dir, dir_name=date[:4] + "-exif")
                    if (day_monty_year_selection == "-m" or day_monty_year_selection == "-d"):
                        dir_for_copy = create_dir_if_not_exists(dir_path=dir_for_copy, dir_name=date[:7])
                    if (day_monty_year_selection == "-d"):
                        dir_for_copy = create_dir_if_not_exists(dir_path=dir_for_copy, dir_name=date[:10])
                else:
                    #---(FOR report text)--- appending files that has no exif info to the list ---(FOR report text)---
                    files_with_no_exif_list.append(file_path)
            #this part gets system provided modify date if S or ES option selected
            if ((date_source_selection == "-S" or date_source_selection == "-ES") and (not is_exif_exists)):
                #getting system date
                date, is_modified_date_exists = get_date_modification_SYSTEM(path=file_path)
                #if date exists
                if (is_modified_date_exists):
                    if (day_monty_year_selection == "-y" or day_monty_year_selection == "-m" or day_monty_year_selection == "-d"):
                        dir_for_copy = create_dir_if_not_exists(dir_path=new_main_dir, dir_name=date[:4])
                    if (day_monty_year_selection == "-m" or day_monty_year_selection == "-d"):
                        dir_for_copy = create_dir_if_not_exists(dir_path=dir_for_copy, dir_name=date[:7])
                    if (day_monty_year_selection == "-d"):
                        dir_for_copy = create_dir_if_not_exists(dir_path=dir_for_copy, dir_name=date[:10])
            #if no date found anywhere dont touch that file
            if (not is_modified_date_exists and not is_exif_exists):
                continue
            #now we have destination path but there can be collision so we check for that and make a rename if needed
            dest, is_name_changed, original_src = check_existace_and_rename(src_path=file_path, dest_path=dir_for_copy)
            #---(FOR report text)--- if name changed append this file to changed_file_names_list ---(FOR report text)---
            if(is_name_changed):
                changed_file_names_list.append(original_src + " -> " + dest)
            #---(FOR report text)--- append every file to all_files_list this will be used in the report text ---(FOR report text)---
            all_files_list.append(original_src + " -> " + dest)
            #copy or move file to destination
            if (cp_or_mv_selection == "-cp"):
                copy_file(src=file_path, dest=dest)
            elif (cp_or_mv_selection == "-mv"):
                move_file(src=file_path, dest=dest)
        #---(FOR GUI)--- showing final results on the info label ---(FOR GUI)---
        self.info_label.setText(len(file_paths).__str__() + "/" + (status - 1).__str__() + " see report file for extra info")
        #---(FOR report text)--- if exif id not checked at all we need to show it in the report ---(FOR report text)---
        if(date_source_selection == "-S"):
            files_with_no_exif_list.append("exif_not_selected")
        #writing all the extra information to info file
        self.write_extra_info_to_file(new_main_dir, "Report", all_files_list, files_with_no_exif_list, changed_file_names_list)
if __name__=='__main__':
    # Build the Qt application, create the separator window, and enter the event loop.
    qt_app = QtWidgets.QApplication(sys.argv)
    window = sep_frame()  # keep a reference so the widget is not garbage-collected
    sys.exit(qt_app.exec_())
| 35.889246
| 192
| 0.607138
|
4a0e6d1e7a18e69b03db22388cc263b866a8319c
| 1,296
|
py
|
Python
|
math/0400-第 N 个数字.py
|
ZHUANGHP/LeetCode-Solution-Python
|
af2b14abb7f50ee061bcd601c8666b32e448cbd8
|
[
"Apache-2.0"
] | 1
|
2021-01-10T17:03:21.000Z
|
2021-01-10T17:03:21.000Z
|
math/0400-第 N 个数字.py
|
ZHUANGHP/LeetCode-Solution-Python
|
af2b14abb7f50ee061bcd601c8666b32e448cbd8
|
[
"Apache-2.0"
] | null | null | null |
math/0400-第 N 个数字.py
|
ZHUANGHP/LeetCode-Solution-Python
|
af2b14abb7f50ee061bcd601c8666b32e448cbd8
|
[
"Apache-2.0"
] | 1
|
2021-07-25T07:53:14.000Z
|
2021-07-25T07:53:14.000Z
|
class Solution:
    def findNthDigit(self, n: int) -> int:
        """Return the n-th digit (1-indexed) of 123456789101112131415...

        Digits are grouped by the width of the numbers they come from:
        1-digit numbers (1-9) contribute 9 digits, 2-digit numbers (10-99)
        contribute 90 * 2 = 180, 3-digit numbers 900 * 3 = 2700, and so on.
        We skip whole groups, then locate the exact number and digit.
        """
        # Single-digit prefix: the n-th digit is n itself.
        if n < 10:
            return n
        consumed = 0   # digits accounted for by fully skipped groups
        count = 9      # how many numbers exist with the current width
        width = 1      # number of digits per number in the current group
        # Skip complete groups of equal-width numbers.
        while consumed + count * width < n:
            consumed += count * width
            count *= 10
            width += 1
        # n is now an offset (1-based) into the group of `width`-digit numbers.
        n -= consumed
        offset = n % width
        if offset == 0:
            # Exactly divisible: the answer is the last digit of that number.
            number = 10 ** (width - 1) + n // width - 1
            return number % 10
        # Otherwise it is the `offset`-th digit (from the left) of `number`;
        # strip trailing digits until it becomes the rightmost one.
        number = 10 ** (width - 1) + n // width
        for _ in range(offset, width):
            number //= 10
        return number % 10
if __name__ == '__main__':
    # Quick manual check: the 190th digit of 123456789101112... is 1.
    solver = Solution()
    position = 190
    answer = solver.findNthDigit(position)
    print(answer)
| 25.411765
| 74
| 0.46142
|
4a0e6d5e1f701c49558cfe1ea1df61e9b4180a89
| 821
|
py
|
Python
|
StyleText/tools/synth_dataset.py
|
cy333/PaddleOCR
|
75b9feb0a69c0a3be05836605fc00d466fb1eaa1
|
[
"Apache-2.0"
] | 1
|
2020-12-16T06:46:03.000Z
|
2020-12-16T06:46:03.000Z
|
StyleText/tools/synth_dataset.py
|
cy333/PaddleOCR
|
75b9feb0a69c0a3be05836605fc00d466fb1eaa1
|
[
"Apache-2.0"
] | null | null | null |
StyleText/tools/synth_dataset.py
|
cy333/PaddleOCR
|
75b9feb0a69c0a3be05836605fc00d466fb1eaa1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from engine.synthesisers import DatasetSynthesiser
def synth_dataset():
    """Create a DatasetSynthesiser and run its dataset synthesis step."""
    synthesiser = DatasetSynthesiser()
    synthesiser.synth_dataset()
# CLI entry point: generate the synthetic dataset when run as a script.
if __name__ == '__main__':
    synth_dataset()
| 34.208333
| 74
| 0.763703
|
4a0e6eac6238a6287595861d2b91b18b3789b130
| 15,796
|
py
|
Python
|
unet_vgg4_cc.py
|
TopoXLab/TopoCount
|
eb93de2bc40d4421ea39c1b80d5c4c4829f3e369
|
[
"MIT"
] | 18
|
2020-12-18T02:54:55.000Z
|
2022-02-26T01:52:22.000Z
|
unet_vgg4_cc.py
|
TopoXLab/TopoCount
|
eb93de2bc40d4421ea39c1b80d5c4c4829f3e369
|
[
"MIT"
] | 2
|
2021-01-13T09:15:38.000Z
|
2021-03-26T08:43:43.000Z
|
unet_vgg4_cc.py
|
TopoXLab/TopoCount
|
eb93de2bc40d4421ea39c1b80d5c4c4829f3e369
|
[
"MIT"
] | 8
|
2020-12-25T01:50:55.000Z
|
2021-06-08T05:21:48.000Z
|
import torch.nn as nn
import torch.nn.functional as F
import torch
from torchvision import models
import collections
from distutils.util import strtobool;
from sa_net_arch_utilities_pytorch import CNNArchUtilsPyTorch;
class Upsample(nn.Module):
    """Resize feature maps spatially by a fixed factor via bilinear interpolation."""

    def __init__(self, scale_factor):
        super(Upsample, self).__init__()
        # Fixed multiplier applied to the spatial dimensions in forward().
        self.scale_factor = scale_factor

    def forward(self, x):
        factor = self.scale_factor
        # Delegate to the functional API; bilinear mode matches nn.Upsample's default-free form.
        return F.interpolate(x, scale_factor=factor, mode='bilinear')
class UnetVggCC(nn.Module):
    """U-Net with a VGG16-style encoder for cell/crowd counting (TopoCount).

    The encoder/bottleneck mirror VGG16's conv layers (so that VGG16
    pretrained weights can be copied in by name order), and the decoder is a
    classic U-Net expanding path with skip connections that are center-cropped
    and concatenated in forward().
    """
    def __init__(self, load_weights=False, kwargs=None):
        """Build the network.

        Args:
            load_weights: unused in this implementation — VGG16 weights are
                always copied into the encoder/bottleneck by
                _initialize_weights() (see NOTE there).
            kwargs: optional dict overriding the defaults in `args` below
                (conv_init, use_softmax/relu/tanh, block_size, pool_size,
                initial_pad, n_classes, n_channels, ...).
        """
        super(UnetVggCC,self).__init__()
        # predefined list of arguments
        args = {'input_img_width':-1, 'input_img_height':-1, 'pretrained':'False'
            , 'conv_init': 'he'
            , 'use_softmax':'False', 'use_relu':'False', 'use_tanh':'False'
            ,'n_layers_per_path':4, 'n_conv_blocks_in_start': 64, 'block_size':3, 'pool_size':2
            , 'dropout_keep_prob' : 1.0, 'initial_pad':0, 'interpolate':'False', 'n_classes':1, 'n_channels':3
        };
        if(not(kwargs is None)):
            args.update(kwargs);
        # 'conv_init': 'uniform', 'normal', 'xavier_uniform', 'xavier_normal', 'he'
        # read extra argument
        #self.n_layers_per_path = int(args['n_layers_per_path']); # n_layers_per_path in contracting path + n_layers_per_path in expanding path + 1 bottleneck layer
        #self.n_conv_blocks_in_start = int(args['n_conv_blocks_in_start']);
        self.input_img_width = int(args['input_img_width']);
        self.input_img_height = int(args['input_img_height']);
        self.n_channels = int(args['n_channels']);
        self.n_classes = int(args['n_classes']);
        #dropout = args['dropout'];
        self.pretrained = bool(strtobool(args['pretrained']));
        #self.stain_init_name = str(args['stain_init_name']);
        self.conv_init = str(args['conv_init']).lower();
        self.use_softmax = bool(strtobool(args['use_softmax']));
        self.use_relu = bool(strtobool(args['use_relu']));
        self.use_tanh = bool(strtobool(args['use_tanh']));
        self.n_layers_per_path = int(args['n_layers_per_path']);
        self.n_conv_blocks_in_start = int(args['n_conv_blocks_in_start']);
        self.block_size = int(args['block_size']);
        self.pool_size = int(args['pool_size']);
        self.dropout_keep_prob = float(args['dropout_keep_prob'])
        self.initial_pad = int(args['initial_pad']);
        self.interpolate = bool(strtobool(args['interpolate']));
        print('self.initial_pad',self.initial_pad)
        n_blocks = self.n_conv_blocks_in_start;
        n_blocks_prev = self.n_channels;
        # Contracting Path
        # Channel progression 64 -> 128 -> 256 -> 512 follows VGG16 conv blocks;
        # module names are chosen so state_dict key order matches VGG16's.
        #self.encoder = [];
        self.encoder = nn.Sequential()
        layer_index = 0;
        layer = nn.Sequential();
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_0', nn.Conv2d(3, 64, kernel_size=self.block_size, padding=self.initial_pad));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_1', nn.Conv2d(64, 64, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(inplace=True))
        self.encoder.add_module('encoder_l_'+str(layer_index), layer);
        layer_index = 1;
        layer = nn.Sequential();
        layer.add_module('encoder_maxpool_l_'+str(layer_index), nn.MaxPool2d(kernel_size=self.pool_size, stride=self.pool_size));
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_0', nn.Conv2d(64, 128, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_1', nn.Conv2d(128, 128, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(inplace=True))
        self.encoder.add_module('encoder_l_'+str(layer_index), layer);
        layer_index = 2;
        layer = nn.Sequential();
        layer.add_module('encoder_maxpool_l_'+str(layer_index), nn.MaxPool2d(kernel_size=self.pool_size, stride=self.pool_size));
        layer.add_module('encoder_conv_l_'+str(layer_index) + '_0', nn.Conv2d(128, 256, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_1', nn.Conv2d(256, 256, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(inplace=True))
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_2', nn.Conv2d(256, 256, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_2', nn.ReLU(inplace=True))
        self.encoder.add_module('encoder_l_'+str(layer_index), layer);
        layer_index = 3;
        layer = nn.Sequential();
        layer.add_module('encoder_maxpool_l_'+str(layer_index), nn.MaxPool2d(kernel_size=self.pool_size, stride=self.pool_size));
        layer.add_module('encoder_conv_l_'+str(layer_index) + '_0', nn.Conv2d(256, 512, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_1', nn.Conv2d(512, 512, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(inplace=True))
        layer.add_module('encoder_conv_l_'+str(layer_index)+ '_2', nn.Conv2d(512, 512, kernel_size=self.block_size));
        layer.add_module('encoder_relu_l_'+str(layer_index)+'_2', nn.ReLU(inplace=True))
        self.encoder.add_module('encoder_l_'+str(layer_index), layer);
        # Bottleneck: the deepest VGG16 conv block (pool + three 512-channel convs).
        self.bottleneck = nn.Sequential();
        self.bottleneck.add_module('bottleneck_maxpool', nn.MaxPool2d(kernel_size=self.pool_size, stride=self.pool_size));
        self.bottleneck.add_module('bottleneck_conv'+ '_0', nn.Conv2d(512, 512, kernel_size=self.block_size));
        self.bottleneck.add_module('bottleneck_relu'+'_0', nn.ReLU(inplace=True))
        self.bottleneck.add_module('bottleneck_conv'+ '_1', nn.Conv2d(512, 512, kernel_size=self.block_size));
        self.bottleneck.add_module('bottleneck_relu'+'_1', nn.ReLU(inplace=True))
        self.bottleneck.add_module('bottleneck_conv'+ '_2', nn.Conv2d(512, 512, kernel_size=self.block_size));
        self.bottleneck.add_module('bottleneck_relu'+'_2', nn.ReLU(inplace=True))
        # Expanding Path
        # Each decoder level: transpose-conv upsample (module index 0, applied
        # before the skip concat in forward()), then convs on the concatenated maps.
        #self.decoder = [];
        self.decoder = nn.Sequential()
        layer_index = 3;
        layer = nn.Sequential();
        layer.add_module('decoder_deconv_l_'+str(layer_index), nn.ConvTranspose2d(512, 512, stride=self.pool_size, kernel_size=self.pool_size))
        layer.add_module('decoder_conv_l_s_'+str(layer_index)+'_0', nn.Conv2d(1024, 512, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('decoder_conv_l_'+str(layer_index)+'_1', nn.Conv2d(512, 512, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(True));
        self.decoder.add_module('decoder_l_'+str(layer_index), layer);
        layer_index = 2;
        layer = nn.Sequential();
        layer.add_module('decoder_deconv_l_'+str(layer_index), nn.ConvTranspose2d(512, 256, stride=self.pool_size, kernel_size=self.pool_size))
        layer.add_module('decoder_conv_l_s_'+str(layer_index)+'_0', nn.Conv2d(512, 256, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('decoder_conv_l_'+str(layer_index)+'_1', nn.Conv2d(256, 256, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(True));
        self.decoder.add_module('decoder_l_'+str(layer_index), layer);
        layer_index = 1;
        layer = nn.Sequential();
        layer.add_module('decoder_deconv_l_'+str(layer_index), nn.ConvTranspose2d(256, 128, stride=self.pool_size, kernel_size=self.pool_size))
        layer.add_module('decoder_conv_l_s_'+str(layer_index)+'_0', nn.Conv2d(256, 128, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('decoder_conv_l_'+str(layer_index)+'_1', nn.Conv2d(128, 128, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(True));
        self.decoder.add_module('decoder_l_'+str(layer_index), layer);
        layer_index = 0;
        layer = nn.Sequential();
        # Topmost decoder level takes no skip connection (see forward(): concat only when j > 0).
        layer.add_module('decoder_deconv_l_'+str(layer_index), nn.ConvTranspose2d(128, 64, stride=self.pool_size, kernel_size=self.pool_size))
        #layer.add_module('decoder_conv_l_s_'+str(layer_index)+'_0', nn.Conv2d(128, 64, kernel_size=self.block_size));
        layer.add_module('decoder_conv_l_'+str(layer_index)+'_0', nn.Conv2d(64, 64, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_0', nn.ReLU(inplace=True))
        layer.add_module('decoder_conv_l_'+str(layer_index)+'_1', nn.Conv2d(64, 64, kernel_size=self.block_size));
        layer.add_module('decoder_relu_l_'+str(layer_index)+'_1', nn.ReLU(True));
        self.decoder.add_module('decoder_l_'+str(layer_index), layer);
        # 1x1 conv head producing n_classes output maps, with optional activation.
        self.final_layer = nn.Sequential();
        self.final_layer.add_module('conv_final', nn.Conv2d(64, self.n_classes, kernel_size=1));
        if(self.use_relu):
            self.final_layer.add_module('relu_final', nn.ReLU(True));
        if(self.use_tanh):
            self.final_layer.add_module('tanh_final', nn.Tanh());
        # Softmax
        self.softmax_layer = torch.nn.Softmax(dim=1);
        self._initialize_weights()
        self.zero_grad() ;
        print('self.encoder',self.encoder)
        #print('self.bottleneck',self.bottleneck)
        print('self.decoder',self.decoder)
    def forward(self,x):
        """Run the U-Net: encode (saving skips), bottleneck, decode with
        crop-and-concat skips, then the 1x1 head; softmax only if use_softmax."""
        encoder_out = [];
        encoder_out = [];
        # Contracting path: keep each level's output for the skip connections.
        for l in self.encoder:
            x = l(x);
            encoder_out.append(x);
        x = self.bottleneck(x);
        j = len(self.decoder);
        for l in self.decoder:
            # l[0] is the transpose-conv upsample for this decoder level.
            x = l[0](x);
            j -= 1;
            corresponding_layer_indx = j;
            ## crop and concatenate
            if(j > 0):
                cropped = CNNArchUtilsPyTorch.crop_a_to_b(encoder_out[corresponding_layer_indx], x);
                x = torch.cat((cropped, x), 1) ;
            # Remaining submodules of this level: convs + ReLUs on the merged maps.
            for i in range(1, len(l)):
                x = l[i](x);
        c = self.final_layer(x);
        if(self.use_softmax):
            sm = self.softmax_layer(c);
        else:
            sm = c;
        return sm;
    def _initialize_weights(self):
        """Randomly init conv weights per self.conv_init, then overwrite the
        encoder and bottleneck with VGG16 ImageNet weights copied by position.

        NOTE(review): the VGG16 download/copy happens unconditionally — it does
        not check self.pretrained or the load_weights ctor argument; confirm
        this is intended before training from scratch.
        """
        BIAS_INIT = 0.1;
        for l in self.encoder:
            for layer in l:
                if(isinstance(layer, nn.ConvTranspose2d) or isinstance(layer, nn.Conv2d)):
                    if(self.conv_init == 'normal'):
                        torch.nn.init.normal_(layer.weight) ;
                    elif(self.conv_init == 'xavier_uniform'):
                        torch.nn.init.xavier_uniform_(layer.weight) ;
                    elif(self.conv_init == 'xavier_normal'):
                        torch.nn.init.xavier_normal_(layer.weight, gain=10) ;
                    elif(self.conv_init == 'he'):
                        torch.nn.init.kaiming_normal_(layer.weight, mode='fan_out', nonlinearity='relu') ;
                    #layer.bias.data.fill_(BIAS_INIT);
        for layer in self.bottleneck:
            if(isinstance(layer, nn.ConvTranspose2d) or isinstance(layer, nn.Conv2d)):
                if(self.conv_init == 'normal'):
                    torch.nn.init.normal_(layer.weight) ;
                elif(self.conv_init == 'xavier_uniform'):
                    torch.nn.init.xavier_uniform_(layer.weight) ;
                elif(self.conv_init == 'xavier_normal'):
                    torch.nn.init.xavier_normal_(layer.weight, gain=10) ;
                elif(self.conv_init == 'he'):
                    torch.nn.init.kaiming_normal_(layer.weight, mode='fan_out', nonlinearity='relu') ;
                #layer.bias.data.fill_(BIAS_INIT);
        for l in self.decoder:
            for layer in l:
                if(isinstance(layer, nn.ConvTranspose2d) or isinstance(layer, nn.Conv2d)):
                    if(self.conv_init == 'normal'):
                        torch.nn.init.normal_(layer.weight) ;
                    elif(self.conv_init == 'xavier_uniform'):
                        torch.nn.init.xavier_uniform_(layer.weight) ;
                    elif(self.conv_init == 'xavier_normal'):
                        torch.nn.init.xavier_normal_(layer.weight, gain=10) ;
                    elif(self.conv_init == 'he'):
                        torch.nn.init.kaiming_normal_(layer.weight, mode='fan_out', nonlinearity='relu') ;
                    #layer.bias.data.fill_(BIAS_INIT);
        for layer in self.final_layer:
            if(isinstance(layer, nn.ConvTranspose2d) or isinstance(layer, nn.Conv2d)):
                if(self.conv_init == 'normal'):
                    torch.nn.init.normal_(layer.weight) ;
                elif(self.conv_init == 'xavier_uniform'):
                    torch.nn.init.xavier_uniform_(layer.weight) ;
                elif(self.conv_init == 'xavier_normal'):
                    torch.nn.init.xavier_normal_(layer.weight, gain=10) ;
                elif(self.conv_init == 'he'):
                    torch.nn.init.kaiming_normal_(layer.weight, mode='fan_out', nonlinearity='relu') ;
                #layer.bias.data.fill_(BIAS_INIT);
        # Copy VGG16 pretrained parameters into encoder + bottleneck, matching
        # entries purely by iteration order (assumes identical layer layout).
        vgg_model = models.vgg16(pretrained = True)
        fsd=collections.OrderedDict()
        i = 0
        for m in self.encoder.state_dict().items():
            temp_key=m[0]
            print('temp_key', temp_key)
            print('vgg_key', list(vgg_model.state_dict().items())[i][0])
            fsd[temp_key]=list(vgg_model.state_dict().items())[i][1]
            i += 1
        self.encoder.load_state_dict(fsd)
        fsd=collections.OrderedDict()
        for m in self.bottleneck.state_dict().items():
            temp_key=m[0]
            print('temp_key', temp_key)
            print('vgg_key', list(vgg_model.state_dict().items())[i][0])
            fsd[temp_key]=list(vgg_model.state_dict().items())[i][1]
            i += 1
        self.bottleneck.load_state_dict(fsd)
        #del vgg_model
def make_layers(cfg, in_channels = 3,batch_norm=False,dilation = False, deconv=None, pad_list=None):
    """Build a VGG-style nn.Sequential from a compact layer spec.

    Args:
        cfg: list where each entry is an output-channel count or 'M' (max-pool).
        in_channels: channels entering the first conv.
        batch_norm: insert BatchNorm2d after each conv when True.
        dilation: use dilation (and default padding) 2 instead of 1.
        deconv: optional per-entry flags; a truthy flag makes that entry a
            ConvTranspose2d instead of a Conv2d.
        pad_list: optional per-entry padding overrides (defaults to the dilation rate).

    Returns:
        nn.Sequential of the assembled modules.
    """
    d_rate = 2 if dilation else 1
    modules = []
    for idx, spec in enumerate(cfg):
        # Debug trace of the configuration as it is consumed.
        print('in_channels=', in_channels)
        print('v=', spec)
        if deconv is not None:
            print('deconv[i]=', deconv[idx])
        padding = d_rate if pad_list is None else pad_list[idx]
        print('padding =', padding)
        if spec == 'M':
            modules.append(nn.MaxPool2d(kernel_size=2, stride=2))
            continue
        if deconv is not None and deconv[idx]:
            conv = nn.ConvTranspose2d(in_channels, spec, stride=2, kernel_size=3, padding=padding)
        else:
            conv = nn.Conv2d(in_channels, spec, kernel_size=3, padding=padding, dilation=d_rate)
        if batch_norm:
            modules.extend([conv, nn.BatchNorm2d(spec), nn.ReLU(inplace=True)])
        else:
            modules.extend([conv, nn.ReLU(inplace=True)])
        in_channels = spec
    return nn.Sequential(*modules)
| 50.954839
| 164
| 0.627374
|
4a0e6f0614ca4d84980d8650a67d00c7b0ccef10
| 2,312
|
py
|
Python
|
draco/js.py
|
domoritz/small-vis-constraints
|
efc77bed9cc0f431c7d5082adfab490663291e07
|
[
"BSD-3-Clause"
] | 179
|
2018-06-07T21:32:35.000Z
|
2022-01-21T06:10:27.000Z
|
draco/js.py
|
domoritz/small-vis-constraints
|
efc77bed9cc0f431c7d5082adfab490663291e07
|
[
"BSD-3-Clause"
] | 61
|
2017-10-30T23:54:41.000Z
|
2018-05-29T07:14:17.000Z
|
draco/js.py
|
domoritz/small-vis-constraints
|
efc77bed9cc0f431c7d5082adfab490663291e07
|
[
"BSD-3-Clause"
] | 24
|
2018-06-20T17:01:48.000Z
|
2022-02-27T08:13:29.000Z
|
import json
import logging
import os
import subprocess
from typing import Dict, List, Optional, Tuple
# Module-level logging: configure the root logger at import time (side effect)
# and use a module-named logger for the subprocess stderr reports below.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def absolute_path(p: str) -> str:
    """Resolve *p* relative to the directory containing this module."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, p)
def vl2asp(vl: Dict) -> List[str]:
    """Translate a Vega-Lite spec into ASP facts via the vl2asp node script.

    Empty lines are filtered out of the returned fact list.
    """
    completed = subprocess.run(
        ["node", absolute_path("../js/bin/vl2asp")],
        input=json.dumps(vl).encode("utf8"),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if completed.stderr:
        logger.error("stderr: %s", completed.stderr)
    return list(filter(lambda x: x, completed.stdout.decode("utf-8").split("\n")))
def asp2vl(asp: List[str]) -> Dict:
    """Render a list of ASP facts back into a Vega-Lite spec via the asp2vl node script."""
    completed = subprocess.run(
        ["node", absolute_path("../js/bin/asp2vl")],
        input="\n".join(asp).encode("utf8"),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if completed.stderr:
        logger.error("stderr: %s", completed.stderr)
    return json.loads(completed.stdout)
def cql2asp(cql: Dict) -> List[str]:
    """Translate a CompassQL query into ASP facts via the cql2asp node script.

    Unlike vl2asp, the raw line split is returned (may contain empty strings).
    """
    completed = subprocess.run(
        ["node", absolute_path("../js/bin/cql2asp")],
        input=json.dumps(cql).encode("utf8"),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if completed.stderr:
        logger.error("stderr: %s", completed.stderr)
    return completed.stdout.decode("utf-8").split("\n")
def data2schema(data: List) -> Dict:
    """Infer a data schema from raw records via the data2schema node script."""
    completed = subprocess.run(
        ["node", absolute_path("../js/bin/data2schema")],
        input=json.dumps(data).encode("utf8"),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if completed.stderr:
        logger.error("stderr: %s", completed.stderr)
    return json.loads(completed.stdout)
def schema2asp(schema: Dict) -> List[str]:
    """Convert an inferred data schema into ASP facts via the schema2asp node script.

    Returns the raw line split (may contain empty strings).
    """
    completed = subprocess.run(
        ["node", absolute_path("../js/bin/schema2asp")],
        input=json.dumps(schema).encode("utf8"),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if completed.stderr:
        logger.error("stderr: %s", completed.stderr)
    return completed.stdout.decode("utf-8").split("\n")
| 26.272727
| 72
| 0.629325
|
4a0e70320157bb00f705fcd62bd9874a542e95e7
| 1,383
|
py
|
Python
|
display/getweather.py
|
Hegelim/twitterweather
|
fb509da7413878d6088d7545fef870e0e721e87a
|
[
"BSD-3-Clause"
] | null | null | null |
display/getweather.py
|
Hegelim/twitterweather
|
fb509da7413878d6088d7545fef870e0e721e87a
|
[
"BSD-3-Clause"
] | null | null | null |
display/getweather.py
|
Hegelim/twitterweather
|
fb509da7413878d6088d7545fef870e0e721e87a
|
[
"BSD-3-Clause"
] | null | null | null |
from pyowm.owm import OWM
from . import utils
# OpenWeatherMap API key — must be filled in before the functions below can work.
APIKEY = ''
def get_realtime_weather():
    """Fetch the current New York weather and return a merged dict of
    Celsius temperature readings and wind readings (m/s)."""
    manager = OWM(APIKEY).weather_manager()
    observation = manager.weather_at_place('New York').weather
    celsius_readings = observation.temperature('celsius')
    wind_readings = observation.wind()
    return {**celsius_readings, **wind_readings}
def get_weather_ohe():
    """Return the current NYC temperature plus a one-hot encoding of the
    weather status, persisting the result via utils.save_weather_dict()."""
    manager = OWM(APIKEY).weather_manager()
    observation = manager.weather_at_place('New York').weather
    weather_dict = {
        "Temperature": observation.temperature('celsius')['temp'],
        "Status_Clear": 0,
        "Status_Clouds": 0,
        "Status_Haze": 0,
        "Status_Mist": 0,
        "Status_Rain": 0,
        "Status_Snow": 0
    }
    # Flip the matching one-hot flag; any status outside the six known
    # categories is silently left un-encoded (same as the original chain).
    status_key = "Status_" + observation.status
    if status_key in weather_dict:
        weather_dict[status_key] += 1
    utils.save_weather_dict(weather_dict)
    return weather_dict
| 28.8125
| 72
| 0.640636
|
4a0e71d8ba37e8d71f3bf95259615bd99e82b0e1
| 140
|
py
|
Python
|
resultadoOperaciones/urls.py
|
jaimevz001/profiles-rest-api
|
88bb3cfc15c7d6787938812b1e6f0a4ec010173b
|
[
"MIT"
] | null | null | null |
resultadoOperaciones/urls.py
|
jaimevz001/profiles-rest-api
|
88bb3cfc15c7d6787938812b1e6f0a4ec010173b
|
[
"MIT"
] | null | null | null |
resultadoOperaciones/urls.py
|
jaimevz001/profiles-rest-api
|
88bb3cfc15c7d6787938812b1e6f0a4ec010173b
|
[
"MIT"
] | null | null | null |
from django.urls import path
from resultadoOperaciones import views
# URL routes for the resultadoOperaciones app.
urlpatterns = [
    # Routes 'resultado/' to the Resultados class-based view.
    path('resultado/', views.Resultados.as_view())
]
| 15.555556
| 50
| 0.75
|
4a0e7384564ae43185236b45d9f0d99803ef4f25
| 70,931
|
py
|
Python
|
src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/operations/_application_gateways_operations.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 207
|
2017-11-29T06:59:41.000Z
|
2022-03-31T10:00:53.000Z
|
src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/operations/_application_gateways_operations.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 4,061
|
2017-10-27T23:19:56.000Z
|
2022-03-31T23:18:30.000Z
|
src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/operations/_application_gateways_operations.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 802
|
2017-10-11T17:36:26.000Z
|
2022-03-31T22:24:32.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ApplicationGatewaysOperations(object):
"""ApplicationGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2021_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Keep references to the shared pipeline client, configuration, and the
        # (de)serializers; the operations below use them to build and send requests.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        # Sends the single raw DELETE request that starts the long-running
        # operation; the public entry point is begin_delete, which polls.
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202/204 are all valid LRO-accepted responses for ARM deletes.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified application gateway.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        # Only issue the initial DELETE when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        # Delete returns no body; the callback only honors a custom `cls` hook.
        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGateway"
"""Gets the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_03_01.models.ApplicationGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        parameters, # type: "_models.ApplicationGateway"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.ApplicationGateway"
        """Send the initial PUT of the create-or-update long-running operation.

        Serializes ``parameters`` as the request body and returns the
        deserialized ApplicationGateway from a 200 (updated) or 201 (created)
        response. Polling to final completion is handled by the caller
        (:meth:`begin_create_or_update`).
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ApplicationGateway')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 = existing gateway updated, 201 = gateway created; the payload is the
        # same model in both cases.
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGateway', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('ApplicationGateway', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        parameters, # type: "_models.ApplicationGateway"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.ApplicationGateway"]
        """Creates or updates the specified application gateway.

        Long-running operation: sends the initial PUT, then returns a poller that
        tracks completion via the ``Azure-AsyncOperation`` header.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param parameters: Parameters supplied to the create or update application gateway operation.
        :type parameters: ~azure.mgmt.network.v2021_03_01.models.ApplicationGateway
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationGateway or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_03_01.models.ApplicationGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: send the initial PUT. The identity lambda keeps the
            # raw pipeline response so the poller can inspect LRO headers.
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Only meaningful for the initial request; drop before polling requests.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final response into the model (or hand it to cls).
            deserialized = self._deserialize('ApplicationGateway', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # Unlike delete/start/stop (which use Location), create/update polls the
        # azure-async-operation endpoint for its final state.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an in-flight operation from a previously saved poller state.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def update_tags(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGateway"
"""Updates the specified application gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param parameters: Parameters supplied to update application gateway tags.
:type parameters: ~azure.mgmt.network.v2021_03_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_03_01.models.ApplicationGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
    def list(
        self,
        resource_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.ApplicationGatewayListResult"]
        """Lists all application gateways in a resource group.

        Returns a lazily-evaluated pager; no request is sent until iteration
        begins.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page: build the templated URL with query parameters.
            # Later pages: use the service-provided next_link verbatim.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Turn one page into (link to the next page or None, element iterator).
            deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch a single page, mapping non-200 responses onto typed errors.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways'} # type: ignore
    def list_all(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.ApplicationGatewayListResult"]
        """Gets all the application gateways in a subscription.

        Subscription-wide variant of :meth:`list`; returns a lazily-evaluated
        pager, so no request is sent until iteration begins.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page: build the templated URL with query parameters.
            # Later pages: use the service-provided next_link verbatim.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_all.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Turn one page into (link to the next page or None, element iterator).
            deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch a single page, mapping non-200 responses onto typed errors.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGateways'} # type: ignore
def _start_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self._start_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'} # type: ignore
    def begin_start(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Starts the specified application gateway.

        Long-running operation: sends the initial POST, then returns a poller
        that tracks completion via the operation's ``Location`` header.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial request now. The identity lambda
            # passed as cls preserves the raw pipeline response for the poller.
            raw_result = self._start_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Only meaningful for the initial request; drop before polling requests.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Start returns no body; only invoke a custom cls if given.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an in-flight operation from a previously saved poller state.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'} # type: ignore
def _stop_initial(
self,
resource_group_name, # type: str
application_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self._stop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'} # type: ignore
    def begin_stop(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Stops the specified application gateway in a resource group.

        Long-running operation: sends the initial POST, then returns a poller
        that tracks completion via the operation's ``Location`` header.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial request now. The identity lambda
            # passed as cls preserves the raw pipeline response for the poller.
            raw_result = self._stop_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Only meaningful for the initial request; drop before polling requests.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Stop returns no body; only invoke a custom cls if given.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an in-flight operation from a previously saved poller state.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'} # type: ignore
    def _backend_health_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        expand=None, # type: Optional[str]
        **kwargs # type: Any
    ):
        # type: (...) -> Optional["_models.ApplicationGatewayBackendHealth"]
        """Send the initial POST of the backend-health long-running operation.

        Returns the deserialized ApplicationGatewayBackendHealth on 200, or
        ``None`` on 202 (operation accepted, result not yet available). Polling
        to completion is handled by the caller (:meth:`begin_backend_health`).
        """
        cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ApplicationGatewayBackendHealth"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        # Construct URL
        url = self._backend_health_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # $expand is optional; only send it when the caller supplied a value.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _backend_health_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'} # type: ignore
    def begin_backend_health(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        expand=None, # type: Optional[str]
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.ApplicationGatewayBackendHealth"]
        """Gets the backend health of the specified application gateway in a resource group.

        Long-running operation: sends the initial POST, then returns a poller
        that tracks completion via the operation's ``Location`` header.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationGatewayBackendHealth or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayBackendHealth]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayBackendHealth"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial request now. The identity lambda
            # passed as cls preserves the raw pipeline response for the poller.
            raw_result = self._backend_health_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                expand=expand,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Only meaningful for the initial request; drop before polling requests.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final response into the model (or hand it to cls).
            deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume an in-flight operation from a previously saved poller state.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_backend_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'} # type: ignore
    def _backend_health_on_demand_initial(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        probe_request, # type: "_models.ApplicationGatewayOnDemandProbe"
        expand=None, # type: Optional[str]
        **kwargs # type: Any
    ):
        # type: (...) -> Optional["_models.ApplicationGatewayBackendHealthOnDemand"]
        """Initial (non-polling) call of the on-demand backend health LRO.

        Serializes ``probe_request`` into a POST body and issues the request.
        Returns a deserialized result only on a 200 response; a 202 (accepted,
        operation still running) yields ``None`` and is completed by the poller.
        """
        cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ApplicationGatewayBackendHealthOnDemand"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._backend_health_on_demand_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(probe_request, 'ApplicationGatewayOnDemandProbe')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 202 has no body to deserialize; only a completed 200 response does.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _backend_health_on_demand_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'} # type: ignore
    def begin_backend_health_on_demand(
        self,
        resource_group_name, # type: str
        application_gateway_name, # type: str
        probe_request, # type: "_models.ApplicationGatewayOnDemandProbe"
        expand=None, # type: Optional[str]
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.ApplicationGatewayBackendHealthOnDemand"]
        """Gets the backend health for given combination of backend pool and http setting of the specified
        application gateway in a resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_gateway_name: The name of the application gateway.
        :type application_gateway_name: str
        :param probe_request: Request body for on-demand test probe operation.
        :type probe_request: ~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayOnDemandProbe
        :param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationGatewayBackendHealthOnDemand or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayBackendHealthOnDemand]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayBackendHealthOnDemand"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # Fresh start: issue the initial request. cls=lambda returns the
            # raw pipeline response so the poller can drive the LRO itself.
            raw_result = self._backend_health_on_demand_initial(
                resource_group_name=resource_group_name,
                application_gateway_name=application_gateway_name,
                probe_request=probe_request,
                expand=expand,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These were consumed by the initial call and must not be forwarded
        # to the polling method below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final response once the LRO reaches a terminal state.
            deserialized = self._deserialize('ApplicationGatewayBackendHealthOnDemand', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Used by ARMPolling to format the 'location' final-state URL.
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_backend_health_on_demand.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/getBackendHealthOnDemand'} # type: ignore
def list_available_server_variables(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available server variables.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.list_available_server_variables.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_server_variables.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableServerVariables'} # type: ignore
def list_available_request_headers(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available request headers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.list_available_request_headers.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_request_headers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableRequestHeaders'} # type: ignore
def list_available_response_headers(
self,
**kwargs # type: Any
):
# type: (...) -> List[str]
"""Lists all available response headers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.list_available_response_headers.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_response_headers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableResponseHeaders'} # type: ignore
def list_available_waf_rule_sets(
self,
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewayAvailableWafRuleSetsResult"
"""Lists all available web application firewall rule sets.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableWafRuleSetsResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayAvailableWafRuleSetsResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableWafRuleSetsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.list_available_waf_rule_sets.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableWafRuleSetsResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_waf_rule_sets.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableWafRuleSets'} # type: ignore
def list_available_ssl_options(
self,
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewayAvailableSslOptions"
"""Lists available Ssl options for configuring Ssl policy.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableSslOptions, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayAvailableSslOptions
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableSslOptions"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.list_available_ssl_options.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableSslOptions', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_ssl_options.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default'} # type: ignore
    def list_available_ssl_predefined_policies(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.ApplicationGatewayAvailableSslPredefinedPolicies"]
        """Lists all SSL predefined policies for configuring Ssl policy.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationGatewayAvailableSslPredefinedPolicies or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.ApplicationGatewayAvailableSslPredefinedPolicies]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayAvailableSslPredefinedPolicies"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the GET request for either the first page (from the
            # metadata URL template) or a continuation page (next_link).
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_available_ssl_predefined_policies.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries the full query string from the service.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Deserialize one page; return (link to next page or None, item iterator).
            deserialized = self._deserialize('ApplicationGatewayAvailableSslPredefinedPolicies', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch one page, raising on any non-200 status.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_available_ssl_predefined_policies.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies'} # type: ignore
def get_ssl_predefined_policy(
self,
predefined_policy_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ApplicationGatewaySslPredefinedPolicy"
"""Gets Ssl predefined policy with the specified policy name.
:param predefined_policy_name: Name of Ssl predefined policy.
:type predefined_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewaySslPredefinedPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_03_01.models.ApplicationGatewaySslPredefinedPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewaySslPredefinedPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.get_ssl_predefined_policy.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'predefinedPolicyName': self._serialize.url("predefined_policy_name", predefined_policy_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewaySslPredefinedPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_ssl_predefined_policy.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies/{predefinedPolicyName}'} # type: ignore
| 50.163366
| 240
| 0.672203
|
4a0e73860c10b2b7b915104a849fc5365f1bf58f
| 825
|
py
|
Python
|
src/adafruit-circuitpython-bundle-4.x-mpy-20190713/examples/st7735r_minitft_simpletest.py
|
mbaaba/solar_panel
|
42059d8c61320494ad1298065dbc50cd9b3bd51e
|
[
"MIT"
] | 1
|
2020-04-13T16:10:53.000Z
|
2020-04-13T16:10:53.000Z
|
infra/libs-400rc2-20190512/examples/st7735r_minitft_simpletest.py
|
jadudm/feather-isa
|
b7419e6698c3f64be4d8122656eb8124631ca859
|
[
"MIT"
] | null | null | null |
infra/libs-400rc2-20190512/examples/st7735r_minitft_simpletest.py
|
jadudm/feather-isa
|
b7419e6698c3f64be4d8122656eb8124631ca859
|
[
"MIT"
] | null | null | null |
"""
This example will test out the display on the Mini TFT Breakout
"""
import board
import displayio
from adafruit_st7735r import ST7735R
spi = board.SPI()
tft_cs = board.D5
tft_dc = board.D6
displayio.release_displays()
display_bus = displayio.FourWire(spi, command=tft_dc, chip_select=tft_cs, reset=board.D9)
display = ST7735R(display_bus, width=160, height=80, colstart=24, rotation=90, bgr=True)
# Make the display context
splash = displayio.Group(max_size=10)
display.show(splash)
color_bitmap = displayio.Bitmap(160, 80, 1)
color_palette = displayio.Palette(1)
color_palette[0] = 0xFF0000
bg_sprite = displayio.TileGrid(color_bitmap,
pixel_shader=color_palette,
x=0, y=0)
splash.append(bg_sprite)
while True:
pass
| 25.78125
| 90
| 0.688485
|
4a0e74d8d8d5291b061da40ab516c89aa12b6fa5
| 106
|
py
|
Python
|
tests/test_serilizers.py
|
TobiasRosskopf/django_backend_template
|
1de5ea6b9006b0cc3898873b26ea251313cfe61d
|
[
"MIT"
] | null | null | null |
tests/test_serilizers.py
|
TobiasRosskopf/django_backend_template
|
1de5ea6b9006b0cc3898873b26ea251313cfe61d
|
[
"MIT"
] | null | null | null |
tests/test_serilizers.py
|
TobiasRosskopf/django_backend_template
|
1de5ea6b9006b0cc3898873b26ea251313cfe61d
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
class UserSerializerTests(TestCase):
    """Test case scaffold for the user serializer (no tests yet)."""

    def setUp(self):
        """No per-test fixtures are required yet."""
| 15.142857
| 36
| 0.716981
|
4a0e7677fe8f49c430bf2fc2fe3ccda9ed3bdb62
| 33,422
|
py
|
Python
|
paleomix/common/makefile.py
|
jfy133/paleomix
|
f7f687f6f69b2faedd247a1d289d28657710a8c2
|
[
"MIT"
] | null | null | null |
paleomix/common/makefile.py
|
jfy133/paleomix
|
f7f687f6f69b2faedd247a1d289d28657710a8c2
|
[
"MIT"
] | null | null | null |
paleomix/common/makefile.py
|
jfy133/paleomix
|
f7f687f6f69b2faedd247a1d289d28657710a8c2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# Copyright (c) 2012 Mikkel Schubert <MikkelSch@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""Generalized methods for parsing/validating "makefiles" in YAML format.
The following example will use a imagined makefile for 'vcf_filter', which
takes a set of input files, each of which is assigned an output file, and
each of which may have a set of filters (in addition to a set of defaults):
Example makefile in YAML format:
-------------------------------------------------------------------------------
| Defaults:
| --min-mapq-bias: 1e-4
| --min-end-distance-bias: 1e-4
|
| VCF_Files:
| "path/to/file1.vcf":
| Output_File: "path/to/output1.vcf"
| Options:
| --min-strand-bias: 1e-4
| --min-baseq-bias: 1e-4
| "path/to/file2.vcf":
| Output_File: "path/to/output2.vcf"
-------------------------------------------------------------------------------
Such a makefile can be parsed into a dictionary using YAML, but to help us
ensure that the makefile fits the expected layout (as above), we need to
specify the structure of the makefile.
Firstly, note that the options are specified twice, so we will make a re-usable
specification for those. In this case, this can accomplished like so:
-------------------------------------------------------------------------------
| _SPECIFICATION_OF_OPTIONS = {
| StringStartsWith("--") : Or(IsInt, IsFloat),
| }
-------------------------------------------------------------------------------
or as so:
-------------------------------------------------------------------------------
| _SPECIFICATION_OF_OPTIONS = IsDictOf(StringStartsWith("--"),
| Or(IsInt, IsFloat))
-------------------------------------------------------------------------------
In either case, we require that the options be a dictionary with string keys
that start with "--", and that the values are either floats or integers. In
this case the two methods are equivalent, but normally the first method would
be preferred for more complex structures, while the second method is required
if different sub-structures are possible. For example, to require EITHER a
list of integers, or a dict of strings -> integers would have to be specified
as so:
-------------------------------------------------------------------------------
| Or(IsListOf(IsInt), IsDictOf(IsStr, IsInt))
-------------------------------------------------------------------------------
Note that specification objects that do not take any parameters (IsInt, etc.)
do not need to be instantiated. Thus one can use both 'IsInt' or 'IsInt()',
whereas 'IsListOf', 'IsDictOf', etc. need to be instantiated. This is purely
for convenience.
Having specified the expected structure of the options, we can specify the
remaining structure of the makefile:
-------------------------------------------------------------------------------
| _MAKEFILE_SPECIFICATION = {
| Defaults" : _SPECIFICATION_OF_OPTIONS,
|
| "VCF_Files" : {
| Str : {
| "Output_File" : IsStr,
| "Options" : _SPECIFICATION_OF_OPTIONS,
| }
| }
| }
-------------------------------------------------------------------------------
Finally, we can specify default values. Defaults can be specified for almost
all specification objects (excepting specifications for keys in dictionaries,
sub-specification for logical operators, and a couple of others). Let's suppose
that we always want a min/max depth set, even if the user did not include them
in the defaults:
-------------------------------------------------------------------------------
| _SPECIFICATION_OF_OPTIONS = {
| StringStartsWith("--") : Or(IsInt, IsFloat),
| "--min-depth" : IsInt(default = 8),
| "--max-depth" : IsInt(default = 100),
| }
-------------------------------------------------------------------------------
These values would then be set, unless they were already set. Note that named
keys are given precedence above specification objects, when validating
key/value pairs. In other words, given this specification, the key
"--min-depth" is ALWAYS valid (even if it would fail StringStartsWith("--"),
and the value is ONLY checked against IsInt(default = 8).
Bringing all this together, we could then parse the a file containing the YAML
code shown above as follows:
-------------------------------------------------------------------------------
| makefile = read_makefile("/path/to/makefile.yaml",
| _MAKEFILE_SPECIFICATION)
-------------------------------------------------------------------------------
which would yield the following dictionary:
-------------------------------------------------------------------------------
| {'Defaults': {'--max-depth': 100,
| '--min-depth': 8,
| '--min-end-distance-bias': 0.001,
| '--min-mapq-bias': 0.001},
| 'VCF_Files': {'path/to/file1.vcf':
| {'Options': {'--max-depth': 100,
| '--min-baseq-bias': 0.005,
| '--min-depth': 8,
| '--min-strand-bias': 0.005},
| 'Output_File': 'path/to/output1.vcf'},
| 'path/to/file2.vcf':
| {'Output_File': 'path/to/output1.vcf'}}},
-------------------------------------------------------------------------------
Unfortunately, the defaults are being applied to BOTH "Options" sub-trees,
which makes it impossible to tell which values are supposed to be over-ridden
for the files. To prevent this from happening, we can specify that defaults
should NOT be applied, by using the WithoutDefaults wrapper object:
-------------------------------------------------------------------------------
| _MAKEFILE_SPECIFICATION = {
| Defaults" : _SPECIFICATION_OF_OPTIONS,
|
| "VCF_Files" : {
| Str : {
| "Output_File" : IsStr,
| "Options" : WithoutDefaults(_SPECIFICATION_OF_OPTIONS),
| }
| }
| }
-------------------------------------------------------------------------------
Which yields the following structure following processing:
-------------------------------------------------------------------------------
| {'Defaults': {'--max-depth': 100,
| '--min-depth': 8,
| '--min-end-distance-bias': 0.001,
| '--min-mapq-bias': 0.001},
| 'VCF_Files': {'path/to/file1.vcf': {'Options': {'--min-baseq-bias': 0.005,
| '--min-strand-bias': 0.005},
| 'Output_File': 'path/to/output1.vcf'},
| 'path/to/file2.vcf': {'Output_File': 'path/to/output2.vcf'}}}
-------------------------------------------------------------------------------
If the file contents does not match the expected structure, a MakefileError is
raised which describes the problem. For example, suppose that an "Output_File"
value has accidentally been left blank ('IsStr' requires a NON-EMPTY string):
-------------------------------------------------------------------------------
| Makefile requirement not met at ...:
| Expected value: a non-empty string
| Observed value: ''
-------------------------------------------------------------------------------
"""
import copy
import logging
import paleomix.yaml
from paleomix.common.utilities import group_by_pred
class MakefileError(RuntimeError):
    """Error raised when a makefile cannot be read, or when its contents do
    not meet the given specification.
    """
def read_makefile(filename, specification):
    """Reads and parses a makefile using the given specification.

    Returns the parsed data with defaults applied. Raises MakefileError if
    the file cannot be parsed as YAML or fails validation.
    """
    try:
        with open(filename) as handle:
            data = paleomix.yaml.safe_load(handle)
    except paleomix.yaml.YAMLError as error:
        # Chain the original YAML error so the full parse failure is
        # preserved in tracebacks, rather than only its string form.
        raise MakefileError(error) from error

    return process_makefile(data, specification)
def process_makefile(data, specification, path=(), apply_defaults=True):
    """Validates a makefile and applies defaults to missing keys.

    Arguments:
      data          -- the parsed makefile subtree (e.g. from yaml.safe_load).
      specification -- tree of MakefileSpec objects/classes, dicts and lists
                       describing the expected structure (see module docs).
      path          -- tuple of keys leading to 'data'; used in messages.
      apply_defaults -- if True, defaults from the specification are copied
                        into 'data' for missing keys.

    Note that default values are deep-copied before being set.
    """
    if isinstance(specification, WithoutDefaults):
        # Unwrap and process the subtree with defaults disabled.
        specification = specification.specification
        data = process_makefile(data, specification, path, apply_defaults=False)
    elif isinstance(specification, PreProcessMakefile):
        # Let the wrapper transform data/spec before validation.
        data, specification = specification(path, data)
        data = process_makefile(data, specification, path, apply_defaults)
    elif _is_spec(specification):
        # Leaf: validate the value; raises MakefileError on mismatch.
        _instantiate_spec(specification)(path, data)
    elif isinstance(data, (dict, type(None))) and isinstance(specification, dict):
        # A limitation of YAML is that empty subtrees are equal to None;
        # this check ensures that empty subtrees to be handled properly
        if data is None:
            data = {}
        _process_default_values(data, specification, path, apply_defaults)
        for cur_key in data:
            ref_key = _get_matching_spec_or_value(
                cur_key, specification, path + (cur_key,)
            )
            data[cur_key] = process_makefile(
                data[cur_key], specification[ref_key], path + (cur_key,), apply_defaults
            )
    elif isinstance(data, (list, type(None))) and isinstance(specification, list):
        if not all(_is_spec(spec) for spec in specification):
            raise TypeError(
                "Lists contains non-specification objects (%r): %r"
                % (_path_to_str(path), specification)
            )
        elif data is None:  # See comment above
            data = []
        # Each list item may match any of the specs in the list.
        specification = IsListOf(*specification)
        _instantiate_spec(specification)(path, data)
    elif not isinstance(specification, (dict, list)):
        raise TypeError(
            "Unexpected type in makefile specification at %r: %r!"
            % (_path_to_str(path), specification)
        )
    else:
        raise MakefileError(
            "Inconsistency between makefile specification and "
            "current makefile at %s:\n    Expected %s, "
            "found %s %r!"
            % (
                _path_to_str(path),
                type(specification).__name__,
                type(data).__name__,
                data,
            )
        )
    return data
###############################################################################
###############################################################################
# Unique sentinel used to specify that a MakefileSpec lacks a default value;
# tested with 'is' comparisons throughout this module.
DEFAULT_NOT_SET = object()
# Unique sentinel used to specify that the user MUST supply a value.
REQUIRED_VALUE = object()
class WithoutDefaults:
    """Wrapper that instructs 'process_makefile' to skip applying default
    values for the wrapped specification. See the module docs for example
    usage.
    """

    def __init__(self, specification):
        self.specification = specification
class PreProcessMakefile:
    """Enables transformation of a makefile subtree prior to validation.

    When encountered, the object is invoked with the current path and value
    and must return a (value, specification) tuple, which is then used in
    its place. This permits e.g. rewriting of legacy fields for backwards
    compatibility.
    """

    def __call__(self, path, value):
        """Must return (value, specification) tuple."""
        raise NotImplementedError  # pragma: no coverage
class MakefileSpec:
    """Common base for all specification objects.

    Every specification must derive from this class and implement the
    'meets_spec' method, which returns True when (and only when) a value
    satisfies the specification.
    """

    def __init__(self, description, default=DEFAULT_NOT_SET):
        """description -- Human readable description of the specification.
        default     -- Optional default value, or DEFAULT_NOT_SET when
                       unused. Defaults are copied before being applied."""
        self.description = description
        self.default = default

        if default in (DEFAULT_NOT_SET, REQUIRED_VALUE):
            # No concrete default value to validate.
            return

        if not self.meets_spec(default):
            raise ValueError(
                (
                    "Default value does not meet requirements:\n"
                    "  Expected value: %s\n"
                    "  Observed value: %r\n"
                )
                % (description, default)
            )

    def __call__(self, path, value):
        if self.meets_spec(value):
            return

        raise MakefileError(
            (
                "Makefile requirement not met at %r:\n"
                "  Expected value: %s\n"
                "  Observed value: %r\n"
                "  Observed type:  %s"
            )
            % (_path_to_str(path), self.description, value, type(value).__name__)
        )

    def meets_spec(self, _value):
        """Return True if value meets the specification, False otherwise."""
        raise NotImplementedError
###############################################################################
###############################################################################
# Tests for basic types
class IsInt(MakefileSpec):
    """Matches int values, explicitly excluding booleans."""

    def __init__(self, description="an integer", default=DEFAULT_NOT_SET):
        super().__init__(description, default)

    def meets_spec(self, value):
        # bool is a subclass of int, so it must be ruled out explicitly
        return not isinstance(value, bool) and isinstance(value, int)
class IsUnsignedInt(IsInt):
    """Matches int values greater than or equal to zero (not booleans)."""

    def __init__(self, description="an unsigned integer", default=DEFAULT_NOT_SET):
        super().__init__(description, default)

    def meets_spec(self, value):
        return super().meets_spec(value) and value >= 0
class IsFloat(MakefileSpec):
    """Matches float values; integer values are not accepted."""

    def __init__(self, description="a float", default=DEFAULT_NOT_SET):
        super().__init__(description, default)

    def meets_spec(self, value):
        return isinstance(value, float)
class IsBoolean(MakefileSpec):
    """Matches boolean values (True/False) only."""

    def __init__(self, description="a boolean", default=DEFAULT_NOT_SET):
        super().__init__(description, default)

    def meets_spec(self, value):
        return isinstance(value, bool)
class IsStr(MakefileSpec):
    """Matches strings of at least 'min_len' characters (non-empty by
    default)."""

    def __init__(self, description=None, default=DEFAULT_NOT_SET, min_len=1):
        if description is None:
            if min_len < 0:
                raise ValueError("min_len must be non-negative")
            elif min_len == 0:
                description = "a string"
            elif min_len == 1:
                description = "a non-empty string"
            else:
                description = "a string at least %s characters long" % (min_len,)

        self._min_len = min_len
        super().__init__(description, default)

    def meets_spec(self, value):
        return isinstance(value, str) and len(value) >= self._min_len
class IsNone(MakefileSpec):
    """Matches None only; typically used for values that were not set in
    the makefile."""

    def __init__(self, description="null or not set", default=DEFAULT_NOT_SET):
        if default is not DEFAULT_NOT_SET:
            raise NotImplementedError("IsNone does not support default values")
        super().__init__(description, default)

    def meets_spec(self, value):
        return value is None
class ValueMissing(MakefileSpec):
    """Specification that rejects every value; used to represent empty
    substructures in the makefile specification."""

    def __init__(self, description="no values"):
        super().__init__(description, DEFAULT_NOT_SET)

    def meets_spec(self, _value):
        return False
class DeprecatedOption(MakefileSpec):
    """Wraps a specification for an option that still works but is deprecated;
    values are validated against the wrapped spec, and a warning is logged
    whenever the option is used."""
    def __init__(self, spec):
        self._spec = spec
        if not isinstance(spec, MakefileSpec):
            raise ValueError(spec)
        # Inherit description and default from the wrapped specification.
        MakefileSpec.__init__(self, spec.description, spec.default)
    def __call__(self, path, value):
        # Validate first; the warning is only emitted for accepted values,
        # since the wrapped spec raises MakefileError on mismatch.
        self._spec(path, value)
        log = logging.getLogger(__name__)
        log.warning(
            "option has been deprecated and will be removed in the future: %s"
            % (_path_to_str(path),)
        )
    def meets_spec(self, value):
        return self._spec.meets_spec(value)
class RemovedOption(MakefileSpec):
    """Marks substructures that have been removed; any value is accepted,
    but a warning is logged and the value has no effect."""

    def __init__(self, description="removed settings"):
        super().__init__(description, DEFAULT_NOT_SET)

    def __call__(self, path, _value):
        logging.getLogger(__name__).warning(
            "option has been removed and no longer has any effect: %s"
            % (_path_to_str(path),)
        )

    def meets_spec(self, _value):
        return True
###############################################################################
###############################################################################
# BinaryOperators
class _BinaryOperator(MakefileSpec):
    """Base class for comparisons against a fixed right-hand value.

    'opfunc' is invoked as opfunc(lvalue, rvalue) using the stored rvalue;
    'description' must contain an '{rvalue}' placeholder, which is filled
    in to produce a human readable description. When 'list_kword' is set,
    rvalue is assumed to be a sequence and is rendered via _list_values.
    An optional 'key' function is applied to values prior to comparison.
    """

    def __init__(self, description, default, opfunc, rvalue, key=None, list_kword=None):
        self._operator = opfunc
        self._keyfunc = key
        self._rvalue = rvalue

        if list_kword:
            rendered = _list_values(rvalue, list_kword)
        else:
            rendered = rvalue
        super().__init__(description.format(rvalue=rendered), default)

    def meets_spec(self, value):
        keyfunc = self._keyfunc
        if keyfunc is not None:
            value = keyfunc(value)
        return self._operator(value, self._rvalue)
class ValueIn(_BinaryOperator):
    """Matches (hashable) values contained in a fixed collection of values;
    comparisons are case-sensitive."""

    def __init__(
        self,
        rvalues,
        key=None,
        description="value in {rvalue}",
        default=DEFAULT_NOT_SET,
    ):
        _BinaryOperator.__init__(
            self,
            description=description.format(rvalue=_list_values(rvalues, "or")),
            default=default,
            opfunc=self._in_operator,
            rvalue=rvalues,
            key=key,
        )

    def _in_operator(self, lvalue, rvalues):
        """Implements 'in' operator."""
        return _is_hashable(lvalue) and lvalue in rvalues
class ValuesIntersect(_BinaryOperator):
    """Matches sequences that share at least one element with 'rvalues';
    dicts and non-iterable/unhashable values never match."""

    def __init__(self, rvalues, key=None, description=None, default=DEFAULT_NOT_SET):
        _BinaryOperator.__init__(
            self,
            description=description
            or "one or more of %s" % (_list_values(rvalues, "and"),),
            default=default,
            opfunc=self._operator,
            rvalue=rvalues,
            key=key,
        )

    def _operator(self, lvalue, rvalues):
        try:
            if isinstance(lvalue, dict):
                return False
            return bool(frozenset(lvalue).intersection(rvalues))
        except TypeError:
            return False
class ValuesSubsetOf(_BinaryOperator):
    """Matches sequences whose elements form a subset of 'rvalues';
    dicts and non-iterable/unhashable values never match."""

    def __init__(self, rvalues, key=None, description=None, default=DEFAULT_NOT_SET):
        _BinaryOperator.__init__(
            self,
            description=description or "subset of %s" % (_list_values(rvalues, "and"),),
            default=default,
            opfunc=self._operator,
            rvalue=rvalues,
            key=key,
        )

    def _operator(self, lvalue, rvalues):
        try:
            if isinstance(lvalue, dict):
                return False
            return bool(frozenset(lvalue).issubset(rvalues))
        except TypeError:
            return False
###############################################################################
###############################################################################
# Logical operators
class _MultipleSpecs(MakefileSpec):
    """Base-class for logical operators combining one or more specs.

    Builds a combined description of the form
    '<prefix><fmt % spec1><join_by><fmt % spec2>...<postfix>'.
    """

    def __init__(
        self, specs, kwargs, name, prefix="", postfix="", join_by=" ", fmt="%s"
    ):
        self._specs = [_instantiate_spec(spec) for spec in specs]
        if not self._specs:
            raise ValueError("No specification given to %r" % (name.title(),))
        elif not all((spc.default is DEFAULT_NOT_SET) for spc in self._specs):
            raise ValueError(
                "Default values cannot be set in specs given to logical operators"
            )

        descriptions = [fmt % (spec.description,) for spec in self._specs]
        combined = prefix + join_by.join(descriptions) + postfix
        MakefileSpec.__init__(self, combined, kwargs.get("default", DEFAULT_NOT_SET))
class And(_MultipleSpecs):
    """Matches values that satisfy every one of the given specifications.

    A default may be set on the 'And' itself, but not on the wrapped specs.
    """

    def __init__(self, *specs, **kwargs):
        _MultipleSpecs.__init__(self, specs, kwargs, "And", join_by=" and ", fmt="(%s)")

    def meets_spec(self, value):
        for spec in self._specs:
            if not spec.meets_spec(value):
                return False
        return True
class Or(_MultipleSpecs):
    """Matches values that satisfy at least one of the given specifications.

    A default may be set on the 'Or' itself, but not on the wrapped specs.
    """

    def __init__(self, *specs, **kwargs):
        _MultipleSpecs.__init__(self, specs, kwargs, "Or", join_by=" or ", fmt="(%s)")

    def meets_spec(self, value):
        for spec in self._specs:
            if spec.meets_spec(value):
                return True
        return False
class Not(_MultipleSpecs):
    """Matches values that do NOT satisfy the wrapped specification.

    A default may be set on the 'Not' itself, but not on the wrapped spec.
    """

    def __init__(self, spec, **kwargs):
        _MultipleSpecs.__init__(self, [spec], kwargs, "Not", prefix="not ", fmt="(%s)")

    def meets_spec(self, value):
        (spec,) = self._specs
        return not spec.meets_spec(value)
###############################################################################
###############################################################################
# String operators
#
# In addition to providing string-specific operators (is uppercase, ends/starts
# with), "in" and set operators are provided which do case-insensitive
# comparisons. For case-sensitive operations, use the Value* specifications.
class StringIn(_BinaryOperator):
    """Require that values are found in a set of values. For strings, the
    comparison is done case-insensitively. For case-sensitive comparisons,
    see 'ValueIn'.
    """

    def __init__(
        self,
        rvalues,
        key=None,
        # Fixed typo in the user-facing description ("case-insentive")
        description="one of {rvalue}, case-insensitive",
        default=DEFAULT_NOT_SET,
    ):
        description = description.format(rvalue=_list_values(rvalues, "or"))
        # Normalize allowed values once, so each comparison is a set lookup
        rvalues = frozenset(map(_safe_coerce_to_lowercase, rvalues))

        _BinaryOperator.__init__(
            self, description, default, self._string_in_operator, rvalues
        )

    @classmethod
    def _string_in_operator(cls, lvalue, rvalues):
        """Implements case-insensitive 'in' operator."""
        if not _is_hashable(lvalue):
            return False

        return _safe_coerce_to_lowercase(lvalue) in rvalues
class StringStartsWith(IsStr):
    """Matches non-empty strings beginning with the given prefix."""

    def __init__(self, prefix, default=DEFAULT_NOT_SET):
        assert prefix and isinstance(prefix, str)
        self._prefix = prefix
        IsStr.__init__(self, "a string with prefix %r" % (prefix,), default)

    def meets_spec(self, value):
        if not super().meets_spec(value):
            return False
        return value.startswith(self._prefix)
class StringEndsWith(IsStr):
    """Matches non-empty strings ending with the given postfix."""

    def __init__(self, postfix, default=DEFAULT_NOT_SET):
        assert postfix and isinstance(postfix, str)
        self._postfix = postfix
        IsStr.__init__(self, "a string with postfix %r" % (postfix,), default)

    def meets_spec(self, value):
        if not super().meets_spec(value):
            return False
        return value.endswith(self._postfix)
###############################################################################
###############################################################################
# Tests for collections
class IsListOf(_MultipleSpecs):
    """Matches lists in which every item satisfies at least one of the given
    specifications; when no default value (i.e. a non-empty list) is needed,
    the shorthand syntax is preferred:
        [IsType1, IsType2, ...]
    which is equivalent to:
        IsListOf(IsType1, IsType2, ...)
    """

    def __init__(self, *specs, **kwargs):
        _MultipleSpecs.__init__(
            self,
            specs,
            kwargs,
            "IsListOf",
            prefix="[",
            postfix=", ...]",
            join_by=" or ",
            fmt="(%s)",
        )

    def meets_spec(self, value):
        if not isinstance(value, list):
            return False

        for item in value:
            if not any(spec.meets_spec(item) for spec in self._specs):
                return False
        return True
class IsDictOf(MakefileSpec):
    """Matches dicts in which every key satisfies 'key_spec' and every value
    satisfies 'value_spec'; when no default value (i.e. a dictionary) is
    needed, the shorthand syntax is preferred:
        {IsType1: IsType2}
    which is equivalent to:
        IsDictOf(IsType1, IsType2)
    but additionally allows multiple type-pairs to be specified.
    """

    def __init__(self, key_spec, value_spec, default=DEFAULT_NOT_SET):
        self._key_spec = _instantiate_spec(key_spec)
        self._value_spec = _instantiate_spec(value_spec)
        if self._key_spec.default is not DEFAULT_NOT_SET:
            raise ValueError("Default values cannot be set in key-specs")
        elif self._value_spec.default is not DEFAULT_NOT_SET:
            raise ValueError("Default values cannot be set in value-specs")

        description = "{(%s) : (%s)}" % (
            self._key_spec.description,
            self._value_spec.description,
        )
        MakefileSpec.__init__(self, description, default)

    def meets_spec(self, value):
        if not isinstance(value, dict):
            return False

        for key, item in value.items():
            if not self._key_spec.meets_spec(key):
                return False
            if not self._value_spec.meets_spec(item):
                return False
        return True
###############################################################################
###############################################################################
# Helper functions
def _is_hashable(value):
try:
hash(value)
return True
except TypeError:
return False
def _is_spec(spec):
    """Returns true if 'spec' is a specification instance or class."""
    if isinstance(spec, MakefileSpec):
        return True
    return isinstance(spec, type) and issubclass(spec, MakefileSpec)
def _instantiate_spec(spec):
    """Takes a specification instance or class, and returns an instance."""
    if isinstance(spec, MakefileSpec):
        return spec
    if isinstance(spec, type) and issubclass(spec, MakefileSpec):
        return spec()
    raise TypeError("Specifications must derive from 'MakefileSpec'")
def _safe_coerce_to_lowercase(value):
"""Returns strings as lowercase, and any other types of value unchanged."""
if isinstance(value, str):
return value.lower()
return value
def _list_values(values, sep):
"""Returns list of values as '[values[0], values[1], ..., sep values[-1]]':
$ _list_values([1, 2, 3], "and")
"[1, 2, and 3]"
"""
values = list(map(repr, values))
if len(values) > 2:
values = (", ".join(values[:-1]) + ",", values[-1])
if len(values) == 2:
values = (" ".join((values[0], sep, values[1])),)
return values[0]
def _get_summary_spec(specs_or_keys):
    """Returns a specification object describing a set of requirements; used
    when a key or value matches none of the possible specs, to describe the
    set of allowed values in the resulting error message.
    """
    specs, keys = group_by_pred(_is_spec, specs_or_keys)
    if keys:
        key_spec = ValueIn(keys, description="key in {rvalue}")
        return Or(key_spec, *specs) if specs else key_spec
    if specs:
        return Or(*specs)
    return ValueMissing()
def _get_matching_spec_or_value(value, specs, path):
    """Returns the specification object or value that matches the observed
    value; specs may be a list of specification objects and/or constant values
    allowed by the makefile. If no matching specification or value is found,
    a MakefileError is raised (via the summary spec).
    """
    # Constant values take precedence over specification objects
    if value in specs:
        return value
    for spec in specs:
        if _is_spec(spec) and _instantiate_spec(spec).meets_spec(value):
            return spec
    # No matching key or spec; create combined spec to raise error message
    _get_summary_spec(specs)(path, value)
    assert False  # pragma: no coverage
def _process_default_values(data, specification, path, apply_defaults):
    """Checks a subtree against a specification, verifies that required values
    have been set, and (optionally) sets values for keys where defaults have
    been specified.

    Raises MakefileError for keys marked REQUIRED_VALUE that are missing
    from 'data'. Defaults are deep-copied before being assigned.
    """
    for cur_key in specification:
        if (not _is_spec(cur_key)) and (cur_key not in data):
            default_value = specification[cur_key]
            default_value_from_spec = False

            # Unwrap pre-processing wrappers to reach the underlying spec
            while isinstance(default_value, PreProcessMakefile):
                data, default_value = default_value(path, data)

            if _is_spec(default_value):
                default_value = _instantiate_spec(default_value)
                if default_value.default is DEFAULT_NOT_SET:
                    continue
                elif default_value.default is REQUIRED_VALUE:
                    raise MakefileError(
                        # Fixed typo in user-facing message ("supplified")
                        "A value MUST be supplied for %r"
                        % (_path_to_str(path + (cur_key,)))
                    )
                default_value = default_value.default
                default_value_from_spec = True

            if apply_defaults and not isinstance(
                default_value, (PreProcessMakefile, WithoutDefaults)
            ):
                if isinstance(default_value, dict):
                    # Setting of values in the dict will be accomplished
                    # in subsequent calls to _process_default_values
                    default_value = {}
                elif isinstance(default_value, list):
                    # Lists of specs defaults to empty lists
                    if not default_value_from_spec:
                        default_value = []

                # Prevent clobbering of values when re-using sub-specs
                data[cur_key] = copy.deepcopy(default_value)
def _path_to_str(path):
"""Converts a path (tuple of strings) to a printable string."""
return " :: ".join(str(field) for field in path)
CLI_PARAMETERS = Or(IsListOf(IsStr, IsInt, IsFloat), Or(IsStr, IsInt, IsFloat, IsNone))
| 37.259755
| 88
| 0.591676
|
4a0e779f4886806f6b5604b540bcdf9057436418
| 13,195
|
py
|
Python
|
Doc/tools/docutils/parsers/rst/roles.py
|
cocoatomo/Python3.2_C_API_Tutorial
|
e33d4a285429935aca3178dc2a97aca3ab484232
|
[
"PSF-2.0"
] | 2
|
2019-03-03T00:04:36.000Z
|
2020-10-06T16:22:38.000Z
|
Doc/tools/docutils/parsers/rst/roles.py
|
cocoatomo/Python3.2_C_API_Tutorial
|
e33d4a285429935aca3178dc2a97aca3ab484232
|
[
"PSF-2.0"
] | null | null | null |
Doc/tools/docutils/parsers/rst/roles.py
|
cocoatomo/Python3.2_C_API_Tutorial
|
e33d4a285429935aca3178dc2a97aca3ab484232
|
[
"PSF-2.0"
] | 1
|
2019-03-03T00:04:38.000Z
|
2019-03-03T00:04:38.000Z
|
# $Id: roles.py 78909 2010-03-13 10:49:23Z georg.brandl $
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# Copyright: This module has been placed in the public domain.
"""
This module defines standard interpreted text role functions, a registry for
interpreted text roles, and an API for adding to and retrieving from the
registry.
The interface for interpreted role functions is as follows::
def role_fn(name, rawtext, text, lineno, inliner,
options={}, content=[]):
code...
# Set function attributes for customization:
role_fn.options = ...
role_fn.content = ...
Parameters:
- ``name`` is the local name of the interpreted text role, the role name
actually used in the document.
- ``rawtext`` is a string containing the entire interpreted text construct.
Return it as a ``problematic`` node linked to a system message if there is a
problem.
- ``text`` is the interpreted text content, with backslash escapes converted
to nulls (``\x00``).
- ``lineno`` is the line number where the interpreted text begins.
- ``inliner`` is the Inliner object that called the role function.
It defines the following useful attributes: ``reporter``,
``problematic``, ``memo``, ``parent``, ``document``.
- ``options``: A dictionary of directive options for customization, to be
interpreted by the role function. Used for additional attributes for the
generated elements and other functionality.
- ``content``: A list of strings, the directive content for customization
("role" directive). To be interpreted by the role function.
Function attributes for customization, interpreted by the "role" directive:
- ``options``: A dictionary, mapping known option names to conversion
functions such as `int` or `float`. ``None`` or an empty dict implies no
options to parse. Several directive option conversion functions are defined
in the `directives` module.
All role functions implicitly support the "class" option, unless disabled
with an explicit ``{'class': None}``.
- ``content``: A boolean; true if content is allowed. Client code must handle
the case where content is required but not supplied (an empty content list
will be supplied).
Note that unlike directives, the "arguments" function attribute is not
supported for role customization. Directive arguments are handled by the
"role" directive itself.
Interpreted role functions return a tuple of two values:
- A list of nodes which will be inserted into the document tree at the
point where the interpreted role was encountered (can be an empty
list).
- A list of system messages, which will be inserted into the document tree
immediately after the end of the current inline block (can also be empty).
"""
__docformat__ = 'reStructuredText'
from docutils import nodes, utils
from docutils.parsers.rst import directives
from docutils.parsers.rst.languages import en as _fallback_language_module
DEFAULT_INTERPRETED_ROLE = 'title-reference'
"""
The canonical name of the default interpreted role. This role is used
when no role is specified for a piece of interpreted text.
"""
# Canonical (English) role names -> role functions; populated at import time
# by the register_canonical_role() calls below.
_role_registry = {}
"""Mapping of canonical role names to role functions. Language-dependent role
names are defined in the ``language`` subpackage."""
# Local/language-dependent names -> role functions; populated lazily by
# role(), which registers each name on first successful lookup.
_roles = {}
"""Mapping of local or language-dependent interpreted text role names to role
functions."""
def role(role_name, language_module, lineno, reporter):
    """
    Locate and return a role function from its language-dependent name, along
    with a list of system messages. If the role is not found in the current
    language, check English. Return a 2-tuple: role function (``None`` if the
    named role cannot be found) and a list of system messages.
    """
    normname = role_name.lower()
    messages = []
    msg_text = []

    if normname in _roles:
        return _roles[normname], messages

    if role_name:
        canonicalname = None
        try:
            canonicalname = language_module.roles[normname]
        # 'except X as e' replaces the Python-2-only 'except X, e' form;
        # the 'as' syntax works on Python 2.6+ as well as Python 3.
        except AttributeError as error:
            msg_text.append('Problem retrieving role entry from language '
                            'module %r: %s.' % (language_module, error))
        except KeyError:
            msg_text.append('No role entry for "%s" in module "%s".'
                            % (role_name, language_module.__name__))
    else:
        canonicalname = DEFAULT_INTERPRETED_ROLE

    # If we didn't find it, try English as a fallback.
    if not canonicalname:
        try:
            canonicalname = _fallback_language_module.roles[normname]
            msg_text.append('Using English fallback for role "%s".'
                            % role_name)
        except KeyError:
            msg_text.append('Trying "%s" as canonical role name.'
                            % role_name)
            # The canonical name should be an English name, but just in case:
            canonicalname = normname

    # Collect any messages that we generated.
    if msg_text:
        message = reporter.info('\n'.join(msg_text), line=lineno)
        messages.append(message)

    # Look the role up in the registry, and return it.
    if canonicalname in _role_registry:
        role_fn = _role_registry[canonicalname]
        register_local_role(normname, role_fn)
        return role_fn, messages
    else:
        return None, messages  # Error message will be generated by caller.
def register_canonical_role(name, role_fn):
    """
    Register an interpreted text role under its canonical (English) name.

    :Parameters:
      - `name`: The canonical name of the interpreted role.
      - `role_fn`: The role function. See the module docstring.
    """
    set_implicit_options(role_fn)
    _role_registry[name] = role_fn
def register_local_role(name, role_fn):
    """
    Register an interpreted text role under a local or language-dependent
    name.

    :Parameters:
      - `name`: The local or language-dependent name of the interpreted role.
      - `role_fn`: The role function. See the module docstring.
    """
    set_implicit_options(role_fn)
    _roles[name] = role_fn
def set_implicit_options(role_fn):
    """
    Add the implicit "class" customization option to a role function,
    unless it is explicitly set or explicitly disabled (options={'class': None}).
    """
    options = getattr(role_fn, 'options', None)
    if options is None:
        role_fn.options = {'class': directives.class_option}
    elif 'class' not in options:
        options['class'] = directives.class_option
def register_generic_role(canonical_name, node_class):
    """For roles which simply wrap a given `node_class` around the text."""
    register_canonical_role(canonical_name, GenericRole(canonical_name, node_class))
class GenericRole:
    """
    Generic interpreted text role: the interpreted text is simply wrapped
    in the node class supplied at construction time.
    """

    def __init__(self, role_name, node_class):
        self.name = role_name
        self.node_class = node_class

    def __call__(self, role, rawtext, text, lineno, inliner,
                 options={}, content=[]):
        set_classes(options)
        node = self.node_class(rawtext, utils.unescape(text), **options)
        return [node], []
class CustomRole:
    """
    Wrapper for custom interpreted text roles created via the "role"
    directive: delegates to 'base_role' with pre-supplied options/content
    merged in.
    """
    def __init__(self, role_name, base_role, options={}, content=[]):
        # NOTE: the mutable defaults are not mutated here; supplied
        # options/content are copied in __call__ before modification.
        self.name = role_name
        self.base_role = base_role
        self.options = None
        if hasattr(base_role, 'options'):
            self.options = base_role.options
        self.content = None
        if hasattr(base_role, 'content'):
            self.content = base_role.content
        self.supplied_options = options
        self.supplied_content = content
    def __call__(self, role, rawtext, text, lineno, inliner,
                 options={}, content=[]):
        # Call-time options take precedence over options supplied at
        # definition time.
        opts = self.supplied_options.copy()
        opts.update(options)
        cont = list(self.supplied_content)
        if cont and content:
            # 'list += str' extends element-wise: this appends a single
            # '\n' entry separating supplied content from call-time content.
            cont += '\n'
        cont.extend(content)
        return self.base_role(role, rawtext, text, lineno, inliner,
                              options=opts, content=cont)
def generic_custom_role(role, rawtext, text, lineno, inliner,
                        options={}, content=[]):
    """Base role for custom roles when no other base role is specified;
    wraps the text in an inline node. (Previously had an empty docstring.)
    """
    # Once nested inline markup is implemented, this and other methods should
    # recursively call inliner.nested_parse().
    set_classes(options)
    return [nodes.inline(rawtext, utils.unescape(text), **options)], []

generic_custom_role.options = {'class': directives.class_option}
######################################################################
# Define and register the standard roles:
######################################################################
# Each of these simply wraps the interpreted text in the given node class.
register_generic_role('abbreviation', nodes.abbreviation)
register_generic_role('acronym', nodes.acronym)
register_generic_role('emphasis', nodes.emphasis)
register_generic_role('literal', nodes.literal)
register_generic_role('strong', nodes.strong)
register_generic_role('subscript', nodes.subscript)
register_generic_role('superscript', nodes.superscript)
register_generic_role('title-reference', nodes.title_reference)
def pep_reference_role(role, rawtext, text, lineno, inliner,
                       options={}, content=[]):
    """Turn a PEP number into a reference to that PEP's URL."""
    try:
        pepnum = int(text)
        if not 0 <= pepnum <= 9999:
            raise ValueError
    except ValueError:
        message = inliner.reporter.error(
            'PEP number must be a number from 0 to 9999; "%s" is invalid.'
            % text, line=lineno)
        problem = inliner.problematic(rawtext, rawtext, message)
        return [problem], [message]
    # Base URL mainly used by inliner.pep_reference; so this is correct:
    ref = (inliner.document.settings.pep_base_url
           + inliner.document.settings.pep_file_url_template % pepnum)
    set_classes(options)
    node = nodes.reference(rawtext, 'PEP ' + utils.unescape(text),
                           refuri=ref, **options)
    return [node], []

register_canonical_role('pep-reference', pep_reference_role)
def rfc_reference_role(role, rawtext, text, lineno, inliner,
                       options={}, content=[]):
    """Turn an RFC number into a reference to that RFC's URL."""
    try:
        rfcnum = int(text)
        if rfcnum <= 0:
            raise ValueError
    except ValueError:
        message = inliner.reporter.error(
            'RFC number must be a number greater than or equal to 1; '
            '"%s" is invalid.' % text, line=lineno)
        problem = inliner.problematic(rawtext, rawtext, message)
        return [problem], [message]
    # Base URL mainly used by inliner.rfc_reference, so this is correct:
    ref = inliner.document.settings.rfc_base_url + inliner.rfc_url % rfcnum
    set_classes(options)
    node = nodes.reference(rawtext, 'RFC ' + utils.unescape(text),
                           refuri=ref, **options)
    return [node], []

register_canonical_role('rfc-reference', rfc_reference_role)
def raw_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
    """Insert raw (pass-through) content for a specific output format."""
    if not inliner.document.settings.raw_enabled:
        message = inliner.reporter.warning('raw (and derived) roles disabled')
        problem = inliner.problematic(rawtext, rawtext, message)
        return [problem], [message]
    if 'format' not in options:
        message = inliner.reporter.error(
            'No format (Writer name) is associated with this role: "%s".\n'
            'The "raw" role cannot be used directly.\n'
            'Instead, use the "role" directive to create a new role with '
            'an associated format.' % role, line=lineno)
        problem = inliner.problematic(rawtext, rawtext, message)
        return [problem], [message]
    set_classes(options)
    # NOTE(review): the second argument to utils.unescape appears to control
    # restoring of backslashes for raw text -- confirm against docutils.utils.
    node = nodes.raw(rawtext, utils.unescape(text, 1), **options)
    return [node], []

raw_role.options = {'format': directives.unchanged}

register_canonical_role('raw', raw_role)
######################################################################
# Register roles that are currently unimplemented.
######################################################################
def unimplemented_role(role, rawtext, text, lineno, inliner, attributes={}):
    """Stand-in for roles that are recognized but not implemented yet."""
    error = inliner.reporter.error(
        'Interpreted text role "%s" not implemented.' % role, line=lineno)
    return [inliner.problematic(rawtext, rawtext, error)], [error]

# Register every currently-unimplemented canonical role against the
# shared stand-in above (registration order preserved).
for _unimplemented_name in ('index',
                            'named-reference',
                            'anonymous-reference',
                            'uri-reference',
                            'footnote-reference',
                            'citation-reference',
                            'substitution-reference',
                            'target',
                            # This should remain unimplemented, for
                            # testing purposes:
                            'restructuredtext-unimplemented-role'):
    register_canonical_role(_unimplemented_name, unimplemented_role)
def set_classes(options):
    """Move the 'class' entry of *options* to 'classes', in place.

    No-op when *options* has no 'class' key.  *options* must not already
    contain a 'classes' key.
    """
    if 'class' not in options:
        return
    assert 'classes' not in options
    options['classes'] = options.pop('class')
| 37.592593
| 79
| 0.667753
|
4a0e79a7916f85ce67852c024d1d76accd8319eb
| 2,618
|
py
|
Python
|
Lab2_Hanoi_Style/backtrack_hanoi_solver.py
|
marianfx/ai-labs
|
f9d780c2d453294e423d3ca1b54bb8895209f402
|
[
"MIT"
] | 1
|
2017-10-26T04:23:36.000Z
|
2017-10-26T04:23:36.000Z
|
Lab2_Hanoi_Style/backtrack_hanoi_solver.py
|
marianfx/ai-labs
|
f9d780c2d453294e423d3ca1b54bb8895209f402
|
[
"MIT"
] | 9
|
2016-10-17T12:20:40.000Z
|
2016-12-21T18:29:07.000Z
|
Lab2_Hanoi_Style/backtrack_hanoi_solver.py
|
marianfx/ai-labs
|
f9d780c2d453294e423d3ca1b54bb8895209f402
|
[
"MIT"
] | null | null | null |
from hanoi_solver import HanoiSolver
import sys
class BacktrackHanoiSolver(HanoiSolver):
    """Solves the Hanoi Towers Problem using backtrack.

    Performs a depth-first search over disk moves, recording visited
    states in ``self.previous`` to avoid revisiting them on the current
    path (cycle prevention).

    :param n: The number of towers.
    :param m: The number of disks.
    :param only_first: Specifies if alg. should stop at first solution.
    """
    def __init__(self, n, m, only_first=False):
        super(BacktrackHanoiSolver, self).__init__(n, m)
        # The recursion depth grows with the search tree; raise the
        # interpreter limit so deep backtracking does not RecursionError.
        sys.setrecursionlimit(2 ** 20)
        self.previous = []  # stack of states visited on the current path
        self.strategy_name = "Backtracking"
        self.only_first = only_first
        self.solution_found = False
    def undo_transition(self, i, tower_i, tower_j):
        """Undo the transition from tower i to tower j.
        Assumed that disk i was moved (we need to know the disk to revert).

        Restores ``current_state[i]`` to *tower_i* and drops the last
        recorded partial solution.  *tower_j* is unused here; it is kept
        for symmetry with the forward transition.

        :param i: The disk which was moved.
        :param tower_i: The tower the disk is moved back to.
        :param tower_j: The tower the disk had been moved to (unused).
        :returns: The updated current state.
        """
        self.current_state[i] = tower_i
        # print ("Undo: disk {d} from {s} to {f}.".format(
        #     d=str(top_from_tower_j),
        #     s=str(tower_j),
        #     f=str(tower_i)))
        # print (self.get_current_state())
        # NOTE(review): self.tempsolutions is presumably appended to by the
        # parent class's do_transition() -- confirm against HanoiSolver.
        del self.tempsolutions[-1]
        return self.current_state
    def strategy(self):
        """The strategy uses backtracking for transitioning.
        Details:
        - If we encountered the final state, we have a solution.
        - We get a list with all the available moves from the current state
        - For each move:
            - We check for cycles (no return to previous states)
            - we save the transition as visited (if it's not)
            - we update the current state
            - we move on
        """
        # Short-circuit the whole search once a solution was found and the
        # caller asked for only the first one.
        if self.solution_found and self.only_first:
            return
        # transitions achieved final state
        if self.is_in_final_state():
            self.run_on_final_state()
            return True
        # get a list of all the valid available moves, so we can backtrack'em
        moves = self.get_all_available_moves()
        for move in moves:
            tower_i = move[0]
            tower_j = move[1]
            # Index of the top disk currently on tower_i; state slot 0 is
            # skipped, hence the [1:] slice and the +1 correction.
            i = self.current_state[1:].index(tower_i) + 1
            transition = self.do_transition(tower_i, tower_j)
            # Stop if existing transition
            if tuple(transition) in self.previous:
                self.undo_transition(i, tower_i, tower_j)
                continue
            self.previous.append(tuple(transition))
            self.strategy()
            # Backtrack: forget this state and revert the move.
            del self.previous[-1]
            self.undo_transition(i, tower_i, tower_j)
| 34
| 80
| 0.576012
|
4a0e7bcbcee7f6828a5ded9bdda42268968d6132
| 19,637
|
py
|
Python
|
test/test_client.py
|
boralyl/aioynab
|
6814c4da8450671f01b03236ca384c602e8660de
|
[
"MIT"
] | 1
|
2021-09-06T07:52:54.000Z
|
2021-09-06T07:52:54.000Z
|
test/test_client.py
|
boralyl/aioynab
|
6814c4da8450671f01b03236ca384c602e8660de
|
[
"MIT"
] | null | null | null |
test/test_client.py
|
boralyl/aioynab
|
6814c4da8450671f01b03236ca384c602e8660de
|
[
"MIT"
] | null | null | null |
import asyncio
from unittest import mock
import aiohttp
import pytest
from aioresponses import aioresponses
from aioynab.client import BASE_URL, Client, YNABAPIError
@pytest.fixture(scope='module')
def client():
    # One Client instance shared by every test in this module; the same
    # event loop is reused so each test can drive coroutines with
    # loop.run_until_complete().
    loop = asyncio.get_event_loop()
    client = Client('access-token', loop=loop)
    yield client
    # Teardown: close the client (and its underlying session) before the
    # module finishes.
    loop.run_until_complete(client.close())
def test_init(client):
    """The constructor stores the token and derives the auth header."""
    expected_headers = {'Authorization': 'Bearer access-token'}
    assert client.personal_access_token == 'access-token'
    assert client.headers == expected_headers
def test_request_clienterror(client):
    """A transport-level aiohttp error propagates out of _request()."""
    url = BASE_URL + '/foo'
    transport_error = aiohttp.ClientError('timeout')
    with aioresponses() as mocked:
        mocked.get(url, exception=transport_error)
        with pytest.raises(aiohttp.ClientError, match='timeout'):
            client.loop.run_until_complete(client._request('/foo', 'GET'))
@mock.patch('aioynab.client.json')
def test_request_rate_limit_error(mock_json, client):
    # A 429 response with a non-JSON content type: the client presumably
    # falls back to parsing the body with the json module itself, which is
    # why that module is patched here -- confirm against Client._request.
    mock_json.loads.return_value = {
        'error': {
            'id': '429',
            'detail': 'rate limit',
        },
    }
    with aioresponses() as mock_req:
        mock_req.get(
            BASE_URL + '/foo', payload='{"error": "rate limit"}',
            headers={'Content-Type': 'application/octet'}, status=429)
        # The parsed error detail must surface in the raised exception.
        with pytest.raises(YNABAPIError, match='rate limit'):
            client.loop.run_until_complete(client._request('/foo', 'GET'))
def test_request_success(client):
    """_request() unwraps the top-level 'data' key of the JSON payload."""
    payload = {'data': 'foo'}
    with aioresponses() as mocked:
        mocked.get(BASE_URL + '/foo', payload=payload)
        result = client.loop.run_until_complete(
            client._request('/foo', 'GET'))
        assert result == 'foo'
def test_request_api_error(client):
    # Canonical YNAB error envelope returned with a 404 status.
    mock_res = {
        'error': {
            'id': '404.1',
            'name': 'not_found',
            'detail': 'Invalid URI',
        },
    }
    with aioresponses() as mock_req:
        mock_req.get(BASE_URL + '/foo', payload=mock_res, status=404)
        with pytest.raises(YNABAPIError) as err:
            client.loop.run_until_complete(client._request('/foo', 'GET'))
        # The exception carries the HTTP status, a "<status> - <detail>"
        # string form, and the raw error payload.
        assert 404 == err.value.status
        assert '404 - Invalid URI' == str(err.value)
        assert mock_res['error'] == err.value.error_data
def test_user(client):
mock_res = {
'data': {
'user': {'id': 'abcd'},
},
}
with aioresponses() as mock_req:
mock_req.get(BASE_URL + '/user', payload=mock_res)
actual = client.loop.run_until_complete(client.user())
assert mock_res['data'] == actual
def test_budgets(client):
mock_res = {
'data': {
'budgets': [{
'currency_format': {
'currency_symbol': '$',
'decimal_digits': 2,
'decimal_separator': '.',
'display_symbol': True,
'example_format': '123,456.78',
'group_separator': ',',
'iso_code': 'USD',
'symbol_first': True,
},
'date_format': {'format': 'MM/DD/YYYY'},
'first_month': '2018-09-01',
'id': '01234567-012a-3fe0-abc1-9e123456789c',
'last_modified_on': '2019-03-21T00:03:57+00:00',
'last_month': '2019-04-01',
'name': 'Our Budget',
}],
},
}
with aioresponses() as mock_req:
mock_req.get(BASE_URL + '/budgets', payload=mock_res)
actual = client.loop.run_until_complete(client.budgets())
assert mock_res['data'] == actual
def test_budget(client):
    # Minimal response envelope; only the 'data' unwrapping is asserted.
    mock_res = {
        'data': {
            'budget': {}
        },
    }
    with aioresponses() as mock_req:
        budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
        # The delta-request parameter must be forwarded as a query string.
        mock_req.get(
            BASE_URL + '/budgets/{}?last_knowledge_of_server=2'.format(
                budget_id), payload=mock_res)
        actual = client.loop.run_until_complete(client.budget(budget_id, 2))
        assert mock_res['data'] == actual
def test_budget_settings(client):
mock_res = {
'data': {
'settings': {}
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(BASE_URL + '/budgets/{}/settings'.format(budget_id),
payload=mock_res)
actual = client.loop.run_until_complete(
client.budget_settings(budget_id))
assert mock_res['data'] == actual
def test_accounts(client):
mock_res = {
'data': {
'accounts': []
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(
BASE_URL + '/budgets/{}/accounts?last_knowledge_of_server=2'.format(
budget_id), payload=mock_res)
actual = client.loop.run_until_complete(client.accounts(budget_id, 2))
assert mock_res['data'] == actual
def test_account(client):
mock_res = {
'data': {
'account': {}
},
}
with aioresponses() as mock_req:
budget_id = account_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(BASE_URL + '/budgets/{}/accounts/{}'.format(
budget_id, account_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.account(budget_id, account_id))
assert mock_res['data'] == actual
def test_categories(client):
mock_res = {
'data': {
'category_groups': []
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
url = '{}/budgets/{}/categories?last_knowledge_of_server=1'.format(
BASE_URL, budget_id)
mock_req.get(url, payload=mock_res)
actual = client.loop.run_until_complete(client.categories(budget_id, 1))
assert mock_res['data'] == actual
def test_category(client):
mock_res = {
'data': {
'category': {}
},
}
with aioresponses() as mock_req:
budget_id = category_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(BASE_URL + '/budgets/{}/categories/{}'.format(
budget_id, category_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.category(budget_id, category_id))
assert mock_res['data'] == actual
def test_category_month(client):
mock_res = {
'data': {
'category': {}
},
}
with aioresponses() as mock_req:
budget_id = category_id = '01234567-012a-3fe0-abc1-9e123456789c'
month = '2018-01-01'
endpoint = '/budgets/{}/months/{}/categories/{}'.format(
budget_id, month, category_id)
mock_req.get(BASE_URL + endpoint, payload=mock_res)
actual = client.loop.run_until_complete(
client.category_month(budget_id, category_id, month))
assert mock_res['data'] == actual
def test_update_category_month(client):
mock_res = {
'data': {
'category': {}
},
}
with aioresponses() as mock_req:
budget_id = category_id = '01234567-012a-3fe0-abc1-9e123456789c'
month = '2018-01-01'
data = {
'category': {
'budgeted': 0,
},
}
endpoint = '/budgets/{}/months/{}/categories/{}'.format(
budget_id, month, category_id)
mock_req.patch(BASE_URL + endpoint, payload=mock_res)
actual = client.loop.run_until_complete(
client.update_category_month(budget_id, category_id, month, data))
assert mock_res['data'] == actual
def test_payees(client):
mock_res = {
'data': {
'payees': [],
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(
BASE_URL + '/budgets/{}/payees?last_knowledge_of_server=1'.format(
budget_id), payload=mock_res)
actual = client.loop.run_until_complete(client.payees(budget_id, 1))
assert mock_res['data'] == actual
def test_payee(client):
mock_res = {
'data': {
'payee': {},
},
}
with aioresponses() as mock_req:
budget_id = payee_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(
BASE_URL + '/budgets/{}/payees/{}'.format(budget_id, payee_id),
payload=mock_res)
actual = client.loop.run_until_complete(
client.payee(budget_id, payee_id))
assert mock_res['data'] == actual
def test_payee_locations(client):
mock_res = {
'data': {
'payee_locations': {},
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(
BASE_URL + '/budgets/{}/payee_locations'.format(budget_id),
payload=mock_res)
actual = client.loop.run_until_complete(
client.payee_locations(budget_id))
assert mock_res['data'] == actual
def test_payee_location(client):
mock_res = {
'data': {
'payee_location': {},
},
}
with aioresponses() as mock_req:
budget_id = payee_loc_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(
BASE_URL + '/budgets/{}/payee_locations/{}'.format(
budget_id, payee_loc_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.payee_location(budget_id, payee_loc_id))
assert mock_res['data'] == actual
def test_locations_payee(client):
mock_res = {
'data': {
'payee_locations': [],
},
}
with aioresponses() as mock_req:
budget_id = payee_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(
BASE_URL + '/budgets/{}/payees/{}/payee_locations'.format(
budget_id, payee_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.locations_payee(budget_id, payee_id))
assert mock_res['data'] == actual
def test_budget_months(client):
mock_res = {
'data': {
'months': [],
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(
BASE_URL + '/budgets/{}/months?last_knowledge_of_server=1'.format(
budget_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.budget_months(budget_id, 1))
assert mock_res['data'] == actual
def test_budget_month(client):
mock_res = {
'data': {
'month': {},
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
month = '2018-01-01'
mock_req.get(
BASE_URL + '/budgets/{}/months/{}'.format(budget_id, month),
payload=mock_res)
actual = client.loop.run_until_complete(
client.budget_month(budget_id, month))
assert mock_res['data'] == actual
def test_transactions(client):
mock_res = {
'data': {
'transactions': [],
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
since_date = '2018-01-01'
type = 'unapproved'
url = (BASE_URL + '/budgets/{}/transactions?last_knowledge_of_server=1&'
'since_date=2018-01-01&type=unapproved'.format(budget_id))
mock_req.get(url, payload=mock_res)
actual = client.loop.run_until_complete(
client.transactions(budget_id, since_date, type, 1))
assert mock_res['data'] == actual
def test_create_transactions_no_transaction_provided(client):
    """create_transactions() without either transaction argument raises."""
    some_budget = '01234567-012a-3fe0-abc1-9e123456789c'
    with pytest.raises(ValueError, match='Must specify one of'):
        client.loop.run_until_complete(
            client.create_transactions(some_budget))
def test_create_transactions_both_params_provided(client):
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
with pytest.raises(ValueError, match='Only one of transaction or'):
client.loop.run_until_complete(
client.create_transactions(budget_id, transaction={'foo': 'bar'},
transactions=[{'foo': 'bar'}]))
def test_create_transactions_one_transaction(client):
mock_res = {
'data': {
'transaction': {},
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
transaction = {'memo': 'food'}
mock_req.post(BASE_URL + '/budgets/{}/transactions'.format(budget_id),
payload=mock_res)
actual = client.loop.run_until_complete(
client.create_transactions(budget_id, transaction))
assert mock_res['data'] == actual
def test_create_transactions_multiple_transactions(client):
mock_res = {
'data': {
'transactions': [],
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
transactions = [{'memo': 'food'}]
mock_req.post(BASE_URL + '/budgets/{}/transactions'.format(budget_id),
payload=mock_res)
actual = client.loop.run_until_complete(
client.create_transactions(budget_id, transactions=transactions))
assert mock_res['data'] == actual
def test_update_transactions_no_transaction_provided(client):
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
with pytest.raises(ValueError, match='Must specify one of'):
client.loop.run_until_complete(client.update_transactions(budget_id))
def test_update_transactions_both_params_provided(client):
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
with pytest.raises(ValueError, match='Only one of transaction or'):
client.loop.run_until_complete(
client.update_transactions(budget_id, transaction={'foo': 'bar'},
transactions=[{'foo': 'bar'}]))
def test_update_transactions_one_transaction(client):
mock_res = {
'data': {
'transaction': {},
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
transaction = {'memo': 'food'}
mock_req.patch(BASE_URL + '/budgets/{}/transactions'.format(budget_id),
payload=mock_res)
actual = client.loop.run_until_complete(
client.update_transactions(budget_id, transaction))
assert mock_res['data'] == actual
def test_update_transactions_multiple_transactions(client):
mock_res = {
'data': {
'transactions': [],
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
transactions = [{'memo': 'food'}]
mock_req.patch(BASE_URL + '/budgets/{}/transactions'.format(budget_id),
payload=mock_res)
actual = client.loop.run_until_complete(
client.update_transactions(budget_id, transactions=transactions))
assert mock_res['data'] == actual
def test_get_transaction(client):
mock_res = {
'data': {
'transaction': {},
},
}
with aioresponses() as mock_req:
budget_id = transaction_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(BASE_URL + '/budgets/{}/transactions/{}'.format(
budget_id, transaction_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.transaction(budget_id, transaction_id))
assert mock_res['data'] == actual
def test_update_transaction(client):
mock_res = {
'data': {
'transaction': {},
},
}
with aioresponses() as mock_req:
budget_id = transaction_id = '01234567-012a-3fe0-abc1-9e123456789c'
data = {'transaction': {}}
mock_req.put(BASE_URL + '/budgets/{}/transactions/{}'.format(
budget_id, transaction_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.update_transaction(budget_id, transaction_id, data))
assert mock_res['data'] == actual
def test_account_transactions(client):
mock_res = {
'data': {
'transactions': [],
},
}
with aioresponses() as mock_req:
budget_id = account_id = '01234567-012a-3fe0-abc1-9e123456789c'
since_date = '2019-01-01'
type = 'unapproved'
url = (BASE_URL + '/budgets/{}/accounts/{}/transactions?'
'last_knowledge_of_server=1&since_date=2019-01-01&'
'type=unapproved'.format(budget_id, account_id))
mock_req.get(url, payload=mock_res)
actual = client.loop.run_until_complete(
client.account_transactions(
budget_id, account_id, since_date, type, 1))
assert mock_res['data'] == actual
def test_category_transactions(client):
mock_res = {
'data': {
'transactions': [],
},
}
with aioresponses() as mock_req:
budget_id = category_id = '01234567-012a-3fe0-abc1-9e123456789c'
since_date = '2019-01-01'
type = 'unapproved'
url = (BASE_URL + '/budgets/{}/categories/{}/transactions?'
'last_knowledge_of_server=1&since_date=2019-01-01&'
'type=unapproved'.format(budget_id, category_id))
mock_req.get(url, payload=mock_res)
actual = client.loop.run_until_complete(
client.category_transactions(
budget_id, category_id, since_date, type, 1))
assert mock_res['data'] == actual
def test_payee_transactions(client):
mock_res = {
'data': {
'transactions': [],
},
}
with aioresponses() as mock_req:
budget_id = payee_id = '01234567-012a-3fe0-abc1-9e123456789c'
since_date = '2019-01-01'
type = 'unapproved'
url = (BASE_URL + '/budgets/{}/payees/{}/transactions?'
'last_knowledge_of_server=1&since_date=2019-01-01&'
'type=unapproved'.format(budget_id, payee_id))
mock_req.get(url, payload=mock_res)
actual = client.loop.run_until_complete(
client.payee_transactions(
budget_id, payee_id, since_date, type, 1))
assert mock_res['data'] == actual
def test_scheduled_transactions(client):
mock_res = {
'data': {
'scheduled_transactions': [],
},
}
with aioresponses() as mock_req:
budget_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(BASE_URL + '/budgets/{}/scheduled_transactions'.format(
budget_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.scheduled_transactions(budget_id))
assert mock_res['data'] == actual
def test_scheduled_transaction(client):
mock_res = {
'data': {
'scheduled_transaction': {},
},
}
with aioresponses() as mock_req:
budget_id = transaction_id = '01234567-012a-3fe0-abc1-9e123456789c'
mock_req.get(BASE_URL + '/budgets/{}/scheduled_transactions/{}'.format(
budget_id, transaction_id), payload=mock_res)
actual = client.loop.run_until_complete(
client.scheduled_transaction(budget_id, transaction_id))
assert mock_res['data'] == actual
| 33.625
| 80
| 0.598207
|
4a0e7bcc1e386f9ed6f976c80a94f2ec792ddb96
| 755
|
py
|
Python
|
src/vendor/configrpc/crpc/configrpc.py
|
radomirklacza/C-BAS
|
5005cf43f57302dc0f58b9d1b9cf7e4e3ab70e32
|
[
"BSD-3-Clause"
] | null | null | null |
src/vendor/configrpc/crpc/configrpc.py
|
radomirklacza/C-BAS
|
5005cf43f57302dc0f58b9d1b9cf7e4e3ab70e32
|
[
"BSD-3-Clause"
] | null | null | null |
src/vendor/configrpc/crpc/configrpc.py
|
radomirklacza/C-BAS
|
5005cf43f57302dc0f58b9d1b9cf7e4e3ab70e32
|
[
"BSD-3-Clause"
] | 2
|
2017-08-07T15:24:05.000Z
|
2018-10-11T10:53:23.000Z
|
import eisoil.core.pluginmanager as pm
import eisoil.core.log
logger=eisoil.core.log.getLogger('configrpc')
xmlrpc = pm.getService('xmlrpc')
config = pm.getService("config")
# TODO IMPORTANT: add authentication / authorization here
class ConfigRPC(xmlrpc.Dispatcher):
    """XML-RPC dispatcher exposing the config service over RPC.

    Methods registered here are callable remotely; see the module-level
    TODO -- no authentication/authorization is applied yet.
    """
    def __init__(self):
        super(ConfigRPC, self).__init__(logger)
    def ListConfigKeys(self):
        """
        Returns a list of config items:
        [ ..., [key, value, desc], ...]
        """
        result = []
        items = config.getAll()
        for item in items:
            result.append([item['key'], item['value'], item['description']])
        return result
    def ChangeConfig(self, key, value):
        # Set the config entry *key* to *value* via the config service.
        # NOTE(review): the result of config.set() is assigned but never
        # returned, so RPC callers get None -- confirm whether the updated
        # item should be returned here.
        item = config.set(key, value)
| 26.034483
| 76
| 0.607947
|
4a0e7d74e62f2dcc1f51246a9e189556b83c0ef4
| 9,056
|
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/operations/_network_interface_ip_configurations_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/operations/_network_interface_ip_configurations_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/operations/_network_interface_ip_configurations_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfaceIPConfigurationsOperations(object):
    """NetworkInterfaceIPConfigurationsOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: generated by AutoRest (see file header); hand edits will be
    # lost when the client is regenerated.
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name,  # type: str
        network_interface_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.NetworkInterfaceIPConfigurationListResult"]
        """Get all ip configurations in a network interface.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkInterfaceIPConfigurationListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_06_01.models.NetworkInterfaceIPConfigurationListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkInterfaceIPConfigurationListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-06-01"
        accept = "application/json"
        # Builds the HTTP request for the first page (from the operation
        # URL template) or for a continuation page (from next_link).
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already contains the full URL including query.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Deserializes one page into (next_link, iterator of elements).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('NetworkInterfaceIPConfigurationListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetches one page through the pipeline, mapping error statuses.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/ipConfigurations'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        network_interface_name,  # type: str
        ip_configuration_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.NetworkInterfaceIPConfiguration"
        """Gets the specified network interface ip configuration.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :param ip_configuration_name: The name of the ip configuration name.
        :type ip_configuration_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NetworkInterfaceIPConfiguration, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_06_01.models.NetworkInterfaceIPConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkInterfaceIPConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-06-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
            'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('NetworkInterfaceIPConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/ipConfigurations/{ipConfigurationName}'}  # type: ignore
| 48.951351
| 220
| 0.675795
|
4a0e7e160f6f1ba1359c6eec91741b730bd70dd4
| 7,261
|
py
|
Python
|
contrib/py/pycoiniclesnet/pycoiniclesnet/instance.py
|
coiniclesdev/coinicles-network
|
32a817d174de4b597d96cd6204c0ede1cab04788
|
[
"Zlib"
] | null | null | null |
contrib/py/pycoiniclesnet/pycoiniclesnet/instance.py
|
coiniclesdev/coinicles-network
|
32a817d174de4b597d96cd6204c0ede1cab04788
|
[
"Zlib"
] | null | null | null |
contrib/py/pycoiniclesnet/pycoiniclesnet/instance.py
|
coiniclesdev/coinicles-network
|
32a817d174de4b597d96cd6204c0ede1cab04788
|
[
"Zlib"
] | null | null | null |
#!/usr/bin/env python3
#
# coiniclesnet runtime wrapper
#
from ctypes import *
import configparser
import signal
import time
import threading
import os
import sys
import requests
from pycoiniclesnet import rc
lib_file = os.path.join(os.path.realpath('.'), 'libcoiniclesnet-shared.so')
def log(msg):
    """Write a prefixed diagnostic line to stderr and flush immediately."""
    stream = sys.stderr
    stream.write("coiniclesnet: {}\n".format(msg))
    stream.flush()
class CoiniclesNET(threading.Thread):
    """Thread wrapper around the native coiniclesnet shared library.

    Loads ``libcoiniclesnet-shared.so`` via ctypes, writes/patches the
    daemon config file, and drives the llarp main loop on this thread.
    """
    lib = None        # ctypes CDLL handle once configure() succeeds
    ctx = 0           # opaque llarp context pointer (integer handle)
    failed = False    # set when the native main loop exits with an error
    up = False        # set while the native main loop is running
    asRouter = True   # generate a router (not client) config
    def configure(self, lib, conf, ip=None, port=None, ifname=None, seedfile=None, coiniclesd_host=None, coiniclesd_port=None):
        """Load the shared library and prepare the config at *conf*.

        Optional arguments overwrite the generated config: public
        address/port, bind interface, and the coiniclesd RPC/seed
        settings (the latter three must all be provided together).
        :returns: True when llarp setup succeeded, False otherwise.
        """
        log("configure lib={} conf={}".format(lib, conf))
        if not os.path.exists(os.path.dirname(conf)):
            os.mkdir(os.path.dirname(conf))
        try:
            self.lib = CDLL(lib)
        except OSError as ex:
            log("failed to load library: {}".format(ex))
            return False
        # llarp_ensure_config writes a default config file if needed;
        # a nonzero return presumably means success -- confirm against
        # the native API.
        if self.lib.llarp_ensure_config(conf.encode('utf-8'), os.path.dirname(conf).encode('utf-8'), True, self.asRouter):
            config = configparser.ConfigParser()
            config.read(conf)
            log('overwrite ip="{}" port="{}" ifname="{}" seedfile="{}" coiniclesd=("{}", "{}")'.format(
                ip, port, ifname, seedfile, coiniclesd_host, coiniclesd_port))
            if seedfile and coiniclesd_host and coiniclesd_port:
                if not os.path.exists(seedfile):
                    log('cannot access service node seed at "{}"'.format(seedfile))
                    return False
                config['coiniclesd'] = {
                    'service-node-seed': seedfile,
                    'enabled': "true",
                    'jsonrpc': "{}:{}".format(coiniclesd_host, coiniclesd_port)
                }
            if ip:
                config['router']['public-address'] = '{}'.format(ip)
            if port:
                config['router']['public-port'] = '{}'.format(port)
            if ifname and port:
                config['bind'] = {
                    ifname: '{}'.format(port)
                }
            # Persist the patched config before handing it to llarp.
            with open(conf, "w") as f:
                config.write(f)
            self.ctx = self.lib.llarp_main_init(conf.encode('utf-8'))
        else:
            return False
        return self.lib.llarp_main_setup(self.ctx, False) == 0
    def inform_fail(self):
        """
        inform coiniclesnet crashed
        """
        self.failed = True
        self._inform()
    def inform_up(self):
        # Mark the daemon as running and notify any waiter.
        self.up = True
        self._inform()
    def _inform(self):
        """
        inform waiter

        Currently a no-op; the event-based waiting code is commented out
        elsewhere in this class.
        """
    def wait_for_up(self, timeout):
        """
        wait for coiniclesnet to go up for :timeout: seconds
        :return True if we are up and running otherwise False:

        NOTE(review): currently returns None -- the event wait below is
        disabled.
        """
        # return self._up.wait(timeout)
    def signal(self, sig):
        # Forward a POSIX-style signal number to the native main loop.
        if self.ctx and self.lib:
            self.lib.llarp_main_signal(self.ctx, int(sig))
    def run(self):
        """Thread body: run the llarp main loop until it exits."""
        # self._up.acquire()
        self.up = True
        code = self.lib.llarp_main_run(self.ctx)
        log("llarp_main_run exited with status {}".format(code))
        if code:
            self.inform_fail()
        self.up = False
        # self._up.release()
    def close(self):
        # Release the native context; safe to call only after configure().
        if self.lib and self.ctx:
            self.lib.llarp_main_free(self.ctx)
def getconf(name, fallback=None):
    """Return the value of environment variable *name*, or *fallback*.

    An unset variable — or one set to an empty string — yields *fallback*,
    matching the truthiness semantics of the original and/or expression.
    """
    return os.environ.get(name) or fallback
def run_main(args):
    """Bootstrap, configure and supervise a coiniclesnet router.

    All settings come from environment variables (see the getconf calls);
    *args* is accepted for CLI symmetry but is currently unused.  Steps:
      1. validate required environment variables,
      2. fetch a bootstrap RC from COINICLESNET_BOOTSTRAP_URL,
      3. configure and start the CoiniclesNET thread,
      4. once up, PUT our RC to COINICLESNET_SUBMIT_URL, then keep pinging
         COINICLESNET_PING_URL every COINICLESNET_PING_INTERVAL seconds
         while the router stays up.
    """
    seedfile = getconf("COINICLES_SEED_FILE")
    if seedfile is None:
        print("COINICLES_SEED_FILE was not set")
        return
    coiniclesd_host = getconf("COINICLES_RPC_HOST", "127.0.0.1")
    coiniclesd_port = getconf("COINICLES_RPC_PORT", "22023")
    root = getconf("COINICLESNET_ROOT")
    if root is None:
        print("COINICLESNET_ROOT was not set")
        return
    rc_callback = getconf("COINICLESNET_SUBMIT_URL")
    if rc_callback is None:
        print("COINICLESNET_SUBMIT_URL was not set")
        return
    bootstrap = getconf("COINICLESNET_BOOTSTRAP_URL")
    if bootstrap is None:
        print("COINICLESNET_BOOTSTRAP_URL was not set")
        # BUG FIX: previously fell through with bootstrap=None and crashed
        # later inside requests.get(); bail out like every other check above.
        return
    lib = getconf("COINICLESNET_LIB", lib_file)
    if not os.path.exists(lib):
        # Fall back to resolving the shared object via the loader search path.
        lib = "libcoiniclesnet-shared.so"
    timeout = int(getconf("COINICLESNET_TIMEOUT", "5"))
    ping_interval = int(getconf("COINICLESNET_PING_INTERVAL", "60"))
    ping_callback = getconf("COINICLESNET_PING_URL")
    ip = getconf("COINICLESNET_IP")
    port = getconf("COINICLESNET_PORT")
    ifname = getconf("COINICLESNET_IFNAME")
    if ping_callback is None:
        print("COINICLESNET_PING_URL was not set")
        return
    conf = os.path.join(root, "daemon.ini")
    log("going up")
    coinicles = CoiniclesNET()
    log("bootstrapping...")
    try:
        r = requests.get(bootstrap)
        if r.status_code == 404:
            # No RC to fetch: we are likely the first (seed) node.
            log("bootstrap gave no RCs, we are probably the seed node")
        elif r.status_code != 200:
            raise Exception("http {}".format(r.status_code))
        else:
            data = r.content
            if rc.validate(data):
                log("valid RC obtained")
                with open(os.path.join(root, "bootstrap.signed"), "wb") as f:
                    f.write(data)
            else:
                raise Exception("invalid RC")
    except Exception as ex:
        log("failed to bootstrap: {}".format(ex))
        coinicles.close()
        return
    if coinicles.configure(lib, conf, ip, port, ifname, seedfile, coiniclesd_host, coiniclesd_port):
        log("configured")
        coinicles.start()
        try:
            # Poll for up to `timeout` seconds for the router thread to come up.
            log("waiting for spawn")
            while timeout > 0:
                time.sleep(1)
                if coinicles.failed:
                    log("failed")
                    break
                log("waiting {}".format(timeout))
                timeout -= 1
            if coinicles.up:
                log("submitting rc")
                try:
                    # NOTE(review): "octect" is a typo for "octet" in the header
                    # value below — confirm server tolerance before changing it.
                    with open(os.path.join(root, 'self.signed'), 'rb') as f:
                        r = requests.put(rc_callback, data=f.read(), headers={
                            "content-type": "application/octect-stream"})
                    log('submit rc reply: HTTP {}'.format(r.status_code))
                except Exception as ex:
                    log("failed to submit rc: {}".format(ex))
                    coinicles.signal(signal.SIGINT)
                    time.sleep(2)
                else:
                    # RC submitted; keep the liveness ping going while up.
                    while coinicles.up:
                        time.sleep(ping_interval)
                        try:
                            r = requests.get(ping_callback)
                            log("ping reply: HTTP {}".format(r.status_code))
                        except Exception as ex:
                            log("failed to submit ping: {}".format(ex))
            else:
                log("failed to go up")
                coinicles.signal(signal.SIGINT)
        except KeyboardInterrupt:
            # Graceful shutdown on Ctrl-C: signal llarp, give it time to exit.
            coinicles.signal(signal.SIGINT)
            time.sleep(2)
        finally:
            coinicles.close()
    else:
        coinicles.close()
def main():
    """Console entry point: pass command-line arguments through to run_main."""
    argv_rest = sys.argv[1:]
    run_main(argv_rest)


if __name__ == "__main__":
    main()
| 32.271111
| 127
| 0.549649
|
4a0e7e7f60b9bf86ecb377c6a8279a8e514464de
| 757
|
py
|
Python
|
back_flask/src/app.py
|
thibaud-c/3DperceptionUX
|
04707986e98ced9d121083263ce6a6e09f747666
|
[
"MIT"
] | null | null | null |
back_flask/src/app.py
|
thibaud-c/3DperceptionUX
|
04707986e98ced9d121083263ce6a6e09f747666
|
[
"MIT"
] | 2
|
2022-01-27T16:11:48.000Z
|
2022-02-27T02:54:50.000Z
|
back_flask/src/app.py
|
thibaud-c/3DperceptionUX
|
04707986e98ced9d121083263ce6a6e09f747666
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask_cors import CORS
import logging
from .config import app_config
from .models import db
# import user_api blueprint
from .views.UserView import user_api as user_blueprint
def create_app(env_name):
    """Build and configure the Flask application for the given environment.

    :param env_name: key into app_config selecting the configuration object
    :return: the fully configured Flask app
    """
    # File-based debug logging for the whole process.
    logging.basicConfig(
        filename='Flask.log',
        level=logging.DEBUG,
        format='%(asctime)s %(levelname)s %(name)s %(threadName)s : %(message)s')

    flask_app = Flask(__name__)
    CORS(flask_app)
    flask_app.config.from_object(app_config[env_name])

    # Attach the database and the user API blueprint.
    db.init_app(flask_app)
    flask_app.register_blueprint(user_blueprint, url_prefix='/api/v1/')

    @flask_app.route('/', methods=['GET'])
    def index():
        """Health-check endpoint."""
        return 'Server is running...'

    return flask_app
| 21.027778
| 138
| 0.704095
|
4a0e7f91cdf93f93dea667bded806c86d417a7c9
| 22,657
|
py
|
Python
|
test/test_functionalization.py
|
jeejakp12/pytorch
|
043cf1f9c746b4dda2c404ba6c76c6ccad5e2cbe
|
[
"Intel"
] | null | null | null |
test/test_functionalization.py
|
jeejakp12/pytorch
|
043cf1f9c746b4dda2c404ba6c76c6ccad5e2cbe
|
[
"Intel"
] | null | null | null |
test/test_functionalization.py
|
jeejakp12/pytorch
|
043cf1f9c746b4dda2c404ba6c76c6ccad5e2cbe
|
[
"Intel"
] | 1
|
2022-03-26T14:42:50.000Z
|
2022-03-26T14:42:50.000Z
|
# Owner(s): ["module: codegen"]
import torch
from torch.testing._internal.common_utils import TestCase, run_tests
from torch.testing._internal.logging_tensor import LoggingTensor, LoggingTensorReentrant, capture_logs, log_input
from torch.utils._pytree import tree_map
import logging
def are_aliased(x, y):
    """Return True if tensors ``x`` and ``y`` alias each other via views.

    Covers all four cases: neither is a view (never aliased), one is a view
    of the other, or both are views of the same base tensor.
    """
    base_x, base_y = x._base, y._base
    if base_x is None:
        if base_y is None:
            return False
        return base_y is x
    if base_y is None:
        return base_x is y
    return base_x is base_y
# Just for testing: a logging tensor that also transforms out-of-place ops into inplace ops.
# That way even if the outer wrapper is functionalized, the inner wrapper will also need functionalization.
class InplaceLoggingTensor(LoggingTensorReentrant):
    """Logging tensor wrapper that rewrites aten.add into aten.add_.

    Used to test functionalization through nested wrapper subclasses: even if
    the outer wrapper is functionalized, the inplace op introduced here forces
    the inner wrapper to need functionalization too.
    """

    @staticmethod
    def __new__(cls, e):
        # Wrapper subclass holding the real tensor in .elem; mirrors shape/dtype.
        r = torch.Tensor._make_wrapper_subclass(cls, e.shape, dtype=e.dtype, requires_grad=False)
        r.elem = e
        return r

    # Disable the __torch_function__ protocol; only __torch_dispatch__ is used.
    __torch_function__ = torch._C._disabled_torch_function_impl

    def __str__(self):
        return f'InplaceLoggingTensor({self.elem})'

    @classmethod
    def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
        """Run *func* on the unwrapped tensors, substituting add_ for add.

        The log records the ORIGINAL op (``func``), so the trace shows what
        the caller issued even though an inplace variant may actually run.
        """
        def unwrap(e):
            # Strip our wrapper; leave non-wrapped values untouched.
            if isinstance(e, InplaceLoggingTensor):
                return e.elem
            else:
                return e

        def wrap(e):
            # Re-wrap plain tensors produced by the op.
            if isinstance(e, torch.Tensor):
                return InplaceLoggingTensor(e)
            else:
                return e
        f = func
        # this subclass converts all `add()` ops into `add_()` ops
        if f is torch.ops.aten.add.Tensor:
            f = torch.ops.aten.add_.Tensor
        with cls.context():
            rs = tree_map(wrap, f(*tree_map(unwrap, args), **tree_map(unwrap, kwargs)))
        # after running the (potentially transformed) op,
        # log the original op that we saw.
        logging.getLogger("LoggingTensor").info(f"{func.__module__}.{func.__name__}", args, kwargs, rs)
        return rs
class TestFunctionalization(TestCase):
def get_logs(self, func, inpt, *, reapply_views=False):
input_clone_logging = LoggingTensor(inpt.clone())
input_functional_logging = torch._to_functional_tensor(input_clone_logging)
with capture_logs() as logs:
log_input("input", input_clone_logging)
torch._enable_functionalization(reapply_views=reapply_views)
try:
func(input_functional_logging)
finally:
torch._disable_functionalization()
return logs
def assert_functionalization(self, func, inpt, *, reapply_views=False):
input_clone = inpt.clone()
input_clone2 = inpt.clone()
input_functional = torch._to_functional_tensor(input_clone2)
# Compare outputs (and mutated inputs), with and without functionalization.
out_ref = func(inpt)
torch._enable_functionalization(reapply_views=reapply_views)
try:
out_functional = func(input_functional)
finally:
torch._disable_functionalization()
# We need to sync the input tensors first, in case there are any queued mutations left.
torch._sync(input_functional)
torch._sync(out_functional)
self.assertEqual(out_ref, torch._from_functional_tensor(out_functional))
self.assertEqual(inpt, torch._from_functional_tensor(input_functional)) # input mutations should still occur
def test_simple(self):
def f(x):
# simple test: 1 view op, 1 inplace op
tmp = torch.ones(4, 2)
y = x.view(4, 2)
y.add_(tmp)
z = x * x
return y
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.view_copy.default($0, [4, 2])
$2 = torch._ops.aten.add.Tensor($1, tensor([[1., 1.],
[1., 1.],
[1., 1.],
[1., 1.]]))
$3 = torch._ops.aten.view_copy.default($2, [4, 2])
$4 = torch._ops.aten.mul.Tensor($3, $3)""")
def test_simple_out(self):
def f(x):
tmp = torch.ones(4, 2)
y = x.view(4, 2)
# the out= tensor will get resized, since it has size=0 to start.
z = torch.empty(())
torch.add(y, tmp, out=z)
w = z * z
return w
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.view_copy.default($0, [4, 2])
$2 = torch._ops.aten.add.Tensor($1, tensor([[1., 1.],
[1., 1.],
[1., 1.],
[1., 1.]]))
$3 = torch._ops.aten.mul.Tensor($2, $2)""")
def test_multi_out(self):
def f(x):
# aminmax.out returns a tuple of tensors.
# functionalization should properly handle the tuple.
out_min = torch.empty(4)
out_max = torch.empty(4)
torch.aminmax(x, dim=0, out=(out_max, out_min))
return out_max
self.assert_functionalization(f, torch.arange(8, dtype=torch.float32))
logs = self.get_logs(f, torch.arange(8, dtype=torch.float32))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1, $2 = torch._ops.aten.aminmax.default($0, dim=0)""")
def test_tensor_ctr(self):
def f(x):
y = torch.tensor((1, 2, 3))
z = y.view(-1)
z.add_(1)
return y
self.assert_functionalization(f, torch.arange(3, dtype=torch.float32))
logs = self.get_logs(f, torch.arange(3, dtype=torch.float32))
self.assertExpectedInline('\n'.join(logs), """$0 = input('input')""")
def test_inplace_on_non_view(self):
def f(x):
# test for the case where we functionalize an inplace op on the other tensor - not a view.
# This is worth checking because the tensor will have an empty ViewMeta stack, which needs to be special cased.
tmp = torch.ones(4, 2)
y = x.view(4, 2)
x.add_(tmp)
return y
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.view_copy.default($0, [4, 2])
$2 = torch._ops.aten.add.Tensor($0, tensor([[1., 1.],
[1., 1.],
[1., 1.],
[1., 1.]]))""")
# Some ops that are mutable are neither inplace nor out= ops.
# They also need special handling.
def test_mutable_op_not_inplace_or_other(self):
def f(x):
return torch._fused_moving_avg_obs_fq_helper(x, x, x, x, x, x, x, 1.0, 0, 1, 0)
logs = self.get_logs(f, torch.ones(1))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1, $2, $3, $4, $5, $6 = torch._ops.aten._fused_moving_avg_obs_fq_helper.functional($0, $0, $0, $0, $0, $0, $0, 1.0, 0, 1, 0)""")
def test_tensor_list_composite(self):
def f(x):
# Test an op with TensorList input
y = torch.block_diag(x, x)
return y
self.assert_functionalization(f, torch.ones(2, 2))
logs = self.get_logs(f, torch.ones(2, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.expand_copy.default($0, [2, 2])
$2 = torch._ops.aten.slice_scatter.default(tensor([[0., 0., 0., 0.],
[0., 0., 0., 0.]]), $1, 1, 0, 2)
$3 = torch._ops.aten.slice_scatter.default(tensor([[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.],
[0., 0., 0., 0.]]), $2, 0, 0, 2)
$4 = torch._ops.aten.slice_copy.Tensor($3, 0, 2, 4)
$5 = torch._ops.aten.slice_copy.Tensor($4, 1, 2, 4)
$6 = torch._ops.aten.expand_copy.default($0, [2, 2])""")
def test_cat(self):
def f(x):
out = torch.empty(0)
torch.cat((x,), out=out)
return out
self.assert_functionalization(f, torch.ones(2, 2))
logs = self.get_logs(f, torch.ones(2, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.cat.default([LoggingTensor(tensor([[1., 1.],
[1., 1.]]))])""")
def test_diagonal(self):
def f(x):
# test: view ops that take a subset of the original tensor (select/diagonal)
tmp = torch.ones(2)
y = x.diagonal()
y.add_(tmp)
z = x * x
return z
self.assert_functionalization(f, torch.ones(2, 2))
logs = self.get_logs(f, torch.ones(2, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.diagonal_copy.default($0)
$2 = torch._ops.aten.add.Tensor($1, tensor([1., 1.]))
$3 = torch._ops.aten.diagonal_scatter.default($0, $2)
$4 = torch._ops.aten.mul.Tensor($3, $3)""")
def test_diagonal_mutated_input(self):
def f(x):
# simple test: there are pending updates afterwards, which the test syncs manually
tmp = torch.ones(2)
y = x.diagonal()
y.add_(tmp)
return x
x = torch.ones(2, 2)
self.assert_functionalization(f, x)
def test_split(self):
def f(x):
# test: view ops that return multiple tensors (split)
tmp = torch.ones(2)
y1, y2 = x.split(2)
y3 = y2.diagonal()
y3.add_(tmp)
z = x * x
return y3
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1, $2 = torch._ops.aten.split_copy.Tensor($0, 2)
$3 = torch._ops.aten.diagonal_copy.default($2)
$4 = torch._ops.aten.add.Tensor($3, tensor([1., 1.]))
$5, $6 = torch._ops.aten.split_copy.Tensor($0, 2)
$7 = torch._ops.aten.diagonal_scatter.default($6, $4)
$8 = torch._ops.aten.slice_scatter.default($0, $7, 0, 2, 4)
$9 = torch._ops.aten.mul.Tensor($8, $8)""")
def test_view_inplace(self):
def f(x):
# test: view + inplace op (transpose_)
tmp = torch.ones(4)
x.transpose_(1, 0)
y = x[0]
y.add_(tmp)
return y
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.transpose_copy.int($0, 1, 0)
$2 = torch._ops.aten.select_copy.int($1, 0, 0)
$3 = torch._ops.aten.add.Tensor($2, tensor([1., 1., 1., 1.]))""")
def test_optional_tensor_list(self):
def f(x):
# test: an operator that takes in a List[Optional[Tensor]] argument
# (index_put)
y = x.view(8)
indices = torch.arange(4)
values = torch.arange(4, dtype=y.dtype)
y.index_put_((indices,), values, accumulate=False)
return y
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.view_copy.default($0, [8])
$2 = torch._ops.aten.index_put.default($1, [tensor([0, 1, 2, 3])], tensor([0., 1., 2., 3.]))""")
def test_scalars(self):
def f(x):
# test: the pass can handle scalar inputs properly
tmp = torch.ones(4, 2)
y = x.view(4, 2)
y.add_(1)
z = 2 * y
z.div_(1)
return z
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.view_copy.default($0, [4, 2])
$2 = torch._ops.aten.add.Tensor($1, 1)
$3 = torch._ops.aten.mul.Tensor($2, 2)
$4 = torch._ops.aten.div.Tensor($3, 1)""")
def test_only_one_view(self):
def f(x):
# This tests that we don't have any unnecessary views in the trace.
# If the input wasn't mutated, we don't need to regenerate it,
# so there should be a total of 1 op in the output trace.
return x.view(4, 2)
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.view_copy.default($0, [4, 2])""")
def test_everything(self):
def f(x):
# test: everything
tmp = torch.ones(2, 2)
x2 = x + x
y = x2.view(8)
z0 = y.reshape(2, 4)
z1 = z0.transpose(1, 0)
z1.unsqueeze_(0)
z1.squeeze_()
z2, z3 = z1.split(2)
z2.add_(tmp)
z4 = z0[0] + z2.reshape(4)
return z2
self.assert_functionalization(f, torch.ones(4, 2))
logs = self.get_logs(f, torch.ones(4, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.add.Tensor($0, $0)
$2 = torch._ops.aten.view_copy.default($1, [8])
$3 = torch._ops.aten._reshape_alias_copy.default($2, [2, 4], [4, 1])
$4 = torch._ops.aten.transpose_copy.int($3, 1, 0)
$5 = torch._ops.aten.view_copy.default($1, [8])
$6 = torch._ops.aten._reshape_alias_copy.default($5, [2, 4], [4, 1])
$7 = torch._ops.aten.transpose_copy.int($6, 1, 0)
$8 = torch._ops.aten.unsqueeze_copy.default($7, 0)
$9 = torch._ops.aten.view_copy.default($1, [8])
$10 = torch._ops.aten._reshape_alias_copy.default($9, [2, 4], [4, 1])
$11 = torch._ops.aten.transpose_copy.int($10, 1, 0)
$12 = torch._ops.aten.unsqueeze_copy.default($11, 0)
$13 = torch._ops.aten.squeeze_copy.default($12)
$14, $15 = torch._ops.aten.split_copy.Tensor($13, 2)
$16 = torch._ops.aten.add.Tensor($14, tensor([[1., 1.],
[1., 1.]]))
$17 = torch._ops.aten.select_copy.int($3, 0, 0)
$18 = torch._ops.aten.clone.default($16, memory_format=torch.contiguous_format)
$19 = torch._ops.aten._unsafe_view.default($18, [4])
$20 = torch._ops.aten.view_copy.default($1, [8])
$21 = torch._ops.aten._reshape_alias_copy.default($20, [2, 4], [4, 1])
$22 = torch._ops.aten.transpose_copy.int($21, 1, 0)
$23 = torch._ops.aten.unsqueeze_copy.default($22, 0)
$24 = torch._ops.aten.squeeze_copy.default($23)
$25 = torch._ops.aten.slice_scatter.default($24, $16, 0, 0, 2)
$26 = torch._ops.aten.unsqueeze_copy.default($25, 0)
$27 = torch._ops.aten.squeeze_copy.dim($26, 0)
$28 = torch._ops.aten.transpose_copy.int($27, 1, 0)
$29 = torch._ops.aten._reshape_alias_copy.default($28, [8], [1])
$30 = torch._ops.aten.view_copy.default($29, [4, 2])
$31 = torch._ops.aten.view_copy.default($30, [8])
$32 = torch._ops.aten._reshape_alias_copy.default($31, [2, 4], [4, 1])
$33 = torch._ops.aten.select_copy.int($32, 0, 0)
$34 = torch._ops.aten.add.Tensor($33, $19)""")
def test_reapply_views_simple(self):
def f(x):
tmp = torch.ones(4, 2)
y = x.view(4, 2)
y.add_(tmp)
z = x * x
return y
self.assert_functionalization(f, torch.ones(4, 2), reapply_views=True)
logs = self.get_logs(f, torch.ones(4, 2), reapply_views=True)
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.view.default($0, [4, 2])
$2 = torch._ops.aten.add.Tensor($1, tensor([[1., 1.],
[1., 1.],
[1., 1.],
[1., 1.]]))
$3 = torch._ops.aten.view.default($2, [4, 2])
$4 = torch._ops.aten.mul.Tensor($3, $3)""")
def test_aliases_maintained_after_pass_when_reapplying_views(self):
def f(x):
tmp = torch.ones(4, 2)
y = x.view(4, 2)
z = x.view(4, 2)
y.add_(tmp)
return y, z
input_functional = torch._to_functional_tensor(torch.ones(4, 2))
torch._enable_functionalization(reapply_views=True)
try:
y, z = f(input_functional)
torch._sync(y)
torch._sync(z)
finally:
torch._disable_functionalization()
# y and z are aliases inside of the function, and that aliasing relationship should be maintained.
_y = torch._from_functional_tensor(y)
_z = torch._from_functional_tensor(z)
self.assertTrue(are_aliased(_y, _z))
# copy_() gets its own test, because it is special cased in functionalization.
# self.copy_(src) decomposes into src.to(self).expand_as(self).
def test_copy_(self):
def f(x):
tmp = torch.zeros(2, 2)
# NOTE: LoggingTensor isn't a mode, which means that the diagonal call
# will not be logged. This is fine for testing.
tmp_slice = tmp.diagonal()
y = tmp_slice.copy_(x)
z = y.add_(x)
return z
# Test 1: copy_() with same dtype and shape
# to() is a composite op that noops when the dtype/shape match, so nothing gets logged.
self.assert_functionalization(f, torch.ones(2))
logs = self.get_logs(f, torch.ones(2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.expand_copy.default($0, [2])
$2 = torch._ops.aten.add.Tensor($1, $0)""")
# Test 2: copy_() with same dtype, different shape
self.assert_functionalization(f, torch.ones(1))
logs = self.get_logs(f, torch.ones(1))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.expand_copy.default($0, [2])
$2 = torch._ops.aten.add.Tensor($1, $0)""")
# Test 3: copy_() with different dtype, same shape
self.assert_functionalization(f, torch.ones(2, dtype=torch.long))
logs = self.get_logs(f, torch.ones(2, dtype=torch.long))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten._to_copy.default($0, dtype=torch.float32, layout=torch.strided, device=device(type='cpu'), pin_memory=False)
$2 = torch._ops.aten.expand_copy.default($1, [2])
$3 = torch._ops.aten.add.Tensor($2, $0)""")
# Test 4: copy_() with different dtype, different shape
self.assert_functionalization(f, torch.ones(1, dtype=torch.long))
logs = self.get_logs(f, torch.ones(1, dtype=torch.long))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten._to_copy.default($0, dtype=torch.float32, layout=torch.strided, device=device(type='cpu'), pin_memory=False)
$2 = torch._ops.aten.expand_copy.default($1, [2])
$3 = torch._ops.aten.add.Tensor($2, $0)""")
def test_fill_(self):
def f(x):
y = x + x
z = y.diagonal()
z.fill_(0)
return y
self.assert_functionalization(f, torch.ones(2, 2))
logs = self.get_logs(f, torch.ones(2, 2))
self.assertExpectedInline('\n'.join(logs), """\
$0 = input('input')
$1 = torch._ops.aten.add.Tensor($0, $0)
$2 = torch._ops.aten.diagonal_copy.default($1)
$3 = torch._ops.aten.fill.Scalar($2, 0)""")
def test_nested_functions_propagate_updates(self):
def g(x):
# Create a view of x
y = x[0]
y.add_(1)
# The view, y, gets deallocated at the end of this function
def f(x):
# Calling g(x) should mutate x
g(x)
# We expect x to be synced here, even though the alias created in g() has been deallocated!
y = x + x
return y
self.assert_functionalization(f, torch.ones(2, 2))
def test_mixed_wrappers_valid(self):
def f(x, y):
z = x + y
z.add_(1)
return z
x1_not_functional = LoggingTensor(torch.ones(4))
x2_functional = torch._to_functional_tensor(LoggingTensor(torch.ones(4)))
with capture_logs() as logs:
y = f(x1_not_functional, x2_functional)
# Make sure that functionalization ran the "+" kernel
# with a functional + non-functional tensor, and wrapped the output appropriately.
self.assertExpectedInline('\n'.join(logs), """\
$2 = torch._ops.aten.add.Tensor($0, $1)
$3 = torch._ops.aten.add.Tensor($2, 1)""")
def test_mixed_wrappers_invalid(self):
x1_not_functional = torch.ones(4)
x2_functional = torch._to_functional_tensor(torch.ones(4))
# When dealing with mixed functional + nonfunctional tensors,
# normal_tensor.add_(functional_tensor) is not valid
# because normal_tensor would need to be "promoted" to a functional tensor.
with self.assertRaises(RuntimeError):
x1_not_functional.add_(x2_functional)
# This tests the behavior of functionalization with multiple layers of wrapped tensor subclasses.
def test_multiple_levels_of_wrapping(self):
def f(x):
# call an inplace op and have it get logged twice (by the outer + inner wrapper)
x.add_(1)
# Test 1: both the inner and outer wrapper are "functionalized"
x_inner_and_outer_functional = torch._to_functional_tensor(
InplaceLoggingTensor(torch._to_functional_tensor(LoggingTensor(torch.ones(4)))))
with capture_logs() as logs:
f(x_inner_and_outer_functional)
# Since both wrappers were unctionalized, they both log "add"
self.assertExpectedInline('\n'.join(logs), """\
$1 = torch._ops.aten.add.Tensor($0, 1)
$3 = torch._ops.aten.add.Tensor($2, 1)""")
# Test 2: only the inner wrapper is "functionalized"
x_only_inner_functional = InplaceLoggingTensor(torch._to_functional_tensor(LoggingTensor(torch.ones(4))))
with capture_logs() as logs:
f(x_only_inner_functional)
# Since only the inner wrapper is functionalized, then the inner (first) log is functionalized
self.assertExpectedInline('\n'.join(logs), """\
$1 = torch._ops.aten.add.Tensor($0, 1)
$3 = torch._ops.aten.add_.Tensor($2, 1)""")
# Test 3: only the inner wrapper is "functionalized"
x_only_outer_functional = torch._to_functional_tensor(InplaceLoggingTensor(LoggingTensor(torch.ones(4))))
with capture_logs() as logs:
f(x_only_outer_functional)
# Only the outer add_ is functionalized
# Since only the outer wrapper is functionalized, then the outer (second) log is functionalized
self.assertExpectedInline('\n'.join(logs), """\
$1 = torch._ops.aten.add_.Tensor($0, 1)
$3 = torch._ops.aten.add.Tensor($2, 1)""")
if __name__ == '__main__':
run_tests()
| 39.61014
| 129
| 0.600697
|
4a0e80faa8147ee154ce90191e52e77fc78def3c
| 36,246
|
py
|
Python
|
sdks/python/apache_beam/transforms/ptransform.py
|
rehmanmuradali/beam
|
de8ff705145cbbc41bea7750a0a5d3553924ab3a
|
[
"Apache-2.0"
] | 1
|
2022-01-24T22:07:52.000Z
|
2022-01-24T22:07:52.000Z
|
sdks/python/apache_beam/transforms/ptransform.py
|
rehmanmuradali/beam
|
de8ff705145cbbc41bea7750a0a5d3553924ab3a
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/apache_beam/transforms/ptransform.py
|
rehmanmuradali/beam
|
de8ff705145cbbc41bea7750a0a5d3553924ab3a
|
[
"Apache-2.0"
] | 1
|
2019-05-21T11:30:31.000Z
|
2019-05-21T11:30:31.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PTransform and descendants.
A PTransform is an object describing (not executing) a computation. The actual
execution semantics for a transform is captured by a runner object. A transform
object always belongs to a pipeline object.
A PTransform derived class needs to define the expand() method that describes
how one or more PValues are created by the transform.
The module defines a few standard transforms: FlatMap (parallel do),
GroupByKey (group by key), etc. Note that the expand() methods for these
classes contain code that will add nodes to the processing graph associated
with a pipeline.
As support for the FlatMap transform, the module also defines a DoFn
class and wrapper class that allows lambda functions to be used as
FlatMap processing functions.
"""
# pytype: skip-file
from __future__ import absolute_import
import copy
import itertools
import operator
import os
import sys
import threading
from builtins import hex
from builtins import object
from builtins import zip
from functools import reduce
from functools import wraps
from typing import TYPE_CHECKING
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import Union
from typing import overload
from google.protobuf import message
from apache_beam import error
from apache_beam import pvalue
from apache_beam.internal import pickler
from apache_beam.internal import util
from apache_beam.portability import python_urns
from apache_beam.pvalue import DoOutputsTuple
from apache_beam.transforms.display import DisplayDataItem
from apache_beam.transforms.display import HasDisplayData
from apache_beam.typehints import native_type_compatibility
from apache_beam.typehints import typehints
from apache_beam.typehints.decorators import IOTypeHints
from apache_beam.typehints.decorators import TypeCheckError
from apache_beam.typehints.decorators import WithTypeHints
from apache_beam.typehints.decorators import get_signature
from apache_beam.typehints.decorators import get_type_hints
from apache_beam.typehints.decorators import getcallargs_forhints
from apache_beam.typehints.trivial_inference import instance_to_type
from apache_beam.typehints.typehints import validate_composite_type_param
from apache_beam.utils import proto_utils
if TYPE_CHECKING:
from apache_beam import coders
from apache_beam.pipeline import Pipeline
from apache_beam.runners.pipeline_context import PipelineContext
from apache_beam.transforms.core import Windowing
from apache_beam.portability.api import beam_runner_api_pb2
__all__ = [
'PTransform',
'ptransform_fn',
'label_from_callable',
]
T = TypeVar('T')
PTransformT = TypeVar('PTransformT', bound='PTransform')
ConstructorFn = Callable[
['beam_runner_api_pb2.PTransform', Optional[Any], 'PipelineContext'], Any]
class _PValueishTransform(object):
  """Visitor for PValueish objects.

  A PValueish is a PValue, or list, tuple, dict of PValuesish objects.

  This visits a PValueish, constructing a (possibly mutated) copy.
  """
  def visit_nested(self, node, *args):
    # Rebuild containers of the same concrete type, visiting every child via
    # the subclass-supplied visit(); non-container nodes pass through as-is.
    if isinstance(node, (tuple, list)):
      # NOTE: rebinding `args` here shadows the visitor varargs — deliberate,
      # since the original *args have already been forwarded to each visit().
      args = [self.visit(x, *args) for x in node]
      if isinstance(node, tuple) and hasattr(node.__class__, '_make'):
        # namedtuples require unpacked arguments in their constructor
        return node.__class__(*args)
      else:
        return node.__class__(args)
    elif isinstance(node, dict):
      return node.__class__(
          {key: self.visit(value, *args)
           for (key, value) in node.items()})
    else:
      return node
class _SetInputPValues(_PValueishTransform):
  """Substitutes PValues (matched by object identity) with replacement values."""

  def visit(self, node, replacements):
    key = id(node)
    if key in replacements:
      return replacements[key]
    return self.visit_nested(node, replacements)
# Caches to allow for materialization of values when executing a pipeline
# in-process, in eager mode. This cache allows the same _MaterializedResult
# object to be accessed and used despite Runner API round-trip serialization.
_pipeline_materialization_cache = {}
_pipeline_materialization_lock = threading.Lock()
def _allocate_materialized_pipeline(pipeline):
  """Register *pipeline* in the per-process materialization cache."""
  cache_key = (os.getpid(), id(pipeline))
  with _pipeline_materialization_lock:
    _pipeline_materialization_cache[cache_key] = {}
def _allocate_materialized_result(pipeline):
  """Create, cache and return a fresh _MaterializedResult for *pipeline*.

  Raises ValueError when the pipeline was never registered via
  _allocate_materialized_pipeline in this process.
  """
  pipeline_id = id(pipeline)
  cache_key = (os.getpid(), pipeline_id)
  with _pipeline_materialization_lock:
    if cache_key not in _pipeline_materialization_cache:
      raise ValueError(
          'Materialized pipeline is not allocated for result cache.')
    results = _pipeline_materialization_cache[cache_key]
    # Result ids are sequential per pipeline.
    result = _MaterializedResult(pipeline_id, len(results))
    results[result._result_id] = result
    return result
def _get_materialized_result(pipeline_id, result_id):
  """Fetch a previously allocated _MaterializedResult from this process's cache.

  Raises when the (pid, pipeline_id) entry is absent — i.e. the lookup crossed
  a process boundary, which is not supported.
  """
  cache_key = (os.getpid(), pipeline_id)
  with _pipeline_materialization_lock:
    if cache_key not in _pipeline_materialization_cache:
      raise Exception(
          'Materialization in out-of-process and remote runners is not yet '
          'supported.')
    return _pipeline_materialization_cache[cache_key][result_id]
def _release_materialized_pipeline(pipeline):
  """Drop the materialization cache entry for *pipeline* in this process."""
  cache_key = (os.getpid(), id(pipeline))
  with _pipeline_materialization_lock:
    del _pipeline_materialization_cache[cache_key]
class _MaterializedResult(object):
def __init__(self, pipeline_id, result_id):
self._pipeline_id = pipeline_id
self._result_id = result_id
self.elements = []
def __reduce__(self):
# When unpickled (during Runner API roundtrip serailization), get the
# _MaterializedResult object from the cache so that values are written
# to the original _MaterializedResult when run in eager mode.
return (_get_materialized_result, (self._pipeline_id, self._result_id))
class _MaterializedDoOutputsTuple(pvalue.DoOutputsTuple):
  """DoOutputsTuple whose per-tag results are in-memory element lists.

  Wraps a deferred DoOutputsTuple after eager-mode materialization so that
  indexing by tag returns the collected elements rather than a deferred
  PValue.
  """
  def __init__(self, deferred, results_by_tag):
    # Tags are copied from the deferred tuple; no pipeline/transform is bound.
    super(_MaterializedDoOutputsTuple,
          self).__init__(None, None, deferred._tags, deferred._main_tag)
    self._deferred = deferred
    self._results_by_tag = results_by_tag

  def __getitem__(self, tag):
    if tag not in self._results_by_tag:
      # Fixed doubled article ("a a") in the original error message.
      raise KeyError(
          'Tag %r is not a defined output tag of %s.' % (tag, self._deferred))
    return self._results_by_tag[tag].elements
class _AddMaterializationTransforms(_PValueishTransform):
  """Visitor that appends a materializing ParDo to every PValue it visits.

  Used for eager execution: each PValue gets a '_MaterializeValues%d' ParDo
  that collects elements into a _MaterializedResult allocated in the
  process-local materialization cache.
  """
  def _materialize_transform(self, pipeline):
    # Allocate a result slot; the DoFn below closes over it, and the result's
    # __reduce__ keeps writes pointed at this instance across serialization.
    result = _allocate_materialized_result(pipeline)
    # Need to define _MaterializeValuesDoFn here to avoid circular
    # dependencies.
    from apache_beam import DoFn
    from apache_beam import ParDo

    class _MaterializeValuesDoFn(DoFn):
      def process(self, element):
        result.elements.append(element)
    # Label is unique per result so repeated materializations don't collide.
    materialization_label = '_MaterializeValues%d' % result._result_id
    return (materialization_label >> ParDo(_MaterializeValuesDoFn()), result)

  def visit(self, node):
    if isinstance(node, pvalue.PValue):
      transform, result = self._materialize_transform(node.pipeline)
      # Apply for the side effect only; the result holder is returned instead.
      node | transform
      return result
    elif isinstance(node, pvalue.DoOutputsTuple):
      results_by_tag = {}
      for tag in itertools.chain([node._main_tag], node._tags):
        results_by_tag[tag] = self.visit(node[tag])
      return _MaterializedDoOutputsTuple(node, results_by_tag)
    else:
      return self.visit_nested(node)
class _FinalizeMaterialization(_PValueishTransform):
  """Visitor that unwraps materialized holders into plain Python values."""

  def visit(self, node):
    # A materialized outputs tuple is already user-facing; pass it through.
    if isinstance(node, _MaterializedDoOutputsTuple):
      return node
    # A bare result collapses to its element list.
    if isinstance(node, _MaterializedResult):
      return node.elements
    return self.visit_nested(node)
def get_named_nested_pvalues(pvalueish):
  """Yields (name, pvalue) pairs for every PValue nested inside ``pvalueish``.

  Tuples (including named tuples), lists and dicts are recursed into; names
  at nested levels are joined with '.'. A bare PValue or DoOutputsTuple
  yields (None, value); any other leaf yields nothing.
  """
  if isinstance(pvalueish, tuple):
    # Check to see if it's a named tuple.
    fields = getattr(pvalueish, '_fields', None)
    if fields and len(fields) == len(pvalueish):
      tagged_values = zip(fields, pvalueish)
    else:
      tagged_values = enumerate(pvalueish)
  elif isinstance(pvalueish, list):
    tagged_values = enumerate(pvalueish)
  elif isinstance(pvalueish, dict):
    tagged_values = pvalueish.items()
  else:
    if isinstance(pvalueish, (pvalue.PValue, pvalue.DoOutputsTuple)):
      yield None, pvalueish
    # The return must sit at this level (not inside the `if` above):
    # otherwise a non-PValue leaf would fall through to the loop below
    # with `tagged_values` unbound and raise NameError.
    return
  for tag, subvalue in tagged_values:
    for subtag, subsubvalue in get_named_nested_pvalues(subvalue):
      if subtag is None:
        yield tag, subsubvalue
      else:
        yield '%s.%s' % (tag, subtag), subsubvalue
class _ZipPValues(object):
  """Pairs each PValue in a pvalueish with a value in a parallel out sibling.

  Sibling should have the same nested structure as pvalueish. Leaves in
  sibling are expanded across nested pvalueish lists, tuples, and dicts.

  For example

    ZipPValues().visit({'a': pc1, 'b': (pc2, pc3)},
                       {'a': 'A', 'b': 'B'})

  will return

    [('a', pc1, 'A'), ('b', pc2, 'B'), ('b', pc3, 'B')]
  """
  def visit(self, pvalueish, sibling, pairs=None, context=None):
    # Top-level call: allocate the accumulator, recurse, and return it.
    # The elif arms below are only reached on recursive calls, where
    # `pairs` is always provided.
    if pairs is None:
      pairs = []
      self.visit(pvalueish, sibling, pairs, context)
      return pairs
    elif isinstance(pvalueish, (pvalue.PValue, pvalue.DoOutputsTuple)):
      pairs.append((context, pvalueish, sibling))
    elif isinstance(pvalueish, (list, tuple)):
      self.visit_sequence(pvalueish, sibling, pairs, context)
    elif isinstance(pvalueish, dict):
      self.visit_dict(pvalueish, sibling, pairs, context)
  def visit_sequence(self, pvalueish, sibling, pairs, context):
    if isinstance(sibling, (list, tuple)):
      # Pad sibling with Nones so zip covers every element of pvalueish.
      for ix, (p, s) in enumerate(zip(pvalueish,
                                      list(sibling) + [None] * len(pvalueish))):
        self.visit(p, s, pairs, 'position %s' % ix)
    else:
      # Scalar sibling: broadcast it across all sequence elements.
      for p in pvalueish:
        self.visit(p, sibling, pairs, context)
  def visit_dict(self, pvalueish, sibling, pairs, context):
    if isinstance(sibling, dict):
      # Missing keys in sibling pair with None.
      for key, p in pvalueish.items():
        self.visit(p, sibling.get(key), pairs, key)
    else:
      # Scalar sibling: broadcast it across all dict values.
      for p in pvalueish.values():
        self.visit(p, sibling, pairs, context)
class PTransform(WithTypeHints, HasDisplayData):
  """A transform object used to modify one or more PCollections.

  Subclasses must define an expand() method that will be used when the transform
  is applied to some arguments. Typical usage pattern will be:

    input | CustomTransform(...)

  The expand() method of the CustomTransform object passed in will be called
  with input as an argument.
  """
  # By default, transforms don't have any side inputs.
  side_inputs = () # type: Sequence[pvalue.AsSideInput]
  # Used for nullary transforms.
  pipeline = None # type: Optional[Pipeline]
  # Default is unset.
  _user_label = None # type: Optional[str]
  def __init__(self, label=None):
    # type: (Optional[str]) -> None
    super(PTransform, self).__init__()
    self.label = label # type: ignore # https://github.com/python/mypy/issues/3004
  @property
  def label(self):
    # type: () -> str
    # The user-provided label, falling back to a class-derived default.
    return self._user_label or self.default_label()
  @label.setter
  def label(self, value):
    # type: (Optional[str]) -> None
    self._user_label = value
  def default_label(self):
    # type: () -> str
    return self.__class__.__name__
  def default_type_hints(self):
    # Derive hints from the expand() signature, stripped of the PCollection
    # wrappers, and merge under any class-decorator hints.
    fn_type_hints = IOTypeHints.from_callable(self.expand)
    if fn_type_hints is not None:
      fn_type_hints = fn_type_hints.strip_pcoll()
    # Prefer class decorator type hints for backwards compatibility.
    return get_type_hints(self.__class__).with_defaults(fn_type_hints)
  def with_input_types(self, input_type_hint):
    """Annotates the input type of a :class:`PTransform` with a type-hint.

    Args:
      input_type_hint (type): An instance of an allowed built-in type, a custom
        class, or an instance of a
        :class:`~apache_beam.typehints.typehints.TypeConstraint`.

    Raises:
      TypeError: If **input_type_hint** is not a valid type-hint.
        See
        :obj:`apache_beam.typehints.typehints.validate_composite_type_param()`
        for further details.

    Returns:
      PTransform: A reference to the instance of this particular
      :class:`PTransform` object. This allows chaining type-hinting related
      methods.
    """
    input_type_hint = native_type_compatibility.convert_to_beam_type(
        input_type_hint)
    validate_composite_type_param(
        input_type_hint, 'Type hints for a PTransform')
    return super(PTransform, self).with_input_types(input_type_hint)
  def with_output_types(self, type_hint):
    """Annotates the output type of a :class:`PTransform` with a type-hint.

    Args:
      type_hint (type): An instance of an allowed built-in type, a custom class,
        or a :class:`~apache_beam.typehints.typehints.TypeConstraint`.

    Raises:
      TypeError: If **type_hint** is not a valid type-hint. See
        :obj:`~apache_beam.typehints.typehints.validate_composite_type_param()`
        for further details.

    Returns:
      PTransform: A reference to the instance of this particular
      :class:`PTransform` object. This allows chaining type-hinting related
      methods.
    """
    type_hint = native_type_compatibility.convert_to_beam_type(type_hint)
    validate_composite_type_param(type_hint, 'Type hints for a PTransform')
    return super(PTransform, self).with_output_types(type_hint)
  def type_check_inputs(self, pvalueish):
    self.type_check_inputs_or_outputs(pvalueish, 'input')
  def infer_output_type(self, unused_input_type):
    # Falls back to Any when no simple output hint was declared.
    return self.get_type_hints().simple_output_type(self.label) or typehints.Any
  def type_check_outputs(self, pvalueish):
    self.type_check_inputs_or_outputs(pvalueish, 'output')
  def type_check_inputs_or_outputs(self, pvalueish, input_or_output):
    """Checks declared (input or output) type hints against actual PValues.

    Raises:
      TypeCheckError: when a PValue's element type is inconsistent with the
        corresponding declared hint.
    """
    type_hints = self.get_type_hints()
    hints = getattr(type_hints, input_or_output + '_types')
    if hints is None or not any(hints):
      return
    arg_hints, kwarg_hints = hints
    if arg_hints and kwarg_hints:
      raise TypeCheckError(
          'PTransform cannot have both positional and keyword type hints '
          'without overriding %s._type_check_%s()' %
          (self.__class__, input_or_output))
    root_hint = (
        arg_hints[0] if len(arg_hints) == 1 else arg_hints or kwarg_hints)
    # Walk the pvalueish and hint structures in lockstep.
    for context, pvalue_, hint in _ZipPValues().visit(pvalueish, root_hint):
      if isinstance(pvalue_, DoOutputsTuple):
        continue
      if pvalue_.element_type is None:
        # TODO(robertwb): It's a bug that we ever get here. (typecheck)
        continue
      if hint and not typehints.is_consistent_with(pvalue_.element_type, hint):
        at_context = ' %s %s' % (input_or_output, context) if context else ''
        raise TypeCheckError(
            '{type} type hint violation at {label}{context}: expected {hint}, '
            'got {actual_type}\nFull type hint:\n{debug_str}'.format(
                type=input_or_output.title(),
                label=self.label,
                context=at_context,
                hint=hint,
                actual_type=pvalue_.element_type,
                debug_str=type_hints.debug_str()))
  def _infer_output_coder(self, input_type=None, input_coder=None):
    # type: (...) -> Optional[coders.Coder]
    """Returns the output coder to use for output of this transform.

    Note: this API is experimental and is subject to change; please do not rely
    on behavior induced by this method.

    The Coder returned here should not be wrapped in a WindowedValueCoder
    wrapper.

    Args:
      input_type: An instance of an allowed built-in type, a custom class, or a
        typehints.TypeConstraint for the input type, or None if not available.
      input_coder: Coder object for encoding input to this PTransform, or None
        if not available.

    Returns:
      Coder object for encoding output of this PTransform or None if unknown.
    """
    # TODO(ccy): further refine this API.
    return None
  def _clone(self, new_label):
    """Clones the current transform instance under a new label."""
    transform = copy.copy(self)
    transform.label = new_label
    return transform
  def expand(self, input_or_inputs):
    # Subclasses must override; applies this transform's logic to the input.
    raise NotImplementedError
  def __str__(self):
    return '<%s>' % self._str_internal()
  def __repr__(self):
    return '<%s at %s>' % (self._str_internal(), hex(id(self)))
  def _str_internal(self):
    # Compact description: class name plus label/inputs/side_inputs if set.
    return '%s(PTransform)%s%s%s' % (
        self.__class__.__name__,
        ' label=[%s]' % self.label if
        (hasattr(self, 'label') and self.label) else '',
        ' inputs=%s' % str(self.inputs) if
        (hasattr(self, 'inputs') and self.inputs) else '',
        ' side_inputs=%s' % str(self.side_inputs) if self.side_inputs else '')
  def _check_pcollection(self, pcoll):
    # type: (pvalue.PCollection) -> None
    # Validates that `pcoll` is a PCollection attached to a pipeline.
    if not isinstance(pcoll, pvalue.PCollection):
      raise error.TransformError('Expecting a PCollection argument.')
    if not pcoll.pipeline:
      raise error.TransformError('PCollection not part of a pipeline.')
  def get_windowing(self, inputs):
    # type: (Any) -> Windowing
    """Returns the window function to be associated with transform's output.

    By default most transforms just return the windowing function associated
    with the input PCollection (or the first input if several).
    """
    # TODO(robertwb): Assert all input WindowFns compatible.
    return inputs[0].windowing
  def __rrshift__(self, label):
    # Supports the "'Label' >> transform" naming idiom.
    return _NamedPTransform(self, label)
  def __or__(self, right):
    """Used to compose PTransforms, e.g., ptransform1 | ptransform2."""
    if isinstance(right, PTransform):
      return _ChainedPTransform(self, right)
    return NotImplemented
  def __ror__(self, left, label=None):
    """Used to apply this PTransform to non-PValues, e.g., a tuple."""
    pvalueish, pvalues = self._extract_input_pvalues(left)
    pipelines = [v.pipeline for v in pvalues if isinstance(v, pvalue.PValue)]
    if pvalues and not pipelines:
      # No deferred inputs: build a throwaway DirectRunner pipeline and run
      # it eagerly below.
      deferred = False
      # pylint: disable=wrong-import-order, wrong-import-position
      from apache_beam import pipeline
      from apache_beam.options.pipeline_options import PipelineOptions
      # pylint: enable=wrong-import-order, wrong-import-position
      p = pipeline.Pipeline('DirectRunner', PipelineOptions(sys.argv))
    else:
      if not pipelines:
        if self.pipeline is not None:
          p = self.pipeline
        else:
          raise ValueError(
              '"%s" requires a pipeline to be specified '
              'as there are no deferred inputs.' % self.label)
      else:
        # All deferred inputs must belong to a single pipeline.
        p = self.pipeline or pipelines[0]
        for pp in pipelines:
          if p != pp:
            raise ValueError(
                'Mixing value from different pipelines not allowed.')
      deferred = not getattr(p.runner, 'is_eager', False)
    # pylint: disable=wrong-import-order, wrong-import-position
    from apache_beam.transforms.core import Create
    # pylint: enable=wrong-import-order, wrong-import-position
    # Wrap every non-PValue input in a Create so it becomes a PCollection.
    replacements = {
        id(v): p | 'CreatePInput%s' % ix >> Create(v, reshuffle=False)
        for ix,
        v in enumerate(pvalues)
        if not isinstance(v, pvalue.PValue) and v is not None
    }
    pvalueish = _SetInputPValues().visit(pvalueish, replacements)
    self.pipeline = p
    result = p.apply(self, pvalueish, label)
    if deferred:
      return result
    # Eager execution: run now and hand back concrete element lists.
    _allocate_materialized_pipeline(p)
    materialized_result = _AddMaterializationTransforms().visit(result)
    p.run().wait_until_finish()
    _release_materialized_pipeline(p)
    return _FinalizeMaterialization().visit(materialized_result)
  def _extract_input_pvalues(self, pvalueish):
    """Extract all the pvalues contained in the input pvalueish.

    Returns pvalueish as well as the flat inputs list as the input may have to
    be copied as inspection may be destructive.

    By default, recursively extracts tuple components and dict values.

    Generally only needs to be overriden for multi-input PTransforms.
    """
    # pylint: disable=wrong-import-order
    from apache_beam import pipeline
    # pylint: enable=wrong-import-order
    if isinstance(pvalueish, pipeline.Pipeline):
      pvalueish = pvalue.PBegin(pvalueish)
    def _dict_tuple_leaves(pvalueish):
      # Depth-first generator over the leaves of nested tuples/dicts.
      if isinstance(pvalueish, tuple):
        for a in pvalueish:
          for p in _dict_tuple_leaves(a):
            yield p
      elif isinstance(pvalueish, dict):
        for a in pvalueish.values():
          for p in _dict_tuple_leaves(a):
            yield p
      else:
        yield pvalueish
    return pvalueish, tuple(_dict_tuple_leaves(pvalueish))
  def _pvaluish_from_dict(self, input_dict):
    # A single-entry dict collapses to its lone value.
    if len(input_dict) == 1:
      return next(iter(input_dict.values()))
    else:
      return input_dict
  # Registry mapping transform URNs to (payload type, constructor) pairs,
  # populated via register_urn().
  _known_urns = {}  # type: Dict[str, Tuple[Optional[type], ConstructorFn]]
  @classmethod
  @overload
  def register_urn(
      cls,
      urn,  # type: str
      parameter_type,  # type: Type[T]
  ):
    # type: (...) -> Callable[[Union[type, Callable[[beam_runner_api_pb2.PTransform, T, PipelineContext], Any]]], Callable[[T, PipelineContext], Any]]
    pass
  @classmethod
  @overload
  def register_urn(
      cls,
      urn,  # type: str
      parameter_type,  # type: None
  ):
    # type: (...) -> Callable[[Union[type, Callable[[beam_runner_api_pb2.PTransform, bytes, PipelineContext], Any]]], Callable[[bytes, PipelineContext], Any]]
    pass
  @classmethod
  @overload
  def register_urn(cls,
                   urn,  # type: str
                   parameter_type,  # type: Type[T]
                   constructor  # type: Callable[[beam_runner_api_pb2.PTransform, T, PipelineContext], Any]
                  ):
    # type: (...) -> None
    pass
  @classmethod
  @overload
  def register_urn(cls,
                   urn,  # type: str
                   parameter_type,  # type: None
                   constructor  # type: Callable[[beam_runner_api_pb2.PTransform, bytes, PipelineContext], Any]
                  ):
    # type: (...) -> None
    pass
  @classmethod
  def register_urn(cls, urn, parameter_type, constructor=None):
    """Registers a constructor for rebuilding transforms with this URN.

    Usable either as a statement (constructor passed explicitly) or as a
    decorator on a constructor function or transform class.
    """
    def register(constructor):
      if isinstance(constructor, type):
        # Class decorator form: register the class's own
        # from_runner_api_parameter method.
        constructor.from_runner_api_parameter = register(
            constructor.from_runner_api_parameter)
      else:
        cls._known_urns[urn] = parameter_type, constructor
      return constructor
    if constructor:
      # Used as a statement.
      register(constructor)
    else:
      # Used as a decorator.
      return register
  def to_runner_api(self, context, has_parts=False, **extra_kwargs):
    # type: (PipelineContext, bool, Any) -> beam_runner_api_pb2.FunctionSpec
    """Serializes this transform to a Runner API FunctionSpec proto."""
    from apache_beam.portability.api import beam_runner_api_pb2
    urn, typed_param = self.to_runner_api_parameter(context, **extra_kwargs)
    if urn == python_urns.GENERIC_COMPOSITE_TRANSFORM and not has_parts:
      # TODO(BEAM-3812): Remove this fallback.
      urn, typed_param = self.to_runner_api_pickled(context)
    return beam_runner_api_pb2.FunctionSpec(
        urn=urn,
        payload=typed_param.SerializeToString() if isinstance(
            typed_param, message.Message) else typed_param.encode('utf-8')
        if isinstance(typed_param, str) else typed_param)
  @classmethod
  def from_runner_api(cls,
                      proto,  # type: Optional[beam_runner_api_pb2.PTransform]
                      context  # type: PipelineContext
                     ):
    # type: (...) -> Optional[PTransform]
    """Reconstructs a PTransform from its Runner API proto, or None."""
    if proto is None or proto.spec is None or not proto.spec.urn:
      return None
    parameter_type, constructor = cls._known_urns[proto.spec.urn]
    try:
      return constructor(
          proto,
          proto_utils.parse_Bytes(proto.spec.payload, parameter_type),
          context)
    except Exception:
      if context.allow_proto_holders:
        # For external transforms we cannot build a Python ParDo object so
        # we build a holder transform instead.
        from apache_beam.transforms.core import RunnerAPIPTransformHolder
        return RunnerAPIPTransformHolder(proto.spec, context)
      raise
  def to_runner_api_parameter(
      self,
      unused_context  # type: PipelineContext
  ):
    # type: (...) -> Tuple[str, Optional[Union[message.Message, bytes, str]]]
    # The payload here is just to ease debugging.
    return (
        python_urns.GENERIC_COMPOSITE_TRANSFORM,
        getattr(self, '_fn_api_payload', str(self)))
  def to_runner_api_pickled(self, unused_context):
    # type: (PipelineContext) -> Tuple[str, bytes]
    # Fallback serialization: pickle the whole transform object.
    return (python_urns.PICKLED_TRANSFORM, pickler.dumps(self))
  def runner_api_requires_keyed_input(self):
    # Overridden by transforms (e.g. GBK-like) that require keyed input.
    return False
@PTransform.register_urn(python_urns.GENERIC_COMPOSITE_TRANSFORM, None)
def _create_transform(unused_ptransform, payload, unused_context):
  """Rebuilds a generic composite transform, retaining its opaque payload."""
  empty_transform = PTransform()
  empty_transform._fn_api_payload = payload
  return empty_transform
@PTransform.register_urn(python_urns.PICKLED_TRANSFORM, None)
def _unpickle_transform(unused_ptransform, pickled_bytes, unused_context):
  """Rebuilds a transform that was serialized via pickling."""
  return pickler.loads(pickled_bytes)
class _ChainedPTransform(PTransform):
  """A composite that applies a sequence of transforms one after another.

  Built by the `|` operator on PTransforms; parts are kept in a single flat
  tuple (rather than a nested tree) for better monitoring, etc.
  """

  def __init__(self, *parts):
    # type: (*PTransform) -> None
    super(_ChainedPTransform, self).__init__(label=self._chain_label(parts))
    self._parts = parts

  def _chain_label(self, parts):
    # E.g. 'Map(f)|Filter(g)' for a two-part chain.
    return '|'.join(p.label for p in parts)

  def __or__(self, right):
    if not isinstance(right, PTransform):
      return NotImplemented
    # Extend the flat part list instead of nesting chained composites.
    return _ChainedPTransform(*(self._parts + (right, )))

  def expand(self, pval):
    # Equivalent to pval | p1 | p2 | ... over all parts in order.
    return reduce(operator.or_, self._parts, pval)
class PTransformWithSideInputs(PTransform):
  """A superclass for any :class:`PTransform` (e.g.
  :func:`~apache_beam.transforms.core.FlatMap` or
  :class:`~apache_beam.transforms.core.CombineFn`)
  invoking user code.

  :class:`PTransform` s like :func:`~apache_beam.transforms.core.FlatMap`
  invoke user-supplied code in some kind of package (e.g. a
  :class:`~apache_beam.transforms.core.DoFn`) and optionally provide arguments
  and side inputs to that code. This internal-use-only class contains common
  functionality for :class:`PTransform` s that fit this model.
  """
  def __init__(self, fn, *args, **kwargs):
    # type: (WithTypeHints, *Any, **Any) -> None
    if isinstance(fn, type) and issubclass(fn, WithTypeHints):
      # Don't treat Fn class objects as callables.
      raise ValueError('Use %s() not %s.' % (fn.__name__, fn.__name__))
    self.fn = self.make_fn(fn, bool(args or kwargs))
    # Now that we figure out the label, initialize the super-class.
    super(PTransformWithSideInputs, self).__init__()
    if (any([isinstance(v, pvalue.PCollection) for v in args]) or
        any([isinstance(v, pvalue.PCollection) for v in kwargs.values()])):
      raise error.SideInputError(
          'PCollection used directly as side input argument. Specify '
          'AsIter(pcollection) or AsSingleton(pcollection) to indicate how the '
          'PCollection is to be used.')
    # Split side inputs (AsSideInput instances) out of the raw args/kwargs.
    self.args, self.kwargs, self.side_inputs = util.remove_objects_from_args(
        args, kwargs, pvalue.AsSideInput)
    self.raw_side_inputs = args, kwargs
    # Prevent name collisions with fns of the form '<function <lambda> at ...>'
    self._cached_fn = self.fn
    # Ensure fn and side inputs are picklable for remote execution.
    try:
      self.fn = pickler.loads(pickler.dumps(self.fn))
    except RuntimeError as e:
      raise RuntimeError('Unable to pickle fn %s: %s' % (self.fn, e))
    self.args = pickler.loads(pickler.dumps(self.args))
    self.kwargs = pickler.loads(pickler.dumps(self.kwargs))
    # For type hints, because loads(dumps(class)) != class.
    self.fn = self._cached_fn
  def with_input_types(
      self, input_type_hint, *side_inputs_arg_hints, **side_input_kwarg_hints):
    """Annotates the types of main inputs and side inputs for the PTransform.

    Args:
      input_type_hint: An instance of an allowed built-in type, a custom class,
        or an instance of a typehints.TypeConstraint.
      *side_inputs_arg_hints: A variable length argument composed of
        of an allowed built-in type, a custom class, or a
        typehints.TypeConstraint.
      **side_input_kwarg_hints: A dictionary argument composed of
        of an allowed built-in type, a custom class, or a
        typehints.TypeConstraint.

    Example of annotating the types of side-inputs::

      FlatMap().with_input_types(int, int, bool)

    Raises:
      :class:`TypeError`: If **type_hint** is not a valid type-hint.
        See
        :func:`~apache_beam.typehints.typehints.validate_composite_type_param`
        for further details.

    Returns:
      :class:`PTransform`: A reference to the instance of this particular
      :class:`PTransform` object. This allows chaining type-hinting related
      methods.
    """
    super(PTransformWithSideInputs, self).with_input_types(input_type_hint)
    side_inputs_arg_hints = native_type_compatibility.convert_to_beam_types(
        side_inputs_arg_hints)
    side_input_kwarg_hints = native_type_compatibility.convert_to_beam_types(
        side_input_kwarg_hints)
    for si in side_inputs_arg_hints:
      validate_composite_type_param(si, 'Type hints for a PTransform')
    for si in side_input_kwarg_hints.values():
      validate_composite_type_param(si, 'Type hints for a PTransform')
    self.side_inputs_types = side_inputs_arg_hints
    return WithTypeHints.with_input_types(
        self, input_type_hint, *side_inputs_arg_hints, **side_input_kwarg_hints)
  def type_check_inputs(self, pvalueish):
    """Type-checks the main input and all side inputs against declared hints.

    Raises:
      TypeCheckError: when a bound argument's type is inconsistent with the
        corresponding declared hint.
    """
    type_hints = self.get_type_hints()
    input_types = type_hints.input_types
    if input_types:
      args, kwargs = self.raw_side_inputs
      def element_type(side_input):
        # Deferred side inputs carry their own element type; concrete values
        # have their type inferred.
        if isinstance(side_input, pvalue.AsSideInput):
          return side_input.element_type
        return instance_to_type(side_input)
      arg_types = [pvalueish.element_type] + [element_type(v) for v in args]
      kwargs_types = {k: element_type(v) for (k, v) in kwargs.items()}
      argspec_fn = self._process_argspec_fn()
      # Bind actual types and declared hints to the fn's parameters, then
      # compare them name by name.
      bindings = getcallargs_forhints(argspec_fn, *arg_types, **kwargs_types)
      hints = getcallargs_forhints(
          argspec_fn, *input_types[0], **input_types[1])
      for arg, hint in hints.items():
        if arg.startswith('__unknown__'):
          continue
        if hint is None:
          continue
        if not typehints.is_consistent_with(bindings.get(arg, typehints.Any),
                                            hint):
          raise TypeCheckError(
              'Type hint violation for \'{label}\': requires {hint} but got '
              '{actual_type} for {arg}\nFull type hint:\n{debug_str}'.format(
                  label=self.label,
                  hint=hint,
                  actual_type=bindings[arg],
                  arg=arg,
                  debug_str=type_hints.debug_str()))
  def _process_argspec_fn(self):
    """Returns an argspec of the function actually consuming the data.
    """
    raise NotImplementedError
  def make_fn(self, fn, has_side_inputs):
    # TODO(silviuc): Add comment describing that this is meant to be overriden
    # by methods detecting callables and wrapping them in DoFns.
    return fn
  def default_label(self):
    return '%s(%s)' % (self.__class__.__name__, self.fn.default_label())
class _PTransformFnPTransform(PTransform):
  """A class wrapper for a function-based transform."""
  def __init__(self, fn, *args, **kwargs):
    super(_PTransformFnPTransform, self).__init__()
    # The wrapped function and the extra arguments to forward to it.
    self._fn = fn
    self._args = args
    self._kwargs = kwargs
  def display_data(self):
    """Exposes the wrapped fn and its arguments as display data."""
    res = {
        'fn': (
            self._fn.__name__
            if hasattr(self._fn, '__name__') else self._fn.__class__),
        'args': DisplayDataItem(str(self._args)).drop_if_default('()'),
        'kwargs': DisplayDataItem(str(self._kwargs)).drop_if_default('{}')
    }
    return res
  def expand(self, pcoll):
    # Since the PTransform will be implemented entirely as a function
    # (once called), we need to pass through any type-hinting information that
    # may have been annotated via the .with_input_types() and
    # .with_output_types() methods.
    kwargs = dict(self._kwargs)
    args = tuple(self._args)
    # TODO(BEAM-5878) Support keyword-only arguments.
    try:
      # Functions declaring a 'type_hints' parameter receive the hints as
      # their first extra positional argument.
      if 'type_hints' in get_signature(self._fn).parameters:
        args = (self.get_type_hints(), ) + args
    except TypeError:
      # Might not be a function.
      pass
    return self._fn(pcoll, *args, **kwargs)
  def default_label(self):
    # Include the first argument's label (commonly the user callable) for a
    # more descriptive default name.
    if self._args:
      return '%s(%s)' % (
          label_from_callable(self._fn), label_from_callable(self._args[0]))
    return label_from_callable(self._fn)
def ptransform_fn(fn):
  """A decorator for a function-based PTransform.

  Experimental; no backwards-compatibility guarantees.

  Args:
    fn: A function implementing a custom PTransform.

  Returns:
    A CallablePTransform instance wrapping the function-based PTransform.

  This wrapper provides an alternative, simpler way to define a PTransform.
  The standard method is to subclass from PTransform and override the expand()
  method. An equivalent effect can be obtained by defining a function that
  accepts an input PCollection and additional optional arguments and returns a
  resulting PCollection. For example::

    @ptransform_fn
    def CustomMapper(pcoll, mapfn):
      return pcoll | ParDo(mapfn)

  The equivalent approach using PTransform subclassing::

    class CustomMapper(PTransform):
      def __init__(self, mapfn):
        super(CustomMapper, self).__init__()
        self.mapfn = mapfn
      def expand(self, pcoll):
        return pcoll | ParDo(self.mapfn)

  With either method the custom PTransform can be used in pipelines as if
  it were one of the "native" PTransforms::

    result_pcoll = input_pcoll | 'Label' >> CustomMapper(somefn)

  Note that for both solutions the underlying implementation of the pipe
  operator (i.e., `|`) will inject the pcoll argument in its proper place
  (first argument if no label was specified and second argument otherwise).
  """
  # TODO(robertwb): Consider removing staticmethod to allow for self parameter.
  @wraps(fn)
  def _factory(*args, **kwargs):
    # Defer actual invocation of `fn` to the transform's expand().
    return _PTransformFnPTransform(fn, *args, **kwargs)
  return _factory
def label_from_callable(fn):
  """Derives a human-readable label for ``fn``.

  Preference order: the callable's own default_label(), then its __name__
  (with file/line details substituted for anonymous lambdas), and finally
  str(fn) as a last resort.
  """
  if hasattr(fn, 'default_label'):
    return fn.default_label()
  if hasattr(fn, '__name__'):
    name = fn.__name__
    if name != '<lambda>':
      return name
    code = fn.__code__
    return '<lambda at %s:%s>' % (
        os.path.basename(code.co_filename), code.co_firstlineno)
  return str(fn)
class _NamedPTransform(PTransform):
  """Wraps a transform with an explicit label, as in ``'Label' >> transform``."""

  def __init__(self, transform, label):
    self.transform = transform
    super(_NamedPTransform, self).__init__(label)

  def __ror__(self, pvalueish, _unused=None):
    # Delegate application to the wrapped transform, forwarding the
    # user-chosen label.
    return self.transform.__ror__(pvalueish, self.label)

  def expand(self, pvalue):
    # Application always goes through __ror__; direct expansion is an error.
    raise RuntimeError("Should never be expanded directly.")
| 36.20979
| 158
| 0.703747
|
4a0e80ffe148cd3b76bb22e63fb28a631d1700e3
| 191
|
py
|
Python
|
05/hijo.py
|
nogand/py0122
|
05d8b00bb1b8018ae6e616599adf4ef07196121a
|
[
"CC0-1.0"
] | null | null | null |
05/hijo.py
|
nogand/py0122
|
05d8b00bb1b8018ae6e616599adf4ef07196121a
|
[
"CC0-1.0"
] | null | null | null |
05/hijo.py
|
nogand/py0122
|
05d8b00bb1b8018ae6e616599adf4ef07196121a
|
[
"CC0-1.0"
] | null | null | null |
import time
def main():
    """Simulates a short unit of work with progress messages."""
    print("starting work")
    # One-second pause before each subsequent progress message.
    for message in ("work work work work work", "done working"):
        time.sleep(1)
        print(message)
# Run the demo only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| 15.916667
| 37
| 0.612565
|
4a0e810a3b1b8d8b4f1eff010f6b40a8daff7208
| 1,848
|
py
|
Python
|
common_widgets/dialogs/get_file_dialog.py
|
shuge/Qt-Python-Binding-Examples
|
efe40c8af6c3e0805a5a7c3d053b8c8bf893a803
|
[
"BSD-3-Clause"
] | 179
|
2015-01-08T10:21:28.000Z
|
2020-03-24T07:03:04.000Z
|
common_widgets/dialogs/get_file_dialog.py
|
tonytony2020/Qt-Python-Binding-Examples
|
efe40c8af6c3e0805a5a7c3d053b8c8bf893a803
|
[
"BSD-3-Clause"
] | 1
|
2019-12-23T17:14:37.000Z
|
2020-01-09T16:45:58.000Z
|
common_widgets/dialogs/get_file_dialog.py
|
shuge/Qt-Python-Binding-Examples
|
efe40c8af6c3e0805a5a7c3d053b8c8bf893a803
|
[
"BSD-3-Clause"
] | 57
|
2015-01-05T09:34:15.000Z
|
2019-11-18T06:12:08.000Z
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
Provides a dialog widget for selecting a file
Tested environment:
Mac OS X 10.6.8
http://www.pyside.org/docs/pyside/PySide/QtGui/QFileDialog.html
"""
import os
import sys
try:
from PySide import QtCore
from PySide import QtGui
except ImportError:
from PyQt4 import QtCore
from PyQt4 import QtGui
class Demo(QtGui.QMainWindow):
def __init__(self):
super(Demo, self).__init__()
x, y, w, h = 500, 200, 300, 400
self.setGeometry(x, y, w, h)
self.text_edit = QtGui.QTextEdit()
self.setCentralWidget(self.text_edit)
self.statusBar()
self.setFocus()
# open_file_action = QtGui.QAction(QtGui.QIcon('open.png'), 'Open', self)
open_file_action = QtGui.QAction('Open', self)
open_file_action.setShortcut('Ctrl+O') # `command + O` on Mac OS X
open_file_action.setStatusTip('Open new File')
open_file_action.triggered.connect(self._open_file_cb)
menubar = self.menuBar()
file_menu = menubar.addMenu('&File')
file_menu.addAction(open_file_action)
def _open_file_cb(self):
filename, filter = QtGui.QFileDialog.getOpenFileName(parent=self,
caption='Open file',
dir=os.getenv("HOME"))
print 'filename:', filename
print 'filter:', filter
if os.path.exists(filename):
buf = open(filename).read()
self.text_edit.setText(buf)
def show_and_raise(self):
self.show()
self.raise_()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
demo = Demo()
demo.show_and_raise()
sys.exit(app.exec_())
| 27.176471
| 83
| 0.584416
|
4a0e8157b6ced320f1ae025a60de198765418cbb
| 8,204
|
py
|
Python
|
tests/ut/python/parallel/parallel_end_to_end/prelu/_test_prelu_parallel_4p.py
|
unseenme/mindspore
|
4ba052f0cd9146ac0ccc4880a778706f1b2d0af8
|
[
"Apache-2.0"
] | 1
|
2020-05-13T11:31:21.000Z
|
2020-05-13T11:31:21.000Z
|
tests/ut/python/parallel/parallel_end_to_end/prelu/_test_prelu_parallel_4p.py
|
unseenme/mindspore
|
4ba052f0cd9146ac0ccc4880a778706f1b2d0af8
|
[
"Apache-2.0"
] | null | null | null |
tests/ut/python/parallel/parallel_end_to_end/prelu/_test_prelu_parallel_4p.py
|
unseenme/mindspore
|
4ba052f0cd9146ac0ccc4880a778706f1b2d0af8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
import numpy as np
import mindspore as ms
import mindspore.communication.management as distributedTool
from numpy import allclose
from mindspore import context
from mindspore.nn import Cell
from mindspore.ops import operations as P
from mindspore.common.tensor import Tensor
from mindspore.ops.composite import grad_all_with_sens
# Number of devices participating in the parallel test.
device_num=4
# Rank of this process, injected by the distributed launcher via RANK_ID.
device_id = int(os.environ["RANK_ID"])
# Directory for test output artifacts.
path = "./output/"
def setup_module():
    """Module-level pytest setup: configure graph mode, the auto-parallel
    context and the distributed communication group before tests run."""
    print("~~~~~~~~~~~set up~~~~~~~~~~~~~")
    context.set_context(mode=context.GRAPH_MODE)
    context.set_auto_parallel_context(device_num=device_num, global_rank=device_id)
    distributedTool.init()
    # Group "0-3" spans all four participating devices.
    distributedTool.create_group("0-3", [0,1,2,3])
    print("~~~~~~~~~~~set up finished~~~~~~~~~~~~~")
def teardown_module():
    """Module-level pytest teardown; only logs, no cleanup is required."""
    print("~~~~~~~~~~~~tear down~~~~~~~~~~")
class PReLU(Cell):
    """TensorAdd followed by PReLU, with optional parallel strategies.

    NOTE(review): the `channel` and `w` constructor arguments are accepted
    but unused here; the PReLU weight is supplied at construct() time.
    """
    def __init__(self, channel=1, w=0.25, strategy_=None, strategy1_=None):
        super(PReLU, self).__init__()
        self.add = P.TensorAdd(strategy=strategy1_)
        self.prelu = P.PReLU(strategy=strategy_)
    def construct(self, x, z, w):
        # Callers in this file pass z as an all-zeros tensor, so the add is
        # effectively an identity that carries the parallel strategy.
        out = self.add(x, z)
        return self.prelu(out, w)
class Grad(Cell):
    """Wraps a network to compute gradients of all its inputs."""
    def __init__(self, network):
        super(Grad, self).__init__()
        self.network = network
    def construct(self, input,z, w, output_grad):
        # output_grad acts as the sensitivity (seed gradient) for backprop.
        return grad_all_with_sens(self.network)(input,z,w, output_grad)
class PReLUFactory:
    """Test helper comparing single-device vs. semi-auto-parallel PReLU forward/backward results.

    strategy is a tuple whose second element (strategy[1]) is the per-axis
    cut-count spec used to shard the input among devices.
    """

    def __init__(self, input_shape, strategy):
        # input_shape: NCHW-style shape; second entry is the channel count.
        n, c = input_shape[:2]
        prefix = ""
        size = 1
        for s in input_shape:
            prefix = prefix + str(s)
            size = size * s
        self.prefix = prefix
        # Bound values to a small deterministic range centered on zero.
        number_range = min(1000, size)
        self.input_np = np.reshape(np.arange(0, size) % number_range - number_range / 2, input_shape).astype(np.float32)
        self.output_grad_np = np.reshape((np.arange(0, size) % (number_range - 10) - number_range / 2) * 0.1, input_shape).astype(np.float32)
        self.channel = c
        self.weight = np.array([np.float32(0.25)] * c)
        self.strategy = strategy

    def forward_mindspore_impl(self):
        # Reference forward pass on a single device.
        net = PReLU(channel=self.channel, w=self.weight)
        x = Tensor(self.input_np)
        z = Tensor(np.zeros(self.input_np.shape), ms.float32)
        w = Tensor(self.weight)
        out = net(x, z, w)
        return out.asnumpy()

    def forward_mindspore_parallel_impl(self):
        # Parallel forward pass: compile with the full inputs, run with this
        # device's shard (selected by device_id).
        net = PReLU(channel=self.channel, w=self.weight, strategy_=self.strategy, strategy1_=(self.strategy[0], self.strategy[1], self.strategy[1]))
        context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
        net.set_auto_parallel()
        x = Tensor(self.input_np)
        z = Tensor(np.zeros(self.input_np.shape), ms.float32)
        w = Tensor(self.weight)
        inputs = self.get_parallel_blocks(self.input_np, self.strategy[1])
        block_id = device_id % len(inputs)
        x1 = Tensor(inputs[block_id])
        z1 = Tensor(np.zeros(inputs[block_id].shape), ms.float32)
        w1 = Tensor(self.weight)
        out = net(x, z, w, parallel_inputs_compile=[x, z, w], parallel_inputs_run=[x1, z1, w1])
        return out.asnumpy()

    def grad_mindspore_impl(self):
        # Reference gradients on a single device.
        output_grad = Tensor(self.output_grad_np)
        x = Tensor(self.input_np)
        z = Tensor(np.zeros(self.input_np.shape), ms.float32)
        w = Tensor(self.weight)
        net = PReLU(channel=self.channel, w=self.weight)
        grad_net = Grad(net)
        grad_net.set_train()
        input_grad = grad_net(x, z, w, output_grad)
        return input_grad

    def grad_mindspore_parallel_impl(self):
        # Parallel gradients: the sens (output grad) is sharded the same way as the input.
        output_grads = self.get_parallel_blocks(self.output_grad_np, self.strategy[1])
        block_id = device_id % len(output_grads)
        output_grad = Tensor(output_grads[block_id])
        x = Tensor(self.input_np)
        z = Tensor(np.zeros(self.input_np.shape), ms.float32)
        w = Tensor(self.weight)
        net = PReLU(channel=self.channel, w=self.weight, strategy_=self.strategy, strategy1_=(self.strategy[0], self.strategy[1], self.strategy[1]))
        grad_net = Grad(net)
        context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
        grad_net.set_auto_parallel()
        grad_net.set_train()
        inputs = self.get_parallel_blocks(self.input_np, self.strategy[1])
        x1 = Tensor(inputs[block_id])
        z1 = Tensor(np.zeros(inputs[block_id].shape), ms.float32)
        w1 = Tensor(self.weight)
        input_grad = grad_net(x, z, w, output_grad, parallel_inputs_compile=[x, z, w, output_grad], parallel_inputs_run=[x1, z1, w1, output_grad])
        return input_grad

    def get_parallel_blocks(self, input_, strategy):
        # Split input_ along each axis by the per-axis cut counts in strategy,
        # returning the resulting blocks in device order.
        blocks = [input_]
        i = 0
        for stra in strategy:
            temp = []
            while len(blocks) > 0:
                block = blocks.pop(0)
                temp.extend(np.split(block, stra, axis=i))
            blocks.extend(temp)
            i += 1
        return blocks

    def forward_cmp(self):
        # This device's shard of the reference output must match the parallel output.
        out_mindspore = self.forward_mindspore_impl()
        out_mindspore_parallel = self.forward_mindspore_parallel_impl()
        out_blocks = self.get_parallel_blocks(out_mindspore, self.strategy[1])
        block_id = device_id % len(out_blocks)
        assert np.allclose(out_blocks[block_id], out_mindspore_parallel, 0.0001, 0.001)

    def grad_cmp(self):
        # Gradients w.r.t. x and z are compared shard-wise; the weight gradient
        # (index 2) is compared unsharded.
        input_grad_mindspore = self.grad_mindspore_impl()
        input_grad_mindspore_parallel = self.grad_mindspore_parallel_impl()
        input_grad_mindspore0 = input_grad_mindspore[0].asnumpy()
        input_grad_mindspore1 = input_grad_mindspore[1].asnumpy()
        input_grad_mindspore2 = input_grad_mindspore[2].asnumpy()
        input_grad_mindspore_parallel0 = input_grad_mindspore_parallel[0].asnumpy()
        input_grad_mindspore_parallel1 = input_grad_mindspore_parallel[1].asnumpy()
        input_grad_mindspore_parallel2 = input_grad_mindspore_parallel[2].asnumpy()
        input_grad_blocks = self.get_parallel_blocks(input_grad_mindspore0, self.strategy[1])
        input1_grad_blocks = self.get_parallel_blocks(input_grad_mindspore1, self.strategy[1])
        block_id = device_id % len(input_grad_blocks)
        assert np.allclose(input_grad_blocks[block_id], input_grad_mindspore_parallel0, 0.0001, 0.0001)
        assert np.allclose(input_grad_mindspore2, input_grad_mindspore_parallel2, 0.0001, 0.0001)
        assert np.allclose(input1_grad_blocks[block_id], input_grad_mindspore_parallel1, 0.0001, 0.0001)
@pytest.mark.reid_grad
def test_reid_prelu_input_128x64x112x112_repeat():
    """Forward parity for PReLU with a 2-way split on axis 2 only (other axes repeated)."""
    # NOTE(review): `(1)` is an int, not a 1-tuple — `(1,)` may be intended; confirm.
    stra = (0, (1, 1, 2, 1), (1))
    fact = PReLUFactory(input_shape=(128, 64, 112, 112), strategy=stra)
    fact.forward_cmp()
@pytest.mark.reid_grad
def test_reid_grad_prelu_input_128x64x112x112_repeat():
    """Gradient parity for PReLU with a 2-way split on axis 2 only."""
    # NOTE(review): `(1)` is an int, not a 1-tuple — `(1,)` may be intended; confirm.
    stra = (0, (1, 1, 2, 1), (1))
    fact = PReLUFactory(input_shape=(128, 64, 112, 112), strategy=stra)
    fact.grad_cmp()
@pytest.mark.reid_grad
def test_reid_prelu_input_128x64x112x112_mix():
    """Forward parity for PReLU with 2-way splits on axes 0 and 3 (mixed sharding)."""
    # NOTE(review): `(1)` is an int, not a 1-tuple — `(1,)` may be intended; confirm.
    stra = (0, (2, 1, 1, 2), (1))
    fact = PReLUFactory(input_shape=(128, 64, 112, 112), strategy=stra)
    fact.forward_cmp()
@pytest.mark.reid_grad
def test_reid_grad_prelu_input_128x64x112x112_mix():
    """Gradient parity for PReLU with 2-way splits on axes 0 and 3 (mixed sharding)."""
    # NOTE(review): `(1)` is an int, not a 1-tuple — `(1,)` may be intended; confirm.
    stra = (0, (2, 1, 1, 2), (1))
    fact = PReLUFactory(input_shape=(128, 64, 112, 112), strategy=stra)
    fact.grad_cmp()
| 41.434343
| 149
| 0.656875
|
4a0e816e4323ab0c85b66f4b14b8e5322f9758d9
| 1,813
|
py
|
Python
|
code.py
|
nikhilmate07/python-mini-challenges
|
0c7ef7518344bbf13ed9e93970ec0970af96607b
|
[
"MIT"
] | null | null | null |
code.py
|
nikhilmate07/python-mini-challenges
|
0c7ef7518344bbf13ed9e93970ec0970af96607b
|
[
"MIT"
] | null | null | null |
code.py
|
nikhilmate07/python-mini-challenges
|
0c7ef7518344bbf13ed9e93970ec0970af96607b
|
[
"MIT"
] | null | null | null |
# --------------
#Code starts here
#Function to check for palindrome
def palindrome_check(num):
    """Return True if *num* reads the same forwards and backwards."""
    text = str(num)
    return text == text[::-1]
#Function to find the smallest palindrome
def palindrome(num):
    """Return the smallest palindromic number strictly greater than *num*."""
    candidate = num + 1
    while not palindrome_check(candidate):
        candidate += 1
    return candidate
#Code ends here
# --------------
#Code starts here
#Function to find anagram of one word in another
def a_scramble(str_1, str_2):
    """Return True if every letter of str_2 can be taken (with multiplicity) from str_1.

    The comparison is case-insensitive and each character of str_1 may be
    consumed at most once.
    """
    # Work on a lowercase copy so removal matches the case-insensitive test.
    # (The original replaced characters in the mixed-case str_1, so an
    # uppercase letter was never consumed and could be matched repeatedly.)
    pool = str_1.lower()
    for ch in str_2.lower():
        if ch not in pool:
            return False
        pool = pool.replace(ch, '', 1)  # consume one occurrence
    return True
#Code ends here
# --------------
#Importing header files
from math import sqrt
#Code starts here
#Function to check for perfect square
def is_perfect_square(x):
    """Return True if x is a perfect square.

    Uses math.isqrt for exact integer arithmetic: float sqrt loses precision
    for large values, which made the original misclassify big squares.
    Negative numbers are never perfect squares.
    """
    from math import isqrt  # local import: exact integer square root
    if x < 0:
        return False
    # int(x) preserves the original truncating behavior for float inputs.
    root = isqrt(int(x))
    return root * root == x
#Function to check for fibonacci number
def check_fib(num):
    """Return True if *num* is a Fibonacci number.

    n is a Fibonacci number iff 5*n^2 + 4 or 5*n^2 - 4 is a perfect square.
    """
    candidate = 5 * num * num
    return is_perfect_square(candidate + 4) or is_perfect_square(candidate - 4)
#Code ends here
# --------------
#Code starts here
#Function to compress string
def compress(word):
    """Run-length encode *word* after lower-casing it, e.g. 'AAAbb' -> 'a3b2'."""
    word = word.lower()
    pieces = []
    idx = 0
    length = len(word)
    while idx < length:
        current = word[idx]
        run_start = idx
        # Advance to the end of the current run of identical characters.
        while idx < length and word[idx] == current:
            idx += 1
        pieces.append(current)
        pieces.append(str(idx - run_start))
    return ''.join(pieces)
#Code ends here
# --------------
#Code starts here
#Function to check existence of k distinct characters in string
def k_distinct(string, k):
    """Return True if *string* contains at least k distinct characters (case-insensitive)."""
    distinct_chars = set(string.lower())
    return len(distinct_chars) >= k
#Code ends here
| 19.287234
| 120
| 0.562052
|
4a0e8332fe8f717deec8aed8828f0a3547c1addd
| 1,191
|
py
|
Python
|
tests/testapp/management/commands/runserver.py
|
AgDude/gargoyle
|
2ead9ad412f0f29eb5b82fc5b9083f30e36ac870
|
[
"Apache-2.0"
] | 138
|
2016-01-08T13:37:53.000Z
|
2022-03-25T10:41:06.000Z
|
tests/testapp/management/commands/runserver.py
|
AgDude/gargoyle
|
2ead9ad412f0f29eb5b82fc5b9083f30e36ac870
|
[
"Apache-2.0"
] | 39
|
2015-12-28T23:16:17.000Z
|
2018-05-26T09:30:29.000Z
|
tests/testapp/management/commands/runserver.py
|
AgDude/gargoyle
|
2ead9ad412f0f29eb5b82fc5b9083f30e36ac870
|
[
"Apache-2.0"
] | 11
|
2018-07-13T10:10:24.000Z
|
2020-12-28T20:24:29.000Z
|
"""
Exists to ensure you can always log in with admin/password when running in
'dev' mode - see tests/README.rst
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from django.contrib.auth.models import User
from django.contrib.staticfiles.management.commands.runserver import Command as BaseCommand
from django.core.management import call_command
from gargoyle.models import SELECTIVE, Switch
class Command(BaseCommand):
    """runserver variant that seeds a default admin user and an example switch (dev/test mode)."""

    def check_migrations(self):
        # Piggy-back on runserver's migration check: apply migrations and then
        # seed fixtures before the server starts serving.
        call_command('migrate', interactive=True)
        if not User.objects.exists():
            self.stdout.write(
                "Welcome to Gargoyle test mode\n"
                "Login with username 'admin', password 'password'",
            )
            user = User(username='admin', is_superuser=True, is_staff=True)
            user.set_password('password')
            user.save()
        if not Switch.objects.exists():
            Switch.objects.create(
                label="Example Switch",
                key='example_switch',
                description="This is an example switch prebuilt for trying out the nexus interface",
                status=SELECTIVE,
            )
| 35.029412
| 100
| 0.655751
|
4a0e8335950a9f443b1dc7e74184c6107bb906c4
| 204
|
py
|
Python
|
vgg/model/loss.py
|
mhd53/vgg-from-torch
|
fbcca53432648a492550fb14d2c42c10230d76f5
|
[
"MIT"
] | null | null | null |
vgg/model/loss.py
|
mhd53/vgg-from-torch
|
fbcca53432648a492550fb14d2c42c10230d76f5
|
[
"MIT"
] | null | null | null |
vgg/model/loss.py
|
mhd53/vgg-from-torch
|
fbcca53432648a492550fb14d2c42c10230d76f5
|
[
"MIT"
] | null | null | null |
import torch.nn.functional as F
import torch.nn as nn
def nll_loss(output, target):
    """Negative log-likelihood loss; *output* is expected to hold log-probabilities."""
    loss = F.nll_loss(output, target)
    return loss
def cross_entropy(output, target):
    """Cross-entropy loss over raw logits in *output* and class indices in *target*."""
    loss = F.cross_entropy(output, target)
    return loss
| 18.545455
| 42
| 0.75
|
4a0e84457055fd493e71267701641c911879ad57
| 13,043
|
py
|
Python
|
Final_THE/final1.py
|
BengisuA14/METU-ImageProcessing
|
a955b8c123ea33a20c0fa8d75aa1989e1c078bee
|
[
"MIT"
] | null | null | null |
Final_THE/final1.py
|
BengisuA14/METU-ImageProcessing
|
a955b8c123ea33a20c0fa8d75aa1989e1c078bee
|
[
"MIT"
] | null | null | null |
Final_THE/final1.py
|
BengisuA14/METU-ImageProcessing
|
a955b8c123ea33a20c0fa8d75aa1989e1c078bee
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2
from sklearn.cluster import MeanShift, estimate_bandwidth
from skimage.color import label2rgb, lab2rgb
from skimage import segmentation, filters, color
from skimage.future import graph
from skimage.util import img_as_float
import matplotlib.pyplot as plt
from scipy import ndimage as ndi
import math
from skimage.segmentation import clear_border
import mahotas
import os
def final_q1(input_file_path, output_path):
    """Run step 1 (binarize/crop) and step 2 (box/thin/classify) on every image in a folder."""
    i = 1
    for filename in os.listdir(input_file_path):
        # NOTE(review): os.listdir order is arbitrary, so the numeric suffix
        # attached to the output files is not deterministic across runs.
        img = cv2.imread(os.path.join(input_file_path, filename), 0)
        step1 = q1_step1(img, input_file_path, output_path, str(i))
        step2 = q1_step2(step1, input_file_path, output_path, str(i))
        i = i + 1
def q1_step1(img, image_path, output_path, k):
    """Binarize a digit image: adaptive threshold, border/margin cleanup, then Otsu inside the digit bounding box.

    Writes the result to "<output_path><image_path minus extension><k>_step1.png"
    and returns it as uint8.
    """
    # adaptive thresholding (after median + Gaussian smoothing)
    image = cv2.medianBlur(img, 3)
    image = cv2.GaussianBlur(image, (5, 5), 0)
    th3 = cv2.adaptiveThreshold(image, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
                                cv2.THRESH_BINARY, 11, 2)
    # inverse image if necessary by checking background color
    inv = cv2.bitwise_not(th3)
    # clear border
    clearborder = clear_border(inv)
    cropped_sides = crop_sides(clearborder)
    cropped_sides = cropped_sides.astype(np.uint8)
    # remove small objects
    small_objects_removed = bwareaopen(cropped_sides, 30)
    left, top, right, down = find_boundaries(small_objects_removed)
    otsu = otsu_th(img, left, top, right, down)
    otsu = otsu.astype(np.uint8)
    cv2.imwrite(output_path + image_path[:-4] + k + "_step1.png", otsu)
    return otsu
def find_boundaries(img):
    """Return (left, top, right, down) of the union bounding box of foreground components.

    Components whose stats touch the image origin or span the full width/height
    are treated as background and skipped. Also draws the box onto *img* in place.
    """
    output = cv2.connectedComponentsWithStats(img, 8, cv2.CV_32S)
    # The first cell is the number of labels
    num_labels = output[0]
    # The second cell is the label matrix
    labels = output[1]
    # The third cell is the stat matrix
    stats = output[2]
    # Start with an empty (inverted) box and grow it per component.
    left = img.shape[1]
    right = 0
    top = img.shape[0]
    down = 0
    for i in range(num_labels):
        row = stats[i][1]
        col = stats[i][0]
        width = stats[i][2]
        height = stats[i][3]
        if row != 0 and col != 0 and width < img.shape[1] and height < img.shape[0]:
            if row + height > down:
                down = row + height
            if row < top:
                top = row
            if col < left:
                left = col
            if col + width > right:
                right = col + width
    rect = cv2.rectangle(img, (left, top), (right, down), (255, 0, 0), 1)
    return left, top, right, down
def otsu_th(img, left, top, right, down):
    """Otsu-threshold the sub-image inside the given box and paste it back into a zeroed full-size image."""
    cropped = img[top:down, left:right]
    ret2, th2 = cv2.threshold(cropped, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    # If the top-left pixel came out white, foreground/background are swapped.
    if th2[0][0] == 255:
        th2 = cv2.bitwise_not(th2)
    new_img = np.zeros(img.shape)
    new_img[top:down, left:right] = th2
    return new_img
def skeletonize(img):
    """Morphological skeleton: repeatedly erode, accumulating (img - open(img)) until the image is empty."""
    skel = img.copy()
    img = img.copy()
    skel[:, :] = 0
    kernel = cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3))
    while True:
        eroded = cv2.morphologyEx(img, cv2.MORPH_ERODE, kernel)
        # erode-then-dilate == morphological opening of the current image
        temp = cv2.morphologyEx(eroded, cv2.MORPH_DILATE, kernel)
        temp = cv2.subtract(img, temp)
        skel = cv2.bitwise_or(skel, temp)
        img[:, :] = eroded[:, :]
        if cv2.countNonZero(img) == 0:
            break
    return skel
def q1_step2(img, image_path, output_path, k):
    """Box each digit component, dilate fragmented digits into one piece, thin them, and classify by Euler number.

    Writes the boxed overlay (_step2) and the thinned image (_step3); returns
    the list of per-digit sub-images.
    """
    colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255)]
    output = cv2.connectedComponentsWithStats(img, 8, cv2.CV_32S)
    # Get the results
    # The first cell is the number of labels
    num_labels = output[0]
    # The second cell is the label matrix
    labels = output[1]
    # The third cell is the stat matrix
    stats = output[2]
    boxes = []
    for i in range(num_labels):
        row = stats[i][0]
        col = stats[i][1]
        width = stats[i][2]
        height = stats[i][3]
        if (row != 0 and col != 0):
            boxed = img[col:col + height, row:row + width]
            num_labels, labels_im = cv2.connectedComponents(boxed)
            # Dilate until the box holds a single component (background + one digit).
            while num_labels > 2:
                kernel = np.ones((2, 2), np.uint8)
                boxed = cv2.dilate(boxed, kernel, iterations=1)
                num_labels, labels_im = cv2.connectedComponents(boxed)
            boxes.append(boxed)
            img[col:col + height, row:row + width] = boxed
    rgb = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
    # Recompute components after the in-place dilations above.
    output = cv2.connectedComponentsWithStats(img, 8, cv2.CV_32S)
    # Get the results
    # The first cell is the number of labels
    num_labels = output[0]
    # The second cell is the label matrix
    labels = output[1]
    # The third cell is the stat matrix
    stats = output[2]
    # The fourth cell is the centroid matrix
    centroids = output[3]
    thinned = img.copy()
    for i in range(num_labels):
        row = stats[i][0]
        col = stats[i][1]
        width = stats[i][2]
        height = stats[i][3]
        if (row != 0 and col != 0):
            rgb = cv2.rectangle(rgb, (row, col), (row + width, col + height), colors[i % 3], 1)
            skeleton = skeletonize(img[col:col + height, row:row + width])
            thinned[col:col + height, row:row + width] = skeleton
            # need to check more, dont thing necessary
            num_labels, labels_im = cv2.connectedComponents(img[col:col + height, row:row + width])
            # Negative Euler number -> the numeral contains a hole/loop.
            e = mahotas.euler(labels_im)
            if e < 0:
                print("Class 1 numeral in " + image_path)
            else:
                print("Class 2 numeral in " + image_path)
    cv2.imwrite(output_path + image_path[:-4] + k + "_step3.png", thinned)
    cv2.imwrite(output_path + image_path[:-4] + k + "_step2.png", rgb)
    return boxes
def crop_sides(img):
    """Zero out a 15% margin on every side of *img*, returning a new float array of the same shape."""
    rows = img.shape[0]
    cols = img.shape[1]
    margin_ratio = 0.15
    margin_r = int(rows * margin_ratio)
    margin_c = int(cols * margin_ratio)
    result = np.zeros((rows, cols))
    inner = (slice(margin_r, rows - margin_r), slice(margin_c, cols - margin_c))
    result[inner] = img[inner]
    return result
def final_q2(image_path, output_path):
    """Mean-shift superpixel segmentation followed by a normalized cut over a texture RAG.

    Writes three images: the superpixel average-color image (_step1), the RAG
    visualization (_step2), and the normalized-cut result (_step3).
    """
    # Loading original image
    originImg = cv2.imread(image_path)
    originImg = cv2.cvtColor(originImg, cv2.COLOR_BGR2RGB)
    # lab_image = cv2.cvtColor(originImg, cv2.COLOR_RGB2Lab)
    r, g, b = cv2.split(originImg)
    # Shape of original image
    originShape = originImg.shape
    # Append pixel coordinates as two extra channels so mean-shift is spatial too.
    # NOTE(review): coordinates are cast to uint8, so they wrap for images
    # larger than 256 px in either dimension — confirm intended.
    x, y = np.mgrid[0:originShape[0], 0:originShape[1]]
    new5dImage = cv2.merge((r, g, b, x.astype(np.uint8), y.astype(np.uint8)))
    # Converting image into array of dimension [nb of pixels in originImage, 3]
    # based on r g b intensities
    flatImg_shape = np.reshape(new5dImage, [-1, 5])
    ms = MeanShift(bandwidth=30.0, bin_seeding=True, max_iter=100)
    # Performing meanshift on flatImg
    ms.fit(flatImg_shape)
    # (r,g,b) vectors corresponding to the different clusters after meanshift
    labels = ms.labels_
    # Remaining colors after meanshift
    cluster_centers = ms.cluster_centers_
    # Finding and diplaying the number of clusters
    labels_unique = np.unique(labels)
    n_clusters_ = len(labels_unique)
    # Displaying segmented image
    segmentedImg = np.reshape(labels, originShape[:2])
    superpixels = label2rgb(segmentedImg, originImg, kind='avg', bg_label=-1,)
    rgb_image = cv2.cvtColor(superpixels, cv2.COLOR_RGB2BGR)
    cv2.imwrite(output_path + image_path[:-4] + "_step1.png", rgb_image)
    # define rag
    texture_graph = texture_rag(originImg, segmentedImg)
    show_rag = graph.show_rag(segmentedImg, texture_graph, originImg)
    cbar = plt.colorbar(show_rag)
    plt.savefig(output_path + image_path[:-4] + "_step2.png")
    labels2 = graph.cut_normalized(segmentedImg, texture_graph)
    out2 = label2rgb(labels2, originImg, bg_label=-1, kind='avg')
    cv2.imwrite(output_path + image_path[:-4] + "_step3.png", cv2.cvtColor(out2, cv2.COLOR_RGB2BGR))
def compute_feats(image, kernels):
    """Return an (n_kernels, 2) array holding (mean, variance) of *image* filtered by each kernel."""
    feats = np.zeros((len(kernels), 2), dtype=np.double)
    for idx, kern in enumerate(kernels):
        response = ndi.convolve(image, kern, mode='wrap')
        feats[idx, 0] = response.mean()
        feats[idx, 1] = response.var()
    return feats
def texture_rag(image, labels):
    """Build a RAG over *labels* whose edge weights combine Gabor-texture similarity and spatial proximity."""
    # initialize the RAG
    g = graph.RAG(labels, connectivity=2)
    hsv = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
    _, _, v = cv2.split(hsv)
    # Bank of Gabor kernels: 4 orientations x 2 sigmas x 2 frequencies.
    kernels = []
    for theta in range(4):
        theta = theta / 4. * np.pi
        for sigma in (1, 3):
            for frequency in (0.05, 0.25):
                kernel = np.real(filters.gabor_kernel(frequency, theta=theta,
                                                      sigma_x=sigma, sigma_y=sigma))
                kernels.append(kernel)
    # lets say we want for each node on the graph a label, a pixel count and a total color
    for n in g:
        g.nodes[n].update({'labels': [n], 'pixels': np.zeros(labels.shape), 'gabor': 0, 'i': 0, 'locationx': []
                           , 'locationy': [], 'centerx': 0, 'centery': 0})
    # Scatter each region's V-channel values into its own full-size mask and
    # record every pixel coordinate; 'i' ends up as the last V value seen.
    for index in np.ndindex(labels.shape):
        current = labels[index]
        g.nodes[current]['pixels'][index[0]][index[1]] = v[index[0]][index[1]]
        g.nodes[current]['i'] = v[index[0]][index[1]]
        g.nodes[current]['locationx'].append(index[0])
        g.nodes[current]['locationy'].append(index[1])
    # Region centroids from the collected coordinates.
    for n in g:
        g.nodes[n]['centerx'] = np.mean(np.asarray(g.nodes[n]['locationx']))
        g.nodes[n]['centery'] = np.mean(np.asarray(g.nodes[n]['locationy']))
    # calculate your own weights here
    for x, y, d in g.edges(data=True):
        ix = g.nodes[x]['i']
        iy = g.nodes[y]['i']
        # Downsample each region mask 3x before filtering to keep this tractable.
        shrink = (slice(0, None, 3), slice(0, None, 3))
        p_x = img_as_float(g.nodes[x]['pixels'])[shrink]
        # NOTE(review): both gx and gy scale by g.nodes[y]['i'] — gx presumably
        # should use g.nodes[x]['i']; confirm intended.
        gx = compute_feats(p_x, kernels) * g.nodes[y]['i'] * ix
        p_y = img_as_float(g.nodes[y]['pixels'])[shrink]
        gy = compute_feats(p_y, kernels) * g.nodes[y]['i'] * iy
        error = np.sum(abs(gx - gy)) / 1000000.0
        texture_dist = np.exp(-error)
        spatial_dist = math.sqrt((g.nodes[x]['centerx'] - g.nodes[y]['centerx'])**2 + (g.nodes[x]['centery'] - g.nodes[y]['centery'])**2)
        if spatial_dist > 100:
            spatial_dist = 0
        else:
            spatial_dist = spatial_dist / 100000.0
        spatial_dist = np.exp(-spatial_dist)
        similarity = texture_dist * spatial_dist
        d['weight'] = similarity
    return g
def final_q3(image_path, output_path):
    """Mask out grass/sky/white background, edge-detect the remaining object, and overlay Hough line segments.

    Writes the edge map (_step1), the detected segments on black (_step2), and
    the segments drawn over the original image (_step3).
    """
    image = cv2.imread(image_path)
    # blurred = cv2.GaussianBlur(image,(3,3),0)
    # convert to HSV color system
    hsv_image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
    # define green mask
    # green is applied to all the images as the dogs are all in grass
    low_green = np.array([35, 0, 0])
    high_green = np.array([86, 255, 255])
    mask_final = cv2.inRange(hsv_image, low_green, high_green)
    # define blue mask and apply it
    low_blue = np.array([80, 0, 0])
    high_blue = np.array([125, 255, 255])
    mask_blue = cv2.inRange(hsv_image, low_blue, high_blue)
    mask_final = mask_final + mask_blue
    # # define white mask and apply it
    low_white = np.array([0, 0, 200])
    high_white = np.array([145, 60, 255])
    mask_white = cv2.inRange(hsv_image, low_white, high_white)
    mask_final = mask_final + mask_white
    # make object white and background black
    mask_final = 255 - mask_final
    # Close then open with a 3x3 kernel to clean up mask noise.
    kernel = np.ones((3, 3), np.uint8)
    closing = cv2.morphologyEx(mask_final, cv2.MORPH_CLOSE, kernel)
    kernel = np.ones((3, 3), np.uint8)
    opening = cv2.morphologyEx(closing, cv2.MORPH_OPEN, kernel)
    small_objects_removed = bwareaopen(opening, 150)
    edges = cv2.Canny(small_objects_removed, 50, 200, None, 3)
    cv2.imwrite(output_path + image_path[:-4] + "_step1.png", edges)
    cdst = cv2.cvtColor(edges, cv2.COLOR_GRAY2BGR)
    cdstP = np.copy(cdst)
    newimage = np.copy(image)
    # Probabilistic Hough transform: min line length 40 px, max gap 10 px.
    linesP = cv2.HoughLinesP(edges, 1, np.pi / 180, 50, None, 40, 10)
    if linesP is not None:
        for i in range(0, len(linesP)):
            l = linesP[i][0]
            cv2.line(cdstP, (l[0], l[1]), (l[2], l[3]), (0, 0, 255), 3, cv2.LINE_AA)
            cv2.line(newimage, (l[0], l[1]), (l[2], l[3]), (0, 0, 255), 3, cv2.LINE_AA)
    cv2.imwrite(output_path + image_path[:-4] + "_step2.png", cdstP)
    cv2.imwrite(output_path + image_path[:-4] + "_step3.png", newimage)
def bwareaopen(imgBW, areaPixels):
    """Remove connected components whose contour area is <= areaPixels from a binary image (returns a copy)."""
    # Given a black and white image, first find all of its contours
    imgBWcopy = imgBW.copy()
    contours, hierarchy = cv2.findContours(imgBWcopy.copy(), cv2.RETR_LIST,
                                           cv2.CHAIN_APPROX_SIMPLE)
    # For each contour, determine its total occupying area
    for idx in np.arange(len(contours)):
        area = cv2.contourArea(contours[idx])
        if (area >= 0 and area <= areaPixels):
            # Fill small contours with black to erase them.
            cv2.drawContours(imgBWcopy, contours, idx, (0, 0, 0), -1)
    return imgBWcopy
# Script entry points — these run on import as well.
# NOTE(review): consider guarding with `if __name__ == "__main__":`.
final_q1("Dataset1/", "final/Dataset2/")
final_q2("Dataset2/1.jpg", "final/")
| 32.6075
| 185
| 0.628843
|
4a0e84bf7be22a389d7642d51a3d29a9f52dc496
| 4,415
|
py
|
Python
|
pollenNotifier/pollenNotifier.py
|
eifinger/appdaemon-scripts
|
e4a113a27a8501104c23330c4a7ea6aaf6f35939
|
[
"MIT"
] | 122
|
2018-08-17T07:07:10.000Z
|
2022-03-14T03:16:44.000Z
|
pollenNotifier/pollenNotifier.py
|
eifinger/appdaemon-scripts
|
e4a113a27a8501104c23330c4a7ea6aaf6f35939
|
[
"MIT"
] | 4
|
2018-08-16T06:37:10.000Z
|
2020-10-04T11:17:23.000Z
|
pollenNotifier/pollenNotifier.py
|
eifinger/appdaemon-scripts
|
e4a113a27a8501104c23330c4a7ea6aaf6f35939
|
[
"MIT"
] | 19
|
2018-09-24T01:07:40.000Z
|
2022-02-15T21:10:11.000Z
|
import appdaemon.plugins.hass.hassapi as hass
import datetime
#
# App which notifies you when there is a pollen forecast for today
# Used with sensors getting data from https://opendata.dwd.de/climate_environment/health/alerts/s31fg.json
#
#
# Args:
#
# app_switch: on/off switch for this app. example: input_boolean.turn_fan_on_when_hot
# pollen_sensor: sensor which shows pollen for today. example: sensor.pollen_101_roggen_today
# pollen_name: Name of the allergen. example: Roggen
# notify_name: Who to notify. example: group_notifications
# notify_time: When to notify. example: 08:00
# notify_threshold: Minimum level of pollen needed to notify. example: 1.0
# message: localized message to use in notification
#
# Release Notes
#
# Version 1.3.1:
# Use consistent message variable
#
# Version 1.3:
# use Notify App
#
# Version 1.2:
# message now directly in own yaml instead of message module
#
# Version 1.1:
# Added notify_threshold
#
# Version 1.0:
# Initial Version
class PollenNotifier(hass.Hass):
    """AppDaemon app: once a day, report today's pollen load and notify when it exceeds a threshold."""

    def initialize(self):
        """Read the app arguments, build level mappings, and schedule the daily callback."""
        self.timer_handle_list = []
        self.listen_event_handle_list = []
        self.listen_state_handle_list = []
        self.app_switch = self.args["app_switch"]
        self.pollen_sensor = self.args["pollen_sensor"]
        self.pollen_name = self.args["pollen_name"]
        self.notify_name = self.args["notify_name"]
        self.notify_time = self.args["notify_time"]
        self.notify_threshold = self.args["notify_threshold"]
        self.message = self.args["message"]
        self.message_no_data = self.args["message_no_data"]
        # German display names for the pollen level codes.
        self.mappingsdict = {}
        self.mappingsdict["-1"] = "keine Daten"
        self.mappingsdict["0"] = "Keine"
        self.mappingsdict["0-1"] = "Keine bis Geringe"
        self.mappingsdict["1"] = "Geringe"
        self.mappingsdict["1-2"] = "Geringe bis Mittlere"
        self.mappingsdict["2"] = "Mittlere"
        self.mappingsdict["2-3"] = "Mittlere bis Hohe"
        self.mappingsdict["3"] = "Hohe"
        # Numeric severity per level code, used for the threshold comparison.
        self.level_mapping_dict = {}
        self.level_mapping_dict["-1"] = -1.0
        self.level_mapping_dict["0"] = 0.0
        self.level_mapping_dict["0-1"] = 0.5
        self.level_mapping_dict["1"] = 1.0
        self.level_mapping_dict["1-2"] = 1.5
        self.level_mapping_dict["2"] = 2.0
        self.level_mapping_dict["2-3"] = 2.5
        self.level_mapping_dict["3"] = 3
        self.notifier = self.get_app("Notifier")
        # notify_time is an "HH:MM" string.
        hours = self.notify_time.split(":", 1)[0]
        minutes = self.notify_time.split(":", 1)[1]
        self.timer_handle_list.append(
            self.run_daily(
                self.run_daily_callback, datetime.time(int(hours), int(minutes), 0)
            )
        )

    def run_daily_callback(self, kwargs):
        """Check the pollen forecast and notify the user if the threshold is met."""
        if self.get_state(self.app_switch) == "on":
            pollen_sensor_state = self.get_state(self.pollen_sensor)
            self.log(
                "{} Belastung Heute: {}".format(self.pollen_name, pollen_sensor_state)
            )
            # "-1" means no data; "0" means no pollen load (cheerful suffix added).
            if pollen_sensor_state == "-1":
                message = self.message_no_data.format("Heute", self.pollen_name)
            elif pollen_sensor_state == "0":
                message = (
                    self.message.format(
                        "Heute",
                        self.mappingsdict[pollen_sensor_state],
                        self.pollen_name,
                    )
                    + " Genieß den Tag!"
                )
            else:
                message = self.message.format(
                    "Heute", self.mappingsdict[pollen_sensor_state], self.pollen_name
                )
            if self.level_mapping_dict[pollen_sensor_state] >= float(
                self.notify_threshold
            ):
                self.log("Notifying user")
                self.notifier.notify(self.notify_name, message)
            else:
                self.log("Threshold not met. Not notifying user")

    def terminate(self):
        """Cancel every timer and event/state listener registered by this app."""
        for timer_handle in self.timer_handle_list:
            self.cancel_timer(timer_handle)
        for listen_event_handle in self.listen_event_handle_list:
            self.cancel_listen_event(listen_event_handle)
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
| 35.604839
| 106
| 0.619706
|
4a0e84db2c82b2797aca396125d62ee176b9a43a
| 34,790
|
py
|
Python
|
base/tests/test_views/outline_initial_views/test_initial_form.py
|
rafsaf/Tribal-Wars-Planer
|
083af9b1efe814be3abe975b9ac8faccc00ebb09
|
[
"Apache-2.0"
] | 1
|
2021-12-01T18:20:08.000Z
|
2021-12-01T18:20:08.000Z
|
base/tests/test_views/outline_initial_views/test_initial_form.py
|
rafsaf/Tribal-Wars-Planer
|
083af9b1efe814be3abe975b9ac8faccc00ebb09
|
[
"Apache-2.0"
] | 21
|
2021-11-01T14:04:19.000Z
|
2022-03-25T06:31:03.000Z
|
base/tests/test_views/outline_initial_views/test_initial_form.py
|
rafsaf/Tribal-Wars-Planer
|
083af9b1efe814be3abe975b9ac8faccc00ebb09
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Rafał Safin (rafsaf). All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from django.urls import reverse
from django.utils import timezone
from base import forms
from base.models import TargetVertex, WeightMaximum
from base.models.player import Player
from base.models.stats import Stats
from base.tests.test_utils.mini_setup import MiniSetup
from utils.outline_initial import MakeOutline
class InitialForm(MiniSetup):
    def test_planer_initial_form___302_not_auth_redirect_login(self):
        """An anonymous user is redirected to the login page with ?next= set."""
        outline = self.get_outline()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        response = self.client.get(PATH)
        assert response.status_code == 302
        assert response.url == self.login_page_path(next=PATH)
    def test_planer_initial_form___404_foreign_user_no_access(self):
        """A user who does not own the outline gets 404 for both GET and POST."""
        outline = self.get_outline()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_foreign_user()
        response = self.client.get(PATH)
        assert response.status_code == 404
        response = self.client.post(PATH)
        assert response.status_code == 404
    def test_planer_initial_form___302_redirect_when_off_troops_empty(self):
        """With no off_troops data, the view redirects back to the outline detail page."""
        outline = self.get_outline()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        REDIRECT = reverse("base:planer_detail", args=[outline.pk])
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 302
        assert response.url == REDIRECT
    def test_planer_initial_form___302_redirect_when_invalid_off_troops(self):
        """Unparseable off_troops text also redirects back to the detail page."""
        outline = self.get_outline()
        outline.off_troops = self.random_lower_string()
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        REDIRECT = reverse("base:planer_detail", args=[outline.pk])
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 302
        assert response.url == REDIRECT
    def test_planer_initial_form___302_redirect_when_written(self):
        """When the outline is already written, redirect to the planer_initial view instead."""
        outline = self.get_outline(written="active")
        outline.off_troops = self.random_lower_string()
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        REDIRECT = reverse("base:planer_initial", args=[outline.pk])
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 302
        assert response.url == REDIRECT
    def test_planer_initial_form___200_off_troops_correct_and_creating_weights_and_mode_always_correct(
        self,
    ):
        """Valid off_troops returns 200, creates one WeightMaximum, and honors the ?t= mode param."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert (
            WeightMaximum.objects.filter(outline=outline, start="102|102").count() == 1
        )
        # With no targets defined, all counts and duplicate lists are empty.
        assert response.context.get("premium_error") is False
        assert response.context.get("real_dups") == []
        assert response.context.get("fake_dups") == []
        assert response.context.get("ruin_dups") == []
        assert response.context.get("len_real") == 0
        assert response.context.get("len_fake") == 0
        assert response.context.get("len_ruin") == 0
        assert response.context.get("estimated_time") == 0
        assert response.context.get("mode") == "real"
        response = self.client.get(PATH + "?t=fake")
        assert response.status_code == 200
        assert response.context.get("mode") == "fake"
        response = self.client.get(PATH + "?t=ruin")
        assert response.status_code == 200
        assert response.context.get("mode") == "ruin"
        # A premium error flag stored in the session is surfaced in the context.
        session = self.client.session
        session["premium_error"] = True
        session.save()
        response = self.client.get(PATH)
        assert response.context.get("premium_error") is True
    def test_planer_initial_form___200_show_lens_and_duplicates_correct(self):
        """Target counts and duplicate fake/ruin targets (with line references) appear in context."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        # 1 real, 2 fake and 4 ruin targets, all at the same coords (200|200).
        self.create_target_on_test_world(outline=outline)
        self.create_target_on_test_world(outline=outline, fake=True)
        self.create_target_on_test_world(outline=outline, fake=True)
        self.create_target_on_test_world(outline=outline, ruin=True)
        self.create_target_on_test_world(outline=outline, ruin=True)
        self.create_target_on_test_world(outline=outline, ruin=True)
        self.create_target_on_test_world(outline=outline, ruin=True)
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert response.context.get("premium_error") is False
        assert response.context.get("real_dups") == []
        assert response.context.get("fake_dups") == [
            {"target": "200|200", "duplicate": 2, "lines": "1,2"}
        ]
        # Line lists are truncated with "..." after three entries.
        assert response.context.get("ruin_dups") == [
            {"target": "200|200", "duplicate": 4, "lines": "1,2,3,..."}
        ]
        assert response.context.get("len_real") == 1
        assert response.context.get("len_fake") == 2
        assert response.context.get("len_ruin") == 4
        assert response.context.get("estimated_time") == 102
        assert response.context.get("mode") == "real"
    def test_planer_initial_form___200_initial_values_for_forms_works_well_1(self):
        """Forms 1-7 are prefilled from outline fields (first set of mode values)."""
        # form1
        initial_outline_targets = self.random_lower_string()
        initial_outline_fakes = self.random_lower_string()
        initial_outline_ruins = self.random_lower_string()
        # form2
        initial_outline_front_dist = self.random_integer()
        initial_outline_maximum_front_dist = self.random_integer()
        initial_outline_target_dist = self.random_integer()
        initial_outline_min_off = self.random_integer()
        initial_outline_excluded_coords = self.random_lower_string()
        # form3
        date = timezone.now().date()
        # form4
        mode_off = "far"
        mode_noble = "far"
        mode_division = "separatly"
        mode_guide = "single"
        mode_split = "together"
        initial_outline_fake_limit = self.random_integer(maximum=15)
        initial_outline_fake_mode = "all"
        # form 5
        night_bonus = True
        enter_t1 = self.random_integer(0, 10)
        enter_t2 = self.random_integer(10, 20)
        # form6
        initial_outline_off_left_catapult = 200
        initial_outline_catapult_default = 300
        initial_outline_average_ruining_points = "medium"
        # form7
        morale_on_targets_greater_than = 85
        morale_on = True
        # outline
        outline = self.get_outline(test_world=True)
        outline.morale_on_targets_greater_than = morale_on_targets_greater_than
        outline.morale_on = morale_on
        outline.date = date
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.initial_outline_targets = initial_outline_targets
        outline.initial_outline_fakes = initial_outline_fakes
        outline.initial_outline_ruins = initial_outline_ruins
        outline.initial_outline_front_dist = initial_outline_front_dist
        outline.initial_outline_maximum_front_dist = initial_outline_maximum_front_dist
        outline.initial_outline_target_dist = initial_outline_target_dist
        outline.initial_outline_min_off = initial_outline_min_off
        outline.initial_outline_excluded_coords = initial_outline_excluded_coords
        outline.mode_off = mode_off
        outline.mode_noble = mode_noble
        outline.mode_division = mode_division
        outline.mode_guide = mode_guide
        outline.mode_split = mode_split
        outline.initial_outline_fake_limit = initial_outline_fake_limit
        outline.initial_outline_fake_mode = initial_outline_fake_mode
        outline.night_bonus = night_bonus
        outline.enter_t1 = enter_t1
        outline.enter_t2 = enter_t2
        outline.initial_outline_off_left_catapult = initial_outline_off_left_catapult
        outline.initial_outline_catapult_default = initial_outline_catapult_default
        outline.initial_outline_average_ruining_points = (
            initial_outline_average_ruining_points
        )
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        form2: forms.AvailableTroopsForm = response.context["form2"]
        form3: forms.SettingDateForm = response.context["form3"]
        form4: forms.ModeOutlineForm = response.context["form4"]
        form5: forms.NightBonusSetForm = response.context["form5"]
        form6: forms.RuiningOutlineForm = response.context["form6"]
        form7: forms.MoraleOutlineForm = response.context["form7"]
        assert form1["target"].initial == initial_outline_targets
        assert form2["initial_outline_front_dist"].initial == initial_outline_front_dist
        assert (
            form2["initial_outline_maximum_front_dist"].initial
            == initial_outline_maximum_front_dist
        )
        assert (
            form2["initial_outline_target_dist"].initial == initial_outline_target_dist
        )
        assert form2["initial_outline_min_off"].initial == initial_outline_min_off
        assert (
            form2["initial_outline_excluded_coords"].initial
            == initial_outline_excluded_coords
        )
        assert form3["date"].initial == date
        assert form4["mode_off"].initial == mode_off
        assert form4["mode_noble"].initial == mode_noble
        assert form4["mode_division"].initial == mode_division
        assert form4["mode_guide"].initial == mode_guide
        assert form4["mode_split"].initial == mode_split
        assert form4["initial_outline_fake_limit"].initial == initial_outline_fake_limit
        assert form4["initial_outline_fake_mode"].initial == initial_outline_fake_mode
        assert form5["night_bonus"].initial == night_bonus
        assert form5["enter_t1"].initial == enter_t1
        assert form5["enter_t2"].initial == enter_t2
        assert (
            form6["initial_outline_off_left_catapult"].initial
            == initial_outline_off_left_catapult
        )
        assert (
            form6["initial_outline_catapult_default"].initial
            == initial_outline_catapult_default
        )
        assert (
            form6["initial_outline_average_ruining_points"].initial
            == initial_outline_average_ruining_points
        )
        assert (
            form7["morale_on_targets_greater_than"].initial
            == morale_on_targets_greater_than
        )
        assert form7["morale_on"].initial == morale_on
        # The ?t= query parameter switches form1's initial value to fakes / ruins.
        response = self.client.get(PATH + "?t=fake")
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        assert form1["target"].initial == initial_outline_fakes
        response = self.client.get(PATH + "?t=ruin")
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        assert form1["target"].initial == initial_outline_ruins
    def test_planer_initial_form___200_initial_values_for_forms_works_well_2(self):
        """Forms 1-7 are prefilled from outline fields (second set of mode values)."""
        # form1
        initial_outline_targets = self.random_lower_string()
        initial_outline_fakes = self.random_lower_string()
        initial_outline_ruins = self.random_lower_string()
        # form2
        initial_outline_front_dist = self.random_integer()
        initial_outline_maximum_front_dist = self.random_integer()
        initial_outline_target_dist = self.random_integer()
        initial_outline_min_off = self.random_integer()
        initial_outline_excluded_coords = self.random_lower_string()
        # form3
        date = timezone.now().date()
        # form4
        mode_off = "closest"
        mode_noble = "random"
        mode_division = "not_divide"
        mode_guide = "many"
        mode_split = "split"
        initial_outline_fake_limit = self.random_integer(maximum=15)
        initial_outline_fake_mode = "off"
        # form 5
        night_bonus = False
        enter_t1 = self.random_integer(0, 10)
        enter_t2 = self.random_integer(10, 20)
        # form6
        initial_outline_off_left_catapult = 75
        initial_outline_catapult_default = 99
        initial_outline_average_ruining_points = "big"
        # form7
        morale_on_targets_greater_than = 75
        morale_on = False
        # outline
        outline = self.get_outline(test_world=True)
        outline.morale_on_targets_greater_than = morale_on_targets_greater_than
        outline.morale_on = morale_on
        outline.date = date
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.initial_outline_targets = initial_outline_targets
        outline.initial_outline_fakes = initial_outline_fakes
        outline.initial_outline_ruins = initial_outline_ruins
        outline.initial_outline_front_dist = initial_outline_front_dist
        outline.initial_outline_maximum_front_dist = initial_outline_maximum_front_dist
        outline.initial_outline_target_dist = initial_outline_target_dist
        outline.initial_outline_min_off = initial_outline_min_off
        outline.initial_outline_excluded_coords = initial_outline_excluded_coords
        outline.mode_off = mode_off
        outline.mode_noble = mode_noble
        outline.mode_division = mode_division
        outline.mode_guide = mode_guide
        outline.mode_split = mode_split
        outline.initial_outline_fake_limit = initial_outline_fake_limit
        outline.initial_outline_fake_mode = initial_outline_fake_mode
        outline.night_bonus = night_bonus
        outline.enter_t1 = enter_t1
        outline.enter_t2 = enter_t2
        outline.initial_outline_off_left_catapult = initial_outline_off_left_catapult
        outline.initial_outline_catapult_default = initial_outline_catapult_default
        outline.initial_outline_average_ruining_points = (
            initial_outline_average_ruining_points
        )
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        form2: forms.AvailableTroopsForm = response.context["form2"]
        form3: forms.SettingDateForm = response.context["form3"]
        form4: forms.ModeOutlineForm = response.context["form4"]
        form5: forms.NightBonusSetForm = response.context["form5"]
        form6: forms.RuiningOutlineForm = response.context["form6"]
        form7: forms.MoraleOutlineForm = response.context["form7"]
        assert form1["target"].initial == initial_outline_targets
        assert form2["initial_outline_front_dist"].initial == initial_outline_front_dist
        assert (
            form2["initial_outline_maximum_front_dist"].initial
            == initial_outline_maximum_front_dist
        )
        assert (
            form2["initial_outline_target_dist"].initial == initial_outline_target_dist
        )
        assert form2["initial_outline_min_off"].initial == initial_outline_min_off
        assert (
            form2["initial_outline_excluded_coords"].initial
            == initial_outline_excluded_coords
        )
        assert form3["date"].initial == date
        assert form4["mode_off"].initial == mode_off
        assert form4["mode_noble"].initial == mode_noble
        assert form4["mode_division"].initial == mode_division
        assert form4["mode_guide"].initial == mode_guide
        assert form4["mode_split"].initial == mode_split
        assert form4["initial_outline_fake_limit"].initial == initial_outline_fake_limit
        assert form4["initial_outline_fake_mode"].initial == initial_outline_fake_mode
        assert form5["night_bonus"].initial == night_bonus
        assert form5["enter_t1"].initial == enter_t1
        assert form5["enter_t2"].initial == enter_t2
        assert (
            form6["initial_outline_off_left_catapult"].initial
            == initial_outline_off_left_catapult
        )
        assert (
            form6["initial_outline_catapult_default"].initial
            == initial_outline_catapult_default
        )
        assert (
            form6["initial_outline_average_ruining_points"].initial
            == initial_outline_average_ruining_points
        )
        assert (
            form7["morale_on_targets_greater_than"].initial
            == morale_on_targets_greater_than
        )
        assert form7["morale_on"].initial == morale_on
        # The ?t= query parameter switches form1's initial value to fakes / ruins.
        response = self.client.get(PATH + "?t=fake")
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        assert form1["target"].initial == initial_outline_fakes
        response = self.client.get(PATH + "?t=ruin")
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        assert form1["target"].initial == initial_outline_ruins
    def test_planer_initial_form___302_test_form1_real(self):
        """Form1 (real targets): invalid input / stale troops create nothing, valid input creates targets."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        # INVALID TRY 1: malformed target text -> form error, no targets created.
        response = self.client.post(
            PATH, data={"form1": "", "target": self.random_lower_string()}
        )
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        assert len(form1.errors) == 1
        assert TargetVertex.objects.filter(fake=False, ruin=False).count() == 0
        # INVALID TRY 2: broken off_troops -> redirect back to detail, still no targets.
        outline.off_troops = self.random_lower_string()
        outline.save()
        response = self.client.post(PATH, data={"form1": "", "target": "200|200:0:0"})
        REDIRECT = reverse("base:planer_detail", args=[outline.pk])
        assert response.status_code == 302
        assert response.url == REDIRECT
        assert TargetVertex.objects.filter(fake=False, ruin=False).count() == 0
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        # VALID TRY 1: two valid lines create two real targets.
        response = self.client.post(
            PATH, data={"form1": "", "target": "200|200:0:0\r\n200|200:0:0"}
        )
        assert response.status_code == 302
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert TargetVertex.objects.filter(fake=False, ruin=False).count() == 2
        TargetVertex.objects.all().delete()
    def test_planer_initial_form___302_test_form1_fake(self):
        """Form1 in ?t=fake mode: same validation flow, created targets are fakes."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk]) + "?t=fake"
        self.login_me()
        # INVALID TRY 1: malformed target text -> form error, no fake targets.
        response = self.client.post(
            PATH, data={"form1": "", "target": self.random_lower_string()}
        )
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        assert len(form1.errors) == 1
        assert TargetVertex.objects.filter(fake=True, ruin=False).count() == 0
        # INVALID TRY 2: broken off_troops -> redirect back to detail, still nothing.
        outline.off_troops = self.random_lower_string()
        outline.save()
        response = self.client.post(PATH, data={"form1": "", "target": "200|200:0:0"})
        REDIRECT = reverse("base:planer_detail", args=[outline.pk])
        assert response.status_code == 302
        assert response.url == REDIRECT
        assert TargetVertex.objects.filter(fake=True, ruin=False).count() == 0
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        # VALID TRY 1: two valid lines create two fake targets.
        response = self.client.post(
            PATH, data={"form1": "", "target": "200|200:0:0\r\n200|200:0:0"}
        )
        assert response.status_code == 302
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert TargetVertex.objects.filter(fake=True, ruin=False).count() == 2
        TargetVertex.objects.all().delete()
    def test_planer_initial_form___302_test_form1_ruin(self):
        """Form1 in ?t=ruin mode: same validation flow, created targets are ruins."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk]) + "?t=ruin"
        self.login_me()
        # INVALID TRY 1: malformed target text -> form error, no ruin targets.
        response = self.client.post(
            PATH, data={"form1": "", "target": self.random_lower_string()}
        )
        assert response.status_code == 200
        form1: forms.InitialOutlineForm = response.context["form1"]
        assert len(form1.errors) == 1
        assert TargetVertex.objects.filter(fake=False, ruin=True).count() == 0
        # INVALID TRY 2: broken off_troops -> redirect back to detail, still nothing.
        outline.off_troops = self.random_lower_string()
        outline.save()
        response = self.client.post(PATH, data={"form1": "", "target": "200|200:0:0"})
        REDIRECT = reverse("base:planer_detail", args=[outline.pk])
        assert response.status_code == 302
        assert response.url == REDIRECT
        assert TargetVertex.objects.filter(fake=False, ruin=True).count() == 0
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        # VALID TRY 1: two valid lines create two ruin targets.
        response = self.client.post(
            PATH, data={"form1": "", "target": "200|200:0:0\r\n200|200:0:0"}
        )
        assert response.status_code == 302
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert TargetVertex.objects.filter(fake=False, ruin=True).count() == 2
        TargetVertex.objects.all().delete()
    def test_planer_initial_form___302_test_form2(self):
        """Form2 saves distance/off limits and fills the availability tables."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = self.TEST_WORLD_DATA
        outline.save()
        self.create_target_on_test_world(outline)
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        response = self.client.post(
            PATH,
            data={
                "form2": "",
                "initial_outline_min_off": 15000,
                "initial_outline_max_off": 28000,
                "initial_outline_front_dist": 90,
                "initial_outline_maximum_front_dist": 115,
                "initial_outline_target_dist": 100,
                "initial_outline_excluded_coords": "250|250 251|251",
            },
        )
        assert response.status_code == 302
        assert response.url == PATH + "?t=real"
        outline.refresh_from_db()
        assert outline.initial_outline_min_off == 15000
        assert outline.initial_outline_front_dist == 90
        assert outline.initial_outline_maximum_front_dist == 115
        assert outline.initial_outline_target_dist == 100
        assert outline.initial_outline_excluded_coords == "250|250 251|251"
        # also table is filled correctly
        assert outline.avaiable_offs == [50, 13, 18, 19]
        assert outline.avaiable_nobles == [60, 46, 14, 0]
        assert outline.avaiable_offs_near == [20, 13, 7, 0]
        assert outline.avaiable_nobles_near == [60, 46, 14, 0]
        assert outline.avaiable_ruins == 1850
        assert WeightMaximum.objects.filter(too_far_away=True).count() == 19
        assert WeightMaximum.objects.filter(first_line=True).count() == 13
    def test_planer_initial_form___302_test_form3(self):
        """Form3 saves the outline date and redirects back to the real tab."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        date = timezone.localdate()
        response = self.client.post(
            PATH,
            data={
                "form3": "",
                "date": date,
            },
        )
        assert response.status_code == 302
        assert response.url == PATH + "?t=real"
        outline.refresh_from_db()
        assert outline.date == date
    def test_planer_initial_form___302_test_form4(self):
        """Form4 saves mode settings on the outline and propagates them to targets and weight maxima."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        self.create_target_on_test_world(outline)
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        mode_off = "far"
        mode_noble = "far"
        mode_division = "separatly"
        mode_guide = "single"
        mode_split = "together"
        initial_outline_fake_limit = self.random_integer(maximum=15)
        initial_outline_fake_mode = "all"
        response = self.client.post(
            PATH,
            data={
                "form4": "",
                "mode_off": mode_off,
                "mode_noble": mode_noble,
                "mode_division": mode_division,
                "mode_guide": mode_guide,
                "mode_split": mode_split,
                "initial_outline_fake_limit": initial_outline_fake_limit,
                "initial_outline_fake_mode": initial_outline_fake_mode,
            },
        )
        assert response.status_code == 302
        assert response.url == PATH + "?t=real"
        outline.refresh_from_db()
        assert outline.mode_off == mode_off
        assert outline.mode_noble == mode_noble
        assert outline.mode_division == mode_division
        assert outline.mode_guide == mode_guide
        assert outline.mode_split == mode_split
        assert outline.initial_outline_fake_limit == initial_outline_fake_limit
        assert outline.initial_outline_fake_mode == initial_outline_fake_mode
        # Mode changes are propagated to already-existing targets...
        target: TargetVertex = TargetVertex.objects.get(target="200|200")
        assert target.mode_off == mode_off
        assert target.mode_noble == mode_noble
        assert target.mode_division == mode_division
        assert target.mode_guide == mode_guide
        # ...and the fake limit to the weight maxima.
        weight_max: WeightMaximum = WeightMaximum.objects.get(start="102|102")
        assert weight_max.fake_limit == initial_outline_fake_limit
    def test_planer_initial_form___200_test_form5(self):
        """Form5 saves the night-bonus checkbox and the two entry hours."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        self.create_target_on_test_world(outline)
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        # Checkbox is posted as "on" and must be stored as boolean True.
        night_bonus = "on"
        enter_t1 = self.random_integer(0, 10)
        enter_t2 = self.random_integer(10, 20)
        response = self.client.post(
            PATH,
            data={
                "form5": "",
                "night_bonus": night_bonus,
                "enter_t1": enter_t1,
                "enter_t2": enter_t2,
            },
        )
        assert response.status_code == 302
        assert response.url == PATH + "?t=real"
        outline.refresh_from_db()
        assert outline.night_bonus is True
        assert outline.enter_t1 == enter_t1
        assert outline.enter_t2 == enter_t2
    def test_planer_initial_form___200_test_form6(self):
        """Form6 saves the ruining (catapult) settings on the outline."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.save()
        self.create_target_on_test_world(outline)
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        initial_outline_off_left_catapult = 200
        initial_outline_catapult_default = 75
        initial_outline_average_ruining_points = "medium"
        response = self.client.post(
            PATH,
            data={
                "form6": "",
                "initial_outline_off_left_catapult": initial_outline_off_left_catapult,
                "initial_outline_catapult_default": initial_outline_catapult_default,
                "initial_outline_average_ruining_points": initial_outline_average_ruining_points,
            },
        )
        assert response.status_code == 302
        assert response.url == PATH + "?t=real"
        outline.refresh_from_db()
        assert (
            outline.initial_outline_off_left_catapult
            == initial_outline_off_left_catapult
        )
        assert (
            outline.initial_outline_catapult_default == initial_outline_catapult_default
        )
        assert (
            outline.initial_outline_average_ruining_points
            == initial_outline_average_ruining_points
        )
    def test_planer_initial_form___200_test_form7(self):
        """Form7 overwrites the morale settings stored on the outline."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        # Start from different values to prove the POST actually changes them.
        outline.morale_on_targets_greater_than = 50
        outline.morale_on = True
        outline.save()
        self.create_target_on_test_world(outline)
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        morale_on_targets_greater_than = 90
        morale_on = False
        response = self.client.post(
            PATH,
            data={
                "form7": "",
                "morale_on_targets_greater_than": morale_on_targets_greater_than,
                "morale_on": morale_on,
            },
        )
        assert response.status_code == 302
        assert response.url == PATH + "?t=real"
        outline.refresh_from_db()
        assert outline.morale_on == morale_on
        assert outline.morale_on_targets_greater_than == morale_on_targets_greater_than
    def test_planer_initial_form___200_ok_when_ally_player_not_updated(self):
        """Page still renders when an ally player record was deleted from the DB."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.morale_on = True
        outline.save()
        # weight maxs must be already created
        make_outline = MakeOutline(outline=outline)
        make_outline()
        self.create_target_on_test_world(outline=outline, many=1, off=5, noble=5)
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        # Simulate a player missing after a world update.
        Player.objects.filter(name="AllyPlayer0").delete()
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 200
    def test_planer_initial_form___200_ok_tab_when_target_player_not_updated(self):
        """Tab still renders when a target's player record was deleted from the DB."""
        outline = self.get_outline(test_world=True)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.morale_on = True
        outline.save()
        # weight maxs must be already created
        make_outline = MakeOutline(outline=outline)
        make_outline()
        self.create_target_on_test_world(outline=outline, many=1, off=5, noble=5)
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        # Simulate a player missing after a world update.
        Player.objects.filter(name="AllyPlayer3").delete()
        self.login_me()
        response = self.client.get(PATH)
        assert response.status_code == 200
    def test_planer_initial_form___200_correct_processing_off_troops_changes(self):
        """Weight models are rebuilt only when the off_troops hash actually changes."""
        outline = self.get_outline(test_world=True)
        outline.create_stats()
        stats: Stats = Stats.objects.get(outline=outline)
        outline.off_troops = "102|102,100,100,7002,0,100,2802,0,0,350,100,0,0,0,0,0,"
        outline.off_troops_hash = outline.get_or_set_off_troops_hash()
        # Pre-fill the availability fields to prove the view resets them.
        outline.avaiable_offs = [1, 2, 3, 4]
        outline.avaiable_offs_near = [1, 2, 3, 4]
        outline.avaiable_nobles = [4, 4, 4]
        outline.avaiable_nobles_near = [4, 4, 4]
        outline.avaiable_ruins = 1555
        outline.save()
        PATH = reverse("base:planer_initial_form", args=[outline.pk])
        self.login_me()
        # this should create one weight_max from off_troops
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert WeightMaximum.objects.count() == 1
        outline.refresh_from_db()
        stats.refresh_from_db()
        assert outline.avaiable_offs == []
        assert outline.avaiable_offs_near == []
        assert outline.avaiable_nobles == []
        assert outline.avaiable_nobles_near == []
        assert outline.avaiable_ruins is None
        assert stats.troops_refreshed == 1
        assert outline.off_troops_weightmodels_hash == outline.off_troops_hash
        # this should not do anything to weight models and reset `available` fields
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert WeightMaximum.objects.count() == 1
        outline.refresh_from_db()
        stats.refresh_from_db()
        assert stats.troops_refreshed == 1
        assert outline.off_troops_weightmodels_hash == outline.off_troops_hash
        # after just changing off troops weight models should be recreated
        outline.off_troops = self.TEST_WORLD_DATA
        outline.off_troops_hash = outline.get_or_set_off_troops_hash(
            force_recalculate=True
        )
        outline.save()
        response = self.client.get(PATH)
        assert response.status_code == 200
        assert WeightMaximum.objects.count() == 50
        outline.refresh_from_db()
        stats.refresh_from_db()
        assert stats.troops_refreshed == 2
        assert outline.off_troops_weightmodels_hash == outline.off_troops_hash
| 42.272175
| 103
| 0.657689
|
4a0e85832047f73106bd79717d91f80b339c6e44
| 3,654
|
py
|
Python
|
cmlkit/model.py
|
sirmarcel/cmlk
|
e099bf3e255b60675e8e1b3ad29db750dbd6faf3
|
[
"MIT"
] | 24
|
2018-06-22T17:31:20.000Z
|
2022-03-29T14:52:49.000Z
|
cmlkit/model.py
|
sirmarcel/cmlk
|
e099bf3e255b60675e8e1b3ad29db750dbd6faf3
|
[
"MIT"
] | 8
|
2019-06-06T14:51:57.000Z
|
2021-06-30T19:43:13.000Z
|
cmlkit/model.py
|
sirmarcel/cmlk
|
e099bf3e255b60675e8e1b3ad29db750dbd6faf3
|
[
"MIT"
] | 5
|
2018-07-30T18:53:08.000Z
|
2022-02-10T09:12:15.000Z
|
"""Model class.
A Model is a combination of representation and regression method,
and can be regarded as a rudimentary pipeline.
It essentially wraps these two components, and takes care
of passing the computed representation around to the regressor.
The only additional task a Model has is to make sure the property
to be predicted is converted appropriately for the regression method.
For instance, it is occasionally better to predict a target quantity
normalised by the number of atoms in a structure, or the convention
in a community demands per-atom predictions, but the model is better
suited to predict quantities for the entire system.
The `cmlkit` convention is that all properties are stored scaled
with the number of atoms in the system. This is arbitrary, but it
makes the conversion a bit more easy.
An alternative approach is to always use `None` as `per`, in which
case no conversion is ever done!
"""
from cmlkit.engine import Component
from cmlkit import from_config
from cmlkit.representation import Composed
from .utility import convert, unconvert
class Model(Component):
    """Model class.

    When training, automatically computes a representation,
    and then trains a regression method. When predicting,
    automatically computes the representation for that,
    and then predicts using the regression method.

    Attributes:
        representation: Representation instance.
        regression: Regression method.
        per: Preferred units/scaling for target property.
            (Popular choices: "atom", "cell", "mol")
            (see `conversion.py` for more info.)
    """

    kind = "model"

    def __init__(self, representation, regression, per=None, context=None):
        """Create model.

        Args:
            representation: Representation instance, or config for one, or
                a list with any of the above, in which case a Composed representation
                is automatically generated.
            regression: Regression method or config of one.
            per: Optional, String (or None) specifying per what the regression should
                internally predict. Default is to not convert.
            context: Optional context dict. A fresh empty dict is used when
                omitted; previously a mutable default (`context={}`) was shared
                across all instances, a classic Python pitfall.
        """
        super().__init__(context={} if context is None else context)

        # Allowing myself ONE piece of "magic"!
        if isinstance(representation, (list, tuple)):
            self.representation = Composed(*representation, context=self.context)
        else:
            self.representation = from_config(representation, context=self.context)

        self.regression = from_config(regression, context=self.context)
        self.per = per

    def _get_config(self):
        # Serialize the nested components so the model can be round-tripped
        # through from_config.
        return {
            "representation": self.representation.get_config(),
            "regression": self.regression.get_config(),
            "per": self.per,
        }

    def train(self, data, target):
        """Train model.

        Args:
            data: Dataset instance
            target: Name of target property,
                must be present in data.
        """
        x = self.representation(data)
        # Convert the raw property into the model's internal `per` scaling.
        y = data.pp(target, self.per)

        self.regression.train(x=x, y=y)

        return self  # return trained Model

    def predict(self, data, per=None):
        """Predict with model.

        Args:
            data: Dataset instance
            per: Optional, String specifying in which units
                the prediction should be made.

        Returns:
            ndarray with predictions.
        """
        z = self.representation(data)
        pred = self.regression.predict(z)
        # Undo the model-internal scaling, then convert to the requested one.
        pred = unconvert(data, pred, from_per=self.per)

        return convert(data, pred, per)
| 32.336283
| 85
| 0.664751
|
4a0e859535215247f61281d9224c50c708dcd801
| 15,270
|
py
|
Python
|
scripts/validate_unwanted_patterns.py
|
Abishek15592/pandas
|
6929e262dd22ac35baabf87a5236d451255fb66d
|
[
"BSD-3-Clause"
] | 1
|
2020-09-15T01:39:01.000Z
|
2020-09-15T01:39:01.000Z
|
scripts/validate_unwanted_patterns.py
|
Abishek15592/pandas
|
6929e262dd22ac35baabf87a5236d451255fb66d
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/validate_unwanted_patterns.py
|
Abishek15592/pandas
|
6929e262dd22ac35baabf87a5236d451255fb66d
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
"""
Unwanted patterns test cases.
The reason this file exists despite the fact we already have
`ci/code_checks.sh`,
(see https://github.com/pandas-dev/pandas/blob/master/ci/code_checks.sh)
is that some of the test cases are more complex/impossible to validate via regex.
So this file is somewhat an extension to `ci/code_checks.sh`
"""
import argparse
import ast
import os
import sys
import token
import tokenize
from typing import IO, Callable, FrozenSet, Iterable, List, Set, Tuple
# Private names that are deliberately allowed to be imported across modules —
# mostly shared docstring templates and other intentional internal exports.
PRIVATE_IMPORTS_TO_IGNORE: Set[str] = {
    "_extension_array_shared_docs",
    "_index_shared_docs",
    "_interval_shared_docs",
    "_merge_doc",
    "_shared_docs",
    "_apply_docs",
    "_new_Index",
    "_new_PeriodIndex",
    "_doc_template",
    "_agg_template",
    "_pipe_template",
    "_get_version",
    "__main__",
    "_transform_template",
    "_arith_doc_FRAME",
    "_flex_comp_doc_FRAME",
    "_make_flex_doc",
    "_op_descriptions",
    "_IntegerDtype",
    "_use_inf_as_na",
    "_get_plot_backend",
    "_matplotlib",
    "_arrow_utils",
    "_registry",
    "_get_offset",  # TODO: remove after get_offset deprecation enforced
    "_test_parse_iso8601",
    "_json_normalize",  # TODO: remove after deprecation is enforced
    "_testing",
    "_test_decorators",
    "__version__",  # check np.__version__ in compat.numpy.function
}
def _get_literal_string_prefix_len(token_string: str) -> int:
"""
Getting the length of the literal string prefix.
Parameters
----------
token_string : str
String to check.
Returns
-------
int
Length of the literal string prefix.
Examples
--------
>>> example_string = "'Hello world'"
>>> _get_literal_string_prefix_len(example_string)
0
>>> example_string = "r'Hello world'"
>>> _get_literal_string_prefix_len(example_string)
1
"""
try:
return min(
token_string.find(quote)
for quote in (r"'", r'"')
if token_string.find(quote) >= 0
)
except ValueError:
return 0
def bare_pytest_raises(file_obj: IO[str]) -> Iterable[Tuple[int, str]]:
    """
    Test case for bare pytest raises.

    For example, this is wrong:

    >>> with pytest.raises(ValueError):
    ...     # Some code that raises ValueError

    And this is what we want instead:

    >>> with pytest.raises(ValueError, match="foo"):
    ...     # Some code that raises ValueError

    Parameters
    ----------
    file_obj : IO
        File-like object containing the Python code to validate.

    Yields
    ------
    line_number : int
        Line number of the bare ``pytest.raises`` call.
    msg : str
        Explanation of the error.

    Notes
    -----
    GH #23922
    """
    error_message = (
        "Bare pytests raise have been found. "
        "Please pass in the argument 'match' as well the exception."
    )
    syntax_tree = ast.parse(file_obj.read())
    for node in ast.walk(syntax_tree):
        if not isinstance(node, ast.Call):
            continue
        # Only calls of the literal form `pytest.raises(...)` are of interest;
        # anything else lacks the attribute chain and raises AttributeError.
        try:
            is_pytest_raises = (
                node.func.value.id == "pytest" and node.func.attr == "raises"
            )
        except AttributeError:
            continue
        if not is_pytest_raises:
            continue
        # A call with no keywords at all, or with keywords but no `match=`,
        # counts as "bare".
        if not any(keyword.arg == "match" for keyword in node.keywords):
            yield (node.lineno, error_message)
PRIVATE_FUNCTIONS_ALLOWED = {"sys._getframe"} # no known alternative
def private_function_across_module(file_obj: IO[str]) -> Iterable[Tuple[int, str]]:
    """
    Check that no private function of an imported module is called.

    Parameters
    ----------
    file_obj : IO
        File-like object containing the Python code to validate.

    Yields
    ------
    line_number : int
        Line number of the private function that is used across modules.
    msg : str
        Explanation of the error.
    """
    syntax_tree = ast.parse(file_obj.read())

    imported_modules: Set[str] = set()
    for node in ast.walk(syntax_tree):
        # Record every imported module under the name it is usable as
        # (the alias when `import x as y` is used).
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            for alias in node.names:
                imported_modules.add(alias.asname or alias.name)

        if not isinstance(node, ast.Call):
            continue
        # Only `module.function(...)` style calls carry this attribute chain.
        try:
            module_name = node.func.value.id
            function_name = node.func.attr
        except AttributeError:
            continue

        # Exception section #

        # (Debatable) Class case
        if module_name[0].isupper():
            continue
        # (Debatable) Dunder methods case
        if function_name.startswith("__") and function_name.endswith("__"):
            continue
        if f"{module_name}.{function_name}" in PRIVATE_FUNCTIONS_ALLOWED:
            continue

        if module_name in imported_modules and function_name.startswith("_"):
            yield (node.lineno, f"Private function '{module_name}.{function_name}'")
def private_import_across_module(file_obj: IO[str]) -> Iterable[Tuple[int, str]]:
    """
    Check that no private function is imported across modules.

    Parameters
    ----------
    file_obj : IO
        File-like object containing the Python code to validate.

    Yields
    ------
    line_number : int
        Line number of the import statement that imports the private function.
    msg : str
        Explanation of the error.
    """
    tree = ast.parse(file_obj.read())
    for node in ast.walk(tree):
        if not isinstance(node, (ast.Import, ast.ImportFrom)):
            continue
        for alias in node.names:
            # Only the last dotted component decides whether the import
            # targets a private name.
            leaf_name = alias.name.split(".")[-1]
            if leaf_name in PRIVATE_IMPORTS_TO_IGNORE:
                continue
            if leaf_name.startswith("_"):
                yield (node.lineno, f"Import of internal function {repr(leaf_name)}")
def strings_to_concatenate(file_obj: IO[str]) -> Iterable[Tuple[int, str]]:
    """
    Find implicitly concatenated string literals on a single line.

    This test case is necessary because 'Black' (https://github.com/psf/black)
    will reformat a string split over multiple lines, e.g. turning:

    >>> foo = (
    ...     "bar "
    ...     "baz"
    ... )

    into ``foo = ("bar " "baz")``, and does not consider the result an issue
    (see https://github.com/psf/black/issues/1051), so we check it here.

    Parameters
    ----------
    file_obj : IO
        File-like object containing the Python code to validate.

    Yields
    ------
    line_number : int
        Line number of unconcatenated string.
    msg : str
        Explanation of the error.

    Notes
    -----
    GH #30454
    """
    all_tokens: List = list(tokenize.generate_tokens(file_obj.readline))
    # Two STRING tokens in a row means implicit literal concatenation.
    for left, right in zip(all_tokens, all_tokens[1:]):
        if left.type == token.STRING and right.type == token.STRING:
            yield (
                left.start[0],
                "String unnecessarily split in two by black. "
                "Please merge them manually.",
            )
def strings_with_wrong_placed_whitespace(
    file_obj: IO[str],
) -> Iterable[Tuple[int, str]]:
    """
    Test case for leading spaces in concatenated strings.

    For example:

    >>> rule = (
    ...     "We want the space at the end of the line, "
    ...     "not at the beginning"
    ... )

    Instead of:

    >>> rule = (
    ...     "We want the space at the end of the line,"
    ...     " not at the beginning"
    ... )

    Parameters
    ----------
    file_obj : IO
        File-like object containing the Python code to validate.

    Yields
    ------
    line_number : int
        Line number of the wrongly concatenated string.
    msg : str
        Explanation of the error.
    """

    def has_wrong_whitespace(first_line: str, second_line: str) -> bool:
        """
        Check if the two lines match the unwanted pattern.

        Parameters
        ----------
        first_line : str
            First line to check.
        second_line : str
            Second line to check.

        Returns
        -------
        bool
            True if the two received strings match an unwanted pattern.

        Notes
        -----
        The unwanted pattern that we are trying to catch is if the spaces in
        a string that is concatenated over multiple lines are placed at the
        end of each string, unless this string is ending with a
        newline character (\n).
        """
        # A literal backslash-n at the end of the first part means the author
        # deliberately broke the line there; any placement is then accepted.
        if first_line.endswith(r"\n"):
            return False
        # NOTE(review): as transcribed here these comparisons use a single
        # space, which would make the `return True` branch below unreachable
        # (a leading single space already returns False here). The upstream
        # pandas script compares two-space literals in the next two branches
        # — confirm the literal widths against the original repository.
        elif first_line.startswith(" ") or second_line.startswith(" "):
            return False
        elif first_line.endswith(" ") or second_line.endswith(" "):
            return False
        elif (not first_line.endswith(" ")) and second_line.startswith(" "):
            return True
        return False

    tokens: List = list(tokenize.generate_tokens(file_obj.readline))
    # A STRING, a non-logical newline (NL), then another STRING is a string
    # literal implicitly concatenated across physical lines.
    for first_token, second_token, third_token in zip(tokens, tokens[1:], tokens[2:]):
        # Checking if we are in a block of concatenated strings
        if (
            first_token.type == third_token.type == token.STRING
            and second_token.type == token.NL
        ):
            # Strip the quotes together with any literal prefix (r, b, f, ...).
            # NOTE(review): the `+ 1 : -1` slice assumes single-character
            # quote delimiters; triple-quoted literals would keep two quote
            # characters — confirm that is acceptable upstream.
            first_string: str = first_token.string[
                _get_literal_string_prefix_len(first_token.string) + 1 : -1
            ]
            second_string: str = third_token.string[
                _get_literal_string_prefix_len(third_token.string) + 1 : -1
            ]
            if has_wrong_whitespace(first_string, second_string):
                yield (
                    third_token.start[0],
                    (
                        "String has a space at the beginning instead "
                        "of the end of the previous string."
                    ),
                )
def main(
    function: Callable[[IO[str]], Iterable[Tuple[int, str]]],
    source_path: str,
    output_format: str,
    file_extensions_to_check: str,
    excluded_file_paths: str,
) -> bool:
    """
    Main entry point of the script.

    Parameters
    ----------
    function : Callable
        Function to execute for the specified validation type.
    source_path : str
        Source path representing path to a file/directory.
    output_format : str
        Output format of the error message.
    file_extensions_to_check : str
        Comma separated values of what file extensions to check.
    excluded_file_paths : str
        Comma separated values of what file paths to exclude during the check.

    Returns
    -------
    bool
        True if any patterns related to the given function were found.

    Raises
    ------
    ValueError
        If the `source_path` is not pointing to an existing file/directory.
    """

    def check_file(file_path: str) -> bool:
        # Run `function` over a single file, print each finding, and report
        # whether anything was found (shared by the file and directory paths).
        failed = False
        with open(file_path, "r") as file_obj:
            for line_number, msg in function(file_obj):
                failed = True
                print(
                    output_format.format(
                        source_path=file_path, line_number=line_number, msg=msg
                    )
                )
        return failed

    if not os.path.exists(source_path):
        raise ValueError("Please enter a valid path, pointing to a file/directory.")

    extensions: FrozenSet[str] = frozenset(file_extensions_to_check.split(","))
    ignored_paths: FrozenSet[str] = frozenset(excluded_file_paths.split(","))

    is_failed: bool = False
    if os.path.isfile(source_path):
        is_failed = check_file(source_path)

    # os.walk yields nothing for a plain file, so this is a no-op in that case.
    for subdir, _, files in os.walk(source_path):
        if any(path in subdir for path in ignored_paths):
            continue
        for file_name in files:
            if not any(file_name.endswith(ext) for ext in extensions):
                continue
            if check_file(os.path.join(subdir, file_name)):
                is_failed = True
    return is_failed
if __name__ == "__main__":
    # Names accepted by --validation-type; each must match a generator
    # function defined above in this module (resolved via globals() below).
    available_validation_types: List[str] = [
        "bare_pytest_raises",
        "private_function_across_module",
        "private_import_across_module",
        "strings_to_concatenate",
        "strings_with_wrong_placed_whitespace",
    ]
    parser = argparse.ArgumentParser(description="Unwanted patterns checker.")
    parser.add_argument(
        "path", nargs="?", default=".", help="Source path of file/directory to check."
    )
    parser.add_argument(
        "--format",
        "-f",
        default="{source_path}:{line_number}:{msg}",
        help="Output format of the error message.",
    )
    parser.add_argument(
        "--validation-type",
        "-vt",
        choices=available_validation_types,
        required=True,
        help="Validation test case to check.",
    )
    parser.add_argument(
        "--included-file-extensions",
        default="py,pyx,pxd,pxi",
        help="Comma separated file extensions to check.",
    )
    parser.add_argument(
        "--excluded-file-paths",
        default="asv_bench/env",
        help="Comma separated file paths to exclude.",
    )
    args = parser.parse_args()
    # Exit status is main()'s boolean: truthy (1) when findings exist, so the
    # script can gate CI. argparse's `choices` guarantees the name resolves.
    sys.exit(
        main(
            function=globals().get(args.validation_type),  # type: ignore
            source_path=args.path,
            output_format=args.format,
            file_extensions_to_check=args.included_file_extensions,
            excluded_file_paths=args.excluded_file_paths,
        )
    )
| 28.811321
| 87
| 0.584676
|
4a0e869c98e559b927116894b415a7de5b42e91d
| 16,638
|
py
|
Python
|
rhasspywake_snowboy_hermes/__init__.py
|
rhasspy/rhasspy-wake-snowboy-hermes
|
72d970350f4ba7a764427724c015d1cbb18e6c67
|
[
"MIT"
] | 1
|
2020-05-27T02:20:19.000Z
|
2020-05-27T02:20:19.000Z
|
rhasspywake_snowboy_hermes/__init__.py
|
rhasspy/rhasspy-wake-snowboy-hermes
|
72d970350f4ba7a764427724c015d1cbb18e6c67
|
[
"MIT"
] | 2
|
2020-04-27T14:54:19.000Z
|
2021-06-05T19:36:37.000Z
|
rhasspywake_snowboy_hermes/__init__.py
|
rhasspy/rhasspy-wake-snowboy-hermes
|
72d970350f4ba7a764427724c015d1cbb18e6c67
|
[
"MIT"
] | 4
|
2020-07-25T14:01:24.000Z
|
2021-04-29T21:08:17.000Z
|
"""Hermes MQTT server for Rhasspy wakeword with snowboy"""
import asyncio
import io
import logging
import queue
import socket
import threading
import typing
import wave
from dataclasses import dataclass, field
from pathlib import Path
from rhasspyhermes.audioserver import AudioFrame
from rhasspyhermes.base import Message
from rhasspyhermes.client import GeneratorType, HermesClient, TopicArgs
from rhasspyhermes.wake import (
GetHotwords,
Hotword,
HotwordDetected,
HotwordError,
Hotwords,
HotwordToggleOff,
HotwordToggleOn,
HotwordToggleReason,
)
from snowboy import snowboydecoder, snowboydetect
# Byte length of a canonical 44-byte RIFF/WAVE header; used to adjust the
# expected UDP datagram size in udp_thread_proc.
WAV_HEADER_BYTES = 44
# Module-level logger for this wake-word service.
_LOGGER = logging.getLogger("rhasspywake_snowboy_hermes")
# -----------------------------------------------------------------------------
@dataclass
class SiteInfo:
    """Self-contained information for a single site"""

    # Hermes site id this state belongs to.
    site_id: str
    # False while the hotword service is toggled off for this site.
    enabled: bool = True
    # Reasons the service was disabled; the site is only re-enabled once this
    # set is empty again (see on_message_blocking).
    disabled_reasons: typing.Set[str] = field(default_factory=set)
    # Background thread running detection_thread_proc for this site.
    detection_thread: typing.Optional[threading.Thread] = None
    # Carry-over bytes between frames so detectors always see chunk_size bytes.
    audio_buffer: bytes = bytes()
    # Ensures "Receiving audio" is only logged once per stream.
    first_audio: bool = True
    # Model ids (model file stems), kept parallel to `detectors`.
    model_ids: typing.List[str] = field(default_factory=list)
    # Loaded snowboy detector instances, one per configured model.
    detectors: typing.List[snowboydetect.SnowboyDetect] = field(default_factory=list)
    # Queue of (bytes, is_raw); (None, None) is the shutdown sentinel (see stop()).
    wav_queue: "queue.Queue[typing.Tuple[bytes, bool]]" = field(
        default_factory=queue.Queue
    )
@dataclass
class SnowboyModel:
    """Settings for a single snowboy wake-word model."""

    # Path to the .umdl/.pmdl model file.
    model_path: Path
    # Comma-separated sensitivity string, e.g. "0.5" or "0.5,0.5".
    sensitivity: str = "0.5"
    audio_gain: float = 1.0
    apply_frontend: bool = False

    def float_sensitivity(self) -> float:
        """Return the first comma-separated sensitivity value as a float."""
        # "0.5,0.5" -> 0.5
        first, _, _ = self.sensitivity.partition(",")
        return float(first)
# -----------------------------------------------------------------------------
class WakeHermesMqtt(HermesClient):
    """Hermes MQTT server for Rhasspy wakeword with snowboy."""

    def __init__(
        self,
        client,
        models: typing.List[SnowboyModel],
        wakeword_ids: typing.List[str],
        model_dirs: typing.Optional[typing.List[Path]] = None,
        site_ids: typing.Optional[typing.List[str]] = None,
        sample_rate: int = 16000,
        sample_width: int = 2,
        channels: int = 1,
        chunk_size: int = 960,
        udp_audio: typing.Optional[typing.List[typing.Tuple[str, int, str]]] = None,
        udp_chunk_size: int = 2048,
        udp_raw_audio: typing.Optional[typing.Iterable[str]] = None,
        udp_forward_mqtt: typing.Optional[typing.Iterable[str]] = None,
        lang: typing.Optional[str] = None,
    ):
        super().__init__(
            "rhasspywake_snowboy_hermes",
            client,
            sample_rate=sample_rate,
            sample_width=sample_width,
            channels=channels,
            site_ids=site_ids,
        )
        self.subscribe(AudioFrame, HotwordToggleOn, HotwordToggleOff, GetHotwords)
        self.models = models
        self.wakeword_ids = wakeword_ids
        self.model_dirs = model_dirs or []
        # Required audio format
        self.sample_rate = sample_rate
        self.sample_width = sample_width
        self.channels = channels
        self.chunk_size = chunk_size
        self.site_info: typing.Dict[str, SiteInfo] = {}
        # Create site information (and a detection thread) for known sites
        for site_id in self.site_ids:
            self._start_site(site_id)
        self.lang = lang
        # Listen for raw audio on UDP too
        self.udp_chunk_size = udp_chunk_size
        # Site ids where UDP audio is raw 16Khz, 16-bit mono PCM chunks instead
        # of WAV chunks.
        self.udp_raw_audio = set(udp_raw_audio or [])
        # Site ids where UDP audio should be forwarded to MQTT after detection.
        self.udp_forward_mqtt = set(udp_forward_mqtt or [])
        if udp_audio:
            for udp_host, udp_port, udp_site_id in udp_audio:
                threading.Thread(
                    target=self.udp_thread_proc,
                    args=(udp_host, udp_port, udp_site_id),
                    daemon=True,
                ).start()

    def _start_site(self, site_id: str) -> SiteInfo:
        """Create site state, start its detection thread, and register it.

        Shared by __init__ (known sites) and handle_audio_frame (sites that
        appear at runtime), which previously duplicated this logic.
        """
        site_info = SiteInfo(site_id=site_id)
        site_info.detection_thread = threading.Thread(
            target=self.detection_thread_proc, daemon=True, args=(site_info,)
        )
        site_info.detection_thread.start()
        self.site_info[site_id] = site_info
        return site_info

    # -------------------------------------------------------------------------

    def load_detectors(self, site_info: SiteInfo):
        """Load snowboy detectors from models"""
        site_info.model_ids = []
        site_info.detectors = []
        for model in self.models:
            assert model.model_path.is_file(), f"Missing {model.model_path}"
            _LOGGER.debug("Loading snowboy model: %s", model)
            detector = snowboydetect.SnowboyDetect(
                snowboydecoder.RESOURCE_FILE.encode(), str(model.model_path).encode()
            )
            detector.SetSensitivity(model.sensitivity.encode())
            detector.SetAudioGain(model.audio_gain)
            detector.ApplyFrontend(model.apply_frontend)
            site_info.detectors.append(detector)
            # Keep model ids parallel to the detectors list
            site_info.model_ids.append(model.model_path.stem)

    # -------------------------------------------------------------------------

    def stop(self):
        """Stop detection threads."""
        _LOGGER.debug("Stopping detection threads...")
        for site_info in self.site_info.values():
            if site_info.detection_thread is not None:
                # (None, None) is the shutdown sentinel for detection_thread_proc
                site_info.wav_queue.put((None, None))
                site_info.detection_thread.join()
                site_info.detection_thread = None
                # NOTE: removed stale `site_info.porcupine = None` assignment —
                # a leftover from the porcupine wake service; SiteInfo declares
                # no such field.
        _LOGGER.debug("Stopped")

    # -------------------------------------------------------------------------

    async def handle_audio_frame(self, wav_bytes: bytes, site_id: str = "default"):
        """Process a single audio frame"""
        site_info = self.site_info.get(site_id)
        if site_info is None:
            # First audio from an unknown site: create its state on demand
            site_info = self._start_site(site_id)
        site_info.wav_queue.put((wav_bytes, False))

    async def handle_detection(
        self, model_index: int, wakeword_id: str, site_info: SiteInfo
    ) -> typing.AsyncIterable[
        typing.Union[typing.Tuple[HotwordDetected, TopicArgs], HotwordError]
    ]:
        """Handle a successful hotword detection"""
        try:
            assert (
                len(site_info.model_ids) > model_index
            ), f"Missing {model_index} in models"
            sensitivity = 0.5
            if model_index < len(self.models):
                sensitivity = self.models[model_index].float_sensitivity()
            yield (
                HotwordDetected(
                    site_id=site_info.site_id,
                    model_id=site_info.model_ids[model_index],
                    current_sensitivity=sensitivity,
                    model_version="",
                    model_type="personal",
                    lang=self.lang,
                ),
                {"wakeword_id": wakeword_id},
            )
        except Exception as e:
            _LOGGER.exception("handle_detection")
            yield HotwordError(
                error=str(e), context=str(model_index), site_id=site_info.site_id
            )

    async def handle_get_hotwords(
        self, get_hotwords: GetHotwords
    ) -> typing.AsyncIterable[typing.Union[Hotwords, HotwordError]]:
        """Report available hotwords"""
        try:
            if self.model_dirs:
                # Add all models from model dirs
                model_paths = []
                for model_dir in self.model_dirs:
                    if not model_dir.is_dir():
                        _LOGGER.warning("Model directory missing: %s", str(model_dir))
                        continue
                    for model_file in model_dir.iterdir():
                        if model_file.is_file() and (
                            model_file.suffix in [".umdl", ".pmdl"]
                        ):
                            model_paths.append(model_file)
            else:
                # Add current model(s) only
                model_paths = [Path(model.model_path) for model in self.models]
            hotword_models: typing.List[Hotword] = []
            for model_path in model_paths:
                # "okay_rhasspy.umdl" -> "okay rhasspy"
                model_words = " ".join(model_path.with_suffix("").name.split("_"))
                model_type = "universal" if model_path.suffix == ".umdl" else "personal"
                hotword_models.append(
                    Hotword(
                        model_id=model_path.name,
                        model_words=model_words,
                        model_type=model_type,
                    )
                )
            yield Hotwords(
                models=hotword_models, id=get_hotwords.id, site_id=get_hotwords.site_id
            )
        except Exception as e:
            _LOGGER.exception("handle_get_hotwords")
            yield HotwordError(
                error=str(e), context=str(get_hotwords), site_id=get_hotwords.site_id
            )

    def detection_thread_proc(self, site_info: SiteInfo):
        """Handle WAV audio chunks for one site (runs in its own thread)."""
        try:
            while True:
                wav_bytes, is_raw = site_info.wav_queue.get()
                if wav_bytes is None:
                    # Shutdown signal
                    break
                if site_info.first_audio:
                    _LOGGER.debug("Receiving audio %s", site_info.site_id)
                    site_info.first_audio = False
                # Lazily load detectors on first audio
                if not site_info.detectors:
                    self.load_detectors(site_info)
                if is_raw:
                    # Raw audio chunks
                    audio_data = wav_bytes
                else:
                    # WAV chunks
                    audio_data = self.maybe_convert_wav(wav_bytes)
                # Add to persistent buffer
                site_info.audio_buffer += audio_data
                # Process in chunks.
                # Any remaining audio data will be kept in buffer.
                while len(site_info.audio_buffer) >= self.chunk_size:
                    chunk = site_info.audio_buffer[: self.chunk_size]
                    site_info.audio_buffer = site_info.audio_buffer[self.chunk_size :]
                    for detector_index, detector in enumerate(site_info.detectors):
                        # Return is:
                        # -2 silence
                        # -1 error
                        #  0 voice
                        #  n index n-1
                        result_index = detector.RunDetection(chunk)
                        if result_index > 0:
                            # Detection
                            if detector_index < len(self.wakeword_ids):
                                wakeword_id = self.wakeword_ids[detector_index]
                            else:
                                wakeword_id = ""
                            if not wakeword_id:
                                if detector_index < len(self.models):
                                    # Use file name
                                    wakeword_id = self.models[
                                        detector_index
                                    ].model_path.stem
                                else:
                                    # Fall back to default
                                    wakeword_id = "default"
                            _LOGGER.debug(
                                "Wake word detected: %s (site_id=%s)",
                                wakeword_id,
                                site_info.site_id,
                            )
                            assert self.loop is not None
                            # Publish from the event loop, not this thread
                            asyncio.run_coroutine_threadsafe(
                                self.publish_all(
                                    self.handle_detection(
                                        detector_index, wakeword_id, site_info=site_info
                                    )
                                ),
                                self.loop,
                            )
        except Exception:
            _LOGGER.exception("detection_thread_proc")

    # -------------------------------------------------------------------------

    def udp_thread_proc(self, host: str, port: int, site_id: str):
        """Handle WAV chunks from UDP socket."""
        try:
            site_info = self.site_info[site_id]
            is_raw_audio = site_id in self.udp_raw_audio
            forward_to_mqtt = site_id in self.udp_forward_mqtt
            udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            udp_socket.bind((host, port))
            _LOGGER.debug(
                "Listening for audio on UDP %s:%s (siteId=%s, raw=%s)",
                host,
                port,
                site_id,
                is_raw_audio,
            )
            chunk_size = self.udp_chunk_size
            if is_raw_audio:
                # NOTE(review): header padding is added for *raw* audio here;
                # confirm this matches the sender's datagram size.
                chunk_size += WAV_HEADER_BYTES
            while True:
                wav_bytes, _ = udp_socket.recvfrom(chunk_size)
                if site_info.enabled:
                    site_info.wav_queue.put((wav_bytes, is_raw_audio))
                elif forward_to_mqtt:
                    # When the wake word service is disabled, ASR should be active
                    if is_raw_audio:
                        # Re-package as WAV chunk and publish to MQTT
                        with io.BytesIO() as wav_buffer:
                            wav_file: wave.Wave_write = wave.open(wav_buffer, "wb")
                            with wav_file:
                                wav_file.setframerate(self.sample_rate)
                                wav_file.setsampwidth(self.sample_width)
                                wav_file.setnchannels(self.channels)
                                wav_file.writeframes(wav_bytes)
                            publish_wav_bytes = wav_buffer.getvalue()
                    else:
                        # Use WAV chunk as-is
                        publish_wav_bytes = wav_bytes
                    self.publish(
                        AudioFrame(wav_bytes=publish_wav_bytes),
                        site_id=site_info.site_id,
                    )
        except Exception:
            _LOGGER.exception("udp_thread_proc")

    # -------------------------------------------------------------------------

    async def on_message_blocking(
        self,
        message: Message,
        site_id: typing.Optional[str] = None,
        session_id: typing.Optional[str] = None,
        topic: typing.Optional[str] = None,
    ) -> GeneratorType:
        """Received message from MQTT broker."""
        # Check enable/disable messages
        site_info = self.site_info.get(site_id) if site_id else None
        if isinstance(message, HotwordToggleOn):
            if site_info:
                if message.reason == HotwordToggleReason.UNKNOWN:
                    # Always enable on unknown
                    site_info.disabled_reasons.clear()
                else:
                    site_info.disabled_reasons.discard(message.reason)
                if site_info.disabled_reasons:
                    _LOGGER.debug("Still disabled: %s", site_info.disabled_reasons)
                else:
                    site_info.enabled = True
                    site_info.first_audio = True
                    _LOGGER.debug("Enabled")
        elif isinstance(message, HotwordToggleOff):
            if site_info:
                site_info.enabled = False
                site_info.disabled_reasons.add(message.reason)
                _LOGGER.debug("Disabled")
        elif isinstance(message, AudioFrame):
            if site_info and site_info.enabled:
                await self.handle_audio_frame(
                    message.wav_bytes, site_id=site_info.site_id
                )
        elif isinstance(message, GetHotwords):
            async for hotword_result in self.handle_get_hotwords(message):
                yield hotword_result
        else:
            _LOGGER.warning("Unexpected message: %s", message)
| 37.138393
| 88
| 0.527888
|
4a0e86ec9769b17eaba5a143c010b2228ac8f1bd
| 10,693
|
py
|
Python
|
netapp/santricity/models/symbol/thermal_sensor.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 5
|
2016-08-23T17:52:22.000Z
|
2019-05-16T08:45:30.000Z
|
netapp/santricity/models/symbol/thermal_sensor.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-11-10T05:30:21.000Z
|
2019-04-05T15:03:37.000Z
|
netapp/santricity/models/symbol/thermal_sensor.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 7
|
2016-08-25T16:11:44.000Z
|
2021-02-22T05:31:25.000Z
|
# coding: utf-8
"""
ThermalSensor.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class ThermalSensor(object):
    """
    Model of a physical thermal sensor as exposed by the SANtricity API.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self):
        """
        ThermalSensor - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # Attribute name -> swagger type name (drives to_dict()).
        self.swagger_types = {
            'thermal_sensor_ref': 'str',  # (required parameter)
            'status': 'str',  # (required parameter)
            'physical_location': 'Location',  # (required parameter)
            'reserved1': 'str',
            'reserved2': 'str',
            'rtr_attributes': 'RTRAttributes',  # (required parameter)
            'repair_policy': 'RepairPolicy',  # (required parameter)
            'id': 'str'
        }
        # Attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'thermal_sensor_ref': 'thermalSensorRef',  # (required parameter)
            'status': 'status',  # (required parameter)
            'physical_location': 'physicalLocation',  # (required parameter)
            'reserved1': 'reserved1',
            'reserved2': 'reserved2',
            'rtr_attributes': 'rtrAttributes',  # (required parameter)
            'repair_policy': 'repairPolicy',  # (required parameter)
            'id': 'id'
        }
        self._thermal_sensor_ref = None
        self._status = None
        self._physical_location = None
        self._reserved1 = None
        self._reserved2 = None
        self._rtr_attributes = None
        self._repair_policy = None
        self._id = None

    @property
    def thermal_sensor_ref(self):
        """
        Gets the thermal_sensor_ref of this ThermalSensor.
        The reference for this physical thermal sensor.

        :return: The thermal_sensor_ref of this ThermalSensor.
        :rtype: str
        :required/optional: required
        """
        return self._thermal_sensor_ref

    @thermal_sensor_ref.setter
    def thermal_sensor_ref(self, thermal_sensor_ref):
        """
        Sets the thermal_sensor_ref of this ThermalSensor.
        The reference for this physical thermal sensor.

        :param thermal_sensor_ref: The thermal_sensor_ref of this ThermalSensor.
        :type: str
        """
        self._thermal_sensor_ref = thermal_sensor_ref

    @property
    def status(self):
        """
        Gets the status of this ThermalSensor.
        The operational status of the thermal sensor.

        :return: The status of this ThermalSensor.
        :rtype: str
        :required/optional: required
        """
        return self._status

    @status.setter
    def status(self, status):
        """
        Sets the status of this ThermalSensor.
        The operational status of the thermal sensor.

        :param status: The status of this ThermalSensor.
        :type: str
        :raises ValueError: If `status` is not one of the allowed values.
        """
        allowed_values = ["optimal", "nominalTempExceed", "maxTempExceed", "removed", "unknown", "__UNDEFINED"]
        if status not in allowed_values:
            raise ValueError(
                "Invalid value for `status`, must be one of {0}"
                .format(allowed_values)
            )
        self._status = status

    @property
    def physical_location(self):
        """
        Gets the physical_location of this ThermalSensor.
        The physical location of the thermal sensor. The parent reference in Location identifies the CRU that physically houses the thermal sensor, and the position field is the parent-relative/like-component relative number of the thermal sensor, starting at one.

        :return: The physical_location of this ThermalSensor.
        :rtype: Location
        :required/optional: required
        """
        return self._physical_location

    @physical_location.setter
    def physical_location(self, physical_location):
        """
        Sets the physical_location of this ThermalSensor.
        The physical location of the thermal sensor. The parent reference in Location identifies the CRU that physically houses the thermal sensor, and the position field is the parent-relative/like-component relative number of the thermal sensor, starting at one.

        :param physical_location: The physical_location of this ThermalSensor.
        :type: Location
        """
        self._physical_location = physical_location

    @property
    def reserved1(self):
        """
        Gets the reserved1 of this ThermalSensor.

        :return: The reserved1 of this ThermalSensor.
        :rtype: str
        :required/optional: optional
        """
        return self._reserved1

    @reserved1.setter
    def reserved1(self, reserved1):
        """
        Sets the reserved1 of this ThermalSensor.

        :param reserved1: The reserved1 of this ThermalSensor.
        :type: str
        """
        self._reserved1 = reserved1

    @property
    def reserved2(self):
        """
        Gets the reserved2 of this ThermalSensor.

        :return: The reserved2 of this ThermalSensor.
        :rtype: str
        :required/optional: optional
        """
        return self._reserved2

    @reserved2.setter
    def reserved2(self, reserved2):
        """
        Sets the reserved2 of this ThermalSensor.

        :param reserved2: The reserved2 of this ThermalSensor.
        :type: str
        """
        self._reserved2 = reserved2

    @property
    def rtr_attributes(self):
        """
        Gets the rtr_attributes of this ThermalSensor.
        The CRU type of the thermal sensor plus its ready-to-remove attributes, which are based on the CRU type

        :return: The rtr_attributes of this ThermalSensor.
        :rtype: RTRAttributes
        :required/optional: required
        """
        return self._rtr_attributes

    @rtr_attributes.setter
    def rtr_attributes(self, rtr_attributes):
        """
        Sets the rtr_attributes of this ThermalSensor.
        The CRU type of the thermal sensor plus its ready-to-remove attributes, which are based on the CRU type

        :param rtr_attributes: The rtr_attributes of this ThermalSensor.
        :type: RTRAttributes
        """
        self._rtr_attributes = rtr_attributes

    @property
    def repair_policy(self):
        """
        Gets the repair_policy of this ThermalSensor.
        The repair policy for the thermal sensor component.

        :return: The repair_policy of this ThermalSensor.
        :rtype: RepairPolicy
        :required/optional: required
        """
        return self._repair_policy

    @repair_policy.setter
    def repair_policy(self, repair_policy):
        """
        Sets the repair_policy of this ThermalSensor.
        The repair policy for the thermal sensor component.

        :param repair_policy: The repair_policy of this ThermalSensor.
        :type: RepairPolicy
        """
        self._repair_policy = repair_policy

    @property
    def id(self):
        """
        Gets the id of this ThermalSensor.

        :return: The id of this ThermalSensor.
        :rtype: str
        :required/optional: optional
        """
        return self._id

    @id.setter
    def id(self, id):
        """
        Sets the id of this ThermalSensor.

        :param id: The id of this ThermalSensor.
        :type: str
        """
        self._id = id

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # dict.items() replaces six.iteritems — identical behavior on
        # Python 2 and 3, and drops the six dependency from this class.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        # Fixed: the former `if self is None` guard was dead code (`self` can
        # never be None) and would have returned a non-str from __repr__.
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Fixed: previously returned None when `other` was None and compared
        # __dict__ against arbitrary types; defer to the other operand for
        # non-ThermalSensor comparands instead.
        if not isinstance(other, ThermalSensor):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| 34.05414
| 844
| 0.627046
|
4a0e875a33527ccbcf6a55b02585e70b1e64fa58
| 350
|
py
|
Python
|
pms-docking-demo/backend/manage.py
|
fish2018/openpms
|
88ca124ba0980aef5dd5474af03209f2dbefdcca
|
[
"Apache-2.0"
] | 40
|
2019-09-12T00:41:22.000Z
|
2022-03-25T03:29:28.000Z
|
pms-docking-demo/backend/manage.py
|
fish2018/openpms
|
88ca124ba0980aef5dd5474af03209f2dbefdcca
|
[
"Apache-2.0"
] | null | null | null |
pms-docking-demo/backend/manage.py
|
fish2018/openpms
|
88ca124ba0980aef5dd5474af03209f2dbefdcca
|
[
"Apache-2.0"
] | 14
|
2019-09-07T11:49:55.000Z
|
2022-03-25T03:36:20.000Z
|
from app.models import db
from app.utils.factory import create_app
# NOTE(review): `Shell` is imported but never used below — candidate for removal.
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand

# Build the Flask application via the project's application factory.
app = create_app()
# Flask-Script manager provides the command-line entry point for the app.
manager = Manager(app)
# compare_type=True lets Alembic also detect column-type changes in migrations.
migrate = Migrate(app, db, compare_type=True)
# Expose Flask-Migrate's command group under the `db` sub-command.
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    manager.run()
| 23.333333
| 50
| 0.745714
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.