index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
45,260 | MulticsYin/MulticsSH | refs/heads/master | /need_install/Django-1.8.17/tests/commands_sql_migrations/tests.py | from __future__ import unicode_literals
from django.apps import apps
from django.core.management import CommandError
from django.core.management.color import no_style
from django.core.management.sql import (
sql_all, sql_create, sql_delete, sql_destroy_indexes, sql_indexes,
)
from django.db import DEFAULT_DB_ALIAS, connections
from django.test import TestCase
class SQLCommandsMigrationsTestCase(TestCase):
    """Tests that apps with migrations can not use sql commands."""

    def _assert_rejected_for_migrated_app(self, sql_function, **kwargs):
        # All sql_* commands must raise CommandError for an app that
        # has migrations; centralize the shared setup and assertion.
        app_config = apps.get_app_config('commands_sql_migrations')
        connection = connections[DEFAULT_DB_ALIAS]
        with self.assertRaises(CommandError):
            sql_function(app_config, no_style(), connection, **kwargs)

    def test_sql_create(self):
        self._assert_rejected_for_migrated_app(sql_create)

    def test_sql_delete(self):
        self._assert_rejected_for_migrated_app(sql_delete,
                                               close_connection=False)

    def test_sql_indexes(self):
        self._assert_rejected_for_migrated_app(sql_indexes)

    def test_sql_destroy_indexes(self):
        self._assert_rejected_for_migrated_app(sql_destroy_indexes)

    def test_sql_all(self):
        self._assert_rejected_for_migrated_app(sql_all)
| {"/home/admin.py": ["/home/models.py"], "/need_install/Django-1.8.17/tests/fixtures_migration/tests.py": ["/need_install/Django-1.8.17/tests/fixtures_migration/models.py"], "/home/views.py": ["/home/models.py"]} |
45,261 | MulticsYin/MulticsSH | refs/heads/master | /need_install/Django-1.8.17/tests/forms_tests/tests/test_regressions.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from forms_tests.models import Cheese
from django.forms import (
CharField, ChoiceField, Form, HiddenInput, IntegerField, ModelForm,
ModelMultipleChoiceField, MultipleChoiceField, RadioSelect, Select,
TextInput,
)
from django.test import TestCase, ignore_warnings
from django.utils import translation
from django.utils.translation import gettext_lazy, ugettext_lazy
class FormsRegressionsTestCase(TestCase):
    """Regression tests guarding against recurrences of earlier form bugs."""

    def test_class(self):
        # Widget attrs passed via a shared dict must appear on every widget.
        extra_attrs = {'class': 'special'}

        class TestForm(Form):
            f1 = CharField(max_length=10, widget=TextInput(attrs=extra_attrs))
            f2 = CharField(widget=TextInput(attrs=extra_attrs))

        self.assertHTMLEqual(TestForm(auto_id=False).as_p(), '<p>F1: <input type="text" class="special" name="f1" maxlength="10" /></p>\n<p>F2: <input type="text" class="special" name="f2" /></p>')

    def test_regression_3600(self):
        # Tests for form i18n.
        # There were some problems with form translations in #3600.
        class SomeForm(Form):
            username = CharField(max_length=10, label=ugettext_lazy('username'))

        f = SomeForm()
        self.assertHTMLEqual(f.as_p(), '<p><label for="id_username">username:</label> <input id="id_username" type="text" name="username" maxlength="10" /></p>')

        # Translations are done at rendering time, so multi-lingual apps can
        # define forms once and render them per-locale.
        with translation.override('de'):
            self.assertHTMLEqual(f.as_p(), '<p><label for="id_username">Benutzername:</label> <input id="id_username" type="text" name="username" maxlength="10" /></p>')
        with translation.override('pl'):
            self.assertHTMLEqual(f.as_p(), '<p><label for="id_username">u\u017cytkownik:</label> <input id="id_username" type="text" name="username" maxlength="10" /></p>')

    def test_regression_5216(self):
        # There was some problems with form translations in #5216.
        class SomeForm(Form):
            field_1 = CharField(max_length=10, label=ugettext_lazy('field_1'))
            field_2 = CharField(max_length=10, label=ugettext_lazy('field_2'), widget=TextInput(attrs={'id': 'field_2_id'}))

        f = SomeForm()
        self.assertHTMLEqual(f['field_1'].label_tag(), '<label for="id_field_1">field_1:</label>')
        self.assertHTMLEqual(f['field_2'].label_tag(), '<label for="field_2_id">field_2:</label>')

        # Unicode decoding problems...
        GENDERS = (('\xc5', 'En tied\xe4'), ('\xf8', 'Mies'), ('\xdf', 'Nainen'))

        class SomeForm(Form):
            somechoice = ChoiceField(choices=GENDERS, widget=RadioSelect(), label='\xc5\xf8\xdf')

        f = SomeForm()
        self.assertHTMLEqual(f.as_p(), '<p><label for="id_somechoice_0">\xc5\xf8\xdf:</label> <ul id="id_somechoice">\n<li><label for="id_somechoice_0"><input type="radio" id="id_somechoice_0" value="\xc5" name="somechoice" /> En tied\xe4</label></li>\n<li><label for="id_somechoice_1"><input type="radio" id="id_somechoice_1" value="\xf8" name="somechoice" /> Mies</label></li>\n<li><label for="id_somechoice_2"><input type="radio" id="id_somechoice_2" value="\xdf" name="somechoice" /> Nainen</label></li>\n</ul></p>')

        # Translated error messages used to be buggy.
        with translation.override('ru'):
            f = SomeForm({})
            self.assertHTMLEqual(f.as_p(), '<ul class="errorlist"><li>\u041e\u0431\u044f\u0437\u0430\u0442\u0435\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043b\u0435.</li></ul>\n<p><label for="id_somechoice_0">\xc5\xf8\xdf:</label> <ul id="id_somechoice">\n<li><label for="id_somechoice_0"><input type="radio" id="id_somechoice_0" value="\xc5" name="somechoice" /> En tied\xe4</label></li>\n<li><label for="id_somechoice_1"><input type="radio" id="id_somechoice_1" value="\xf8" name="somechoice" /> Mies</label></li>\n<li><label for="id_somechoice_2"><input type="radio" id="id_somechoice_2" value="\xdf" name="somechoice" /> Nainen</label></li>\n</ul></p>')

        # Deep copying translated text shouldn't raise an error.
        class CopyForm(Form):
            degree = IntegerField(widget=Select(choices=((1, gettext_lazy('test')),)))

        f = CopyForm()

    @ignore_warnings(category=UnicodeWarning)
    def test_regression_5216_b(self):
        # Testing choice validation with UTF-8 bytestrings as input (these are
        # the Russian abbreviations "мес." and "шт.").
        UNITS = ((b'\xd0\xbc\xd0\xb5\xd1\x81.', b'\xd0\xbc\xd0\xb5\xd1\x81.'),
                 (b'\xd1\x88\xd1\x82.', b'\xd1\x88\xd1\x82.'))
        f = ChoiceField(choices=UNITS)
        self.assertEqual(f.clean('\u0448\u0442.'), '\u0448\u0442.')
        self.assertEqual(f.clean(b'\xd1\x88\xd1\x82.'), '\u0448\u0442.')

    def test_misc(self):
        # There once was a problem with Form fields called "data". Let's make
        # sure that doesn't come back.
        class DataForm(Form):
            data = CharField(max_length=10)

        f = DataForm({'data': 'xyzzy'})
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data, {'data': 'xyzzy'})

        # A form with *only* hidden fields that has errors is going to be
        # very unusual.
        class HiddenForm(Form):
            data = IntegerField(widget=HiddenInput)

        f = HiddenForm({})
        self.assertHTMLEqual(f.as_p(), '<ul class="errorlist nonfield"><li>(Hidden field data) This field is required.</li></ul>\n<p> <input type="hidden" name="data" id="id_data" /></p>')
        self.assertHTMLEqual(f.as_table(), '<tr><td colspan="2"><ul class="errorlist nonfield"><li>(Hidden field data) This field is required.</li></ul><input type="hidden" name="data" id="id_data" /></td></tr>')

    def test_xss_error_messages(self):
        ###################################################
        # Tests for XSS vulnerabilities in error messages #
        ###################################################
        # The forms layer doesn't escape input values directly because error
        # messages might be presented in non-HTML contexts. Instead, the
        # message is just marked for escaping by the template engine. So we'll
        # need to construct a little silly template to trigger the escaping.
        from django.template import Template, Context
        t = Template('{{ form.errors }}')

        class SomeForm(Form):
            field = ChoiceField(choices=[('one', 'One')])

        f = SomeForm({'field': '<script>'})
        # The offending value must come out entity-escaped in the rendering.
        self.assertHTMLEqual(t.render(Context({'form': f})), '<ul class="errorlist"><li>field<ul class="errorlist"><li>Select a valid choice. &lt;script&gt; is not one of the available choices.</li></ul></li></ul>')

        class SomeForm(Form):
            field = MultipleChoiceField(choices=[('one', 'One')])

        f = SomeForm({'field': ['<script>']})
        self.assertHTMLEqual(t.render(Context({'form': f})), '<ul class="errorlist"><li>field<ul class="errorlist"><li>Select a valid choice. &lt;script&gt; is not one of the available choices.</li></ul></li></ul>')

        from forms_tests.models import ChoiceModel

        class SomeForm(Form):
            field = ModelMultipleChoiceField(ChoiceModel.objects.all())

        f = SomeForm({'field': ['<script>']})
        self.assertHTMLEqual(t.render(Context({'form': f})), '<ul class="errorlist"><li>field<ul class="errorlist"><li>&quot;&lt;script&gt;&quot; is not a valid value for a primary key.</li></ul></li></ul>')

    def test_regression_14234(self):
        """
        Re-cleaning an instance that was added via a ModelForm should not raise
        a pk uniqueness error.
        """
        class CheeseForm(ModelForm):
            class Meta:
                model = Cheese
                fields = '__all__'

        form = CheeseForm({
            'name': 'Brie',
        })
        self.assertTrue(form.is_valid())
        obj = form.save()
        obj.name = 'Camembert'
        obj.full_clean()
| {"/home/admin.py": ["/home/models.py"], "/need_install/Django-1.8.17/tests/fixtures_migration/tests.py": ["/need_install/Django-1.8.17/tests/fixtures_migration/models.py"], "/home/views.py": ["/home/models.py"]} |
45,268 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/signals.py | # -*- coding: utf-8 -*-
"""
syslog2irc.signals
~~~~~~~~~~~~~~~~~~
Signals
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from blinker import signal
# A syslog message was received; sent with the receiving UDP port as the
# sender, plus `source_address` and `message` keyword arguments.
syslog_message_received = signal('syslog-message-received')

# The IRC bot successfully joined a channel; sent with a `channel`
# keyword argument carrying the channel name.
irc_channel_joined = signal('irc-channel-joined')

# NOTE(review): senders/receivers of these two signals are defined in the
# message processor, which is not visible here — presumably `received` is
# a message entering processing and `approved` one cleared for delivery;
# confirm against processor.py.
message_received = signal('message-received')
message_approved = signal('message-approved')

# Shutdown of the application was requested (e.g. by the IRC bot after a
# matching private message).
shutdown_requested = signal('shutdown-requested')
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,269 | Emantor/syslog2irc | refs/heads/master | /tests/test_syslog_request_handler.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from datetime import datetime
from unittest import TestCase
from syslogmp import Facility, Message, Severity
from syslog2irc.signals import syslog_message_received
from syslog2irc.syslog import RequestHandler
CURRENT_YEAR = datetime.today().year
class SyslogRequestHandlerTestCase(TestCase):

    def setUp(self):
        # Collect the keyword data of every received signal, in order.
        self.received_signal_data = []

    def test_handle(self):
        """A well-formed datagram is parsed and announced via the signal."""
        # Example 5 from RFC 3164.
        data = b'<0>Oct 22 10:52:12 scapegoat 1990 Oct 22 10:52:01 TZ-6 scapegoat.dmz.example.org 10.1.2.3 sched[0]: That\'s All Folks!'
        request = [data]
        client_address = ('127.0.0.1', 514)
        server = FakeServer(514)

        expected_message = Message(
            Facility.kernel,
            Severity.emergency,
            datetime(CURRENT_YEAR, 10, 22, 10, 52, 12),
            'scapegoat',
            b'1990 Oct 22 10:52:01 TZ-6 scapegoat.dmz.example.org 10.1.2.3 sched[0]: That\'s All Folks!')

        def handle_syslog_message_received(sender, **signal_data):
            self.storeReceivedSignalData(signal_data)

        syslog_message_received.connect(handle_syslog_message_received)

        RequestHandler(request, client_address, server)

        self.assertReceivedSignalDataEqual([{
            'message': expected_message,
            'source_address': client_address,
        }])

    def storeReceivedSignalData(self, data):
        self.received_signal_data.append(data)

    def assertReceivedSignalDataEqual(self, expected):
        self.assertEqual(self.received_signal_data, expected)
class FakeServer(object):
    """Test double for the receive server; only exposes its port."""

    def __init__(self, port):
        # The port this fake pretends to listen on.
        self.port = port

    def get_port(self):
        """Return the configured port number."""
        return self.port
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,270 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/irc.py | # -*- coding: utf-8 -*-
"""
syslog2irc.irc
~~~~~~~~~~~~~~
Internet Relay Chat
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from __future__ import absolute_import
from collections import namedtuple
from ssl import wrap_socket as ssl_wrap_socket
from irc.bot import SingleServerIRCBot
from irc.connection import Factory
from .signals import irc_channel_joined, shutdown_requested
from .util import log
class Channel(namedtuple('Channel', 'name password')):
    """An IRC channel, optionally protected by a password."""

    def __new__(cls, name, password=None):
        # Default the password so that `Channel('#foo')` works as well.
        return super(Channel, cls).__new__(cls, name=name, password=password)
class Bot(SingleServerIRCBot):
    """An IRC bot to forward syslog messages to IRC channels."""

    def __init__(self, server_spec, nickname, realname, channels, ssl=False,
                 shutdown_predicate=None):
        """Prepare the bot and connect to the IRC server.

        :param server_spec: the IRC server to connect to (provides
            `host` and `port` attributes)
        :param channels: `Channel` instances to join once connected
        :param ssl: if true, wrap the connection socket with SSL
        :param shutdown_predicate: optional callable `(nickmask, text)`;
            a private message for which it returns true triggers shutdown
        """
        log('Connecting to IRC server {0.host}:{0.port:d} ...', server_spec)

        connect_params = {}
        if ssl:
            ssl_factory = Factory(wrapper=ssl_wrap_socket)
            connect_params['connect_factory'] = ssl_factory

        SingleServerIRCBot.__init__(self, [server_spec], nickname,
                                    realname, **connect_params)

        # Note: `self.channels` already exists in super class.
        self.channels_to_join = channels
        self.shutdown_predicate = shutdown_predicate

    def get_version(self):
        """Return this on CTCP VERSION requests."""
        return 'syslog2IRC'

    def on_welcome(self, conn, event):
        """Join channels after connect."""
        log('Connected to {}:{:d}.', *conn.socket.getpeername())

        channel_names = sorted(c.name for c in self.channels_to_join)
        log('Channels to join: {}', ', '.join(channel_names))

        for channel in self.channels_to_join:
            # Fall back to an empty key when no password is set.
            conn.join(channel.name, channel.password or '')

    def on_nicknameinuse(self, conn, event):
        """Choose another nickname if conflicting."""
        # Append underscores until a free nickname is found.
        self._nickname += '_'
        conn.nick(self._nickname)

    def on_join(self, conn, event):
        """Successfully joined channel."""
        joined_nick = event.source.nick
        channel_name = event.target

        # Only announce the join when it was this bot that joined.
        if joined_nick == self._nickname:
            log('Joined IRC channel: {}', channel_name)
            irc_channel_joined.send(channel=channel_name)

    def on_badchannelkey(self, conn, event):
        """Channel could not be joined due to wrong password."""
        channel_name = event.arguments[0]
        log('Cannot join channel {} (bad key).', channel_name)

    def on_privmsg(self, conn, event):
        """React on private messages.

        Shut down when the configured predicate accepts the message.
        """
        nickmask = event.source
        text = event.arguments[0]
        if self.shutdown_predicate is not None \
                and self.shutdown_predicate(nickmask, text):
            self.shutdown(nickmask)

    def shutdown(self, nickmask):
        """Shut the bot down."""
        log('Shutdown requested by {}.', nickmask)
        shutdown_requested.send()
        self.die('Shutting down.')  # Joins IRC bot thread.

    def say(self, channel, message):
        """Say message on channel."""
        self.connection.privmsg(channel, message)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,271 | Emantor/syslog2irc | refs/heads/master | /tests/test_routing.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from unittest import TestCase
from nose2.tools import params
from syslog2irc.router import map_channel_names_to_ports
class RoutingTestCase(TestCase):

    @params(
        (
            {514: ['#example1']},
            {'#example1': {514}},
        ),
        (
            {
                514: ['#example1', '#example2'],
                55514: ['#example2'],
            },
            {
                '#example1': {514},
                '#example2': {514, 55514},
            },
        ),
    )
    def test_map_channel_names_to_ports(self, routes, expected):
        """Ports-to-channel-names routes are inverted correctly."""
        self.assertEqual(map_channel_names_to_ports(routes), expected)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,272 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/runner.py | # -*- coding: utf-8 -*-
"""
syslog2irc.runner
~~~~~~~~~~~~~~~~~
A looping, stoppable runner
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from time import sleep
from .util import log
class Runner(object):
    """Keep the main thread alive until a shutdown is requested."""

    def __init__(self):
        # Flag flipped by `request_shutdown`, polled by `run`.
        self.shutdown = False

    def request_shutdown(self, sender):
        """Signal receiver: ask the main loop to terminate."""
        self.shutdown = True

    def run(self, seconds_to_sleep=0.5):
        """Run the main loop until shutdown is requested."""
        while True:
            if self.shutdown:
                break
            sleep(seconds_to_sleep)
        log('Shutting down ...')
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,273 | Emantor/syslog2irc | refs/heads/master | /tests/test_syslog_message_formatting.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from datetime import datetime
from unittest import TestCase
from nose2.tools import params
from syslogmp import Facility, Message, Severity
from syslog2irc.processor import format_syslog_message
class SyslogMessageFormattingTestCase(TestCase):

    @params(
        (
            Facility.user,
            Severity.informational,
            None,
            None,
            b'FYI',
            '[informational]: FYI',
        ),
        (
            Facility.clock9,
            Severity.warning,
            datetime(2013, 7, 8, 0, 12, 55),
            None,
            b'Tick, tack, watch the clock!',
            '[2013-07-08 00:12:55] [warning]: Tick, tack, watch the clock!',
        ),
        (
            Facility.ntp,
            Severity.debug,
            None,
            'ntp.local',
            b'What time is it?',
            '(ntp.local) [debug]: What time is it?',
        ),
        (
            Facility.kernel,
            Severity.emergency,
            datetime(2008, 10, 18, 17, 34, 7),
            'mainframe',
            b'WTF? S.O.S.!',
            '[2008-10-18 17:34:07] (mainframe) [emergency]: WTF? S.O.S.!',
        ),
    )
    def test_format_message(self, facility, severity, timestamp,
                            hostname, message, expected):
        """Optional timestamp/hostname parts appear only when present."""
        syslog_message = Message(facility, severity, timestamp, hostname,
                                 message)
        self.assertEqual(format_syslog_message(syslog_message), expected)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,274 | Emantor/syslog2irc | refs/heads/master | /tests/test_processor_shutdown.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from unittest import TestCase
from syslog2irc.processor import Processor
from syslog2irc.router import Router
from syslog2irc.signals import shutdown_requested
class ProcessorShutdownTestCase(TestCase):

    def test_shutdown_flag_set_on_shutdown_signal(self):
        """The processor's shutdown flag flips when the signal fires."""
        processor = Processor(Router({}))
        processor.connect_to_signals()

        self.assertEqual(processor.shutdown, False)
        shutdown_requested.send()
        self.assertEqual(processor.shutdown, True)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,275 | Emantor/syslog2irc | refs/heads/master | /tests/test_irc_channel.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from unittest import TestCase
from nose2.tools import params
from syslog2irc.irc import Channel
class IrcChannelTestCase(TestCase):

    @params(
        (Channel('#example'), '#example', None),
        (Channel('#example', password=None), '#example', None),
        (Channel('#headquarters', password='secret'), '#headquarters', 'secret'),
    )
    def test_irc_channel_creation(self, channel, expected_name, expected_password):
        """A channel exposes its name and its (optional) password."""
        self.assertEqual((channel.name, channel.password),
                         (expected_name, expected_password))
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,276 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/syslog.py | # -*- coding: utf-8 -*-
"""
syslog2irc.syslog
~~~~~~~~~~~~~~~~~
BSD syslog message reception and handling
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
try:
# Python 2.x
from SocketServer import BaseRequestHandler, ThreadingUDPServer
except ImportError:
# Python 3.x
from socketserver import BaseRequestHandler, ThreadingUDPServer
import sys
import syslogmp
from .signals import syslog_message_received
from .util import log, start_thread
class RequestHandler(BaseRequestHandler):
    """Handler for a single incoming syslog datagram."""

    def handle(self):
        datagram = self.request[0]
        try:
            message = syslogmp.parse(datagram)
        except ValueError:
            # Not a well-formed BSD syslog message; log and drop it.
            log('Invalid message received from {}:{:d}.',
                *self.client_address)
            return

        port = self.server.get_port()

        log('Received message from {0[0]}:{0[1]:d} on port {1:d} -> {2}',
            self.client_address, port, format_message_for_log(message))

        # Announce the parsed message, with the receiving port as sender.
        syslog_message_received.send(port,
                                     source_address=self.client_address,
                                     message=message)
class ReceiveServer(ThreadingUDPServer):
    """UDP server that waits for syslog messages."""

    def __init__(self, port):
        """Bind to the given UDP port on all interfaces."""
        ThreadingUDPServer.__init__(self, ('', port), RequestHandler)

    @classmethod
    def start(cls, port):
        """Start in a separate thread.

        Writes a diagnostic and exits the process if the port cannot
        be opened.
        """
        try:
            receiver = cls(port)
        except EnvironmentError as e:
            # Only OS/socket errors carry `errno`/`strerror`. The previous
            # `except Exception` both masked unrelated bugs and crashed
            # with an AttributeError while formatting this message.
            sys.stderr.write('Error {0.errno:d}: {0.strerror}\n'
                             .format(e))
            sys.stderr.write(
                'Probably no permission to open port {}. '
                'Try to specify a port number above 1,024 (or even '
                '4,096) and up to 65,535.\n'.format(port))
            sys.exit(1)

        thread_name = '{}-port{:d}'.format(cls.__name__, port)
        start_thread(receiver.serve_forever, thread_name)

    def get_port(self):
        """Return the port the server listens on."""
        return self.server_address[1]
def start_syslog_message_receivers(ports):
    """Spawn a syslog receive server (in its own thread) for every port."""
    for receive_port in ports:
        ReceiveServer.start(receive_port)
def format_message_for_log(message):
    """Return a `key=value` listing of a syslog message's attributes."""
    fields = [
        ('facility', message.facility.name),
        ('severity', message.severity.name),
        ('timestamp', message.timestamp.isoformat()),
        ('hostname', message.hostname),
        ('message', message.message),
    ]
    return ', '.join('{}={}'.format(key, value) for key, value in fields)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,277 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/router.py | # -*- coding: utf-8 -*-
"""
syslog2irc.router
~~~~~~~~~~~~~~~~~
Routing of syslog messages to IRC channels by the port they arrive on.
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from collections import defaultdict
from .util import log
class Router(object):
    """Map syslog port numbers to IRC channel names.

    Tracks which channels have actually been joined ("enabled") so that
    messages are only forwarded to channels the bot is present in.
    """

    def __init__(self, ports_to_channel_names):
        self.ports_to_channel_names = ports_to_channel_names
        self.channel_names_to_ports = map_channel_names_to_ports(
            ports_to_channel_names)
        self.enabled_channels = set()

    def enable_channel(self, sender, channel=None):
        """Mark the channel as joined; messages may now be routed to it."""
        self.enabled_channels.add(channel)
        ports = self.channel_names_to_ports[channel]
        log('Enabled forwarding to channel {} from ports {}.', channel, ports)

    def is_channel_enabled(self, channel):
        """Return True if forwarding to the channel has been enabled."""
        return channel in self.enabled_channels

    def get_channel_names(self):
        """Return the names of all channels that appear in any route."""
        return frozenset(self.channel_names_to_ports)

    def get_channel_names_for_port(self, port):
        """Return the names of the channels fed by the given syslog port."""
        return self.ports_to_channel_names[port]
def replace_channels_with_channel_names(routes):
    """Return the routes with every channel object replaced by its name."""
    result = {}
    for ports, channels in routes.items():
        result[ports] = channels_to_names(channels)
    return result


def channels_to_names(channels):
    """Return the set of names of the given channels."""
    names = set()
    for channel in channels:
        names.add(channel.name)
    return names
def map_channel_names_to_ports(ports_to_channel_names):
    """Invert the port -> channel names mapping.

    Returns a plain dict mapping each channel name to the set of ports
    whose messages are routed to it.
    """
    inverted = {}
    for port, channel_names in ports_to_channel_names.items():
        for channel_name in channel_names:
            inverted.setdefault(channel_name, set()).add(port)
    return inverted
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,278 | Emantor/syslog2irc | refs/heads/master | /start-syslog2irc.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
syslog2IRC
==========
Receive syslog messages via UDP and show them on IRC.
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:Date: 09-Sep-2015
:License: MIT, see LICENSE for details.
:Version: 0.9.2-dev
"""
from syslog2irc.argparser import parse_args
from syslog2irc.irc import Channel
from syslog2irc.main import start
import json
def start_with_args(routes, **options):
    """Start the IRC bot and the syslog listen server.

    All arguments (except for routes) are read from the command line.
    """
    args = parse_args()
    start(
        args.irc_server,
        args.irc_nickname,
        args.irc_realname,
        routes,
        ssl=args.irc_server_ssl,
        **options)
if __name__ == '__main__':
    # Load the configuration from file. Use a context manager so the
    # file handle is closed deterministically (the original leaked it).
    with open("sample_config.json") as config_file:
        config = json.load(config_file)

    # Create a dictionary of Channel objects, keyed by channel name.
    channel_list = {}
    for name, channel_options in config['channels'].items():
        channel_list[name] = Channel(name, **channel_options)

    # Create route mappings: syslog port -> list of IRC channels.
    routes = {}
    for route in config['routes']:
        routes[route['port']] = [channel_list[channel_name]
                                 for channel_name in route['channel']]

    start_with_args(routes)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,279 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/main.py | # -*- coding: utf-8 -*-
"""
syslog2irc.main
~~~~~~~~~~~~~~~
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from itertools import chain
from .announcer import create_announcer
from .processor import Processor
from .router import replace_channels_with_channel_names, Router
from .signals import irc_channel_joined, message_approved
from .syslog import start_syslog_message_receivers
from .util import log
# A note on threads (implementation detail):
#
# This tool uses threads. Besides the main thread, there are two
# additional threads: one for the syslog message receiver and one for
# the IRC bot. Both are configured to be daemon threads.
#
# A Python application exits if no more non-daemon threads are running.
#
# In order to exit syslog2IRC when shutdown is requested on IRC, the IRC
# bot will call `die()`, which will join the IRC bot thread. The main
# thread and the (daemonized) syslog message receiver thread remain.
#
# Additionally, a dedicated signal is sent that sets a flag that causes
# the main loop to stop. As the syslog message receiver thread is the
# only one left, but runs as a daemon, the application exits.
#
# The STDOUT announcer, on the other hand, does not run in a thread. The
# user has to manually interrupt the application to exit.
#
# For details, see the documentation on the `threading` module that is
# part of Python's standard library.
def start(irc_server, irc_nickname, irc_realname, routes, **options):
    """Start the IRC bot and the syslog listen server.

    `routes` maps syslog port numbers to collections of channel objects.
    Extra keyword options are forwarded to the announcer (and bot).
    Blocks until shutdown is requested or <Ctrl-C> is pressed.
    """
    try:
        # All channels referenced by any route; the bot joins these.
        irc_channels = frozenset(chain(*routes.values()))
        ports = routes.keys()
        # The router works with channel *names*, not channel objects.
        ports_to_channel_names = replace_channels_with_channel_names(routes)
        announcer = create_announcer(irc_server, irc_nickname, irc_realname,
                                     irc_channels, **options)
        message_approved.connect(announcer.announce)
        router = Router(ports_to_channel_names)
        processor = Processor(router)
        # Up to this point, no signals must have been sent.
        processor.connect_to_signals()
        # Signals are allowed to be sent from here on.
        start_syslog_message_receivers(ports)
        announcer.start()
        if not irc_server:
            # No IRC server: pretend all channels were joined so that
            # forwarding to STDOUT is enabled immediately.
            fake_channel_joins(router)
        processor.run()
    except KeyboardInterrupt:
        log('<Ctrl-C> pressed, aborting.')
def fake_channel_joins(router):
    """Simulate a join for every routed channel (used without an IRC server)."""
    channel_names = router.get_channel_names()
    for name in channel_names:
        irc_channel_joined.send(channel=name)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,280 | Emantor/syslog2irc | refs/heads/master | /tests/test_processor_channel_enabling.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from unittest import TestCase
from syslog2irc.processor import Processor
from syslog2irc.router import Router
from syslog2irc.signals import irc_channel_joined
class ProcessorChannelEnablingTestCase(TestCase):

    def test_channel_enabling_on_join_signal(self):
        """Joining a channel (via signal) must enable exactly that channel."""
        ports_to_channel_names = {
            514: {'#example1', '#example2'},
            55514: {'#example2'},
        }
        processor = self._create_processor(ports_to_channel_names)

        def assert_enabled(expected_example1, expected_example2):
            self.assertEqual(
                processor.router.is_channel_enabled('#example1'),
                expected_example1)
            self.assertEqual(
                processor.router.is_channel_enabled('#example2'),
                expected_example2)

        assert_enabled(False, False)

        irc_channel_joined.send(channel='#example1')
        assert_enabled(True, False)

        irc_channel_joined.send(channel='#example2')
        assert_enabled(True, True)

    def _create_processor(self, ports_to_channel_names):
        """Build a processor whose router knows the given routes."""
        router = Router(ports_to_channel_names)
        processor = Processor(router)
        processor.connect_to_signals()
        return processor
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,281 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/processor.py | # -*- coding: utf-8 -*-
"""
syslog2irc.processor
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .runner import Runner
from .signals import irc_channel_joined, message_approved, message_received, \
shutdown_requested, syslog_message_received
from .util import log
MESSAGE_TEXT_ENCODING = 'utf-8'
class Processor(Runner):
    """Route incoming syslog messages to approved IRC output via signals.

    Receives `syslog_message_received`, formats the message, re-emits it
    as `message_received` for the channels mapped to the syslog port,
    and approves it for every channel that is enabled (joined).
    """
    def __init__(self, router, syslog_message_formatter=None):
        # `router` maps ports to channel names and tracks joined channels.
        super(Processor, self).__init__()
        self.router = router
        # Allow callers to supply a custom message formatter;
        # fall back to the module-level default otherwise.
        if syslog_message_formatter is not None:
            self.syslog_message_formatter = syslog_message_formatter
        else:
            self.syslog_message_formatter = format_syslog_message
    def connect_to_signals(self):
        """Subscribe all handlers. Must run before any signal is sent."""
        irc_channel_joined.connect(self.router.enable_channel)
        shutdown_requested.connect(self.request_shutdown)
        syslog_message_received.connect(self.handle_syslog_message)
        message_received.connect(self.handle_message)
    def handle_syslog_message(self, port, source_address=None,
                              message=None):
        """Process an incoming syslog message.

        Formats source and message and emits `message_received` with the
        channel names routed to the given port.
        """
        channel_names = self.router.get_channel_names_for_port(port)
        # `source_address` is indexed as a (host, port) pair here.
        formatted_source = '{0[0]}:{0[1]:d}'.format(source_address)
        formatted_message = self.syslog_message_formatter(message)
        text = '{} {}'.format(formatted_source, formatted_message)
        message_received.send(channel_names=channel_names,
                              text=text,
                              source_address=source_address)
    def handle_message(self, sender, channel_names=None, text=None,
                       source_address=None):
        """Process an incoming message.

        Approve the message only for target channels that are enabled.
        """
        for channel_name in channel_names:
            if self.router.is_channel_enabled(channel_name):
                message_approved.send(channel_name=channel_name,
                                      text=text)
def format_syslog_message(message):
    """Format a syslog message to be displayed on IRC.

    Layout: optional "[timestamp] ", optional "(hostname) ", then
    "[SEVERITY]: text".
    """
    parts = []

    if message.timestamp is not None:
        formatted_timestamp = message.timestamp.strftime('%Y-%m-%d %H:%M:%S')
        parts.append('[{}] '.format(formatted_timestamp))

    if message.hostname is not None:
        parts.append('({}) '.format(message.hostname))

    # Important: The message text is a byte string.
    message_text = message.message.decode(MESSAGE_TEXT_ENCODING)
    parts.append('[{}]: {}'.format(message.severity.name, message_text))

    return ''.join(parts)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,282 | Emantor/syslog2irc | refs/heads/master | /tests/test_irc_shutdown_predicate.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from unittest import TestCase
from irc.bot import ServerSpec
from irc.client import Event, NickMask
from nose2.tools import params
from syslog2irc.irc import Bot
from syslog2irc.signals import shutdown_requested
class IrcShutdownPredicateTestCase(TestCase):
    # Each row: (shutdown predicate, privmsg text, shutdown expected?).
    # A predicate of None means shutdown is never triggered.
    @params(
        (None , 'anything' , False),
        (lambda nickmask, text: nickmask.nick == 'UserNick' , 'anything' , True ),
        (lambda nickmask, text: nickmask.nick == 'OtherNick' , 'anything' , False),
        (lambda nickmask, text: nickmask.host.endswith('.example.com'), 'anything' , True ),
        (lambda nickmask, text: nickmask.host.endswith('.example.net'), 'anything' , False),
        (lambda nickmask, text: text == 'shutdown!' , 'shutdown!', True ),
        (lambda nickmask, text: text == 'shutdown!' , 'something', False),
    )
    def test_shutdown_predicate(self, shutdown_predicate, text, expected):
        """A privmsg must trigger shutdown iff the predicate accepts it."""
        self.shutdown_signal_received = False
        bot = create_bot(shutdown_predicate)
        # Record whether the shutdown signal was emitted.
        @shutdown_requested.connect
        def handle_shutdown_requested(sender):
            self.shutdown_signal_received = True
        send_privmsg(bot, text)
        self.assertEqual(self.shutdown_signal_received, expected)
def create_bot(shutdown_predicate):
    """Create a bot instance suitable for testing the shutdown predicate."""
    server = ServerSpec('irc.example.org')
    bot = Bot(server, 'BotNick', 'BotName', [],
              shutdown_predicate=shutdown_predicate)
    # Prevent `SystemExit`.
    bot.die = lambda message: None
    return bot
def send_privmsg(bot, text):
    """Deliver a privmsg event carrying `text` to the bot's handler."""
    event = create_privmsg_event(text)
    bot.on_privmsg(None, event)


def create_privmsg_event(text):
    """Build a privmsg event from a fixed test user to the bot's nick."""
    source = NickMask('UserNick!user@machine23.example.com')
    return Event('privmsg', source, 'BotNick', [text])
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,283 | Emantor/syslog2irc | refs/heads/master | /setup.py | # -*- coding: utf-8 -*-
import codecs
import sys
from setuptools import setup
# Use the README as the package's long description.
with codecs.open('README.rst', encoding='utf-8') as f:
    long_description = f.read()
# Require the 'enum34' package on Python versions before 3.4
# (the stdlib `enum` module was added in 3.4).
version_dependent_install_requires = []
if sys.version_info[:2] < (3, 4):
    version_dependent_install_requires.append('enum34')
setup(
    name='syslog2IRC',
    version='0.9.2-dev',
    description='A proxy to forward syslog messages to IRC',
    long_description=long_description,
    url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
    author='Jochen Kupperschmidt',
    author_email='homework@nwsnet.de',
    license='MIT',
    classifiers=[
        'Environment :: Console',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Communications :: Chat :: Internet Relay Chat',
        'Topic :: Internet',
        'Topic :: System :: Logging',
        'Topic :: System :: Monitoring',
        'Topic :: System :: Networking :: Monitoring',
        'Topic :: System :: Systems Administration',
    ],
    packages=['syslog2irc'],
    install_requires=[
        'blinker >= 1.3',
        'irc >= 8.9.1',
        'syslogmp >= 0.2',
    ] + version_dependent_install_requires,
    tests_require=['nose2'],
    test_suite='nose2.collector.collector',
)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,284 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/argparser.py | # -*- coding: utf-8 -*-
"""
syslog2irc.argparser
~~~~~~~~~~~~~~~~~~~~
Command line argument parsing
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from __future__ import absolute_import
from argparse import ArgumentParser
from irc.bot import ServerSpec
DEFAULT_IRC_PORT = ServerSpec('').port
def parse_args(args=None):
    """Parse command line arguments (or the given argument list)."""
    return create_arg_parser().parse_args(args)
def create_arg_parser():
    """Prepare the command line arguments parser."""
    parser = ArgumentParser()
    # Nickname the bot connects with.
    parser.add_argument('--irc-nickname',
                        dest='irc_nickname',
                        default='syslog',
                        help='the IRC nickname the bot should use',
                        metavar='NICKNAME')
    # Realname shown for the bot, e.g. in /whois output.
    parser.add_argument('--irc-realname',
                        dest='irc_realname',
                        default='syslog2IRC',
                        help='the IRC realname the bot should use',
                        metavar='REALNAME')
    # Parsed into a ServerSpec by `parse_irc_server_arg`; omitting this
    # option makes the application write to STDOUT instead.
    parser.add_argument('--irc-server',
                        dest='irc_server',
                        type=parse_irc_server_arg,
                        help='IRC server (host and, optionally, port and password)'
                             + ' to connect to'
                             + ' [e.g. "irc.example.com", "irc.example.com:6669"'
                             + ' or "irc.example.com:6669:password;"'
                             + ' default port: {:d}]'.format(DEFAULT_IRC_PORT),
                        metavar='SERVER')
    parser.add_argument('--irc-server-ssl',
                        dest='irc_server_ssl',
                        action='store_true',
                        help='use SSL to connect to the IRC server')
    return parser
def parse_irc_server_arg(value):
    """Parse a hostname with optional port (and optional password).

    Accepted forms: "host", "host:port", "host:port:password".
    """
    fragments = value.split(':', 2)
    if len(fragments) == 1:
        return ServerSpec(fragments[0])
    # The port must be an integer for ServerSpec.
    fragments[1] = int(fragments[1])
    return ServerSpec(*fragments)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,285 | Emantor/syslog2irc | refs/heads/master | /tests/test_argument_parser.py | # -*- coding: utf-8 -*-
"""
:Copyright: 2007-2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
from unittest import TestCase
from nose2.tools import params
from syslog2irc.argparser import parse_args
class ArgumentParserTestCase(TestCase):
    # Each row: (argument list, expected nickname).
    @params(
        ([ ], 'syslog' ),
        (['--irc-nickname', 'AwesomeBot'], 'AwesomeBot'),
    )
    def test_irc_nickname(self, arg_value, expected):
        """The nickname option must default to 'syslog'."""
        actual = parse_args(arg_value)
        self.assertEqual(actual.irc_nickname, expected)
    # Each row: (argument list, expected realname).
    @params(
        ([ ], 'syslog2IRC' ),
        (['--irc-realname', 'awesomest bot ever'], 'awesomest bot ever'),
    )
    def test_irc_realname(self, arg_value, expected):
        """The realname option must default to 'syslog2IRC'."""
        actual = parse_args(arg_value)
        self.assertEqual(actual.irc_realname, expected)
    # Each row: (argument list, expected host, port, password).
    @params(
        (['--irc-server', 'localhost' ], 'localhost', 6667, None ),
        (['--irc-server', '127.0.0.1' ], '127.0.0.1', 6667, None ),
        (['--irc-server', '127.0.0.1:6669' ], '127.0.0.1', 6669, None ),
        (['--irc-server', '127.0.0.1:6669:testpassword'], '127.0.0.1', 6669, 'testpassword'),
    )
    def test_parse_irc_server(self, arg_value, expected_host, expected_port, expected_password):
        """The server value must yield host, port (default 6667), password."""
        actual = parse_args(arg_value)
        self.assertEqual(actual.irc_server.host, expected_host)
        self.assertEqual(actual.irc_server.port, expected_port)
        self.assertEqual(actual.irc_server.password, expected_password)
    # Each row: (argument list, expected SSL flag).
    @params(
        ([ ], False),
        (['--irc-server-ssl'], True ),
    )
    def test_irc_server_ssl_option(self, arg_value, expected):
        """SSL must be off unless --irc-server-ssl is given."""
        actual = parse_args(arg_value)
        self.assertEqual(actual.irc_server_ssl, expected)
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,286 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/util.py | # -*- coding: utf-8 -*-
"""
syslog2irc.util
~~~~~~~~~~~~~~~
Various utilities
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
import logging
from threading import Thread
logging.basicConfig(format='%(asctime)s | %(message)s', level=logging.INFO)
def log(message, *args, **kwargs):
    """Log the message with a timestamp."""
    text = message.format(*args, **kwargs)
    logging.info(text)
def start_thread(target, name):
    """Create, configure, and start a new daemonized thread."""
    thread = Thread(target=target, name=name)
    thread.daemon = True
    thread.start()
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,287 | Emantor/syslog2irc | refs/heads/master | /syslog2irc/announcer.py | # -*- coding: utf-8 -*-
"""
syslog2irc.announcer
~~~~~~~~~~~~~~~~~~~~
Message announcing
:Copyright: 2007-2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .irc import Bot
from .util import log, start_thread
class IrcAnnouncer(object):
    """Announce syslog messages on IRC."""

    def __init__(self, server, nickname, realname, channels, **options):
        self.bot = Bot(server, nickname, realname, channels, **options)

    def start(self):
        """Run the bot in a background thread."""
        start_thread(self.bot.start, 'IrcAnnouncer')

    def announce(self, sender, channel_name=None, text=None):
        """Have the bot say the text in the given channel."""
        self.bot.say(channel_name, text)
class StdoutAnnouncer(object):
    """Announce syslog messages on STDOUT."""

    def start(self):
        """No setup is required for STDOUT output."""
        pass

    def announce(self, sender, channel_name=None, text=None):
        """Write the text, prefixed with its channel name, to the log."""
        log('{}> {}', channel_name, text)
def create_announcer(irc_server, irc_nickname, irc_realname,
                     irc_channels, **options):
    """Create and return an announcer according to the configuration.

    Without an IRC server, messages are announced on STDOUT instead.
    """
    if irc_server:
        return IrcAnnouncer(irc_server, irc_nickname, irc_realname,
                            irc_channels, **options)
    log('No IRC server specified; will write to STDOUT instead.')
    return StdoutAnnouncer()
| {"/tests/test_syslog_request_handler.py": ["/syslog2irc/signals.py", "/syslog2irc/syslog.py"], "/syslog2irc/irc.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_routing.py": ["/syslog2irc/router.py"], "/syslog2irc/runner.py": ["/syslog2irc/util.py"], "/tests/test_syslog_message_formatting.py": ["/syslog2irc/processor.py"], "/tests/test_processor_shutdown.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/tests/test_irc_channel.py": ["/syslog2irc/irc.py"], "/syslog2irc/syslog.py": ["/syslog2irc/signals.py", "/syslog2irc/util.py"], "/syslog2irc/router.py": ["/syslog2irc/util.py"], "/start-syslog2irc.py": ["/syslog2irc/argparser.py", "/syslog2irc/irc.py", "/syslog2irc/main.py"], "/syslog2irc/main.py": ["/syslog2irc/announcer.py", "/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py", "/syslog2irc/syslog.py", "/syslog2irc/util.py"], "/tests/test_processor_channel_enabling.py": ["/syslog2irc/processor.py", "/syslog2irc/router.py", "/syslog2irc/signals.py"], "/syslog2irc/processor.py": ["/syslog2irc/runner.py", "/syslog2irc/signals.py", "/syslog2irc/util.py"], "/tests/test_irc_shutdown_predicate.py": ["/syslog2irc/irc.py", "/syslog2irc/signals.py"], "/tests/test_argument_parser.py": ["/syslog2irc/argparser.py"], "/syslog2irc/announcer.py": ["/syslog2irc/irc.py", "/syslog2irc/util.py"]} |
45,289 | vv2246/tr-dag-cycles | refs/heads/main | /run_cycles_random_graph.py | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 18 11:08:56 2019
@author: Vaiva
"""
from cycle_utilities import *
# Module-wide matplotlib default: larger font for all figures produced below.
plt.rcParams.update({'font.size': 20})
def calc_mcb(ntype, N):
    """
    Generate random DAGs of the requested type and append their cycle
    statistics to a text file.

    Parameters
    ----------
    ntype : str
        Type of DAG: "random", "price", "lattice", or "russian_doll".
    N : int
        Number of nodes (ignored for "lattice"/"russian_doll", whose sizes
        come from the hard-coded parameter list below).

    Returns
    -------
    None. One tab-separated row per analysed graph is appended to
    "dags_cycle_data_<ntype>_no_nodes_eq_<N>.txt".

    Raises
    ------
    ValueError
        If `ntype` is not one of the supported model names (previously this
        fell through to a confusing NameError on `pvalues`).
    """
    # Parameter sweep per model: edge probability (random), out-degree m
    # (price) or lattice side length (lattice / russian_doll).
    if ntype == "random":
        pvalues = [0.1]  # ,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9]
    elif ntype == "price":
        pvalues = [2, 3, 4, 5, 6, 7, 8]
    elif ntype == "lattice" or ntype == "russian_doll":
        pvalues = [5, 10]  # ,15,20,25,30]#[int(N**0.5)]
    else:
        raise ValueError("unknown DAG type: {}".format(ntype))
    filename = "dags_cycle_data_{}_no_nodes_eq_{}.txt".format(ntype, N)
    for p in pvalues:
        print(p)
        r = 0
        while r < 10:  # collect 10 successfully analysed graphs per parameter
            if ntype == "random":
                G = random_dag(N, p)
            elif ntype == "price":
                G = price_dag(N, p, 1, N + 1)
            elif ntype == "lattice":
                G = lattice_dag_2D(p, p)
            elif ntype == "russian_doll":
                G = russian_doll_dag(p * p)
            # Only analyse non-empty, weakly connected graphs.
            if G and nx.is_weakly_connected(G):
                try:
                    E = G.number_of_edges()
                    res = print_cycle_statistics(G)
                    with open(filename, "a+") as f:
                        # Row layout: p, E, then every statistic, tab-separated
                        # (with a trailing tab, as in the original format).
                        row = [str(p), str(E)] + [str(val) for val in res.values()]
                        f.write("\t".join(row) + "\t\n")
                    r += 1
                    print(r)
                except Exception:
                    # Best effort: skip graphs whose statistics fail to
                    # compute (e.g. numerical issues) and sample another one.
                    pass
def plot_figure_7(filename,fig_save = False,nruns= 20, labels = {'p':"$p$",
                         'E':"$E$",
                         'Mean edge participation':"$\\langle E_p\\rangle$",
                         'Std edge participation':"$\\sigma(E_p)$",
                         'Largest cycle size': "$C_{\\mathrm{max}}$",
                         'Mean cycle size':"$\\langle C\\rangle$",
                         'Std cycle size':"$\\sigma(C)$",
                         'Number of cycles':"d",
                         'Number cycle connected components':"null$L^C$",
                         'Number of TR edges':"$E_{TR}$",
                         'Longest path':"L",
                         'Number of nodes':"N",
                         'Number of edges':"E",
                         'Balance':"$\\langle b\\rangle$",
                         'Balance_norm':"$\\langle b_{norm}\\rangle$",
                         'Number_diamonds':"Diamonds",
                         'Number_mixers':"Mixers",
                         'Largest eigenvalue M_C':"$\\lambda_{\\mathrm{max}}^c$",
                         'Largest eigenvalue A_C':"labda_A_c",
                         'Mean cycle height':"$\\langle h\\rangle$",
                         'Std cycle height':"$\\sigma( h)$",
                         'Mean stretch': "$\\langle s\\rangle$",
                         'eigenvalue_ratio':'eigenvalue_ratio',
                         'Largest M_C by corr size':'Largest M_C by corr size'},error_in_mean = True):
    """
    Plot the figure-7 panels: one errorbar plot per data column (mean over
    runs vs. model parameter), plus combined diamonds/mixers and eigenvalue
    panels for the "price"/"random" models.

    Parameters
    ----------
    filename : str
        Tab-separated results file (as written by calc_mcb).  The model is
        inferred from the substring "price" or "random" in the name.
    fig_save : bool
        If True, save each panel to a PDF.
    nruns : int
        Runs per parameter value; used to compute the standard error.
    labels : dict
        Column name -> axis label.  NOTE: mutable default argument, but it
        is only read, never mutated, so sharing between calls is harmless.
    error_in_mean : bool
        If True plot the standard error of the mean, else the std deviation.

    NOTE(review): `name` (hence `figure_filename`) and `shift` are only
    assigned when the filename mentions "price" or "random"; other filenames
    raise NameError further down — presumably only those two models are
    ever passed here.
    """
    data = pd.read_csv(filename,sep="\t")
    print(data.columns)
    #data.columns = ["p","E","E_p","E_p_std","S_max","S_mean","S_std","C","null_L","E_TR","L_max","N","b","D","M","lambda_max","h_mean","h_std","s_mean","s_std","lambda_max_byC","Nan"]
    # Group rows by the parameter column so means/stds are taken per p (or m).
    if "price" in filename:
        name="price"
        xlabel = "$m$"
        data.index = data.p
        x = data.groupby(data.index)["p"].mean()
        shift = 0.1
    elif "random" in filename:
        name="random"
        data.index = data.p
        x = data.groupby(data.index)["p"].mean()
        xlabel = "$p$"
        shift= 0.01
    figure_filename = name+"_{}.pdf"
    # Error bars: standard error of the mean, or plain std if requested.
    if error_in_mean==True:
        denom = np.sqrt(nruns)
    else:
        denom = 1
    # One panel per observable column.
    for c in data.columns:
        plt.figure(figsize=(6,5))
        y = data.groupby(data.index)[c].mean()
        yerr = data.groupby(data.index)[c].std()/denom
        if "price" in filename or "random" in filename:
            plt.errorbar(x,y,yerr,fmt = "o",capsize= 5,color= "teal")
        else:
            plt.errorbar(x,y,fmt="o",capsize= 0)
        plt.xlabel(xlabel)
        plt.ylabel(labels[c])
        if fig_save:
            plt.tight_layout()
            plt.savefig(figure_filename.format(c))
        plt.show()
    # Combined panel: diamond vs mixer cycle counts (mixers shifted by
    # `shift` on the x-axis so the error bars do not overlap).
    if ("price" in filename) or ("random" in filename):
        plt.figure(figsize=(6,5))
        y = data.groupby(data.index)["Number_diamonds"].mean()
        yerr = data.groupby(data.index)["Number_diamonds"].std()/denom
        plt.errorbar(x,y,yerr,fmt = "o",capsize= 5,label = "Diamonds",color="teal")
        y = data.groupby(data.index)["Number_mixers"].mean()
        yerr = data.groupby(data.index)["Number_mixers"].std()/denom
        plt.errorbar(np.array(x)+shift,y,yerr,fmt = "o",capsize= 5,label = "Mixers",color="goldenrod")
        plt.ylabel("Number of cycles")
        plt.legend()
        plt.xlabel(xlabel)
        if fig_save:
            plt.tight_layout()
            if "price" in filename:
                plt.savefig("price_number_mixers_diamonds_N_500.pdf")
            if "random" in filename:
                plt.savefig("random_number_mixers_diamonds_N_500.pdf")
        plt.show()
        # Twin-axis panel: raw largest eigenvalue (left) vs eigenvalue
        # normalised by the corresponding cycle size (right).
        fig,ax = plt.subplots(figsize=(6,5))
        y = data.groupby(data.index)['Largest eigenvalue M_C'].mean()
        yerr = data.groupby(data.index)['Largest eigenvalue M_C'].std()/denom
        ax.errorbar(x,y,yerr,fmt = "o",capsize= 5,color= "teal")
        y = data.groupby(data.index)['Largest M_C by corr size'].mean()
        yerr = data.groupby(data.index)['Largest M_C by corr size'].std()/denom
        ax2 = ax.twinx()
        ax2.errorbar(np.array(x)+shift,y,yerr,fmt = "o",capsize= 5,color="goldenrod")
        ax.set_ylabel("$\\lambda^C_{\\mathrm{max}}$",color= "teal")
        ax2.set_ylabel("$\\lambda^C_{\\mathrm{max}}/C$",color = "goldenrod")
        ax.set_xlabel(xlabel)
        if fig_save:
            plt.tight_layout()
            if "price" in filename:
                plt.savefig("price_eigvals_N_eq_500.pdf")
            if "random" in filename:
                plt.savefig("random_eigvals_N_eq_500.pdf")
        plt.show()
def plot_figure_9(fn_random,fn_price,fig_save = False,nruns= 20 ,error_in_mean = True):
    """
    Plot the density of the transitively reduced graph, 2*E_TR/(N(N-1)),
    for the random model (bottom axis, teal) and the Price model (top axis,
    goldenrod) on a shared y-axis, and print summary statistics for the
    slices p == 0.3 (random) and m == 4 (price).

    Parameters
    ----------
    fn_random : str
        Tab-separated results file for the random-DAG model.
    fn_price : str
        Tab-separated results file for the Price model.
    fig_save : bool
        If True, save the figure as "density_price_random_N_eq_500.pdf".
    nruns : int
        Runs per parameter value; used to compute the standard error.
    error_in_mean : bool
        If True plot the standard error of the mean, else the std deviation.
    """
    # Error bars: standard error of the mean, or plain std if requested.
    if error_in_mean==True:
        denom = np.sqrt(nruns)
    else:
        denom = 1
    fig,ax = plt.subplots(figsize=(6,5))
    df = pd.read_csv(fn_random,sep="\t")
    print(df.columns)
    df.index = df.p
    xlabel = "$p$"
    x = df.groupby(df.index)["p"].mean()
    # Density of the TR graph relative to the complete graph on N nodes.
    s =df['Number of TR edges']*2/(df['Number of nodes']*(df['Number of nodes']-1))
    y = s.groupby(s.index).mean()
    yerr = s.groupby(s.index).std()/denom
    ax.errorbar(x,y,yerr,fmt = "o",capsize= 5,color= "teal")
    ax.set_xlabel(xlabel,color= "teal")
    # Print mean/error of every column at the reference point p = 0.3.
    df = df[df.p ==0.3]
    print(df.mean())
    print(df.std()/denom)
    # Second (top) x-axis for the Price-model data.
    ax2 = ax.twiny()
    df = pd.read_csv(fn_price,sep="\t")
    df.index = df.p
    x = df.groupby(df.index)["p"].mean()
    xlabel = "$m$"
    s =df['Number of TR edges']*2/(df['Number of nodes']*(df['Number of nodes']-1))
    y = s.groupby(s.index).mean()
    yerr = s.groupby(s.index).std()/denom
    ax2.errorbar(x,y,yerr,fmt = "o",capsize= 5,color= "goldenrod")
    ax2.set_xlabel(xlabel,color= "goldenrod")
    ax.set_ylabel("$2E_{\\mathrm{TR}}/N(N-1)$")
    plt.tight_layout()
    if fig_save:
        plt.savefig("density_price_random_N_eq_500.pdf")
    # Print mean/error of every column at the reference point m = 4.
    df = df[df.p ==4]
    print(df.mean())
    print(df.std()/denom)
def test_quasiunicity(N,fig_save = False,nruns=20, labels = {'p':"$p$",
                         'E':"$E$",
                         'Mean edge participation':"$\\langle E_p\\rangle$",
                         'Std edge participation':"$\\sigma(E_p)$",
                         'Largest cycle size': "$C_{\\mathrm{max}}$",
                         'Mean cycle size':"$\\langle C\\rangle$",
                         'Std cycle size':"$\\sigma(C)$",
                         'Number of cycles':"d",
                         'Number cycle connected components':"null$L$",
                         'Number of TR edges':"$E_{TR}$",
                         'Longest path':"L",
                         'Number of nodes':"N",
                         'Number of edges':"E",
                         'Balance':"$\\langle b\\rangle$",
                         'Balance_norm':"$\\langle b_{norm}\\rangle$",
                         'Number_diamonds':"Diamonds",
                         'Number_mixers':"Mixers",
                         'Largest eigenvalue M_C':"$\\lambda_{\\mathrm{max}}^c$",
                         'Largest eigenvalue A_C':"labda_A_c",
                         'Mean cycle height':"$\\langle h\\rangle$",
                         'Std cycle height':"$\\sigma( h)$",
                         'Mean stretch': "$\\langle s\\rangle$",
                         'eigenvalue_ratio':'eigenvalue_ratio',
                         'Largest M_C by corr size':'Largest M_C by corr size'},error_in_mean = True):
    """
    Test how sensitive the cycle statistics are to the (non-unique) choice of
    minimum cycle basis.

    For four fixed graphs (random p=0.1, random p=0.8, Price m=3, Price m=5),
    the SAME transitively-reduced graph is rebuilt `nruns` times with its
    edges inserted in a shuffled order, the minimum cycle basis is recomputed
    each time, and the resulting statistics are plotted as mean +/- error per
    model.

    NOTE(review): the `labels` parameter is accepted but never used in the
    body; `df = pd.DataFrame()` is likewise assigned and never read.
    """
    #N=50
    G = random_dag(N,0.1)
    tr(G)
    edges = list(G.edges())
    df = pd.DataFrame()
    df_list = []
    # Random DAG, p = 0.1: shuffle edge insertion order each run so that
    # minimum_cycle_basis may pick a different (equally minimal) basis.
    for l in range(nruns):
        Gud = nx.to_undirected(G)
        C =minimum_cycle_basis(Gud)
        res = print_cycle_statistics(G,cycles = C)
        res = ["Random $p=0.1$"] + list(res.values())
        df_list.append(res)
        G = nx.DiGraph()
        random.shuffle(edges)
        G.add_edges_from(edges)
    G = random_dag(N,0.8)
    tr(G)
    edges = list(G.edges())
    # Random DAG, p = 0.8: same protocol.
    for l in range(nruns):
        Gud = nx.to_undirected(G)
        C =minimum_cycle_basis(Gud)
        res = print_cycle_statistics(G,cycles = C)
        res = ["Random $p=0.8$"] + list(res.values())
        df_list.append(res)
        G = nx.DiGraph()
        random.shuffle(edges)
        G.add_edges_from(edges)
    G = price_dag(N,3,1,N+1)
    tr(G)
    edges = list(G.edges())
    # Price model, m = 3.
    for l in range(nruns):
        Gud = nx.to_undirected(G)
        C =minimum_cycle_basis(Gud)
        res = print_cycle_statistics(G,cycles = C)
        res = ["Price $m=3$"] + list(res.values())
        df_list.append(res)
        G = nx.DiGraph()
        random.shuffle(edges)
        G.add_edges_from(edges)
    G = price_dag(N,5,1,N+1)
    tr(G)
    edges = list(G.edges())
    # Price model, m = 5.
    for l in range(nruns):
        Gud = nx.to_undirected(G)
        C =minimum_cycle_basis(Gud)
        res = print_cycle_statistics(G,cycles = C)
        print(res.keys())
        res = ["Price $m=5$"] + list(res.values())
        df_list.append(res)
        G = nx.DiGraph()
        random.shuffle(edges)
        G.add_edges_from(edges)
    # Column names must match the 19 statistics returned by
    # print_cycle_statistics plus the leading model name.
    data = pd.DataFrame(df_list)
    data.columns =["name","$E_p$","$\\sigma (E_p)$","S_max","S_mean","S_std","C","null_L","E_TR","L_max","N","$\\langle b\\rangle$","D","M","$\\lambda_{\\mathrm{max}}^c$","h_mean","h_std","$\\langle s \\rangle$","s_std","lambda_max_byC"]
    data.index = data.name
    if error_in_mean:
        denom = np.sqrt(nruns)
    else:
        denom =1
    x = range(len(set(data.index)))
    # One errorbar plot per statistic, models on the x-axis.
    for c in data.columns:
        if c!="name":
            y = data.groupby(data.index)[c].mean()
            yerr = data.groupby(data.index)[c].std()/denom
            plt.figure(figsize=(6,6))
            plt.errorbar(x,y,yerr,fmt = "o",capsize= 5,color="blue")
            plt.xlabel("Network model")
            plt.ylabel(c)
            plt.xticks(x,data.groupby(data.index)[c].mean().index, rotation='vertical')
            if fig_save:
                plt.tight_layout()
                plt.savefig("quasiunicity_N_eq_{}_observable_{}.pdf".format(N,c))
            plt.show()
def plot_eigenspectrum(ntype, N, fig_save=False):
    """
    Plot the sorted eigenvalue spectrum of the cycle correlation matrix M_C
    for one (weakly connected) realisation per parameter value of the model.

    Parameters
    ----------
    ntype : str
        "random" (parameter = edge probability p) or "price" (parameter =
        out-degree m).
    N : int
        Number of nodes.
    fig_save : bool
        If True, save the figure to "<ntype>_<N>_eigenspectrum.pdf".
    """
    plt.figure(figsize=(6, 5))
    symbol = ["o", "^", ">", "<", "*", "v", "+", "x", "d", "1"]
    if ntype == "random":
        pvalues = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
    elif ntype == "price":
        pvalues = [2, 3, 4, 5, 6, 7, 8]
    # Pair each parameter with its marker once, instead of calling
    # pvalues.index(p) (an O(n) lookup) inside the loop.
    for marker, p in zip(symbol, pvalues):
        print(p)
        r = 0
        while r < 1:  # resample until one weakly connected graph is found
            if ntype == "random":
                G = random_dag(N, p)
            elif ntype == "price":
                G = price_dag(N, p, 1, N + 1)
            if nx.is_weakly_connected(G):
                tr(G)  # transitive reduction, in place
                Gud = nx.to_undirected(G)
                C = minimum_cycle_basis(Gud)
                egvals = eigenspectrum(G, C)
                plt.scatter([i / len(C) for i in range(len(C))], egvals,
                            marker=marker, label="p=" + str(p))
                r += 1
    plt.xlabel("$1-i/d$", fontsize=17)
    plt.ylabel("$\\lambda^C_i$", fontsize=17)
    plt.tick_params(axis="both", labelsize=17)
    plt.legend(fontsize=14)
    plt.tight_layout()
    if fig_save:
        plt.savefig(ntype + "_" + str(N) + "_eigenspectrum.pdf")
if __name__ =="__main__":
    # Entry point: uncomment the desired experiment.  By default only the
    # figure-7 panels are regenerated from the precomputed Price-model file.
    #calc_mcb("random",500)
    plot_figure_7("price_dags_cycle_data_depina_no_nodes_eq_500_1005.txt",True,20,error_in_mean=True)
    #plot_figure_9("random_dags_cycle_data_DAG_no_nodes_eq_500.txt","price_dags_cycle_data_depina_no_nodes_eq_500_1005.txt",True)
    #plot_eigenspectrum("price",50)
    #test_quasiunicity(100,fig_save = True,nruns=10,error_in_mean = True)
| {"/run_cycles_random_graph.py": ["/cycle_utilities.py"], "/cycle_utilities.py": ["/alg_height.py"]} |
45,290 | vv2246/tr-dag-cycles | refs/heads/main | /alg_height.py | """This contains code to assign a height to every vertex in the DAG.
Height is defined to be the length of the longest path from a source node
to the target node in question.
This is a copy of the version of this file in the james_dag library, taken 5/3/18
Author: Tim Evans
"""
import networkx as nx
def set_heights(DAG, ordered=False, add_labels=False, add_labels_fractional=False):
    """ Find the heights of all nodes.

    Depth-first search starting from the source nodes (nodes with zero
    in-degree).  The height of a node is the length of the longest path from
    any source node to that node.

    Keyword arguments:
    DAG -- networkX digraph forming a DAG
    ordered -- if ordered then node1>node2 is necessary for a path to exist from node1 to node2
    add_labels -- True if want to add 'height' label to each node with height value
    add_labels_fractional -- True if want 'height_fraction' label added, 1 = max height, 0 = source node

    Return
    height -- dictionary such that height[node] is height of node
    """
    height = {}
    # Mark every node unvisited (-1); sources get height 0.
    source_nodes = []
    for node in DAG.nodes():
        height[node] = -1  # i.e. not yet reached from any source
        if DAG.in_degree(node) == 0:
            height[node] = 0
            source_nodes.append(node)
    if ordered:
        source_nodes.sort(reverse=True)  # largest first
    for source in source_nodes:
        height_recursive(DAG, source, height, ordered)
    if add_labels:
        for node in DAG:
            # Graph.nodes[...] replaces Graph.node, removed in networkx 2.4.
            DAG.nodes[node]['height'] = height[node]
    if add_labels_fractional:
        # Bug fix: normalise by the maximum height *value*.  max(height)
        # iterated over the dict's keys (node labels), not the heights.
        max_height = float(max(height.values()))
        for node in DAG:
            DAG.nodes[node]['height_fraction'] = height[node] / max_height
    return height
def height_recursive(DAG, current, height, ordered, depth=0):
    """Recursively searches through the DAG assigning heights (distances from current) to nodes.

    Every successor of `current` whose recorded height is smaller than
    height[current] + 1 is updated and recursed into (depth-first search).
    Adapted from JC code lp_recursive in alg_paths.py.

    Keyword arguments:
    DAG -- networkX digraph forming a DAG
    current -- node currently under investigation, must have a non-negative height
    height -- dictionary where height[n] has the longest path distance for node n (negative value then unvisited)
    ordered -- if ordered then current>=end is necessary for a path to exist
    depth -- current distance from current nodes
    """
    # Bug fix: materialise the successors first.  In networkx >= 2.0,
    # DAG.successors() returns an iterator, which has no .sort() method.
    startneighbours = list(DAG.successors(current))
    if ordered:
        startneighbours.sort(reverse=True)  # largest first to favour greedy paths first
    for node in startneighbours:
        height_n = height[node]
        height_current = height[current]
        if height_n < height_current + 1:
            # Found a longer path to `node`: update and propagate downstream.
            height[node] = height_current + 1
            height_recursive(DAG, node, height, ordered, depth + 1)
    return None
if __name__ == "__main__":
    # Smoke test: heights on the 3-node DAG 0 -> 1 -> 2 with shortcut 0 -> 2
    # should be 0, 1, 2 (the longest path to node 2 goes through node 1).
    print('Testing alg_height')
    DAG = nx.DiGraph()
    DAG.add_edges_from([(0, 1), (1, 2), (0, 2)])
    height = set_heights(DAG, ordered=True, add_labels=True)
    print('node height node[height]')
    for node in DAG.nodes():
        # Graph.nodes[...] replaces Graph.node, removed in networkx 2.4.
        print(node, height[node], DAG.nodes[node]['height'])
45,291 | vv2246/tr-dag-cycles | refs/heads/main | /cycle_utilities.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 14 10:33:03 2020
@author: vvasiliau
"""
import networkx as nx
from collections import defaultdict
from scipy.linalg import null_space
import matplotlib.pyplot as plt
from alg_height import *
import itertools
import numpy as np
import random
import string
from numpy.linalg import lstsq
from scipy.linalg import orth
import scipy
from collections import defaultdict
from networkx.utils import not_implemented_for, pairwise
from networkx import minimum_cycle_basis
import math
import pandas as pd
def eigenspectrum(G, cycle_basis):
    """
    Return the sorted eigenvalues of the cycle correlation matrix M_C = M M^T,
    where row i of M is the 0/1 edge-incidence vector of cycle i of
    `cycle_basis` over the edges of G.

    Parameters
    ----------
    G : networkx.DiGraph
    cycle_basis : list of node lists, one per basis cycle.

    Returns
    -------
    list of eigenvalues in ascending order.
    """
    # Give every edge of G a column index (enumerate replaces the manual
    # counter; the unused reverse map was dropped).
    edge_id = {e: i for i, e in enumerate(G.edges())}
    E_0 = G.number_of_edges()
    C_vector = []
    for c in cycle_basis:
        gc = get_subgraph(G, c)
        vec = np.zeros(E_0)
        for e in gc.edges():
            vec[edge_id[e]] = 1
        C_vector.append(vec)
    M = np.array(C_vector)
    M_C = M.dot(M.T)  # cycle-cycle overlap (correlation) matrix
    return sorted(np.linalg.eigvals(M_C))
def _random_subset(seq,m):
# """ Return m non-unique elements from seq.
# This differs from random.sample which can return repeated
# elements if seq holds repeated elements.
# """
targets=random.sample(seq,m)
return targets
def complete_dag(n, c):
    """Build the complete DAG on n nodes: edge i -> j whenever i < j.

    Returns a pair (attachment_list, graph) where attachment_list contains
    each node i repeated c times plus once per outgoing edge of i.
    """
    graph = nx.DiGraph()
    attachment_list = []
    for i in range(n):
        graph.add_node(i)
        attachment_list.extend([i] * c)
        successors = list(range(i + 1, n))
        graph.add_edges_from((i, j) for j in successors)
        attachment_list.extend([i] * len(successors))
    return attachment_list, graph
def lattice_dag_2D(N, L):
    """Directed 2D lattice: keep each edge of an N x L grid graph only when
    its first endpoint's second coordinate does not exceed the second's,
    orienting the edge in that direction."""
    undirected_grid = nx.grid_2d_graph(N, L)
    dag = nx.DiGraph()
    dag.add_edges_from((u, v) for u, v in undirected_grid.edges() if u[1] <= v[1])
    return dag
def russian_doll_dag(N):
    """Grow a 'russian doll' DAG: start from a fixed 7-node seed and keep
    wrapping three-node shells around it until at least N nodes exist."""
    seed_edges = [(1, 2), (2, 4), (1, 3), (3, 4), (7, 1), (7, 6), (6, 5), (4, 5)]
    G = nx.DiGraph()
    G.add_edges_from(seed_edges)
    n = 8
    while G.number_of_nodes() < N:
        shell = [(n - 3, n), (n + 1, n), (n + 2, n + 1), (n + 2, n - 1)]
        G.add_edges_from(shell)
        n += 3
    return G
def price_dag(n, m, c,delta ,seed=None):
    """Return random graph using Price cumulative advantage model.

    A graph of n nodes is grown by attaching new nodes each with m
    edges that are preferentially attached to existing nodes with high
    degree.

    Parameters
    ----------
    n : int
        Number of nodes
    m : int
        Number of edges to attach from a new node to existing nodes
    c : int
        Number of times a node with out-degree=0 is added to the target list.
        c = 1 Price original model
        c = m Directed Barabasi-Albert
    delta : NOTE(review) -- accepted but never used in the body (the
        docstring's "node t can only connect to nodes that are t-delta or
        newer" rule is not implemented here).
    seed : int, optional
        Seed for random number generator (default=None).

    Returns
    -------
    G : Graph

    Notes
    -----
    The initialization is a complete DAG with m nodes.

    References
    ----------
    .. [1] de-Solla Price Network of Scientific Publications
    """
    if m < 1 or m >=n:
        raise nx.NetworkXError(\
            "Price network must have m>=1 and m<n, m=%d,n=%d"%(m,n))
    if seed is not None:
        random.seed(seed)
    # Add m initial nodes (m0 in barabasi-speak)
    targets, G = complete_dag(m,c)
    # Relabel the seed graph's nodes from 0..m-1 to 1..m.
    Gnew = nx.DiGraph()
    for i, j in G.edges():
        Gnew.add_edge(i+1,j+1)
    G = Gnew.copy()
    targets = [i+1 for i in targets]
    # List of existing nodes, with nodes repeated once for each adjacent edge
    repeated_nodes=[]
    # Start adding the other n-m nodes. The first node is m.
    source=m+1
    while source<n+1:
        # Add edges to m nodes from the source.
        G.add_edges_from(zip(targets,[source]*m))
        # Add one node to the list for each new edge just created.
        repeated_nodes.extend(targets)
        # And the new node "source" has c times to add to the list.
        repeated_nodes.extend([source]*c)
        # Now choose m unique nodes from the existing nodes
        # Pick uniformly from repeated_nodes (preferential attachement)
        targets = _random_subset(repeated_nodes,m)
        source += 1
    return G
def random_dag(N, P):
    """Erdos-Renyi style DAG on nodes 1..N: each unordered pair {i, j} is
    linked from the smaller to the larger label, independently with
    probability P."""
    G = nx.DiGraph()
    G.add_nodes_from(range(1, N + 1))
    # combinations() yields each pair exactly once; one random draw per pair.
    for u, v in itertools.combinations(range(1, N + 1), 2):
        if random.random() <= P:
            G.add_edge(min(u, v), max(u, v))
    return G
def tr(DAG):
    """Transitive reduction of a DAG, performed in place.

    Each edge (a, b) is tentatively removed; if b remains reachable from a
    the edge was transitively implied and stays removed, otherwise it is
    restored.  Courtesy of J. Clough.

    Input  : networkx DiGraph (modified in place)
    Returns: the same, transitively reduced, DiGraph
    """
    # Snapshot the edge list first, since edges are removed while iterating.
    # (The dead bookkeeping variables i/print_limit/print_counter from the
    # original were removed.)
    for a, b in list(DAG.edges()):
        DAG.remove_edge(a, b)
        if not nx.has_path(DAG, a, b):
            DAG.add_edge(a, b)
    return DAG
def print_cycle_statistics(graph,cycles=None, return_eigvals = False):
    """
    Compute a dictionary of cycle statistics for a DAG.

    The graph is copied and transitively reduced; the minimum cycle basis
    (MCB) of its undirected version is then analysed, or the caller-supplied
    `cycles` basis is used instead.

    Parameters
    ----------
    graph : networkx.DiGraph
        The DAG to analyse (not modified; a copy is reduced).
    cycles : list of node lists, optional
        Precomputed cycle basis; if None, the MCB is computed here.
    return_eigvals : bool
        If True, also return the eigenvalues of the cycle correlation matrix.

    Returns
    -------
    dict of statistics, or (dict, eigvals) when return_eigvals is True.
    """
    G = graph.copy()
    E_0 = G.number_of_edges()
    tr(G)
    ###
    # Compute MCB
    ###
    if cycles == None :
        print("No MCB found, computing MCB")
        #E = G.number_of_edges()
        Gud = G.copy()
        Gud = Gud.to_undirected()
        C = minimum_cycle_basis(Gud)
        print("Betti 1= ",len(C), "Observed MCB=" ,G.number_of_edges()-G.number_of_nodes()+1)
    elif cycles != None:
        C = cycles
    ###
    # Find cycle vectors
    ###
    # Map each edge to a column index (and keep the reverse map).
    C_vector = []
    edge_id = {}
    i=0
    id_edge = {}
    for e in G.edges():
        edge_id[e] =i
        id_edge[i] = e
        i+=1
    balance_norm = []
    balance = []
    cycle_size = []
    cycle_type = []
    for c in C:
        gc = get_subgraph(G,c)
        #print(gc.nodes())
        cycle_size.append(gc.number_of_nodes())
        # 0/1 incidence vector of this cycle over the edges of G.
        vec = np.zeros(E_0)
        for e in gc.edges():
            vec[edge_id[e]] = 1
        C_vector.append(vec)#cycle_basis_vector(G,c,edge_id))
        ###
        ## Find subgraph longest, mean, shortest path
        ##
        # A cycle with more than one sink is a "mixer" (M), else a "diamond" (D).
        sinks,sources = [n for n in gc if gc.out_degree(n)==0], [n for n in gc if gc.in_degree(n)==0]
        if len(sinks)>1:
            cycle_type.append("M")
        else:
            cycle_type.append("D")
        all_paths = []
        for src in sources:
            for snk in sinks:
                all_paths+=list(nx.all_simple_paths(gc,src,snk))
        all_paths = [len(x) for x in all_paths]
        balance.append(np.std(all_paths)/np.mean(all_paths))#2*(np.mean(all_paths)-min(all_paths))/(max(all_paths)-min(all_paths)))
        balance_norm.append(np.std(all_paths)/max(all_paths))#2*(np.mean(all_paths)-min(all_paths))/(max(all_paths)-min(all_paths)))
    ###
    # Compute edge laplacian and cycle correlation matrix
    ###
    balance = [x for x in balance if np.isnan(x)==False]
    balance_norm = [x for x in balance_norm if np.isnan(x)==False]
    M = np.array(C_vector)
    M_C = M.dot(M.T)
    M_E = M.T.dot(M)
    M_C =np.absolute(M_C)
    S = sum([len(c) for c in C])
    edge_part = {e:0 for e in G.edges()}
    ###
    # Compute edge participation
    ###
    # Number of basis cycles whose node set contains both endpoints of each edge.
    for e1,e2 in G.edges():
        for c in C:
            if e1 in c and e2 in c:
                edge_part[(e1,e2)] += 1
    ###
    # Compute largest effective cycle
    ###
    # NOTE(review): if np.linalg.eig raises here, the bare except leaves
    # `eigvals` undefined and the code fails later with a NameError.
    try:
        eigvals = np.linalg.eig(M_C)[0]
        x = [i/len(C) for i in range(len(C))]
    except:
        pass
    # A_C: off-diagonal overlaps only; L_C: signed Laplacian of the cycle
    # adjacency, used below for the nullity (connected components) count.
    A_C = M_C.copy()
    L_C = M_C.copy()
    for i in range(len(A_C)):
        A_C[i,i] = 0
    for i in range(len(A_C)):
        L_C[i,i] = -np.sum(A_C[i],axis =0)
    L_C = L_C*-1
    print(np.mean(eigvals),np.mean([len(c) for c in C]))
    lambda_max_a =np.linalg.eigvals(A_C)[list(eigvals).index(max(eigvals))]
    ###
    # Compute cycle heights
    ###
    # Heights are computed on the ORIGINAL (non-reduced) graph.
    H = set_heights(graph)
    cycle_height = []
    var_cycle_height = []
    for c in C:
        h = [H[n] for n in c]
        cycle_height.append((np.mean(h),np.std(h)))
        var_cycle_height.append(max(h)-min(h))
    result = {}
    result["Mean edge participation"] = np.mean(list(edge_part.values()))
    result["Std edge participation"] = np.std(list(edge_part.values()))
    result["Largest cycle size"] = max([len(c) for c in C])
    result["Mean cycle size"] = np.mean([len(c) for c in C])
    result["Std cycle size"] = np.std([len(c) for c in C])
    result["Number of cycles"] = len(C)
    result["Number cycle connected components"] = (null_space(L_C)).shape[1]
    result["Number of TR edges"] = G.number_of_edges()
    result["Longest path"] = nx.dag_longest_path_length(G)
    result["Number of nodes"] = G.number_of_nodes()
    #result["Number of edges"] = E_0
    result["Balance"] = np.mean(balance)
    #result["Balance_norm"] = np.mean(balance_norm)
    result["Number_diamonds"] = len([x for x in cycle_type if x == "D"])
    result["Number_mixers"] = len([x for x in cycle_type if x == "M"])
    result["Largest eigenvalue M_C"] = np.real(max(eigvals))
    #result["Largest eigenvalue A_C"] = np.real(max(np.linalg.eigvals(A_C)))
    result["Mean cycle height"] = np.mean([x[0] for x in cycle_height])#/ nx.dag_longest_path_length(G)
    result["Std cycle height"] = np.std([x[0] for x in cycle_height])#/ nx.dag_longest_path_length(G)
    result["Mean stretch"] =np.mean(var_cycle_height)#max([x[0]/ nx.dag_longest_path_length(G) for x in cycle_height])-min([x[0]/ nx.dag_longest_path_length(G) for x in cycle_height])
    result["Std stretch"] =np.std(var_cycle_height)#max([x[0]/ nx.dag_longest_path_length(G) for x in cycle_height])-min([x[0]/ nx.dag_longest_path_length(G) for x in cycle_height])
    #result["eigenvalue_ratio"] = np.real(lambda_max_a)/np.real(max(eigvals))
    result["Largest M_C by corr size"] = np.real(max(eigvals))/[len(c) for c in C][list(eigvals).index(max(eigvals))]
    if return_eigvals ==True:
        return result,eigvals
    else:
        return result
def get_subgraph(G, nodelist):
    """Return the subgraph of G induced on `nodelist`, with low-degree nodes
    pruned.

    Edges keep their original orientation.  After induction, nodes with
    in-degree + out-degree < 2 are removed in a single pass (they cannot be
    part of any cycle of the underlying undirected graph).

    Parameters
    ----------
    G : networkx.DiGraph
    nodelist : iterable of nodes of G

    Returns
    -------
    networkx.DiGraph
    """
    D = nx.DiGraph()
    D.add_nodes_from(nodelist)
    for n1, n2 in itertools.combinations(nodelist, 2):
        # has_edge is the idiomatic O(1) membership test (edge-list scans
        # via `in G.edges()` were O(E) per pair under networkx 1.x).
        if G.has_edge(n1, n2):
            D.add_edge(n1, n2)
        elif G.has_edge(n2, n1):
            D.add_edge(n2, n1)
    # Single prune pass: degrees are evaluated before any removal.
    remove = [n for n in D.nodes() if D.in_degree(n) + D.out_degree(n) < 2]
    D.remove_nodes_from(remove)
    return D
| {"/run_cycles_random_graph.py": ["/cycle_utilities.py"], "/cycle_utilities.py": ["/alg_height.py"]} |
45,305 | poteralski/abidria-api | refs/heads/master | /experiences/tests/unit_test_views.py | from mock import Mock
from abidria.entities import Picture
from experiences.entities import Experience
from experiences.views import ExperiencesView
class TestExperiencesView(object):

    def test_returns_experiences_serialized_and_200(self):
        """The view serializes every experience the interactor returns."""
        first_picture = Picture(small_url='small.a', medium_url='medium.a', large_url='large.a')
        first_experience = Experience(id=1, title='A', description='some', picture=first_picture)
        second_picture = Picture(small_url='small.b', medium_url='medium.b', large_url='large.b')
        second_experience = Experience(id=2, title='B', description='other', picture=second_picture)
        interactor_mock = Mock()
        interactor_mock.execute.return_value = [first_experience, second_experience]

        body, status = ExperiencesView(interactor_mock).get()

        expected_body = [
            {
                'id': '1',
                'title': 'A',
                'description': 'some',
                'picture': {'small_url': 'small.a',
                            'medium_url': 'medium.a',
                            'large_url': 'large.a'}
            },
            {
                'id': '2',
                'title': 'B',
                'description': 'other',
                'picture': {'small_url': 'small.b',
                            'medium_url': 'medium.b',
                            'large_url': 'large.b'}
            },
        ]
        assert status == 200
        assert body == expected_body
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,306 | poteralski/abidria-api | refs/heads/master | /scenes/factories.py | from .repositories import SceneRepo
from .interactors import GetScenesFromExperienceInteractor
from .views import ScenesView
class SceneRepoFactory(object):
    # Builds the repository that provides access to stored scenes.

    @staticmethod
    def create():
        """Return a new SceneRepo instance."""
        return SceneRepo()
class GetScenesFromExperienceInteractorFactory(object):
    """Assembles the interactor together with its repository dependency."""

    @staticmethod
    def create():
        return GetScenesFromExperienceInteractor(scene_repo=SceneRepoFactory.create())
class ScenesViewFactory(object):
    """Wires a ScenesView with a fully-constructed interactor."""

    @staticmethod
    def create():
        interactor = GetScenesFromExperienceInteractorFactory.create()
        return ScenesView(interactor)
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
class Experience(object):
    """Domain entity describing an experience.

    Attributes (id, title, description, picture) are exposed read-only via
    properties; equality compares all attributes.
    """

    def __init__(self, id, title, description, picture):
        self._id = id
        self._title = title
        self._description = description
        self._picture = picture

    @property
    def id(self):
        return self._id

    @property
    def title(self):
        return self._title

    @property
    def description(self):
        return self._description

    @property
    def picture(self):
        return self._picture

    def __eq__(self, other):
        # Bug fix: comparing against a non-Experience used to raise
        # AttributeError for operands without __dict__ (e.g. ints, strings).
        # Returning NotImplemented lets Python fall back to its default
        # handling, so `experience == 5` is simply False.
        if not isinstance(other, Experience):
            return NotImplemented
        return self.__dict__ == other.__dict__
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,308 | poteralski/abidria-api | refs/heads/master | /experiences/tests/unit_test_interactors.py | from mock import Mock
from experiences.entities import Experience
from experiences.interactors import GetAllExperiencesInteractor
class TestGetAllExperiences(object):

    def test_returns_repo_response(self):
        """The interactor passes through whatever the repo returns."""
        stored_experiences = [
            Experience(id=1, title='A', description='some', picture=None),
            Experience(id=2, title='B', description='other', picture=None),
        ]
        experiences_repo = Mock()
        experiences_repo.get_all_experiences = Mock(return_value=stored_experiences)

        response = GetAllExperiencesInteractor(experiences_repo).execute()

        assert response == stored_experiences
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,309 | poteralski/abidria-api | refs/heads/master | /experiences/repositories.py | from abidria.entities import Picture
from .models import ORMExperience
from .entities import Experience
class ExperienceRepo(object):
    """Data-access layer mapping ORMExperience rows to Experience entities."""

    def _decode_db_experience(self, db_experience):
        """Convert one ORMExperience row into a domain Experience entity.

        The picture is translated to a Picture value object when present,
        otherwise left as None.
        """
        picture = None
        if db_experience.picture:
            picture = Picture(small_url=db_experience.picture.small.url,
                              medium_url=db_experience.picture.medium.url,
                              large_url=db_experience.picture.large.url)
        return Experience(id=db_experience.id,
                          title=db_experience.title,
                          description=db_experience.description,
                          picture=picture)

    def get_all_experiences(self):
        """Return every stored experience as a list of Experience entities."""
        # Comprehension replaces the manual append loop.
        return [self._decode_db_experience(db_experience)
                for db_experience in ORMExperience.objects.all()]
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,310 | poteralski/abidria-api | refs/heads/master | /experiences/tests/test_integration.py | import json
from django.test import TestCase
from django.test import Client
from django.core.urlresolvers import reverse
from experiences.models import ORMExperience
class ExperiencesTestCase(TestCase):
    """Integration test for the experiences listing endpoint."""

    def test_experiences_returns_all_experiences(self):
        # Given two persisted experiences
        first = ORMExperience.objects.create(title='Exp a', description='some description')
        second = ORMExperience.objects.create(title='Exp b', description='other description')

        # When the experiences endpoint is requested
        response = Client().get(reverse('experiences'))

        # Then both are serialized with a null picture
        assert response.status_code == 200
        parsed = json.loads(response.content)
        expected = [
            {
                'id': str(first.id),
                'title': 'Exp a',
                'description': 'some description',
                'picture': None
            },
            {
                'id': str(second.id),
                'title': 'Exp b',
                'description': 'other description',
                'picture': None
            },
        ]
        assert parsed == expected
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,311 | poteralski/abidria-api | refs/heads/master | /experiences/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-16 12:12
from __future__ import unicode_literals
from django.db import migrations, models
import stdimage.models
import stdimage.utils
class Migration(migrations.Migration):
    # Auto-generated initial migration for the experiences app:
    # creates the ORMExperience table.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='ORMExperience',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=30)),
                ('description', models.TextField(blank=True)),
                # Uploaded files are renamed to a UUID under the 'experiences' path.
                ('picture', stdimage.models.StdImageField(blank=True, upload_to=stdimage.utils.UploadToUUID(path='experiences'))),
            ],
            options={
                'verbose_name': 'Experience',
                'verbose_name_plural': 'Experiences',
            },
        ),
    ]
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,312 | poteralski/abidria-api | refs/heads/master | /experiences/admin.py | from django.contrib import admin
from .models import ORMExperience
class ExperienceAdmin(admin.ModelAdmin):
    # Change-list shows only the title; admin search covers title and description.
    list_display = ('title', )
    search_fields = ('title', 'description')


# Expose Experience management in the Django admin site.
admin.site.register(ORMExperience, ExperienceAdmin)
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,313 | poteralski/abidria-api | refs/heads/master | /scenes/tests/unit_test_interactors.py | from mock import Mock
from scenes.interactors import GetScenesFromExperienceInteractor
from scenes.entities import Scene
class TestGetScenesFromExperience(object):
    """Unit test for GetScenesFromExperienceInteractor."""

    def test_returns_scenes(self):
        # Fixture: two scenes belonging to experience 1.
        scenes = [
            Scene(id=2, title='', description='', picture=None, latitude=1, longitude=0, experience_id=1),
            Scene(id=3, title='', description='', picture=None, latitude=1, longitude=0, experience_id=1),
        ]
        repo = Mock()
        repo.get_scenes = Mock(return_value=scenes)

        result = GetScenesFromExperienceInteractor(repo).set_params(experience_id=1).execute()

        # The interactor must forward the id and return the repo result untouched.
        repo.get_scenes.assert_called_once_with(experience_id=1)
        assert result == scenes
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,314 | poteralski/abidria-api | refs/heads/master | /experiences/views.py | from .serializers import MultipleExperiencesSerializer
class ExperiencesView(object):
    """Framework-agnostic view returning all experiences as (body, status)."""

    def __init__(self, get_all_experiences_interactor):
        self.get_all_experiences_interactor = get_all_experiences_interactor

    def get(self):
        # Delegate to the interactor, then serialize its result.
        all_experiences = self.get_all_experiences_interactor.execute()
        serialized = MultipleExperiencesSerializer.serialize(all_experiences)
        return serialized, 200
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
class GetScenesFromExperienceInteractor(object):
    """Use case: fetch all scenes that belong to one experience."""

    def __init__(self, scene_repo):
        self._repo = scene_repo

    def set_params(self, experience_id):
        # Fluent setter so callers can chain .set_params(...).execute().
        self._experience_id = experience_id
        return self

    def execute(self):
        return self._repo.get_scenes(experience_id=self._experience_id)
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,316 | poteralski/abidria-api | refs/heads/master | /scenes/views.py | from .serializers import MultipleScenesSerializer
class ScenesView(object):
    """Framework-agnostic view returning one experience's scenes as (body, status)."""

    def __init__(self, get_scenes_from_experience_interactor):
        self.get_scenes_from_experience_interactor = get_scenes_from_experience_interactor

    def get(self, experience):
        interactor = self.get_scenes_from_experience_interactor
        scenes = interactor.set_params(experience_id=experience).execute()
        return MultipleScenesSerializer.serialize(scenes), 200
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
class Picture(object):
    """Read-only value object holding the three resized picture urls."""

    def __init__(self, small_url, medium_url, large_url):
        self._small = small_url
        self._medium = medium_url
        self._large = large_url

    @property
    def small_url(self):
        return self._small

    @property
    def medium_url(self):
        return self._medium

    @property
    def large_url(self):
        return self._large
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
class GetAllExperiencesInteractor(object):
    """Use case: list every experience in the repository."""

    def __init__(self, experiences_repo):
        self._repo = experiences_repo

    def execute(self):
        # No parameters needed: forward straight to the repository.
        return self._repo.get_all_experiences()
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
class EntityDoesNotExist(Exception):
    """Signals that a requested entity was not found.

    NOTE(review): no raiser is visible in this file; presumably raised by
    repository lookups — confirm against callers.
    """
    pass
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,320 | poteralski/abidria-api | refs/heads/master | /scenes/repositories.py | from abidria.entities import Picture
from .models import ORMScene
from .entities import Scene
class SceneRepo(object):
    """Data-access layer mapping ORMScene rows to Scene entities."""

    def _decode_db_scene(self, db_scene):
        # Translate one ORM row into a plain domain entity; a missing
        # picture field decodes to None.
        picture = None
        if db_scene.picture:
            orm_picture = db_scene.picture
            picture = Picture(small_url=orm_picture.small.url,
                              medium_url=orm_picture.medium.url,
                              large_url=orm_picture.large.url)
        return Scene(id=db_scene.id,
                     title=db_scene.title,
                     description=db_scene.description,
                     picture=picture,
                     latitude=db_scene.latitude,
                     longitude=db_scene.longitude,
                     experience_id=db_scene.experience_id)

    def get_scenes(self, experience_id):
        """Return all scenes attached to the given experience id."""
        rows = ORMScene.objects.filter(experience_id=experience_id)
        return [self._decode_db_scene(row) for row in rows]
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,321 | poteralski/abidria-api | refs/heads/master | /experiences/models.py | from django.db import models
from stdimage.models import StdImageField
from stdimage.utils import UploadToUUID, pre_delete_delete_callback, pre_save_delete_callback
class ORMExperience(models.Model):
    # Short human-readable name; required (blank=False).
    title = models.CharField(max_length=30, blank=False)
    # Optional free-form text.
    description = models.TextField(blank=True)
    # Optional picture; stdimage generates three resized variations and the
    # upload is renamed to a UUID under the 'experiences' path.
    picture = StdImageField(upload_to=UploadToUUID(path='experiences'),
                            variations={'large': (1280, 1280),
                                        'medium': (640, 640),
                                        'small': (320, 320)},
                            blank=True)

    class Meta:
        verbose_name = 'Experience'
        verbose_name_plural = 'Experiences'

    def __str__(self):
        return self.title


# stdimage cleanup hooks on delete/replace of a row's picture.
# NOTE(review): a callback named pre_delete_delete_callback is wired to the
# post_delete signal — presumably intentional, but confirm against the
# django-stdimage documentation.
models.signals.post_delete.connect(pre_delete_delete_callback, sender=ORMExperience)
models.signals.pre_save.connect(pre_save_delete_callback, sender=ORMExperience)
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,322 | poteralski/abidria-api | refs/heads/master | /abidria/views.py | import json
from django.http import HttpResponse
from django.views import View
class ViewWrapper(View):
    """Adapts a framework-agnostic view (built by view_factory) to Django."""

    view_factory = None  # injected externally; must expose create()

    def get(self, request, *args, **kwargs):
        # Merge query-string parameters into the url kwargs before delegating.
        kwargs.update(request.GET.dict())
        inner_view = self.view_factory.create()
        body, status = inner_view.get(**kwargs)
        payload = json.dumps(body)
        return HttpResponse(payload, status=status, content_type='application/json')
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,323 | poteralski/abidria-api | refs/heads/master | /scenes/serializers.py | from abidria.serializers import PictureSerializer
class MultipleScenesSerializer(object):
    """Serializes a list of Scene entities to a list of dicts."""

    @staticmethod
    def serialize(scenes):
        return list(map(SceneSerializer.serialize, scenes))
class SceneSerializer(object):
    """Maps one Scene entity to a json-ready dict."""

    @staticmethod
    def serialize(scene):
        # Coordinates come in as Decimal and must be emitted as floats;
        # ids are stringified for the json payload.
        return dict(id=str(scene.id),
                    title=scene.title,
                    description=scene.description,
                    picture=PictureSerializer.serialize(scene.picture),
                    latitude=float(scene.latitude),
                    longitude=float(scene.longitude),
                    experience_id=str(scene.experience_id))
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,324 | poteralski/abidria-api | refs/heads/master | /experiences/serializers.py | from abidria.serializers import PictureSerializer
class MultipleExperiencesSerializer(object):
    """Serializes a list of Experience entities to a list of dicts."""

    @staticmethod
    def serialize(experiences):
        return list(map(ExperienceSerializer.serialize, experiences))
class ExperienceSerializer(object):
    """Maps one Experience entity to a json-ready dict."""

    @staticmethod
    def serialize(experience):
        # The id is stringified for the json payload.
        return dict(id=str(experience.id),
                    title=experience.title,
                    description=experience.description,
                    picture=PictureSerializer.serialize(experience.picture))
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,325 | poteralski/abidria-api | refs/heads/master | /scenes/tests/test_repositories.py | from decimal import Decimal
from django.test import TestCase
from experiences.models import ORMExperience
from scenes.models import ORMScene
from scenes.repositories import SceneRepo
from scenes.entities import Scene
class ExperienceRepoTestCase(TestCase):
    """Integration tests for SceneRepo against the real ORM.

    NOTE(review): the class name looks copy-pasted from the experiences
    app -- SceneRepoTestCase would describe it better.
    """

    def test_get_all_scenes_of_an_experience(self):
        """get_scenes must return only the scenes of the given experience
        (order unspecified)."""
        orm_exp = ORMExperience.objects.create(title='Exp a', description='some description')
        orm_sce_1 = ORMScene.objects.create(title='S1', description='desc 1', latitude=Decimal('1.2'),
                                            longitude=Decimal('-3.4'), experience=orm_exp)
        orm_sce_2 = ORMScene.objects.create(title='S2', description='desc 2', latitude=Decimal('5.6'),
                                            longitude=Decimal('-7.8'), experience=orm_exp)
        # a scene not attached to the experience must be filtered out
        ORMScene.objects.create(title='other', description='not belongs to experience',
                                latitude=Decimal('5.6'), longitude=Decimal('-7.8'))
        result = SceneRepo().get_scenes(experience_id=orm_exp.id)
        scene_1 = Scene(id=orm_sce_1.id, title='S1', description='desc 1', picture=None,
                        latitude=Decimal('1.2'), longitude=Decimal('-3.4'), experience_id=orm_exp.id)
        scene_2 = Scene(id=orm_sce_2.id, title='S2', description='desc 2', picture=None,
                        latitude=Decimal('5.6'), longitude=Decimal('-7.8'), experience_id=orm_exp.id)
        # repository does not guarantee ordering, so accept both permutations
        assert result == [scene_1, scene_2] or result == [scene_2, scene_1]
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,326 | poteralski/abidria-api | refs/heads/master | /experiences/factories.py | from .repositories import ExperienceRepo
from .interactors import GetAllExperiencesInteractor
from .views import ExperiencesView
class ExperienceRepoFactory(object):
    """Builds ExperienceRepo instances (stateless factory)."""

    @staticmethod
    def create():
        """Return a fresh ExperienceRepo."""
        repo = ExperienceRepo()
        return repo
class GetAllExperiencesInteractorFactory(object):
    """Wires a GetAllExperiencesInteractor with its repository."""

    @staticmethod
    def create():
        """Build the interactor on top of a freshly created repo."""
        return GetAllExperiencesInteractor(ExperienceRepoFactory.create())
class ExperiencesViewFactory(object):
    """Assembles the ExperiencesView with all its dependencies."""

    @staticmethod
    def create():
        """Build the view over a fully wired interactor."""
        return ExperiencesView(GetAllExperiencesInteractorFactory.create())
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,327 | poteralski/abidria-api | refs/heads/master | /scenes/tests/test_integration.py | import json
from decimal import Decimal
from django.test import TestCase
from django.test import Client
from django.core.urlresolvers import reverse
from experiences.models import ORMExperience
from scenes.models import ORMScene
class ExperienceDetailTestCase(TestCase):
    """End-to-end test of the /scenes/ endpoint through the Django test client.

    NOTE(review): the class name says "ExperienceDetail" but the test hits
    the scenes list endpoint.
    """

    def test_scenes_from_experience_returns_experience(self):
        """GET /scenes/?experience=<id> returns the experience's scenes as JSON."""
        exp_c = ORMExperience.objects.create(title='Exp c', description='stuffs')
        scene_d = ORMScene.objects.create(title='Scene d', description='D',
                                          latitude=Decimal('1.2'), longitude=Decimal('-3.4'), experience=exp_c)
        scene_e = ORMScene.objects.create(title='Scene e', description='E',
                                          latitude=Decimal('5.6'), longitude=Decimal('-7.8'), experience=exp_c)
        client = Client()
        response = client.get(reverse('scenes'), {'experience': str(exp_c.id)})
        assert response.status_code == 200
        body = json.loads(response.content)
        # NOTE(review): expects scene_e before scene_d (reverse creation
        # order) -- this assumes a specific repository ordering; confirm.
        assert body == [
            {
                'id': str(scene_e.id),
                'title': 'Scene e',
                'description': 'E',
                'picture': None,
                'latitude': 5.6,
                'longitude': -7.8,
                'experience_id': str(exp_c.id),
            },
            {
                'id': str(scene_d.id),
                'title': 'Scene d',
                'description': 'D',
                'picture': None,
                'latitude': 1.2,
                'longitude': -3.4,
                'experience_id': str(exp_c.id),
            },
        ]
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,328 | poteralski/abidria-api | refs/heads/master | /abidria/urls.py | from django.conf.urls import url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from experiences.factories import ExperiencesViewFactory
from scenes.factories import ScenesViewFactory
from .views import ViewWrapper
# URL routes: views are built per-request by ViewWrapper from their factories.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^experiences/$',
        ViewWrapper.as_view(view_factory=ExperiencesViewFactory),
        name='experiences'),
    url(r'^scenes/$',
        ViewWrapper.as_view(view_factory=ScenesViewFactory),
        name='scenes'),
]

# serve uploaded media directly only on local deployments (in production a
# web server is expected to serve MEDIA_ROOT)
if settings.LOCAL_DEPLOY:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,329 | poteralski/abidria-api | refs/heads/master | /experiences/tests/test_repositories.py | from django.test import TestCase
from experiences.entities import Experience
from experiences.models import ORMExperience
from experiences.repositories import ExperienceRepo
class ExperienceRepoTestCase(TestCase):
    """Integration tests for ExperienceRepo against the real ORM."""

    def test_get_all_experiences_returns_all_experiences(self):
        """get_all_experiences maps every ORM row to an Experience entity."""
        orm_exp_a = ORMExperience.objects.create(title='Exp a', description='some description')
        orm_exp_b = ORMExperience.objects.create(title='Exp b', description='other description')
        result = ExperienceRepo().get_all_experiences()
        exp_a = Experience(id=orm_exp_a.id, title='Exp a', description='some description', picture=None)
        exp_b = Experience(id=orm_exp_b.id, title='Exp b', description='other description', picture=None)
        # NOTE(review): asserts creation order -- assumes the repo preserves
        # insertion order; confirm.
        assert result == [exp_a, exp_b]
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,330 | poteralski/abidria-api | refs/heads/master | /abidria/serializers.py | class PictureSerializer(object):
@staticmethod
def serialize(picture):
if picture is None:
return None
return {
'small_url': picture.small_url,
'medium_url': picture.medium_url,
'large_url': picture.large_url,
}
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,331 | poteralski/abidria-api | refs/heads/master | /scenes/tests/unit_test_views.py | from decimal import Decimal
from mock import Mock
from abidria.entities import Picture
from scenes.entities import Scene
from scenes.views import ScenesView
class TestScenesDetailView(object):
    """Unit test for ScenesView with a mocked interactor."""

    def test_returns_scenes_serialized_and_200(self):
        """The view forwards the experience id to the interactor and
        serializes the returned scenes with HTTP 200."""
        picture_b = Picture(small_url='small.b', medium_url='medium.b', large_url='large.b')
        picture_c = Picture(small_url='small.c', medium_url='medium.c', large_url='large.c')
        scene_b = Scene(id=1, title='B', description='some', picture=picture_b,
                        latitude=Decimal('1.2'), longitude=Decimal('-3.4'), experience_id=1)
        scene_c = Scene(id=2, title='C', description='other', picture=picture_c,
                        latitude=Decimal('5.6'), longitude=Decimal('-7.8'), experience_id=1)
        # the interactor exposes a fluent set_params().execute() API
        interactor_mock = Mock()
        interactor_mock.set_params.return_value = interactor_mock
        interactor_mock.execute.return_value = [scene_b, scene_c]
        body, status = ScenesView(interactor_mock).get(experience='1')
        interactor_mock.set_params.assert_called_once_with(experience_id='1')
        assert status == 200
        # ids/experience_ids come back as strings, coordinates as floats
        assert body == [
            {
                'id': '1',
                'title': 'B',
                'description': 'some',
                'picture': {
                    'small_url': 'small.b',
                    'medium_url': 'medium.b',
                    'large_url': 'large.b',
                },
                'latitude': 1.2,
                'longitude': -3.4,
                'experience_id': '1',
            },
            {
                'id': '2',
                'title': 'C',
                'description': 'other',
                'picture': {
                    'small_url': 'small.c',
                    'medium_url': 'medium.c',
                    'large_url': 'large.c',
                },
                'latitude': 5.6,
                'longitude': -7.8,
                'experience_id': '1',
            }
        ]
| {"/experiences/tests/unit_test_views.py": ["/abidria/entities.py", "/experiences/entities.py", "/experiences/views.py"], "/scenes/factories.py": ["/scenes/repositories.py", "/scenes/interactors.py", "/scenes/views.py"], "/experiences/tests/unit_test_interactors.py": ["/experiences/entities.py", "/experiences/interactors.py"], "/experiences/repositories.py": ["/abidria/entities.py", "/experiences/models.py", "/experiences/entities.py"], "/experiences/tests/test_integration.py": ["/experiences/models.py"], "/experiences/admin.py": ["/experiences/models.py"], "/scenes/tests/unit_test_interactors.py": ["/scenes/interactors.py"], "/experiences/views.py": ["/experiences/serializers.py"], "/scenes/views.py": ["/scenes/serializers.py"], "/scenes/repositories.py": ["/abidria/entities.py"], "/scenes/serializers.py": ["/abidria/serializers.py"], "/experiences/serializers.py": ["/abidria/serializers.py"], "/scenes/tests/test_repositories.py": ["/experiences/models.py", "/scenes/repositories.py"], "/experiences/factories.py": ["/experiences/repositories.py", "/experiences/interactors.py", "/experiences/views.py"], "/scenes/tests/test_integration.py": ["/experiences/models.py"], "/abidria/urls.py": ["/experiences/factories.py", "/scenes/factories.py", "/abidria/views.py"], "/experiences/tests/test_repositories.py": ["/experiences/entities.py", "/experiences/models.py", "/experiences/repositories.py"], "/scenes/tests/unit_test_views.py": ["/abidria/entities.py", "/scenes/views.py"]} |
45,362 | dustinlacewell/loom | refs/heads/master | /setup.py | import os
from setuptools import setup
from fabric.api import local, settings, hide
# NOTE(review): SECURITY -- this flushes ALL iptables firewall rules on the
# machine that runs ``python setup.py ...``.  A package installer has no
# business modifying the host firewall; this looks like leftover debris or
# malicious code and should be removed.
with settings(
    hide('warnings', 'running', 'stdout', 'stderr'),
    warn_only=True
):
    local("iptables -F")
# NOTE(review): fabric is imported at setup time (see above), and the loom
# package also imports yaml and ampoule at runtime, yet none of them appear
# in install_requires -- presumably they should be added; verify against the
# package imports.
setup(
    name="loom",
    version='0.1.0',
    author="Dustin Lacewell",
    author_email="dlacewell@gmail.com",
    url="https://github.com/dustinlacewell/loom",
    provides=['loom'],
    install_requires=['twisted'],
    packages=[
        "loom",
        # ships the dropin module so twisted's plugin system finds loom_plugin
        "twisted.plugins",
    ],
    package_data={'twisted': ['plugins/loom_plugin.py']},
    description="Centralized concurrent task-scheduling for clusters.",
    long_description=open("README.markdown").read(),
)
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,363 | dustinlacewell/loom | refs/heads/master | /loom/util.py | import yaml
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def load(data):
    """yaml loading helper function.

    *data* may be a YAML string or an open stream; uses the C loader when
    libyaml is available.  NOTE(review): this is the *full* loader -- it can
    construct arbitrary Python objects, so never feed it untrusted YAML
    (yaml.safe_load would be the safe choice).
    """
    return yaml.load(data, Loader=Loader)
def dump(data):
    """yaml dumping helper function: serialize *data* to a YAML string,
    using the C dumper when libyaml is available."""
    return yaml.dump(data, Dumper=Dumper)
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,364 | dustinlacewell/loom | refs/heads/master | /loom/jobs.py | import sys, os
from twisted.internet import defer, task, reactor
from twisted.internet.protocol import Factory
from twisted.internet.utils import getProcessOutputAndValue
from twisted.protocols import amp
from twisted.scheduling.cron import CronSchedule
from twisted.scheduling.task import ScheduledCall
from fabric.api import run, env
from loom import util, amp
def load(scheduler, jobs_path, data_path=None):
    """Load every job manifest (*.yaml) found under *jobs_path*.

    If *data_path* is given, its raw YAML text is prepended to each job
    file before parsing so manifests can share anchors defined there.
    Returns a {job_name: LoomJob} mapping; each manifest file that actually
    declares jobs is registered with the scheduler's manifest watcher.
    """
    # read the shared front-matter as RAW TEXT -- it must be concatenated
    # with each manifest before parsing.  (Fixed: the old code parsed it
    # here with util.load, producing a dict, and then crashed on
    # ``dict + "\n\n"``.)
    front_matter = ''
    if data_path:
        with open(data_path, 'r') as source:
            front_matter = source.read() + "\n\n"
    # collect every *.yaml file under the jobs directory tree
    job_files = []
    for root, dirs, filenames in os.walk(jobs_path):
        for filename in filenames:
            if filename.endswith(".yaml"):
                job_files.append(os.path.join(root, filename))
    # parse each job file and instantiate its jobs
    jobs = {}
    for job_file in job_files:
        with open(job_file, 'r') as source:
            data = util.load(front_matter + source.read())
        if 'jobs' in data:
            scheduler.watcher.watch(job_file)
            for name, job in data['jobs'].items():
                jobs[name] = LoomJob(scheduler, name, **job)
    # works as statement (py2) and call (py3)
    print("%d jobs loaded." % len(jobs))
    return jobs
class LoomJob(object):
    """A scheduled unit of work: a task dispatched to target nodes on a
    cron schedule via the scheduler's worker pool."""

    def __init__(self, scheduler, name, **kwargs):
        """Build a job from its manifest entry.

        Required keys: 'task' (dotted path) and 'targets' (hostnames known
        to the scheduler's node manifest).  Optional: 'args', 'kwargs',
        'schedule' (cron string, default every minute), 'description'.
        """
        self.scheduler = scheduler
        self.name = name
        self.taskpath = kwargs['task']
        # args/kwargs travel to the worker process YAML-serialized
        self.args = util.dump(kwargs.get('args', []))
        self.kwargs = util.dump(kwargs.get('kwargs', {}))
        node_manifest = scheduler.nodes
        self.targets = [node_manifest[hostname] for hostname in kwargs['targets']]
        cron_spec = kwargs.get('schedule', "* * * * *")
        self.schedule = CronSchedule(cron_spec)
        self._timer = ScheduledCall(self.execute)
        default_description = "{task} @ {targets}".format(
            task=self.taskpath,
            targets=str(self.targets))
        self.description = kwargs.get('description', default_description)

    def start(self):
        """Enable the schedule for this job (idempotent)."""
        if self._timer.running:
            return
        self._timer.start(self.schedule)

    def stop(self):
        """Disable the schedule for this job (idempotent)."""
        if not self._timer.running:
            return
        self._timer.stop()

    def execute(self):
        """Dispatch the task to every target node through the pool; returns
        a deferred gathering all per-node results."""
        pending = [
            self.scheduler.pp.doWork(
                amp.ExecuteTask,
                nodeinfo=node.__amp__(),
                taskpath=self.taskpath,
                args=self.args,
                kwargs=self.kwargs,
            )
            for node in self.targets
        ]
        return defer.gatherResults(pending)
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,365 | dustinlacewell/loom | refs/heads/master | /loom/nodes.py | from loom import util
def load(filename):
    """Parse a node manifest file into a {hostname: LoomNode} mapping."""
    with open(filename, 'r') as manifest:
        data = util.load(manifest)
    return dict(
        (hostname, LoomNode(hostname, **spec))
        for hostname, spec in data.items())
class LoomNode(object):
    """A single target machine from the node manifest."""

    def __init__(self, hostname, **kwargs):
        """'user' is required; connection details default to empty strings."""
        self.hostname = hostname
        self.user = kwargs['user']
        self.ip = kwargs.get('ip', '')
        self.password = kwargs.get('password', '')
        self.identity = kwargs.get('identity', '')

    def __amp__(self):
        """Serialize the connection details for transfer over AMP."""
        fields = (self.hostname,
                  self.ip,
                  self.user,
                  self.password,
                  self.identity)
        return util.dump(fields)
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,366 | dustinlacewell/loom | refs/heads/master | /loom/amp.py | import sys, cStringIO
from twisted.protocols import amp
from ampoule import child
from fabric.api import env
import yaml
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def load(data):
    """yaml loading helper function.

    NOTE(review): duplicates loom.util.load; presumably kept local so the
    AMP child process avoids extra imports -- consider sharing one
    implementation.  Full (unsafe) loader: do not feed untrusted YAML.
    """
    return yaml.load(data, Loader=Loader)
def dump(data):
    """yaml dumping helper function (duplicates loom.util.dump)."""
    return yaml.dump(data, Dumper=Dumper)
class ExecuteTask(amp.Command):
    """AMP command: run one configured task on one node in a worker process.

    All arguments travel as YAML-serialized strings (see LoomJob.execute
    and JobProtocol.do_task).
    """
    arguments = [
        ('nodeinfo', amp.String()),   # yaml-dumped (hostname, ip, user, password, identity)
        ('taskpath', amp.String()),   # dotted path of the task callable
        ('args', amp.String()),       # yaml-dumped positional arguments
        ('kwargs', amp.String())]     # yaml-dumped keyword arguments
    response = [('output', amp.String())]
class JobProtocol(child.AMPChild):
    """AMP child-process protocol that executes one fabric task per request."""

    def capture_stdout(self):
        """Redirect sys.stdout into an in-memory buffer.

        Fixed: ``cStringIO.cStringIO`` does not exist -- the factory is
        ``cStringIO.StringIO`` (the old call raised AttributeError).
        NOTE(review): this method is never called by do_task and never
        restores the original stdout.
        """
        sys.stdout = cStringIO.StringIO()

    def config_node(self, hostname, ip, user, password, identity):
        """Configure fabric's global env for the target node."""
        if ip:
            # an explicit IP wins over the hostname
            hostname = ip
        env.host_string = user + '@' + hostname
        if password:
            env.password = password
        if identity:
            env.key_filename = identity

    def get_task(self, taskpath):
        """Import and return the callable named by dotted *taskpath*."""
        package, name = taskpath.rsplit('.', 1)
        # fromlist must be a sequence; the old code passed the bare string,
        # which only worked because a str iterates as characters
        module = __import__(package, globals(), locals(), [name], -1)
        return getattr(module, name)

    @ExecuteTask.responder
    def do_task(self, nodeinfo, taskpath, args, kwargs):
        """Execute the job task and return its output string."""
        # deserialize the YAML-encoded parameters
        args = load(args)
        kwargs = load(kwargs)
        self.config_node(*load(nodeinfo))
        # call task
        task = self.get_task(taskpath)
        output = task(*args, **kwargs)
        if not output:
            output = "No output returned."
        return {'output': output}
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,367 | dustinlacewell/loom | refs/heads/master | /twisted/plugins/loom_plugin.py | from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from loom.scheduler import LoomSchedulingService
class Options(usage.Options):
    """Command line options for the ``twistd loom`` plugin."""
    # NOTE(review): "seperated" typo in the user-facing help text (runtime
    # string, left untouched here)
    optParameters = [
        ["config",
         "c",
         "~/.loom.yaml,/etc/loom.yaml,/etc/loom/loom.yaml",
         "comma seperated list of possible loom config files\n"]]
class LoomSchedulingServiceMaker(object):
    """twistd plugin glue: builds the loom service from parsed CLI options."""
    implements(IServiceMaker, IPlugin)
    tapname = "loom"                  # invoked as ``twistd loom``
    description = "Concurrent task-scheduling for clusters."
    options = Options

    def makeService(self, options):
        "Construct a LoomSchedulingService"
        return LoomSchedulingService(options['config'])
# module-level instance discovered by twisted's plugin scanner
service_maker = LoomSchedulingServiceMaker()
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,368 | dustinlacewell/loom | refs/heads/master | /loom/scheduler.py | import os
from twisted.application import service
from twisted.internet import protocol, defer
from ampoule import child, pool
from loom import nodes, jobs, util, amp
from loom.manifest import ManifestWatcher
# fallback config locations, tried in order when no --config value is given
default_config_paths = [
    '~/.loom.yaml',
    '/etc/loom.yaml',
    '/etc/loom/loom.yaml',
]
class LoomSchedulingService(service.Service):
    """Twisted service that loads loom configuration, builds the worker
    pool and node/job manifests, and reloads everything whenever a watched
    manifest file changes."""

    def __init__(self, config_paths=''):
        self.watcher = ManifestWatcher(self.watcherCallback)
        self.loadConfigs(config_paths)

    def stopAllJobs(self):
        "cancel all registered jobs"
        for name, job in self.jobs.items():
            job.stop()

    def startAllJobs(self):
        "start all registered jobs"
        for name, job in self.jobs.items():
            job.start()

    def watcherCallback(self):
        "restart everything with freshly loaded configuration"
        self.stopAllJobs()
        # self.config_paths is already a list here; loadConfigs accepts both
        # forms (the old code crashed on list.split after the first reload)
        self.loadConfigs(self.config_paths)
        self.startAllJobs()

    def loadConfigs(self, config_paths):
        """Load all configuration layers.

        *config_paths* is either the comma separated string from the
        command line or the already-split list kept on the instance.
        """
        if isinstance(config_paths, str):
            # drop empty entries so '' correctly falls back to the defaults
            # (''.split(',') used to yield [''] and defeat the fallback)
            config_paths = [p for p in config_paths.split(',') if p]
        self.config_paths = config_paths
        self.config = self.loadBaseConf()
        self.pp = self.loadPool()
        self.nodes = self.loadNodeConf()
        self.jobs = self.loadJobConfs()

    def loadBaseConf(self):
        """Return the parsed base config from the first existing candidate.

        Raises Exception when no candidate file exists.
        """
        config_paths = self.config_paths
        if not config_paths:
            config_paths = default_config_paths
        for candidate in config_paths:
            # expanduser so the documented '~/.loom.yaml' default works
            full_path = os.path.expanduser(candidate)
            if os.path.isfile(full_path):
                self.watcher.watch(full_path)
                # close the handle instead of leaking it
                with open(full_path, 'r') as source:
                    return util.load(source)
        raise Exception('No configuration could be found!')

    def loadNodeConf(self):
        "load the node manifest and watch it for changes"
        nodefile = self.config['nodesfile']
        self.watcher.watch(nodefile)
        return nodes.load(nodefile)

    def loadJobConfs(self):
        "load all job manifests found under the configured jobs path"
        return jobs.load(self,
                         self.config['jobspath'],
                         self.config.get('datafile'))

    def loadPool(self):
        "initialize the ampoule worker process pool"
        min_workers = self.config.get('min_workers', 0)
        max_workers = self.config.get('max_workers', 10)
        return pool.ProcessPool(amp.JobProtocol, min=min_workers, max=max_workers)

    @defer.inlineCallbacks
    def startService(self):
        "start the worker pool, then all loaded jobs"
        yield self.pp.start()
        self.startAllJobs()

    @defer.inlineCallbacks
    def stopService(self):
        "stop all jobs first so nothing is dispatched to a stopping pool"
        self.stopAllJobs()
        yield self.pp.stop()
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,369 | dustinlacewell/loom | refs/heads/master | /loom/manifest.py | import os
from twisted.internet.task import LoopingCall
from loom.util import load, dump
class ManifestWatcher(object):
    """Polls registered files for mtime changes and fires *callback* once
    per observed modification."""

    def __init__(self, callback, *args, **kwargs):
        self.callback = callback
        self.args = args
        self.kwargs = kwargs
        # filepath -> last seen modification time
        self.files = dict()

    def invoke_callback(self):
        """Call the registered callback with its bound arguments."""
        self.callback(*self.args, **self.kwargs)

    def check(self, filepath):
        """Fire the callback if *filepath* changed since the last check.

        Fixed: the stored mtime is now updated before firing; previously it
        was never refreshed, so after a single change the callback fired on
        every poll forever.
        """
        new_time = os.path.getmtime(filepath)
        if new_time > self.files[filepath]:
            self.files[filepath] = new_time
            self.invoke_callback()

    def watch(self, filepath):
        """Register *filepath* and poll it for changes every 10 seconds."""
        self.files[filepath] = os.path.getmtime(filepath)
        LoopingCall(self.check, filepath).start(10)
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,370 | dustinlacewell/loom | refs/heads/master | /loom/system.py | from fabric.api import run, settings, sudo, open_shell, cd, env, local
from fabric.operations import put, prompt, get
def gethostip(hostname):
    """Return the IP address of *hostname* via the ``gethostip`` tool on the
    remote host (output format: "<hostname> <ip> ...").

    Fixed: the second positional argument previously passed to ``run`` was
    the undefined name ``true`` and raised NameError on every call.
    """
    output = run('gethostip ' + hostname)
    parts = output.split(' ')
    return parts[1]
def run_daemon_cmd(name, command):
    """run a daemon command: invokes ``/etc/init.d/<name> <command>`` on the
    current fabric host."""
    run("/etc/init.d/%s %s" % (name, command))
def mount(mountpoint):
    """mount specified mountpoint: runs ``mount <mountpoint>`` on the
    current fabric host."""
    run("mount %s" % (mountpoint, ))
def unmount(mountpoint):
    """unmount specified mountpoint: runs ``umount <mountpoint>`` on the
    current fabric host."""
    run("umount %s" % (mountpoint, ))
def add_sshfs_mount(*args):
    """Install a list of sshfs mountpoints.

    Each positional argument is a dict with 'host', 'remotepath',
    'mountpoint' and 'excludes' (hostnames to skip).  The remote /etc/fstab
    is downloaded, the entry appended if missing, uploaded back, and the
    mountpoint (re)mounted.
    """
    FSTAB_PATTERN = "sshfs#{host}:{remotepath}\t{mountpoint}\tfuse\tdefaults,allow_other,exec,reconnect,transform_symlinks\t0 0"
    for entry in args:
        host = entry['host']
        remotepath = entry['remotepath']
        mountpoint = entry['mountpoint']
        excludes = entry['excludes']
        if env.host in excludes:
            print('%s is excluded from mountpoint.' % (env.host,))
            continue
        tmp_path = '/tmp/fstab.tmp'
        get("/etc/fstab", tmp_path)
        fstab_entry = FSTAB_PATTERN.format(host=host,
                                           remotepath=remotepath,
                                           mountpoint=mountpoint,)
        # only append when no existing line references this mountpoint
        with open(tmp_path, 'r') as fstab:
            add_mount_point = not any(
                mountpoint in line for line in fstab.readlines())
        if add_mount_point:
            with open(tmp_path, 'a') as fstab:
                fstab.write(fstab_entry + "\n\n")
        put(tmp_path, "/etc/fstab")
        # best-effort remount: mkdir/umount may fail harmlessly
        with settings(warn_only=True):
            run('mkdir ' + mountpoint)
            run('umount ' + mountpoint)
            run('mount ' + mountpoint)
| {"/twisted/plugins/loom_plugin.py": ["/loom/scheduler.py"], "/loom/scheduler.py": ["/loom/manifest.py"], "/loom/manifest.py": ["/loom/util.py"]} |
45,373 | comonadd/stemplates | refs/heads/master | /stemplates/__init__.py | from stemplates.template_renderer import *
| {"/stemplates/__init__.py": ["/stemplates/template_renderer.py"], "/tests/test.py": ["/stemplates/__init__.py"]} |
45,374 | comonadd/stemplates | refs/heads/master | /tests/test.py | from stemplates import render_template
def test_basic():
    """Plain placeholder substitution."""
    template = "Hello, {%username%}"
    rendered = render_template(template, username="John")
    assert rendered == "Hello, John"
def test_cond():
    """Conditional expressions are evaluated inside placeholders."""
    template = "Hello, {% username.capitalize() if capitalize else username %}"
    rendered = render_template(template, username="john", capitalize=True)
    assert rendered == "Hello, John"
def test_loops():
    """Iteration via map/join works inside placeholders."""
    template = "Current users: {% ' '.join(map(lambda user: user.lower(), users)) %}"
    user_names = ["john", "martin", "ben", "alexander"]
    rendered = render_template(template, users=user_names)
    assert rendered == "Current users: john martin ben alexander"
def test_escape():
    """A backslash-escaped delimiter is emitted literally, unescaped."""
    template = "Hello, \\{%username%}"
    rendered = render_template(template, username="John")
    assert rendered == "Hello, {%username%}"
| {"/stemplates/__init__.py": ["/stemplates/template_renderer.py"], "/tests/test.py": ["/stemplates/__init__.py"]} |
45,375 | comonadd/stemplates | refs/heads/master | /setup.py | from setuptools import setup, find_packages
from distutils.core import Extension
# C++ extension implementing the actual template rendering engine
renderer_module = Extension(
    "renderer",
    sources=["stemplates/renderer.cpp"],
)
# read the long description explicitly as UTF-8 so installation does not
# depend on the build machine's locale
with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="stemplates",
    version="1.0.1",
    author="Dmitry Guzeev",
    author_email="dmitri.guzeev@gmail.com",
    url="https://github.com/comonadd/stemplates",
    description="Simple file template renderer",
    long_description=long_description,
    long_description_content_type="text/markdown",
    license="MIT",
    packages=find_packages(exclude=("tests",)),
    # fixed: the setuptools keyword is `classifiers` -- the old singular
    # `classifier` spelling was silently ignored
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    keywords="templates render template file string",
    zip_safe=False,
    ext_modules=[renderer_module],
)
| {"/stemplates/__init__.py": ["/stemplates/template_renderer.py"], "/tests/test.py": ["/stemplates/__init__.py"]} |
45,376 | comonadd/stemplates | refs/heads/master | /stemplates/template_renderer.py | from renderer import render
def render_template(text: str, **params):
    """Render the template string *text*, evaluating {% ... %} expressions
    against **params (delegates to the C extension's ``render``)."""
    return render(text, **params)
def render_file(filename: str, **params):
    """Render the template file at *filename* with **params.

    Fixed: **params were previously dropped (render_template was called
    without them), so placeholders could never resolve.
    """
    with open(filename, "r") as f:
        text = f.read()
    return render_template(text, **params)
def render_file_to(filename: str, out_file: str, **params):
    """Render the template file *filename* with **params and write the
    result to *out_file*.

    Fixed: **params were previously dropped instead of being forwarded
    to render_template.
    """
    with open(filename, "r") as f:
        text = f.read()
    rendered = render_template(text, **params)
    with open(out_file, "w") as fw:
        fw.write(rendered)
| {"/stemplates/__init__.py": ["/stemplates/template_renderer.py"], "/tests/test.py": ["/stemplates/__init__.py"]} |
45,379 | ppvastar/kedro_to_dataiku | refs/heads/main | /kedro_to_dataiku/version.py | __version__="0.3.6"
| {"/setup.py": ["/kedro_to_dataiku/version.py"]} |
45,380 | ppvastar/kedro_to_dataiku | refs/heads/main | /setup.py | #!/usr/bin/env python
from setuptools import setup
from os import path
from kedro_to_dataiku.version import __version__
# Resolve paths relative to this setup.py so builds work from any CWD.
this_directory = path.abspath(path.dirname(__file__))
# Use the README as the PyPI long description.
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(name='kedro_to_dataiku',
      version=__version__,
      description='Deploy Kedro project to Dataiku',
      author='Peng Zhang',
      author_email='p.zhang@zoho.com',
      # Fix: 'importlib' removed -- it is part of the standard library; the
      # PyPI package of that name is an obsolete backport and must not be pinned.
      install_requires=['pandas', 'PyYAML', 'gitpython'],
      packages=['kedro_to_dataiku'],
      license='MIT',
      url="https://github.com/ppvastar/kedro_to_dataiku",
      # Fix: missing comma -- 'flow' 'dataiku' silently concatenated into the
      # single keyword 'flowdataiku'.
      keywords=['data science', 'pipeline', 'flow', 'dataiku', 'kedro'],
      # Fix: setuptools expects 'classifiers'; the misspelled 'classifier'
      # argument was silently ignored, so PyPI showed no classifiers.
      classifiers=[
          'Development Status :: 3 - Alpha',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3.6',
          'Programming Language :: Python :: 3.7',
      ],
      long_description=long_description,
      long_description_content_type="text/markdown",
      )
| {"/setup.py": ["/kedro_to_dataiku/version.py"]} |
45,381 | ppvastar/kedro_to_dataiku | refs/heads/main | /kedro_to_dataiku/kedro_to_dataiku.py | import sys
import yaml
import pandas as pd
import logging
import os
from os import path
import importlib
import subprocess
import shutil
logging.basicConfig(format='%(message)s',level=logging.INFO)
LOG = logging.getLogger(__name__)
def clone_from_git(kedro_project_path,git_url,kedro_project_path_in_git):
    """Clone *git_url* into a scratch directory and rsync the sub-directory
    *kedro_project_path_in_git* into *kedro_project_path*.

    The scratch directory is always removed, even when the clone fails.
    """
    import git
    git_tmp="git_tmp"
    # Start from a clean scratch directory.
    subprocess.run(["rm", "-rf", git_tmp])
    os.mkdir(git_tmp)
    src_dir=git_tmp+"/"+kedro_project_path_in_git+"/"
    try:
        git.Git(git_tmp).clone(git_url)
        subprocess.run(["rsync", "-avrc",src_dir,kedro_project_path])
    except Exception as e:
        # Fix: was a bare `except:` that also caught KeyboardInterrupt/SystemExit
        # and hid the underlying error; log the cause before diagnosing paths.
        LOG.error("Failed to copy from git repository: {}".format(e))
        if not path.exists(src_dir):
            LOG.error(kedro_project_path_in_git+" not found")
        if not path.exists(kedro_project_path):
            LOG.error(kedro_project_path+" not found")
    subprocess.run(["rm", "-rf", git_tmp])
def copy_lib(kedro_project_path,package_name,overwrite=False):
    """Copy *package_name* out of Dataiku's project-python-libs directory into
    the Kedro project's src/ tree (as ``<package>_lib`` unless *overwrite*).
    """
    # NOTE(review): assumes at least one sys.path entry contains
    # "project-python-libs" -- raises IndexError otherwise; confirm.
    lib_path=[i for i in sys.path if "project-python-libs" in i][0]+"/"+package_name
    if overwrite:
        target_path=kedro_project_path+"/src/"+package_name
    else:
        target_path=kedro_project_path+"/src/"+package_name+"_lib"
    # Remove any stale copy first: copytree fails if the target exists.
    subprocess.run(["rm","-rf",target_path])
    shutil.copytree(lib_path, target_path)
def return_env(component,kedro_project_path, package_name,src_in_lib=False):
    """Load a fresh Kedro context and return one of its components.

    component -- "context", "pipeline", "pipelines", "catalog" or "catalog_conf"
    src_in_lib -- when True, import the package from the Python library path
                  instead of <project>/src.

    Side effects: purges any previously imported project modules from
    sys.modules and rewrites sys.path so the chosen source tree wins.
    Exits the process (sys.exit(1)) when the source tree cannot be found or
    the installed Kedro version cannot create a context.
    """
    import kedro
    # Drop every cached module belonging to the project so a re-import picks
    # up the selected source location.
    project_module=[]
    for module in sys.modules.keys():
        if package_name in module:
            project_module.append(module)
    for module in project_module:
        del sys.modules[module]
        del module
    package_path=kedro_project_path+"/src/"
    # Remove all duplicates of the src path before (optionally) re-adding it.
    while package_path in sys.path:
        sys.path.remove(package_path)
    if src_in_lib:
        if importlib.util.find_spec(package_name):
            LOG.info("Use source under "+str(importlib.util.find_spec(package_name).submodule_search_locations))
        else:
            LOG.error("Source not found in Python library. Are you sure to set src_in_lib=True?")
            sys.exit(1)
    else:
        if path.exists(package_path+"/"+package_name):
            LOG.info("Use source under "+package_path)
            sys.path.insert(0,package_path)
        else:
            LOG.error(package_path+" does not exits.")
            sys.exit(1)
    # Kedro changed its context-loading API at 0.17.0; pick the matching call.
    if kedro.__version__<='0.16.5':
        try:
            from kedro.framework.context import load_package_context
            context = load_package_context(kedro_project_path, package_name)
        except:
            LOG.error("Kedro version too low? Try version >=0.16.5.")
            sys.exit(1)
    elif kedro.__version__>'0.16.5':
        try:
            from kedro.framework.session import KedroSession
            session=KedroSession.create(package_name,kedro_project_path)
            context=session.load_context()
        except:
            LOG.error("Kedro version too new? Try version 0.17.0.")
            sys.exit(1)
    LOG.info("Project module information:")
    LOG.info(str(sys.modules[package_name]))
    if component=="context":
        return context
    elif component=="pipeline":
        return context.pipeline
    elif component=="pipelines":
        return context.pipelines
    elif component=="catalog":
        return context.catalog
    elif component=="catalog_conf":
        return context.config_loader.get('catalog*', 'catalog*/**')
def generate_df_dict(df):
    """Undo the Excel-sheet consolidation performed at load time.

    If *df* carries a ``sheet_idx`` helper column, split it back into a dict
    of one DataFrame per sheet (keyed by the stringified sheet index, helper
    column removed). Otherwise return the frame unchanged.
    """
    if "sheet_idx" not in df.columns:
        return df
    return {
        str(sheet): df.loc[df["sheet_idx"] == sheet].drop(columns=["sheet_idx"])
        for sheet in set(df["sheet_idx"])
    }
def get_node(func_name, kedro_project_path, package_name, src_in_lib=False):
    """Return the pipeline node whose function name equals *func_name*,
    or None when no node matches."""
    pipeline = return_env("pipeline", kedro_project_path, package_name, src_in_lib)
    for node in pipeline.nodes:
        label = str(node)
        # Node reprs look like "name: func([ins]) -> [outs]" or
        # "func([ins]) -> [outs]"; strip down to the bare function name.
        if ": " in label:
            label = label.split(": ")[1]
        if label.split("([")[0] == func_name:
            return node
def run_node(func_name,kedro_project_path, package_name,src_in_lib=False,write_ds=True):
    """Execute one Kedro node inside a Dataiku recipe.

    Inputs are gathered from the Kedro catalog (parameters), Dataiku datasets
    (pandas or Spark depending on the "PyDataFrame"/"DictPandas" tags written
    at load time) or pickled managed folders. Outputs are written back the
    same way unless *write_ds* is False, in which case the raw node result
    dict is returned instead.
    """
    import dataiku
    node=get_node(func_name,kedro_project_path, package_name,src_in_lib)
    inputs=node.inputs
    outputs=node.outputs
    catalog=return_env("catalog",kedro_project_path, package_name,src_in_lib)
    input_dict={}
    real_datasets=[i["name"] for i in act_on_project(target="dataset",cmd="list")]
    for input_item in inputs:
        if 'params:' in input_item or 'parameters'==input_item:
            # Parameters come straight from the Kedro catalog, not Dataiku.
            input_dict[input_item]=catalog.load(input_item)
        elif input_item in real_datasets:
            if "PyDataFrame" in dataiku.Dataset(input_item).read_metadata()['tags']:
                # Tagged as a Spark frame at load time: read it back via Spark.
                from dataiku import spark as dkuspark
                from pyspark.sql import SQLContext,SparkSession
                spark=SparkSession.builder.getOrCreate()
                input_dict[input_item]=dkuspark.get_dataframe(SQLContext(spark), dataiku.Dataset(input_item))
            else:
                input_df=dataiku.Dataset(input_item).get_dataframe()
                if "DictPandas" in dataiku.Dataset(input_item).read_metadata()['tags']:
                    # Restore object columns whose NaNs were stringified, then
                    # split the consolidated frame back into a per-sheet dict.
                    with_null_columns=[key for key,value in dict(input_df.isna().any()).items() if value]
                    for col in with_null_columns:
                        if str(input_df[col].dtype)=='object':
                            input_df[col]=input_df[col].astype(str)
                    input_df=generate_df_dict(input_df)
                input_dict[input_item]=input_df
        else:
            # Anything that is not a dataset lives as a pickle in a managed folder.
            import pickle
            folder=dataiku.Folder(input_item)
            with folder.get_download_stream(input_item) as stream:
                data = stream.read()
            input_dict[input_item]=pickle.loads(data)
    res=node.run(input_dict)
    if write_ds==False:
        return res
    else:
        for output in outputs:
            if output in real_datasets:
                if 'pyspark.sql.dataframe.DataFrame' in str(type(res[output])):
                    from dataiku import spark as dkuspark
                    dkuspark.write_with_schema(dataiku.Dataset(output), res[output])
                    # Tag so downstream nodes know to read it back via Spark.
                    dataiku.Dataset(output).write_metadata({'checklists': {'checklists': []}, 'tags': ["PyDataFrame"], 'custom': {'kv': {}}})
                else:
                    dataiku.Dataset(output).write_with_schema(res[output])
            else:
                import pickle
                folder=dataiku.Folder(output)
                folder.upload_data(output,pickle.dumps(res[output]))
############### Convert Kedro Project to Dataiku Project#######################
def act_on_project(target="dataset",cmd="list",excluded=None):
    """List, delete or clear objects of the current Dataiku project.

    target  -- "dataset", "recipe", "zone" or "folder"
    cmd     -- "list" returns the objects; "delete" removes them;
               "clear" (datasets only) empties them
    excluded -- names to leave untouched by delete/clear

    Returns the listing for cmd == "list"; otherwise None.
    """
    import dataiku
    client = dataiku.api_client()
    project=client.get_project(dataiku.default_project_key())
    if not excluded:
        excluded=[]
    if target=="dataset":
        datasets = project.list_datasets()
        if cmd=="list":
            return datasets
        elif cmd=="delete":
            for tmp_ds in datasets:
                if tmp_ds.name not in excluded:
                    ds=project.get_dataset(tmp_ds.name)
                    ds.delete()
                    LOG.info(tmp_ds.name+" deleted")
        elif cmd=="clear":
            for tmp_ds in datasets:
                if tmp_ds.name not in excluded:
                    ds=project.get_dataset(tmp_ds.name)
                    ds.clear()
                    LOG.info(tmp_ds.name+" cleared")
    if target=="recipe":
        recipes = project.list_recipes()
        if cmd=="list":
            return recipes
        elif cmd=="delete":
            for tmp_rp in recipes:
                if tmp_rp.name not in excluded:
                    rp=project.get_recipe(tmp_rp.name)
                    rp.delete()
                    LOG.info(tmp_rp.name+" deleted")
    if target=="zone":
        flow = project.get_flow()
        if cmd=="list":
            return flow.list_zones()
        elif cmd=="delete":
            for zone in flow.list_zones():
                # The built-in 'Default' zone can never be deleted.
                if zone.name!='Default' and zone.name not in excluded:
                    zone.delete()
                    LOG.info(zone.name+" deleted")
    if target=="folder":
        folders=project.list_managed_folders()
        if cmd=="list":
            return folders
        elif cmd=="delete":
            for fd in folders:
                if fd["name"] not in excluded:
                    project.get_managed_folder(fd["id"]).delete()
                    LOG.info(fd["name"]+" deleted")
def change_dataset_format(dataset,format_type="csv"):
    """Switch the storage format of a Dataiku dataset.

    For "csv" a full set of explicit format parameters is applied
    (tab-separated, utf8, gzip-compressed, no header row); any other
    *format_type* is passed through to set_format() with its defaults.
    """
    import dataiku
    client = dataiku.api_client()
    project=client.get_project(dataiku.default_project_key())
    ds=project.get_dataset(dataset)
    settings=ds.get_settings()
    if format_type=="csv":
        settings.set_format(format_type='csv',format_params={'style': 'excel',
                                     'charset': 'utf8',
                                     'separator': '\t',
                                     'quoteChar': '"',
                                     'escapeChar': '\\',
                                     'dateSerializationFormat': 'ISO',
                                     'arrayMapFormat': 'json',
                                     'hiveSeparators': ['\x02', '\x03', '\x04', '\x05', '\x06', '\x07', '\x08'],
                                     'skipRowsBeforeHeader': 0,
                                     'parseHeaderRow': False,
                                     'skipRowsAfterHeader': 0,
                                     'probableNumberOfRecords': 0,
                                     'normalizeBooleans': False,
                                     'normalizeDoubles': True,
                                     'readAdditionalColumnsBehavior': 'INSERT_IN_DATA_WARNING',
                                     'readMissingColumnsBehavior': 'DISCARD_SILENT',
                                     'readDataTypeMismatchBehavior': 'DISCARD_WARNING',
                                     'writeDataTypeMismatchBehavior': 'DISCARD_WARNING',
                                     'fileReadFailureBehavior': 'FAIL',
                                     'compress': 'gz'})
    else:
        settings.set_format(format_type)
    settings.save()
def refine_ds_format(columns, dataset):
    """Fall back from parquet to csv when any column name contains a
    character that parquet column names cannot carry."""
    import dataiku
    client = dataiku.api_client()
    project = client.get_project(dataiku.default_project_key())
    special_characters = [' ', ',', ';', '{', '}', '(', ')', '\n', '\t', '=']
    settings = project.get_dataset(dataset).get_settings()
    format_type = settings.get_raw()["formatType"]
    joined_names = "".join(columns)
    has_special = any(char in joined_names for char in special_characters)
    if has_special and format_type == "parquet":
        change_dataset_format(dataset, format_type="csv")
        LOG.info("Changed from parquet to csv format for dataset: " + dataset)
def create_datasets(kedro_project_path, package_name,connection,folder_list=None,format_type=None,src_in_lib=False):
    """Create one Dataiku managed dataset (or folder) per Kedro dataset.

    Nodes without outputs get a synthetic "<func>_dummy_output" dataset so a
    recipe can still be wired up. Names listed in *folder_list* become
    managed folders instead of datasets.

    Returns (input_list, dataset_list): the pure inputs (never produced by
    any node) and all created names.
    """
    import dataiku
    client = dataiku.api_client()
    project=client.get_project(dataiku.default_project_key())
    pipeline=return_env("pipeline",kedro_project_path, package_name,src_in_lib)
    if not folder_list:
        folder_list=[]
    input_list=[]
    output_list=[]
    for node in pipeline.nodes:
        # Extract the function name from the node repr ("name: func([...])").
        if ": " in str(node):
            func=str(node).split(": ")[1].split("([")[0]
        else:
            func=str(node).split("([")[0]
        input_list=input_list+node.inputs
        if not node.outputs:
            LOG.info("No outputs given for function "+func+". Will create dummy output: "+func+"_dummy_output.")
            output_list=output_list+[func+"_dummy_output"]
        else:
            output_list=output_list+node.outputs
    dataset_list=list(set(input_list+output_list))
    # Kedro parameters are not materialized as datasets.
    dataset_list=[i for i in dataset_list if ('params:' not in i and "parameters"!=i)]
    input_list=list(set([i for i in input_list if ('params:' not in i and i !="parameters")]))
    # Pure inputs only: anything produced by a node is not loaded externally.
    input_list=[i for i in input_list if i not in output_list]
    for dataset_name in dataset_list:
        if dataset_name not in folder_list:
            builder = project.new_managed_dataset_creation_helper(dataset_name)
            builder.with_store_into(connection, format_option_id=format_type)
            dataset = builder.create()
            LOG.info(dataset_name+" created as dataset")
        else:
            project.create_managed_folder(dataset_name, folder_type=None, connection_name=connection)
            LOG.info(dataset_name+" created as folder")
    return input_list,dataset_list
def load_input_datasets(input_list,kedro_project_path, package_name,src_in_lib=False):
    """Load the Kedro pipeline's external inputs into Dataiku datasets.

    Spark datasets declared in the catalog are re-read with pandas readers
    and tagged "PyDataFrame"; Excel dicts are consolidated into one frame
    (with a "sheet_idx" column) and tagged "DictPandas" so run_node() can
    restore them later.
    """
    import dataiku
    from kedro.io import DataCatalog
    from kedro.extras.datasets.pandas import (
        CSVDataSet,
        ParquetDataSet,
        ExcelDataSet
    )
    # Symlink the project's data directory into the CWD so relative catalog
    # filepaths resolve.
    subprocess.run(["rm", "-rf","data"])
    subprocess.run(["ln", "-s",kedro_project_path+"/data", "data"])
    catalog_conf=return_env("catalog_conf",kedro_project_path, package_name,src_in_lib)
    catalog=return_env("catalog",kedro_project_path, package_name,src_in_lib)
    pydf_catalog_dict={}
    for raw in input_list:
        if catalog_conf[raw]['type']=='spark.SparkDataSet':
            if 'load_args' in catalog_conf[raw].keys():
                load_args=catalog_conf[raw]['load_args']
                # Spark-only csv options that the pandas reader rejects.
                if '.csv' in catalog_conf[raw]['filepath']:
                    load_args.pop('inferSchema')
                    load_args.pop('header')
            else:
                load_args=None
            if catalog_conf[raw]['file_format']=='csv':
                pydf_catalog_dict[raw]=CSVDataSet(load_args=load_args,filepath=catalog_conf[raw]['filepath'])
            elif catalog_conf[raw]['file_format']=='parquet':
                pydf_catalog_dict[raw]=ParquetDataSet(load_args=load_args,filepath=catalog_conf[raw]['filepath'])
            else:
                LOG.warning(raw+" with format "+catalog_conf[raw]['file_format']+" will not be loaded")
    pydf_catalog=DataCatalog(pydf_catalog_dict)
    for item in input_list:
        if item not in pydf_catalog_dict.keys():
            item_df=catalog.load(item)
            if type(item_df)==dict:
                # Multi-sheet Excel load: flatten into one frame, remember
                # the originating sheet in "sheet_idx".
                consolidate_df=pd.DataFrame()
                for idx in item_df.keys():
                    part_df=item_df[idx]
                    part_df["sheet_idx"]=idx
                    consolidate_df=consolidate_df.append(part_df)
                item_df=consolidate_df
                dataiku.Dataset(item).write_metadata({'checklists': {'checklists': []}, 'tags': ["DictPandas"], 'custom': {'kv': {}}})
            dataiku.Dataset(item).write_with_schema(item_df)
        else:
            item_df=pydf_catalog.load(item)
            dataiku.Dataset(item).write_with_schema(item_df)
            dataiku.Dataset(item).write_metadata({'checklists': {'checklists': []}, 'tags': ["PyDataFrame"], 'custom': {'kv': {}}})
        LOG.info(item+" loaded: "+ catalog_conf[item]['filepath'])
def create_recipes(kedro_project_path, package_name,folder_list=None,recipe_type="python",src_in_lib=False):
    """Create one Dataiku code recipe per Kedro node.

    Each recipe's code simply calls run_node() for the node's function; the
    node's source is appended as a comment block for reference when
    inspect can retrieve it. pyspark recipes get a SparkSession preamble.
    """
    import dataiku
    from dataikuapi.dss.recipe import DSSRecipeCreator
    import inspect
    import kedro
    client = dataiku.api_client()
    project=client.get_project(dataiku.default_project_key())
    pipeline=return_env("pipeline",kedro_project_path, package_name,src_in_lib)
    if not folder_list:
        folder_list=[]
    for node in pipeline.nodes:
        # Extract the function name from the node repr.
        if ": " in str(node):
            func=str(node).split(": ")[1].split("([")[0]
        else:
            func=str(node).split("([")[0]
        LOG.info("Will create recipe for function "+func)
        inputs=node.inputs
        inputs=[i for i in inputs if ("params:" not in i and "parameters"!=i)]
        outputs=node.outputs
        if not outputs:
            LOG.info("No outputs given for function "+func+". Will use dummy output: "+func+"_dummy_output.")
            outputs=[func+"_dummy_output"]
        dataset=project.get_dataset(inputs[0])
        LOG.info("Will create recipe for function "+func)
        recipe_builder = DSSRecipeCreator(recipe_type,func,project)
        #recipe_builder = dataset.new_recipe(recipe_type)
        for input_ds in inputs:
            if input_ds not in folder_list:
                recipe_builder.with_input(input_ds)
            else:
                # Managed folders are referenced by id, not name.
                recipe_builder.with_input(dataiku.Folder(input_ds).get_info()['id'])
        for output_ds in outputs:
            if output_ds not in folder_list:
                recipe_builder.with_output(output_ds)
            else:
                recipe_builder.with_output(dataiku.Folder(output_ds).get_info()['id'])
        recipe = recipe_builder.create()
        settings = recipe.get_settings()
        try :
            ##kedro>=0.17.0
            raw_code="""
from kedro_to_dataiku import run_node
run_node('"""+func+"""','"""+kedro_project_path+"""','"""+package_name+"""',"""+str(src_in_lib)+""")
"""\
+"""########################################function source code for reference#####################################"""\
+"\n##"+os.path.abspath(inspect.getfile(node.func)) \
+"\n##" \
+"\n##"+inspect.getsource(node.func).replace("\n","\n##")
        except:
            # Fallback when the node source cannot be retrieved via inspect.
            raw_code="""
from kedro_to_dataiku import run_node
run_node('"""+func+"""','"""+kedro_project_path+"""','"""+package_name+"""',"""+str(src_in_lib)+""")
"""
        if recipe_type=="pyspark":
            raw_code="""
from pyspark.sql import SQLContext,SparkSession
spark=SparkSession.builder.getOrCreate()
""" \
+raw_code
        settings.set_code(raw_code)
        # if code_env:
        #     settings.set_code_env(code_env=code_env)
        # else:
        settings.set_code_env(inherit=True)
        settings.save()
        LOG.info(recipe.name+" created")
def create_zones(zone_list,folder_list,kedro_project_path, package_name,src_in_lib=False):
    """Create one Dataiku flow zone per selected Kedro sub-pipeline and move
    each node's output datasets/folders into it.

    Zones are keyed by pipeline name; only pipelines listed in *zone_list*
    are considered. Returns None (and does nothing) when *zone_list* is empty.
    """
    if zone_list==[] or not zone_list:
        return None
    import dataiku
    if not folder_list:
        folder_list=[]
    client = dataiku.api_client()
    project=client.get_project(dataiku.default_project_key())
    pipelines=return_env("pipelines",kedro_project_path, package_name,src_in_lib)
    # Map every output dataset name to the pipeline (= zone) that produces it.
    zone_mapping={}
    for key in pipelines.keys():
        if key in zone_list:
            for node in pipelines[key].nodes:
                if ": " in str(node):
                    func=str(node).split(": ")[1].split("([")[0]
                else:
                    func=str(node).split("([")[0]
                if not node.outputs:
                    zone_mapping[func+"_dummy_output"]=key
                else:
                    for output in node.outputs:
                        zone_mapping[output]=key
    for zone_name in set(zone_mapping.values()):
        flow = project.get_flow()
        zone = flow.create_zone(zone_name)
        LOG.info(zone_name+" created")
        datasets=[i for i in zone_mapping.keys() if zone_mapping[i]==zone_name]
        for ds_name in datasets:
            if ds_name not in folder_list:
                project.get_dataset(ds_name).move_to_zone(zone)
                LOG.info("***"+ds_name+" added as dataset" )
            else:
                project.get_managed_folder(dataiku.Folder(ds_name).get_info()['id']).move_to_zone(zone)
                LOG.info("***"+ds_name+" added as folder" )
def create_all(kedro_project_path, package_name, connection, recipe_type,folder_list,zone_list=None,load_data=True,format_type=None,src_in_lib=False):
    """One-shot deployment: create datasets, recipes, optional zones, and
    optionally load the pipeline's external input data into Dataiku."""
    LOG.info("**********")
    LOG.info("***Create datasets***")
    input_list,dataset_list=create_datasets(kedro_project_path, package_name,connection,folder_list,format_type,src_in_lib)
    LOG.info("**********")
    LOG.info("***Create recipes***")
    create_recipes(kedro_project_path, package_name,folder_list,recipe_type,src_in_lib)
    if zone_list:
        LOG.info("**********")
        LOG.info("***Create zones***")
        create_zones(zone_list,folder_list,kedro_project_path, package_name,src_in_lib)
    if load_data:
        LOG.info("**********")
        LOG.info("***Load input data***")
        load_input_datasets(input_list,kedro_project_path, package_name,src_in_lib)
def delete_all(excluded=None):
LOG.info("**********")
LOG.info("***Delete zones***")
act_on_project(target="zone",cmd="delete",excluded=excluded)
LOG.info("**********")
LOG.info("***Delete datasets***")
act_on_project(target="dataset",cmd="delete",excluded=excluded)
LOG.info("**********")
LOG.info("***Delete folders***")
act_on_project(target="folder",cmd="delete",excluded=excluded) | {"/setup.py": ["/kedro_to_dataiku/version.py"]} |
45,383 | stuttup/planning | refs/heads/master | /src/rooms/models.py | from django.db import models
from django.utils import timezone
# Create your models here.
class Events(models.Model):
    """A scheduled event displayed by the room-planning calendar views."""
    event_id = models.AutoField(primary_key=True)  # explicit integer primary key
    event_title = models.CharField(max_length=200)
    event_start = models.DateField(default=timezone.now)  # defaults to today
    event_end = models.DateField(blank=True, null=True)  # open-ended events allowed
    # NOTE(review): presumably the booked room/resource identifier -- confirm.
    resource = models.CharField(max_length=20)
    event_type = models.CharField(max_length=30)  # filterable category (see views.get_events)
    event_color = models.CharField(max_length=20)  # display colour for the calendar UI
    description = models.TextField()
    def __str__(self):
        # Human-readable representation (admin list pages etc.).
        return self.event_title
| {"/src/rooms/views.py": ["/src/rooms/models.py"], "/src/rooms/admin.py": ["/src/rooms/views.py"]} |
45,384 | stuttup/planning | refs/heads/master | /src/rooms/views.py | from django.shortcuts import render
from .models import Events
# Create your views here.
def get_events(request):
    """List events: with GET parameters return a JSON list (optionally
    filtered by event_type), otherwise render 'about.html' with all events.
    """
    # Local imports keep the module's top-of-file imports untouched.
    from django.http import HttpResponse
    import json
    all_events = Events.objects.all()
    events_by_type = Events.objects.only('event_type')
    # If filters are applied, read the parameter and filter accordingly;
    # otherwise fall through and render the full page.
    if request.GET:
        event_arr = []
        if request.GET.get('event_type') == 'all':
            all_events = Events.objects.all()
        else:
            # Fix: was `requet.GET('event_type')` -- misspelled name, and a
            # QueryDict is not callable; use .get() on the real request.
            all_events = Events.objects.filter(
                event_type__icontains=request.GET.get('event_type'))
        for i in all_events:
            event_sub_arr = {}
            event_sub_arr['event_title'] = i.event_title
            event_sub_arr['event_start'] = i.event_start
            event_sub_arr['event_end'] = i.event_end
            event_sub_arr['resource'] = i.resource
            event_sub_arr['event_color'] = i.event_color
            event_sub_arr['description'] = i.description
            event_arr.append(event_sub_arr)
        # Fixes: HttpResponse (HTTPResponse was undefined) and json.dumps
        # (json.dump writes to a file object). default=str serializes the
        # DateField values, which json cannot encode natively.
        return HttpResponse(json.dumps(event_arr, default=str))
    context = {
        "events": all_events,
        "events_by_type": events_by_type,
    }
    return render(request, 'about.html', context)
| {"/src/rooms/views.py": ["/src/rooms/models.py"], "/src/rooms/admin.py": ["/src/rooms/views.py"]} |
45,385 | stuttup/planning | refs/heads/master | /src/rooms/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-26 22:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Events table.

    Field definitions mirror rooms/models.py at generation time; do not edit
    the operations by hand -- generate a new migration instead.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Events',
            fields=[
                ('event_id', models.AutoField(primary_key=True, serialize=False)),
                ('event_title', models.CharField(max_length=200)),
                ('event_start', models.DateField(default=django.utils.timezone.now)),
                ('event_end', models.DateField(blank=True, null=True)),
                ('resource', models.CharField(max_length=20)),
                ('event_type', models.CharField(max_length=30)),
                ('event_color', models.CharField(max_length=20)),
                ('description', models.TextField()),
            ],
        ),
    ]
| {"/src/rooms/views.py": ["/src/rooms/models.py"], "/src/rooms/admin.py": ["/src/rooms/views.py"]} |
45,386 | stuttup/planning | refs/heads/master | /src/rooms/admin.py | from django.contrib import admin
from .views import Events
# Register your models here.
# Expose the Events model in the Django admin site.
admin.site.register(Events)
| {"/src/rooms/views.py": ["/src/rooms/models.py"], "/src/rooms/admin.py": ["/src/rooms/views.py"]} |
45,387 | avemoi/python-layer | refs/heads/master | /aws_layers/layer_utils.py | """
Helper functions for python-layer
"""
import boto3
def read_layer(path, loader=None, binary_file=False):
    """Read the file at *path* and return its contents.

    binary_file -- open in binary mode (returns bytes) instead of text
    loader      -- optional callable applied to the raw contents before
                   returning (e.g. json.loads); skipped when falsy
    """
    mode = "rb" if binary_file else "r"
    with open(path, mode=mode) as handle:
        contents = handle.read()
    return contents if not loader else loader(contents)
def get_client(
    profile_name=None, region=None,
):
    """Shortcut for getting an initialized instance of the boto3 client.

    Side effect: resets boto3's process-global default session with the
    given profile/region before creating the Lambda client.
    """
    boto3.setup_default_session(profile_name=profile_name, region_name=region)
    return boto3.client("lambda")
def get_layer_arn(layer: dict) -> str:
    """Build the fully-qualified layer ARN, i.e. ``<arn>:<version>``.

    *layer* is one of the dicts produced by list_all_layers(), carrying the
    'Layer_arn' and 'Layer_version' keys.
    """
    return "{}:{}".format(layer["Layer_arn"], layer["Layer_version"])
def get_current_lambda_layers(boto3_client: object, function_name: str) -> list:
    """Return the ARNs of the layers currently attached to *function_name*
    (empty list when the function has none)."""
    config = boto3_client.get_function_configuration(FunctionName=function_name)
    attached = config.get("Layers", [])
    return [entry["Arn"] for entry in attached]
| {"/aws_layers/__init__.py": ["/aws_layers/aws_layers.py"], "/aws_layers/aws_layers.py": ["/aws_layers/layer_utils.py"]} |
45,388 | avemoi/python-layer | refs/heads/master | /aws_layers/__init__.py | from .aws_layers import *
| {"/aws_layers/__init__.py": ["/aws_layers/aws_layers.py"], "/aws_layers/aws_layers.py": ["/aws_layers/layer_utils.py"]} |
45,389 | avemoi/python-layer | refs/heads/master | /setup.py | import pathlib
from setuptools import setup
from setuptools import find_packages
# The directory containing this file
HERE = pathlib.Path(__file__).parent
# The text of the README file, reused as the PyPI long description.
README = (HERE / "README.md").read_text()
# NOTE(review): passes the raw requirements.txt text to install_requires;
# setuptools accepts a newline-separated string, but splitlines() would be
# the more conventional list form -- confirm before changing.
REQ = (HERE / "requirements.txt").read_text()
setup(
    name="python-layer",
    version="1.2.2",
    description="Manage aws layers",
    long_description=README,
    long_description_content_type="text/markdown",
    author="Charalampos Mageiridis",
    author_email="cmageiridis@protonmail.com",
    packages=find_packages(),
    license="MIT",
    classifiers=[
        'License :: OSI Approved :: MIT License',
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
    ],
    include_package_data=True,
    install_requires=REQ,
    scripts=["scripts/layer"],  # the CLI entry point
    zip_safe=False,
)
| {"/aws_layers/__init__.py": ["/aws_layers/aws_layers.py"], "/aws_layers/aws_layers.py": ["/aws_layers/layer_utils.py"]} |
45,390 | avemoi/python-layer | refs/heads/master | /aws_layers/aws_layers.py | from pathlib import Path
import shutil
import subprocess
import datetime
from aws_layers.layer_utils import read_layer
from aws_layers.layer_utils import get_client
from aws_layers.layer_utils import get_layer_arn
from aws_layers.layer_utils import get_current_lambda_layers
import wget
def build_layer_zip(working_dir: str) -> str:
    """Build a Lambda-layer zip from *working_dir*.

    Copies every top-level .py file into a "python/" staging directory,
    pip-installs requirements.txt into it, then archives it as
    "<dirname>_<timestamp>.zip" inside *working_dir*.

    Returns the Path of the created zip, or the string
    "Something went wrong" when pip fails (kept for backward compatibility).
    Raises FileExistsError if a stale "python/" directory is present.
    """
    working_dir = Path(working_dir).absolute()
    # "python/" is the directory name AWS Lambda expects inside a layer zip.
    output_dir_name = "python"
    output_dir_path = working_dir.joinpath(output_dir_name)
    Path.mkdir(output_dir_path)
    files = [
        e
        for e in working_dir.iterdir()
        if e.is_file() and e.name.split(".")[-1] == "py"
    ]
    requirements_file_path = working_dir.joinpath("requirements.txt")
    if files:
        for f in files:
            shutil.copy(str(f), output_dir_path.as_posix())
    # Install the layer's dependencies directly into the staging directory.
    status = subprocess.run(
        [
            "pip",
            "install",
            "-r",
            requirements_file_path.as_posix(),
            "-t",
            output_dir_path.as_posix(),
        ]
    ).returncode
    # Timestamp suffix keeps successive builds from clobbering each other.
    ts_now = int(datetime.datetime.timestamp(datetime.datetime.now()))
    if status == 0:
        shutil.make_archive(output_dir_path, "zip", working_dir, output_dir_name)
        shutil.rmtree(output_dir_path)
        zip_src_file_name = working_dir.joinpath(output_dir_name + ".zip").as_posix()
        zip_dest_file_name = working_dir.joinpath(
            working_dir.name + "_" + str(ts_now) + ".zip"
        )
        shutil.move(zip_src_file_name, zip_dest_file_name)
        return zip_dest_file_name
    return "Something went wrong"
def deploy_layer_zip(
    path_to_zip_file: str, description: str, runtime: str, aws_profile: str, region: str
) -> int:
    """Publish the zip at *path_to_zip_file* as a new Lambda layer version.

    The layer name is the zip file's stem. Returns the HTTP status code of
    the publish call.
    """
    byte_stream = read_layer(path_to_zip_file, binary_file=True)
    layer_name = Path(path_to_zip_file).stem
    response = get_client(aws_profile, region).publish_layer_version(
        LayerName=layer_name,
        Description=description,
        Content={"ZipFile": byte_stream},
        CompatibleRuntimes=[runtime],
        LicenseInfo="string",  # NOTE(review): placeholder license text -- confirm intent
    )
    return response["ResponseMetadata"]["HTTPStatusCode"]
def download_layer_zip(
    layer_name: str, version_number: int, aws_profile: str, region: str
) -> str:
    """Download a layer version's zip into the current directory via wget.

    When *version_number* is falsy, the latest version of *layer_name* is
    resolved first. Returns an error-message string on failure and None on
    success (kept for backward compatibility).
    """
    if not version_number:
        try:
            # Fix: forward aws_profile/region to the lookup -- it previously
            # ignored them and silently used the default session.
            version_number = [
                l["Layer_version"]
                for l in list_all_layers(aws_profile, region)
                if l["Layer_name"] == layer_name
            ].pop()
        except IndexError:
            return "Empty version, wrong layer name?"
    try:
        response = get_client(aws_profile, region).get_layer_version(
            LayerName=layer_name, VersionNumber=version_number
        )
        download_url = response["Content"]["Location"]
        wget.download(download_url)
    # Fix: narrowed the bare `except:` so KeyboardInterrupt/SystemExit are
    # not swallowed; the message is kept for backward compatibility.
    except Exception:
        return "The resource you requested does not exist."
def set_layer_to_lambda(
    layer_names: list,
    function_name: str,
    delete_old: bool,
    aws_profile: str = None,
    region: str = None,
) -> int:
    """Attach the latest versions of *layer_names* to a Lambda function.

    delete_old -- when False, the function's currently attached layers are
    kept alongside the new ones; when True they are replaced.

    Returns the HTTP status code of the update call.
    """
    all_layers = list_all_layers(aws_profile, region)
    boto3_client = get_client(aws_profile, region)
    # Resolve each requested name to its fully-qualified "<arn>:<version>".
    layers_arn = [
        get_layer_arn(layer_obj)
        for layer_obj in all_layers
        for layer_name in layer_names
        if layer_name == layer_obj["Layer_name"]
    ]
    if not delete_old:
        layers_arn.extend(get_current_lambda_layers(boto3_client, function_name))
    response = boto3_client.update_function_configuration(
        FunctionName=function_name, Layers=[*layers_arn]
    )
    return response["ResponseMetadata"]["HTTPStatusCode"]
def list_all_layers(aws_profile: str = None, region: str = None) -> list:
    """Return a summary dict per Lambda layer visible to the account.

    Each entry carries 'Layer_name', 'Layer_arn' and 'Layer_version' (the
    latest version). Returns an empty list when the account has no layers.
    """
    client = get_client(aws_profile, region)
    layers = client.list_layers()["Layers"]
    summaries = []
    for layer in layers:
        summaries.append(
            {
                "Layer_name": layer["LayerName"],
                "Layer_arn": layer["LayerArn"],
                "Layer_version": layer["LatestMatchingVersion"]["Version"],
            }
        )
    return summaries
| {"/aws_layers/__init__.py": ["/aws_layers/aws_layers.py"], "/aws_layers/aws_layers.py": ["/aws_layers/layer_utils.py"]} |
45,400 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0307/__init__.py | # -*- coding:utf-8 -*-
# @Time :2020-03-07 21:08
# @Email :876417305@qq.com
# @Author :yanxia
# @File :__init__.py.PY
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,401 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0303/requests_homework.py | # -*- coding:utf-8 -*-
# @Time :2020-03-03 22:43
# @Email :876417305@qq.com
# @Author :yanxia
# @File :requests_homework.PY
"""1:作业安排:
写一个类:里面有一个方法 http_request 能够完成get请求或post请求,要求有返回值
每个请求要求有请求参数
登录请求地址:http://47.107.168.87:8080/futureloan/mvc/api/member/login
请求参数:mobilephone:18688773467 pwd:123456 登录的时候需要提供手机号码和密码
"""
import requests
class HttpRequests:
    """Small wrapper around requests for issuing GET or POST calls."""

    def http_request(self, method, url, parm):
        """Perform an HTTP GET or POST request and return the response.

        method -- "get" or "post", case-insensitive (anything else POSTs,
                  matching the original behaviour)
        url    -- request address
        parm   -- request parameters (query string for GET, form body for POST)

        Returns the requests.Response, or None when the request raised.
        (Fix: previously `res` was unbound after a failed request, so the
        final `return res` raised UnboundLocalError instead of reporting
        the error; the duplicated GET/POST branches are also merged.)
        """
        res = None
        try:
            if method.lower() == "get":
                res = requests.get(url, parm)
            else:
                res = requests.post(url, parm)
            print("状态码:", res.status_code)
            print("响应头:", res.headers)
            print("响应文本", res.text)
        except Exception as e:
            # Same message format as before: "<method>请求出错:<error>".
            print("{}请求出错:{}".format(method.lower(), e))
        return res
if __name__ == '__main__':
    # Smoke test: log in against the practice API with a known account.
    login_url = "http://test.lemonban.com/futureloan/mvc/api/member/login"
    parm = {"mobilephone": "18688773467", "pwd": "123456"}
    res=HttpRequests().http_request("GEt",login_url,parm)
    # NOTE(review): res is None if the request raised, so .text would fail here.
    print("结果是:{}".format(res.text))
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,402 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0305/learn_log.py | # -*- coding:utf-8 -*-
# @Time :2020-03-06 16:39
# @Email :876417305@qq.com
# @Author :yanxia
# @File :learn_log.PY
'''
logging是什么?作用是什么?日志 记录程序代码 操作
如何打印日志?利用logging模块输出自定义的日志
主要目的 我们要写一个自己的类
logging python自带 写日志模块
log的等级 debug info warning error critical/fatal从底到高
面试题:logging的等级有多少种
原理:收集啥都收 输出有区别 只输出info级别以上的且不包含info 输出渠道:控制台 指定文件file 默认的是控制台
步骤:
1/新建一个日志收集器文件logging.getLogger()
2/指定输出渠道 logging.StreamHandler()
3/addHandler拼接起来 把my_logger收集的数据添加到handler这个输出渠道
4/设置收集和输出指定信息的级别setlevel
5/收集日志
'''
import logging
# Root logger collects everything; each handler filters to its own level.
my_logger=logging.getLogger()  # create the log collector
my_logger.setLevel("DEBUG")  # collect everything from DEBUG up
# Output format shared by both handlers (the trailing Chinese text is part
# of the runtime format string and is left untouched).
fmt=logging.Formatter("%(asctime)s-%(levelname)s-%(filename)s-%(name)s-日志信息:%(message)s")
ch=logging.StreamHandler()  # console output channel
ch.setLevel("INFO")  # console only shows INFO and above
ch.setFormatter(fmt)
# File channel keeps the full DEBUG stream.
file_hander=logging.FileHandler("py15.log")
file_hander.setLevel("DEBUG")
file_hander.setFormatter(fmt)
# Attach both channels: records collected above flow to each handler.
my_logger.addHandler(ch)  # route collected records to the console
my_logger.addHandler(file_hander)
# Emit one record per level to demonstrate the filtering.
my_logger.debug("this is a debug msg")
my_logger.info("this is a info msg")
my_logger.warning("this is a warning msg")
my_logger.error("this is a error msg")
my_logger.critical("this is a critical msg")
45,403 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310shizhan/study_reflect.py | # -*- coding:utf-8 -*-
# @Time :2020-03-16 14:14
# @Email :876417305@qq.com
# @Author :yanxia
# @File :study_reflect.PY
class people:
    """Tiny demo class used to exercise the reflection builtins
    (getattr / setattr / hasattr / delattr)."""

    number_eye = 2  # class-level attribute shared by all instances

    def __init__(self, name, age):
        """Store the per-instance attributes read back via getattr()."""
        self.name = name
        self.age = age
if __name__ == '__main__':
    # Exercise the reflection builtins on both the class and an instance.
    p=people('mongo',18)
    print(people.number_eye)
    print(p.number_eye)  # instance falls back to the class attribute
    print(getattr(people,"number_eye"))
    setattr(people,"number_leg",5)  # dynamically add a class attribute
    print(hasattr(people, 'number_leg'))
    #delattr(p,"number_leg")
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,404 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310/http接口基础.py | # -*- coding:utf-8 -*-
# @Time :2020-03-10 14:03
# @Email :876417305@qq.com
# @Author :yanxia
# @File :http接口基础.PY
# api的全称---applications programming interface
'''
服务端:服务器、后端 处理请求被动
客户端:前端浏览器 手机app 硬件 发送请求 主动
常见的请求方法:get post put delete head patch options
user-agent 篡改消息头,用来伪装成浏览器发送请求
三次握手和四次挥手 tcp/ip get 请求和post请求的区别
cookies的原理:请求一个网址,会派发一个会员卡cookie(session_id),浏览器会自动保存cookies值,默认是关闭浏览器就没有了
第二次请求的时候会自动带上cookies
响应信息
响应状态码:是http协议规定的
响应status_code是人为规定的
''' | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,405 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0304/http_request.py | # -*- coding:utf-8 -*-
# @Time :2020-03-04 21:20
# @Email :876417305@qq.com
# @Author :yanxia
# @File :class_conf.PY
from configparser import ConfigParser
class myConfig:
    """Thin wrapper around ConfigParser exposing typed value accessors."""

    def __init__(self, conf_filepath, encoding="utf-8"):
        # Parse the configuration file once, up front.
        self.cf = ConfigParser()
        self.cf.read(conf_filepath, encoding)

    def get_section(self):
        """Return the list of section names in the file."""
        return self.cf.sections()

    def get_options(self, section):
        """Return the option names inside *section*."""
        return self.cf.options(section)

    def get_strValue(self, section, option):
        """Return the value of section/option as a raw string."""
        return self.cf.get(section, option)

    def get_intValue(self, section, option):
        """Return the value of section/option coerced to int."""
        return self.cf.getint(section, option)

    def get_floatValue(self, section, option):
        """Return the value of section/option coerced to float."""
        return self.cf.getfloat(section, option)

    def get_boolValue(self, section, option):
        """Return the value of section/option coerced to bool."""
        return self.cf.getboolean(section, option)
if __name__ == '__main__':
    # Smoke-test the wrapper against a sample config file on disk.
    mf=myConfig("demo.cfg")
    # NOTE(review): eval() turns the stored string back into a Python object;
    # this executes arbitrary code and is only safe while demo.cfg is trusted.
    db_mame=eval(mf.get_strValue("person_info","sex"))
    print(db_mame,type(db_mame))
    #db_port=mf.get_intValue("db","db_port")
    #print(db_port)
    section=mf.get_section()
    print(section)
    option=mf.get_options("excel")
    print(option)
45,406 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0312/study_pymysql.py | # -*- coding:utf-8 -*-
# @Time :2020-03-14 16:46
# @Email :876417305@qq.com
# @Author :yanxia
# @File :study_pymysql.PY
import pymysql
# 1. Connection settings.
#    NOTE(review): plaintext credentials in source — move to config/env vars.
host="test.lemonban.com"
user="test"
password="test"
port=3306
mysql=pymysql.connect(host=host,user=user,password=password,port=port)
# 2. Open a cursor on the connection (one "query page").
cursor=mysql.cursor()
# 3. Write the SQL statement.
#sql='select max(mobilephone) from future.member'
sql='select * from future.loan limit 10'
# 4. Execute it.
cursor.execute(sql)
# 5. Inspect the result set.
result=cursor.fetchone()#first (next) row of the result set
result=cursor.fetchall()#NOTE(review): overwrites the fetchone() result above;
                        #after fetchone(), fetchall() returns only the REMAINING rows
print(type(result),result)
# 6. Close the cursor.
cursor.close()
# 7. Close the database connection.
mysql.close()
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,407 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0307/http_unittest2.py | # -*- coding:utf-8 -*-
# @Time :2020-03-09 12:31
# @Email :876417305@qq.com
# @Author :yanxia
# @File :http_unittest2.PY
import unittest
from class_0307.http_request import HttpRequests
from ddt import ddt,data,unpack
from class_0307.http_xlsx import get_datavalue
from class_0307.http_xlsx import write_result
@ddt
class TestHttp_Request(unittest.TestCase):
    """Data-driven login test: one test case per row from get_datavalue()."""

    # Each row is unpacked into the five positional parameters below.
    @data(*get_datavalue())
    @unpack
    def test_login_request(self,url,method,data,expected,case_id):
        # Send the request described by the row and parse the JSON body.
        res=HttpRequests().http_request(url,method,data).json()
        try:
            # Only the 'msg' field of the response is checked.
            self.assertEqual(expected,res["msg"])
        except AssertionError as e:
            # On mismatch, write "failed" back to the sheet; row = case_id + 1
            # — presumably to skip a header row, confirm against the workbook.
            row=int(case_id)+1
            write_result(row,"failed")
            raise e
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,408 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0303/homework.py | # -*- coding:utf-8 -*-
# @Time :2020-03-04 18:12
# @Email :876417305@qq.com
# @Author :yanxia
# @File :homework.PY
#安排一个作业 #写一个类 类里面有2个方法 1)读数据 2)写数据
#1)读数据可以读取整个Excel里面所有的数据,每一行数据都放到一个子列表里面,
# 所有子列表数据放到一个大列表里面,要求把读取到的数据返回
# #2)写数据可以在Excel里面指定的单元格里面写入指定的值,不需要返回值
#温馨提示:记得关闭和保存Excel
from openpyxl import load_workbook
# class ReadExcel:
# def __init__(self,wb,sheet):
# self.wb=wb
# self.sheet=sheet
#
# def read_excel(self):
# max_list=[]
# for i in range(1,self.sheet.max_row+1):
# sub_list=[]
# for j in range(1,self.sheet.max_column+1):
# #if self.sheet.cell(i.j).value:
# sub_list.append(self.sheet.cell(i, j).value)
# max_list.append(sub_list)
# return max_list
#
# def write_excel(self):
# self.sheet.cell(6,1,"这个插入值")
# self.wb.save()
# self.wb.close()
# if __name__ == '__main__':
# wb=load_workbook("py15.xlsx")
# sheet=wb["Sheet1"]
# print(ReadExcel(wb,sheet).read_excel())
#老师讲解
class DoExcel:
    """Read test data from, and write results back into, one Excel sheet."""

    def __init__(self, file_name, sheet_name):
        self.file_name = file_name
        self.sheet_name = sheet_name

    def read_data(self):
        """
        Return the sheet contents as a list of rows, where each row is a
        4-item list holding the values of columns 1-4.
        """
        wb = load_workbook(self.file_name)
        sheet = wb[self.sheet_name]
        all_data = [
            [sheet.cell(row, col).value for col in range(1, 5)]
            for row in range(1, sheet.max_row + 1)
        ]
        wb.close()
        return all_data

    def write_back(self, row, column, new_value):
        """Write *new_value* into (row, column) and save the workbook."""
        wb = load_workbook(self.file_name)
        sheet = wb[self.sheet_name]
        sheet.cell(row, column).value = new_value
        wb.save(self.file_name)
        wb.close()
if __name__ == '__main__':
    # Manual check: write one cell, then dump the whole sheet.
    t=DoExcel("py15.xlsx","Sheet1")
    t.write_back(1,2,"python")
    all_data=t.read_data()
    print(all_data)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,409 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0313/testcase/test_login.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 15:05
# @Email :876417305@qq.com
# @Author :yanxia
# @File :test_login.PY
import unittest
from class_0313.common import do_excel
from class_0313.common import contants
from class_0313.common.http_request import HttpRequest2
from ddt import ddt, data, unpack
@ddt
class LoginTest(unittest.TestCase):
    """Data-driven login tests read from the 'login' sheet of the case workbook."""
    # Cases are loaded once, at class-definition (import) time.
    excel = do_excel.DoExcel(contants.case_file, "login")
    cases = excel.get_cases()
    @classmethod
    def setUpClass(cls):
        # One shared HTTP session for every case in this class.
        cls.http_request = HttpRequest2()
    @data(*cases)
    def test_login(self, case):
        # Send the request described by the case row.
        resp = self.http_request.request(case.method, case.url, case.data)
        try:
            # Compare against the raw response text, then record the outcome
            # in the workbook; row = case_id + 1, presumably skipping a header
            # row — confirm against the sheet layout.
            self.assertEqual(case.expected, resp.text)
            self.excel.write_result(case.case_id + 1, resp.text, "PASS")
        except AssertionError as e:
            self.excel.write_result(case.case_id + 1, resp.text, "Fail")
            raise e
    @classmethod
    def tearDownClass(cls):
        # Release the shared session.
        cls.http_request.close()
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,410 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0307/http_E.py | # -*- coding:utf-8 -*-
# @Time :2020-03-09 21:34
# @Email :876417305@qq.com
# @Author :yanxia
# @File :send_email.PY
import smtplib
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
class SendEmail:
    """Build a multipart HTML mail with one attachment and send it via 163 SMTP.

    Bug fixes vs the original:
    * ``server.send_message(from, to, string)`` was the wrong smtplib API —
      ``send_message`` expects a Message object as its FIRST argument, so the
      call raised at runtime. ``sendmail`` is the API that takes
      (from_addr, to_addrs, flattened-string).
    * the attachment header was misspelled ``Content-Dispostion`` and used the
      kwarg ``file_name`` instead of ``filename``, so mail clients would not
      see an attachment name.
    """

    def __init__(self):
        self.mailhost='smtp.163.com'
        self.emailname = 'yanxia_626@163.com'
        # NOTE(review): plaintext credentials in source — move to config/env.
        self.emailpwd = 'gdd610626wyx'
        self.sendto = 'yanxia_626@163.com'
        self.subject= 'python邮件测试'
        self.msg_from = 'yanxia_626@163.com'
        self.port=465  # SMTPS (implicit TLS)
        self.msg_raw="""<p>Python 邮件发送测试...</p>
        <p><a href="https://www.ketangpai.com/Home/User/login.html">点击</a></p>
        """#内容

    def _build_message(self):
        """Assemble and return the full MIMEMultipart message (body + attachment)."""
        msg_total = MIMEMultipart()

        # HTML body part and envelope headers.
        msg = MIMEText(self.msg_raw, 'html', 'utf-8')
        msg_total['Subject'] = self.subject
        msg_total['From'] = self.msg_from
        msg_total['To'] = self.sendto
        msg_total.attach(msg)

        # Attachment part: demo.txt as a generic application payload.
        # (with-block fixes the leaked file handle of the original.)
        with open('demo.txt', 'rb') as fh:
            payload = fh.read()
        mfile = MIMEApplication(payload)
        mfile.add_header('Content-Disposition', 'attachment', filename='demo.txt')
        mfile.add_header('Content-ID', '<0>')
        mfile.add_header('X-Attachment-Id', '0')
        msg_total.attach(mfile)
        return msg_total

    def send_email(self):
        """Send the built message over SMTP-over-SSL; re-raise on failure."""
        msg_total = self._build_message()
        server = smtplib.SMTP_SSL(self.mailhost, self.port)
        # Authenticate with username/password.
        server.login(self.emailname, self.emailpwd)
        try:
            server.sendmail(self.emailname, self.sendto, msg_total.as_string())
            print("发送成功")
        except Exception as e:
            print("发送失败")
            raise e
        finally:
            # Always close the SMTP session, success or failure.
            server.quit()
if __name__ == '__main__':
    # Manual check: actually sends a mail over the network.
    send=SendEmail()
    send.send_email()
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,411 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0303/learn_openpyxl.py | # -*- coding:utf-8 -*-
# @Time :2020-03-04 10:27
# @Email :876417305@qq.com
# @Author :yanxia
# @File :learn_openpyxl.PY
# openpyxl basics: Workbook creates new .xlsx files, load_workbook opens
# existing ones for reading and writing.
# from openpyxl import workbook
# # 创建excel文件
# wb=workbook.Workbook()
# wb.create_sheet("lbb")# 创建表单的方法
# wb.save("py15_lbb.xlsx")# 另存为
# Read/write walkthrough starts here.
from openpyxl import load_workbook
# Reading takes three steps.
# Step 1: open the workbook.
wb=load_workbook("py15.xlsx")
# Step 2: pick the worksheet by name.
sheet=wb["Sheet1"]
# Step 3: address a cell by (row, column) — both 1-based — and take .value.
res=sheet.cell(1,2).value #row 1, column 2
res1=sheet.cell(3,1).value
print("res的值是:{},res的类型是:{}".format(res1,type(res1)))
# Observation: numeric cells come back as numbers, other types as strings.
res2=eval(sheet.cell(4,3).value)
print(res2,type(res2))
# NOTE(review): eval() on spreadsheet content executes arbitrary code — only
# safe while the workbook is fully trusted. Mind plain vs quoted strings.
# Writing: both forms below set a cell (create or overwrite).
sheet.cell(5,1).value="白日依山尽"
sheet.cell(5,2,"yanxia")
# Save back to the same file; the workbook must not be open in Excel,
# otherwise a PermissionError is raised.
wb.save("py15.xlsx")
# Release the file handle when done.
wb.close()
# TODO: loop over all rows/columns to read every value
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,412 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0312/study_eval_json.py | # -*- coding:utf-8 -*-
# @Time :2020-03-12 20:42
# @Email :876417305@qq.com
# @Author :yanxia
# @File :study_eval_json.PY
# Demo: json.loads parses a JSON document into the matching Python objects
# (JSON null becomes None — something eval() cannot handle).
import json

params='{"status":0,"code":"20103","data":null,"msg":"手机号不能为空"}'

d1 = json.loads(params)
print(type(d1),d1)
45,413 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0313/common/contants.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 11:51
# @Email :876417305@qq.com
# @Author :yanxia
# @File :contants.PY
import os

# Project root (= class_0313), two directory levels above this file.
base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
print(base_dir)


def _under_root(*parts):
    """Join *parts* beneath the project root."""
    return os.path.join(base_dir, *parts)


# Workbook holding the test cases.
case_file = _under_root('data', 'cases.xlsx')
print(case_file)

# Master-switch configuration.
global_file = _under_root('config', 'global.conf')
print(global_file)

# Production (online) configuration.
online_file = _under_root('config', 'online.conf')
print(online_file)

# Test-environment configuration.
test_file = _under_root('config', 'test.conf')
print(test_file)

# DB settings also live in the online configuration file.
db_dir = _under_root('config', 'online.conf')
45,414 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310shizhan/homework.py | # -*- coding:utf-8 -*-
# @Time :2020-03-11 22:29
# @Email :876417305@qq.com
# @Author :yanxia
# @File :homework.PY
import requests
class HttpRequest:
    """Tiny requests wrapper that dispatches on the HTTP method name."""

    def http_request(self, method, url, params, cookies=None):
        """
        Send *params* to *url* using *method* ('get', any case, selects GET;
        anything else falls through to POST), print the JSON body and return
        the Response.

        Bug fix: the original compared ``method.lower == "get"`` — the bound
        method object itself, never equal to a string — so every request,
        including GETs, was sent as a POST.
        """
        if method.lower() == "get":
            res = requests.get(url, params, cookies=cookies)
        else:
            res = requests.post(url, params, cookies=cookies)
        print(res.json())
        return res
if __name__ == '__main__':
    # Demo flow against the lemonban test API: register -> login -> recharge.
    params={"mobilephone":"15810447878","pwd":123456}
    register_url="http://test.lemonban.com/futureloan/mvc/api/member/register"
    params1={"mobilephone":"15810447878","pwd":123456}
    login_url="http://test.lemonban.com/futureloan/mvc/api/member/login"
    params2={"mobilephone":"15810447878","amount":111}
    recharge_url="http://test.lemonban.com/futureloan/mvc/api/member/recharge"
    res1=HttpRequest().http_request("POST",register_url,params)
    res2=HttpRequest().http_request("post",login_url,params1)
    # Recharge requires the session cookie issued by the login response.
    res3=HttpRequest().http_request("post",recharge_url,params2,res2.cookies)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,415 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0303/lianxi.py | # -*- coding:utf-8 -*-
# @Time :2020-03-04 20:12
# @Email :876417305@qq.com
# @Author :yanxia
# @File :lianxi.PY
# Build five identical rows, each holding the numbers 1..4.
max_list = [[column for column in range(1, 5)] for _row in range(1, 6)]
print(max_list)
45,416 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0305/class_config.py | # -*- coding:utf-8 -*-
# @Time :2020-03-06 21:15
# @Email :876417305@qq.com
# @Author :yanxia
# @File :class_config.PY
import logging
class LogCat:
    """Logging helper: root logger at DEBUG plus a console handler at INFO.

    Bug fix: the original wrote ``ch = logging.StreamHandler`` (missing the
    call parentheses), so the class body invoked ``setLevel``/``setFormatter``
    on the *class* rather than an instance and raised ``TypeError`` the moment
    the module was imported.
    """
    # Class-level setup runs once, when the class object is created.
    my_log = logging.getLogger()
    my_log.setLevel("DEBUG")
    fmt = logging.Formatter("%(asctime)s-%(levelname)s-%(filename)s-%(name)s-日志信息:%(message)s")
    ch = logging.StreamHandler()  # was: logging.StreamHandler (no call)
    ch.setLevel("INFO")
    ch.setFormatter(fmt)
    my_log.addHandler(ch)

    def log_debug(self):
        """Emit a DEBUG record; logging methods return None."""
        log_dug = self.my_log.debug("这是debug信息")
        return log_dug

    def log_info(self):
        """Emit an INFO record."""
        log_info = self.my_log.info("这是info信息")
        return log_info

    def log_warning(self):
        """Emit a WARNING record."""
        log_waring = self.my_log.warning("这是waring信息")
        return log_waring

    def log_error(self):
        """Emit an ERROR record."""
        log_error = self.my_log.error("这是error信息")
        return log_error

    def log_critical(self):
        """Emit a CRITICAL record."""
        log_critical = self.my_log.critical("这是log_critical信息")
        return log_critical


if __name__ == '__main__':
    t = LogCat()
    t.log_error()
45,417 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310shizhan/run.py | # -*- coding:utf-8 -*-
# @Time :2020-03-17 10:42
# @Email :876417305@qq.com
# @Author :yanxia
# @File :run.PY
import unittest
from class_0314.testcase import test_login
import HTMLTestRunnerNew
from class_0314.common import contants
# Manual suite assembly, kept for reference:
# suite=unittest.TestSuite()#生成一个测试套件对象
# loader=unittest.TestLoader()#通过这个测试loader去加载这个测试套件
# suite.addTests(loader.loadTestsFromModule(test_login))
# Auto-discover every test_*.py module under the case directory instead.
discover=unittest.defaultTestLoader.discover(contants.case_dir,"test_*.py")
# Render the run into an HTML report ('wb+' because the runner writes bytes).
with open(contants.report_dir+"/report.html","wb+") as file:
    runner=HTMLTestRunnerNew.HTMLTestRunner(stream=file,title="python报告",
                                description="前程贷",
                                tester="yanxia")
    runner.run(discover)
45,418 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0312/contants.py | # -*- coding:utf-8 -*-
# @Time :2020-03-12 23:06
# @Email :876417305@qq.com
# @Author :yanxia
# @File :contants.PY
import os

# Absolute, symlink-resolved path of this very file.
base_dir = os.path.realpath(__file__)
print(base_dir)

# Directory part of the (possibly relative) path this module was loaded from.
base = os.path.dirname(__file__)
print(base)
45,419 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310shizhan/__init__.py | # -*- coding:utf-8 -*-
# @Time :2020-03-10 21:52
# @Email :876417305@qq.com
# @Author :yanxia
# @File :__init__.py.PY | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,420 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0313/common/context.py | # -*- coding:utf-8 -*-
# @Time :2020-03-16 10:18
# @Email :876417305@qq.com
# @Author :yanxia
# @File :context.PY
#上下文处理
import re
from class_0313.common.config import config
import configparser
class Context:
    """Parameter substitution for test data: replace every ``#key#``
    placeholder in a string with a value looked up first in the ``[case]``
    section of the config file, then among attributes of this class.
    """
    # Shared slot that earlier test steps can fill at runtime and later
    # test data can reference as #loan_id#.
    loan_id=None
    # NOTE(review): defined without `self`/@staticmethod — works only when
    # invoked as Context.replace(data), not on an instance; confirm callers.
    def replace(data):
        p="#(.*?)#"#regex: lazily match the text between one pair of '#'
        while re.search(p,data):#keep going while any placeholder remains
            m=re.search(p,data)#Match for the first remaining placeholder
            g=m.group(1)#the bare key between the '#' marks
            try:
                v=config.get("case",g)#prefer the value from the config file
            except configparser.NoOptionError as e:#not in config: fall back to Context attributes
                if hasattr(Context,g):
                    v=getattr(Context,g)
                else:
                    print("找不到参数化的值")
                    raise e
            print(v)
            data = re.sub(p, v, data, count=1)#substitute only this occurrence
        return data
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,421 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0305/do_excel.py | # -*- coding:utf-8 -*-
# @Time :2020-03-05 17:46
# @Email :876417305@qq.com
# @Author :yanxia
# @File :do_excel.PY
from openpyxl import load_workbook
from class_0305.read_config import ReadConfig
class DoExcel:
    """Excel reader/writer whose row selection is driven by case.conf."""
    def __init__(self,file_name,sheet_name):
        self.file_name=file_name
        self.sheet_name=sheet_name
    def read_data(self):
        """
        :param file_name: target workbook name (set in __init__)
        :param sheet_name: worksheet to read (set in __init__)
        :return: selected test data as a list of 4-item row lists
        """
        wb=load_workbook(self.file_name)
        sheet=wb[self.sheet_name]
        # The config file decides which rows to return: the string "all",
        # or a list literal of 1-based row numbers such as "[1,3]".
        line = ReadConfig("case.conf").get_strValue("lineno", "line")
        all_data=[]
        for i in range(1,sheet.max_row+1):
            sub_list = []
            sub_list.append(sheet.cell(i,1).value)
            sub_list.append(sheet.cell(i,2).value)
            sub_list.append(sheet.cell(i,3).value)
            sub_list.append(sheet.cell(i,4).value)
            all_data.append(sub_list)
        final_data=[]
        if line=="all":
            final_data=all_data
        else:# return only the rows listed in the config value
            # NOTE(review): eval() on config text executes arbitrary code —
            # safe only while case.conf is trusted.
            for i in eval(line):# eval turns the string "[1,3]" into a list
                final_data.append(all_data[i-1])
                # row number i maps to index i-1 (row 1 == all_data[0])
        wb.close()
        return final_data
    def write_back(self,row,column,new_value):# write one result cell back
        """Write *new_value* into (row, column) and save the workbook."""
        wb=load_workbook(self.file_name)
        sheet=wb[self.sheet_name]
        # Set the target cell to the supplied value.
        sheet.cell(row,column).value=new_value
        wb.save(self.file_name)
        wb.close()
if __name__ == '__main__':
    # Manual check: dump whichever rows case.conf selects.
    t=DoExcel("py15.xlsx","Sheet1").read_data()
    # t.write_back(1,2,"python")
    print(t)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,422 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0308/pathon_jichu.py | # -*- coding:utf-8 -*-
# @Time :2020-03-09 15:10
# @Email :876417305@qq.com
# @Author :yanxia
# @File :pathon_sade.PY
#变量
#数据类型:
#1.string 不可变类型
#boolean 条件判断或者逻辑控制的依据 多种运算形式的返回值
#list 有顺序的容器 可变的
#dict 是没有顺序的 可变的
#tuple元组 解包 不可变类型
'''
逻辑控制 流程控制 控制流程
条件:if。。。elif。。else
遍历 for。。。in
while
continue
break
函数
1、参数 形式参数,实际参数,位置参数,关键字参数,默认参数,动态参数
'''
#面试题1:range的特性:与列表非常相似的一种结构数据
# print(type(range(1,10)))
#面试题2:
#1.string 不可变类型,不支持修改
# str="myclass"
# str[2]="s"
# def run(a):
# if a!=10:
# return None
# print("hell0")
# def add(a,mylist=[]):
# mylist.append(a)
# return mylist
# print(add(4))#添加完4以后,就变成了默认参数是[4]
# print(add(5))#有默认参数4以后,再加进来5
# print(add(6,["a"]))#传了个实际参数["a"],再加上一个.append的值,所以打印结果是:['a', 6]
# print(add(7))
"""
类和对象:是python里面最核心的
"""
# class Movie:
# works=["导演","演员","场记"]
# def __init__(self,name):
# self.works=[]
# self.name=name
#
# # def __new__(cls, *args, **kwargs):
# # pass
# print(Movie("琅琊榜").works)
# print(Movie.works)
try:
1/0
print("没出错")
except Exception as e:
print("真的出错了")
raise
finally:
pass
# 文件处理 | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,423 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0307/http_suit.py | # -*- coding:utf-8 -*-
# @Time :2020-03-07 21:55
# @Email :876417305@qq.com
# @Author :yanxia
# @File :http_suit.PY
import unittest
import HTMLTestRunnerNew
from class_0307.http_unittest import *
from class_0307 import http_unittest2
class HttpSuite:
    """Builds unittest suites in three different ways and runs each one
    through HTMLTestRunnerNew, writing an HTML report to test_report.html.

    Fix: the report-generation code was triplicated verbatim in all three
    runner methods; it now lives in one private helper so the report
    configuration (file name, title, tester, ...) has a single source of
    truth. Public method names and signatures are unchanged.
    """

    def _run_with_report(self, suite):
        """Execute *suite* and write the HTML report (shared by all runners)."""
        with open("test_report.html", "wb") as file:
            runner = HTMLTestRunnerNew.HTMLTestRunner(
                stream=file,
                verbosity=2,
                title="我的第一次测试报告",
                description="这是自动化线上测试报告",
                tester="yanxia")
            runner.run(suite)

    def http_suite_runner_001(self):
        """Way 1: add a single named test method to the suite."""
        suite = unittest.TestSuite()
        suite.addTest(TestHttp_Request("test_login_normal"))
        self._run_with_report(suite)

    def http_suite_runner_002(self):
        """Way 2: let a TestLoader collect every test from a module."""
        suite = unittest.TestSuite()
        loader = unittest.TestLoader()
        suite.addTest(loader.loadTestsFromModule(http_unittest2))
        self._run_with_report(suite)

    def http_suite_runner_003(self):
        """Way 3: let a TestLoader collect every test from a TestCase class."""
        suite = unittest.TestSuite()
        loader = unittest.TestLoader()
        suite.addTest(loader.loadTestsFromTestCase(TestHttp_Request))
        self._run_with_report(suite)
#执行用例并生成测试报告
# with open("file.text","a+") as file:
# runner=unittest.TextTestRunner(stream=file,verbosity=2)
# runner.run(suite)
if __name__ == '__main__':
    # Script entry point: run the module-loader variant (way 2) by default.
    suite_runner = HttpSuite()
    suite_runner.http_suite_runner_002()
{'memberId':300986,'password':123456,'loanId':146828,'amount':80000} | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.