index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
2,220
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/services/__init__.py
|
# Copyright (C) 2013 by Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Services' IDs:
========= =====================
service IDs
========= =====================
Base ``base``
Blogger ``b``, ``blogger``
WordPress ``wp``, ``wordpress``
========= =====================
.. _service-options:
Options
=======
To assign options to chosen service, add ``service_options`` in :ref:`brc.py`,
for example:
.. code:: python
service = "<service id>"
service_options = {
'option1': 'value1',
'option2': 2,
}
.. _custom-service:
Writing a custom service
========================
A sample handler ``sample_service.py``:
.. code:: python
from bpy.service import base
class Service(base.Service):
# see bpy/services for examples
pass
And corresponding setting in :ref:`brc.py`:
.. code:: python
import re
# this matches the re
service = 'foobar'
services = {
'SampleService': {
'match': re.compile(r'^foobar$'),
'module': 'sample_service',
},
}
"""
import os
import re
import sys
import traceback
# Registry of bundled services.  Each entry maps a service display name to:
#   'match':  case-insensitive regex tested against the user's service ID
#   'module': dotted path of the module providing a ``Service`` class
# User-defined services (see the module docstring) follow the same shape.
services = {
  'Base': {
    'match': re.compile(r'^base$', re.I),
    'module': 'bpy.services.base',
  },
  'Blogger': {
    'match': re.compile(r'^(b|blogger)$', re.I),
    'module': 'bpy.services.blogger',
  },
  'WordPress': {
    'match': re.compile(r'^(wp|wordpress)$', re.I),
    'module': 'bpy.services.wordpress',
  },
}
def find_service(service_name, service_options, *args, **kwargs):
  """Instantiate the service whose ID pattern matches *service_name*.

  Each entry in the module-level ``services`` registry is tried in turn;
  the first whose ``match`` regex accepts *service_name* has its module
  imported and its ``Service`` class instantiated with *service_options*
  and any extra arguments.  Returns ``None`` when nothing matches or the
  matching module cannot be imported.
  """
  # Allow user-supplied service modules to be found in the working directory.
  sys.path.insert(0, os.getcwd())
  loaded = None
  for svc_name, entry in services.items():
    if not entry['match'].match(service_name):
      continue
    try:
      loaded = __import__(entry['module'], fromlist=['Service'])
    except Exception:
      # Best-effort: report the failure and keep trying other entries.
      print('Cannot load module %s of service %s' % (entry['module'], svc_name))
      traceback.print_exc()
    else:
      break
  sys.path.pop(0)
  if not loaded:
    return None
  return loaded.Service(service_options, *args, **kwargs)
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,221
|
lbarchive/b.py
|
refs/heads/master
|
/tests/test_bpy_handlers_mkd.py
|
# Copyright (C) 2013, 2014 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import unicode_literals
import unittest
import test_bpy_handlers_base as test_base
from bpy.handlers.mkd import Handler
class HandlerTestCase(test_base.BaseHandlerTestCase):
  """Run the shared handler test suite against the Markdown handler.

  Only the expected values that differ from the plain BaseHandler output
  are overridden here; the test methods themselves live in the base class.
  """

  def setUp(self):
    self.handler = Handler(None)

  # =====
  # Markdown wraps block output in <p> tags.
  test_markup_affixes_EXPECT1 = '<p>prefix-content-suffix</p>'
  test_markup_affixes_EXPECT2 = '<p>foobar</p>'
  # =====
  # Titles render inline markup but get no paragraph wrapper.
  test_generate_title_common_markup_EXPECT = 'foo <em>bar</em>'
  # =====
  test_generate_str_EXPECT = '<p>\xc3\xa1</p>'
  # =====
  test_smartypants_EXPECT = '<p>foo “bar”</p>'
  # =====
  @unittest.skip('tested in BaseHandler')
  def test_embed_images(self):
    pass

  test_embed_images_generate_SOURCE = ''
  test_embed_images_generate_EXPECT = (
    '<p><img alt="tests/test.png" src="%s" /></p>' % (
      test_base.BaseHandlerTestCase.test_embed_images_data_URI
    )
  )
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,222
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/handlers/rst.py
|
# Copyright (C) 2011-2014 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
You can specify settings-overrides_ for reStructuredText in :ref:`brc.py` or
the embed_images_, for example:
.. code:: python
handlers = {
'reStructuredText': {
'options': {
'embed_images': True,
'register_directives': {
'dir_name': MyDir,
},
'register_roles': {
'role_name': MyRole,
},
'settings_overrides': {
'footnote_references': 'brackets',
},
},
},
}
.. _settings-overrides:
http://docutils.sourceforge.net/docs/user/config.html#html4css1-writer
Custom Directives and Roles
===========================
For adding your own custom reStructuredText directives or roles, you can do it
in :ref:`brc.py` with one of the following method:
* by calling register functions of docutils directly,
* by adding in b.py's option as shown above, or
* by using decorator of b.py, for example:
.. code:: python
from docutils.parsers.rst import Directive
from bpy.handlers.rst import register_directive, register_role
@register_directive('mydir')
class MyDir(Directive):
pass
@register_role('myrole')
def myrole(name, rawtext, text, lineno, inliner, options=None,
content=None):
pass
"""
from __future__ import print_function, unicode_literals
from docutils.core import publish_parts
from docutils.parsers.rst import directives, roles
from bpy.handlers import base
def register_directive(dir_name):
  """Class decorator that registers a docutils directive under *dir_name*.

  .. code:: python
    @register_directive(name)
    class MyDirective(Directive):
      pass
  """
  def decorator(directive_cls):
    directives.register_directive(dir_name, directive_cls)
    return directive_cls
  return decorator
def register_role(role_name):
  """Function decorator that registers a docutils role under *role_name*."""
  def decorator(role_fn):
    roles.register_canonical_role(role_name, role_fn)
    return role_fn
  return decorator
class Handler(base.BaseHandler):
  """Handler for the reStructuredText markup language.

  >>> handler = Handler(None)
  >>> print(handler.generate_header({'title': 'foobar'}))
  .. !b
   title: foobar
  <BLANKLINE>
  """
  # Header block lines are rST comments: ".. !b" followed by " key: value".
  PREFIX_HEAD = '.. '
  PREFIX_END = ''
  HEADER_FMT = ' %s: %s'

  def __init__(self, filename, options=None):
    """Initialize the handler and register any user-supplied docutils
    directives/roles from the handler options."""
    super(Handler, self).__init__(filename, options)
    if not options:
      return
    # Register extensions eagerly so they are in effect for every render.
    for name, directive_cls in options.get('register_directives', {}).items():
      directives.register_directive(name, directive_cls)
    for name, role_fn in options.get('register_roles', {}).items():
      roles.register_canonical_role(name, role_fn)

  def _generate(self, markup=None):
    """Render reStructuredText *markup* (default: ``self.markup``) to HTML.

    >>> handler = Handler(None)
    >>> print(handler._generate('a *b*'))
    <p>a <em>b</em></p>
    """
    if markup is None:
      markup = self.markup
    overrides = {
      'output_encoding': 'utf8',
      'initial_header_level': 2,
      'doctitle_xform': 0,
      'footnote_references': 'superscript',
    }
    # User settings win over the defaults above.
    overrides.update(self.options.get('settings_overrides', {}))
    affix = self.id_affix
    if affix:
      # Prefix docutils-generated element IDs to avoid clashes across posts,
      # and record the affix actually used in the post header.
      overrides['id_prefix'] = affix + '-'
      self.set_header('id_affix', affix)
    parts = publish_parts(markup,
                          settings_overrides=overrides,
                          writer_name="html")
    return parts['body_pre_docinfo'] + parts['body'].rstrip()
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,223
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/handlers/text.py
|
# Copyright (C) 2013, 2014 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
The Text handler for plain text always escape HTML, and add ``<br/>`` if not
``pre_wrap``.
You can specify the following options for plain text in :ref:`brc.py`, for
example:
.. code:: python
handlers = {
'Text': {
'options': {
'pre_wrap': False
},
},
}
``pre_wrap`` will wrap output in ``<pre/>`` tag.
"""
from __future__ import print_function, unicode_literals
import cgi
import re
from bpy.handlers import base
class Handler(base.BaseHandler):
  """Handler for plain text.

  Always escapes HTML special characters; the result is either wrapped in
  a ``<pre/>`` tag (``pre_wrap`` option) or every newline gets a ``<br/>``
  appended.

  >>> handler = Handler(None)
  >>> handler.markup = 'post <content>\\n & something'
  >>> print(handler.generate())
  post &lt;content&gt;<br/>
   &amp; something
  >>> handler.options['pre_wrap'] = True
  >>> print(handler.generate())
  <pre>post &lt;content&gt;
   &amp; something</pre>
  >>> handler = Handler(None)
  >>> print(handler.generate_header({'title': 'foobar'}))
  !b
  title: foobar
  <BLANKLINE>
  """
  # Plain-text sources have no comment syntax, so headers are bare lines.
  PREFIX_HEAD = ''
  PREFIX_END = ''
  HEADER_FMT = '%s: %s'
  # Plain text cannot reference local images, so embedding is unsupported.
  SUPPORT_EMBED_IMAGES = False

  def generate_title(self, markup=None):
    """Generate a single-line HTML title from plain text.

    >>> handler = Handler(None)
    >>> print(handler.generate_title('a < b\\nc & d\\n\\nfoo'))
    a &lt; b c &amp; d foo
    """
    html = super(Handler, self).generate_title(markup)
    # Titles must be one line of inline HTML: drop any <pre> wrapping and
    # collapse the line breaks _generate inserted.
    html = html.replace('<pre>', '').replace('</pre>', '')
    return re.sub('(<br/> )+', ' ', html)

  def _generate(self, markup=None):
    """Generate HTML from plain text (default: ``self.markup``).

    >>> handler = Handler(None)
    >>> print(handler._generate('a < b\\nc & d\\n\\xc3\\xa1'))
    a &lt; b<br/>
    c &amp; d<br/>
    \xc3\xa1
    >>> handler.options['pre_wrap'] = True
    >>> print(handler._generate('abc\\ndef'))
    <pre>abc
    def</pre>
    """
    if markup is None:
      markup = self.markup
    # cgi.escape was deprecated in Python 3.2, removed in 3.8, and the cgi
    # module itself is gone in 3.13.  html.escape(quote=False) produces the
    # identical output; fall back to cgi.escape for Python 2.
    try:
      from html import escape
      escaped = escape(markup, quote=False)
    except ImportError:
      escaped = cgi.escape(markup)
    if self.options.get('pre_wrap', False):
      return '<pre>%s</pre>' % escaped
    else:
      return escaped.replace('\n', '<br/>\n')
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,224
|
lbarchive/b.py
|
refs/heads/master
|
/tests/test_setup.py
|
# Copyright (C) 2013 by Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import unittest
from docutils.core import publish_string
class SetupTestCase(unittest.TestCase):
  """Sanity check for the package's long description."""

  def test_long_description(self):
    """Ensure long description can be generated"""
    with open('README.rst') as f:
      source = f.read()
    settings = {
      # halt_level 2 makes docutils raise at warning level, so any
      # malformed reStructuredText fails this test
      'halt_level': 2,
      'raw_enabled': False,
    }
    rendered = publish_string(source, writer_name='html',
                              settings_overrides=settings)
    self.assertTrue(rendered)
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,225
|
lbarchive/b.py
|
refs/heads/master
|
/tests/test_bpy_handlers_base.py
|
# Copyright (C) 2013, 2014 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import unicode_literals
import unittest
from bpy.handlers.base import BaseHandler
class Handler(BaseHandler):
  # Minimal concrete handler: passes markup through unchanged, so the
  # BaseHandler machinery can be tested in isolation.
  def _generate(self, source=None):
    return source
class BaseHandlerTestCase(unittest.TestCase):
  """Shared test suite for BaseHandler.

  Markup-specific handler test cases subclass this and override the
  ``*_EXPECT`` class attributes where their rendering differs.
  """

  def setUp(self):
    # Handler is the pass-through subclass defined above in this module.
    self.handler = Handler(None)

  def tearDown(self):
    self.handler = None

  def test_header_no_labels(self):
    """A header block without a labels entry parses to an empty dict."""
    handler = self.handler
    handler.source = '''!b
post content'''
    header, markup = handler.split_header_markup()
    self.assertEqual(header, {})
    self.assertEqual(markup, 'post content')

  def test_header_labels_none(self):
    """An empty labels value parses to an empty list."""
    handler = self.handler
    handler.source = '''!b
labels:
post content'''
    header, markup = handler.split_header_markup()
    self.assertEqual(header, {'labels': []})
    self.assertEqual(markup, 'post content')

  def test_header_labels_single(self):
    handler = self.handler
    handler.source = '''!b
labels: foobar
post content'''
    header, markup = handler.split_header_markup()
    self.assertEqual(header, {'labels': ['foobar']})
    self.assertEqual(markup, 'post content')

  def test_header_labels_two(self):
    """Labels are comma-separated."""
    handler = self.handler
    handler.source = '''!b
labels: foo, bar
post content'''
    header, markup = handler.split_header_markup()
    self.assertEqual(header, {'labels': ['foo', 'bar']})
    self.assertEqual(markup, 'post content')

  def test_header_labels_with_empty_label(self):
    """Empty items in the comma-separated labels list are dropped."""
    handler = self.handler
    handler.source = '''!b
labels: foo, , bar
post content'''
    header, markup = handler.split_header_markup()
    self.assertEqual(header, {'labels': ['foo', 'bar']})
    self.assertEqual(markup, 'post content')

  # =====
  def test_merge_header(self):
    """merge_header keeps recognized keys, drops unknown ones, and
    normalizes values to native text strings."""
    handler = self.handler
    header = {'id': '123'}
    handler.header = header.copy()
    handler.merge_header(header.copy())
    self.assertEqual(handler.header, header)
    header['id'] = '456'
    header['blah'] = 'lol'
    handler.merge_header(header.copy())
    # 'blah' is not a recognized key, so it must not survive the merge
    del header['blah']
    self.assertEqual(handler.header, header)
    header['id'] = '789'
    uheader = {'id': '789'}
    handler.merge_header(uheader.copy())
    self.assertEqual(handler.header, header)
    # type('') is unicode under `from __future__ import unicode_literals`
    self.assertIsInstance(handler.header['id'], type(''))
    header['id'] = '123'
    uheader = {'id': '123'}
    handler.merge_header(uheader.copy())
    self.assertEqual(handler.header, header)
    self.assertIsInstance(handler.header['id'], type(''))
    self.assertEqual(list(handler.header.keys()), ['id'])
    self.assertIsInstance(list(handler.header.keys())[0], type(''))
    # merging into an empty header fills it in
    handler.header = {}
    handler.merge_header(uheader.copy())
    self.assertEqual(handler.header, header)
    self.assertIsInstance(handler.header['id'], type(''))
    self.assertEqual(list(handler.header.keys()), ['id'])
    self.assertIsInstance(list(handler.header.keys())[0], type(''))

  # =====
  def test_id_affix(self):
    """id_affix resolution: header value overrides the option; an empty
    string means "generate automatically"."""
    handler = self.handler
    handler.title = 'test'

    def test_header_override():
      # header['id_affix'] always wins over options['id_affix']
      handler.header['id_affix'] = None
      self.assertEqual(handler.id_affix, None)
      handler.header['id_affix'] = ''
      # '098f' appears to be derived from the title ('test' MD5s to
      # 098f6bcd...) -- NOTE(review): confirm against BaseHandler.id_affix
      self.assertEqual(handler.id_affix, '098f')
      handler.header['id_affix'] = 'prefix'
      self.assertEqual(handler.id_affix, 'prefix')
    # -----
    self.assertEqual(handler.id_affix, None)
    # -----
    handler.options['id_affix'] = None
    self.assertEqual(handler.id_affix, None)
    test_header_override()
    # -----
    del handler.header['id_affix']
    handler.options['id_affix'] = ''
    self.assertEqual(handler.id_affix, '098f')
    test_header_override()
    # -----
    del handler.header['id_affix']
    handler.options['id_affix'] = 'prefix'
    self.assertEqual(handler.id_affix, 'prefix')
    test_header_override()

  # =====
  test_markup_affixes_EXPECT1 = 'prefix-content-suffix'
  test_markup_affixes_EXPECT2 = 'foobar'
  test_markup_affixes_EXPECT3 = 'title'

  def test_markup_affixes(self):
    """markup_prefix/markup_suffix wrap handler.markup in generate();
    explicitly passed markup and titles are left unaffixed."""
    handler = self.handler
    handler.title = 'title'
    handler.markup = 'content'
    handler.options['markup_prefix'] = 'prefix-'
    handler.options['markup_suffix'] = '-suffix'
    self.assertEqual(
      handler.generate(),
      self.test_markup_affixes_EXPECT1)
    self.assertEqual(
      handler.generate('foobar'),
      self.test_markup_affixes_EXPECT2)
    self.assertEqual(
      handler.generate_title(),
      self.test_markup_affixes_EXPECT3)

  # =====
  def test_split_header_markup(self):
    """The '!b' marker is recognized amid surrounding noise, and both '='
    and ':' separate header keys from values."""
    handler = self.handler
    handler.source = '''xoxo !b oxox
abc= foo
def:bar
post content'''
    header, markup = handler.split_header_markup()
    expect = {'abc': 'foo', 'def': 'bar'}
    self.assertEqual(header, expect)
    self.assertEqual(markup, 'post content')
    # round-trip: a source built with the handler's own prefix/format
    # constants parses back to the same header
    source = '%s!b\n' % handler.PREFIX_HEAD
    source += handler.HEADER_FMT % ('abc', 'foo') + '\n'
    source += handler.HEADER_FMT % ('def', 'bar') + '\n'
    if handler.PREFIX_END:
      source += handler.PREFIX_END + '\n'
    source += '\npost content'
    handler.source = source
    header, markup = handler.split_header_markup()
    self.assertEqual(header, expect)
    self.assertEqual(markup, 'post content')

  # =====
  def test_generate_header(self):
    """generate_header emits the marker line plus one formatted line per
    header entry, honoring the handler's prefix constants."""
    handler = self.handler
    handler.set_header('id', '123')
    expect = '%s!b\n%s\n' % (handler.PREFIX_HEAD,
                             handler.HEADER_FMT % ('id', '123'))
    if handler.PREFIX_END:
      expect += handler.PREFIX_END + '\n'
    self.assertEqual(handler.generate_header(), expect)

  # =====
  def test_generate_title_oneline(self):
    handler = self.handler
    title = 'foobar'
    expect = 'foobar'
    result = handler.generate_title(title)
    self.assertEqual(result, expect)

  def test_generate_title_multiline(self):
    """Newlines and blank lines in a title collapse to single spaces."""
    handler = self.handler
    title = 'foo\nbar\n\nblah'
    expect = 'foo bar blah'
    result = handler.generate_title(title)
    self.assertEqual(result, expect)

  test_generate_title_common_markup_EXPECT = 'foo *bar*'

  def test_generate_title_common_markup(self):
    # the pass-through Handler leaves markup untouched; real handlers
    # override the EXPECT attribute with rendered output
    handler = self.handler
    title = 'foo *bar*'
    result = handler.generate_title(title)
    expect = self.test_generate_title_common_markup_EXPECT
    self.assertEqual(result, expect)

  # =====
  test_generate_str_MARKUP = '\xc3\xa1'
  test_generate_str_EXPECT = '\xc3\xa1'

  def test_generate__str(self):
    """_generate must return text (unicode on Py2, str on Py3)."""
    handler = self.handler
    html = handler._generate(self.test_generate_str_MARKUP)
    self.assertEqual(html, self.test_generate_str_EXPECT)
    self.assertIsInstance(html, type(''))

  def test_generate_str(self):
    """generate() must likewise return text."""
    handler = self.handler
    handler.markup = self.test_generate_str_MARKUP
    html = handler.generate()
    self.assertEqual(html, self.test_generate_str_EXPECT)
    self.assertIsInstance(html, type(''))

  # =====
  test_smartypants_MARKUP = 'foo "bar"'
  test_smartypants_EXPECT = 'foo “bar”'

  def test_smartypants(self):
    """The smartypants option converts straight quotes to curly ones."""
    handler = self.handler
    handler.options['smartypants'] = True
    handler.markup = self.test_smartypants_MARKUP
    html = handler.generate()
    self.assertEqual(html, self.test_smartypants_EXPECT)
    self.assertIsInstance(html, type(''))

  # =====
  def test_generate_post(self):
    """generate_post picks known header fields and shapes them for the
    service API (e.g. the blog id is wrapped in a dict)."""
    handler = self.handler
    handler.source = '''!b
abc=foo
title=the title
id=123
blog: 456
post content'''
    header, markup = handler.split_header_markup()
    handler.header = header
    post = handler.generate_post()
    self.assertEqual(post, {
      'title': 'the title',
      'draft': False,
      'id': '123',
      'blog': {'id': '456'}
    })

  # =====
  def test_update_source(self):
    """update_source regenerates source from header + markup without
    changing it; markup_prefix must not leak into the stored source."""
    handler = self.handler
    source = '%s!b\n%s\n' % (handler.PREFIX_HEAD,
                             handler.HEADER_FMT % ('id', '123'))
    if handler.PREFIX_END:
      source += handler.PREFIX_END + '\n'
    source += '\npost content'
    handler.source = source
    header, markup = handler.split_header_markup()
    handler.header = header
    handler.markup = markup
    handler.update_source()
    self.assertEqual(handler.source, source)
    handler.options['markup_prefix'] = 'PREFIX'
    handler.update_source()
    self.assertEqual(handler.source, source)

  # =====
  test_embed_images_src = 'tests/test.png'
  # expected data URI for tests/test.png (a tiny checked-in PNG)
  test_embed_images_data_URI = (
    'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAI'
    'AAACQd1PeAAAADElEQVQI12Oorq4GAALmAXLRBAkWAAAAAElFTkSuQmCC'
  )
  # remote URLs are left untouched
  test_embed_images_SOURCE1 = '<img src="http://example.com/example.png"/>'
  test_embed_images_EXPECT1 = test_embed_images_SOURCE1
  # local files are replaced by their data URI
  test_embed_images_SOURCE2 = '<img src="tests/test.png"/>'
  test_embed_images_EXPECT2 = '<img src="%s"/>' % test_embed_images_data_URI
  # other attributes around src are preserved
  test_embed_images_SOURCE3 = '<img alt="foo" src="tests/test.png"/>'
  test_embed_images_EXPECT3 = '<img alt="foo" src="%s"/>' % (
    test_embed_images_data_URI)
  test_embed_images_SOURCE4 = '<img src="tests/test.png" title="bar"/>'
  test_embed_images_EXPECT4 = '<img src="%s" title="bar"/>' % (
    test_embed_images_data_URI)
  # already-embedded images are not re-processed
  test_embed_images_SOURCE5 = '<img src="%s"/>' % test_embed_images_data_URI
  test_embed_images_EXPECT5 = test_embed_images_SOURCE5

  def test_embed_images(self):
    handler = self.handler
    result = handler.embed_images(self.test_embed_images_SOURCE1)
    self.assertEqual(result, self.test_embed_images_EXPECT1)
    result = handler.embed_images(self.test_embed_images_SOURCE2)
    self.assertEqual(result, self.test_embed_images_EXPECT2)
    result = handler.embed_images(self.test_embed_images_SOURCE3)
    self.assertEqual(result, self.test_embed_images_EXPECT3)
    result = handler.embed_images(self.test_embed_images_SOURCE4)
    self.assertEqual(result, self.test_embed_images_EXPECT4)
    result = handler.embed_images(self.test_embed_images_SOURCE5)
    self.assertEqual(result, self.test_embed_images_EXPECT5)

  test_embed_images_generate_SOURCE = '<img src="tests/test.png"/>'
  test_embed_images_generate_EXPECT = '<img src="%s"/>' % (
    test_embed_images_data_URI)

  def test_embed_images_generate(self):
    """embed_images runs as part of generate() when the option is on."""
    handler = self.handler
    handler.options['embed_images'] = True
    handler.markup = self.test_embed_images_generate_SOURCE
    html = handler.generate()
    self.assertEqual(html, self.test_embed_images_generate_EXPECT)
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,226
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/handlers/__init__.py
|
# Copyright (C) 2013 by Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Markup handlers' IDs and extensions:
==================== ========================================================
ID extensions
==================== ========================================================
``AsciiDoc`` ``.asciidoc``
``HTML`` ``.html``, ``.htm``, ``.raw``
``Markdown`` ``.md``, ``.mkd``, ``.mkdn``, ``.mkdown``, ``.markdown``
``reStructuredText`` ``.rst``
``Text`` ``.txt``, ``.text``
==================== ========================================================
Options
=======
The general options are supported by all handlers, defined in
:class:`bpy.handlers.base.BaseHandler`, but they have to be specified per
handler basis, the following sample code shows the options and their default
value:
.. code:: python
handlers = {
'<MARKUP HANDLER ID>': {
'options': {
# prefix string to HTML ID to avoid conflict
'id_affix': None,
# string to prepend to actual markup
'markup_prefix': '',
# string to append to actual markup
'markup_suffix': '',
# use smartypant to process the output of markup processor
'smartypants': False,
# support image embedding via data URI scheme
'embed_images': False,
},
},
}
.. _id_affix:
``id_affix``
------------
``id_affix`` is used to avoid conflict across posts' HTML element ID. It may be
a prefix or suffix, depending on handler's implementation and markup library's
support. It has three types of value:
1. ``None``: no affix to ID.
2. non-empty string: the string is the affix.
3. empty string: the affix is generated automatically.
Currently supported markup handler:
* :mod:`bpy.handlers.rst`
``markup_prefix`` and ``markup_suffix``
---------------------------------------
``markup_prefix`` and ``markup_suffix`` can be useful for adding header and
footer content for posts. Another useful case in reStructuredText is you can
use it for setting up some directives, for example ``.. sectnum::``, so you can
ensure all posts have a prefixed section number if used in conjunction with
``.. contents::``.
``smartypants``
---------------
If ``smartypants`` is enabled, then all generated HTML from markup processor
will be processed by smartypants_ library.
.. _smartypants: https://pypi.python.org/pypi/smartypants
.. _embed_images:
``embed_images``
----------------
.. note::
Only :mod:`bpy.handlers.text` does not support this option.
When this option is enabled, it looks for the ``src`` attribute of each
``img`` tag in the rendered HTML and checks whether it refers to a local
file, excluding the ``http``, ``https``, and ``data`` schemes; if the file
is found, it is read and embedded as Base64-encoded content.
For example, in reStructuredText:
.. code:: rst
.. image:: /path/to/test.png
Instead of
.. code:: html
<img alt="/path/to/test.png" src="/path/to/test.png" />
It could be replaced with, if ``/path/to/test.png`` exists:
.. code:: html
<img alt="/path/to/test.png" src="data:image/png;base64,..." />
If the image file can't be found, a message will be printed out, and the
rendered image tag will be kept untouched.
.. _custom-handler:
Writing a custom handler
========================
A sample handler ``sample_handler.py``:
.. code:: python
from bpy.handlers import base
class Handler(base.BaseHandler):
PREFIX_HEAD = ''
PREFIX_END = ''
HEADER_FMT = '%s: %s'
def _generate(self, markup=None):
if markup is None:
markup = self.markup
html = do_process(markup)
return html
And corresponding setting in ``brc.py``:
.. code:: python
import re
handlers = {
'SampleHandler': {
'match': re.compile(r'.*\.ext$'),
'module': 'sample_handler',
},
}
"""
import os
import re
import sys
import traceback
handlers = {
'AsciiDoc': {
'match': re.compile(r'.*\.asciidoc$'),
'module': 'bpy.handlers.asciidoc',
},
'HTML': {
'match': re.compile(r'.*\.(html?|raw)$'),
'module': 'bpy.handlers.html',
},
'Markdown': {
'match': re.compile(r'.*\.(markdown|md(own)?|mkdn?)$'),
'module': 'bpy.handlers.mkd',
},
'reStructuredText': {
'match': re.compile(r'.*\.rst$'),
'module': 'bpy.handlers.rst',
},
'Text': {
'match': re.compile(r'.*\.te?xt$'),
'module': 'bpy.handlers.text',
},
}
def find_handler(filename):
    """Return a Handler instance for *filename*, or None.

    The first registered handler whose pattern matches the filename and
    whose module imports successfully wins; import failures are reported
    and the search continues with the remaining handlers.
    """
    # Make the working directory importable so user-supplied handler
    # modules (see the module docstring) can be found.
    sys.path.insert(0, os.getcwd())
    loaded = None
    matched = None
    for name, spec in handlers.items():
        if not spec['match'].match(filename):
            continue
        try:
            loaded = __import__(spec['module'], fromlist=['Handler'])
            matched = spec
            break
        except Exception:
            print('Cannot load module %s of handler %s' % (spec['module'], name))
            traceback.print_exc()
    sys.path.pop(0)
    if loaded:
        return loaded.Handler(filename, matched.get('options', {}))
    return None
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,227
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/handlers/base.py
|
# Copyright (C) 2013-2015 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function, unicode_literals
import codecs
import logging
import re
import warnings
from abc import ABCMeta, abstractmethod
from base64 import b64encode
from hashlib import md5
from os.path import basename, exists, splitext
HAS_SMARTYPANTS = False
try:
import smartypants
HAS_SMARTYPANTS = True
except ImportError:
pass
class BaseHandler():
    """The base class of markup handlers.

    Subclasses must implement :meth:`_generate` and usually override
    PREFIX_HEAD, PREFIX_END, and HEADER_FMT for their comment syntax.
    """
    # NOTE(review): Python-2-style metaclass declaration; Python 3 ignores
    # __metaclass__, so @abstractmethod is not enforced there -- confirm
    # whether Python 3 enforcement is intended.
    __metaclass__ = ABCMeta
    # default handler options
    OPTIONS = {
        'markup_prefix': '',
        'markup_suffix': '',
        'smartypants': False,
        'id_affix': None,
    }
    # header keys that merge_header() accepts from a service response
    MERGE_HEADERS = ('service', 'kind', 'blog', 'id', 'url', 'draft')
    # how a single header line is serialized; subclasses override these
    # to wrap the header block in their markup's comment syntax
    HEADER_FMT = '%s: %s'
    PREFIX_HEAD = ''
    PREFIX_END = ''
    # splits the optional "!b" header paragraph from the markup body
    RE_SPLIT = re.compile(r'^(?:([^\n]*?!b.*?)\n\n)?(.*)',
                          re.DOTALL | re.MULTILINE)
    # parses one "key: value" or "key = value" header line
    RE_HEADER = re.compile(r'.*?([a-zA-Z0-9_-]+)\s*[=:]\s*(.*)\s*')
    SUPPORT_EMBED_IMAGES = True
    # matches <img> tags whose src is not already a data URI or http(s) URL
    RE_IMG = re.compile(
        r'''
        (?P<prefix><img.*?)
        src="(?!data:image/|https?://)(?P<src>[^"]*)"
        (?P<suffix>.*?>)
        ''',
        re.VERBOSE
    )

    def __init__(self, filename, options=None):
        """Read *filename* (if given) and split it into header and markup.

        :param filename: path of the post source file, or None for an
            empty in-memory handler
        :param options: per-handler options overriding OPTIONS
        """
        self.filename = filename
        self.title = ''
        self.options = self.OPTIONS.copy()
        self.options.update(options or {})
        if filename:
            with codecs.open(filename, 'r', 'utf8') as f:
                self.source = f.read()
            header, markup = self.split_header_markup()
            # default title is the file name without its extension
            self.title = splitext(basename(filename))[0]
        else:
            header = {}
            markup = ''
        self.header = header
        self.markup = markup  # goes through the markup property setter
        self.modified = False

    def set_header(self, k, v):
        """Set header

        Marks the handler as modified only when the value actually changes.

        >>> class Handler(BaseHandler):
        ...     def _generate(self, source=None): return source
        >>> handler = Handler(None)
        >>> print(handler.header)
        {}
        >>> handler.modified
        False
        >>> handler.set_header('foo', 'bar')
        >>> print(handler.header['foo'])
        bar
        >>> handler.modified
        True
        """
        if k in self.header and self.header[k] == v:
            return
        self.header[k] = v
        self.modified = True

    def merge_header(self, header):
        """Merge header

        Only keys listed in MERGE_HEADERS are taken; 'blog' is reduced to
        its ID and the 'blogger#' prefix is stripped from 'kind'.

        >>> class Handler(BaseHandler):
        ...     def _generate(self, source=None): return source
        >>> handler = Handler(None)
        >>> handler.merge_header({'id': 12345, 'bogus': 'blah'})
        >>> print(handler.header['id'])
        12345
        >>> handler.modified
        True
        """
        for k, v in header.items():
            if k not in self.MERGE_HEADERS:
                continue
            if k == 'blog':
                v = v['id']
            elif k == 'kind':
                v = v.replace('blogger#', '')
            self.set_header(k, v)

    @property
    def markup(self):
        """Return markup with markup_prefix and markup_suffix

        >>> class Handler(BaseHandler):
        ...     def _generate(self, source=None): return source
        >>> options = {
        ...     'markup_prefix': 'the prefix\\n',
        ...     'markup_suffix': '\\nthe suffix',
        ... }
        >>> handler = Handler(None, options)
        >>> handler.markup = 'content'
        >>> print(handler.markup)
        the prefix
        content
        the suffix
        """
        return '%s%s%s' % (
            self.options['markup_prefix'],
            self._markup,
            self.options['markup_suffix'],
        )

    @markup.setter
    def markup(self, markup):
        """Set the markup"""
        self._markup = markup

    @property
    def id_affix(self):
        """Return id_affix

        The initial value is from self.options, and can be overriden by
        self.header.

        Returns

        * None if it's None.
        * value if value is not ''
        * first 4 digits of md5 of value if value is '', and assign back to
          self.options. _generate method of Handler should write back to
          self.header.

        >>> class Handler(BaseHandler):
        ...     def _generate(self, source=None): return source
        >>> options = {
        ...     'id_affix': None,
        ... }
        >>> handler = Handler(None, options)
        >>> print(repr(handler.id_affix))
        None
        >>> handler.options['id_affix'] = 'foobar'
        >>> print(handler.id_affix)
        foobar
        >>> # auto generate an id affix from title
        >>> handler.options['id_affix'] = ''
        >>> handler.title = 'abc'
        >>> print(handler.id_affix)
        9001
        >>> handler.header['id_affix'] = 'override-affix'
        >>> print(handler.id_affix)
        override-affix
        """
        id_affix = self.options['id_affix']
        # override?
        if 'id_affix' in self.header:
            id_affix = self.header['id_affix']
            if self.header['id_affix'] and id_affix != 'None':
                return self.header['id_affix']
        # second case is from header of post, has to use string 'None'
        if id_affix is None or id_affix == 'None':
            return None
        if id_affix:
            return id_affix
        # id_affix == '': derive a short, stable affix from the title
        m = md5()
        # if self.title is Unicode-type string, then encode it,
        # otherwise it's byte-type, then just update with it.
        # The __future__.unicode_literals ensures '' is unicode-type.
        if isinstance(self.title, type('')):
            m.update(self.title.encode('utf8'))
        else:
            m.update(self.title)
        return m.hexdigest()[:4]

    @abstractmethod
    def _generate(self, markup=None):
        """Generate HTML of markup source"""
        raise NotImplementedError

    def generate(self, markup=None):
        """Generate HTML

        Runs _generate(), then optionally the smartypants filter and local
        image embedding, according to the handler options.

        >>> class Handler(BaseHandler):
        ...     def _generate(self, markup=None): return markup
        >>> handler = Handler(None)
        >>> print(handler.generate('foo "bar"'))
        foo "bar"
        >>> handler.options['smartypants'] = True
        >>> print(handler.generate('foo "bar"'))
        foo “bar”
        """
        if markup is None:
            markup = self.markup
        html = self._generate(markup)
        if self.options.get('smartypants', False):
            if not HAS_SMARTYPANTS:
                warnings.warn("smartypants option is set, "
                              "but the library isn't installed.", RuntimeWarning)
                # NOTE(review): returning here also skips embed_images below
                # when smartypants is requested but the library is missing --
                # confirm that is intended.
                return html
            Attr = smartypants.Attr
            html = smartypants.smartypants(html, Attr.set1 | Attr.w)
        if self.SUPPORT_EMBED_IMAGES and self.options.get('embed_images', False):
            html = self.embed_images(html)
        return html

    def generate_header(self, header=None):
        """Generate header in text for writing back to the file

        List-valued headers are joined with ', '; 'draft' is written via
        repr(). Empty prefix/end lines are dropped from the output.

        >>> class Handler(BaseHandler):
        ...     PREFIX_HEAD = 'foo '
        ...     PREFIX_END = 'bar'
        ...     HEADER_FMT = '--- %s: %s'
        ...     def _generate(self, source=None): pass
        >>> handler = Handler(None)
        >>> print(handler.generate_header({'title': 'foobar'}))
        foo !b
        --- title: foobar
        bar
        <BLANKLINE>
        >>> print(handler.generate_header({'labels': ['foo', 'bar']}))
        foo !b
        --- labels: foo, bar
        bar
        <BLANKLINE>
        """
        if header is None:
            header = self.header
        lines = [self.PREFIX_HEAD + '!b']
        for k, v in header.items():
            if k in ('labels', 'categories'):
                v = ', '.join(v)
            elif k == 'draft':
                v = repr(v)
            lines.append(self.HEADER_FMT % (k, v))
        lines.append(self.PREFIX_END)
        return '\n'.join([_f for _f in lines if _f]) + '\n'

    def generate_title(self, title=None):
        """Generate title for posting

        The title is run through generate(), then stripped of <p> tags and
        flattened to a single line.

        >>> class Handler(BaseHandler):
        ...     def _generate(self, source=None): return source
        >>> handler = Handler(None)
        >>> print(handler.generate_title('foo "bar"'))
        foo "bar"
        >>> print(handler.generate_title('foo\\nbar\\n\\n'))
        foo bar
        >>> handler.options['smartypants'] = True
        >>> print(handler.generate_title('foo "bar"'))
        foo “bar”
        """
        if title is None:
            title = self.header.get('title', self.title)
        title = self.generate(title)
        title = title.replace('<p>', '').replace('</p>', '')
        # no trailing newlines
        title = re.sub(r'\n+', ' ', title).rstrip()
        return title

    def generate_post(self):
        """Generate dict for merging to post object of API"""
        post = {'title': self.generate_title(), 'draft': False}
        for k in ('blog', 'id', 'labels', 'categories', 'draft'):
            if k not in self.header:
                continue
            if k == 'blog':
                # the API expects the blog as an object with an 'id' key
                post[k] = {'id': self.header[k]}
            else:
                post[k] = self.header[k]
        return post

    def split_header_markup(self, source=None):
        """Split source into header and markup parts

        It also parses header into a dict."""
        if source is None:
            source = self.source
        header, markup = self.RE_SPLIT.match(source).groups()
        if not header:
            logging.warning('found no header')
        if not markup:
            logging.warning('markup is empty')
        logging.debug('markup length = %d' % len(markup))
        _header = {}
        if header:
            for item in header.split('\n'):
                m = self.RE_HEADER.match(item)
                if not m:
                    continue
                # strip surrounding whitespace from both key and value
                k, v = list(map(type('').strip, m.groups()))
                if k in ('labels', 'categories'):
                    # comma-separated list; drop empty entries
                    v = [_f for _f in [label.strip() for label in v.split(',')] if _f]
                elif k == 'draft':
                    v = v.lower() in ('true', 'yes', '1')
                _header[k] = v
        header = _header
        logging.debug('header = %r' % header)
        return header, markup

    def update_source(self, header=None, markup=None, only_returned=False):
        """Rebuild the full source text from header and markup.

        :param only_returned: when True, do not store the result back into
            self.source, just return it
        """
        if header is None:
            header = self.header
        if markup is None:
            markup = self._markup
        source = self.generate_header(header) + '\n' + markup
        if not only_returned:
            self.source = source
        return source

    def write(self, forced=False):
        """Write source back to file"""
        if not self.modified:
            if not forced:
                return
        else:
            # regenerate self.source from the current header and markup
            self.update_source()
        with codecs.open(self.filename, 'w', 'utf8') as f:
            f.write(self.source)
        self.modified = False

    def embed_images(self, html):
        """Embed images on local filesystem as data URI

        >>> class Handler(BaseHandler):
        ...     def _generate(self, source=None): return source
        >>> handler = Handler(None)
        >>> html = '<img src="http://example.com/example.png"/>'
        >>> print(handler.embed_images(html))
        <img src="http://example.com/example.png"/>
        >>> html = '<img src="tests/test.png"/>'
        >>> print(handler.embed_images(html)) #doctest: +ELLIPSIS
        <img src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAAB...QmCC"/>
        """
        if not self.SUPPORT_EMBED_IMAGES:
            raise RuntimeError('%r does not support embed_images' % type(self))
        return self.RE_IMG.sub(self._embed_image, html)

    @staticmethod
    def _embed_image(match):
        """re.sub callback: replace a local src with a Base64 data URI."""
        src = match.group('src')
        if not exists(src):
            # leave the tag untouched if the file cannot be found
            print('%s is not found.' % src)
            return match.group(0)
        with open(src, 'rb') as f:
            data = b64encode(f.read()).decode('ascii')
        return '%ssrc="%s"%s' % (
            match.group('prefix'),
            # the MIME subtype is taken from the file extension
            'data:image/%s;base64,%s' % (splitext(src)[1].lstrip('.'), data),
            match.group('suffix'),
        )
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,228
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/services/blogger.py
|
# Copyright (C) 2013-2016 by Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Blogger service recognizes the following options in :ref:`brc.py`:
.. _blogger-brc:
.. code:: python
service = 'blogger'
service_options = {
client_id: '<your client ID>',
client_secret: '<your client secret>',
'blog': <blog id>,
}
You can use ``blogs`` command to quickly get the blog ID.
.. _Authorization:
Authorization
=============
You need to authorize *b.py* to access your Blogger account with your OAuth
`client ID`_. Simply using ``blogs`` command (see *Commands* section) to start
the authorization process:
.. code:: sh
b.py blogs
Once you follow the prompted steps, there should be a b.dat_ created under the
current working directory, you should keep it safe.
.. _Client ID:
Client ID
=========
You will need to obtain a OAuth Client ID in order to use *b.py*.
1. Go to `Google Developers Console`_.
2. Create a new project.
3. Enable *Blogger API*.
4. Create a *OAuth client ID* credential with *Other* application type.
5. Download the credential JSON for *Client Secret*.
6. Add *Client ID* and *Client Secret* to your :ref:`brc.py` as shown here__.
.. _Google Developers Console: https://console.developers.google.com/
__ blogger-brc_
.. _b.dat:
``b.dat``
=========
``b.dat`` is a credential file for Blogger service, it's read by *b.py* from
the current directory.
To create the file, please follow Authorization_.
"""
from __future__ import print_function
import os
import sys
import httplib2
from bpy.services.base import Service as BaseService
if sys.version_info.major == 2:
from apiclient.discovery import build
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.file import Storage as BaseStorage
from oauth2client.tools import run_flow, argparser
API_STORAGE = 'b.dat'
class Storage(BaseStorage):
    """Credential storage that tolerates a symlinked credential file.

    Inherits the API Storage to suppress CredentialsFileSymbolicLinkError;
    a symbolic link is reported with a one-time warning message instead.
    """

    def __init__(self, filename):
        super(Storage, self).__init__(filename)
        # tracks whether the symlink warning has already been printed
        self._filename_link_warned = False

    def _validate_file(self):
        # Warn (once) instead of raising when the credential file is a
        # symbolic link.
        if not os.path.islink(self._filename):
            return
        if self._filename_link_warned:
            return
        print('File: %s is a symbolic link.' % self._filename)
        self._filename_link_warned = True
class Service(BaseService):
    """Blogger service built on the Blogger v3 API.

    Requires ``client_id`` and ``client_secret`` in the service options;
    see the module docstring for how to obtain them.
    """

    service_name = 'blogger'

    def __init__(self, *args, **kwargs):
        super(Service, self).__init__(*args, **kwargs)
        # lazily initialized by auth(): authorized HTTP client and the
        # Blogger API service object
        self.http = None
        self.service = None
        if 'client_id' not in self.options or 'client_secret' not in self.options:
            raise RuntimeError(
                'You need to supply client ID and secret, see '
                'http://pythonhosted.org/b.py/apidoc/bpy.services.html#client-id'
            )
        self.client_id = self.options['client_id']
        self.client_secret = self.options['client_secret']

    def auth(self):
        """Run the OAuth2 flow if needed and build the API client.

        Credentials are cached in the API_STORAGE file (b.dat) in the
        current directory; raises RuntimeError on Python 3, where the
        google-api-python-client dependency is unavailable.
        """
        if sys.version_info.major != 2:
            msg = ('This command requires google-api-python-client, '
                   'which only support Python 2')
            raise RuntimeError(msg)
        if self.http and self.service:
            # already authorized
            return
        FLOW = OAuth2WebServerFlow(
            self.client_id,
            self.client_secret,
            'https://www.googleapis.com/auth/blogger',
            auth_uri='https://accounts.google.com/o/oauth2/auth',
            token_uri='https://accounts.google.com/o/oauth2/token',
        )
        storage = Storage(API_STORAGE)
        credentials = storage.get()
        if credentials is None or credentials.invalid:
            # no (valid) cached credentials: run the interactive flow
            credentials = run_flow(FLOW, storage, argparser.parse_args([]))
        http = httplib2.Http()
        self.http = credentials.authorize(http)
        self.service = build("blogger", "v3", http=self.http)

    def list_blogs(self):
        """Print the ID and name of every blog of the authorized user."""
        self.auth()
        blogs = self.service.blogs()
        req = blogs.listByUser(userId='self')
        resp = req.execute(http=self.http)
        print('%-20s: %s' % ('Blog ID', 'Blog name'))
        for blog in resp['items']:
            print('%-20s: %s' % (blog['id'], blog['name']))

    def post(self):
        """Create or update a post/page from the source file.

        Chooses insert vs. update by the presence of an ``id`` header, then
        merges the API response back into the file header and writes it.
        """
        handler, post = self.make_handler_post()
        if 'blog' not in post:
            print('You need to specify which blog to post on '
                  'in either brc.py or header of %s.' % handler.filename)
            sys.exit(1)
        self.auth()
        # resource kind arrives as 'blogger#post' / 'blogger#page'
        kind = post['kind'].replace('blogger#', '')
        title = post['title']
        if kind == 'post':
            posts = self.service.posts()
        elif kind == 'page':
            posts = self.service.pages()
        else:
            raise ValueError('Unsupported kind: %s' % kind)
        data = {
            'blogId': post['blog']['id'],
            'body': post,
        }
        if 'id' in post:
            # existing resource: update, and publish or revert-to-draft
            # according to the 'draft' header
            data['%sId' % kind] = post['id']
            action = 'revert' if post['draft'] else 'publish'
            data[action] = True
            print('Updating a %s: %s' % (kind, title))
            req = posts.update(**data)
        else:
            data['isDraft'] = post['draft']
            print('Posting a new %s: %s' % (kind, title))
            req = posts.insert(**data)
        resp = req.execute(http=self.http)
        # reflect the service's draft status back into the local header
        resp['draft'] = resp['status'] == 'DRAFT'
        handler.merge_header(resp)
        handler.write()

    def search(self, q):
        """Search the configured blog for *q* and print matching posts.

        Raises ValueError if no blog ID is configured in the options.
        """
        if self.options['blog'] is None:
            raise ValueError('no blog ID to search')
        self.auth()
        # trim the API response to the fields actually printed below
        fields = 'items(labels,published,title,url)'
        posts = self.service.posts()
        req = posts.search(blogId=self.options['blog'], q=q, fields=fields)
        resp = req.execute(http=self.http)
        items = resp.get('items', [])
        print('Found %d posts on Blog %s' % (len(items), self.options['blog']))
        print()
        for post in items:
            print(post['title'])
            labels = post.get('labels', [])
            if labels:
                print('Labels:', ', '.join(labels))
            print('Published:', post['published'])
            print(post['url'])
            print()
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,229
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/services/base.py
|
# Copyright (C) 2013 by Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Base recognizes no options, it's only used for ``generate`` or ``checklink``
commands.
"""
from __future__ import print_function
import codecs
import os
import sys
from io import StringIO
from os import path
from tempfile import gettempdir
from bpy.handlers import find_handler
HAS_LNKCKR = False
try:
from lnkckr.checkers.html import Checker
HAS_LNKCKR = True
except ImportError:
pass
TEMPLATE_PATH = path.join(os.getcwd(), 'tmpl.html')
class Service(object):
    """Base service: implements the service-independent commands.

    Only ``generate`` and ``checklink`` are supported here; ``post`` and
    ``search`` must be provided by subclasses.
    """

    service_name = 'base'

    def __init__(self, options, filename=None):
        """
        :param options: service options dict (may contain a ``blog`` ID)
        :param filename: path of the post source file, if any
        """
        self.options = options
        self.filename = filename

    def post(self):
        """Publish the post to the service"""
        raise NotImplementedError

    def make_handler_post(self):
        """Find a markup handler for the file and build the post dict.

        Exits the process if no handler matches the filename.

        :return: (handler, post) tuple
        """
        handler = find_handler(self.filename)
        if not handler:
            print('No handler for the file!')
            sys.exit(1)
        hdr = handler.header
        post = {
            'service': self.service_name,
            # default resource kind is blogger#post
            'kind': 'blogger#%s' % hdr.get('kind', 'post'),
            'content': handler.generate(),
        }
        # Use .get() so a missing 'blog' option is simply skipped instead
        # of raising KeyError (the base service recognizes no options); the
        # file header may still supply the blog ID via generate_post().
        if isinstance(self.options.get('blog'), int):
            post['blog'] = {'id': self.options['blog']}
        post.update(handler.generate_post())
        return handler, post

    def generate(self):
        """Render the post to HTML files in the system temp directory.

        Always writes draft.html (bare content); when tmpl.html exists in
        the working directory, also writes preview.html with %%Title%% and
        %%Content%% substituted.
        """
        handler, post = self.make_handler_post()
        with codecs.open(path.join(gettempdir(), 'draft.html'), 'w',
                         encoding='utf8') as f:
            f.write(post['content'])
        if path.exists(TEMPLATE_PATH):
            with codecs.open(TEMPLATE_PATH, encoding='utf8') as f:
                html = f.read()
            html = html.replace('%%Title%%', post['title'])
            html = html.replace('%%Content%%', post['content'])
            with codecs.open(path.join(gettempdir(), 'preview.html'), 'w',
                             encoding='utf8') as f:
                f.write(html)

    def checklink(self):
        """Check links in the rendered post with the lnkckr library."""
        if not HAS_LNKCKR:
            print('You do not have lnkckr library')
            return
        handler, post = self.make_handler_post()
        c = Checker()
        c.process(StringIO(post['content']))
        c.check()
        print()
        c.print_all()

    def search(self, q):
        """Search posts"""
        raise NotImplementedError
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,230
|
lbarchive/b.py
|
refs/heads/master
|
/tests/test_bpy_handlers_rst.py
|
# Copyright (C) 2013, 2014 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import unicode_literals
import unittest
from docutils import nodes
from docutils.parsers.rst import Directive
import test_bpy_handlers_base as test_base
from bpy.handlers.rst import Handler, register_directive, register_role
class HandlerTestCase(test_base.BaseHandlerTestCase):
    """Tests of the reStructuredText handler.

    Inherits the generic handler tests from BaseHandlerTestCase and
    overrides the expected values for reST-specific HTML output.
    """

    def setUp(self):
        # a handler without a backing file
        self.handler = Handler(None)

    # =====
    def test_options_register_directive_decorator(self):
        # register a custom directive via the @register_directive decorator
        source = '.. dtestdir::'
        expect = '<p>TEST</p>'

        @register_directive('dtestdir')
        class dTestDir(Directive):
            def run(self):
                return [nodes.raw('', expect, format='html')]

        handler = Handler(None)
        self.assertEqual(handler.generate(source), expect)

    def test_options_register_role_decorator(self):
        # register a custom role via the @register_role decorator
        source = 'abc :dtestrole:`123` def'
        expect = '<p>abc <em>TEST</em> def</p>'

        @register_role('dtestrole')
        def dTestRole(*args, **kwds):
            return [nodes.raw('', '<em>TEST</em>', format='html')], []

        handler = Handler(None)
        self.assertEqual(handler.generate(source), expect)

    def test_options_register_directives(self):
        # register a custom directive via the handler options dict
        source = '.. testdir::'
        expect = '<p>TEST</p>'

        class TestDir(Directive):
            def run(self):
                return [nodes.raw('', expect, format='html')]

        options = {'register_directives': {'testdir': TestDir}}
        handler = Handler(None, options)
        self.assertEqual(handler.generate(source), expect)

    def test_options_register_roles(self):
        # register a custom role via the handler options dict
        source = 'abc :testrole:`123` def'
        expect = '<p>abc <em>TEST</em> def</p>'

        def TestRole(*args, **kwds):
            return [nodes.raw('', '<em>TEST</em>', format='html')], []

        options = {'register_roles': {'testrole': TestRole}}
        handler = Handler(None, options)
        self.assertEqual(handler.generate(source), expect)

    # =====
    def test_id_affix(self):
        handler = self.handler
        handler.title = 'test'
        source = ('Test Handler\n'
                  '------------')
        html_base = ('<div class="section" id="%stest-handler">\n'
                     '<h2>Test Handler</h2>\n'
                     '</div>')
        # no affix configured: plain section id
        html = html_base % ''
        self.assertEqual(handler.generate(source), html)
        # empty affix: auto-generated from the title, md5('test')[:4] == '098f'
        handler.header['id_affix'] = ''
        html = html_base % '098f-'
        self.assertEqual(handler.generate(source), html)
        self.assertEqual(handler.modified, True)
        self.assertEqual(handler.generate_header(), '''.. !b
id_affix: 098f
''')
        # an explicit affix from the header wins over the generated one
        handler.header['id_affix'] = 'foobar-prefix'
        html = html_base % 'foobar-prefix-'
        self.assertEqual(handler.generate(source), html)

    # =====
    test_markup_affixes_EXPECT1 = '<p>prefix-content-suffix</p>'
    test_markup_affixes_EXPECT2 = '<p>foobar</p>'
    # =====
    test_generate_title_common_markup_EXPECT = 'foo <em>bar</em>'
    # =====
    test_generate_str_EXPECT = '<p>\xc3\xa1</p>'
    # =====
    test_smartypants_EXPECT = '<p>foo “bar”</p>'

    # =====
    @unittest.skip('tested in BaseHandler')
    def test_embed_images(self):
        # covered by the base-class embedding test; skip the reST duplicate
        pass

    test_embed_images_generate_SOURCE = '.. image:: tests/test.png'
    test_embed_images_generate_EXPECT = (
        '<img alt="tests/test.png" src="%s" />' % (
            test_base.BaseHandlerTestCase.test_embed_images_data_URI
        )
    )
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,231
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/handlers/asciidoc.py
|
#!/usr/bin/env python
# Copyright (C) 2013, 2014 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
You can specify embed_images_, for example:
.. code:: python
handlers = {
'AsciiDoc': {
'options': {
'embed_images': True,
},
},
}
"""
from __future__ import print_function, unicode_literals
import StringIO
from bpy.api.asciidocapi import AsciiDocAPI
from bpy.handlers import base
class Handler(base.BaseHandler):
    """Handler for AsciiDoc markup language

    >>> handler = Handler(None)
    >>> print(handler.generate_header({'title': 'foobar'}))
    // !b
    // title: foobar
    <BLANKLINE>
    """

    # header lines are serialized as AsciiDoc line comments
    PREFIX_HEAD = '// '
    PREFIX_END = ''
    HEADER_FMT = '// %s: %s'

    def _generate(self, markup=None):
        """Generate HTML from AsciiDoc

        >>> handler = Handler(None)
        >>> print(handler._generate('a *b*'))
        <p>a <strong>b</strong></p>
        >>> print(handler._generate('a\\nb'))
        <p>a
        b</p>
        >>> print(handler._generate('a\\nb\\n\\nc'))
        <p>a
        b</p>
        <p>c</p>
        """
        source = self.markup if markup is None else markup
        infile = StringIO.StringIO(source.encode('utf8'))
        outfile = StringIO.StringIO()
        engine = AsciiDocAPI()
        engine.options('--no-header-footer')
        engine.execute(infile, outfile, backend='html4')
        # normalize line endings and drop the trailing newline
        return outfile.getvalue().decode('utf8').replace('\r\n', '\n').rstrip()
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,232
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/handlers/html.py
|
# Copyright (C) 2013, 2014 Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
HTML handler simply takes the file content as its output, and assumes it's
valid HTML; therefore the handler doesn't edit or validate the content.
You can specify embed_images_, for example:
.. code:: python
handlers = {
'HTML': {
'options': {
'embed_images': True,
},
},
}
"""
from __future__ import print_function, unicode_literals
from bpy.handlers import base
class Handler(base.BaseHandler):
  """Pass-through handler for raw HTML sources.

  Header lines are wrapped in an HTML comment block:

  >>> handler = Handler(None)
  >>> print(handler.generate_header({'title': 'foobar'}))
  <!-- !b
  title: foobar
  -->
  <BLANKLINE>
  """
  PREFIX_HEAD = '<!-- '
  PREFIX_END = '-->'
  HEADER_FMT = '%s: %s'

  def _generate(self, markup=None):
    """Return the markup unmodified.

    The content is assumed to already be valid HTML; no conversion,
    editing, or validation is performed.

    >>> handler = Handler(None)
    >>> print(handler._generate('<br/>'))
    <br/>
    """
    return self.markup if markup is None else markup
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,233
|
lbarchive/b.py
|
refs/heads/master
|
/bpy/services/wordpress.py
|
# Copyright (C) 2013, 2014, 2016 by Yu-Jie Lin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
WordPress service recognizes the following options in :ref:`brc.py`:
.. code:: python
service = 'wordpress'
service_options = {
'blog': <blog url>,
'username': 'user01',
'password': 'secret',
}
``blog`` should be the URL of WordPress blog, for example,
``http://<something>.wordpress.com/`` or ``http://example.com/wordpress/``.
Note that the trailing slash must be included.
In order to use WordPress XML-RPC API, you must provide ``username`` and
``password``.
"""
from __future__ import print_function
import sys
from bpy.handlers import find_handler
from bpy.services.base import Service as BaseService
# isort has different result for Python 2 and 3, so skip them
from wordpress_xmlrpc import Client, WordPressPage, WordPressPost # isort:skip
from wordpress_xmlrpc.methods import posts # isort:skip
class Service(BaseService):
  """WordPress service backed by the XML-RPC API.

  Requires ``blog`` (site URL ending in '/'), ``username`` and ``password``
  in the service options; see the module docstring.
  """

  service_name = 'wordpress'

  def __init__(self, *args, **kwargs):
    super(Service, self).__init__(*args, **kwargs)
    # XML-RPC client; created lazily by auth() before the first API call.
    self.service = None

  def auth(self):
    """Create the XML-RPC client against ``<blog>/xmlrpc.php``."""
    # NOTE: options['blog'] is expected to end with '/' (see module doc).
    self.service = Client(self.options['blog'] + 'xmlrpc.php',
                          self.options['username'],
                          self.options['password'])

  def make_handler_post(self):
    """Build the (handler, post-dict) pair for the current file.

    Exits the process with status 1 when no markup handler matches the
    file. The post dict always carries 'service', 'kind' (default 'post')
    and the generated 'content'; header fields from the source file are
    merged in last and may override these.
    """
    handler = find_handler(self.filename)
    if not handler:
      print('No handler for the file!')
      sys.exit(1)
    hdr = handler.header
    post = {
      'service': self.service_name,
      'kind': hdr.get('kind', 'post'),
      'content': handler.generate(),
    }
    # type('') == str under `from __future__ import print_function` only;
    # a plain string blog option is wrapped into the {'id': ...} shape.
    if isinstance(self.options['blog'], type('')):
      post['blog'] = {'id': self.options['blog']}
    post.update(handler.generate_post())
    return handler, post

  def post(self):
    """Create or update the post/page, then write IDs back into the file.

    A post with an 'id' header is updated in place; otherwise a new
    post/page is created and its id/url are recorded. Either way the
    service/blog/kind/draft fields are merged back into the source
    file's header via the handler.
    """
    handler, post = self.make_handler_post()
    if 'blog' not in post:
      print('You need to specify which blog to post on '
            'in either brc.py or header of %s.' % handler.filename)
      sys.exit(1)
    self.auth()
    kind = post['kind']
    title = post['title']
    # 'post' maps to a WordPress post; anything else is treated as a page.
    if kind == 'post':
      wpost = WordPressPost()
    else:
      wpost = WordPressPage()
    wpost.title = title
    wpost.content = post['content']
    wpost.post_status = 'draft' if post['draft'] else 'publish'
    wpost.terms_names = {
      'post_tag': post.get('labels', []),
      'category': post.get('categories', []),
    }
    resp = {}
    if 'id' in post:
      print('Updating a %s: %s' % (kind, title))
      self.service.call(posts.EditPost(post['id'], wpost))
    else:
      print('Posting a new %s: %s' % (kind, title))
      wpost.id = self.service.call(posts.NewPost(wpost))
      # Re-fetch so the permalink assigned by the server is available.
      wpost = self.service.call(posts.GetPost(wpost.id))
      resp['id'] = wpost.id
      resp['url'] = wpost.link
    for k in ('service', 'blog', 'kind', 'draft'):
      resp[k] = post[k]
    handler.merge_header(resp)
    handler.write()
|
{"/tests/test_bpy_handlers_text.py": ["/bpy/handlers/text.py"], "/bpy/handlers/mkd.py": ["/bpy/handlers/__init__.py"], "/b.py": ["/bpy/handlers/__init__.py", "/bpy/services/__init__.py"], "/tests/test_bpy_handlers_mkd.py": ["/bpy/handlers/mkd.py"], "/bpy/handlers/rst.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/text.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_base.py": ["/bpy/handlers/base.py"], "/bpy/services/blogger.py": ["/bpy/services/base.py"], "/bpy/services/base.py": ["/bpy/handlers/__init__.py"], "/tests/test_bpy_handlers_rst.py": ["/bpy/handlers/rst.py"], "/bpy/handlers/asciidoc.py": ["/bpy/handlers/__init__.py"], "/bpy/handlers/html.py": ["/bpy/handlers/__init__.py"], "/bpy/services/wordpress.py": ["/bpy/handlers/__init__.py", "/bpy/services/base.py"]}
|
2,242
|
7h3rAm/kalpi
|
refs/heads/master
|
/bitcoin.py
|
#!/usr/bin/env python3
from datetime import datetime, timezone
from pprint import pprint
import sparkline
import hashlib
import random
import time
import csv
import utils
"""
http://172.16.1.48:3006/api/v1/blocks/tip/height
http://172.16.1.48:3006/api/v1/block-height/695148
http://172.16.1.48:3006/api/v1/block/000000000000000015dc777b3ff2611091336355d3f0ee9766a2cf3be8e4b1ce
http://172.16.1.48:3006/api/v1/block/000000000000000015dc777b3ff2611091336355d3f0ee9766a2cf3be8e4b1ce/txids
http://172.16.1.48:3006/api/v1/block/000000000000000015dc777b3ff2611091336355d3f0ee9766a2cf3be8e4b1ce/txs
http://172.16.1.48:3006/api/v1/blocks
http://172.16.1.48:3006/api/v1/difficulty-adjustment
http://172.16.1.48:3006/api/v1/fees/mempool-blocks
http://172.16.1.48:3006/api/v1/fees/recommended
http://172.16.1.48:3006/api/v1/mempool/txids
http://172.16.1.48:3006/api/v1/mempool/recent
http://172.16.1.48:3006/api/v1/tx/15e10745f15593a899cef391191bdd3d7c12412cc4696b7bcb669d0feadc8521
http://172.16.1.48:3006/api/v1/tx/15e10745f15593a899cef391191bdd3d7c12412cc4696b7bcb669d0feadc8521/status
https://api.coinpaprika.com/v1/tickers/btc-bitcoin
https://chain.so/api/v2/get_address_balance
https://chain.so/api/v2/get_tx_received
https://api.github.com/repos/7h3rAm/writeups
https://check.torproject.org/api/ip
https://ifconfig.me/all.json
http://ip-api.com/json/108.193.5.5
"""
class Bitcoin:
def __init__(self):
    """Resolve data-file paths and load persisted state.

    Loads the monitored-address list and the aggregated stats JSON;
    when the stats file is missing/unreadable, starts from an empty
    skeleton with zeroed counters for each category.
    """
    self.datastore_url = "https://raw.githubusercontent.com/7h3rAm/datastore/master"
    self.datastorepath = "%s/datastore" % (utils.expand_env(var="$PROJECTSPATH"))
    self.statsfilepath = "%s/datastore/bitcoin.json" % (utils.expand_env(var="$PROJECTSPATH"))
    self.addressesfilepath = "%s/toolbox/bootstrap/btctxsmon-addresses.json" % (utils.expand_env(var="$HOME"))
    self.addresses = utils.load_json(self.addressesfilepath)
    try:
      self.bitcoin = utils.load_json(self.statsfilepath)
    except Exception:  # was a bare except; keep SystemExit/KeyboardInterrupt propagating
      def zerostats():
        # One zeroed counter set; same shape is used globally and per category.
        return {
          "count_address": 0,
          "count_wallet": 0,
          "count_received": 0,
          "count_sent": 0,
          "count_balance": 0,
          "count_txs": 0,
        }
      stats = zerostats()
      stats["donation"] = zerostats()
      stats["popular"] = zerostats()
      stats["ransom"] = zerostats()
      self.bitcoin = {
        "category": {
          "donation": {},
          "popular": {},
          "ransom": {}
        },
        "graph": {},
        "last_update": None,
        "nodes": [],
        "nodessummary": {},
        "stats": stats,
      }
def sparkify(self, content, maxsize=10, unique=True, sparkmode=True, skiphashing=True):
    """Build a short glyph fingerprint for *content*.

    With skiphashing the content itself is used as the id (it is expected
    to consist of hex digits); otherwise its SHA-256 hexdigest is used.
    unique=True gives a deterministic glyph string (sparkline bars, or a
    mapped symbol set when sparkmode is False); unique=False produces a
    random symbol string of the same length.
    """
    if skiphashing:
      sparkid = content
    else:
      sparkid = hashlib.sha256(content.encode("utf-8")).hexdigest()
    # Each hex digit becomes one sparkline bar glyph.
    levels = [int(x, base=16) for x in sparkid]
    spark = "".join(sparkline.sparkify(levels))
    charmap = {
      "▁": "◐",
      "▂": "■",
      "▃": "◩",
      "▄": "◆",
      "▅": "◢",
      "▆": "◨",
      "▇": "●",
      "█": "▲",
    }
    if unique:
      clipped = spark[:maxsize]
      if sparkmode:
        return clipped
      return "".join(charmap[ch] for ch in clipped)
    chars = ["▣", "►", "◐", "◧", "▤", "▼", "◑", "◨", "▥", "◀", "◒", "◩", "▦", "◆", "◕", "◪", "▧", "◈", "◢", "■", "▨", "◉", "◣", "▩", "◎", "◤", "▲", "●", "◥"]
    return "".join(random.choice(chars) for _ in sparkid[:maxsize])
def load_from_csv(self):
    """Merge addresses from the bootstrap CSV into ``self.addresses``.

    CSV columns: address, wallet, tag, category, sources (';'-separated).
    Rows whose category is not donation/popular/ransom are skipped, and
    already-known addresses are left untouched; only genuinely new
    addresses are added (with zeroed stats).
    """
    # Compute the path once; it is reused in the summary message below.
    csvpath = "%s/toolbox/bootstrap/btcpaymon.csv" % (utils.expand_env(var="$HOME"))
    newaddresses = 0
    with open(csvpath) as csvfile:
      rows = csv.reader(csvfile, delimiter=",")
      next(rows, None)  # skip header; tolerate an empty file
      for row in rows:
        address = row[0]
        wallet = row[1].replace("_", " ")
        tag = row[2]
        category = row[3].lower()
        source = row[4].split(";") if row[4] and row[4] != "" else None
        if category not in ["donation", "popular", "ransom"]:
          continue
        if address not in self.addresses["category"][category]:
          newaddresses += 1
          self.addresses["category"][category][address] = {
            "wallet": wallet,
            "tag": tag,
            "source": source,
            "received": 0,
            "sent": 0,
            "balance": 0,
            "transaction": 0,
            "lastseen": None,
            "lasttxepoch": 0,
          }
    if newaddresses:
      print("added %d new addresses from %s file" % (newaddresses, csvpath))
def query_address(self, address, explorer="blockchaininfomulti"):
    """Fetch balance/tx stats for one address from a public explorer.

    Returns a dict with transaction/received/sent/balance counts plus
    lasttxepoch/lastseen, or None implicitly when the chosen explorer
    yields no usable data. All amounts are normalized to satoshi
    (chain.so reports BTC, hence the * 10**8 conversions).
    """
    if explorer == "chainso":
      content = utils.get_http("https://chain.so/api/v2/address/BTC/%s" % (address))
      # chain.so reports BTC floats; convert to integer satoshi.
      if "data" in content and len(content["data"]["txs"]):
        return {
          "balance": int(float(content["data"]["balance"]) * (10**8)),
          "received": int(float(content["data"]["received_value"]) * (10**8)),
          "sent": int((float(content["data"]["received_value"])-float(content["data"]["balance"])) * (10**8)),
          "transaction": int(content["data"]["total_txs"]),
          "lasttxepoch": content["data"]["txs"][0]["time"],
          "lastseen": time.strftime("%d/%b/%Y @ %H:%M:%S %Z", time.localtime(content["data"]["txs"][0]["time"])),
        }
    elif explorer == "blockchaininfo":
      content = utils.get_http("https://blockchain.info/rawaddr/%s" % (address))
      # blockchain.info already reports satoshi; txs[0] is the most recent tx.
      if "n_tx" in content:
        return {
          "transaction": content["n_tx"],
          "received": content["total_received"],
          "sent": content["total_sent"],
          "balance": content["final_balance"],
          "lasttxepoch": content["txs"][0]["time"] if len(content["txs"]) else None,
          "lastseen": time.strftime("%d/%b/%Y @ %H:%M:%S %Z", time.localtime(content["txs"][0]["time"])) if len(content["txs"]) else "",
        }
    elif explorer == "blockchaininfomulti":
      content = utils.get_http("https://blockchain.info/multiaddr?active=%s" % (address))
      # multiaddr endpoint queried with a single address; sanity-check the echo.
      if "addresses" in content and content["addresses"][0]["address"] == address:
        return {
          "transaction": content["addresses"][0]["n_tx"],
          "received": content["addresses"][0]["total_received"],
          "sent": content["addresses"][0]["total_sent"],
          "balance": content["addresses"][0]["final_balance"],
          "lasttxepoch": content["txs"][0]["time"] if len(content["txs"]) else None,
          "lastseen": time.strftime("%d/%b/%Y @ %H:%M:%S %Z", time.localtime(content["txs"][0]["time"])) if len(content["txs"]) else "",
        }
def update_category(self, category, force=False):
    """Refresh stats for all addresses in one category.

    Two passes: a bulk multiaddr lookup (50 addresses per request) to
    detect which addresses have new transactions, then a per-address
    query for those (plus any address missing from the bulk response).
    With force=True every address found in the bulk pass is re-queried.
    """
    updated, foundaddresses = [], []
    # Batch into chunks of 50 for the blockchain.info multiaddr endpoint.
    chunks = list(utils.chunkify(list(self.addresses["category"][category].keys()), 50))
    print("performing lookup for %d chunks of %d %s addresses" % (len(chunks), len(list(self.addresses["category"][category].keys())), category))
    for chunk in chunks:
      stats = utils.get_http("https://blockchain.info/multiaddr?active=%s" % ("|".join(chunk)))
      if "addresses" in stats:
        for entry in stats["addresses"]:
          foundaddresses.append(entry["address"])
          if force or entry["n_tx"] != self.addresses["category"][category][entry["address"]]["transaction"] or not self.addresses["category"][category][entry["address"]]["lastseen"]:
            # new txs since we last updated this address, update with new lasttx
            self.addresses["category"][category][entry["address"]]["transaction"] = entry["n_tx"]
            self.addresses["category"][category][entry["address"]]["balance"] = entry["final_balance"]
            self.addresses["category"][category][entry["address"]]["received"] = entry["total_received"]
            self.addresses["category"][category][entry["address"]]["sent"] = entry["total_sent"]
            # lastseen/lasttxepoch cleared here; refilled by query_address below.
            self.addresses["category"][category][entry["address"]]["lastseen"] = None
            self.addresses["category"][category][entry["address"]]["lasttxepoch"] = None
            self.addresses["category"][category][entry["address"]]["retired"] = False
            updated.append(entry["address"])
    # Addresses absent from the bulk response still get an individual lookup.
    alladdresses = list(self.addresses["category"][category].keys())
    updated.extend(list(set(alladdresses)-set(foundaddresses)))
    updated = list(set(updated))
    print("updating stats for %d %s addresses" % (len(updated), category))
    for address in updated:
      stats = self.query_address(address)
      if stats:
        self.addresses["category"][category][address]["transaction"] = stats["transaction"]
        self.addresses["category"][category][address]["received"] = stats["received"]
        self.addresses["category"][category][address]["sent"] = stats["sent"]
        self.addresses["category"][category][address]["balance"] = stats["balance"]
        self.addresses["category"][category][address]["lasttxepoch"] = stats["lasttxepoch"]
        self.addresses["category"][category][address]["lastseen"] = stats["lastseen"]
        self.addresses["category"][category][address]["retired"] = False
      else:
        if self.addresses["category"][category][address]["transaction"] == 0:
          # this address has no txs and we could not find stats via 2 public apis; mark it retired
          self.addresses["category"][category][address]["retired"] = True
        else:
          self.addresses["category"][category][address]["retired"] = False
def group_wallet(self):
    """Rebuild per-wallet aggregates from per-address data, then totals.

    Resets self.bitcoin["category"] and ["stats"], folds each non-retired
    address into its wallet (amounts converted satoshi -> BTC via /10**8),
    tracks the most recent lasttxepoch per wallet, then accumulates
    per-category and global counters.
    """
    # Drop previous aggregation results before rebuilding.
    self.bitcoin["category"]["donation"] = {}
    self.bitcoin["category"]["popular"] = {}
    self.bitcoin["category"]["ransom"] = {}
    self.bitcoin["stats"] = {
      "count_address": 0,
      "count_wallet": 0,
      "count_received": 0,
      "count_sent": 0,
      "count_balance": 0,
      "count_txs": 0,
      "donation": {
        "count_wallet": 0,
        "count_address": 0,
        "count_received": 0,
        "count_sent": 0,
        "count_balance": 0,
        "count_txs": 0,
      },
      "popular": {
        "count_wallet": 0,
        "count_address": 0,
        "count_received": 0,
        "count_sent": 0,
        "count_balance": 0,
        "count_txs": 0,
      },
      "ransom": {
        "count_wallet": 0,
        "count_address": 0,
        "count_received": 0,
        "count_sent": 0,
        "count_balance": 0,
        "count_txs": 0,
      },
    }
    for category in ["donation", "popular", "ransom"]:
      print("grouping %d addresses for %s category" % (len(self.addresses["category"][category]), category))
      for address in self.addresses["category"][category]:
        # Retired addresses (no stats obtainable) are excluded from wallets.
        if self.addresses["category"][category][address]["retired"]:
          continue
        wallet = self.addresses["category"][category][address]["wallet"]
        if wallet not in self.bitcoin["category"][category]:
          self.bitcoin["category"][category][wallet] = {
            "addresses": 0,
            "received": 0,
            "sent": 0,
            "balance": 0,
            "transaction": 0,
            "lasttxepoch": None,
            "lastseen": None,
          }
        self.bitcoin["category"][category][wallet]["addresses"] += 1
        # Per-address amounts are satoshi; wallet aggregates are BTC.
        self.bitcoin["category"][category][wallet]["received"] += (self.addresses["category"][category][address]["received"] / (10**8))
        self.bitcoin["category"][category][wallet]["sent"] += (self.addresses["category"][category][address]["sent"] / (10**8))
        self.bitcoin["category"][category][wallet]["balance"] += (self.addresses["category"][category][address]["balance"] / (10**8))
        self.bitcoin["category"][category][wallet]["transaction"] += self.addresses["category"][category][address]["transaction"]
        if self.addresses["category"][category][address]["lasttxepoch"]:
          # Keep the most recent lasttxepoch seen across the wallet's addresses.
          if not self.bitcoin["category"][category][wallet]["lasttxepoch"]:
            self.bitcoin["category"][category][wallet]["lasttxepoch"] = self.addresses["category"][category][address]["lasttxepoch"]
            self.bitcoin["category"][category][wallet]["lastseen"] = time.strftime("%d/%b/%Y @ %H:%M:%S %Z", time.localtime(self.bitcoin["category"][category][wallet]["lasttxepoch"]))
          else:
            if self.addresses["category"][category][address]["lasttxepoch"] > self.bitcoin["category"][category][wallet]["lasttxepoch"]:
              self.bitcoin["category"][category][wallet]["lasttxepoch"] = self.addresses["category"][category][address]["lasttxepoch"]
              self.bitcoin["category"][category][wallet]["lastseen"] = time.strftime("%d/%b/%Y @ %H:%M:%S %Z", time.localtime(self.bitcoin["category"][category][wallet]["lasttxepoch"]))
      for wallet in self.bitcoin["category"][category]:
        self.bitcoin["stats"][category]["count_received"] += self.bitcoin["category"][category][wallet]["received"]
        self.bitcoin["stats"][category]["count_sent"] += self.bitcoin["category"][category][wallet]["sent"]
        self.bitcoin["stats"][category]["count_txs"] += self.bitcoin["category"][category][wallet]["transaction"]
        self.bitcoin["stats"][category]["count_balance"] += self.bitcoin["category"][category][wallet]["balance"]
        # NOTE(review): len()-based counts are re-assigned each iteration; idempotent.
        self.bitcoin["stats"][category]["count_address"] = len(self.addresses["category"][category])
        self.bitcoin["stats"][category]["count_wallet"] = len(self.bitcoin["category"][category])
        # Deterministic glyph id derived from the wallet's aggregate figures.
        self.bitcoin["category"][category][wallet]["sparkid"] = self.sparkify("%x%x%x%x%x" % (
          int(self.bitcoin["category"][category][wallet]["addresses"] % 15),
          int(self.bitcoin["category"][category][wallet]["transaction"] % 15),
          int(self.bitcoin["category"][category][wallet]["received"] % 15),
          int(self.bitcoin["category"][category][wallet]["sent"] % 15),
          int(self.bitcoin["category"][category][wallet]["balance"] % 15),
        ), skiphashing=True)
    # Global totals across the three categories.
    self.bitcoin["stats"]["count_address"] = len(self.addresses["category"]["donation"]) + len(self.addresses["category"]["popular"]) + len(self.addresses["category"]["ransom"])
    self.bitcoin["stats"]["count_wallet"] = len(self.bitcoin["category"]["donation"]) + len(self.bitcoin["category"]["popular"]) + len(self.bitcoin["category"]["ransom"])
    self.bitcoin["stats"]["count_received"] = self.bitcoin["stats"]["donation"]["count_received"] + self.bitcoin["stats"]["popular"]["count_received"] + self.bitcoin["stats"]["ransom"]["count_received"]
    self.bitcoin["stats"]["count_sent"] = self.bitcoin["stats"]["donation"]["count_sent"] + self.bitcoin["stats"]["popular"]["count_sent"] + self.bitcoin["stats"]["ransom"]["count_sent"]
    self.bitcoin["stats"]["count_balance"] = self.bitcoin["stats"]["donation"]["count_balance"] + self.bitcoin["stats"]["popular"]["count_balance"] + self.bitcoin["stats"]["ransom"]["count_balance"]
    self.bitcoin["stats"]["count_txs"] = self.bitcoin["stats"]["donation"]["count_txs"] + self.bitcoin["stats"]["popular"]["count_txs"] + self.bitcoin["stats"]["ransom"]["count_txs"]
def update(self, skipupdate=False):
    """Full refresh pipeline: CSV import, node snapshot, per-address
    updates (unless skipupdate), wallet grouping, and persistence.
    """
    self.load_from_csv()
    self.get_bitnodes()
    # NOTE(review): despite the name, last_update_epoch stores a datetime
    # object, and it is passed to utils.save_json below — presumably
    # utils.save_json serializes datetimes; confirm.
    self.bitcoin["last_update_epoch"] = datetime.now().astimezone(tz=None)
    self.bitcoin["last_update"] = self.bitcoin["last_update_epoch"].strftime("%d/%b/%Y @ %H:%M:%S %Z")
    utils.save_json(self.bitcoin, self.statsfilepath)
    if not skipupdate:
      # Refresh per-address stats from the public explorers, then persist.
      for category in ["donation", "popular", "ransom"]:
        self.update_category(category)
      self.addresses["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
      utils.save_json(self.addresses, self.addressesfilepath)
    self.group_wallet()
    self.bitcoin["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
    utils.save_json(self.bitcoin, self.statsfilepath)
def get_bitnodes(self):
    """Pull the latest bitnodes.io snapshot into self.bitcoin["nodes"]
    and tally per-ASN/country/timezone/useragent summaries.

    Each snapshot entry is a positional list; the numeric indices below
    follow the bitnodes API field order (0=protocol version, 1=user
    agent, 2=connected since, 3=services bitmask, 4=height, 5=hostname,
    6=city, 7=country, 8/9=lat/lon, 10=timezone, 11=ASN, 12=org name).
    """
    nodes = utils.get_http("https://bitnodes.io/api/v1/snapshots/latest/")
    if nodes:
      self.bitcoin["nodes"] = []
      self.bitcoin["nodessummary"] = {
        "asn": {},
        "country": {},
        "useragent": {},
        "timezone": {},
      }
      print("adding stats for %d bitcoin nodes" % len(nodes["nodes"]))
      for node in nodes["nodes"]:
        # Falsy fields are normalized to the string "Unknown".
        asn = nodes["nodes"][node][11] if nodes["nodes"][node][11] else "Unknown"
        city = nodes["nodes"][node][6] if nodes["nodes"][node][6] else "Unknown"
        connectedsince = nodes["nodes"][node][2] if nodes["nodes"][node][2] else "Unknown"
        connectedsincehuman = time.strftime("%d/%b/%Y @ %H:%M:%S %Z", time.localtime(nodes["nodes"][node][2])) if nodes["nodes"][node][2] else "Unknown"
        country = nodes["nodes"][node][7] if nodes["nodes"][node][7] else "Unknown"
        height = nodes["nodes"][node][4] if nodes["nodes"][node][4] else "Unknown"
        hostname = nodes["nodes"][node][5] if nodes["nodes"][node][5] else "Unknown"
        ipurl = node
        latitude = nodes["nodes"][node][8] if nodes["nodes"][node][8] else "Unknown"
        longitude = nodes["nodes"][node][9] if nodes["nodes"][node][9] else "Unknown"
        orgname = nodes["nodes"][node][12] if nodes["nodes"][node][12] else "Unknown"
        protoversion = nodes["nodes"][node][0] if nodes["nodes"][node][0] else "Unknown"
        services = nodes["nodes"][node][3] if nodes["nodes"][node][3] else "Unknown"
        timezone = nodes["nodes"][node][10] if nodes["nodes"][node][10] else "Unknown"
        useragent = nodes["nodes"][node][1] if nodes["nodes"][node][1] else "Unknown"
        # Decode the services bitmask into flag names.
        # NOTE(review): `services & 0` is always falsy, so NODE_NONE is never
        # appended; and if services was 0 it became the string "Unknown" above,
        # which would make the bitwise tests raise TypeError — verify upstream
        # data never reports a zero/absent services field.
        servicesflags = []
        if services & 0: servicesflags.append("NODE_NONE")
        if services & (1 << 0): servicesflags.append("NODE_NETWORK")
        if services & (1 << 1): servicesflags.append("NODE_GETUTXO")
        if services & (1 << 2): servicesflags.append("NODE_BLOOM")
        if services & (1 << 3): servicesflags.append("NODE_WITNESS")
        if services & (1 << 4): servicesflags.append("NODE_XTHIN")
        if services & (1 << 10): servicesflags.append("NODE_NETWORK_LIMITED")
        self.bitcoin["nodes"].append({
          "asn": asn,
          "city": city,
          "connectedsince": connectedsince,
          "connectedsincehuman": connectedsincehuman,
          "country": country,
          "height": height,
          "hostname": hostname,
          "ipurl": ipurl,
          "latitude": latitude,
          "longitude": longitude,
          "orgname": orgname,
          "protoversion": protoversion,
          "services": services,
          "servicesflags": servicesflags,
          "timezone": timezone,
          "useragent": useragent,
        })
        # Frequency counters for the summary views.
        if asn not in self.bitcoin["nodessummary"]["asn"]:
          self.bitcoin["nodessummary"]["asn"][asn] = 1
        else:
          self.bitcoin["nodessummary"]["asn"][asn] += 1
        if country not in self.bitcoin["nodessummary"]["country"]:
          self.bitcoin["nodessummary"]["country"][country] = 1
        else:
          self.bitcoin["nodessummary"]["country"][country] += 1
        if timezone not in self.bitcoin["nodessummary"]["timezone"]:
          self.bitcoin["nodessummary"]["timezone"][timezone] = 1
        else:
          self.bitcoin["nodessummary"]["timezone"][timezone] += 1
        if useragent not in self.bitcoin["nodessummary"]["useragent"]:
          self.bitcoin["nodessummary"]["useragent"][useragent] = 1
        else:
          self.bitcoin["nodessummary"]["useragent"][useragent] += 1
      self.bitcoin["stats"]["count_nodes"] = len(self.bitcoin["nodes"])
      self.bitcoin["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
      utils.save_json(self.bitcoin, self.statsfilepath)
def group_and_update(self, categories=["donation", "popular", "ransom"]):
    """Rebuild self.addresses from the CSV, group addresses into wallets,
    then bulk-refresh every wallet via blockchain.info and recompute
    per-category and global stats (all amounts kept in satoshi here).

    NOTE(review): mutable default argument — harmless because it is only
    iterated, never mutated, but worth cleaning up.
    """
    with open("%s/toolbox/bootstrap/btcpaymon.csv" % (utils.expand_env(var="$HOME"))) as csvfile:
      # Start from a fresh address map; the CSV is the source of truth here.
      self.addresses = {
        "category": {
          "donation": {},
          "popular": {},
          "ransom": {},
        }
      }
      rows = csv.reader(csvfile, delimiter=",")
      header = next(rows)  # skip the CSV header row
      for row in rows:
        address = row[0]
        wallet = row[1].replace("_", " ")
        tag = row[2]
        category = row[3].lower()
        source = row[4].split(";") if row[4] and row[4] != "" else None
        if category in ["donation", "popular", "ransom"] and address not in self.addresses["category"][category]:
          self.addresses["category"][category][address] = {
            "wallet": wallet,
            "tag": tag,
            "source": source,
          }
    # Pass 1: group addresses under their wallet and zero the counters.
    for category in categories:
      self.bitcoin["category"][category] = {}
      self.bitcoin["stats"][category]["count_wallet"] = 0
      self.bitcoin["stats"][category]["count_address"] = 0
      self.bitcoin["stats"][category]["count_received"] = 0
      self.bitcoin["stats"][category]["count_sent"] = 0
      self.bitcoin["stats"][category]["count_balance"] = 0
      self.bitcoin["stats"][category]["count_txs"] = 0
      print("grouping %d addresses in %s category" % (len(self.addresses["category"][category]), category))
      for address in self.addresses["category"][category]:
        wallet = self.addresses["category"][category][address]["wallet"]
        if wallet not in self.bitcoin["category"][category]:
          self.bitcoin["category"][category][wallet] = {
            "addresses": [address],
            "addrstats": [],
            "received": 0,
            "sent": 0,
            "balance": 0,
            "txcount": 0,
            "lasttx": {
              "epoch": None,
              "epochhuman": None,
              "hash": None,
              "block": None,
              "summary": None,
            },
            "sparkid": None,
          }
        else:
          self.bitcoin["category"][category][wallet]["addresses"].append(address)
    # Pass 2: one multiaddr request per wallet, then roll up stats.
    for category in categories:
      print("updating %d wallets in %s category" % (len(self.bitcoin["category"][category]), category))
      for wallet in self.bitcoin["category"][category]:
        try:
          # https://www.blockchain.com/api/blockchain_api
          stats = utils.get_http("https://blockchain.info/multiaddr?active=%s" % ("|".join(self.bitcoin["category"][category][wallet]["addresses"])))
        except:
          # NOTE(review): bare except treats any failure as "no data";
          # narrowing to Exception would be safer.
          stats = None
        if stats and "addresses" in stats:
          self.bitcoin["category"][category][wallet]["addrstats"] = []
          for entry in stats["addresses"]:
            self.bitcoin["category"][category][wallet]["addrstats"].append({
              "address": entry["address"],
              "received": entry["total_received"],
              "sent": entry["total_sent"],
              "balance": entry["final_balance"],
              "txcount": entry["n_tx"],
            })
            self.bitcoin["category"][category][wallet]["received"] += entry["total_received"]
            self.bitcoin["category"][category][wallet]["sent"] += entry["total_sent"]
            self.bitcoin["category"][category][wallet]["balance"] += entry["final_balance"]
            self.bitcoin["category"][category][wallet]["txcount"] += entry["n_tx"]
          # txs[0] is the most recent transaction across the wallet's addresses.
          if len(stats["txs"]):
            self.bitcoin["category"][category][wallet]["lasttx"]["epoch"] = stats["txs"][0]["time"]
            self.bitcoin["category"][category][wallet]["lasttx"]["epochhuman"] = time.strftime("%d/%b/%Y @ %H:%M:%S %Z", time.localtime(stats["txs"][0]["time"]))
            self.bitcoin["category"][category][wallet]["lasttx"]["hash"] = stats["txs"][0]["hash"]
            self.bitcoin["category"][category][wallet]["lasttx"]["block"] = stats["txs"][0]["block_height"]
            self.bitcoin["category"][category][wallet]["lasttx"]["summary"] = stats["txs"][0]["result"]
        # Deterministic glyph id derived from the wallet's aggregate figures.
        self.bitcoin["category"][category][wallet]["sparkid"] = self.sparkify("%x%x%x%x%x" % (
          int(len(self.bitcoin["category"][category][wallet]["addresses"]) % 15),
          int(self.bitcoin["category"][category][wallet]["txcount"] % 15),
          int(self.bitcoin["category"][category][wallet]["received"] % 15),
          int(self.bitcoin["category"][category][wallet]["sent"] % 15),
          int(self.bitcoin["category"][category][wallet]["balance"] % 15),
        ), skiphashing=True)
        self.bitcoin["stats"][category]["count_wallet"] = len(self.bitcoin["category"][category])
        self.bitcoin["stats"][category]["count_address"] += len(self.bitcoin["category"][category][wallet]["addresses"])
        self.bitcoin["stats"][category]["count_received"] += self.bitcoin["category"][category][wallet]["received"]
        self.bitcoin["stats"][category]["count_sent"] += self.bitcoin["category"][category][wallet]["sent"]
        self.bitcoin["stats"][category]["count_balance"] += self.bitcoin["category"][category][wallet]["balance"]
        self.bitcoin["stats"][category]["count_txs"] += self.bitcoin["category"][category][wallet]["txcount"]
      # Persist progress after each category.
      self.bitcoin["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
      utils.save_json(self.bitcoin, self.statsfilepath)
    # Global totals across the three fixed categories.
    self.bitcoin["stats"]["count_wallet"] = self.bitcoin["stats"]["donation"]["count_wallet"] + self.bitcoin["stats"]["popular"]["count_wallet"] + self.bitcoin["stats"]["ransom"]["count_wallet"]
    self.bitcoin["stats"]["count_address"] = self.bitcoin["stats"]["donation"]["count_address"] + self.bitcoin["stats"]["popular"]["count_address"] + self.bitcoin["stats"]["ransom"]["count_address"]
    self.bitcoin["stats"]["count_received"] = self.bitcoin["stats"]["donation"]["count_received"] + self.bitcoin["stats"]["popular"]["count_received"] + self.bitcoin["stats"]["ransom"]["count_received"]
    self.bitcoin["stats"]["count_sent"] = self.bitcoin["stats"]["donation"]["count_sent"] + self.bitcoin["stats"]["popular"]["count_sent"] + self.bitcoin["stats"]["ransom"]["count_sent"]
    self.bitcoin["stats"]["count_balance"] = self.bitcoin["stats"]["donation"]["count_balance"] + self.bitcoin["stats"]["popular"]["count_balance"] + self.bitcoin["stats"]["ransom"]["count_balance"]
    self.bitcoin["stats"]["count_txs"] = self.bitcoin["stats"]["donation"]["count_txs"] + self.bitcoin["stats"]["popular"]["count_txs"] + self.bitcoin["stats"]["ransom"]["count_txs"]
    self.bitcoin["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
    utils.save_json(self.bitcoin, self.statsfilepath)
def wallet_graph(self):
def sat2btc(sat):
return sat/10**8
def sat2size(sat):
sizemap = {
1: 10,
10: 15,
100: 20,
1000: 25,
10000: 30,
100000: 35,
1000000: 40,
10000000: 45,
100000000: 50,
}
btc = sat2btc(sat)
for maxbtc in sizemap:
if btc <= maxbtc:
return sizemap[maxbtc]
grouplimits = {
"₿1": {"min": 0, "max": 1},
"₿10": {"min": 1, "max": 10},
"₿100": {"min": 10, "max": 100},
"₿1k": {"min": 100, "max": 1000},
"₿10k": {"min": 1000, "max": 10000},
"₿100k": {"min": 10000, "max": 100000},
"₿1m": {"min": 100000, "max": 1000000},
"₿10m": {"min": 1000000, "max": 10000000},
"₿100m": {"min": 10000000, "max": 100000000},
}
groupingcriteria = "received"
defaultnodesize = 20
defaultfillcolor = "#dedede"
rootfillcolor = "#c3daf7"
edgecolor = "#dddddd"
nodecolor = "#f6f6f6"
categoryfillcolors = {
"donation": "#d7ecc9",
"popular": "#fed9b5",
"ransom": "#fbbfc5",
}
self.bitcoin["graph"] = {
"grouped": {
"inlabel": self.bitcoin["stats"]["count_wallet"],
"outlabel": "💼 Wallets",
"size": defaultnodesize,
"edgecolor": edgecolor,
"fillcolor": rootfillcolor,
"tooltip": "", #"₿%.8f/₿%.8f/₿%.8f" % (sat2btc(self.bitcoin["stats"]["count_received"]), sat2btc(self.bitcoin["stats"]["count_sent"]), sat2btc(self.bitcoin["stats"]["count_balance"])),
"donation": {
"inlabel": None,
"outlabel": "🙏 Donation",
"size": defaultnodesize,
"edgecolor": edgecolor,
"fillcolor": categoryfillcolors["donation"],
"tooltip": None,
"₿1": [],
"₿10": [],
"₿100": [],
"₿1k": [],
"₿10k": [],
"₿100k": [],
"₿1m": [],
"₿10m": [],
"children": []
},
"popular": {
"inlabel": None,
"outlabel": "🔥 Popular",
"size": defaultnodesize,
"edgecolor": edgecolor,
"fillcolor": categoryfillcolors["popular"],
"tooltip": None,
"₿1": [],
"₿10": [],
"₿100": [],
"₿1k": [],
"₿10k": [],
"₿100k": [],
"₿1m": [],
"₿10m": [],
"children": []
},
"ransom": {
"inlabel": None,
"outlabel": "👾 Ransom",
"size": defaultnodesize,
"edgecolor": edgecolor,
"fillcolor": categoryfillcolors["ransom"],
"tooltip": None,
"₿1": [],
"₿10": [],
"₿100": [],
"₿1k": [],
"₿10k": [],
"₿100k": [],
"₿1m": [],
"₿10m": [],
"children": []
},
"children": []
}
}
for category in ["donation", "popular", "ransom"]:
print("gathering stats for %d wallets in %s category" % (len(self.bitcoin["category"][category]), category))
for wallet in self.bitcoin["category"][category]:
for fundgroup in grouplimits:
if sat2btc(self.bitcoin["category"][category][wallet][groupingcriteria]) > grouplimits[fundgroup]["min"] and sat2btc(self.bitcoin["category"][category][wallet][groupingcriteria]) <= grouplimits[fundgroup]["max"]:
self.bitcoin["graph"]["grouped"][category][fundgroup].append({
"inlabel": "",
"outlabel": wallet,
"size": defaultnodesize, #sat2size(self.bitcoin["category"][category][wallet][groupingcriteria]),
"edgecolor": edgecolor,
"nodecolor": nodecolor,
"fillcolor": categoryfillcolors[category] if self.bitcoin["category"][category][wallet]["balance"] > 0 else defaultfillcolor,
"tooltip": "₿%.8f/₿%.8f/₿%.8f" % (sat2btc(self.bitcoin["category"][category][wallet]["received"]), sat2btc(self.bitcoin["category"][category][wallet]["sent"]), sat2btc(self.bitcoin["category"][category][wallet]["balance"])),
"lastactivity": self.bitcoin["category"][category][wallet]["lasttx"]["epochhuman"],
"children": [],
})
print("grouping wallets using criteria '%s'" % groupingcriteria)
for fundgroup in grouplimits:
if fundgroup in self.bitcoin["graph"]["grouped"][category]:
if len(self.bitcoin["graph"]["grouped"][category][fundgroup]):
self.bitcoin["graph"]["grouped"][category]["children"].append({
"inlabel": len(self.bitcoin["graph"]["grouped"][category][fundgroup]),
"outlabel": "<=%s" % (fundgroup),
"size": defaultnodesize,
"edgecolor": edgecolor,
"nodecolor": nodecolor,
"fillcolor": categoryfillcolors[category],
"tooltip": "",
"children": self.bitcoin["graph"]["grouped"][category][fundgroup],
})
del self.bitcoin["graph"]["grouped"][category][fundgroup]
self.bitcoin["graph"]["grouped"]["children"].append({
"inlabel": self.bitcoin["stats"]["donation"]["count_wallet"],
"outlabel": "🙏 Donation",
"size": defaultnodesize,
"edgecolor": edgecolor,
"nodecolor": nodecolor,
"fillcolor": categoryfillcolors["donation"],
"tooltip": "", #"₿%.8f/₿%.8f/₿%.8f" % (sat2btc(self.bitcoin["stats"]["donation"]["count_received"]), sat2btc(self.bitcoin["stats"]["donation"]["count_sent"]), sat2btc(self.bitcoin["stats"]["donation"]["count_balance"])),
"children": self.bitcoin["graph"]["grouped"]["donation"]["children"],
})
del self.bitcoin["graph"]["grouped"]["donation"]
self.bitcoin["graph"]["grouped"]["children"].append({
"inlabel": self.bitcoin["stats"]["popular"]["count_wallet"],
"outlabel": "🔥 Popular",
"size": defaultnodesize,
"edgecolor": edgecolor,
"nodecolor": nodecolor,
"fillcolor": categoryfillcolors["popular"],
"tooltip": "", #"₿%.8f/₿%.8f/₿%.8f" % (sat2btc(self.bitcoin["stats"]["popular"]["count_received"]), sat2btc(self.bitcoin["stats"]["popular"]["count_sent"]), sat2btc(self.bitcoin["stats"]["popular"]["count_balance"])),
"children": self.bitcoin["graph"]["grouped"]["popular"]["children"],
})
del self.bitcoin["graph"]["grouped"]["popular"]
self.bitcoin["graph"]["grouped"]["children"].append({
"inlabel": self.bitcoin["stats"]["ransom"]["count_wallet"],
"outlabel": "👾 Ransom",
"size": defaultnodesize,
"edgecolor": edgecolor,
"nodecolor": nodecolor,
"fillcolor": categoryfillcolors["ransom"],
"tooltip": "", #"₿%.8f/₿%.8f/₿%.8f" % (sat2btc(self.bitcoin["stats"]["ransom"]["count_received"]), sat2btc(self.bitcoin["stats"]["ransom"]["count_sent"]), sat2btc(self.bitcoin["stats"]["ransom"]["count_balance"])),
"children": self.bitcoin["graph"]["grouped"]["ransom"]["children"],
})
del self.bitcoin["graph"]["grouped"]["ransom"]
self.bitcoin["last_update_epoch"] = datetime.now().timestamp(); self.bitcoin["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
utils.save_json(self.bitcoin, self.statsfilepath)
if __name__ == "__main__":
  # script entry point: refresh wallet stats, rebuild the graph tree, then
  # pull the latest bitnodes snapshot
  btc = Bitcoin()
  btc.group_and_update()
  btc.wallet_graph()
  btc.get_bitnodes()
|
{"/bitcoin.py": ["/utils.py"], "/kalpi.py": ["/utils.py"], "/astro.py": ["/utils.py"]}
|
2,243
|
7h3rAm/kalpi
|
refs/heads/master
|
/kalpi.py
|
#!/usr/bin/env python3
import os
import re
import time
import random
import hashlib
import htmlmin
import argparse
import markdown
import sparkline
import dateutil.relativedelta
from datetime import datetime
from jinja2 import Environment, BaseLoader, FileSystemLoader
import utils
class Kalpi:
  """Static-site generator for 7h3rAm.github.io.

  Walks markdown posts under _posts, renders them plus a set of Jinja2
  templates into the site tree, and produces tag-cloud, archive and
  statistics pages (with xkcd-style charts via utils.to_xkcd).
  """
  def __init__(self):
    self.datadict = {}
    self.datadict["tags"] = {}
    self.datadict["posts"] = {}
    self.datadict["recent_count"] = 10
    # all paths are anchored at the checked-out site repository
    self.basedir = "%s/7h3rAm.github.io" % (utils.expand_env(var="$PROJECTSPATH"))
    self.outputdir = self.basedir
    self.postsdir = "%s/_posts" % (self.basedir)
    self.templatesdir = "%s/_templates" % (self.basedir)
    self.statsdir = "%s/static/files/pages_stats" % (self.outputdir)
    # NOTE(review): self.pages is populated here but never read elsewhere in
    # this file — confirm before removing
    self.pages = {}
    self.pages["research"] = "%s/research.md" % (self.templatesdir)
    self.pages["cv"] = "%s/cv.md" % (self.templatesdir)
    self.pages["life"] = "%s/life.md" % (self.templatesdir)
    self.pages["fitness"] = "%s/fitness.md" % (self.templatesdir)
    self.datadict["pages"] = {}
    self.datadict["metadata"] = utils.load_yaml("%s/toolbox/bootstrap/self.yml" % (utils.expand_env(var="$HOME")))["metadata"]
    self.datadict["fitness"] = utils.load_yaml("%s/fitness.yml" % (self.templatesdir))
    self.datadict["life"] = utils.load_yaml("%s/life.yml" % (self.templatesdir))
    self.datadict["oscp"] = utils.load_yaml("%s/oscp.yml" % (self.templatesdir))
    self.datadict["read"] = utils.load_yaml("%s/read.yml" % (self.templatesdir))
    self.datadict["startpage"] = utils.load_yaml("%s/startpage.yml" % (self.templatesdir))
    # template filename -> rendered output path
    self.templatemapping = {
      "index.html": "%s/index.html" % (self.outputdir),
      "archive.html": "%s/archive.html" % (self.outputdir),
      "tags.html": "%s/tags.html" % (self.outputdir),
      "stats.html": "%s/stats.html" % (self.outputdir),
      "cv.html": "%s/pages/cv.html" % (self.outputdir),
      "fitness.html": "%s/pages/fitness.html" % (self.outputdir),
      "life.html": "%s/pages/life.html" % (self.outputdir),
      "oscp.html": "%s/pages/oscp.html" % (self.outputdir),
      "read.html": "%s/pages/read.html" % (self.outputdir),
      "research.html": "%s/pages/research.html" % (self.outputdir),
      "satview.html": "%s/pages/satview.html" % (self.outputdir),
      "astro.html": "%s/pages/astro.html" % (self.outputdir),
      "startpage.html": "%s/pages/startpage.html" % (self.outputdir),
    }
    # fixed: removed a dead assignment ("%B %-d, %Y") that was immediately
    # overwritten by the line below
    self.timeformat = "%Y %b %d"
    self.stimeformat = "%b %d"
    self.postdateformat = "%d/%b/%Y"
    self.trimlength = 30
    # raw vs minified output-size accounting, reported at the end of make()
    self.totalsize = 0
    self.minsize = 0
  def join_list(self, inlist, url="/tags.html#"):
    """Return items of inlist sorted and joined as comma-separated links."""
    outlist = []
    for item in sorted(inlist):
      outlist.append("<a href=%s%s>%s</a>" % (url, item, item))
    return ", ".join(outlist)
  def join_list_and(self, inlist, url="/tags.html#"):
    """Like join_list, but the last two items are joined with " and "."""
    outlist = []
    for item in sorted(inlist):
      outlist.append("<a href=%s%s>%s</a>" % (url, item, item))
    set1 = ", ".join(outlist[:-2])
    set2 = " and ".join(outlist[-2:])
    if set1:
      return ", ".join([set1, set2])
    else:
      return set2
  def trim_length(self, text):
    """Truncate text to self.trimlength characters, appending "..."."""
    return "".join([text[:self.trimlength], "..."]) if len(text) > self.trimlength else text
  def preprocess_text(self, mdtext):
    """Default bare ``` code fences to C syntax highlighting."""
    return mdtext.replace('\n```\n', '\n```c\n') if "\n```\n" in mdtext else mdtext
  def md2html(self, mdtext):
    """Render markdown text to HTML with fenced-code/footnote/table support."""
    return markdown.markdown(mdtext, extensions=["fenced_code", "footnotes", "tables"])
  def clean_text(self, rgx_list, text, subtext=""):
    """Replace every regex in rgx_list with subtext within text."""
    # https://stackoverflow.com/a/37192727/1079836
    new_text = text
    for rgx_match in rgx_list:
      new_text = re.sub(rgx_match, subtext, new_text)
    return new_text
  def remove_para(self, htmltext):
    """Strip <p>/</p> tags from htmltext."""
    return self.clean_text([r"<p>", r"</p>"], text=htmltext)
  def remove_empty_ul(self, htmltext):
    """Drop empty paragraphs and merge back-to-back <ul> lists."""
    return self.clean_text([r"</li>\s*</ul>\s*<ul>\s*<li>"], text=self.clean_text([r"<p>\s*</p>"], text=htmltext), subtext="</li><li>")
  def _build_env(self, loader):
    """Create a Jinja2 environment with the shared options and filters."""
    env = Environment(loader=loader, extensions=["jinja2_markdown.MarkdownExtension"], autoescape=False)
    env.trim_blocks = True
    # fixed: the original set "lsrtip_blocks" (typo), which Jinja2 silently
    # stored as an unknown attribute — lstrip_blocks was never enabled
    env.lstrip_blocks = True
    env.filters["md2html"] = self.md2html
    env.filters["removepara"] = self.remove_para
    env.filters["removeemptyul"] = self.remove_empty_ul
    env.filters["joinlist"] = self.join_list
    env.filters["joinlistand"] = self.join_list_and
    env.filters["trimlength"] = self.trim_length
    return env
  def get_template(self, templatefile, datadict):
    """Render templatefile from the templates directory with datadict."""
    env = self._build_env(FileSystemLoader(self.templatesdir))
    return env.get_template(templatefile).render(datadict=datadict)
  def render_template(self, templatefile, postprocess=[]):
    """Render a mapped template to its output path, optionally minified.

    NOTE(review): the mutable default for postprocess is never mutated here,
    so it is safe, but callers should still pass their own list.
    """
    if templatefile in self.templatemapping:
      output = self.get_template(templatefile, datadict=self.datadict)
      # inject the CC-BY-SA footer into the rendered page
      output = output.replace('<div class="footer"></div>', '<div class="footer footercenter"><span><a href="https://creativecommons.org/licenses/by-sa/4.0/" class="footspan"> </a></span></div>')
      html = output
      if "minify" in postprocess:
        html = htmlmin.minify(output, remove_comments=True, remove_empty_space=True)
      utils.file_save(self.templatemapping[templatefile], html)
      utils.info("rendered '%s' (%s)" % (utils.cyan(self.templatemapping[templatefile]), utils.blue(utils.sizeof_fmt(len(html)))))
      self.totalsize += len(output)
      self.minsize += len(html)
    else:
      utils.warn("could not find mapping for file '%s'" % (utils.red(templatefile)))
  def render_template_string(self, templatestr):
    """Render an in-memory template string against self.datadict."""
    env = self._build_env(BaseLoader)
    return env.from_string(htmlmin.minify(templatestr, remove_comments=True, remove_empty_space=True)).render(datadict=self.datadict)
  def tag_cloud(self):
    """Return {tag: css-style} sized by relative post count, shuffled."""
    colors = ["#20b2aa", "#99cc99", "#0c9", "#5b92e5", "#ffcc66", "#00b7eb", "#69359c", "#fe4164", "#a50b5e"]
    random.shuffle(colors)
    maxtagcount = 0
    tags, tagcloud = {}, {}
    for tag in self.datadict["tags"]:
      tagcloud[tag] = None
      tags[tag] = len(self.datadict["tags"][tag])
      if tags[tag] > maxtagcount:
        maxtagcount = tags[tag]
    for tag in tags:
      # bucket each tag into one of ten size/weight tiers by its share of
      # the most-used tag's count
      percent = (tags[tag]*100/maxtagcount)
      if percent <= 10:
        tagcloud[tag] = "font-size:1.0em; color:%s; padding:20px 5px 20px 5px;" % (colors[0])
      elif percent <= 20:
        tagcloud[tag] = "font-size:1.5em; font-weight:bold; color:%s; padding:20px 5px 20px 5px;" % (colors[1])
      elif percent <= 30:
        tagcloud[tag] = "font-size:2.0em; color:%s; padding:20px 5px 20px 5px;" % (colors[2])
      elif percent <= 40:
        tagcloud[tag] = "font-size:2.5em; font-weight:bold; color:%s; padding:20px 5px 20px 5px;" % (colors[3])
      elif percent <= 50:
        tagcloud[tag] = "font-size:3.0em; color:%s; padding:20px 5px 20px 5px;" % (colors[4])
      elif percent <= 60:
        tagcloud[tag] = "font-size:3.5em; font-weight:bold; color:%s; padding:0px 5px 0px 5px;" % (colors[5])
      elif percent <= 70:
        tagcloud[tag] = "font-size:4.0em; color:%s; padding:0px 5px 0px 5px;" % (colors[6])
      elif percent <= 80:
        tagcloud[tag] = "font-size:4.5em; font-weight:bold; color:%s; padding:0px 5px 0px 5px;" % (colors[7])
      elif percent <= 90:
        tagcloud[tag] = "font-size:5.0em; color:%s; padding:0px 5px 0px 5px;" % (colors[8])
      elif percent <= 100:
        # the largest tier reuses colors[0]; only nine colors are available
        tagcloud[tag] = "font-size:5.5em; font-weight:bold; color:%s; padding:0px 5px 0px 5px;" % (colors[0])
    keys = list(tagcloud.keys())
    random.shuffle(keys)
    tagcloud = {key: tagcloud[key] for key in keys}
    return tagcloud
  def parse(self, lines):
    """Parse a post's lines into (date, summary, tags, content).

    Header lines ("date:", "summary:", "tags:") are read until the first
    blank line; everything after that is rendered as the HTML content.
    """
    date, summary, tags, content = None, None, None, None
    for idx, line in enumerate(lines):
      if line.startswith("date:"):
        date = time.strptime("".join(line.split(":")[1:]).strip(), self.postdateformat)
      if line.startswith("summary:"):
        summary = ":".join(line.split(":")[1:]).strip()
        # drop the scaffold's placeholder summary
        summary = None if summary in ["", "This is the summary for an awesome post."] else summary
      if line.startswith("tags:"):
        tags = []
        for tag in "".join(line.split(":")[1:]).strip().split(", "):
          tags.append(tag.replace(" ", "_"))
      if line == "\n":
        content = self.md2html("".join(lines[idx+1:]))
        break
    return date, summary, tags, content
  def sparkify(self, content, maxsize=10, unique=True, sparkmode=True):
    """Return (short, long) colored sparkline HTML derived from content.

    With unique=True the sparkline is deterministic per content (sha256);
    otherwise random glyphs/colors are chosen.
    """
    sparkid = hashlib.sha256(content.encode("utf-8")).hexdigest()
    spark = "".join(sparkline.sparkify([int(x, base=16) for x in sparkid]))
    colors = ["#007bff", "#00bcd4", "#17a2b8", "#20c997", "#2196f3", "#28a745", "#4caf50", "#6610f2", "#6c757d", "#6f42c1", "#8357ff", "#dc3545", "#e83e8c", "#f44336", "#fd7e14", "#ffc107", "#20b2aa", "#99cc99", "#0c9", "#5b92e5", "#ffcc66", "#00b7eb", "#69359c", "#fe4164", "#a50b5e"]
    charmap = {
      "▁": "◐",
      "▂": "■",
      "▃": "◩",
      "▄": "◆",
      "▅": "◢",
      "▆": "◨",
      "▇": "●",
      "█": "▲",
    }
    if unique:
      sparkcolored = "".join(['<span style="color:%s;">%s</span>' % (random.choice(colors), ch if sparkmode else charmap[ch]) for ch in spark[:maxsize]])
      sparkcoloredlong = "".join(['<span style="color:%s;">%s</span>' % (random.choice(colors), ch if sparkmode else charmap[ch]) for ch in spark])
    else:
      chars = ["▣", "►", "◐", "◧", "▤", "▼", "◑", "◨", "▥", "◀", "◒", "◩", "▦", "◆", "◕", "◪", "▧", "◈", "◢", "■", "▨", "◉", "◣", "▩", "◎", "◤", "▲", "●", "◥"]
      sparkcolored = "".join(['<span style="color:%s;">%s</span>' % (random.choice(colors), random.choice(chars)) for _ in range(len(sparkid[:maxsize]))])
      sparkcoloredlong = "".join(['<span style="color:%s;">%s</span>' % (random.choice(colors), random.choice(chars)) for _ in range(len(sparkid))])
    return ('<span class="sparklines">%s</span>' % (sparkcolored), '<span class="sparklines">%s</span>' % (sparkcoloredlong))
  def get_tree(self, source):
    """Walk source for markdown posts; return post dicts and fill tag index."""
    posts = []
    self.datadict["tags"] = dict()
    for root, ds, fs in os.walk(source):
      for name in fs:
        if name[0] == ".": continue
        if not re.match(r"^.+\.(md|mdown|markdown)$", name): continue
        path = os.path.join(root, name)
        with open(path, "r") as f:
          title = f.readline()[:-1].strip("\n..")
          # fixed: preprocess_text was called with f.readlines() (a list),
          # so the fence substitution never matched; run it on the raw text
          # and split back into lines afterwards
          contentmd = self.preprocess_text(f.read()).splitlines(keepends=True)
          date, summary, tags, content = self.parse(contentmd)
          year, month, day = date[:3]
          pretty_date = time.strftime(self.postdateformat, date)
          epoch = time.mktime(date)
          url = "/posts/%d%02d%02d_%s.html" % (year, month, day, os.path.splitext(name)[0])
          sparkcolored, sparkcoloredlong = self.sparkify("\n".join(contentmd))
          post = {
            "title": title,
            "epoch": epoch,
            "content": content,
            "contentmd": contentmd,
            "url": url,
            "pretty_date": pretty_date,
            "sdate": time.strftime(self.stimeformat, date),
            "date": date,
            "year": year,
            "month": month,
            "day": day,
            "tags": tags,
            "summary": summary,
            "filename": name,
            "sparkline": sparkcolored,
            "sparklinelong": sparkcoloredlong,
            "previous": None,
            "next": None,
          }
          posts.append(post)
          # index each post under all of its tags
          for tag in tags:
            if tag not in self.datadict["tags"].keys():
              self.datadict["tags"][tag] = [{
                "title": title,
                "sparkline": sparkcolored,
                "sparklinelong": sparkcoloredlong,
                "summary": summary,
                "url": url,
                "pretty_date": pretty_date,
                "year": year,
                "month": month,
                "day": day,
              }]
            else:
              self.datadict["tags"][tag].append({
                "title": title,
                "sparkline": sparkcolored,
                "sparklinelong": sparkcoloredlong,
                "summary": summary,
                "url": url,
                "pretty_date": pretty_date,
                "year": year,
                "month": month,
                "day": day,
              })
    return posts
  def gen_stats(self):
    """Aggregate per-month/per-year/per-tag counts, summaries and charts."""
    stats = {}
    stats["count_posts"] = len(self.datadict["posts"])
    stats["count_tags"] = len(self.datadict["tags"])
    stats["groups"] = {
      "per_yyyymm": {},
      "per_yyyy": {},
      "per_tag": {},
    }
    # sentinels outside any plausible post year; narrowed in the loop below
    stats["duration"] = {
      "start_year": 2100,
      "end_year": 2000,
    }
    stats["dates"] = []
    for post in self.datadict["posts"]:
      if post["year"] < stats["duration"]["start_year"]:
        stats["duration"]["start_year"] = post["year"]
      if post["year"] > stats["duration"]["end_year"]:
        stats["duration"]["end_year"] = post["year"]
      stats["dates"].append("%04d%02d%02d" % (post["year"], post["month"], post["day"]))
      key = "%04d%02d" % (post["year"], post["month"])
      if key not in stats["groups"]["per_yyyymm"]:
        stats["groups"]["per_yyyymm"][key] = {
          "posts": 1,
          "tagslist": [],
          "tags": len(post["tags"]),
        }
      else:
        # fixed: removed a bare no-op expression statement that was here
        stats["groups"]["per_yyyymm"][key]["posts"] += 1
        stats["groups"]["per_yyyymm"][key]["tags"] += len(post["tags"])
      stats["groups"]["per_yyyymm"][key]["tagslist"] += post["tags"]
      stats["groups"]["per_yyyymm"][key]["tagslist"] = list(set(stats["groups"]["per_yyyymm"][key]["tagslist"]))
      key = "%04d" % (post["year"])
      if key not in stats["groups"]["per_yyyy"]:
        stats["groups"]["per_yyyy"][key] = {
          "posts": 1,
          "tagslist": [],
          "tags": len(post["tags"]),
        }
      else:
        # fixed: removed a bare no-op expression statement that was here
        stats["groups"]["per_yyyy"][key]["posts"] += 1
        stats["groups"]["per_yyyy"][key]["tags"] += len(post["tags"])
      stats["groups"]["per_yyyy"][key]["tagslist"] += post["tags"]
      stats["groups"]["per_yyyy"][key]["tagslist"] = list(set(stats["groups"]["per_yyyy"][key]["tagslist"]))
      for tag in post["tags"]:
        if tag not in stats["groups"]["per_tag"]:
          stats["groups"]["per_tag"][tag] = {
            "posts": 1,
          }
        else:
          stats["groups"]["per_tag"][tag]["posts"] += 1
    stats["most_used_tag"] = max(stats["groups"]["per_tag"].keys(), key=(lambda key: stats["groups"]["per_tag"][key]["posts"]))
    stats["least_used_tag"] = min(stats["groups"]["per_tag"].keys(), key=(lambda key: stats["groups"]["per_tag"][key]["posts"]))
    stats["max_posts_yyyy"] = max(stats["groups"]["per_yyyy"].keys(), key=(lambda key: stats["groups"]["per_yyyy"][key]["posts"]))
    stats["min_posts_yyyy"] = min(stats["groups"]["per_yyyy"].keys(), key=(lambda key: stats["groups"]["per_yyyy"][key]["posts"]))
    stats["max_tags_yyyy"] = max(stats["groups"]["per_yyyy"].keys(), key=(lambda key: len(stats["groups"]["per_yyyy"][key]["tagslist"])))
    stats["min_tags_yyyy"] = min(stats["groups"]["per_yyyy"].keys(), key=(lambda key: len(stats["groups"]["per_yyyy"][key]["tagslist"])))
    curdate = datetime.now()
    maxdate = datetime.strptime(max(stats["dates"]), "%Y%m%d")
    mindate = datetime.strptime(min(stats["dates"]), "%Y%m%d")
    rd1 = dateutil.relativedelta.relativedelta(maxdate, mindate)
    rd2 = dateutil.relativedelta.relativedelta(curdate, maxdate)
    rd3 = dateutil.relativedelta.relativedelta(curdate, mindate)
    stats["summary"] = []
    stats["summary"].append("There are a total of `%d` posts with `%d` tags, written over a period of `%dy%dm%dd` (from `%s` till `%s`)" % (stats["count_posts"], stats["count_tags"], rd1.years, rd1.months, rd1.days, datetime.strftime(mindate, "%d/%b/%Y"), datetime.strftime(maxdate, "%d/%b/%Y")))
    stats["summary"].append("From the most recent update (on `%s`), it's been `%dy%dm%dd` when the last post was published and `%dy%dm%dd` since the first post" % (datetime.strftime(curdate, "%d/%b/%Y"), rd2.years, rd2.months, rd2.days, rd3.years, rd3.months, rd3.days))
    stats["summary"].append("The year `%s` has highest number of posts with a count of `%d`, while the year `%s` has lowest number of posts with a count of `%d`" % (stats["max_posts_yyyy"], stats["groups"]["per_yyyy"][stats["max_posts_yyyy"]]["posts"], stats["min_posts_yyyy"], stats["groups"]["per_yyyy"][stats["min_posts_yyyy"]]["posts"]))
    stats["summary"].append("The year `%s` has highest number of tags with a count of `%d`, while the year `%s` has lowest number of tags with a count of `%d`" % (stats["max_tags_yyyy"], len(stats["groups"]["per_yyyy"][stats["max_tags_yyyy"]]["tagslist"]), stats["min_tags_yyyy"], len(stats["groups"]["per_yyyy"][stats["min_tags_yyyy"]]["tagslist"])))
    stats["summary"].append("The most widely used of all `%d` tags across `%d` posts is `%s` while the least used is `%s`" % (stats["count_tags"], stats["count_posts"], stats["most_used_tag"], stats["least_used_tag"]))
    stats["summary"].append("On an average, there are `%d` posts per tag and `%d` posts, `%d` tags per year" % (sum([stats["groups"]["per_tag"][x]["posts"] for x in stats["groups"]["per_tag"]])/len(stats["groups"]["per_tag"].keys()), sum([stats["groups"]["per_yyyy"][x]["posts"] for x in stats["groups"]["per_yyyy"]])/len(stats["groups"]["per_yyyy"].keys()), sum([len(stats["groups"]["per_yyyy"][x]["tagslist"]) for x in stats["groups"]["per_yyyy"]])/len(stats["groups"]["per_yyyy"].keys())))
    stats["summary"] = [self.md2html(x).replace("<p>", "").replace("</p>", "") for x in stats["summary"]]
    # render xkcd-style charts for posts-per-tag/year and tags-per-year
    ppt = {tag:stats["groups"]["per_tag"][tag]["posts"] for tag in stats["groups"]["per_tag"]}
    utils.to_xkcd(ppt, "%s/posts_per_tag.png" % (self.statsdir), "")
    ppy = {yyyy:stats["groups"]["per_yyyy"][yyyy]["posts"] for yyyy in stats["groups"]["per_yyyy"]}
    utils.to_xkcd(ppy, "%s/posts_per_year.png" % (self.statsdir), "")
    tpy = {yyyy:len(stats["groups"]["per_yyyy"][yyyy]["tagslist"]) for yyyy in stats["groups"]["per_yyyy"]}
    utils.to_xkcd(tpy, "%s/tags_per_year.png" % (self.statsdir), "")
    return stats
  def make(self, args, postprocess=[]):
    """Render all posts and pages, then the index/archive/tags/stats pages."""
    # posts
    calist = [x.replace(self.basedir, "") for x in utils.search_files_all("%s/static/images/clipart" % (self.basedir))]
    posts = sorted(self.get_tree(self.postsdir), key=lambda post: post["epoch"], reverse=False)
    self.datadict["posts"] = sorted(posts, key=lambda post: post["epoch"], reverse=True)
    total = len(posts)
    for idx, post in enumerate(posts):
      # fixed: the original set "next" unconditionally for idx == 0 and so
      # raised IndexError when there was exactly one post; the two links are
      # now independent bounds checks with identical behavior otherwise
      if idx > 0:
        post["previous"] = {}
        post["previous"]["title"] = posts[idx-1]["title"]
        post["previous"]["url"] = posts[idx-1]["url"]
      if idx < total-1:
        post["next"] = {}
        post["next"]["title"] = posts[idx+1]["title"]
        post["next"]["url"] = posts[idx+1]["url"]
      filename = "%s%s" % (self.outputdir, post["url"])
      output = self.get_template("post.html", datadict={"metadata": self.datadict["metadata"], "post": post, "tags": self.datadict["tags"]})
      # retrofit collapsible headings/lists onto the rendered markdown
      output = output.replace('<h1>', '<h1 class="h1 collapsible" onclick="toggle(this);">').replace('<h2>', '<hr><h2 class="h2 collapsible" onclick="toggle(this);">').replace('<h3>', '<h3 class="h3 collapsible" onclick="toggle(this);">').replace('<h4>', '<h4 class="h4 collapsible" onclick="toggle(this);">').replace('<h5>', '<h5 class="h5 collapsible" onclick="toggle(this);">').replace('<h6>', '<h6 class="h6 collapsible" onclick="toggle(this);">').replace('<ul>', '<ul class="nested active">').replace('<ol>', '<ol class="nested active">').replace('<p>', '<p class="nested active">').replace('<pre><code>', '<pre class="nested active"><code>').replace('<pre><code class="','<pre class="nested active"><code class="').replace('<p class="nested active"><a href="/posts/', '<p><a href="/posts/').replace('<p class="nested active">📅 published on ', '<p>📅 published on ').replace('<p class="nested active">🔖 tagged ', '<p>🔖 tagged ')
      # make absolute post links site-relative
      output = output.replace('](https://7h3ram.github.io/posts/', '](/posts/').replace('href="https://7h3ram.github.io/posts/', 'href="/posts/')
      #output = output.replace('BG_CLIPART_STYLE_HERE', 'class="bgclipart_sq" style="background-image: url(%s);"' % (random.choice(calist)))
      html = htmlmin.minify(output, remove_comments=True, remove_empty_space=True) if "minify" in postprocess else output
      utils.file_save(filename, html)
      utils.info("rendered '%s' (%s)" % (utils.magenta(filename), utils.blue(utils.sizeof_fmt(len(html)))))
      self.totalsize += len(output)
      self.minsize += len(html)
    # pages
    self.render_template("cv.html", postprocess=postprocess)
    self.render_template("fitness.html", postprocess=postprocess)
    self.render_template("life.html", postprocess=postprocess)
    self.render_template("read.html", postprocess=postprocess)
    self.render_template("oscp.html", postprocess=postprocess)
    self.render_template("research.html", postprocess=postprocess)
    self.render_template("satview.html", postprocess=postprocess)
    #self.render_template("astro.html", postprocess=postprocess)
    self.render_template("startpage.html", postprocess=postprocess)
    # default
    self.datadict["stats"] = self.gen_stats()
    self.datadict["tagcloud"] = self.tag_cloud()
    self.render_template("index.html", postprocess=postprocess)
    self.render_template("archive.html", postprocess=postprocess)
    self.render_template("tags.html", postprocess=postprocess)
    self.render_template("stats.html", postprocess=postprocess)
    utils.info("size: total:%s (%d), minified:%s (%d), delta:%s (%d)" % (
      utils.sizeof_fmt(self.totalsize),
      self.totalsize,
      utils.sizeof_fmt(self.minsize),
      self.minsize,
      utils.sizeof_fmt(self.totalsize-self.minsize),
      self.totalsize-self.minsize
    ))
if __name__ == "__main__":
  # script entry point: parse CLI arguments and rebuild the whole site
  argparser = argparse.ArgumentParser(description="%s (v%s)" % (utils.blue_bold("kalpi"), utils.green_bold("0.1")))
  cliargs = argparser.parse_args()
  site = Kalpi()
  site.make(cliargs)
|
{"/bitcoin.py": ["/utils.py"], "/kalpi.py": ["/utils.py"], "/astro.py": ["/utils.py"]}
|
2,244
|
7h3rAm/kalpi
|
refs/heads/master
|
/astro.py
|
#!/usr/bin/env python3
from datetime import datetime, timezone
from pprint import pprint
import time
import utils
class Astro:
  def __init__(self):
    """Set up API credentials, datastore paths, the result skeleton and the
    EONET category -> (url, emoji) lookup table."""
    # NASA API key comes from the environment; datastore paths point at the
    # local checkout and its raw.githubusercontent mirror
    self.apikey = utils.expand_env(var="$NASAKEY")
    self.datastore_url = "https://raw.githubusercontent.com/7h3rAm/datastore/master"
    self.datastore_path = "%s/datastore" % (utils.expand_env(var="$PROJECTSPATH"))
    self.datafile_path = "%s/datastore/astro.json" % (utils.expand_env(var="$PROJECTSPATH"))
    # source-url -> local-path map of files fetched during a refresh
    self.downloads = {}
    # result skeleton; each section is filled in by its same-named method
    self.data = {
      "last_update": None,
      "apod": None,
      "neo": None,
      "earthevents": None,
      "satview": None,
      "spacex": None,
    }
    # EONET/USGS event-category metadata used when rendering earth events;
    # note both plural "Earthquakes" (EONET) and singular "Earthquake" (USGS)
    self.category_map = {
      "Drought": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/drought",
        "emoji": "💧",
      },
      "Dust and Haze": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/dustHaze",
        "emoji": "🌫️",
      },
      "Earthquakes": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/earthquakes",
        "emoji": "🌐",
      },
      "Earthquake": {
        "url": "https://earthquake.usgs.gov/earthquakes/",
        "emoji": "🔴",
      },
      "Floods": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/floods",
        "emoji": "🌊",
      },
      "Landslides": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/landslides",
        "emoji": "⛰️",
      },
      "Manmade": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/manmade",
        "emoji": "🧍",
      },
      "Sea and Lake Ice": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/seaLakeIce",
        "emoji": "🧊",
      },
      "Severe Storms": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/severeStorms",
        "emoji": "🌀",
      },
      "Snow": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/snow",
        "emoji": "🌨️",
      },
      "Temperature Extremes": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/tempExtremes",
        "emoji": "🌡️",
      },
      "Volcanoes": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/volcanoes",
        "emoji": "🌋",
      },
      "Water Color": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/waterColor",
        "emoji": "⛲",
      },
      "Wildfires": {
        "url": "https://eonet.sci.gsfc.nasa.gov/api/v3/categories/wildfires",
        "emoji": "🔥",
      }
    }
def apod(self):
self.data["apod"] = {
"todayurl": "https://apod.nasa.gov/apod/astropix.html",
"archiveurl": "https://apod.nasa.gov/apod/archivepix.html",
}
apodjson = utils.download_json("https://api.nasa.gov/planetary/apod?api_key=%s" % (self.apikey))
if apodjson:
self.data["apod"]["title"] = "%s (%s)" % (apodjson["title"], datetime.strptime(apodjson["date"], '%Y-%m-%d').astimezone(tz=None).strftime("%d/%b/%Y %Z"))
self.data["apod"]["source"] = apodjson["url"]
self.data["apod"]["datastore"] = "%s/apod.jpg" % (self.datastore_url)
self.downloads[self.data["apod"]["source"]] = "%s/apod.jpg" % (self.datastore_path)
utils.download(self.data["apod"]["source"], self.downloads[self.data["apod"]["source"]])
self.data["apod"]["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
def neo(self):
self.data["neo"] = {
"date": None,
"objects": [],
}
neojson = utils.download_json("https://api.nasa.gov/neo/rest/v1/feed/today?detailed=true&api_key=%s" % (self.apikey))
datekey = list(neojson["near_earth_objects"].keys())[0]
self.data["neo"]["date"] = datetime.strptime(datekey, "%Y-%m-%d").astimezone(tz=None).strftime("%d/%b/%Y %Z")
for neo in neojson["near_earth_objects"][datekey]:
self.data["neo"]["objects"].append({
"cat": datetime.strptime(neo["close_approach_data"][0]["close_approach_date_full"], "%Y-%b-%d %H:%M").strftime("%d/%b/%Y @ %H:%M:%S %Z"),
"diameter": "%s-%s miles" % ("{:,.2f}".format(float(neo["estimated_diameter"]["miles"]["estimated_diameter_min"])), "{:,.2f}".format(float(neo["estimated_diameter"]["miles"]["estimated_diameter_max"]))),
"distance": "%s miles" % ("{:,.2f}".format(float(neo["close_approach_data"][0]["miss_distance"]["miles"]))),
"velocity": "%s mph" % ("{:,.2f}".format(float(neo["close_approach_data"][0]["relative_velocity"]["miles_per_hour"]))),
"hazardous": neo["is_potentially_hazardous_asteroid"],
"name": "Asteroid %s" % (neo["name"]),
"url": neo["nasa_jpl_url"],
})
self.data["neo"]["title"] = "%d objects making close approach (%s)" % (len(self.data["neo"]["objects"]), self.data["neo"]["date"])
self.data["neo"]["objects"] = sorted(self.data["neo"]["objects"], key=lambda k: k["name"])
self.data["neo"]["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
def earthevents(self):
self.data["earthevents"] = {
"date": datetime.now().astimezone(tz=None).strftime("%d/%b/%Y %Z"),
"events": [],
}
eonetjson = utils.download_json("https://eonet.gsfc.nasa.gov/api/v3/events?status=open&days=30")
usgseqjson = utils.download_json("https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_day.geojson")
self.data["earthevents"]["mapdata"] = {
"Drought": [],
"Dust and Haze": [],
"Earthquakes": [],
"Floods": [],
"Landslides": [],
"Manmade": [],
"Sea and Lake Ice": [],
"Severe Storms": [],
"Snow": [],
"Temperature Extremes": [],
"Volcanoes": [],
"Water Color": [],
"Wildfires": [],
}
for event in eonetjson["events"]:
self.data["earthevents"]["events"].append({
"eid": event["id"],
"name": event["title"],
"url": event["link"],
"location": "https://www.google.com/maps/dir/%s/@%s,%s,3z" % ("/".join([",".join([str(x["coordinates"][1]), str(x["coordinates"][0])]) for x in event["geometry"]]), event["geometry"][0]["coordinates"][1], event["geometry"][0]["coordinates"][0]),
"category": [self.category_map[x["title"]] for x in event["categories"]],
"source": [{"url": x["url"], "sid": x["id"]} for x in event["sources"]],
})
for cat in event["categories"]:
if cat["title"] == "Drought":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Drought"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Dust and Haze":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Dust and Haze"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Earthquakes":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Earthquakes"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Floods":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Floods"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Landslides":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Landslides"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Manmade":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Manmade"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Sea and Lake Ice":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Sea and Lake Ice"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Severe Storms":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Severe Storms"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Snow":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Snow"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Temperature Extremes":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Temperature Extremes"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Volcanoes":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Volcanoes"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Water Color":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Water Color"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
if cat["title"] == "Wildfires":
for coord in event["geometry"]:
self.data["earthevents"]["mapdata"]["Wildfires"].append(['<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/><b>Category</b>: %s<br/><b>Source</b>: %s' % (coord["coordinates"][1], coord["coordinates"][0], event["title"], ", ".join(list(sorted([x["title"] for x in event["categories"]]))), ", ".join(list(sorted(['<a href="%s">%s</a>' % (x["url"], x["id"]) for x in event["sources"]])))), coord["coordinates"][1], coord["coordinates"][0]])
for event in usgseqjson["features"]:
if event["properties"]["type"] == "earthquake" and event["properties"]["mag"] >= 4:
self.data["earthevents"]["events"].append({
"eid": event["id"],
"name": event["properties"]["title"],
"url": event["properties"]["url"],
"location": "http://maps.google.com/maps?q=%s,%s" % (event["geometry"]["coordinates"][1], event["geometry"]["coordinates"][0]),
"category": [self.category_map[event["properties"]["type"].title()]],
"source": [{"url": "https://earthquake.usgs.gov/earthquakes/feed/v1.0/geojson.php", "sid": "USGS"}],
})
self.data["earthevents"]["mapdata"]["Earthquakes"].append([
'<a href="http://maps.google.com/maps?q=%s,%s"><b>%s</b></a><br/>Category: %s<br/>Source: <a href="%s">USGS</a>' % (
event["geometry"]["coordinates"][1],
event["geometry"]["coordinates"][0],
event["properties"]["title"],
event["properties"]["type"].title(),
event["properties"]["url"],
),
event["geometry"]["coordinates"][1],
event["geometry"]["coordinates"][0],
])
self.data["earthevents"]["title"] = "%d events (%s)" % (len(self.data["earthevents"]["events"]), self.data["earthevents"]["date"])
self.data["earthevents"]["events"] = sorted(self.data["earthevents"]["events"], key=lambda k: k["name"])
self.data["earthevents"]["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
def spaceppl(self):
self.data["spaceppl"] = {
"title": None,
"people": [],
}
spaceppljson = utils.download_json("http://api.open-notify.org/astros.json")
for ppl in spaceppljson["people"]:
self.data["spaceppl"]["people"].append({
"name": ppl["name"],
"url": "https://www.google.com/search?q=Astronaut+%s" % (ppl["name"].replace(" ", "+")),
"spacecraft": ppl["craft"],
})
self.data["spaceppl"]["title"] = "%d people in space (%s)" % (len(self.data["spaceppl"]["people"]), datetime.now().astimezone(tz=None).strftime("%d/%b/%Y %Z"))
self.data["spaceppl"]["people"] = sorted(self.data["spaceppl"]["people"], key=lambda k: k["name"])
self.data["spaceppl"]["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
def spacex(self):
self.data["spacex"]["capsules"] = []
capsulesjson = utils.download_json("https://api.spacexdata.com/v4/capsules")
for capsule in capsulesjson:
self.data["spacex"]["capsules"].append({
"name": "(%s) %s" % (capsule["serial"], capsule["type"]),
"status": capsule["status"] if capsule["status"] else "",
"status_emoji": utils.to_emoji(capsule["status"]) if capsule["status"] else "",
"launches": len(capsule["launches"]) if capsule["launches"] else 0,
"reuse_count": capsule["reuse_count"] if capsule["reuse_count"] else 0,
"water_landings": capsule["water_landings"] if capsule["water_landings"] else 0,
"land_landings": capsule["land_landings"] if capsule["land_landings"] else 0,
"last_update": capsule["last_update"] if capsule["last_update"] else "Location and status unknown",
})
self.data["spacex"]["capsules"] = sorted(self.data["spacex"]["capsules"], key=lambda k: k["name"])
companyjson = utils.download_json("https://api.spacexdata.com/v4/company")
self.data["spacex"]["company"] = {
"name": companyjson["name"],
"url": companyjson["links"]["website"],
"employees": companyjson["employees"],
"vehicles": companyjson["vehicles"],
"launch_sites": companyjson["launch_sites"],
"test_sites": companyjson["test_sites"],
"valuation": companyjson["valuation"],
"valuation_human": utils.currency_human(companyjson["valuation"]),
"summary": companyjson["summary"],
}
# cores
self.data["spacex"]["cores"] = []
coresjson = utils.download_json("https://api.spacexdata.com/v4/cores")
for core in coresjson:
self.data["spacex"]["cores"].append({
"name": core["serial"],
"status": core["status"] if core["status"] else "",
"status_emoji": utils.to_emoji(core["status"]) if core["status"] else "",
"last_update": core["last_update"] if core["last_update"] else "",
"launches": len(core["launches"]) if core["launches"] else 0,
"rtls_attempts": core["rtls_attempts"] if core["rtls_attempts"] else 0,
"rtls_landings": core["rtls_landings"] if core["rtls_landings"] else 0,
"asds_attempts": core["asds_attempts"] if core["asds_attempts"] else 0,
"asds_landings": core["asds_landings"] if core["asds_landings"] else 0,
"reuse_count": core["reuse_count"] if core["reuse_count"] else 0,
})
self.data["spacex"]["cores"] = sorted(self.data["spacex"]["cores"], key=lambda k: k["name"])
# crew
self.data["spacex"]["crew"] = []
crewjson = utils.download_json("https://api.spacexdata.com/v4/crew")
for crew in crewjson:
self.data["spacex"]["crew"].append({
"name": crew["name"],
"agency": crew["agency"] if crew["agency"] else "",
"url": crew["wikipedia"] if crew["wikipedia"] else "",
"launches": len(crew["launches"]) if crew["launches"] else 0,
"status": crew["status"] if crew["status"] else "",
"status_emoji": utils.to_emoji(crew["status"]) if crew["status"] else "",
})
self.data["spacex"]["crew"] = sorted(self.data["spacex"]["crew"], key=lambda k: k["name"])
# dragons
self.data["spacex"]["dragons"] = []
dragonsjson = utils.download_json("https://api.spacexdata.com/v4/dragons")
for dragon in dragonsjson:
self.data["spacex"]["dragons"].append({
"name": dragon["name"],
"description": dragon["description"],
"first_flight": datetime.strptime(dragon["first_flight"], "%Y-%m-%d").astimezone(tz=None).strftime("%d/%b/%Y %Z"),
"type": dragon["type"],
"type_emoji": utils.to_emoji(dragon["type"]),
"active": dragon["active"],
"status_emoji": utils.to_emoji("active" if dragon["active"] else "retired"),
"crew_capacity": dragon["crew_capacity"],
"dry_mass": "%s lbs" % ("{:,.2f}".format(float(dragon["dry_mass_lb"]))),
"url": dragon["wikipedia"],
})
self.data["spacex"]["dragons"] = sorted(self.data["spacex"]["dragons"], key=lambda k: k["name"])
# landpads
self.data["spacex"]["landpads"] = []
landpadsjson = utils.download_json("https://api.spacexdata.com/v4/landpads")
for landpad in landpadsjson:
self.data["spacex"]["landpads"].append({
"name": "%s (%s)" % (landpad["full_name"], landpad["name"]),
"type": landpad["type"],
"location": "%s, %s" % (landpad["locality"], landpad["region"]),
"location_url": "https://www.google.com/maps/place/%s,%s" % (landpad["latitude"], landpad["longitude"]),
"url": landpad["wikipedia"],
"landing_attempts": landpad["landing_attempts"],
"landing_successes": landpad["landing_successes"],
"description": landpad["details"],
"launches": len(landpad["launches"]),
"status": landpad["status"],
"status_emoji": utils.to_emoji(landpad["status"]),
})
self.data["spacex"]["landpads"] = sorted(self.data["spacex"]["landpads"], key=lambda k: k["name"])
# launches
self.data["spacex"]["launches"] = {
"past": [],
"future": [],
}
launchesjson = utils.download_json("https://api.spacexdata.com/v4/launches")
for launch in launchesjson:
launchdata = {
"name": launch["name"],
"launch": datetime.fromtimestamp(launch["date_unix"], tz=timezone.utc).replace(tzinfo=timezone.utc).astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z"),
"upcoming": launch["upcoming"],
"flight": launch["flight_number"],
"description": launch["details"],
"url": launch["links"]["webcast"],
}
if launch["upcoming"]:
self.data["spacex"]["launches"]["future"].append(launchdata)
else:
self.data["spacex"]["launches"]["past"].append(launchdata)
self.data["spacex"]["launches"]["past"] = sorted(self.data["spacex"]["launches"]["past"], key=lambda k: k["flight"])
self.data["spacex"]["launches"]["future"] = sorted(self.data["spacex"]["launches"]["future"], key=lambda k: k["flight"])
# launchpads
self.data["spacex"]["launchpads"] = []
launchpadsjson = utils.download_json("https://api.spacexdata.com/v4/launchpads")
for launchpad in launchpadsjson:
self.data["spacex"]["launchpads"].append({
"name": "%s (%s)" % (launchpad["full_name"], launchpad["name"]),
"location": "%s, %s" % (launchpad["locality"], launchpad["region"]),
"location_url": "https://www.google.com/maps/place/%s,%s" % (launchpad["latitude"], launchpad["longitude"]),
"launch_attempts": launchpad["launch_attempts"],
"launch_successes": launchpad["launch_successes"],
"description": launchpad["details"],
"status": launchpad["status"],
"status_emoji": utils.to_emoji(launchpad["status"]),
})
self.data["spacex"]["launchpads"] = sorted(self.data["spacex"]["launchpads"], key=lambda k: k["name"])
# payloads
self.data["spacex"]["payloads"] = []
payloadsjson = utils.download_json("https://api.spacexdata.com/v4/payloads")
for payload in payloadsjson:
self.data["spacex"]["payloads"].append({
"name": payload["name"],
"type": payload["type"],
"type_emoji": utils.to_emoji(payload["type"]),
"customer": ", ".join(payload["customers"]),
"nationality": ", ".join(payload["nationalities"]),
"manufacturer": ", ".join(payload["manufacturers"]),
"orbit": payload["orbit"],
})
self.data["spacex"]["payloads"] = sorted(self.data["spacex"]["payloads"], key=lambda k: k["name"])
# roadster
self.data["spacex"]["roadster"] = {}
roadsterjson = utils.download_json("https://api.spacexdata.com/v4/roadster")
self.data["spacex"]["roadster"]["name"] = roadsterjson["name"]
self.data["spacex"]["roadster"]["url"] = roadsterjson["video"]
self.data["spacex"]["roadster"]["date"] = datetime.fromtimestamp(roadsterjson["launch_date_unix"], tz=timezone.utc).replace(tzinfo=timezone.utc).astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
self.data["spacex"]["roadster"]["description"] = roadsterjson["details"]
self.data["spacex"]["roadster"]["launch_mass"] = "%s lbs" % ("{:,.2f}".format(float(roadsterjson["launch_mass_lbs"])))
self.data["spacex"]["roadster"]["orbit"] = roadsterjson["orbit_type"].title()
self.data["spacex"]["roadster"]["speed"] = "%s mph" % ("{:,.2f}".format(float(roadsterjson["speed_mph"])))
self.data["spacex"]["roadster"]["earth_distance"] = "%s miles" % ("{:,.2f}".format(float(roadsterjson["earth_distance_mi"])))
self.data["spacex"]["roadster"]["mars_distance"] = "%s miles" % ("{:,.2f}".format(float(roadsterjson["mars_distance_mi"])))
# rockets
self.data["spacex"]["rockets"] = []
rocketsjson = utils.download_json("https://api.spacexdata.com/v4/rockets")
for rocket in rocketsjson:
payload_weights_leo, payload_weights_gto, payload_weights_moon, payload_weights_mars = "", "", "", ""
for item in rocket["payload_weights"]:
if item["id"] == "leo":
payload_weights_leo = "%s lbs" % ("{:,}".format(int(item["lb"])))
elif item["id"] == "gto":
payload_weights_gto = "%s lbs" % ("{:,}".format(int(item["lb"])))
elif item["id"] == "moon":
payload_weights_moon = "%s lbs" % ("{:,}".format(int(item["lb"])))
elif item["id"] == "mars":
payload_weights_mars = "%s lbs" % ("{:,}".format(int(item["lb"])))
self.data["spacex"]["rockets"].append({
"name": rocket["name"],
"stage": rocket["stages"],
"booster": rocket["boosters"],
"landing_leg": "%d (%s)" % (rocket["landing_legs"]["number"], rocket["landing_legs"]["material"]),
"height": "%s feet" % ("{:,}".format(int(rocket["height"]["feet"]))),
"diameter": "%s feet" % ("{:,}".format(int(rocket["diameter"]["feet"]))),
"mass": "%s lbs" % ("{:,}".format(int(rocket["mass"]["lb"]))),
"launch_cost": "$%s" % ("{:,}".format(int(rocket["cost_per_launch"]))),
"success_rate": "%s%%" % (rocket["success_rate_pct"]),
"first_flight": datetime.strptime(rocket["first_flight"], "%Y-%m-%d").astimezone(tz=None).strftime("%d/%b/%Y %Z"),
"description": rocket["description"],
"url": rocket["wikipedia"],
"first_stage_reusable": rocket["first_stage"]["reusable"],
"first_stage_reusable_emoji": utils.to_emoji("good") if rocket["first_stage"]["reusable"] else utils.to_emoji("bad"),
"first_stage_engine": rocket["first_stage"]["engines"],
"first_stage_fuel": "%s tons" % (rocket["first_stage"]["fuel_amount_tons"]),
"first_stage_burn_time": "{:,.2f} sec".format(int(rocket["first_stage"]["burn_time_sec"])) if rocket["first_stage"]["burn_time_sec"] else "",
"second_stage_reusable": rocket["second_stage"]["reusable"],
"second_stage_reusable_emoji": utils.to_emoji("good") if rocket["second_stage"]["reusable"] else utils.to_emoji("bad"),
"second_stage_engine": rocket["second_stage"]["engines"],
"second_stage_fuel": "%s tons" % (rocket["second_stage"]["fuel_amount_tons"]),
"second_stage_burn_time": "{:,.2f} sec".format(int(rocket["second_stage"]["burn_time_sec"])) if rocket["second_stage"]["burn_time_sec"] else "",
"engine": "%s (%d)" % (rocket["engines"]["type"].title(), rocket["engines"]["number"]),
"engine_propellant": "%s, %s" % (rocket["engines"]["propellant_1"], rocket["engines"]["propellant_2"]),
"engine_thrust_to_weight": rocket["engines"]["thrust_to_weight"],
"payload_weights_leo": payload_weights_leo,
"payload_weights_gto": payload_weights_gto,
"payload_weights_mars": payload_weights_mars,
"payload_weights_moon": payload_weights_moon,
"type": "%d/%d/%s/%s feet/%s feet/%s lbs" % (rocket["stages"], rocket["boosters"], "%d (%s)" % (rocket["landing_legs"]["number"], rocket["landing_legs"]["material"]) if rocket["landing_legs"]["number"] else 0, "{:,}".format(int(rocket["height"]["feet"])), "{:,}".format(int(rocket["diameter"]["feet"])), "{:,}".format(int(rocket["mass"]["lb"]))),
"first_stage": "%d/%s tons/%s/%s" % (rocket["first_stage"]["engines"], rocket["first_stage"]["fuel_amount_tons"], "{:,.2f} sec".format(int(rocket["first_stage"]["burn_time_sec"])) if rocket["first_stage"]["burn_time_sec"] else "", utils.to_emoji("good") if rocket["first_stage"]["reusable"] else utils.to_emoji("bad")),
"second_stage": "%d/%s tons/%s/%s" % (rocket["second_stage"]["engines"], rocket["second_stage"]["fuel_amount_tons"], "{:,.2f} sec".format(int(rocket["second_stage"]["burn_time_sec"])) if rocket["second_stage"]["burn_time_sec"] else "", utils.to_emoji("good") if rocket["second_stage"]["reusable"] else utils.to_emoji("bad")),
"engine": "%d %s engine(s) w/ %s+%s propellants and a thrust-to-weight ratio of %d" % (rocket["engines"]["number"], rocket["engines"]["type"].title(), rocket["engines"]["propellant_1"], rocket["engines"]["propellant_2"], rocket["engines"]["thrust_to_weight"]),
"payload": "/%s/%s/%s/%s" % (payload_weights_leo, payload_weights_gto, payload_weights_moon, payload_weights_mars),
})
self.data["spacex"]["rockets"] = sorted(self.data["spacex"]["rockets"], key=lambda k: k["name"])
# ships
self.data["spacex"]["ships"] = []
shipsjson = utils.download_json("https://api.spacexdata.com/v4/ships")
for ship in shipsjson:
self.data["spacex"]["ships"].append({
"name": ship["name"],
"status_emoji": utils.to_emoji("good") if ship["active"] else utils.to_emoji("bad"),
"url": ship["link"],
"port": ship["home_port"],
"mass": "%s lbs" % ("{:,.2f}".format(float(item["lb"]))),
"launches": len(ship["launches"]),
"type": ship["type"],
"roles": ", ".join(ship["roles"]),
})
self.data["spacex"]["ships"] = sorted(self.data["spacex"]["ships"], key=lambda k: k["name"])
# starlink
self.data["spacex"]["starlink"] = {
"satellites": [],
"mapdata": [],
"stats": {
"inorbit": 0,
"decayed": 0,
"total": 0,
"firstlaunch": None,
"latestlaunch": None,
},
}
starlinkjson = utils.download_json("https://api.spacexdata.com/v4/starlink")
locs, epochs = [], []
for starlink in starlinkjson:
if not starlink["spaceTrack"]["LAUNCH_DATE"]:
break
self.data["spacex"]["starlink"]["stats"]["total"] += 1
epochs.append(datetime.strptime(starlink["spaceTrack"]["LAUNCH_DATE"], "%Y-%m-%d").timestamp())
if starlink["latitude"] and starlink["longitude"]:
self.data["spacex"]["starlink"]["stats"]["inorbit"] += 1
locs.append("%s,%s" % ("{:,.2f}".format(float(starlink["latitude"])), "{:,.2f}".format(float(starlink["longitude"]))))
self.data["spacex"]["starlink"]["mapdata"].append([
'<a href="https://www.n2yo.com/satellite/?s=%s"><b>%s</b></a><br/><b>Launch</b>: %s<br/><b>Height</b>: %s<br/><b>Velocity</b>: %s' % (
starlink["spaceTrack"]["NORAD_CAT_ID"],
starlink["spaceTrack"]["OBJECT_NAME"],
datetime.strptime(starlink["spaceTrack"]["LAUNCH_DATE"], "%Y-%m-%d").astimezone(tz=None).strftime("%d/%b/%Y %Z"),
"%s miles" % ("{:,.2f}".format(float(starlink["height_km"])*0.62137)) if starlink["height_km"] else "",
"%s mph" % ("{:,.2f}".format(float(starlink["velocity_kms"])*0.62137*60*60)) if starlink["velocity_kms"] else "",
),
starlink["latitude"],
starlink["longitude"],
])
else:
self.data["spacex"]["starlink"]["stats"]["decayed"] += 1
self.data["spacex"]["starlink"]["satellites"].append({
"name": starlink["spaceTrack"]["OBJECT_NAME"],
"url": "https://www.n2yo.com/satellite/?s=%s" % (starlink["spaceTrack"]["NORAD_CAT_ID"]),
"launch": datetime.strptime(starlink["spaceTrack"]["LAUNCH_DATE"], "%Y-%m-%d").astimezone(tz=None).strftime("%d/%b/%Y %Z"),
"epoch": datetime.strptime(starlink["spaceTrack"]["LAUNCH_DATE"], "%Y-%m-%d").timestamp(),
"latitude": starlink["latitude"] if starlink["latitude"] else None,
"longitude": starlink["longitude"] if starlink["longitude"] else None,
"location": "http://maps.google.com/maps?q=%s,%s" % (starlink["latitude"], starlink["longitude"]) if starlink["latitude"] and starlink["longitude"] else None,
"height": "%s miles" % ("{:,.2f}".format(float(starlink["height_km"])*0.62137)) if starlink["height_km"] else None,
"velocity": "%s mph" % ("{:,.2f}".format(float(starlink["velocity_kms"])*0.62137*60*60)) if starlink["velocity_kms"] else None,
})
self.data["spacex"]["starlink"]["satellites"] = sorted(self.data["spacex"]["starlink"]["satellites"], key=lambda k: k["epoch"])
self.data["spacex"]["starlink"]["stats"]["firstlaunch"] = time.strftime("%d/%b/%Y %Z", time.localtime(min(epochs)))
self.data["spacex"]["starlink"]["stats"]["latestlaunch"] = time.strftime("%d/%b/%Y %Z", time.localtime(max(epochs)))
# history
self.data["spacex"]["history"] = []
historyjson = utils.download_json("https://api.spacexdata.com/v4/history")
for history in historyjson:
self.data["spacex"]["history"].append({
"title": history["title"],
"url": history["links"]["article"] if history["links"]["article"] else None,
"date": datetime.fromtimestamp(history["event_date_unix"], tz=timezone.utc).replace(tzinfo=timezone.utc).astimezone(tz=None).strftime("%d/%b/%Y %Z"),
"description": history["details"],
"epoch": history["event_date_unix"],
})
self.data["spacex"]["history"] = sorted(self.data["spacex"]["history"], key=lambda k: k["epoch"])
self.data["spacex"]["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
def satview(self):
self.data["satview"] = {
"date": datetime.now().astimezone(tz=None).strftime("%d/%b/%Y %Z"),
"fullday": {
"datastore_hstack": "https://raw.githubusercontent.com/7h3rAm/datastore/master/earthview_hstack.gif",
"datastore_vstack": "https://raw.githubusercontent.com/7h3rAm/datastore/master/earthview_vstack.gif",
"url": "https://twitter.com/7h3rAm/status/1401555983373987842",
"title_hstack": "Earth Full Day: 07/JUN/2021 (Horizontally Stacked)",
"title_vstack": "Earth Full Day: 07/JUN/2021 (Vertically Stacked)",
},
"himawari8_naturalcolor": {
"source": "http://rammb.cira.colostate.edu/ramsdis/online/images/latest/himawari-8/full_disk_ahi_natural_color.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/himwari8_naturalcolor.jpg"
},
"himawari8_truecolor": {
"source": "http://rammb.cira.colostate.edu/ramsdis/online/images/latest/himawari-8/full_disk_ahi_true_color.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/himwari8_truecolor.jpg"
},
"goes16_geocolor": {
"source": "https://cdn.star.nesdis.noaa.gov/GOES16/ABI/FD/GEOCOLOR/1808x1808.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/goes16.jpg"
},
"goes17_geocolor": {
"source": "https://cdn.star.nesdis.noaa.gov/GOES17/ABI/FD/GEOCOLOR/1808x1808.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/goes17.jpg"
},
"meteosat0_naturalcolor": {
"source": "https://eumetview.eumetsat.int/static-images/latestImages/EUMETSAT_MSG_RGBNatColourEnhncd_LowResolution.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/meteosat0.jpg"
},
"meteosat415_naturalcolor": {
"source": "https://eumetview.eumetsat.int/static-images/latestImages/EUMETSAT_MSGIODC_RGBNatColourEnhncd_LowResolution.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/meteosat415.jpg"
},
"elektrol": {
"source": "http://electro.ntsomz.ru/i/splash/20210529-2330.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/elektrol.jpg"
},
"insat_fd_ir": {
"source": "https://mausam.imd.gov.in/Satellite/3Dglobe_ir1.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/insat_ir1.jpg"
},
"insat_fd_vis": {
"source": "https://mausam.imd.gov.in/Satellite/3Dglobe_vis.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/insat_vis.jpg"
},
"sdo_0171": {
"source": "https://sdo.gsfc.nasa.gov/assets/img/latest/latest_1024_0171.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/sdo_0171.jpg"
},
"sdo_0304": {
"source": "https://sdo.gsfc.nasa.gov/assets/img/latest/latest_1024_0304.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/sdo_0304.jpg"
},
"sdo_hmid": {
"source": "https://sdo.gsfc.nasa.gov/assets/img/latest/latest_1024_HMID.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/sdo_hmid.jpg"
},
"sdo_hmiic": {
"source": "https://sdo.gsfc.nasa.gov/assets/img/latest/latest_1024_HMIIC.jpg",
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/sdo_hmiic.jpg"
},
}
epicjson = utils.download_json("https://epic.gsfc.nasa.gov/api/natural")
ids = []
for epic in epicjson:
ids.append(int(epic["identifier"]))
latest_id = max(ids)
for epic in epicjson:
if int(epic["identifier"]) == latest_id:
date_obj = datetime.strptime("%s GMT" % (epic["date"].replace(" ", "T")), "%Y-%m-%dT%H:%M:%S GMT").replace(tzinfo=timezone.utc)
self.data["satview"]["dscovr_epic"] = {
"message": "%s on %s." % (epic["caption"], date_obj.astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")),
"source": "https://epic.gsfc.nasa.gov/archive/natural/%s/%s/%s/jpg/%s.jpg" % (epic["identifier"][0:4], epic["identifier"][4:6], epic["identifier"][6:8], epic["image"]),
"datastore": "https://raw.githubusercontent.com/7h3rAm/datastore/master/dscovr_epic.jpg",
}
sats = ["dscovr_epic", "himawari8_naturalcolor", "himawari8_truecolor", "goes16_geocolor", "goes17_geocolor", "meteosat0_naturalcolor", "meteosat415_naturalcolor", "elektrol", "insat_fd_ir", "insat_fd_vis", "sdo_0171", "sdo_0304", "sdo_hmid", "sdo_hmiic"]
total = len(sats)
for idx, sat in enumerate(sats):
destination_filepath = "%s/%s" % (self.datastore_path, self.data["satview"][sat]["datastore"].split("/")[-1])
print("[%d/%d] %s" % (idx+1, total, self.data["satview"][sat]["source"]))
try:
utils.download(self.data["satview"][sat]["source"], destination_filepath)
except:
print("[!] could not download from %s" % (self.data["satview"][sat]["source"]))
  def marsphotos(self):
    """Placeholder for a Mars rover photos feed; not implemented yet."""
    # https://github.com/chrisccerami/mars-photo-api
    return
  def dsn(self):
    """Placeholder for NASA Deep Space Network status; not implemented yet."""
    # https://twitter.com/dsn_status
    # https://github.com/russss/pydsn/blob/master/parser.py
    return
  def mrn(self):
    """Placeholder for Mars Relay Network status; not implemented yet."""
    # https://twitter.com/mrn_status
    # https://github.com/russss/mrn_status/blob/main/mrn.py
    # https://mars.nasa.gov/rss/api/?feed=marsrelay&category=all&feedtype=json
    # https://mars.nasa.gov/rss/api/?feed=marsrelay_db&category=all&feedtype=json
    # https://mars.nasa.gov/rss/api/?feed=marsrelay_oe&category=all&feedtype=json
    return
  def solarbody(self):
    """Fetch solar-system body data from the le-systeme-solaire REST API.

    NOTE(review): this body looks copy-pasted from a SpaceX history handler
    and is likely broken as-is -- confirm before relying on it:
    - results are stored under self.data["spacex"], not a top-level key
    - the API is documented to return {"bodies": [...]} with fields like
      "englishName"; "title"/"links"/"event_date_unix"/"details" appear to be
      SpaceX history fields, so these lookups would presumably raise KeyError
    """
    self.data["spacex"]["solarbody"] = []
    solarbodyjson = utils.download_json("https://api.le-systeme-solaire.net/rest/bodies/")
    for solarbody in solarbodyjson:
      self.data["spacex"]["solarbody"].append({
        "title": solarbody["title"],
        "url": solarbody["links"]["article"] if solarbody["links"]["article"] else None,
        "date": datetime.fromtimestamp(solarbody["event_date_unix"], tz=timezone.utc).replace(tzinfo=timezone.utc).astimezone(tz=None).strftime("%d/%b/%Y %Z"),
        "description": solarbody["details"],
        "epoch": solarbody["event_date_unix"],
      })
    self.data["spacex"]["solarbody"] = sorted(self.data["spacex"]["solarbody"], key=lambda k: k["epoch"])
  def spaceprobes(self):
    """Placeholder for space-probe distance data; not implemented yet."""
    # https://github.com/spacehackers/api.spaceprob.es
    # http://murmuring-anchorage-8062.herokuapp.com/distances.json
    return
def update(self):
self.data = utils.load_json(self.datafile_path)
self.apod()
self.data["last_update_epoch"] = datetime.now().timestamp(); self.data["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
utils.save_json(self.data, self.datafile_path)
self.earthevents()
self.data["last_update_epoch"] = datetime.now().timestamp(); self.data["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
utils.save_json(self.data, self.datafile_path)
self.neo()
self.data["last_update_epoch"] = datetime.now().timestamp(); self.data["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
utils.save_json(self.data, self.datafile_path)
self.spaceppl()
self.data["last_update_epoch"] = datetime.now().timestamp(); self.data["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
utils.save_json(self.data, self.datafile_path)
self.spacex()
self.data["last_update_epoch"] = datetime.now().timestamp(); self.data["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
utils.save_json(self.data, self.datafile_path)
self.satview()
self.data["last_update_epoch"] = datetime.now().timestamp(); self.data["last_update"] = datetime.now().astimezone(tz=None).strftime("%d/%b/%Y @ %H:%M:%S %Z")
utils.save_json(self.data, self.datafile_path)
if __name__ == "__main__":
  # Script entry point: build the aggregator and refresh all feeds.
  astro = Astro()
  astro.update()
|
{"/bitcoin.py": ["/utils.py"], "/kalpi.py": ["/utils.py"], "/astro.py": ["/utils.py"]}
|
2,245
|
7h3rAm/kalpi
|
refs/heads/master
|
/utils.py
|
import os
import re
import glob
import json
import yaml
import errno
import codecs
import locale
import fnmatch
import datetime
import urllib.parse
import urllib.request
import requests
import sparkline
import prettytable
from PIL import Image
import matplotlib.pyplot as plt
# ANSI SGR color codes shared by highlight(); unknown colors fall back to black.
_ANSI_RESET = "\x1b[0m"
_ANSI_COLORS = {
  "black": "30", "red": "31", "green": "32", "yellow": "33",
  "blue": "34", "magenta": "35", "cyan": "36",
}

def highlight(text, color="black", bold=False):
  """Wrap *text* in ANSI escape codes for *color* (optionally bold).

  Unrecognized color names render as black, matching the original chain's
  final else branch. Replaces an 8-way if/elif ladder with a table lookup.
  """
  code = _ANSI_COLORS.get(color.lower().strip(), "30")
  colorcode = "\x1b[%d;%sm" % (1 if bold else 0, code)
  return "%s%s%s" % (colorcode, text, _ANSI_RESET)

# Thin per-color convenience wrappers (normal and bold variants).
def black(text): return highlight(text, color="black", bold=False)
def black_bold(text): return highlight(text, color="black", bold=True)
def red(text): return highlight(text, color="red", bold=False)
def red_bold(text): return highlight(text, color="red", bold=True)
def green(text): return highlight(text, color="green", bold=False)
def green_bold(text): return highlight(text, color="green", bold=True)
def yellow(text): return highlight(text, color="yellow", bold=False)
def yellow_bold(text): return highlight(text, color="yellow", bold=True)
def blue(text): return highlight(text, color="blue", bold=False)
def blue_bold(text): return highlight(text, color="blue", bold=True)
def magenta(text): return highlight(text, color="magenta", bold=False)
def magenta_bold(text): return highlight(text, color="magenta", bold=True)
def cyan(text): return highlight(text, color="cyan", bold=False)
def cyan_bold(text): return highlight(text, color="cyan", bold=True)
def debug(text):
  """Print *text* with a blue '[*]' debug prefix."""
  print("%s %s" % (blue_bold("[*]"), text))
def info(text):
  """Print *text* with a green '[+]' info prefix."""
  print("%s %s" % (green_bold("[+]"), text))
def warn(text):
  """Print *text* with a yellow '[!]' warning prefix."""
  print("%s %s" % (yellow_bold("[!]"), text))
def error(text):
  """Print *text* with a red '[-]' error prefix."""
  print("%s %s" % (red_bold("[-]"), text))
def expand_env(var="$HOME"):
  """Return the value of environment variable *var* ('$' characters stripped).

  Raises KeyError when the variable is unset, same as the original.
  """
  return os.environ[var.replace("$", "")]

def trim(text, maxq=40):
  """Truncate *text* to *maxq* characters, appending '...' when shortened."""
  return "%s..." % (text[:maxq]) if len(text) > maxq else text

def mkdirp(path):
  """Create *path* like `mkdir -p`: no error if the directory already exists."""
  # exist_ok=True replaces the manual errno.EEXIST/isdir dance; it still
  # raises (FileExistsError, an OSError) when path exists as a non-directory,
  # matching the original re-raise branch.
  os.makedirs(path, exist_ok=True)
def search_files(dirpath="./", regex="*"):
  """Recursively collect files under *dirpath* whose names match the shell
  pattern *regex*, excluding bookkeeping paths (.git, caches, results,
  known yml files). Returns a de-duplicated list (unordered)."""
  excluded = ('/__pycache__', '/results', '/.git', '/summary.yml',
              '/techniques.yml', '/ttps.yml', '/test.ttp.yml')
  found = []
  for root, _, filenames in os.walk(dirpath):
    for name in fnmatch.filter(filenames, regex):
      candidate = os.path.join(root, name)
      if os.path.exists(candidate):
        found.append(candidate)
  kept = [path for path in found if not any(marker in path for marker in excluded)]
  return list(set(kept))

def search_files_all(dirpath):
  """All files under *dirpath* (minus excluded paths)."""
  return search_files(dirpath, regex="*")

def search_files_yml(dirpath):
  """All *.yml files under *dirpath* (minus excluded paths)."""
  return search_files(dirpath, regex="*.yml")

def search_files_md(dirpath):
  """All *.md files under *dirpath* (minus excluded paths)."""
  return search_files(dirpath, regex="*.md")
def download_json(url):
  """GET *url* and return the parsed JSON body, or None on a non-200 status."""
  response = requests.get(url)
  return response.json() if response.status_code == 200 else None

def load_json(filename):
  """Parse the JSON document stored in *filename*."""
  with open(filename) as handle:
    return json.load(handle)

def save_json(datadict, filename):
  """Write *datadict* to *filename* as pretty-printed, key-sorted UTF-8 JSON."""
  with open(filename, "w", encoding="utf-8") as handle:
    json.dump(datadict, handle, ensure_ascii=False, indent=2, sort_keys=True)
def load_file(filename):
  """Return the lines of *filename* as a list; a trailing newline in the
  file yields a final '' entry (split, not splitlines)."""
  with open(filename) as handle:
    return handle.read().split("\n")

def save_file(datalist, filename):
  """Write *datalist* to *filename*, one item per line, ending with a newline."""
  with open(filename, "w") as handle:
    handle.write("\n".join(datalist))
    handle.write("\n")
def load_yaml(filename):
  """Parse a YAML document with safe_load (no arbitrary object construction).

  Fix: the original passed a bare open() into yaml.safe_load and leaked the
  file handle; the with-statement closes it deterministically.
  """
  with open(filename) as fp:
    return yaml.safe_load(fp)

def save_yaml(datayml, filename):
  """Serialize *datayml* into *filename* as flow-style YAML."""
  with open(filename, "w") as fp:
    yaml.dump(datayml, fp, default_flow_style=True)

def dict2yaml(datadict):
  """Render a JSON-serializable dict as block-style YAML (round-trips
  through json.dumps to normalize types)."""
  return yaml.safe_dump(yaml.load(json.dumps(datadict), Loader=yaml.FullLoader), default_flow_style=False)
def file_open(filename):
  """Read *filename* as UTF-8 text; returns None for a falsy/empty filename."""
  if filename and filename != "":
    with codecs.open(filename, mode="r", encoding="utf-8") as fo:
      return fo.read()
def file_save(filename, data, mode="w"):
  """Write *data* to *filename* (creating parent directories), best-effort.

  Falls back from a UTF-8 codecs writer to a plain open() with the platform
  default codec, and finally to a UTF-16 surrogatepass round-trip for data
  containing unpaired surrogates. The broad excepts are deliberate: this is
  a never-fail writer, not a validator.
  """
  if filename and filename != "":
    if "/" in filename:
      mkdirp(os.path.dirname(filename))
    try:
      with codecs.open(filename, mode, encoding="utf-8") as fo:
        fo.write(data)
    except Exception as ex:
      # Retry with the default codec when UTF-8 encoding fails.
      with open(filename, mode) as fo:
        try:
          fo.write(data)
        except:
          # Last resort: normalize surrogates via a UTF-16 round-trip.
          fo.write(data.encode('utf-16', 'surrogatepass').decode('utf-16'))
def download(url, filename, timeout=5):
  """Fetch *url* into *filename*; return the filename, or False on non-200.

  Raises requests exceptions (timeout, connection errors) to the caller,
  same as before.
  """
  res = requests.get(url, timeout=timeout)
  print(url, res.status_code)
  if res.status_code == 200:
    # Fix: close the output file deterministically (the original used
    # open(filename, "wb").write(...) and leaked the handle).
    with open(filename, "wb") as fp:
      fp.write(res.content)
    return filename
  return False
def get_http_res(url, headers={}):
  """GET *url* (after cleanup_url) and return the raw response object.

  NOTE(review): mutable default for *headers* — harmless while never mutated.
  """
  res = requests.get(cleanup_url(url), headers=headers)
  print(url, res.status_code)
  return res
def get_http(url, headers={}):
  """GET *url* and return the parsed JSON body, or {} on a non-200 status."""
  res = requests.get(cleanup_url(url), headers=headers)
  print(url, res.status_code)
  if res.status_code == 200:
    return res.json()
  else:
    return {}
def post_http(url, data={}, headers={}):
  """POST *data* as a JSON string to *url*; return parsed JSON or {} on non-200."""
  res = requests.post(cleanup_url(url), data=json.dumps(data), headers=headers)
  print(url, res.status_code)
  if res.status_code == 200:
    return res.json()
  else:
    return {}
def strip_html(data):
  """Collapse an HTML document to whitespace-normalized plain text.

  FIXME(review): BeautifulSoup is never imported in this module, so this
  function raises NameError as written; it needs
  `from bs4 import BeautifulSoup` (and the lxml parser) to work.
  """
  # Raw string fixes the invalid escape sequence in the old "\s+" literal.
  return re.sub(r"\s+", " ", BeautifulSoup(data, "lxml").text)
def datetimefilter(datestr, format='%Y/%m/%d %H:%M:%S'):
  """Re-format a 'YYYYMMDDTHH:MM:SSZ' or bare 'YYYYMMDD' stamp per *format*.

  Raises ValueError when neither pattern matches (the original's bare
  except also swallowed unrelated errors; now only parse failures fall
  through to the date-only format).
  """
  try:
    return datetime.datetime.strptime(str(datestr), '%Y%m%dT%H:%M:%SZ').strftime(format)
  except ValueError:
    return datetime.datetime.strptime(str(datestr), '%Y%m%d').strftime(format)
def cleanup_url(url):
  """Collapse duplicate slashes while restoring the '://' scheme separator."""
  return url.replace("//", "/").replace(":/", "://")

def cleanup_name(name):
  """Lowercase *name* and strip every non-alphanumeric character.

  Fix: removed the unreachable second return statement (dead code left
  over from the pre-regex implementation).
  """
  return re.sub(r"[\W_]", "", name.lower())
def _is_anchor_ref(item):
  """True when *item* names a local writeup section anchor."""
  return item.startswith(("enumerate_", "exploit_", "privesc_"))

def ghsearchlinks(items, repourl="https://github.com/7h3rAm/writeups", delim=", "):
  """Render item(s) as GitHub code-search markdown links.

  A bare string gets a backticked label; a list joins plain labels with
  *delim* (preserved asymmetry from the original).
  """
  if isinstance(items, str):
    return "[`%s`](%s/search?q=%s&unscoped_q=%s)" % (items, repourl, items, items)
  return delim.join(["[%s](%s/search?q=%s&unscoped_q=%s)" % (x, repourl, x, x) for x in items])

def anchorformat(items, repourl="https://github.com/7h3rAm/writeups", delim=", "):
  """Render item(s) as in-page anchor links for known section prefixes,
  falling back to GitHub search links otherwise.

  Fix: the string and list branches duplicated the prefix test; both now
  share _is_anchor_ref and one render path.
  """
  def render(item):
    if _is_anchor_ref(item):
      return "[`%s`](%s#%s)" % (item, repourl, item)
    return ghsearchlinks(item, repourl)
  if isinstance(items, str):
    return render(items)
  return delim.join([render(x) for x in items])

def mdurl(datadict):
  """Render a list of {'name': ..., 'url': ...} dicts as markdown links
  separated by double <br /> breaks."""
  return "<br /><br />".join(["[%s](%s)" % (item["name"], item["url"]) for item in datadict])
def obfuscate(data, mass=0.81):
  # calculate event horizon using the given mass
  # use eh to hide remaining data forever
  """Keep the leading *mass* fraction of each string and mask the rest
  with '*'; accepts a single string or an iterable of strings."""
  def veil(text):
    visible = int(len(text) * mass)
    return text[:visible] + "*" * (len(text) - visible)
  if isinstance(data, str):
    return veil(data)
  return [veil(item) for item in data]

def monojoin(items):
  """Wrap item(s) in markdown backticks; lists are joined with double
  <br /> breaks."""
  if isinstance(items, str):
    return "`%s`" % (items)
  return "<br /><br />".join(["`%s`" % (item) for item in items])
def sec_to_human(secs, sep=" and "):
  """Humanize a duration in seconds, e.g. 61 -> '1 minute and 1 second'.

  Non-positive input returns '0 seconds'. Units are weeks down to seconds.
  """
  units = (
    (7 * 24 * 60 * 60, "week"),
    (24 * 60 * 60, "day"),
    (60 * 60, "hour"),
    (60, "minute"),
    (1, "second"),
  )
  if secs <= 0:
    return "0 seconds"
  parts = []
  remaining = secs
  for divisor, name in units:
    count = int(remaining / divisor)
    if count:
      suffix = "s" if abs(count) > 1 else ""
      parts.append("%s %s%s" % (count, name, suffix))
      remaining -= count * divisor
  return sep.join(parts)
def currency_human(num):
  """Abbreviate an amount: 500 -> '500', 1500 -> '1K', 2e6 -> '2M' (K/M/B/T).

  Non-numeric input falls back (deliberately broad except, preserved) to a
  locale-formatted currency string.
  """
  try:
    value = num
    for suffix in ('', 'K', 'M', 'B', 'T'):
      if abs(value) < 1000.0:
        return "%d%s" % (value, suffix)
      value /= 1000.0
    return "%d%s" % (value, 'T')
  except:
    import traceback
    print(traceback.print_exc())
    locale.setlocale(locale.LC_ALL, "")
    return locale.currency(num, grouping=True)

def sizeof_fmt(num, suffix='B'):
  # https://stackoverflow.com/a/1094933/1079836
  """Humanize a byte count with binary prefixes: 1024 -> '1.0KiB'."""
  value = num
  for unit in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
    if abs(value) < 1024.0:
      return "%3.1f%s%s" % (value, unit, suffix)
    value /= 1024.0
  return "%.1f%s%s" % (value, 'Yi', suffix)
def customsort(items):
  """Sort numeric strings by integer value; output strings are normalized
  through int() (e.g. '007' becomes '7'), as before."""
  return [str(value) for value in sorted(int(item) for item in items)]

def lookahead(iterable):
  # https://stackoverflow.com/a/1630350
  """Yield (item, has_more) pairs; has_more is False only for the last item.

  Fix: an empty iterable now yields nothing — previously the bare next()
  let StopIteration escape the generator, which PEP 479 turns into a
  RuntimeError.
  """
  it = iter(iterable)
  try:
    last = next(it)
  except StopIteration:
    return
  for val in it:
    yield last, True
    last = val
  yield last, False
def yturl2verboseid(url):
  #https://www.youtube.com/watch?v=CO_g3wtC7rk&t=0
  """Convert a YouTube watch URL to 'youtube?v=<id>'; return *url* unchanged
  when it has no query string or no v= parameter.

  NOTE(review): the whole URL is lowercased before matching, which also
  lowercases the video id even though YouTube ids are case-sensitive —
  preserved for backward compatibility.
  """
  normalized = url.lower().strip()
  # Fix: guard against URLs without a query string (split("?", 1)[1] used
  # to raise IndexError).
  if "?" not in normalized:
    return url
  for param in normalized.split("?", 1)[1].split("&"):
    if param.startswith("v="):
      return "youtube?%s" % (param)
  return url

def sparkify(difficulty):
  """Render a ratings list as a unicode sparkline (thin sparkline wrapper)."""
  return sparkline.sparkify(difficulty)

def chunkify(l, n):
  """Yield successive *n*-sized slices of sequence *l* (last may be shorter)."""
  for i in range(0, len(l), n):
    yield l[i:i + n]
def to_color_difficulty(sparkline):
  """Color a 10-char difficulty sparkline: first 3 chars green, middle 4
  yellow, last 3 red."""
  return "".join([green(sparkline[:3]), yellow(sparkline[3:7]), red(sparkline[7:])])
# Exact-match (case-insensitive) label -> emoji table.
# https://github.com/ikatyang/emoji-cheat-sheet
_EMOJI_MAP = {
  "private": "🔒", "public": "🔓", "oscplike": "⚠️",
  "access_root": "🩸", "access_user": "💧",
  "linux": "🐧", "windows": "🔷",
  "difficulty_unknown": "⚪", "easy": "🟢", "medium": "🟡",
  "hard": "🟠", "insane": "🔴",
  "destroyed": "🔴", "retired": "🟡", "active": "🟢", "unknown": "⚪",
  "lost": "🔴", "inactive": "🟠", "expended": "🟡",
  "capsule": "💊", "satellite": "🛰️",
  "bad": "🔴", "good": "🟢",
}

def to_emoji(text):
  """Map a status/label string to an emoji; anything unmatched maps to ⚪.

  'bsd' and 'dragon' are substring matches (e.g. 'freebsd', 'crew dragon'),
  as in the original chain; everything else is an exact match. Replaces a
  24-branch if/elif ladder — no map key contains 'bsd'/'dragon', so the
  lookup-first order preserves the original precedence.
  """
  lowered = str(text).lower()
  if lowered in _EMOJI_MAP:
    return _EMOJI_MAP[lowered]
  if "bsd" in lowered:
    return "👹"
  if "dragon" in lowered:
    return "🐉"
  return "⚪"
def to_markdown_table(pt):
  """Convert a prettytable.PrettyTable to a GitHub-markdown table string.

  Temporarily forces '|' junctions, drops the top/bottom border rows, and
  restores the table's original junction character before returning.
  """
  _junc = pt.junction_char
  if _junc != "|":
    pt.junction_char = "|"
  # Slice [1:-1]: markdown needs only header + separator + body rows.
  markdown = [row for row in pt.get_string().split("\n")[1:-1]]
  pt.junction_char = _junc
  return "\n".join(markdown)
def get_table(header, rows, delim="___", aligndict=None, markdown=False, colalign=None):
  """Build a table from delimited row strings.

  header: column names; rows: strings whose columns are joined by *delim*.
  markdown=True returns a markdown table (colalign 'left'/'center'/'right'
  rewrites the separator row's colons); otherwise returns a borderless
  ASCII table with either caller-supplied alignment (*aligndict*) or the
  default alignments for the machine-listing columns.
  """
  table = prettytable.PrettyTable()
  table.field_names = header
  table.align = "c"; table.valign = "m"
  for row in rows:
    table.add_row(row.split(delim))
  if markdown:
    if colalign in ["left", "center", "right"]:
      if colalign == "left":
        # ':' placed after '|' pins every column left.
        return to_markdown_table(table).replace("|-", "|:")
      elif colalign == "center":
        return to_markdown_table(table).replace("-|-", ":|:").replace("|-", "|:").replace("-|", ":|")
      elif colalign == "right":
        return to_markdown_table(table).replace("-|", ":|")
    else:
      #return table.get_html_string()
      return to_markdown_table(table)
  else:
    if aligndict:
      # Caller-supplied per-column alignment, e.g. {"Name": "l"}.
      for colheader in aligndict:
        table.align[colheader] = aligndict[colheader]
    else:
      # Default alignments for the well-known listing columns.
      table.align["#"] = "r"
      table.align["ID"] = "r"
      table.align["Name"] = "l"
      table.align["Expires"] = "l"
      table.align["Follow"] = "l"
      table.align["Private"] = "c"
      table.align["OS"] = "c"
      table.align["Rating"] = "l"
      table.align["Difficulty"] = "c"
      table.align["Owned"] = "l"
      table.align["OSCPlike"] = "l"
    # Borderless look: spaces for verticals/junctions, dashes for rules.
    table.vertical_char = " "
    table.horizontal_char = "-"
    table.junction_char = " "
    return table.get_string()
def to_table(header, rows, delim="___", aligndict=None, markdown=False):
  """Print the table produced by get_table() with the same arguments."""
  print(get_table(header, rows, delim=delim, aligndict=aligndict, markdown=markdown))
def to_json(data):
  """Pretty-print *data* as indented, key-sorted JSON."""
  print(json.dumps(data, indent=2, sort_keys=True))
def show_machines(data, sort_key="name", jsonify=False):
  """Print a list of machine dicts as a colorized table, sorted by *sort_key*.

  Selects one of four column layouts by probing the first entry's keys:
  'writeuppdfurl' (writeups), 'expires_at' (expiring machines),
  'search_url' (followable machines), else a generic layout. An entry that
  is an API status dict ('success' key) is JSON-printed instead.

  TODO(review): the four branches are near-duplicates differing only in one
  column and the emoji-vs-raw handling of difficulty/owned — candidates for
  a shared row-builder helper.
  NOTE(review): to_emoji('access_none')/'notoscplike' fall through to the
  default ⚪ emoji — apparently intentional.
  """
  if not len(data):
    return
  elif "success" in data:
    return to_json(data)
  elif jsonify:
    to_json(data)
  else:
    rows = []
    if "writeuppdfurl" in data[0]:
      # Writeup listing: includes the Private column.
      header = ["#", "ID", "Name", "Private", "OS", "Rating", "Difficulty", "Owned", "OSCPlike"]
      for idx, entry in enumerate(sorted(data, key=lambda k: k[sort_key].lower())):
        # verbose_id is "<platform>#<number>"; color the two halves differently.
        mid = "%s%s" % (blue("%s#" % (entry["verbose_id"].split("#")[0])), blue_bold("%s" % (entry["verbose_id"].split("#")[1])))
        name = black_bold(entry["name"])
        os = to_emoji(entry["os"])
        difficulty = to_emoji(entry["difficulty"]) if entry.get("difficulty") and entry["difficulty"] else to_emoji("difficulty_unknown")
        rating = to_color_difficulty(sparkify(entry["difficulty_ratings"])) if entry.get("difficulty_ratings") else ""
        if entry.get("owned_root") and entry["owned_root"]:
          owned = to_emoji("access_root")
        elif entry.get("owned_user") and entry["owned_user"]:
          owned = to_emoji("access_user")
        else:
          owned = to_emoji("access_none")
        oscplike = to_emoji("oscplike") if entry["oscplike"] else to_emoji("notoscplike")
        private = to_emoji("private") if entry["private"] else to_emoji("public")
        rows.append("%s.___%s___%s___%s___%s___%s___%s___%s___%s" % (
          idx+1,
          mid,
          name,
          private,
          os,
          rating,
          difficulty,
          owned,
          oscplike,
        ))
    elif "expires_at" in data[0]:
      # Expiring-machine listing: Expires column; emoji applied at row build.
      header = ["#", "ID", "Name", "Expires", "OS", "Difficulty", "Rating", "Owned", "OSCPlike"]
      for idx, entry in enumerate(sorted(data, key=lambda k: k[sort_key].lower())):
        mid = "%s%s" % (blue("%s#" % (entry["verbose_id"].split("#")[0])), blue_bold("%s" % (entry["verbose_id"].split("#")[1])))
        name = black_bold(entry["name"])
        os = to_emoji(entry["os"])
        difficulty = entry["difficulty"] if entry.get("difficulty") and entry["difficulty"] else "difficulty_unknown"
        rating = to_color_difficulty(sparkify(entry["difficulty_ratings"])) if entry.get("difficulty_ratings") else ""
        if entry.get("owned_root") and entry["owned_root"]:
          owned = "access_root"
        elif entry.get("owned_user") and entry["owned_user"]:
          owned = "access_user"
        else:
          owned = "access_none"
        oscplike = to_emoji("oscplike") if entry["oscplike"] else to_emoji("notoscplike")
        rows.append("%s.___%s___%s___%s___%s___%s___%s___%s___%s" % (
          idx+1,
          mid,
          name,
          entry["expires_at"],
          os,
          to_emoji(difficulty),
          rating,
          to_emoji(owned),
          to_emoji(oscplike),
        ))
    elif "search_url" in data[0]:
      # Followable listing: Follow column carries the search URL.
      header = ["#", "ID", "Name", "Follow", "OS", "Rating", "Difficulty", "Owned", "OSCPlike"]
      for idx, entry in enumerate(sorted(data, key=lambda k: k[sort_key].lower())):
        mid = "%s%s" % (blue("%s#" % (entry["verbose_id"].split("#")[0])), blue_bold("%s" % (entry["verbose_id"].split("#")[1])))
        name = black_bold(entry["name"])
        follow = blue(entry["search_url"])
        os = to_emoji(entry["os"])
        difficulty = to_emoji(entry["difficulty"]) if entry.get("difficulty") and entry["difficulty"] else to_emoji("difficulty_unknown")
        rating = to_color_difficulty(sparkify(entry["difficulty_ratings"])) if entry.get("difficulty_ratings") else ""
        if entry.get("owned_root") and entry["owned_root"]:
          owned = to_emoji("access_root")
        elif entry.get("owned_user") and entry["owned_user"]:
          owned = to_emoji("access_user")
        else:
          owned = to_emoji("access_none")
        oscplike = to_emoji("oscplike") if entry["oscplike"] else to_emoji("notoscplike")
        rows.append("%s.___%s___%s___%s___%s___%s___%s___%s___%s" % (
          idx+1,
          mid,
          name,
          follow,
          os,
          rating,
          difficulty,
          owned,
          oscplike,
        ))
    else:
      # Generic listing: no extra column.
      header = ["#", "ID", "Name", "OS", "Rating", "Difficulty", "Owned", "OSCPlike"]
      for idx, entry in enumerate(sorted(data, key=lambda k: k[sort_key].lower())):
        mid = "%s%s" % (blue("%s#" % (entry["verbose_id"].split("#")[0])), blue_bold("%s" % (entry["verbose_id"].split("#")[1])))
        name = black_bold(entry["name"])
        os = to_emoji(entry["os"])
        difficulty = to_emoji(entry["difficulty"]) if entry.get("difficulty") and entry["difficulty"] else to_emoji("difficulty_unknown")
        rating = to_color_difficulty(sparkify(entry["difficulty_ratings"])) if entry.get("difficulty_ratings") else ""
        if entry.get("owned_root") and entry["owned_root"]:
          owned = to_emoji("access_root")
        elif entry.get("owned_user") and entry["owned_user"]:
          owned = to_emoji("access_user")
        else:
          owned = to_emoji("access_none")
        oscplike = to_emoji("oscplike") if entry["oscplike"] else to_emoji("notoscplike")
        rows.append("%s.___%s___%s___%s___%s___%s___%s___%s" % (
          idx+1,
          mid,
          name,
          os,
          rating,
          difficulty,
          owned,
          oscplike,
        ))
    to_table(header=header, rows=rows, delim="___", aligndict=None, markdown=False)
def to_xkcd(plotdict, filename, title, rotate=True, trimlength=20):
  """Render {label: count} as an xkcd-style bar chart saved to *filename*.

  Labels at least *trimlength* chars long are elided with '...'; rotate=True
  draws labels vertically at the bar base, otherwise horizontally near the
  bar top. Axes, spines, and ticks are hidden for the hand-drawn look.
  """
  datadict = {}
  for key in plotdict:
    # Each entry becomes ([label], [value]) — the shape plt.bar expects.
    datadict[key] = [[key], [plotdict[key]]]
  with plt.xkcd():
    for idx, label in enumerate(sorted(datadict)):
      plt.bar(datadict[label][0], datadict[label][1])
      text = "%s... (%d)" % ("".join(datadict[label][0][0][:trimlength]), datadict[label][1][0]) if len(label) >= trimlength else "%s (%d)" % (datadict[label][0][0], datadict[label][1][0])
      if rotate:
        angle = 90
        x, y = idx, 0.5
      else:
        angle = 0
        # Nudge horizontal labels toward the bar center.
        padding = (len(label)/2)/10
        x, y = idx-padding, datadict[label][1][0]-1
      plt.text(s=text, x=x, y=y, color="black", verticalalignment="center", horizontalalignment="left", size=15, rotation=angle, rotation_mode="anchor")
    plt.suptitle(title, fontsize=18, color="black")
    plt.gca().spines["left"].set_color("black")
    plt.gca().spines["bottom"].set_color("black")
    plt.gca().spines["left"].set_visible(False)
    plt.gca().spines["right"].set_visible(False)
    plt.gca().spines["top"].set_visible(False)
    plt.xticks([]); plt.yticks([])
    plt.tight_layout()
    plt.savefig(filename, dpi=300)
    plt.close()
def to_sparklines(items, filename, transparent=True):
  """Save a tiny bar-sparkline PNG for *items*, colored green/yellow/red
  by position (3/4/3 split).

  NOTE(review): colormap has exactly 10 entries, so this raises IndexError
  when len(items) > 10 — callers apparently always pass 10 ratings.
  """
  colormap = ["#9acc14", "#9acc14", "#9acc14", "#f7af3e", "#f7af3e", "#f7af3e", "#f7af3e", "#db524b", "#db524b", "#db524b"]
  barlist = plt.bar([str(x) for x in range(len(items))], items, width=0.95)
  for i in range(len(items)):
    barlist[i].set_color(colormap[i])
  ax = plt.gca()
  ax.spines["bottom"].set_visible(False)
  ax.spines["left"].set_visible(False)
  ax.spines["right"].set_visible(False)
  ax.spines["top"].set_visible(False)
  plt.xticks([]); plt.yticks([])
  plt.tight_layout()
  plt.savefig(filename, dpi=300, transparent=transparent)
  plt.close()
def hex2rgb(hexstr="#ffcb6b"):
  """Convert '#rrggbb' to an (r, g, b) int tuple; returns None for empty input."""
  if hexstr and hexstr != "":
    digits = hexstr.replace("#", "")
    return tuple(int(digits[i:i + 2], 16) for i in (0, 2, 4))

def rgb2hex(rgb=(255, 0, 0)):
  """Convert an (r, g, b) tuple to '#rrggbb'; returns None for falsy input.

  Fix: channels are zero-padded with '%02x' — the old '%x' produced
  malformed colors such as '#ff00' for (255, 0, 0).
  """
  if rgb:
    return "#%s" % ("".join(["%02x" % (x) for x in rgb]))

def tint(color, factor=0.35):
  # https://stackoverflow.com/a/6615053/1079836
  # (tint)factor range: 0.1 (dark) - 0.9 (light)
  """Lighten '#rrggbb' *color* toward white by *factor*."""
  rgb = hex2rgb(color)
  trgb = tuple(int(channel + (factor * (255 - channel))) for channel in rgb)
  return rgb2hex(trgb)
|
{"/bitcoin.py": ["/utils.py"], "/kalpi.py": ["/utils.py"], "/astro.py": ["/utils.py"]}
|
2,246
|
dmodena/projetoextensao
|
refs/heads/main
|
/core/models.py
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
class Edital(models.Model):
    """A public course call (edital): enrollment window, course dates, capacity."""
    titulo = models.CharField(max_length = 100)
    descricao = models.TextField()
    # Enrollment window.
    inicio_inscricoes = models.DateField()
    fim_inscricoes = models.DateField()
    # Course run window.
    inicio_curso = models.DateField()
    fim_curso = models.DateField()
    # Number of available seats.
    vagas = models.IntegerField()
    pre_requisitos = models.TextField()
    # Link to the published edital document.
    edital_link = models.CharField(max_length = 100)
    # Course workload in hours.
    carga_horaria = models.IntegerField(default = 1)
    cidade = models.CharField(max_length = 100)
    # Visibility / soft-delete flag.
    ativo = models.BooleanField(default = True)
    def __str__(self):
        return self.titulo
class Aluno(models.Model):
    """A student record, owned by the site user who registered it."""
    nome = models.CharField(max_length = 100)
    # Postal address fields.
    logradouro = models.CharField(max_length = 100)
    numero = models.CharField(max_length = 10)
    complemento = models.TextField(blank = True, default = '')
    cep = models.CharField(max_length = 10)
    cidade = models.CharField(max_length = 100)
    estado = models.CharField(max_length = 2)
    # Brazilian identity documents; CPF is the unique key.
    rg = models.CharField(max_length = 20)
    cpf = models.CharField(unique = True, max_length = 20)
    email = models.CharField(max_length = 30)
    telefone = models.CharField(max_length = 20)
    nascimento = models.DateField()
    # Owning auth user; kept (null) if the user account is deleted.
    created_by = models.ForeignKey(User, null = True, on_delete = models.SET_NULL)
    ativo = models.BooleanField(default = True)
    def __str__(self):
        return self.nome
class Inscrito(models.Model):
    """An enrollment linking an Aluno to an Edital, with status timestamps."""
    inscrito_em = models.DateTimeField(default = timezone.now)
    matriculado_em = models.DateTimeField(null = True)
    aprovado_em = models.DateTimeField(null = True)
    reprovado_em = models.DateTimeField(null = True)
    aluno = models.ForeignKey(Aluno, on_delete = models.CASCADE)
    edital = models.ForeignKey(Edital, on_delete = models.CASCADE)
    # Lifecycle as used by core/views.py: 0=new, 1=inscrito, 2=matriculado,
    # 3=aprovado, 4=reprovado.
    status = models.IntegerField(default = 0)
    observacoes = models.TextField()
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,247
|
dmodena/projetoextensao
|
refs/heads/main
|
/core/templatetags/custom_filters.py
|
from django import template
register = template.Library()
@register.filter
def date_mask(value):
    """Template filter: format a date/datetime as 'dd/mm/YYYY'."""
    return value.strftime('%d/%m/%Y')
@register.filter
def datefull_mask(value):
    """Template filter: long Portuguese date, e.g. '01 de Janeiro de 2020'."""
    # Month names indexed by .month (1-12); replaces a 12-branch elif chain.
    meses = ("", "Janeiro", "Fevereiro", "Março", "Abril", "Maio", "Junho",
             "Julho", "Agosto", "Setembro", "Outubro", "Novembro", "Dezembro")
    mes = meses[value.month]
    return value.strftime('%d de ') + mes + value.strftime(' de %Y')
@register.filter
def datetime_mask(value):
    """Template filter: format a datetime as 'dd/mm/YYYY HH:MM'."""
    return value.strftime('%d/%m/%Y %H:%M')
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,248
|
dmodena/projetoextensao
|
refs/heads/main
|
/core/migrations/0001_initial.py
|
# Generated by Django 2.0.1 on 2018-03-15 16:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for core: Aluno, Edital, Inscrito.

    NOTE(review): auto-generated migration — do not edit by hand; make a
    new migration for any schema change so it stays in sync with models.py.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Aluno',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=100)),
                ('logradouro', models.CharField(max_length=100)),
                ('numero', models.CharField(max_length=10)),
                ('complemento', models.TextField(blank=True, default='')),
                ('cep', models.CharField(max_length=10)),
                ('cidade', models.CharField(max_length=100)),
                ('estado', models.CharField(max_length=2)),
                ('rg', models.CharField(max_length=20)),
                ('cpf', models.CharField(max_length=20, unique=True)),
                ('email', models.CharField(max_length=30)),
                ('telefone', models.CharField(max_length=20)),
                ('nascimento', models.DateField()),
                ('ativo', models.BooleanField(default=True)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Edital',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('titulo', models.CharField(max_length=100)),
                ('descricao', models.TextField()),
                ('inicio_inscricoes', models.DateField()),
                ('fim_inscricoes', models.DateField()),
                ('inicio_curso', models.DateField()),
                ('fim_curso', models.DateField()),
                ('vagas', models.IntegerField()),
                ('pre_requisitos', models.TextField()),
                ('edital_link', models.CharField(max_length=100)),
                ('carga_horaria', models.IntegerField(default=1)),
                ('cidade', models.CharField(max_length=100)),
                ('ativo', models.BooleanField(default=True)),
            ],
        ),
        migrations.CreateModel(
            name='Inscrito',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('inscrito_em', models.DateTimeField(default=django.utils.timezone.now)),
                ('matriculado_em', models.DateTimeField(null=True)),
                ('aprovado_em', models.DateTimeField(null=True)),
                ('reprovado_em', models.DateTimeField(null=True)),
                ('status', models.IntegerField(default=0)),
                ('observacoes', models.TextField()),
                ('aluno', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Aluno')),
                ('edital', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Edital')),
            ],
        ),
    ]
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,249
|
dmodena/projetoextensao
|
refs/heads/main
|
/core/urls.py
|
from django.urls import path
from core import views
urlpatterns = [
    # Site root redirects to the editais listing.
    path('', views.r_editais),
    # Editais CRUD.
    path('editais/', views.editais, name='editais'),
    path('editais/novo/', views.edital_create, name='editais/novo'),
    path('editais/editar/<id>', views.edital_edit, name='editais/editar'),
    path('editais/excluir/<id>', views.edital_remove, name='editais/excluir'),
    # Alunos (students).
    path('alunos/', views.alunos, name='alunos'),
    path('alunos/novo/', views.aluno_create, name='alunos/novo'),
    path('alunos/editar/<id>', views.aluno_edit, name='alunos/editar'),
    # Enrollments: listings, creation, and status transitions.
    path('inscricoes/', views.inscricoes, name='inscricoes'),
    path('inscricoes/edital/<id>', views.inscricoes_edital, name='inscricoes/edital'),
    path('inscricoes/aluno/', views.inscricoes_aluno, name='inscricoes/aluno'),
    path('inscricoes/edital/novo/<id>', views.inscricoes_create, name='inscricao/nova'),
    path('inscricoes/matricular/<id>', views.inscricoes_matricular, name='inscricao/matricular'),
    path('inscricoes/cancelar/<id>', views.inscricoes_cancelar, name='inscricao/cancelar'),
    path('inscricoes/aprovar/<id>', views.inscricoes_aprovar, name='inscricao/aprovar'),
    path('inscricoes/reprovar/<id>', views.inscricoes_reprovar, name='inscricao/reprovar'),
    path('inscricoes/excluir/<id>', views.inscricoes_remove, name='inscricao/excluir'),
    # Certificates and account registration.
    path('certificados/<id>', views.certificado, name='certificado'),
    path('registrar/', views.signup, name='registrar'),
]
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,250
|
dmodena/projetoextensao
|
refs/heads/main
|
/core/views.py
|
from django.shortcuts import render, redirect
from core.models import Edital, Aluno, Inscrito
from core.forms import EditalForm, AlunoForm
from core.utils import static_files_url
from django.utils import timezone
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import UserCreationForm
# from django.core.email import send_email
def home(request):
    """Render the landing page."""
    return render(request, 'core/index.html')
def editais(request, mensagem = None):
    """List all editais, newest first; *mensagem* is an optional flash message."""
    editais = Edital.objects.all().order_by('-id')
    return render(request, 'core/editais/lista.html', {'editais': editais, 'mensagem': mensagem, 'static_url': static_files_url})
def r_editais(request):
    """Redirect the site root to the editais listing."""
    return redirect(editais)
def edital_create(request):
    """Create an Edital; saving requires a valid form and a staff user."""
    form = EditalForm(request.POST or None)
    if request.method == 'POST':
        if form.is_valid() and request.user.is_staff:
            form.save()
            return redirect(editais)
    return render(request, 'core/editais/novo.html', {'form': form, 'static_url': static_files_url})
def edital_edit(request, id):
    """Edit Edital *id*; staff only. Raises Edital.DoesNotExist for a bad id."""
    edital = Edital.objects.get(id=id)
    form = EditalForm(request.POST or None, instance = edital)
    if form.is_valid() and request.user.is_staff:
        form.save()
        return redirect(editais)
    return render(request, 'core/editais/novo.html', {'form': form, 'static_url': static_files_url})
def edital_remove(request, id):
    """Delete Edital *id* when the user is staff, then return to the listing.

    NOTE(review): deletion happens on a plain GET (see urls.py); consider
    requiring POST to avoid accidental or cross-site-triggered deletes.
    """
    edital = Edital.objects.get(id=id)
    if request.user.is_staff:
        edital.delete()
    return redirect(editais)
def alunos(request):
    """List alunos: staff see all; other users see only alunos they created.

    Anonymous users are redirected to the editais listing.
    """
    if request.user.is_authenticated:
        if request.user.is_staff:
            alunos = Aluno.objects.all().order_by('nome')
        else:
            alunos = Aluno.objects.all().filter(created_by=request.user)
        return render(request, 'core/alunos/lista.html', {'alunos':alunos, 'static_url': static_files_url})
    return redirect(editais)
def aluno_create(request):
    """Create an Aluno owned by the current authenticated user."""
    form = AlunoForm(request.POST or None)
    if request.method == 'POST':
        if form.is_valid() and request.user.is_authenticated:
            aluno = form.save()
            # Second save stamps ownership after the initial insert.
            aluno.created_by = request.user
            aluno.save()
            return redirect(alunos)
    return render(request, 'core/alunos/novo.html', {'form': form, 'static_url': static_files_url})
def aluno_edit(request, id):
    """Edit Aluno *id*; any authenticated user with a valid form may save.

    NOTE(review): ownership is not checked here — any logged-in user can
    edit any aluno; verify whether that is intended.
    """
    aluno = Aluno.objects.get(id=id)
    form = AlunoForm(request.POST or None, instance = aluno)
    if form.is_valid() and request.user.is_authenticated:
        form.save()
        return redirect(alunos)
    return render(request, 'core/alunos/novo.html', {'form': form, 'static_url': static_files_url})
def inscricoes(request):
    """List editais as entry points to their enrollment lists."""
    editais = Edital.objects.all().order_by('titulo')
    return render(request, 'core/inscricoes/lista.html', {'editais': editais, 'static_url': static_files_url})
def inscricoes_edital(request, id):
    """List enrollments for Edital *id*, oldest first."""
    edital = Edital.objects.get(id=id)
    inscritos = Inscrito.objects.filter(edital=edital).order_by('inscrito_em')
    return render(request, 'core/inscricoes/edital.html', {'inscritos': inscritos, 'static_url': static_files_url})
def inscricoes_aluno(request):
    """List the current user's own enrollments, oldest first.

    NOTE(review): Aluno.objects.get raises if the user has no aluno record
    (or more than one) — there is no fallback here.
    """
    aluno = Aluno.objects.get(created_by=request.user)
    inscritos = Inscrito.objects.filter(aluno=aluno).order_by('inscrito_em')
    return render(request, 'core/inscricoes/aluno.html', {'inscritos': inscritos, 'static_url': static_files_url})
def inscricoes_create(request, id):
    """Enroll the current user's Aluno in Edital *id*, rejecting duplicates.

    New enrollments start at status 1 ('inscrito').
    """
    edital = Edital.objects.get(id=id)
    aluno = Aluno.objects.get(created_by=request.user)
    # Fix: identity comparison with None (PEP 8) instead of '== None'.
    # NOTE(review): this branch is effectively dead — objects.get raises
    # Aluno.DoesNotExist rather than returning None; kept as a safety net.
    if aluno is None:
        return redirect(editais)
    qtd_inscricoes = Inscrito.objects.filter(aluno=aluno, edital=edital).count()
    if qtd_inscricoes > 0:
        mensagem = "Aluno já inscrito!"
        return editais(request, mensagem)
    inscrito = Inscrito()
    inscrito.aluno = aluno
    inscrito.edital = edital
    inscrito.status = 1
    inscrito.save()
    mensagem = "Inscrição realizada com sucesso!"
    # send_email('SisExtensão - Inscrição em curso', 'Obrigado por se inscrever em um de nosso cursos! Aguarde a confirmação de sua matrícula.', 'sisextensao@example.com', [inscrito.email])
    return editais(request, mensagem)
def inscricoes_matricular(request, id):
inscrito = Inscrito.objects.get(id=id)
edital = inscrito.edital
inscrito.status = 2
inscrito.matriculado_em = timezone.now()
inscrito.save()
# send_email('SisExtensão - Matrícula em curso', 'Parabéns! Você foi selecionado para iniciar seu curso! Entre em contato com o Campus para realizar sua matrícula.', 'sisextensao@example.com', [inscrito.email])
return redirect('inscricoes/edital', id=edital.id)
def inscricoes_cancelar(request, id):
inscrito = Inscrito.objects.get(id=id)
edital = inscrito.edital
inscrito.status = 1
inscrito.matriculado_em = None
inscrito.save()
return redirect('inscricoes/edital', id=edital.id)
def inscricoes_aprovar(request, id):
inscrito = Inscrito.objects.get(id=id)
edital = inscrito.edital
inscrito.status = 3
inscrito.aprovado_em = timezone.now()
inscrito.save()
return redirect('inscricoes/edital', id=edital.id)
def inscricoes_reprovar(request, id):
    """Reject enrolment *id* (status 4) and stamp the rejection time."""
    inscrito = Inscrito.objects.get(id=id)
    inscrito.status = 4
    inscrito.reprovado_em = timezone.now()
    inscrito.save()
    return redirect('inscricoes/edital', id=inscrito.edital.id)
def inscricoes_remove(request, id):
    """Delete enrolment *id* and return to the student's enrolment list."""
    Inscrito.objects.get(id=id).delete()
    return redirect('inscricoes/aluno')
def certificado(request, id):
    """Render the certificate page for enrolment *id*."""
    inscrito = Inscrito.objects.get(id=id)
    # Follow the FK relations directly instead of re-querying Edital and
    # Aluno by their ids — same rows, two fewer hand-written lookups.
    context = {
        'inscrito': inscrito,
        'edital': inscrito.edital,
        'aluno': inscrito.aluno,
        'static_url': static_files_url,
    }
    return render(request, 'core/certificados/certificado.html', context)
def signup(request):
    """Register a User plus linked Aluno profile, then log the user in.

    GET renders empty forms; an invalid POST falls through and re-renders
    the bound forms with their validation errors.
    """
    if request.method == 'POST':
        form_user = UserCreationForm(request.POST)
        form_aluno = AlunoForm(request.POST)
        if form_user.is_valid() and form_aluno.is_valid():
            usuario = form_user.save()
            # commit=False lets us attach the owner before a single save,
            # instead of writing the Aluno row twice as the original did.
            aluno = form_aluno.save(commit=False)
            aluno.created_by = usuario
            aluno.save()
            username = form_user.cleaned_data.get('username')
            raw_password = form_user.cleaned_data.get('password1')
            user = authenticate(username=username, password=raw_password)
            login(request, user)
            return redirect(editais)
    else:
        form_user = UserCreationForm()
        form_aluno = AlunoForm()
    return render(request, 'core/signup.html', {'form_user': form_user, 'form_aluno': form_aluno, 'static_url': static_files_url})
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,251
|
dmodena/projetoextensao
|
refs/heads/main
|
/projetoextensao/settings/dev.py
|
# Development settings: debug enabled, secrets read from the local .env
# file via python-decouple's config().
from projetoextensao.settings.base import *
from decouple import config
DEBUG = True
# Empty list: Django's default localhost-only behaviour while DEBUG is on.
ALLOWED_HOSTS = []
SECRET_KEY = config('SECRET_KEY')
# Base url for static files
STATIC_FILES_URL = config('STATIC_FILES_URL')
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,252
|
dmodena/projetoextensao
|
refs/heads/main
|
/core/utils.py
|
from django.conf import settings
def static_files_url():
    """Return the configured base URL for static assets (settings.STATIC_FILES_URL)."""
    return settings.STATIC_FILES_URL
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,253
|
dmodena/projetoextensao
|
refs/heads/main
|
/projetoextensao/settings/prod.py
|
# Production settings for the Heroku deployment: debug off, secrets and
# the database URL taken from environment variables.
from projetoextensao.settings.base import *
import dj_database_url
DEBUG = False
ALLOWED_HOSTS = ['projetoextensao.herokuapp.com']
# NOTE(review): `os` is not imported in this module — presumably re-exported
# by the base-settings star import; confirm, otherwise this is a NameError.
SECRET_KEY = os.environ['DJANGO_KEY']
# Heroku supplies DATABASE_URL; dj_database_url turns it into a Django config.
DATABASES['default'] = dj_database_url.config()
# Base url for static files
STATIC_FILES_URL = os.environ['STATIC_FILES_URL']
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,254
|
dmodena/projetoextensao
|
refs/heads/main
|
/core/forms.py
|
from django.forms import ModelForm
from core.models import Edital, Aluno
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class EditalForm(ModelForm):
    """ModelForm exposing every editable field of an Edital (course call)."""
    class Meta:
        model = Edital
        fields = ['titulo', 'descricao', 'inicio_inscricoes', 'fim_inscricoes', 'inicio_curso', 'fim_curso', 'vagas', 'pre_requisitos', 'edital_link', 'carga_horaria', 'cidade', 'ativo']
class AlunoForm(ModelForm):
    """ModelForm for a student's personal data; `created_by` is set by the view, not the form."""
    class Meta:
        model = Aluno
        fields = ['nome', 'logradouro', 'numero', 'complemento', 'cep', 'cidade', 'estado', 'rg', 'cpf', 'email', 'telefone', 'nascimento', 'ativo']
|
{"/core/views.py": ["/core/models.py", "/core/forms.py", "/core/utils.py"], "/core/forms.py": ["/core/models.py"]}
|
2,258
|
NuriAmari/website-server
|
refs/heads/master
|
/main.py
|
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
import os
from tornado.web import Application
from chess_ws_server import WSHandler
from auth import AuthHandler
from nuri_format import FormatHandler, LintHandler
# URL routes, matched in order: API endpoints first, the chess websocket last.
routes = [
    (r"^/api/lint", LintHandler),
    (r"^/api/format", FormatHandler),
    (r"^/api/auth", AuthHandler),
    (r"^/ws", WSHandler),
]

application = Application(
    routes,
    cookie_secret=os.environ.get("TORNADO_COOKIE_SECRET"),
    # Ping every 15s so idle websocket connections are kept alive / reaped.
    websocket_ping_interval=15,
)

if __name__ == "__main__":
    server = HTTPServer(application)
    server.listen(8000)
    IOLoop.current().start()
|
{"/main.py": ["/chess_ws_server.py", "/auth.py", "/nuri_format.py"], "/chess_ws_server.py": ["/mail.py"]}
|
2,259
|
NuriAmari/website-server
|
refs/heads/master
|
/nuri_format.py
|
from tornado.web import RequestHandler
from simplejson.src.json import json
from langtools.lexer.exceptions import LexicalError
import json as simplejson
class FormatHandler(RequestHandler):
    """Format a JSON request body, reporting lexer errors as editor annotations.

    NOTE(review): the module's imports are unusual — the name `json` is a
    vendored parser from `simplejson.src.json`, while the stdlib json module
    is bound to the name `simplejson`. Presumably the vendored parser is what
    raises LexicalError; confirm before touching the imports.
    """
    def post(self):
        try:
            # content_str is assigned before json.loads, so it is always
            # bound when the LexicalError handler below references it
            # (decode errors would go to the generic handler instead).
            content_str = self.request.body.decode("utf-8")
            content = json.loads(content_str)
        except LexicalError as le:
            # Lexer error: echo the input back with an ACE-editor-style
            # annotation and marker at the offending character.
            self.write(
                simplejson.dumps(
                    {
                        "content": content_str,
                        "annotations": [
                            {
                                "type": "error",
                                "text": f"Unexpected Character: {le.error_char} at line {le.error_line + 1}, col {le.error_col}",
                                "row": le.error_line,
                                "column": le.error_col,
                            }
                        ],
                        "markers": [
                            {
                                "startRow": le.error_line,
                                "endRow": le.error_line,
                                "startCol": le.error_col,
                                "endCol": le.error_col + 1,
                                "type": "text",
                                "className": "error-marker",
                            }
                        ],
                    }
                )
            )
        except Exception as e:
            # Best-effort fallback: log server-side, return a placeholder body.
            print(e)
            self.write(
                simplejson.dumps({"content": '{"oops": true}', "annotations": []})
            )
        else:
            # Success: round-trip through the vendored dumps to re-format.
            self.write(
                simplejson.dumps({"content": json.dumps(content), "annotations": []})
            )
class LintHandler(RequestHandler):
    """Echo endpoint: return the request body unchanged with no annotations.

    NOTE(review): uses the vendored `json` name (see module imports), not
    the stdlib module bound to `simplejson` — presumably intentional.
    """
    def post(self):
        self.write(
            json.dumps(
                {"content": self.request.body.decode("utf-8"), "annotations": []}
            )
        )
|
{"/main.py": ["/chess_ws_server.py", "/auth.py", "/nuri_format.py"], "/chess_ws_server.py": ["/mail.py"]}
|
2,260
|
NuriAmari/website-server
|
refs/heads/master
|
/auth.py
|
import secrets
import json
import os
import redis
from tornado.web import RequestHandler
r = redis.Redis(host="localhost", port=6379, db=0)
username = os.environ.get("WEBSITE_USERNAME")
password = os.environ.get("WEBSITE_PASSWORD")
class AuthHandler(RequestHandler):
    """Single-account login.

    On a credential match, mint a random token, record it in the Redis
    COOKIES set, and return it to the client as a secure cookie.
    """

    def post(self):
        data = json.loads(self.request.body)
        # .get() keeps a missing field from raising KeyError (HTTP 500);
        # a malformed request now simply fails authentication.
        supplied_user = data.get("username")
        supplied_pass = data.get("password")
        # compare_digest is a constant-time comparison, closing the timing
        # side channel that a plain `==` on credentials leaves open.
        ok = (
            username is not None
            and password is not None
            and isinstance(supplied_user, str)
            and isinstance(supplied_pass, str)
            and secrets.compare_digest(supplied_user, username)
            and secrets.compare_digest(supplied_pass, password)
        )
        if ok:
            auth_cookie = secrets.token_urlsafe()
            r.sadd("COOKIES", auth_cookie)
            self.set_secure_cookie("auth", auth_cookie)
            self.write(json.dumps({"success": True}))
        else:
            self.write(json.dumps({"success": False}))
|
{"/main.py": ["/chess_ws_server.py", "/auth.py", "/nuri_format.py"], "/chess_ws_server.py": ["/mail.py"]}
|
2,261
|
NuriAmari/website-server
|
refs/heads/master
|
/mail.py
|
import os
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail
def send_notification():
    """Email the site owner that a chess move was made (production only).

    No-op unless the MODE environment variable is 'PROD'.  SendGrid errors
    are printed and swallowed — notification is strictly best-effort.
    """
    if os.environ.get('MODE') == 'PROD':
        message = Mail(
            from_email=os.environ.get('WEBSITE_EMAIL'),
            to_emails=os.environ.get('WEBSITE_EMAIL'),
            subject='Your move pal',
            html_content="<p>Someone's made a move</p>"
        )
        try:
            sg = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
            response = sg.send(message)
            print(response.status_code)
            print(response.body)
            print(response.headers)
        except Exception as e:
            # Exception.message was removed in Python 3, so the original
            # `print(e.message)` raised AttributeError inside the handler;
            # printing the exception itself works everywhere.
            print(e)
|
{"/main.py": ["/chess_ws_server.py", "/auth.py", "/nuri_format.py"], "/chess_ws_server.py": ["/mail.py"]}
|
2,262
|
NuriAmari/website-server
|
refs/heads/master
|
/chess_ws_server.py
|
from typing import Set
import json
import threading
import redis
import chess
from tornado.websocket import WebSocketHandler
from mail import send_notification
clients: Set[WebSocketHandler] = set()
r = redis.Redis(host="localhost", port=6379, db=0)
def get_white_wins():
    """Return the white-win counter from Redis as a string, seeding it at 0."""
    if not r.exists("W_WINS"):
        r.set("W_WINS", 0)
    raw = r.get("W_WINS")
    return raw.decode("utf-8")
def get_black_wins():
    """Return the black-win counter from Redis as a string, seeding it at 0."""
    if not r.exists("B_WINS"):
        r.set("B_WINS", 0)
    raw = r.get("B_WINS")
    return raw.decode("utf-8")
def get_most_recent_UCI():
    """Return the most recent move in UCI notation, seeding an empty string."""
    if not r.exists("MR_UCI"):
        r.set("MR_UCI", "")
    raw = r.get("MR_UCI")
    return raw.decode("utf-8")
def white_win():
    # Atomically bump the white-win counter in Redis.
    r.incr("W_WINS")
def black_win():
    # Atomically bump the black-win counter in Redis.
    r.incr("B_WINS")
def reset_board():
    # Overwrite the stored position with the standard starting FEN.
    r.set("FEN", chess.STARTING_FEN)
def get_board_state():
    """Return the current position as a FEN string, seeding a fresh board if absent."""
    fen = r.get("FEN")
    if fen is None:
        reset_board()
        fen = r.get("FEN")
    return fen.decode("utf-8")
def get_move_history():
    """Return every SAN move played so far, oldest first."""
    raw_moves = r.lrange("HISTORY", 0, -1)
    return [entry.decode("utf-8") for entry in raw_moves]
def broadcast(fen):
    """Send *fen* (an already-serialised JSON string) to every connected client."""
    for ws in clients:
        ws.write_message(fen)
class WSHandler(WebSocketHandler):
    """Websocket endpoint for the single shared chess game.

    Board state, win counters and move history live in Redis; every client
    receives a JSON broadcast after each legal move.  Visitors play white;
    black moves require the secure auth cookie minted by AuthHandler.
    """

    def check_origin(self, origin):
        # Allow cross-origin connections: the front end is served elsewhere.
        return True

    def open(self):
        clients.add(self)
        # Push the complete game snapshot to the newly connected client.
        self.write_message(
            json.dumps(
                {
                    "FEN": get_board_state(),
                    "W_WINS": get_white_wins(),
                    "B_WINS": get_black_wins(),
                    "HISTORY": get_move_history(),
                    "MR_UCI": get_most_recent_UCI(),
                }
            )
        )

    def reset(self):
        """Start a new game and broadcast the fresh snapshot to everyone."""
        reset_board()
        r.delete("HISTORY")
        r.set("MR_UCI", "")
        broadcast(
            json.dumps(
                {
                    "FEN": get_board_state(),
                    "W_WINS": get_white_wins(),
                    "B_WINS": get_black_wins(),
                    "HISTORY": get_move_history(),
                    "MR_UCI": get_most_recent_UCI(),
                }
            )
        )

    def on_message(self, message):
        """Apply a SAN move, persist and broadcast it, and settle game-over."""
        board = chess.Board(get_board_state())
        if board.turn == chess.BLACK:
            # Black is the site owner: require a valid auth cookie.
            auth_cookie = self.get_secure_cookie("auth")
            if not auth_cookie or not r.sismember("COOKIES", auth_cookie):
                return
        try:
            # parse_san raises ValueError for malformed/illegal SAN, so it
            # must sit inside the try — the original parsed *before* the
            # try, letting a bad move string crash the handler uncaught.
            next_move = board.parse_san(message)
            board.push_san(message)
        except ValueError:
            # ignore illegal moves
            pass
        else:
            new_board_state = board.fen()
            r.set("FEN", new_board_state)
            r.rpush("HISTORY", message)
            r.set("MR_UCI", next_move.uci())
            broadcast(
                json.dumps(
                    {"FEN": new_board_state, "SAN": message, "MR_UCI": next_move.uci()}
                )
            )
            if board.turn == chess.BLACK:
                # Turn has flipped to black, i.e. white (a visitor) just
                # moved — notify me, someone has made a valid move.
                send_notification()
            if board.is_game_over():
                result = board.result()
                if result == "1-0":
                    white_win()
                elif result == "0-1":
                    black_win()
                else:
                    # draw: credit both sides, matching the original scoring
                    white_win()
                    black_win()
                self.reset()

    def on_close(self):
        # discard() (vs remove()) tolerates a client already gone from the set.
        clients.discard(self)
|
{"/main.py": ["/chess_ws_server.py", "/auth.py", "/nuri_format.py"], "/chess_ws_server.py": ["/mail.py"]}
|
2,304
|
blackdie100/Propine
|
refs/heads/master
|
/HomeworkImplement.py
|
import time

import HomeworkClass as HomeworkImplement

## Environment Setting ##
#############################################################################################
### Choose Platform ###
CheckPlatform = HomeworkImplement.Platform()
CheckPlatform.setup_method_Chrome()

### Get Website URL ###
NickTest = HomeworkImplement.TestList(CheckPlatform)
NickTest.get_site()

## Test Item ##
#############################################################################################
### Test 1 ~ 10 units with one random number ###
# NickTest.ImputNumberRandom_FirstElement()
# NickTest.teardown_method()

### Test 1 ~ 10 units with two random number ###
NickTest.InputNumberRandom_TwoElement()
NickTest.teardown_method()

### Input English / Chinese Character ###
# NickTest.InputCharacter()
# NickTest.teardown_method()

### Open and Close app ###
# i = 1
# while i < 10:
#     NickTest.teardown_method()
#     CheckPlatform = HomeworkImplement.Platform();
#     CheckPlatform.setup_method_Chrome();
#     NickTest = HomeworkImplement.TestList(CheckPlatform);
#     NickTest.get_site()
#     i = i + 1

### Test Load Performance ###
# NickTest.Load_Performance()
# NickTest.teardown_method()

### Test Press Propine Marker Link ###
# NickTest.PressLink()
# NickTest.teardown_method()
|
{"/HomeworkImplement.py": ["/HomeworkClass.py"]}
|
2,305
|
blackdie100/Propine
|
refs/heads/master
|
/HomeworkClass.py
|
import pytest
import time
import json
import random
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
class Platform():
    """Owns the Selenium WebDriver; one setup method per supported browser.

    Each setup method assigns `self.driver` and an empty `self.vars` dict.
    """
    def setup_method_Chrome(self):
        self.driver = webdriver.Chrome()
        self.vars = {}
    def setup_method_Safari(self):
        self.driver = webdriver.Safari()
        self.vars = {}
    def setup_method_Firefox(self):
        self.driver = webdriver.Firefox()
        self.vars = {}
    def setup_method_Ie(self):
        self.driver = webdriver.Ie()
        self.vars = {}
class TestList(Platform):
    """Selenium test scenarios against the calculator demo site.

    Fixes over the original: Python-2-only `print result` statements are
    now `print(result)` (identical output, valid on Python 2 and 3), and
    the word-index chain in InputCharacter checks the largest bound first
    (the original `if i > 2 / elif i > 4 / elif i > 6` never got past k=1
    because the first branch swallowed every i > 2).
    """

    def __init__(self, class_a):
        # Re-use the driver already created by the Platform instance.
        self.driver = class_a.driver

    def teardown_method(self):
        time.sleep(3)
        self.driver.quit()

    def get_site(self):
        self.driver.get("https://vast-dawn-73245.herokuapp.com/")
        self.driver.set_window_size(1212, 777)

    def InputNumberRandom_FirstElement(self):
        """Add a random first number (2..10 digits) to 1, ten times per width."""
        i = 2
        while i <= 10:
            j = 1
            while j <= 10:
                self.driver.find_element(By.NAME, "firstNumber").click()
                self.driver.find_element(By.NAME, "firstNumber").send_keys(int(random.random()*10**i))
                self.driver.find_element(By.NAME, "secondNumber").click()
                self.driver.find_element(By.NAME, "secondNumber").send_keys("1")
                self.driver.find_element(By.CSS_SELECTOR, ".btn").click()
                # NOTE(review): find_element_by_xpath is removed in Selenium 4;
                # kept as-is assuming the pinned selenium version supports it.
                result = int(self.driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div').text)
                print(result)
                time.sleep(1)
                j = j + 1
            i = i + 1

    def InputNumberRandom_TwoElement(self):
        """Add two random numbers (2..10 digits each), ten times per width."""
        i = 2
        while i <= 10:
            j = 1
            while j <= 10:
                self.driver.find_element(By.NAME, "firstNumber").click()
                self.driver.find_element(By.NAME, "firstNumber").send_keys(int(random.random()*10**i))
                self.driver.find_element(By.NAME, "secondNumber").click()
                self.driver.find_element(By.NAME, "secondNumber").send_keys(int(random.random()*10**i))
                self.driver.find_element(By.CSS_SELECTOR, ".btn").click()
                result = int(self.driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div').text)
                print(result)
                time.sleep(1)
                j = j + 1
            i = i + 1

    def InputCharacter(self):
        """Feed non-numeric second operands; cycle through the Word list as i grows."""
        i = 2
        k = 0
        while i <= 10:
            j = 1
            Word = ["Hello", "ChineseWord", "JapaneseWord", "#"]
            while j <= 10:
                self.driver.find_element(By.NAME, "firstNumber").click()
                self.driver.find_element(By.NAME, "firstNumber").send_keys(int(random.random()*10**i))
                self.driver.find_element(By.NAME, "secondNumber").click()
                self.driver.find_element(By.NAME, "secondNumber").send_keys(Word[k])
                self.driver.find_element(By.CSS_SELECTOR, ".btn").click()
                result = str(self.driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div').text)
                print(result)
                time.sleep(1)
                j = j + 1
            # Check the largest threshold first so k actually advances
            # (the original if/elif order made k stick at 1 for all i > 2).
            if i > 6:
                k = 3
            elif i > 4:
                k = 2
            elif i > 2:
                k = 1
            i = i + 1

    def Load_Performance(self):
        """Print back-end and front-end page-load timings from the Navigation Timing API."""
        navigationStart = int(self.driver.execute_script("return window.performance.timing.navigationStart"))
        responseStart = int(self.driver.execute_script("return window.performance.timing.responseStart"))
        domComplete = int(self.driver.execute_script("return window.performance.timing.domComplete"))
        backendPerformance_calc = responseStart - navigationStart
        frontendPerformance_calc = domComplete - responseStart
        print("Back End: %s ms" % backendPerformance_calc)
        print("Front End: %s ms" % frontendPerformance_calc)

    def PressLink(self):
        """Click the Propine marker link 99 times in a row."""
        i = 1
        while i < 100:
            self.driver.find_element(By.XPATH, "/html/body/div[1]/div/a/img").click()
            i = i + 1
|
{"/HomeworkImplement.py": ["/HomeworkClass.py"]}
|
2,312
|
LiuyangKyotoU/ChemBias_Causal
|
refs/heads/main
|
/preprocessor.py
|
import torch
import torch.nn.functional as F
from torch_geometric.datasets import QM9, ZINC
from torch_geometric.datasets import MoleculeNet
class Preprocessor:
    """Builds normalised molecular datasets plus biased/unbiased splits.

    Each private loader returns (dataset, target std, node-feature dim,
    edge-feature dim); targets are z-score normalised in place.
    """
    def _qm9(self, target):
        # QM9Transformer later slices y down to the requested target column;
        # normalisation here is applied per column over the whole dataset.
        dataset = QM9('data/QM9', transform=QM9Transformer(target))
        mean = dataset.data.y.mean(dim=0, keepdim=True)
        std = dataset.data.y.std(dim=0, keepdim=True)
        dataset.data.y = (dataset.data.y - mean) / std
        # 11 node features, 4 edge features for QM9.
        return dataset, std[:, target].item(), 11, 4
    def _zinc(self):
        dataset = ZINC('data/ZINC', transform=ZINCTransformer())
        mean = dataset.data.y.mean()
        std = dataset.data.y.std()
        dataset.data.y = (dataset.data.y - mean) / std
        # ZINC node types are one-hot encoded to 28 dims by ZINCTransformer.
        return dataset, std.item(), 28, 4
    def _molenet(self, task):
        dataset = MoleculeNet('data/MolNet', task, transform=MolNetTransformer())
        mean = dataset.data.y.mean()
        std = dataset.data.y.std()
        dataset.data.y = (dataset.data.y - mean) / std
        return dataset, std.item(), 9, 3
    def _split(self, dataset, scenario):
        # The sampling file holds precomputed index tensors for the scenario.
        # Returned order is (test, train, val) — callers depend on this.
        dic = torch.load('sampling/' + scenario + '.pt')
        return dataset[dic['test_ids']], dataset[dic['train_ids']], dataset[dic['val_ids']]
    def get_dataset(self, task, scenario):
        """Return ((test, train, val), target std, node dim, edge dim) for *task*.

        NOTE(review): an unrecognised task name falls through every branch and
        raises NameError on `dataset` below — presumably callers only pass
        known tasks; consider an explicit ValueError.
        """
        if task[:3] == 'qm9':
            # Task names look like 'qm9_<target-index>'.
            target = int(task.split('_')[1])
            dataset, std, i_dim, e_dim = self._qm9(target)
        elif task == 'zinc':
            dataset, std, i_dim, e_dim = self._zinc()
        elif task in ['esol', 'lipo', 'freesolv']:
            dataset, std, i_dim, e_dim = self._molenet(task)
        test_dataset, train_dataset, val_dataset = self._split(dataset, scenario)
        return (test_dataset, train_dataset, val_dataset), std, i_dim, e_dim
class QM9Transformer(object):
    """Per-sample transform keeping only the selected regression target column."""

    def __init__(self, target):
        self.target = target

    def __call__(self, data):
        # Keep just the chosen column of the per-sample target matrix.
        data.y = data.y[:, self.target]
        return data
class ZINCTransformer(object):
    """One-hot encodes ZINC's categorical node/edge features as float32."""

    def __call__(self, data):
        node_onehot = F.one_hot(data.x.view(-1), num_classes=28)
        data.x = node_onehot.to(torch.float32)
        edge_onehot = F.one_hot(data.edge_attr, num_classes=4)
        data.edge_attr = edge_onehot.to(torch.float32)
        return data
class MolNetTransformer(object):
    """Casts MoleculeNet features to float32 and keeps the first target column."""

    def __call__(self, data):
        # The three updates are independent; order does not matter.
        data.y = data.y[:, 0]
        data.x = data.x.to(torch.float32)
        data.edge_attr = data.edge_attr.to(torch.float32)
        return data
|
{"/trainers.py": ["/models.py", "/preprocessor.py", "/evaluator.py"]}
|
2,313
|
LiuyangKyotoU/ChemBias_Causal
|
refs/heads/main
|
/trainers.py
|
import torch
import copy
import numpy as np
import torch.nn.functional as F
from torch_geometric.nn import global_mean_pool
from torch_geometric.data import DataLoader
from geomloss import SamplesLoss
import models
from preprocessor import Preprocessor
from evaluator import Evaluator
class Trainer:
    """Base trainer: loads the dataset split, builds loaders and error
    functions, and persists results.  Subclasses implement _train/_test/run.
    """
    def __init__(self, task, scenario, **kw):
        self.task = task
        self.scenario = scenario
        self.name = task + '==>' + scenario
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        # Hyper-parameters; kw.get returns None when absent — subclasses
        # assert on the ones they require.
        self.h_dim = kw.get('h_dim')
        self.times = kw.get('times')
        self.batch_size = kw.get('batch_size')
        self.lr = kw.get('lr')
        self.epoch = kw.get('epoch')
        self.datasets, self.std, self.i_dim, self.e_dim = Preprocessor().get_dataset(task, scenario)
        self.test_loader, self.train_loader, self.val_loader = self._create_loaders()
        self.batch_error_func, self.all_error_func = Evaluator().get_error_func(task)
    def _train(self, *args):
        raise NotImplementedError
    def _test(self, *args):
        raise NotImplementedError
    def run(self):
        raise NotImplementedError
    def _save(self, test_error, model_state_dic):
        # Append the scalar test error to the shared log, then persist weights.
        with open('results/test_errors.txt', 'a') as f:
            f.write(self.name + '\t' + str(test_error) + '\n')
        torch.save(model_state_dic, 'results/' + self.name + '.pt')
        print('Result of {} saved!'.format(self.name))
    def _create_loaders(self, ):
        # Dataset tuple order is (test, train, val) — matches Preprocessor._split.
        test_dataset, train_dataset, val_dataset = self.datasets
        test_loader = DataLoader(test_dataset, batch_size=self.batch_size, shuffle=False)
        train_loader = DataLoader(train_dataset, batch_size=self.batch_size, shuffle=True)
        val_loader = DataLoader(val_dataset, batch_size=self.batch_size, shuffle=False)
        print('Load {} successfully!'.format(self.name))
        return test_loader, train_loader, val_loader
class BaselineTrainer(Trainer):
    """Plain ERM baseline: unweighted MSE on the (biased) training split."""
    def __init__(self, task, scenario, **kw):
        super(BaselineTrainer, self).__init__(task, scenario, **kw)
        self.name = 'Baseline' + str(self.times) + '==>' + self.name
    def _train(self, model, optimizer):
        """One MSE epoch; returns the dataset-weighted mean training loss."""
        model.train()
        loss_all = 0
        for batch in self.train_loader:
            batch = batch.to(self.device)
            optimizer.zero_grad()
            loss = F.mse_loss(model(batch), batch.y)
            loss.backward()
            # Weight by graphs-per-batch so the final division gives a
            # true per-sample mean despite a ragged last batch.
            loss_all += loss.item() * batch.num_graphs
            optimizer.step()
        return loss_all / len(self.train_loader.dataset)
    def _test(self, model, loader):
        """Mean task error over *loader* using the task's error functions."""
        model.eval()
        error = 0
        for batch in loader:
            batch = batch.to(self.device)
            with torch.no_grad():
                pred = model(batch)
            error += self.batch_error_func(pred, batch.y, self.std)
        return self.all_error_func(error / len(loader.dataset))
    def run(self):
        """Train with early model selection on validation error, then save."""
        model = models.BaselineRegressNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        optimizer = torch.optim.Adam(model.parameters(), self.lr)
        scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.7, patience=5,
                                                               min_lr=0.00001)
        best_val_error = float('inf')
        best_model_dic = None
        for e in range(self.epoch):
            train_loss = self._train(model, optimizer)
            val_error = self._test(model, self.val_loader)
            scheduler.step(val_error)
            if val_error <= best_val_error:
                best_val_error = val_error
                best_model_dic = copy.deepcopy(model.state_dict())
            print(e, train_loss, val_error)
        # Reload the best checkpoint into a fresh model before the final test.
        model = models.BaselineRegressNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        model.load_state_dict(best_model_dic)
        test_error = self._test(model, self.test_loader)
        self._save(test_error, best_model_dic)
class IpsTrainer(Trainer):
    """Inverse-propensity-score training.

    First fits a bias/unbias domain classifier, then trains the regressor
    with per-sample IPS weights derived from the classifier's probability
    of the sample being "biased".

    Fix over the original: `iterator.next()` is the Python-2 iterator
    protocol; modern DataLoader iterators only implement `__next__`, so
    every `.next()` call is replaced with the builtin `next()` (works on
    both protocols).
    """
    def __init__(self, task, scenario, **kw):
        super(IpsTrainer, self).__init__(task, scenario, **kw)
        self.first_train_epoch = kw.get('first_train_epoch')
        assert self.first_train_epoch is not None
        self.name = 'IPS' + str(self.times) + '==>' + self.name
    def _train(self, model, optimizer, classifier):
        """One weighted-MSE epoch over the (biased) training split."""
        classifier.eval()
        model.train()
        loss_all = 0
        for batch in self.train_loader:
            batch = batch.to(self.device)
            optimizer.zero_grad()
            loss = F.mse_loss(model(batch), batch.y, reduction='none')
            with torch.no_grad():
                # w = 1 / (2 * p(biased)); the classifier emits log-probs
                # and column 1 is the "biased" class (see _first_train labels).
                weights = 1 / (torch.exp(classifier(batch)[:, 1]) * 2)
            loss = loss * weights
            loss = loss.mean()
            loss.backward()
            loss_all += loss.item() * batch.num_graphs
            optimizer.step()
        return loss_all / len(self.train_loader.dataset)
    def _test(self, model, loader):
        """Mean task error over *loader* using the task's error functions."""
        model.eval()
        error = 0
        for batch in loader:
            batch = batch.to(self.device)
            with torch.no_grad():
                pred = model(batch)
            error += self.batch_error_func(pred, batch.y, self.std)
        return self.all_error_func(error / len(loader.dataset))
    def _first_train(self, model, optimizer, scheduler):
        """Fit the bias(1)/unbias(0) classifier; return the best state dict by val accuracy."""
        unbias_dataset, bias_dataset, _ = self.datasets
        n = len(bias_dataset)
        val_size = int(n * 0.3)
        ids = torch.randperm(n)
        val_bias_dataset = bias_dataset[ids[:val_size]]
        val_unbias_dataset = unbias_dataset[ids[:val_size]]
        train_bias_dataset = bias_dataset[ids[val_size:]]
        train_unbias_dataset = unbias_dataset[ids[val_size:]]
        val_bias_loader = DataLoader(val_bias_dataset, batch_size=self.batch_size, shuffle=False)
        val_unbias_loader = DataLoader(val_unbias_dataset, batch_size=self.batch_size, shuffle=False)
        train_bias_loader = DataLoader(train_bias_dataset, batch_size=self.batch_size, shuffle=True)
        train_unbias_loader = DataLoader(train_unbias_dataset, batch_size=self.batch_size, shuffle=True)
        best_val_acc = -float('inf')
        best_model_dict = None
        for e in range(self.first_train_epoch):
            model.train()
            train_bias_iter = iter(train_bias_loader)
            train_unbias_iter = iter(train_unbias_loader)
            loss_all = 0
            for _ in range(len(train_bias_iter)):
                # builtin next() instead of the Py2-only .next() method
                bias_batch = next(train_bias_iter).to(self.device)
                unbias_batch = next(train_unbias_iter).to(self.device)
                optimizer.zero_grad()
                loss = F.nll_loss(
                    torch.cat((model(bias_batch), model(unbias_batch))),
                    torch.cat((torch.ones(bias_batch.num_graphs),
                               torch.zeros(unbias_batch.num_graphs))).to(torch.int64).to(self.device)
                )
                loss.backward()
                loss_all += loss.item() * (bias_batch.num_graphs + unbias_batch.num_graphs)
                optimizer.step()
            loss_all = loss_all / (len(train_bias_loader.dataset) + len(train_unbias_loader.dataset))
            model.eval()
            correct = 0
            val_bias_iter = iter(val_bias_loader)
            val_unbias_iter = iter(val_unbias_loader)
            for _ in range(len(val_bias_iter)):
                bias_batch = next(val_bias_iter).to(self.device)
                unbias_batch = next(val_unbias_iter).to(self.device)
                with torch.no_grad():
                    pred = torch.cat((model(bias_batch), model(unbias_batch))).max(1)[1]
                correct += pred.eq(
                    torch.cat((torch.ones(bias_batch.num_graphs),
                               torch.zeros(unbias_batch.num_graphs))).to(torch.int64).to(self.device)
                ).sum().item()
            val_acc = correct / (len(val_bias_loader.dataset) + len(val_unbias_loader.dataset))
            scheduler.step()
            if val_acc >= best_val_acc:
                best_val_acc = val_acc
                best_model_dict = copy.deepcopy(model.state_dict())
            print(e, loss_all, val_acc)
        return best_model_dict
    def run(self):
        """Two-stage run: fit the domain classifier, then the weighted regressor."""
        # first step
        classifier = models.IpsClassifyNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        optimizer = torch.optim.Adam(classifier.parameters(), self.lr)
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, self.first_train_epoch // 2, gamma=0.1)
        classifier_best_stat_dic = self._first_train(classifier, optimizer, scheduler)
        classifier = models.IpsClassifyNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        classifier.load_state_dict(classifier_best_stat_dic)
        # second step
        model = models.BaselineRegressNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        optimizer = torch.optim.Adam(model.parameters(), self.lr)
        scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.7, patience=5,
                                                               min_lr=0.00001)
        best_val_error = float('inf')
        best_model_dic = None
        for e in range(self.epoch):
            train_loss = self._train(model, optimizer, classifier)
            val_error = self._test(model, self.val_loader)
            scheduler.step(val_error)
            if val_error <= best_val_error:
                best_val_error = val_error
                best_model_dic = copy.deepcopy(model.state_dict())
            print(e, train_loss, val_error)
        model = models.BaselineRegressNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        model.load_state_dict(best_model_dic)
        test_error = self._test(model, self.test_loader)
        self._save(test_error, best_model_dic)
class DirlTrainer(Trainer):
    """Domain-adversarial (DANN-style) training: a gradient-reversed domain
    head pushes features to be indistinguishable between the biased train
    split (source, label 0) and the unbiased test split (target, label 1).

    Fix over the original: `iterator.next()` (Python-2 protocol) replaced
    with the builtin `next()`, which works with modern DataLoader iterators.
    """
    def __init__(self, task, scenario, **kw):
        super(DirlTrainer, self).__init__(task, scenario, **kw)
        self.name = 'Dirl' + '==>' + self.name
    def _train(self, model, optimizer, e):
        """One adversarial epoch; *e* ramps the reversal strength alpha."""
        model.train()
        source_iter = iter(self.train_loader)  # bias_iter
        target_iter = iter(self.test_loader)  # unbias_iter
        loss_all = 0
        for i in range(len(source_iter)):
            # alpha ramps smoothly from 0 toward 1 over the whole schedule.
            p = (i + e * len(source_iter)) / self.epoch / len(source_iter)
            alpha = 2 / (1 + np.exp(-10 * p)) - 1
            optimizer.zero_grad()
            # {source / bias / train} domain: task loss + domain loss (label 0)
            batch = next(source_iter).to(self.device)
            label_out, domain_out = model(batch, alpha)
            loss = F.mse_loss(label_out, batch.y)
            loss += F.nll_loss(domain_out, torch.zeros(batch.num_graphs).to(torch.int64).to(self.device))
            # {target / unbias / test} domain: domain loss only (label 1)
            batch = next(target_iter).to(self.device)
            _, domain_out = model(batch, alpha)
            loss += F.nll_loss(domain_out, torch.ones(batch.num_graphs).to(torch.int64).to(self.device))
            loss.backward()
            loss_all += loss.item() * batch.num_graphs
            optimizer.step()
        return loss_all / len(self.train_loader.dataset)
    def _test(self, model, loader):
        """Mean task error over *loader*; alpha=0 disables gradient reversal."""
        model.eval()
        error = 0
        for batch in loader:
            batch = batch.to(self.device)
            with torch.no_grad():
                label_out, _ = model(batch, 0)
            error += self.batch_error_func(label_out, batch.y, self.std)
        return self.all_error_func(error / len(loader.dataset))
    def run(self):
        """Train with validation-based model selection, then save the test error."""
        model = models.DirlNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        optimizer = torch.optim.Adam(model.parameters(), self.lr)
        scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.7, patience=5,
                                                               min_lr=0.00001)
        best_val_error = float('inf')
        best_model_dic = None
        for e in range(self.epoch):
            train_loss = self._train(model, optimizer, e)
            val_error = self._test(model, self.val_loader)
            scheduler.step(val_error)
            if val_error <= best_val_error:
                best_val_error = val_error
                best_model_dic = copy.deepcopy(model.state_dict())
            print(e, train_loss, val_error)
        # Reload the best checkpoint into a fresh model before the final test.
        model = models.DirlNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        model.load_state_dict(best_model_dic)
        test_error = self._test(model, self.test_loader)
        self._save(test_error, best_model_dic)
class CfrIswTrainer(Trainer):
    """CFR with importance sample weighting: representation net R, label
    head L and domain classifier D share one optimizer; a Sinkhorn
    discrepancy between pooled bias/unbias representations regularises R.

    Fix over the original: `iterator.next()` (Python-2 protocol) replaced
    with the builtin `next()`, which works with modern DataLoader iterators.
    """
    def __init__(self, task, scenario, **kw):
        super(CfrIswTrainer, self).__init__(task, scenario, **kw)
        self.alpha = kw.get('alpha')
        assert self.alpha is not None
        self.name = 'CfrIsw' + str(self.alpha) + '==>' + self.name
    def _train(self, R, L, D, disc_func, optimizer):
        """One epoch: weighted regression + discrepancy step, then a D step."""
        R.train()
        L.train()
        D.train()
        bias_iter = iter(self.train_loader)
        unbias_iter = iter(self.test_loader)
        loss_rl, loss_d = 0, 0
        for _ in range(len(bias_iter)):
            optimizer.zero_grad()
            bias_batch = next(bias_iter).to(self.device)
            unbias_batch = next(unbias_iter).to(self.device)
            bias_repr = R(bias_batch)
            unbias_repr = R(unbias_batch)
            # Sinkhorn discrepancy between pooled graph representations.
            disc_loss = disc_func(global_mean_pool(bias_repr, bias_batch.batch),
                                  global_mean_pool(unbias_repr, unbias_batch.batch))
            with torch.no_grad():
                # IPS weights from D's log-prob of the "biased" class;
                # .data detaches the representation from R's graph.
                weights = 1 / (2 * torch.exp(D(bias_batch, bias_repr.data)[:, 1]))
            label_loss = F.mse_loss(L(bias_batch, bias_repr), bias_batch.y, reduction='none')
            label_loss = (label_loss * weights).mean()
            loss = self.alpha * disc_loss + label_loss
            loss.backward()
            loss_rl += loss.item() * bias_batch.num_graphs
            optimizer.step()
            # Separate step for D on detached representations (labels: bias=1).
            optimizer.zero_grad()
            loss = F.nll_loss(
                torch.cat((D(bias_batch, bias_repr.data), D(unbias_batch, unbias_repr.data))),
                torch.cat((torch.ones(bias_batch.num_graphs),
                           torch.zeros(unbias_batch.num_graphs))).to(torch.int64).to(self.device)
            )
            loss.backward()
            loss_d += loss.item() * (bias_batch.num_graphs + unbias_batch.num_graphs)
            optimizer.step()
        loss_rl = loss_rl / len(self.train_loader.dataset)
        loss_d = loss_d / (len(self.train_loader.dataset) + len(self.test_loader.dataset))
        return loss_rl, loss_d
    def _test(self, R, L, loader):
        """Mean task error of L(R(x)) over *loader*."""
        R.eval()
        L.eval()
        error = 0
        for batch in loader:
            batch = batch.to(self.device)
            with torch.no_grad():
                pred = L(batch, R(batch))
            error += self.batch_error_func(pred, batch.y, self.std)
        return self.all_error_func(error / len(loader.dataset))
    def run(self):
        """Joint training with validation-based selection over the (R, L, D) triple."""
        R = models.CausalFeatureNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        L = models.CausalRegressNet(self.h_dim, self.e_dim, self.times).to(self.device)
        D = models.CausalClassifyNet(self.h_dim, self.e_dim, self.times).to(self.device)
        optimizer = torch.optim.Adam(list(R.parameters()) + list(L.parameters()) + list(D.parameters()), lr=self.lr)
        # There will not be val_error for D training part, thus we use two optimizer.
        scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.7, patience=5,
                                                               min_lr=0.00001)
        best_val_error = float('inf')
        best_model_dic = None
        disc_func = SamplesLoss('sinkhorn')
        for e in range(self.epoch):
            train_loss_lr, train_loss_d = self._train(R, L, D, disc_func, optimizer)
            val_error = self._test(R, L, self.val_loader)
            scheduler.step(val_error)
            if val_error <= best_val_error:
                best_val_error = val_error
                best_model_dic = (copy.deepcopy(R.state_dict()),
                                  copy.deepcopy(L.state_dict()),
                                  copy.deepcopy(D.state_dict()))
            print(e, train_loss_lr, train_loss_d, val_error)
        # Reload the best triple into fresh models before the final test.
        R = models.CausalFeatureNet(self.i_dim, self.h_dim, self.e_dim, self.times).to(self.device)
        L = models.CausalRegressNet(self.h_dim, self.e_dim, self.times).to(self.device)
        D = models.CausalClassifyNet(self.h_dim, self.e_dim, self.times).to(self.device)
        R.load_state_dict(best_model_dic[0])
        L.load_state_dict(best_model_dic[1])
        D.load_state_dict(best_model_dic[2])
        test_error = self._test(R, L, self.test_loader)
        self._save(test_error, best_model_dic)
|
{"/trainers.py": ["/models.py", "/preprocessor.py", "/evaluator.py"]}
|
2,314
|
LiuyangKyotoU/ChemBias_Causal
|
refs/heads/main
|
/models.py
|
import torch
import torch.nn.functional as F
from torch.nn import Sequential, Linear, ReLU, GRU
from torch_geometric.nn import NNConv, global_mean_pool
class ConvLayer(torch.nn.Module):
    """`times` rounds of NNConv message passing with a GRU state update."""

    def __init__(self, h_dim, e_dim, times=3):
        super(ConvLayer, self).__init__()
        # Edge network mapping edge attributes to per-edge weight matrices.
        edge_net = Sequential(Linear(e_dim, h_dim), ReLU(), Linear(h_dim, h_dim * h_dim))
        self.conv = NNConv(h_dim, h_dim, edge_net, aggr='mean')
        self.gru = GRU(h_dim, h_dim)
        self.times = times

    def forward(self, batch, out):
        hidden = out.unsqueeze(0)
        for _ in range(self.times):
            msg = F.relu(self.conv(out, batch.edge_index, batch.edge_attr))
            out, hidden = self.gru(msg.unsqueeze(0), hidden)
            out = out.squeeze(0)
        return out
class BaselineRegressNet(torch.nn.Module):
    """Graph regression: embed -> message passing -> mean-pool -> MLP head."""

    def __init__(self, i_dim, h_dim, e_dim, times):
        super(BaselineRegressNet, self).__init__()
        self.lin0 = Sequential(Linear(i_dim, h_dim), ReLU())
        self.conv_layer = ConvLayer(h_dim, e_dim, times)
        self.lin1 = Sequential(Linear(h_dim, h_dim), ReLU(), Linear(h_dim, 1))

    def forward(self, batch):
        node_repr = self.conv_layer(batch, self.lin0(batch.x))
        graph_repr = global_mean_pool(node_repr, batch.batch)
        return self.lin1(graph_repr).view(-1)
class IpsClassifyNet(torch.nn.Module):
    """Graph-level 2-class classifier returning log-probabilities."""

    def __init__(self, i_dim, h_dim, e_dim, times):
        super(IpsClassifyNet, self).__init__()
        self.lin0 = Sequential(Linear(i_dim, h_dim), ReLU())
        self.conv_layer = ConvLayer(h_dim, e_dim, times)
        self.lin1 = Sequential(Linear(h_dim, h_dim), ReLU(), Linear(h_dim, 2))

    def forward(self, batch):
        node_repr = self.conv_layer(batch, self.lin0(batch.x))
        logits = self.lin1(global_mean_pool(node_repr, batch.batch))
        return F.log_softmax(logits, dim=1)
class ReverseLayerF(torch.autograd.Function):
    """Gradient-reversal layer: identity forward, gradient scaled by -alpha."""

    @staticmethod
    def forward(ctx, x, alpha):
        ctx.alpha = alpha
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # Flip the gradient's sign and scale; alpha itself gets no gradient.
        return grad_output.neg() * ctx.alpha, None
class DirlNet(torch.nn.Module):
    """Domain-invariant representation learner (DANN-style).

    A shared feature extractor feeds a regression head directly and a
    two-class domain head through a gradient-reversal layer.
    """

    def __init__(self, i_dim, h_dim, e_dim, times):
        super().__init__()
        self.lin0 = Sequential(Linear(i_dim, h_dim), ReLU())
        self.feature_conv_layer = ConvLayer(h_dim, e_dim, times)
        self.label_conv_layer = ConvLayer(h_dim, e_dim, times)
        self.lin1 = Sequential(Linear(h_dim, h_dim), ReLU(), Linear(h_dim, 1))
        self.domain_conv_layer = ConvLayer(h_dim, e_dim, times)
        self.lin2 = Sequential(Linear(h_dim, h_dim), ReLU(), Linear(h_dim, 2))

    def forward(self, batch, alpha):
        features = self.feature_conv_layer(batch, self.lin0(batch.x))
        # Gradients flowing back from the domain head are reversed and scaled.
        reversed_features = ReverseLayerF.apply(features, alpha)
        label_out = self.lin1(global_mean_pool(self.label_conv_layer(batch, features), batch.batch))
        domain_out = self.lin2(global_mean_pool(self.domain_conv_layer(batch, reversed_features), batch.batch))
        return label_out.view(-1), F.log_softmax(domain_out, dim=1)
class CausalFeatureNet(torch.nn.Module):
    """Shared feature extractor: node embedding followed by message passing."""

    def __init__(self, i_dim, h_dim, e_dim, times):
        super().__init__()
        self.lin0 = Sequential(Linear(i_dim, h_dim), ReLU())
        self.conv_layer = ConvLayer(h_dim, e_dim, times)

    def forward(self, batch):
        embedded = self.lin0(batch.x)
        return self.conv_layer(batch, embedded)
class CausalRegressNet(torch.nn.Module):
    """Regression head on top of externally computed node features."""

    def __init__(self, h_dim, e_dim, times):
        super().__init__()
        self.conv_layer = ConvLayer(h_dim, e_dim, times)
        self.lin1 = Sequential(Linear(h_dim, h_dim), ReLU(), Linear(h_dim, 1))

    def forward(self, batch, out):
        pooled = global_mean_pool(self.conv_layer(batch, out), batch.batch)
        return self.lin1(pooled).view(-1)
class CausalClassifyNet(torch.nn.Module):
    """Two-class head on top of externally computed node features; returns log-probs."""

    def __init__(self, h_dim, e_dim, times):
        super().__init__()
        self.conv_layer = ConvLayer(h_dim, e_dim, times)
        self.lin1 = Sequential(Linear(h_dim, h_dim), ReLU(), Linear(h_dim, 2))

    def forward(self, batch, out):
        pooled = global_mean_pool(self.conv_layer(batch, out), batch.batch)
        return F.log_softmax(self.lin1(pooled), dim=1)
if __name__ == '__main__':
    # Smoke test: run each network on one small QM9 batch.
    from torch_geometric.datasets import QM9
    from torch_geometric.data import DataLoader
    dataset = QM9('data/QM9')
    loader = DataLoader(dataset, batch_size=6)
    # Fix: `iter(loader).next()` is the Python 2 iterator protocol and raises
    # AttributeError on Python 3; use the next() builtin instead.
    data = next(iter(loader))
    model = BaselineRegressNet(11, 32, 4, 6)
    print(model(data))
    model = DirlNet(11, 32, 4, 3)
    print(model(data, 1))
    R = CausalFeatureNet(11, 32, 4, 3)
    D = CausalClassifyNet(32, 4, 3)
    L = CausalRegressNet(32, 4, 3)
    print(R(data))
    print(D(data, R(data)), L(data, R(data)))
|
{"/trainers.py": ["/models.py", "/preprocessor.py", "/evaluator.py"]}
|
2,315
|
LiuyangKyotoU/ChemBias_Causal
|
refs/heads/main
|
/evaluator.py
|
import math
class Evaluator:
    """Maps a task name to an (accumulate, finalize) pair of error functions.

    ``accumulate(x, y, std)`` returns one batch's summed error contribution on
    the de-standardized scale; ``finalize`` converts the accumulated total
    (averaged by the caller) into the reported metric.
    """

    def _mae(self, x, y, std):
        # Summed absolute error after undoing standardization by *std*.
        return (x * std - y * std).abs().sum().item()

    def _rmse(self, x, y, std):
        # Summed squared error; _sqrt finalizes the caller's average into RMSE.
        return ((x * std - y * std) ** 2).sum().item()

    def _keep(self, x):
        # Identity finalizer (MAE needs no post-processing).
        return x

    def _sqrt(self, x):
        return math.sqrt(x)

    def _mae_funcs(self):
        return self._mae, self._keep

    def _rmse_funcs(self):
        return self._rmse, self._sqrt

    def get_error_func(self, task):
        """Return (accumulate, finalize) for *task*.

        Raises:
            ValueError: for an unrecognized task name. (Previously an unknown
            task silently returned None, deferring the failure to the caller.)
        """
        if task.startswith('qm9') or task == 'zinc':
            return self._mae_funcs()
        if task in ('esol', 'lipo', 'freesolv'):
            return self._rmse_funcs()
        raise ValueError(f'unknown task: {task!r}')
|
{"/trainers.py": ["/models.py", "/preprocessor.py", "/evaluator.py"]}
|
2,316
|
LiuyangKyotoU/ChemBias_Causal
|
refs/heads/main
|
/samplers.py
|
import torch
import numpy as np
import matplotlib.pyplot as plt
from torch_geometric.datasets import QM9, ZINC, MoleculeNet
import itertools
class Sampler(object):
    """Builds deliberately biased train/val splits from a molecular dataset.

    Subclasses supply three per-molecule scalars (f1/f2/f3); ``sampling``
    weights molecules by a sigmoid of the selected factors so train/val are
    distribution-shifted relative to the uniformly drawn test split.
    """
    def __init__(self, dataset, f1_alpha, f2_alpha, f3_alpha):
        self.n = len(dataset)
        # One scalar per molecule for each bias factor (computed by the subclass).
        self.f1_tensor = self._get_mols_f1(dataset)
        self.f2_tensor = self._get_mols_f2(dataset)
        self.f3_tensor = self._get_mols_f3(dataset)
        # Sigmoid steepness per factor; the sign sets the bias direction.
        self.f1_alpha = f1_alpha
        self.f2_alpha = f2_alpha
        self.f3_alpha = f3_alpha
        # Filename prefix for saved splits, e.g. "QM9Sampler:".
        self.name = self.__class__.__name__ + ':'
    def _get_mols_f1(self, dataset):
        # Subclass hook: one scalar per molecule for factor f1.
        raise NotImplementedError
    def _get_mols_f2(self, dataset):
        # Subclass hook: one scalar per molecule for factor f2.
        raise NotImplementedError
    def _get_mols_f3(self, dataset):
        # Subclass hook: one scalar per molecule for factor f3.
        raise NotImplementedError
    def _sigmoid(self, x, a, b):
        # Logistic curve centered at b; steepness and direction set by a.
        return 1 / (1 + torch.exp(-a * (x - b)))
    def _save(self, test_ids, train_ids, val_ids, factors, trial):
        # Persists the split as sampling/<Class>:<f..+f..>_<trial>.pt
        dic = {'test_ids': test_ids, 'train_ids': train_ids, 'val_ids': val_ids}
        torch.save(dic, 'sampling/' + self.name + '+'.join(factors) + '_' + str(trial) + '.pt')
    def run_all_sampling(self):
        # Every subset of {f1, f2, f3} (including the empty, unbiased one),
        # 10 random trials each.
        factors = ['f1', 'f2', 'f3']
        for i in range(len(factors) + 1):
            for subset in itertools.combinations(factors, i):
                for trial in range(10):
                    self.sampling(subset, trial)
    def sampling(self, factors, trial):
        """Draw one split: 10% uniform test, then biased train/val from the rest."""
        ids = torch.randperm(self.n)
        test_ids = ids[:self.n // 10]
        other_ids = ids[self.n // 10:]
        scores = torch.zeros(other_ids.shape[0]).to(torch.float32)
        if not factors:
            # No factors selected: uniform weights (unbiased baseline split).
            scores = torch.ones(other_ids.shape[0]).to(torch.float32)
        for factor in factors:
            t = self.__getattribute__(factor + '_tensor')
            a = self.__getattribute__(factor + '_alpha')
            # Weight each candidate by a sigmoid of its factor value, centered
            # on the dataset-wide median of that factor.
            scores += self._sigmoid(t[other_ids], a, t.median())
        scores = scores / scores.sum()
        # NOTE(review): n//10*10//7 (~n/7) candidates for train+val — confirm intended ratio.
        train_val_ids = torch.tensor(
            np.random.choice(other_ids, self.n // 10 * 10 // 7, replace=False, p=scores.numpy()))
        train_val_ids = train_val_ids[torch.randperm(train_val_ids.shape[0])]
        train_ids = train_val_ids[:self.n // 10]
        val_ids = train_val_ids[self.n // 10:]
        self._save(test_ids, train_ids, val_ids, factors, trial)
    def draw(self, factor, bins, xlim=None):
        """Overlay test vs. train histograms of *factor* plus its sampling sigmoid.

        NOTE(review): _save appends '_<trial>' to the filename but this path has
        no trial suffix — confirm which naming scheme the saved files use.
        """
        dic = torch.load('sampling/' + self.name + factor + '.pt')
        unbias_ids = dic['test_ids']
        bias_ids = dic['train_ids']
        t = self.__getattribute__(factor + '_tensor')
        a = self.__getattribute__(factor + '_alpha')
        tmp1 = t[unbias_ids].numpy()
        tmp2 = t[bias_ids].numpy()
        fig, ax = plt.subplots()
        # Histograms are weight-normalized so both splits sum to 1 and compare fairly.
        ax.hist(tmp1, weights=np.ones_like(tmp1) / len(tmp1), bins=bins, alpha=0.5)
        ax.hist(tmp2, weights=np.ones_like(tmp2) / len(tmp2), bins=bins, alpha=0.5)
        ax_ = ax.twinx()
        x = torch.linspace(t.min(), t.max(), 100)
        ax_.plot(x.numpy(), self._sigmoid(x, a, t.median()).numpy())
        if xlim:
            ax.set_xlim([*xlim])
        plt.show()
class QM9Sampler(Sampler):
    """Sampler over QM9: f1 = atom count, f2 = mean of edge_attr column 0,
    f3 = the fifth regression target (y[0, 4])."""

    def __init__(self, f1_alpha, f2_alpha, f3_alpha):
        super().__init__(QM9('data/QM9'), f1_alpha, f2_alpha, f3_alpha)

    def _get_mols_f1(self, dataset):
        # Node (atom) count per molecule.
        return torch.tensor([data.x.shape[0] for data in dataset]).to(torch.float32)

    def _get_mols_f2(self, dataset):
        # Mean of the first edge-attribute column over each molecule's edges.
        vals = [(data.edge_attr[:, 0].sum() / data.edge_attr.shape[0]).item()
                for data in dataset]
        return torch.tensor(vals).to(torch.float32)

    def _get_mols_f3(self, dataset):
        # Fifth target value of each molecule.
        return torch.tensor([data.y[0, 4].item() for data in dataset]).to(torch.float32)
class ZINCSampler(Sampler):
    """Sampler over ZINC: f1 = atom count, f2 = share of edge labels equal to 1,
    f3 = the scalar target y."""

    def __init__(self, f1_alpha, f2_alpha, f3_alpha):
        super().__init__(ZINC('data/ZINC'), f1_alpha, f2_alpha, f3_alpha)

    def _get_mols_f1(self, dataset):
        # Node (atom) count per molecule.
        return torch.tensor([data.x.shape[0] for data in dataset]).to(torch.float32)

    def _get_mols_f2(self, dataset):
        # Fraction of edge-attribute entries equal to 1.
        vals = [torch.nonzero(data.edge_attr == 1).shape[0] / data.edge_attr.shape[0]
                for data in dataset]
        return torch.tensor(vals).to(torch.float32)

    def _get_mols_f3(self, dataset):
        # Scalar regression target per molecule.
        return torch.tensor([data.y.item() for data in dataset]).to(torch.float32)
class MoleNetSampler(Sampler):
    """Sampler over a MoleculeNet task: f1 = atom count, f2 = smoothed share of
    edge_attr[:, 0] == 1, f3 = first target value."""

    def __init__(self, task, f1_alpha, f2_alpha, f3_alpha):
        super().__init__(MoleculeNet('data/MolNet', task), f1_alpha, f2_alpha, f3_alpha)

    def _get_mols_f1(self, dataset):
        # Node (atom) count per molecule.
        return torch.tensor([data.x.shape[0] for data in dataset]).to(torch.float32)

    def _get_mols_f2(self, dataset):
        # +1 in numerator and denominator guards molecules with no edges (avoids 0/0).
        vals = [(torch.nonzero(data.edge_attr[:, 0] == 1).shape[0] + 1) / (data.edge_attr.shape[0] + 1)
                for data in dataset]
        return torch.tensor(vals).to(torch.float32)

    def _get_mols_f3(self, dataset):
        # First target value of each molecule.
        return torch.tensor([data.y[0, 0].item() for data in dataset]).to(torch.float32)
if __name__ == '__main__':
    # Per-factor steepness; a negative alpha flips the sigmoid so molecules
    # below the factor's median are up-weighted.
    sampler = QM9Sampler(-1, -50, 2)
    # Writes splits for every factor subset x 10 trials under sampling/.
    sampler.run_all_sampling()
    # sampler.sampling('f1')
    # sampler.sampling('f2')
    # sampler.sampling('f3')
    # sampler.draw('f1', 40)
    # sampler.draw('f2', 20, [0.75, 1])
    # sampler.draw('f3', 40,[2,12])
|
{"/trainers.py": ["/models.py", "/preprocessor.py", "/evaluator.py"]}
|
2,319
|
rramjee/Session12
|
refs/heads/main
|
/custompolygon.py
|
import random
from collections import namedtuple
#from PyClassicRound import classic_round
from decimal import *
import cmath
import math
from session11 import Polygon
class Polygons:
    """Lazy sequence of regular polygons with 3..m vertices, circumradius R.

    Polygons are constructed on demand during iteration rather than being
    materialized up front.
    """

    def __init__(self, m, R):
        if m < 3:
            # Fix: message previously said "greater than 3" although m == 3 is allowed.
            raise ValueError('m must be at least 3')
        self._m = m
        self._R = R
        self.length = self._m - 2

    def __len__(self):
        return self.length

    def __repr__(self):
        return f'Polygons(m={self._m}, R={self._R})'

    def __iter__(self):
        return self.PolyIterator(self)

    @property
    def max_efficiency_polygon(self):
        """Polygon with the highest area-to-perimeter ratio.

        Fix: previously read ``self._polygons``, whose initialization is
        commented out in __init__, so this always raised AttributeError.
        Iterating self generates the polygons lazily instead.
        """
        return max(self, key=lambda p: p.area / p.perimeter)

    class PolyIterator:
        """Iterator yielding Polygon(n, R) for n = 3 .. m."""

        def __init__(self, poly_obj):
            self._poly_obj = poly_obj
            self._index = 3  # smallest valid polygon

        def __iter__(self):
            return self

        def __next__(self):
            if self._index > self._poly_obj._m:
                raise StopIteration
            item = Polygon(self._index, self._poly_obj._R)
            self._index += 1
            return item
if __name__ == '__main__':
    # Print the repr of every polygon with 3..25 vertices.
    for shape in Polygons(25, 6):
        print(shape)
    collection = Polygons(10, 8)
    last = None
    # Iterating prints one full stats line per polygon; `last` keeps the final one.
    for last in collection:
        print(f'number of vertices = {last.count_edges} number of edges = {last.count_edges} Edge Length = {last.side_length} interior angle = {last.interior_angle} apothem = {last.apothem} area = {last.area} perimeter = {last.perimeter}')
    # Repeating the line for the last polygon exercises the cached-value path.
    print(f'number of vertices = {last.count_edges} number of edges = {last.count_edges} Edge Length = {last.side_length} interior angle = {last.interior_angle} apothem = {last.apothem} area = {last.area} perimeter = {last.perimeter}')
|
{"/custompolygon.py": ["/session11.py"]}
|
2,320
|
rramjee/Session12
|
refs/heads/main
|
/session11.py
|
import math
class Polygon:
    """Regular polygon with n vertices and circumradius R.

    Every derived quantity is computed once, memoized in ``self.polydict``,
    and served from the cache afterwards — printing a trace line for whichever
    path was taken, exactly as before.
    """

    def __init__(self, n, R):
        if n < 3:
            raise ValueError('Polygon must have at least 3 vertices.')
        self._n = n
        self._R = R
        # Memoization cache shared by all properties.
        self.polydict = {}

    def __repr__(self):
        return f'Polygon(n={self._n}, R={self._R})'

    def _cached(self, key, compute):
        """Return polydict[key], computing it via compute() on first access.

        Replaces eight identical copies of the get-or-compute pattern.
        NOTE: like the original, a cached falsy value would be recomputed;
        harmless here since all quantities are strictly positive.
        """
        if self.polydict.get(key):
            print("picking from calculated value")
            return self.polydict[key]
        print("Calculating for the first time")
        self.polydict[key] = compute()
        return self.polydict[key]

    @property
    def count_vertices(self):
        return self._cached("count_vertices", lambda: self._n)

    @property
    def count_edges(self):
        return self._cached("count_edges", lambda: self._n)

    @property
    def circumradius(self):
        return self._cached("circumradius", lambda: self._R)

    @property
    def interior_angle(self):
        # (n - 2) * 180 / n degrees for a regular n-gon.
        return self._cached("interior_angle", lambda: (self._n - 2) * 180 / self._n)

    @property
    def side_length(self):
        return self._cached("side_length", lambda: 2 * self._R * math.sin(math.pi / self._n))

    @property
    def apothem(self):
        return self._cached("apothem", lambda: self._R * math.cos(math.pi / self._n))

    @property
    def area(self):
        # n/2 * side * apothem; accessing side_length/apothem may print their own traces.
        return self._cached("area", lambda: self._n / 2 * self.side_length * self.apothem)

    @property
    def perimeter(self):
        return self._cached("perimeter", lambda: self._n * self.side_length)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.count_edges == other.count_edges
                    and self.circumradius == other.circumradius)
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, self.__class__):
            return self.count_vertices > other.count_vertices
        return NotImplemented
if __name__ == '__main__':
    sample = Polygon(25, 6)
    print(repr(sample))
    # Second access exercises the memoized path ("picking from calculated value").
    print(sample.area)
    print(sample.area)
|
{"/custompolygon.py": ["/session11.py"]}
|
2,325
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/controllers/room_controller.py
|
from flask import Blueprint, Flask, redirect, render_template, request
from models.room import Room
import repositories.room_repository as room_repository
rooms_blueprint = Blueprint('rooms', __name__)


@rooms_blueprint.route('/rooms')
def rooms():
    """List every room."""
    return render_template('rooms/index.html', rooms=room_repository.select_all())


@rooms_blueprint.route('/rooms/new')
def new_room():
    """Show the blank new-room form."""
    return render_template('rooms/new.html')
@rooms_blueprint.route('/rooms/new', methods=['POST'])
def add_room():
    """Create a room from the submitted form, then return to the room list.

    Fix: the id argument used to be the *builtin* ``id`` function (a leftover
    copy from edit_room); a not-yet-saved room has no id, so pass None.
    """
    name = request.form['name']
    capacity = request.form['capacity']
    description = request.form['description']
    room = Room(name, capacity, description, None)
    room_repository.save(room)
    return redirect('/rooms')
@rooms_blueprint.route('/rooms/<id>')
def see_room(id):
    """Show the edit form for one room."""
    return render_template('rooms/edit.html', room=room_repository.select(id))


@rooms_blueprint.route('/rooms/<id>', methods=['POST'])
def edit_room(id):
    """Apply the submitted edits to the room with the given id."""
    updated = Room(
        request.form['name'],
        request.form['capacity'],
        request.form['description'],
        id,
    )
    room_repository.update(updated)
    return redirect('/rooms')


@rooms_blueprint.route('/rooms/<id>/delete', methods=['POST'])
def delete_room(id):
    """Remove the room and return to the list."""
    room_repository.delete(id)
    return redirect('/rooms')
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,326
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/controllers/schedule_controller.py
|
from flask import Blueprint, Flask, redirect, render_template, request
from models.schedule import Schedule
from models.schedule_member import ScheduleMember
import repositories.schedule_repository as schedule_repository
import repositories.instructor_details_repository as details_repository
import repositories.gym_class_repository as gym_class_repository
import repositories.room_repository as room_repository
import repositories.member_repository as member_repository
from datetime import date
from datetime import timedelta
import calendar
schedule_blueprint = Blueprint('schedule', __name__)


@schedule_blueprint.route('/schedule')
def schedules():
    """Week view: one schedule list per day for the next 7 days."""
    rooms = room_repository.select_all()
    schedules = schedule_repository.select_dates()
    dates = [date.today() + timedelta(days=offset) for offset in range(7)]
    days = [calendar.day_name[d.weekday()] for d in dates]
    # Both branches of the original if/else stored schedules[i] (None stays None),
    # so a single dict comprehension is equivalent.
    schedules_dict = {'today_schedules_' + str(i): schedules[i] for i in range(7)}
    return render_template('schedule/index.html', schedules=schedules, dates=dates,
                           days=days, schedules_dict=schedules_dict, rooms=rooms)
@schedule_blueprint.route('/schedule/new')
def new_schedule():
    """Form for creating a schedule entry; needs instructors, classes and rooms."""
    context = {
        'instructors': details_repository.select_all(),
        'classes': gym_class_repository.select_all(),
        'rooms': room_repository.select_all(),
    }
    return render_template('schedule/new.html', **context)
@schedule_blueprint.route('/schedule/new', methods=['POST'])
def add_schedule():
    """Create a schedule entry from the submitted form, then return to the week view.

    Fix: the last constructor argument used to be the *builtin* ``id``
    function; a not-yet-saved schedule has no id, so pass None.
    """
    class_date = request.form['class_date']
    start_time = request.form['start_time']
    length_mins = request.form['length_mins']
    instructor_id = request.form['instructor_id']
    class_id = request.form['class_id']
    room_id = request.form['room_id']
    # Resolve foreign keys into full objects before building the Schedule.
    instructor = details_repository.select(instructor_id)
    gym_class = gym_class_repository.select(class_id)
    room = room_repository.select(room_id)
    schedule = Schedule(class_date, start_time, length_mins, instructor, gym_class, room, None)
    schedule_repository.save(schedule)
    return redirect('/schedule')
@schedule_blueprint.route('/schedule/<id>')
def show_schedule(id):
    """Detail page for one scheduled class with its booked members and head count."""
    current_cap = schedule_repository.count_member(id)
    booking = schedule_repository.select(id)
    attendees = member_repository.selected_members(id)
    return render_template('schedule/show.html', schedule=booking, members=attendees, current_cap=current_cap[0][0])


@schedule_blueprint.route('/schedule/<id>/new')
def new_member(id):
    """Form to book a member who is not yet on this schedule entry."""
    booking = schedule_repository.select(id)
    candidates = member_repository.non_selected_members(id)
    return render_template('schedule/new_member.html', schedule=booking, members=candidates)


@schedule_blueprint.route('/schedule/<id>/new', methods=['POST'])
def add_member(id):
    """Attach the chosen member to this schedule entry."""
    chosen = member_repository.select(request.form['member_id'])
    booking = schedule_repository.select(id)
    schedule_repository.save_member(ScheduleMember(chosen, booking))
    return redirect('/schedule')
@schedule_blueprint.route('/schedule/all')
def show_all():
    """Split every schedule entry into previous vs. upcoming (today counts as upcoming)."""
    everything = schedule_repository.select_all()
    previous_classes = [s for s in everything if s.class_date < date.today()]
    upcoming_classes = [s for s in everything if not s.class_date < date.today()]
    return render_template('schedule/all.html', previous_classes=previous_classes, upcoming_classes=upcoming_classes)


@schedule_blueprint.route('/schedule/<id>/remove')
def remove_select_member(id):
    """Form to pick a booked member to remove from this schedule."""
    booking = schedule_repository.select(id)
    booked = member_repository.selected_members(id)
    return render_template('schedule/remove_member.html', schedule=booking, members=booked)


@schedule_blueprint.route('/schedule/<id>/remove', methods=['POST'])
def remove_member(id):
    """Detach the chosen member from the schedule."""
    schedule_repository.remove_member(id, request.form['member_id'])
    return redirect('/schedule')
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,327
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/controllers/member_controller.py
|
from flask import Blueprint, Flask, redirect, render_template, request
from models.member import Member
import repositories.member_repository as member_repository
import datetime
members_blueprint = Blueprint('members', __name__)


@members_blueprint.route('/members')
def members():
    """List all members."""
    return render_template('members/index.html', members=member_repository.select_all())


@members_blueprint.route('/members/<id>')
def member(id):
    """Show one member's edit page together with their booked classes."""
    found = member_repository.select(id)
    booked = member_repository.select_classes(id)
    return render_template('members/edit.html', member=found, classes=booked)
@members_blueprint.route('/members/<id>', methods=['POST'])
def edit_member(id):
    """Update an existing member from the submitted edit form."""
    first_name = request.form['first_name']
    last_name = request.form['last_name']
    date_of_birth = request.form['date_of_birth']
    email = request.form['email']
    phone = request.form['phone']
    # The membership checkbox decides whether dates and the premium flag are read.
    if request.form.get('membership'):
        membership = True
        member_since = request.form['member_since']
        member_until = request.form['member_until']
        if request.form.get('premium'):
            premium = True
        else:
            premium = False
    else:
        # Non-members carry no premium flag and no membership dates.
        membership = False
        premium = False
        member_since = None
        member_until = None
    member = Member(first_name, last_name, email, phone, date_of_birth, membership,
                    premium, member_since, member_until, id)
    member_repository.update(member)
    return redirect('/members')
@members_blueprint.route('/members/new')
def new_member():
    """Render the blank new-member form."""
    return render_template('members/new.html')
@members_blueprint.route('/members/new', methods=['POST'])
def add_member():
    """Create a member from the submitted form, then return to the member list.

    Fix: the last Member argument used to be the *builtin* ``id`` function
    (copied from edit_member); a not-yet-saved member has no id, so pass None.
    """
    first_name = request.form['first_name']
    last_name = request.form['last_name']
    date_of_birth = request.form['date_of_birth']
    email = request.form['email']
    phone = request.form['phone']
    # The membership checkbox decides whether dates and the premium flag are read.
    if request.form.get('membership'):
        membership = True
        member_since = request.form['member_since']
        member_until = request.form['member_until']
        if request.form.get('premium'):
            premium = True
        else:
            premium = False
    else:
        # Non-members carry no premium flag and no membership dates.
        membership = False
        premium = False
        member_since = None
        member_until = None
    member = Member(first_name, last_name, email, phone, date_of_birth, membership,
                    premium, member_since, member_until, None)
    member_repository.save(member)
    return redirect('/members')
@members_blueprint.route('/members/<id>/delete', methods=['POST'])
def delete_member(id):
    """Delete the member, then go back to the member list."""
    member_repository.delete(id)
    return redirect('/members')
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,328
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/console.py
|
from models.gym_class import GymClass
from models.member import Member
from models.room import Room
from models.instructor import InstructorDetails, InstructorSchedule, InstructorTimetable
from models.schedule import Schedule
import repositories.gym_class_repository as class_repository
import repositories.member_repository as member_repository
import repositories.room_repository as room_repository
import repositories.instructor_timetable_repository as timetable_repository
import repositories.instructor_details_repository as details_repository
import repositories.instructor_schedule_repository as i_schedule_repository
import repositories.schedule_repository as schedule_repository
import datetime
# Seed the gym classes offered: name, description, max time (minutes), capacity.
class_1 = GymClass('Hot Yoga', 'Yoga in a very warm studio', 60, 16)
class_2 = GymClass('CrossFit', 'Bodyweight workout', 90, 24)
class_3 = GymClass('Spinning', 'Stationary indoor cycling', 60, 20)
class_4 = GymClass('Adult Swimming Lessons', 'Adult Swimming Lessons', 60, 20)
class_5 = GymClass('Water Aerobics', 'Water Exercises', 60, 25)
# Persist each class once, in creation order.
for gym_class in (class_1, class_2, class_3, class_4, class_5):
    class_repository.save(gym_class)
# Seed four members. Positional args after phone are: date_of_birth,
# membership flag, premium flag, member_since, member_until.
member_1 = Member('John', 'Smith', 'johnsmith@gmail.com', '07595964019',
                  datetime.date(1997, 5, 17), True, False, datetime.date(2021, 3, 21),
                  datetime.date(2022, 3, 21))
member_2 = Member('Luke', 'Jones', 'lukejones@gmail.com', '07595964018',
                  datetime.date(1992, 1, 15), False, False, None, None)
member_3 = Member('Mary', 'Taylor', 'marytaylor@gmail.com', '07595964048',
                  datetime.date(1988, 12, 1), True, True, datetime.date(2021, 4, 15),
                  datetime.date(2022, 7, 15))
member_4 = Member('Susan', 'Wilson', 'susanwilson@gmail.com', '07595964013',
                  datetime.date(1968, 12, 1), False, False, None, None)
# Persist each member once, in creation order.
for member in (member_1, member_2, member_3, member_4):
    member_repository.save(member)
# Seed the three rooms: name, capacity, description.
room_1 = Room('Studio 1', 24, 'Large Room')
room_2 = Room('Studio 2', 4, 'Small Room')
room_3 = Room('Swimming Pool', 50, 'Pool')
# Persist each room once, in creation order.
for room in (room_1, room_2, room_3):
    room_repository.save(room)
# Seed the three instructors. The InstructorSchedule/InstructorTimetable
# seeding that used to sit here was commented out; it has been removed —
# recover it from version control if the timetable feature is revived.
instructor_dets_1 = InstructorDetails('Mary', 'Johnson', datetime.date(1992, 3, 12))
instructor_dets_2 = InstructorDetails('Zach', 'Smith', datetime.date(1990, 8, 14))
instructor_dets_3 = InstructorDetails('John', 'Wilson', datetime.date(1990, 8, 14))
# Persist each instructor once, in creation order.
for details in (instructor_dets_1, instructor_dets_2, instructor_dets_3):
    details_repository.save(details)
# Weekly class schedule seed data, replacing 32 copy-pasted Schedule(...)
# constructions and 32 save(...) calls. Each spec is:
#   (instructor, gym class, room, days-of-month in May 2021, start hours)
# Every slot is 60 minutes long. The nested loops reproduce the original
# insert order exactly: specs in order, days outer, start times inner.
_SCHEDULE_SPECS = [
    (instructor_dets_1, class_1, room_1, (3, 5, 7), (10, 13, 16)),
    (instructor_dets_1, class_3, room_2, (4, 6), (11, 14)),
    (instructor_dets_2, class_2, room_2, (3, 5, 7), (9, 12, 15)),
    (instructor_dets_2, class_4, room_3, (4, 6), (8, 12)),
    (instructor_dets_3, class_5, room_3, (3, 5, 7), (15, 18)),
]
for instructor, gym_class, room, days, start_hours in _SCHEDULE_SPECS:
    for day in days:
        for hour in start_hours:
            schedule_repository.save(
                Schedule(datetime.date(2021, 5, day), datetime.time(hour, 0),
                         60, instructor, gym_class, room))
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,329
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/repositories/member_repository.py
|
from database.run_sql import run_sql
from models.member import Member
import repositories.schedule_repository as schedule_repository
from datetime import date
def save(member):
    """Insert *member* as a new row and attach the generated id to it."""
    sql = """INSERT INTO members
            (first_name, last_name, email, phone, date_of_birth,
            membership, premium, member_since, member_until)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
            RETURNING id"""
    params = [
        member.first_name,
        member.last_name,
        member.email,
        member.phone,
        member.date_of_birth,
        member.membership,
        member.premium,
        member.member_since,
        member.member_until,
    ]
    rows = run_sql(sql, params)
    # RETURNING id hands back the new primary key in the first row.
    member.id = rows[0]['id']
    return member
def select_all():
    """Return every row of the members table as a list of Member objects."""
    rows = run_sql("SELECT * FROM members")
    return [
        Member(row['first_name'], row['last_name'], row['email'], row['phone'],
               row['date_of_birth'], row['membership'], row['premium'],
               row['member_since'], row['member_until'], row['id'])
        for row in rows
    ]
def select(id):
    """Return the Member with the given id, or None if no such row exists.

    Bug fix: the previous version indexed ``run_sql(...)[0]`` before its
    ``is not None`` check, so an unknown id raised IndexError instead of
    returning None. The result list is now checked for emptiness first.
    """
    sql = "SELECT * FROM members WHERE id = %s"
    results = run_sql(sql, [id])
    if not results:
        return None
    row = results[0]
    return Member(row['first_name'], row['last_name'], row['email'],
                  row['phone'], row['date_of_birth'], row['membership'],
                  row['premium'], row['member_since'], row['member_until'],
                  row['id'])
def update(member):
    """Write every field of *member* back to its existing row (keyed by id)."""
    sql = """UPDATE members
            SET first_name = %s,
            last_name = %s,
            email = %s,
            phone = %s,
            date_of_birth = %s,
            membership = %s,
            premium = %s,
            member_since = %s,
            member_until = %s
            WHERE id = %s"""
    params = [
        member.first_name,
        member.last_name,
        member.email,
        member.phone,
        member.date_of_birth,
        member.membership,
        member.premium,
        member.member_since,
        member.member_until,
        member.id,
    ]
    run_sql(sql, params)
def delete_all():
    """Remove every row from the members table."""
    run_sql("DELETE FROM members")
def delete(id):
    """Remove the member row with the given id."""
    run_sql("DELETE FROM members WHERE id = %s", [id])
def selected_members(id):
    """Return the raw member rows booked onto the schedule with this id."""
    sql = """SELECT members.* FROM members
            INNER JOIN schedules_members ON schedules_members.member_id = members.id
            WHERE schedules_members.schedule_id = %s"""
    return run_sql(sql, [id])
def non_selected_members(id):
    """Return the raw rows for members NOT booked onto the given schedule.

    Two-step query: first collect the ids of members already linked to the
    schedule, then select everyone else. The empty-tuple special case exists
    because ``id NOT IN ()`` is not valid SQL — with no bookings, every
    member qualifies.
    """
    # NOTE(review): the WHERE clause discards the NULL-extended rows, so this
    # FULL OUTER JOIN behaves like an INNER JOIN here — confirm intent.
    sql = """SELECT members.id FROM members
            FULL OUTER JOIN schedules_members ON schedules_members.member_id = members.id
            WHERE schedule_id = %s"""
    values = [id]
    rows = run_sql(sql, values)
    # NOTE(review): rows are indexed positionally (row[0]) here but accessed
    # by column name elsewhere in this module — verify the cursor row type
    # supports both.
    member_ids = tuple([row[0] for row in rows])
    if member_ids == ():
        sql2 = "SELECT * FROM members"
        members = run_sql(sql2)
    else:
        # The tuple is passed as a single parameter for the IN (...) clause.
        sql2 = """SELECT * FROM members WHERE id NOT IN %s"""
        values2 = [member_ids]
        members = run_sql(sql2, values2)
    return members
def sort(type):
    """Return all members ordered by the column named by *type*.

    Bug fix: the column was previously bound as a query parameter
    ("ORDER BY %s"); drivers render bound parameters as quoted string
    literals, so the query ordered by a constant — i.e. it did not sort at
    all. Identifiers cannot be parameterized, so the column name is now
    validated against a whitelist and interpolated directly.

    Raises:
        ValueError: if *type* is not a known members column.
    """
    allowed_columns = {'id', 'first_name', 'last_name', 'email', 'phone',
                       'date_of_birth', 'membership', 'premium',
                       'member_since', 'member_until'}
    if type not in allowed_columns:
        raise ValueError("invalid sort column: %r" % (type,))
    sql = "SELECT * FROM members ORDER BY " + type
    results = run_sql(sql)
    members = []
    for row in results:
        member = Member(row['first_name'], row['last_name'], row['email'], row['phone'],
                        row['date_of_birth'], row['membership'], row['premium'],
                        row['member_since'], row['member_until'], row['id'])
        members.append(member)
    return members
def select_classes(id):
    """Return the member's booked schedules dated today or later."""
    sql = "SELECT * FROM schedules_members WHERE member_id = %s"
    bookings = run_sql(sql, [id])
    today = date.today()
    upcoming = []
    for booking in bookings:
        schedule = schedule_repository.select(booking['schedule_id'])
        # Past classes are filtered out; only today and future dates remain.
        if schedule.class_date >= today:
            upcoming.append(schedule)
    return upcoming
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,330
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/tests/instructor_test.py
|
import unittest
from models.instructor import InstructorDetails, InstructorSchedule
import datetime
class TestInstructorDetails(unittest.TestCase):
    """Unit tests for the InstructorDetails model."""

    def setUp(self):
        # Fresh fixture per test, per unittest convention.
        self.instructor = InstructorDetails('Mary', 'Jones', datetime.date(1992, 3, 12))

    def test_instructor_has_first_name(self):
        self.assertEqual(self.instructor.first_name, 'Mary')

    def test_instructor_has_last_name(self):
        self.assertEqual(self.instructor.last_name, 'Jones')

    def test_instructor_has_date_of_birth(self):
        self.assertEqual(str(self.instructor.date_of_birth), '1992-03-12')
class TestInstructorSchedule(unittest.TestCase):
    """Unit tests for the InstructorSchedule model (Mon-Fri, 9 to 5)."""

    def setUp(self):
        details = InstructorDetails('Mary', 'Jones', datetime.date(1992, 3, 12))
        self.instructor_dets = details
        self.instructor = InstructorSchedule(
            datetime.date(2021, 3, 22),
            True, True, True, True, True, False, False,
            datetime.time(9, 0), datetime.time(17, 0),
            details,
        )

    def test_instructor_has_week_start_date(self):
        self.assertEqual(str(self.instructor.week_start_date), '2021-03-22')

    def test_instructor_has_instructor(self):
        self.assertEqual(self.instructor.instructor.first_name, 'Mary')

    def test_instructor_has_day(self):
        for day in ('monday', 'tuesday', 'wednesday', 'thursday', 'friday'):
            self.assertEqual(getattr(self.instructor, day), True)
        for day in ('saturday', 'sunday'):
            self.assertEqual(getattr(self.instructor, day), False)

    def test_instructor_has_start_time(self):
        self.assertEqual(str(self.instructor.start_time), '09:00:00')

    def test_instructor_has_end_time(self):
        self.assertEqual(str(self.instructor.end_time), '17:00:00')
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,331
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/models/member.py
|
class Member:
    """A gym member record, mirroring one row of the members table.

    Attributes:
        first_name, last_name: the member's name.
        email, phone: contact details.
        date_of_birth: the member's date of birth.
        membership: whether the person currently holds a membership.
        premium: whether the membership is the premium tier.
        member_since, member_until: membership validity bounds, or None
            when the person is not a member.
        id: database primary key, or None before the row is saved.
    """

    def __init__(self, first_name, last_name, email, phone, date_of_birth, membership=False,
                 premium=False, member_since=None, member_until=None, id=None):
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.phone = phone
        self.date_of_birth = date_of_birth
        self.membership = membership
        self.premium = premium
        self.member_since = member_since
        self.member_until = member_until
        self.id = id

    def __repr__(self):
        # Added for debuggability; backward-compatible (no caller in the
        # visible code relies on the default object repr).
        return "Member(%r, %r, id=%r)" % (self.first_name, self.last_name, self.id)
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,332
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/app.py
|
from flask import Flask, render_template
from controllers.class_controller import classes_blueprint
from controllers.member_controller import members_blueprint
from controllers.room_controller import rooms_blueprint
from controllers.instructor_controller import instructors_blueprint
from controllers.schedule_controller import schedule_blueprint
# Wire the application: create the Flask app, attach every feature area's
# blueprint (order preserved), and expose the landing page.
app = Flask(__name__)
for blueprint in (classes_blueprint, members_blueprint, rooms_blueprint,
                  instructors_blueprint, schedule_blueprint):
    app.register_blueprint(blueprint)


@app.route('/')
def home():
    """Render the landing page."""
    return render_template('index.html')


if __name__ == '__main__':
    # Debug mode is for local development only.
    app.run(debug=True)
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,333
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/models/gym_class.py
|
class GymClass:
    """A type of class the gym offers, mirroring one row of the classes table.

    Attributes:
        class_name: display name of the class.
        description: short description shown to members.
        max_time: class duration limit in minutes.
        capacity: maximum number of attendees.
        id: database primary key, or None before the row is saved.
    """

    def __init__(self, class_name, description, max_time, capacity, id=None):
        self.class_name = class_name
        self.description = description
        self.max_time = max_time
        self.capacity = capacity
        self.id = id

    def __repr__(self):
        # Added for debuggability; backward-compatible (no caller in the
        # visible code relies on the default object repr).
        return "GymClass(%r, capacity=%r, id=%r)" % (self.class_name, self.capacity, self.id)
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,334
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/controllers/class_controller.py
|
from flask import Blueprint, Flask, redirect, render_template, request
from models.gym_class import GymClass
import repositories.gym_class_repository as class_repository
# Blueprint collecting all gym-class CRUD routes.
classes_blueprint = Blueprint('classes', __name__)


@classes_blueprint.route('/classes')
def classes():
    """List every gym class."""
    all_classes = class_repository.select_all()
    return render_template('classes/index.html', classes=all_classes)
@classes_blueprint.route('/classes/new')
def new_class():
    """Show the blank 'create a class' form."""
    return render_template('classes/new.html')
@classes_blueprint.route('/classes/new', methods=['POST'])
def add_class():
    """Create a gym class from the submitted form and return to the list.

    Bug fix: the builtin ``id`` function was previously passed as the new
    class's id (there is no ``id`` route parameter here, so the name resolved
    to the builtin). A new record has no id yet — omit the argument so the
    GymClass default (None) applies and the database assigns one on save.
    """
    form = request.form
    gym_class = GymClass(form['name'], form['description'],
                         form['max_time'], form['capacity'])
    class_repository.save(gym_class)
    return redirect('/classes')
@classes_blueprint.route('/classes/<id>')
def see_class(id):
    """Show the edit form for one gym class."""
    selected = class_repository.select(id)
    return render_template('classes/edit.html', gym_class=selected)
@classes_blueprint.route('/classes/<id>', methods=['POST'])
def edit_class(id):
    """Apply the submitted form values to the class with this id."""
    form = request.form
    updated = GymClass(form['name'], form['description'],
                       form['max_time'], form['capacity'], id)
    class_repository.update(updated)
    return redirect('/classes')
@classes_blueprint.route('/classes/<id>/delete', methods=['POST'])
def delete_class(id):
    """Delete the class with this id, then return to the list."""
    class_repository.delete(id)
    return redirect('/classes')
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,335
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/run_test.py
|
import unittest
from tests.member_test import TestMember
from tests.gym_class_test import TestGymClass
from tests.instructor_test import TestInstructorDetails, TestInstructorSchedule
from tests.room_test import TestRoom
from tests.schedule_test import TestSchedule
if __name__ == '__main__':
    # unittest.main() collects every unittest.TestCase subclass visible at
    # module level -- the imports above exist solely to bring TestMember,
    # TestGymClass, TestInstructorDetails, TestInstructorSchedule, TestRoom
    # and TestSchedule into scope so they are discovered and run.  The process
    # exits non-zero when any test fails.
    unittest.main()
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,336
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/models/instructor.py
|
class InstructorDetails:
    """Personal details for a gym instructor."""

    def __init__(self, first_name, last_name, date_of_birth, id=None):
        # id stays None until the repository layer persists the record.
        self.id = id
        self.first_name = first_name
        self.last_name = last_name
        self.date_of_birth = date_of_birth
class InstructorSchedule:
    """Weekly availability pattern (one flag per weekday) for an instructor."""

    def __init__(self, nickname, monday, tuesday, wednesday, thursday, friday,
                 saturday, sunday, start_time, end_time, id=None):
        # id stays None until the repository layer persists the record.
        self.id = id
        self.nickname = nickname
        self.start_time = start_time
        self.end_time = end_time
        # Per-day availability, Monday through Sunday.
        (self.monday, self.tuesday, self.wednesday, self.thursday,
         self.friday, self.saturday, self.sunday) = (
            monday, tuesday, wednesday, thursday, friday, saturday, sunday)
class InstructorTimetable:
    """Links an instructor's details and schedule to a specific week."""

    def __init__(self, week_start_date, detail, schedule, id=None):
        # id stays None until the repository layer persists the record.
        self.id = id
        self.week_start_date = week_start_date
        # detail / schedule are InstructorDetails / InstructorSchedule objects.
        self.detail = detail
        self.schedule = schedule
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,337
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/repositories/instructor_schedule_repository.py
|
from database.run_sql import run_sql
from models.instructor import InstructorSchedule
def save(instructor):
    """Insert a new instructor_schedules row; set and return the new id."""
    sql = """INSERT INTO instructor_schedules
    (nickname, monday, tuesday, wednesday, thursday, friday,
    saturday, sunday, start_time, end_time)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s )
    RETURNING id"""
    params = [
        instructor.nickname,
        instructor.monday, instructor.tuesday, instructor.wednesday,
        instructor.thursday, instructor.friday, instructor.saturday,
        instructor.sunday,
        instructor.start_time, instructor.end_time,
    ]
    instructor.id = run_sql(sql, params)[0]['id']
    return instructor
def select_all():
    """Return every instructor schedule row as an InstructorSchedule object."""
    sql = "SELECT * FROM instructor_schedules"
    return [
        InstructorSchedule(row['nickname'], row['monday'], row['tuesday'],
                           row['wednesday'], row['thursday'], row['friday'],
                           row['saturday'], row['sunday'],
                           row['start_time'], row['end_time'], row['id'])
        for row in run_sql(sql)
    ]
def select(id):
    """Return the InstructorSchedule with this id, or None when absent.

    Bug fix: the original indexed ``run_sql(...)[0]`` *before* the
    ``is not None`` check, so a missing id raised IndexError instead of
    ever returning None.
    """
    sql = "SELECT * FROM instructor_schedules WHERE id = %s"
    results = run_sql(sql, [id])
    if not results:
        return None
    row = results[0]
    return InstructorSchedule(row['nickname'], row['monday'], row['tuesday'],
                              row['wednesday'], row['thursday'], row['friday'],
                              row['saturday'], row['sunday'],
                              row['start_time'], row['end_time'], row['id'])
def update(instructor):
    """Write every mutable field of *instructor* back to its existing row."""
    sql = """UPDATE instructor_schedules
    SET nickname = %s, monday = %s, tuesday = %s,
    wednesday = %s, thursday = %s, friday = %s, saturday = %s,
    sunday = %s, start_time = %s, end_time = %s
    WHERE id = %s"""
    params = [
        instructor.nickname,
        instructor.monday, instructor.tuesday, instructor.wednesday,
        instructor.thursday, instructor.friday, instructor.saturday,
        instructor.sunday,
        instructor.start_time, instructor.end_time,
        instructor.id,
    ]
    run_sql(sql, params)
def delete_all():
    """Remove every row from instructor_schedules."""
    run_sql("DELETE FROM instructor_schedules")
def delete(id):
    """Delete the schedule row with this id.

    Bug fix: the table name was misspelled ``instructor_scheudles``, so the
    statement could never match the real ``instructor_schedules`` table used
    by every other function in this module.
    """
    sql = "DELETE FROM instructor_schedules WHERE id = %s"
    values = [id]
    run_sql(sql, values)
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,338
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/tests/member_test.py
|
import unittest
import datetime
from models.member import Member
class TestMember(unittest.TestCase):
    """Checks that Member stores each constructor argument unchanged."""

    def setUp(self):
        dob = datetime.date(1997, 5, 17)
        joined = datetime.date(2021, 3, 21)
        expires = datetime.date(2021, 4, 21)
        self.member = Member('John', 'Smith', 'johnsmith@gmail.com',
                             '07595964019', dob, True, False, joined, expires)

    def test_member_has_first_name(self):
        self.assertEqual('John', self.member.first_name)

    def test_member_has_last_name(self):
        self.assertEqual('Smith', self.member.last_name)

    def test_member_has_email(self):
        self.assertEqual('johnsmith@gmail.com', self.member.email)

    def test_member_has_phone(self):
        self.assertEqual('07595964019', self.member.phone)

    def test_member_has_date_of_birth(self):
        self.assertEqual('1997-05-17', str(self.member.date_of_birth))

    def test_member_has_membership(self):
        self.assertEqual(True, self.member.membership)

    def test_member_has_premium(self):
        self.assertEqual(False, self.member.premium)

    def test_member_has_member_since(self):
        self.assertEqual('2021-03-21', str(self.member.member_since))

    def test_member_has_member_until(self):
        self.assertEqual('2021-04-21', str(self.member.member_until))
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,339
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/tests/room_test.py
|
import unittest
from models.room import Room
class TestRoom(unittest.TestCase):
    """Checks that Room stores its constructor arguments unchanged."""

    NAME, CAPACITY, DESCRIPTION = 'Room 1', 24, 'Large Room'

    def setUp(self):
        self.room = Room(self.NAME, self.CAPACITY, self.DESCRIPTION)

    def test_room_has_name(self):
        self.assertEqual(self.NAME, self.room.room_name)

    def test_room_has_capacity(self):
        self.assertEqual(self.CAPACITY, self.room.capacity)

    def test_room_has_description(self):
        self.assertEqual(self.DESCRIPTION, self.room.description)
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,340
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/repositories/instructor_timetable_repository.py
|
from database.run_sql import run_sql
from models.instructor import InstructorDetails, InstructorSchedule, InstructorTimetable
import repositories.instructor_details_repository as details_repository
import repositories.instructor_schedule_repository as schedule_repository
def save(timetable):
    """Insert a timetable row linking a detail and schedule record by id."""
    sql = """INSERT INTO instructor_timetables ( week_start, i_details_id, i_schedules_id )
    VALUES ( %s, %s, %s ) RETURNING id"""
    params = [timetable.week_start_date, timetable.detail.id,
              timetable.schedule.id]
    timetable.id = run_sql(sql, params)[0]['id']
    return timetable
def select_all():
    """Return all timetables, each with its detail and schedule objects
    re-fetched from their own repositories."""
    sql = "SELECT * FROM instructor_timetables"
    timetables = []
    for row in run_sql(sql):
        timetables.append(InstructorTimetable(
            row['week_start'],
            details_repository.select(row['i_details_id']),
            schedule_repository.select(row['i_schedules_id']),
            row['id'],
        ))
    return timetables
def delete_all():
    """Remove every row from instructor_timetables."""
    run_sql("DELETE FROM instructor_timetables")
def delete(id):
    """Remove the timetable row with the given primary key."""
    run_sql("DELETE FROM instructor_timetables WHERE id = %s", [id])
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,341
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/repositories/instructor_details_repository.py
|
from database.run_sql import run_sql
from models.instructor import InstructorDetails
def save(instructor):
    """Insert the instructor's personal details; set and return the new id."""
    sql = """INSERT INTO instructor_details
    (first_name, last_name, date_of_birth)
    VALUES ( %s, %s, %s )
    RETURNING id"""
    params = [instructor.first_name, instructor.last_name,
              instructor.date_of_birth]
    instructor.id = run_sql(sql, params)[0]['id']
    return instructor
def select(id):
    """Return the InstructorDetails with this id, or None when absent.

    Bug fix: the original indexed ``run_sql(...)[0]`` *before* the
    ``is not None`` check, so a missing id raised IndexError instead of
    ever returning None.
    """
    sql = "SELECT * FROM instructor_details WHERE id = %s"
    results = run_sql(sql, [id])
    if not results:
        return None
    row = results[0]
    return InstructorDetails(row['first_name'], row['last_name'],
                             row['date_of_birth'], row['id'])
def select_all():
    """Return every instructor_details row as an InstructorDetails object."""
    sql = "SELECT * FROM instructor_details"
    return [
        InstructorDetails(row['first_name'], row['last_name'],
                          row['date_of_birth'], row['id'])
        for row in run_sql(sql)
    ]
def update(instructor):
    """Write name and date-of-birth fields back to the existing row."""
    sql = """UPDATE instructor_details
    SET first_name = %s, last_name = %s, date_of_birth = %s
    WHERE id = %s"""
    params = [instructor.first_name, instructor.last_name,
              instructor.date_of_birth, instructor.id]
    run_sql(sql, params)
def delete_all():
    """Remove every row from instructor_details."""
    run_sql("DELETE FROM instructor_details")
def delete(id):
    """Remove the instructor_details row with the given primary key."""
    run_sql("DELETE FROM instructor_details WHERE id = %s", [id])
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,342
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/repositories/gym_class_repository.py
|
from database.run_sql import run_sql
from models.gym_class import GymClass
def save(gym_class):
    """Insert a new class row; copy the generated id onto *gym_class*."""
    sql = """INSERT INTO classes
    (class_name, description, max_time, capacity)
    VALUES ( %s, %s, %s, %s )
    RETURNING id"""
    params = [gym_class.class_name, gym_class.description,
              gym_class.max_time, gym_class.capacity]
    gym_class.id = run_sql(sql, params)[0]['id']
    return gym_class
def select_all():
    """Return every gym class, ordered by primary key."""
    sql = "SELECT * FROM classes ORDER BY id"
    return [
        GymClass(row['class_name'], row['description'], row['max_time'],
                 row['capacity'], row['id'])
        for row in run_sql(sql)
    ]
def select(id):
    """Return the GymClass with this id, or None when no row matches.

    Bug fix: the original indexed ``run_sql(...)[0]`` *before* the
    ``is not None`` check, so a missing id raised IndexError instead of
    ever returning None.
    """
    sql = "SELECT * FROM classes WHERE id = %s"
    results = run_sql(sql, [id])
    if not results:
        return None
    row = results[0]
    return GymClass(row['class_name'], row['description'], row['max_time'],
                    row['capacity'], row['id'])
def update(gym_class):
    """Write all mutable fields of *gym_class* back to its existing row."""
    sql = """UPDATE classes
    SET class_name = %s,
    description = %s,
    max_time = %s,
    capacity = %s
    WHERE id = %s"""
    params = [gym_class.class_name, gym_class.description,
              gym_class.max_time, gym_class.capacity, gym_class.id]
    run_sql(sql, params)
def delete_all():
    """Remove every row from the classes table."""
    run_sql("DELETE FROM classes")
def delete(id):
    """Remove the class with the given primary key."""
    run_sql("DELETE FROM classes WHERE id = %s", [id])
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,343
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/models/room.py
|
class Room:
    """A bookable gym room.

    NOTE(review): the constructor parameter is misspelled ``descripton``;
    it is stored correctly as ``self.description``. Renaming the parameter
    would break any keyword-argument callers, so it is left as-is -- confirm
    no keyword use before fixing.
    """
    def __init__(self, room_name, capacity, descripton=None, id=None):
        # Display name of the room.
        self.room_name = room_name
        # Maximum occupancy; compared against class bookings elsewhere.
        self.capacity = capacity
        # Optional free-text description (note misspelled parameter above).
        self.description = descripton
        # Database primary key; None until the room is saved.
        self.id = id
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,344
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/repositories/schedule_repository.py
|
from database.run_sql import run_sql
from models.schedule import Schedule
from models.instructor import InstructorDetails
from models.gym_class import GymClass
from models.room import Room
from models.schedule_member import ScheduleMember
import repositories.instructor_details_repository as instructor_repository
import repositories.gym_class_repository as gym_class_repository
import repositories.room_repository as room_repository
import repositories.member_repository as member_repository
from datetime import timedelta
from datetime import date
def save(schedule):
    """Insert a new schedule row and attach the generated id to schedule.

    Improvement: now returns the saved schedule, for consistency with
    the save() functions in gym_class_repository and room_repository,
    which return the saved object. Existing callers that ignore the
    return value are unaffected.
    """
    sql = """INSERT INTO schedules
            (class_date, start_time, length_mins, instructor_id, class_id, room_id)
            VALUES ( %s, %s, %s, %s, %s, %s ) RETURNING id"""
    values = [schedule.class_date, schedule.start_time, schedule.length_mins,
              schedule.instructor.id, schedule.gym_class.id, schedule.room.id]
    results = run_sql(sql, values)
    schedule.id = results[0]['id']
    return schedule
def select_all():
    """Return every schedule ordered by class_date, with the related
    instructor, gym class and room resolved via their repositories."""
    rows = run_sql("SELECT * FROM schedules ORDER BY class_date")
    schedules = []
    for row in rows:
        entry = Schedule(
            row['class_date'],
            row['start_time'],
            row['length_mins'],
            instructor_repository.select(row['instructor_id']),
            gym_class_repository.select(row['class_id']),
            room_repository.select(row['room_id']),
            row['id'],
        )
        schedules.append(entry)
    return schedules
def select_dates():
    """Return a 7-element list (today through six days ahead), each element
    being that day's schedules ordered by start time, with related objects
    resolved from their repositories."""
    sql = "SELECT * FROM schedules WHERE class_date = %s ORDER BY start_time"
    week = []
    for offset in range(7):
        day_schedules = []
        results = run_sql(sql, [date.today() + timedelta(days=offset)])
        # NOTE(review): run_sql appears to return a list elsewhere in this
        # module, in which case this branch is always taken and the else
        # (appending a lone None) is unreachable -- confirm against run_sql.
        if results is not None:
            for row in results:
                day_schedules.append(Schedule(
                    row['class_date'],
                    row['start_time'],
                    row['length_mins'],
                    instructor_repository.select(row['instructor_id']),
                    gym_class_repository.select(row['class_id']),
                    room_repository.select(row['room_id']),
                    row['id'],
                ))
        else:
            day_schedules.append(None)
        week.append(day_schedules)
    return week
def update(schedule):
    """Update all mutable columns of an existing schedule row, matched by id.

    Bug fix: the SQL has seven placeholders (six SET columns plus the
    WHERE id) but the original supplied only six values, so every call
    failed with a parameter-count error; schedule.id is now appended.
    """
    sql = """UPDATE schedules
            SET (class_date, length_mins, start_time, instructor_id, class_id, room_id) =
            (%s, %s, %s, %s, %s, %s)
            WHERE id = %s"""
    values = [schedule.class_date, schedule.length_mins, schedule.start_time,
              schedule.instructor.id, schedule.gym_class.id, schedule.room.id,
              schedule.id]
    run_sql(sql, values)
def select(id):
    """Fetch one schedule by primary key, resolving its related objects.

    Returns the Schedule, or None when no row exists.

    Bug fix: the original indexed run_sql(...)[0] *before* the None check,
    so an unknown id raised IndexError instead of returning None as the
    guard clearly intended.
    """
    sql = "SELECT * FROM schedules WHERE id = %s"
    results = run_sql(sql, [id])
    if not results:
        return None
    row = results[0]
    return Schedule(row['class_date'], row['start_time'], row['length_mins'],
                    instructor_repository.select(row['instructor_id']),
                    gym_class_repository.select(row['class_id']),
                    room_repository.select(row['room_id']),
                    row['id'])
def delete_all():
    """Remove every row from the schedules table."""
    run_sql("DELETE FROM schedules")
def delete(id):
    """Remove the schedule with the given primary key."""
    run_sql("DELETE FROM schedules WHERE id = %s", [id])
def save_member(member):
    """Insert a member-to-schedule booking row and attach the generated id.

    *member* is a ScheduleMember join object carrying .member and .schedule.

    Improvement: now returns the saved object, for consistency with the
    other save() functions in this project that return what they saved.
    Existing callers that ignore the return value are unaffected.
    """
    sql = """INSERT INTO schedules_members (member_id, schedule_id)
            VALUES (%s, %s)
            RETURNING id"""
    values = [member.member.id, member.schedule.id]
    results = run_sql(sql, values)
    member.id = results[0]['id']
    return member
def count_member(id):
    """Count the bookings for the schedule with this id.

    NOTE(review): this returns the raw run_sql result rows (presumably
    something like [{'count': n}]), not the bare integer -- callers must
    unpack the count themselves. Changing the return type here could break
    them, so the behavior is documented rather than altered; confirm what
    callers expect before simplifying.
    """
    sql = """SELECT COUNT(member_id) FROM schedules_members WHERE schedule_id = %s"""
    values = [id]
    count = run_sql(sql, values)
    return count
def remove_member(id, member_id):
    """Delete one member's booking for the given schedule."""
    run_sql("DELETE FROM schedules_members WHERE schedule_id = %s AND member_id = %s",
            [id, member_id])
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,345
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/models/schedule_member.py
|
class ScheduleMember:
    """Join object linking a member to a scheduled class (a booking)."""
    def __init__(self, member, schedule, id=None):
        # The Member who booked the class.
        self.member = member
        # The Schedule (class occurrence) being booked.
        self.schedule = schedule
        # Database primary key of the join row; None until saved.
        self.id = id
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,346
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/models/schedule.py
|
class Schedule:
    """A scheduled occurrence of a gym class in a room with an instructor."""
    def __init__(self, class_date, start_time, length_mins, instructor, gym_class, room, id=None):
        # Calendar date of the class (a datetime.date elsewhere in the app).
        self.class_date = class_date
        # Time the class begins.
        self.start_time = start_time
        # Duration of the class in minutes.
        self.length_mins = length_mins
        # InstructorDetails leading the class.
        self.instructor = instructor
        # The GymClass being run.
        self.gym_class = gym_class
        # The Room it takes place in.
        self.room = room
        # Database primary key; None until saved.
        self.id = id
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,347
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/repositories/room_repository.py
|
from database.run_sql import run_sql
from models.room import Room
def save(room):
    """Insert a new row into rooms and attach the generated id to room."""
    sql = """INSERT INTO rooms
            (room_name, capacity, description)
            VALUES (%s, %s, %s)
            RETURNING id"""
    params = [room.room_name, room.capacity, room.description]
    rows = run_sql(sql, params)
    # RETURNING id hands back the primary key assigned by the database.
    room.id = rows[0]['id']
    return room
def select_all():
    """Return every room row, ordered by id, as Room objects."""
    rows = run_sql("SELECT * FROM rooms ORDER BY id")
    return [
        Room(row['room_name'], row['capacity'], row['description'], row['id'])
        for row in rows
    ]
def select(id):
    """Fetch a single room by primary key.

    Returns the matching Room, or None when no row exists.

    Bug fix: the original indexed run_sql(...)[0] *before* the None check,
    so an unknown id raised IndexError instead of returning None as the
    guard clearly intended.
    """
    sql = "SELECT * FROM rooms WHERE id = %s"
    results = run_sql(sql, [id])
    if not results:
        return None
    row = results[0]
    return Room(row['room_name'], row['capacity'], row['description'], row['id'])
def update(room):
    """Persist all mutable fields of an existing Room, matched by id."""
    sql = """UPDATE rooms
            SET room_name = %s, capacity = %s, description = %s
            WHERE id = %s"""
    params = [room.room_name, room.capacity, room.description, room.id]
    run_sql(sql, params)
def delete_all():
    """Remove every row from the rooms table."""
    run_sql("DELETE FROM rooms")
def delete(id):
    """Remove the room with the given primary key."""
    run_sql("DELETE FROM rooms WHERE id = %s", [id])
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,348
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/controllers/instructor_controller.py
|
from flask import Blueprint, Flask, redirect, render_template, request
from models.instructor import InstructorTimetable, InstructorDetails, InstructorSchedule
import repositories.instructor_timetable_repository as timetable_repository
import repositories.instructor_details_repository as details_repository
import repositories.instructor_schedule_repository as schedule_repository
instructors_blueprint = Blueprint('instructors', __name__)
@instructors_blueprint.route('/instructors')
def instructors():
    """Render the instructor index page with all instructors."""
    return render_template('instructor/index.html',
                           instructors=details_repository.select_all())
@instructors_blueprint.route('/instructors/new_instructor')
def show_instructor():
    """Render the blank new-instructor-details form."""
    return render_template('instructor/new_dets.html')
@instructors_blueprint.route('/instructors/new_instructor', methods=['POST'])
def new_instructor():
    """Create an instructor from the submitted form and redirect to the index.

    Bug fix: the original passed the *builtin* ``id`` function as the
    model's id argument. A new record has no id yet (the database assigns
    it on save), so pass None explicitly.
    """
    first_name = request.form['first_name']
    last_name = request.form['last_name']
    date_of_birth = request.form['date_of_birth']
    instructor = InstructorDetails(first_name, last_name, date_of_birth, None)
    details_repository.save(instructor)
    return redirect('/instructors')
@instructors_blueprint.route('/instructors/new_schedule')
def show_schedule():
    """Render the blank new-instructor-schedule form."""
    return render_template('instructor/new_sch.html')
@instructors_blueprint.route('/instructors/new_schedule', methods=['POST'])
def new_scheudle():
    """Create an InstructorSchedule from the form and redirect to the index.

    (Function name typo kept: it is the Flask endpoint name, so renaming
    could break url_for references -- confirm before fixing.)

    Bug fix: the original passed the *builtin* ``id`` function as the
    model's id argument; a new record has no id, so pass None explicitly.
    """
    nickname = request.form['nickname']
    days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday',
            'saturday', 'sunday']
    # A checkbox that was ticked appears in the form; absent ones read False.
    flags = [bool(request.form.get(day)) for day in days]
    start_time = request.form['start_time']
    end_time = request.form['end_time']
    instructor = InstructorSchedule(nickname, flags[0], flags[1], flags[2],
                                    flags[3], flags[4], flags[5], flags[6],
                                    start_time, end_time, None)
    schedule_repository.save(instructor)
    return redirect('/instructors')
@instructors_blueprint.route('/instructors/new_timetable')
def show_timetable():
    """Render the new-timetable form with instructor and schedule choices."""
    return render_template('instructor/new_tim.html',
                           instructors=details_repository.select_all(),
                           schedules=schedule_repository.select_all())
@instructors_blueprint.route('/instructors/new_timetable', methods=['POST'])
def add_timetable():
    """Create an InstructorTimetable linking an instructor to a schedule.

    Bug fix: the original passed the *builtin* ``id`` function as the
    model's id argument; a new record has no id, so pass None explicitly.
    """
    instructor_id = request.form['instructor_id']
    schedule_id = request.form['schedule_id']
    start_date = request.form['start_date']
    instructor = details_repository.select(instructor_id)
    schedule = schedule_repository.select(schedule_id)
    timetable = InstructorTimetable(start_date, instructor, schedule, None)
    timetable_repository.save(timetable)
    return redirect('/instructors')
@instructors_blueprint.route('/instructors/schedule/<id>')
def e_schedule(id):
    """Render the edit form for the instructor schedule with this id."""
    return render_template('instructor/edit_sch.html',
                           schedule=schedule_repository.select(id))
@instructors_blueprint.route('/instructors/schedule/<id>', methods=['POST'])
def edit_schedule(id):
    """Apply the submitted form to the instructor schedule with this id."""
    # NOTE(review): this reads the 'name' field while new_scheudle reads
    # 'nickname'; presumably the edit template names the input differently
    # -- confirm against instructor/edit_sch.html.
    nickname = request.form['name']
    day_names = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday',
                 'saturday', 'sunday']
    flags = []
    for day in day_names:
        # A ticked checkbox appears in the form; absent ones stay False.
        flags.append(True if request.form.get(day) else False)
    start_time = request.form['start_time']
    end_time = request.form['end_time']
    updated = InstructorSchedule(nickname, flags[0], flags[1], flags[2],
                                 flags[3], flags[4], flags[5], flags[6],
                                 start_time, end_time, id)
    schedule_repository.update(updated)
    return redirect('/instructors')
@instructors_blueprint.route('/instructors/details/<id>')
def e_details(id):
    """Render the edit form for the instructor details with this id."""
    return render_template('instructor/edit_dets.html',
                           details=details_repository.select(id))
@instructors_blueprint.route('/instructors/details/<id>', methods=['POST'])
def edit_details(id):
    """Apply the submitted form to the instructor details with this id."""
    form = request.form
    updated = InstructorDetails(form['first_name'], form['last_name'],
                                form['date_of_birth'], id)
    details_repository.update(updated)
    return redirect('/instructors')
@instructors_blueprint.route('/instructors/details/<id>/delete', methods=['POST'])
def delete_instructor(id):
    """Delete the instructor with this id, then return to the index."""
    details_repository.delete(id)
    return redirect('/instructors')
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,349
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/tests/schedule_test.py
|
import unittest
from models.schedule import Schedule
import datetime
class TestSchedule(unittest.TestCase):
    """Unit tests for the Schedule model.

    Bug fix: Schedule's constructor takes class_date, start_time,
    length_mins, instructor, gym_class and room (plus an optional id);
    the original setUp passed only two positional arguments, so 45 landed
    in start_time and every test errored with TypeError before asserting.
    """
    def setUp(self):
        # Related objects are not exercised by these tests, so pass None.
        self.schedule = Schedule(datetime.date(2021, 3, 21), "09:00", 45,
                                 None, None, None)

    def test_schedule_has_class_date(self):
        self.assertEqual('2021-03-21', str(self.schedule.class_date))

    def test_schedule_has_length_mins(self):
        self.assertEqual(45, self.schedule.length_mins)
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,350
|
constable-ldp/gym_management_app
|
refs/heads/main
|
/tests/gym_class_test.py
|
import unittest
from models.gym_class import GymClass
class TestGymClass(unittest.TestCase):
    """Unit tests for the GymClass model's basic attributes."""

    def setUp(self):
        # One representative class instance shared by all assertions below.
        self.gym_class = GymClass(
            'Hot Yoga', 'Yoga performed in a very warm studio', 60, 16)

    def test_class_has_name(self):
        self.assertEqual(self.gym_class.class_name, 'Hot Yoga')

    def test_class_has_description(self):
        self.assertEqual(
            self.gym_class.description, 'Yoga performed in a very warm studio')

    def test_class_has_max_time(self):
        self.assertEqual(self.gym_class.max_time, 60)

    def test_class_has_capacity(self):
        self.assertEqual(self.gym_class.capacity, 16)
|
{"/controllers/room_controller.py": ["/models/room.py", "/repositories/room_repository.py"], "/controllers/schedule_controller.py": ["/models/schedule.py", "/models/schedule_member.py", "/repositories/schedule_repository.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/controllers/member_controller.py": ["/models/member.py", "/repositories/member_repository.py"], "/console.py": ["/models/gym_class.py", "/models/member.py", "/models/room.py", "/models/instructor.py", "/models/schedule.py", "/repositories/gym_class_repository.py", "/repositories/member_repository.py", "/repositories/room_repository.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py", "/repositories/schedule_repository.py"], "/repositories/member_repository.py": ["/models/member.py", "/repositories/schedule_repository.py"], "/tests/instructor_test.py": ["/models/instructor.py"], "/app.py": ["/controllers/class_controller.py", "/controllers/member_controller.py", "/controllers/room_controller.py", "/controllers/instructor_controller.py", "/controllers/schedule_controller.py"], "/controllers/class_controller.py": ["/models/gym_class.py", "/repositories/gym_class_repository.py"], "/run_test.py": ["/tests/member_test.py", "/tests/gym_class_test.py", "/tests/instructor_test.py", "/tests/room_test.py", "/tests/schedule_test.py"], "/repositories/instructor_schedule_repository.py": ["/models/instructor.py"], "/tests/member_test.py": ["/models/member.py"], "/tests/room_test.py": ["/models/room.py"], "/repositories/instructor_timetable_repository.py": ["/models/instructor.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/repositories/instructor_details_repository.py": ["/models/instructor.py"], 
"/repositories/gym_class_repository.py": ["/models/gym_class.py"], "/repositories/schedule_repository.py": ["/models/schedule.py", "/models/instructor.py", "/models/gym_class.py", "/models/room.py", "/models/schedule_member.py", "/repositories/instructor_details_repository.py", "/repositories/gym_class_repository.py", "/repositories/room_repository.py", "/repositories/member_repository.py"], "/repositories/room_repository.py": ["/models/room.py"], "/controllers/instructor_controller.py": ["/models/instructor.py", "/repositories/instructor_timetable_repository.py", "/repositories/instructor_details_repository.py", "/repositories/instructor_schedule_repository.py"], "/tests/schedule_test.py": ["/models/schedule.py"], "/tests/gym_class_test.py": ["/models/gym_class.py"]}
|
2,351
|
Therapoid/django-assent
|
refs/heads/master
|
/tests/conftest.py
|
# -*- coding: utf-8 -*-
import os
import pytest
from django import setup
from django.utils import timezone
from django.contrib.auth import get_user_model
from assent.models import Agreement, AgreementUser, AgreementVersion
def pytest_configure():
    """Bootstrap Django for the test session using the example settings."""
    # Respect an already-exported settings module, mirroring setdefault().
    if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'example.settings'
    setup()
@pytest.fixture
def user():
    """A persisted user account for the tests."""
    return get_user_model().objects.create(
        email='test@test.com', username='test_user')
@pytest.fixture
def agreement():
    """A bare Agreement with no released version yet."""
    fields = {
        'document_key': 'test key',
        'description': 'test description',
        'short_description': 'test short description',
        'latest_version': None,
        'date_modified': None,
    }
    return Agreement.objects.create(**fields)
@pytest.fixture
def agreement_version(agreement):
    """An AgreementVersion wired up as its agreement's latest version."""
    version = AgreementVersion.objects.create(
        agreement=agreement,
        short_title='test version',
        full_title='test version',
        content='test content',
        content_format='TEXT',
        release_date=None,
    )
    # Promote it to latest, mirroring what AgreementVersion.save() does.
    agreement.latest_version = version
    agreement.date_modified = version.release_date
    agreement.save()
    return version
@pytest.fixture
def agreement_user(agreement_version, user):
    """An acceptance record linking *user* to *agreement_version*."""
    return AgreementUser.objects.create(
        user=user,
        agreement_version=agreement_version,
        acceptance_date=timezone.now(),
        ip_address='2001:db8:85a3:0:0:8a2e:370:7334',
    )
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,352
|
Therapoid/django-assent
|
refs/heads/master
|
/tasks.py
|
# -*- coding: utf-8 -*-
import os
import sys
from invoke import task
BUILDDIR = "build"
PROJECT = "assent"
@task
def clean(ctx):
    """Removes all the cache files and the build directory.

    :param ctx: invoke context used to run shell commands.
    """
    ctx.run("find . -type d -name __pycache__ | xargs rm -rf")
    ctx.run('rm -rf ./.cache')
    # BUG FIX: the build dir lives *next to* this file, not under it.
    # os.path.join(__file__, BUILDDIR) produced "<...>/tasks.py/build",
    # which never exists, so the build dir was never removed.
    builddir = os.path.join(os.path.dirname(os.path.abspath(__file__)), BUILDDIR)
    if os.path.exists(builddir):
        print('Removing builddir {}'.format(builddir))
        ctx.run('rm -rf {}'.format(builddir))
@task
def install(ctx):
    """Installs the libraries required to run the application."""
    for command in ("pip install -U pip",
                    "pip install -qr requirements/base.txt"):
        ctx.run(command)
@task(install)
def develop(ctx):
    """Installs all the libraries used for development.

    Depends on :func:`install`, so base requirements land first.
    """
    ctx.run("pip install -qr requirements/dev.txt")
@task
def checks(ctx):
    """Runs pep8/flake8 checks on the code."""
    # Skip generated artefacts and migrations in both linters.
    exclude_flag = "--exclude='build/,*migrations/*'"
    for linter in ("pep8", "flake8"):
        ctx.run("{} {} .".format(linter, exclude_flag))
@task(develop)
def test(ctx):
    """Runs the tests with coverage reporting (HTML + terminal)."""
    command = (
        'PYTHONPATH=`pwd` '
        "py.test --cov-config .coveragerc --cov-report html "
        "--cov-report term --cov={}".format(PROJECT)
    )
    ctx.run(command, pty=True)
    # On macOS, pop the HTML coverage report open in the browser.
    if sys.platform == 'darwin':
        ctx.run('open {}/coverage/index.html'.format(BUILDDIR))
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,353
|
Therapoid/django-assent
|
refs/heads/master
|
/assent/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-15 03:43
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema: Agreement, AgreementUser (through model) and
    AgreementVersion.  Auto-generated by Django 1.10.5 -- do not hand-edit
    field definitions; create a follow-up migration instead."""

    initial = True

    dependencies = [
        # The user FK/M2M below must wait for whatever AUTH_USER_MODEL
        # resolves to (the user model is swappable).
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Agreement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('document_key', models.CharField(default='', max_length=255, unique=True, verbose_name='document key')),
                ('slug', models.SlugField(default='', max_length=255, verbose_name='slug')),
                ('description', models.CharField(default='', max_length=4096, verbose_name='description')),
                ('short_description', models.CharField(default='', max_length=255, verbose_name='short description')),
                ('date_modified', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False, null=True)),
            ],
            options={
                'verbose_name': 'agreement',
                'verbose_name_plural': 'agreements',
            },
        ),
        migrations.CreateModel(
            name='AgreementUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('acceptance_date', models.DateTimeField(blank=True, null=True, verbose_name='acceptance date')),
                ('ip_address', models.GenericIPAddressField(blank=True, null=True, verbose_name='IP address')),
            ],
            options={
                'verbose_name': 'agreement user',
                'verbose_name_plural': 'agreement users',
            },
        ),
        migrations.CreateModel(
            name='AgreementVersion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('short_title', models.CharField(default='', max_length=255, verbose_name='short title')),
                ('full_title', models.CharField(default='', max_length=1023, verbose_name='full title')),
                ('content', models.TextField(blank=True, default='', verbose_name='content')),
                ('content_format', models.CharField(default='', max_length=4, verbose_name='Content format')),
                ('release_date', models.DateTimeField(auto_now_add=True, verbose_name='release date')),
                ('agreement', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='versions', to='assent.Agreement', verbose_name='agreement')),
                ('users', models.ManyToManyField(related_name='agreement_versions', through='assent.AgreementUser', to=settings.AUTH_USER_MODEL, verbose_name='users')),
            ],
            options={
                'verbose_name': 'agreement version',
                'get_latest_by': ('release_date',),
                'verbose_name_plural': 'agreement versions',
                'ordering': ('-release_date',),
            },
        ),
        # Remaining FKs are added after the CreateModels so the circular
        # Agreement <-> AgreementVersion references can be resolved.
        migrations.AddField(
            model_name='agreementuser',
            name='agreement_version',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='agreement_users', to='assent.AgreementVersion', verbose_name='agreement_version'),
        ),
        migrations.AddField(
            model_name='agreementuser',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='agreement',
            name='latest_version',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='agreement_latest_version', to='assent.AgreementVersion'),
        ),
        # A user may accept any given version at most once.
        migrations.AlterUniqueTogether(
            name='agreementuser',
            unique_together=set([('user', 'agreement_version')]),
        ),
    ]
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,354
|
Therapoid/django-assent
|
refs/heads/master
|
/tests/test_models.py
|
# -*- coding: utf-8 -*-
import pytest
@pytest.mark.django_db
class TestAgreement:
    """String representation of Agreement."""

    def test_str(self, agreement):
        assert 'test key' == str(agreement)
@pytest.mark.django_db
class TestAgreementVersion:
    """String representation and rendering of AgreementVersion."""

    def test_str(self, agreement_version):
        expected = 'Agreement: "test key" released: {0:%Y-%m-%d %H:%M}'.format(
            agreement_version.release_date)
        assert str(agreement_version) == expected

    def test_plain_text_render(self, agreement_version):
        rendered = agreement_version.get_rendered_content()
        assert rendered == '<p>test content</p>'
@pytest.mark.django_db
class TestAgreementUser:
    """String representation and M2M wiring of AgreementUser."""

    def test_str(self, agreement_version, agreement_user, user):
        # The user has signed the agreement via the agreement_user fixture.
        expected = "User: {0}, agreement: {1}".format(user, agreement_version)
        assert str(agreement_user) == expected

    def test_relationships(self, agreement_user, agreement_version, user):
        # The through-model links both directions of the many-to-many.
        assert agreement_version in list(user.agreement_versions.all())
        assert user in list(agreement_version.users.all())
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,355
|
Therapoid/django-assent
|
refs/heads/master
|
/assent/urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls import url, include
from .views import (
AgreementDetailView,
AgreementListView,
AgreementFormView,
)
# Routes: agreement list -> per-agreement detail -> acceptance form.
# Slugs match anything except '/'.
urlpatterns = [
    url(r'^$', AgreementListView.as_view(), name='agreement_list'),
    url(r'^(?P<slug>[^/]+)/$', AgreementDetailView.as_view(), name='agreement_detail'),
    url(r'^(?P<slug>[^/]+)/accept/$', AgreementFormView.as_view(), name='agreement_form'),
]
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,356
|
Therapoid/django-assent
|
refs/heads/master
|
/assent/views/forms.py
|
# -*- coding: utf-8 -*-
from django import forms
from ..models import AgreementUser
class AgreementForm(forms.ModelForm):
    """ModelForm for AgreementUser in which every field is rendered hidden.

    The view pre-fills all values via get_initial(); the user only has to
    submit the form to record acceptance.
    """

    # Field names swapped to HiddenInput widgets in __init__.
    hidden_fields = (
        'user', 'agreement_version', 'ip_address', 'acceptance_date', )

    class Meta:
        model = AgreementUser
        fields = (
            'user', 'agreement_version', 'ip_address', 'acceptance_date', )

    def __init__(self, *args, **kwargs):
        """Hides any fields listed in the class property: hidden_fields."""
        super(AgreementForm, self).__init__(*args, **kwargs)
        hidden_widget_cls = forms.widgets.HiddenInput
        for field_name in self.hidden_fields:
            self.fields[field_name].widget = hidden_widget_cls()
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,357
|
Therapoid/django-assent
|
refs/heads/master
|
/assent/views/assent.py
|
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.functional import cached_property
from django.views.generic import DetailView, ListView
from django.views.generic.detail import SingleObjectTemplateResponseMixin
from django.views.generic.edit import ModelFormMixin, ProcessFormView
from ..models import Agreement, AgreementUser
from .forms import AgreementForm
@method_decorator(login_required, name='dispatch')
class AgreementListView(ListView):
    """Lists the AgreementUser records belonging to the requesting user."""

    model = AgreementUser
    template_name = 'assent/agreement_list.html'

    def get_queryset(self):
        """Restrict the default queryset to the current user's rows."""
        base_queryset = super(AgreementListView, self).get_queryset()
        return base_queryset.filter(user=self.request.user)
class AgreementMixin(object):
    """Shared lookup/context logic for the agreement detail and form views."""

    @cached_property
    def agreement(self):
        """
        Returns the Agreement specified in the url (404 if the slug is
        unknown).  Cached per-request via cached_property.
        """
        slug = self.kwargs.get('slug', None)
        return get_object_or_404(Agreement, slug=slug)

    def get_object(self, queryset=None):
        # The view's object is the requesting user's AgreementUser row for
        # the agreement's *latest* version -- None when they have not yet
        # accepted it (first() swallows the miss instead of raising).
        agreement_version = self.agreement.latest_version
        if queryset is None:
            queryset = self.get_queryset()
        obj = queryset.filter(
            user=self.request.user, agreement_version=agreement_version).first()
        return obj

    def get_context_data(self, **kwargs):
        """
        Ensures agreement and its latest_version are in the context.
        """
        context = super(AgreementMixin, self).get_context_data(**kwargs)
        context['agreement'] = self.agreement
        context['agreement_version'] = self.agreement.latest_version
        return context
@method_decorator(login_required, name='dispatch')
class AgreementDetailView(AgreementMixin, DetailView):
    """Shows one agreement; the object is the user's acceptance row, which
    may be None when the latest version has not been accepted yet."""
    model = AgreementUser
    template_name = 'assent/agreement_detail.html'
@method_decorator(login_required, name='dispatch')
class AgreementFormView(AgreementMixin, SingleObjectTemplateResponseMixin,
                        ModelFormMixin, ProcessFormView):
    """
    This is essentially a CreateOrUpdateView. If the object doesn't exist, it
    is created. If it already does, then it is updated.
    """
    model = AgreementUser
    template_name = 'assent/agreement_form.html'
    form_class = AgreementForm
    success_url = reverse_lazy('assent:agreement_list')

    def get(self, request, *args, **kwargs):
        # ModelFormMixin requires self.object before building form/context;
        # None means "create", an instance means "update".
        self.object = self.get_object()
        return super(AgreementFormView, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        self.object = self.get_object()
        return super(AgreementFormView, self).post(request, *args, **kwargs)

    @cached_property
    def client_ip_address(self):
        """
        Attempts to return the user's IP address, preferring the first
        (client) entry of X-Forwarded-For when a proxy supplied one.
        """
        x_forwarded_for = self.request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            # BUG FIX: XFF entries are comma(-space) separated and may carry
            # stray whitespace; without strip() the value could fail
            # GenericIPAddressField validation on the model.
            ip = x_forwarded_for.split(',')[0].strip()
        else:
            ip = self.request.META.get('REMOTE_ADDR')
        return ip

    def get_initial(self):
        """Pre-fill the (hidden) form fields with the acceptance metadata."""
        initial = self.initial.copy()
        initial.update({
            'user': self.request.user,
            'agreement_version': self.agreement.latest_version,
            'acceptance_date': timezone.now(),
            'ip_address': self.client_ip_address,
        })
        return initial
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,358
|
Therapoid/django-assent
|
refs/heads/master
|
/assent/views/__init__.py
|
# -*- coding: utf-8 -*-
from .assent import *
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,359
|
Therapoid/django-assent
|
refs/heads/master
|
/assent/models.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from transcode.render import render
class Agreement(models.Model):
    """A legal document users must assent to; tracks its newest version."""

    # Stable external identifier for the document; also used as str(self).
    document_key = models.CharField(
        _('document key'), max_length=255, blank=False, default='',
        unique=True)
    slug = models.SlugField(
        _('slug'), max_length=255, blank=False, default='')
    description = models.CharField(
        _('description'), max_length=4096, blank=False, default='')
    short_description = models.CharField(
        _('short description'), max_length=255, blank=False, default='')
    # Back-pointer to the most recent AgreementVersion; nullable so an
    # Agreement can exist before its first version is released.
    latest_version = models.OneToOneField(
        'assent.AgreementVersion', blank=True, null=True,
        related_name='agreement_latest_version')
    # Note, this should only be updated when we add a new version
    # This is currently done in AgreementVersion.save()
    date_modified = models.DateTimeField(
        default=timezone.now, blank=True, null=True, editable=False)

    class Meta:
        verbose_name = _('agreement')
        verbose_name_plural = _('agreements')

    def get_absolute_url(self):
        """Canonical detail URL, resolved from the slug."""
        return reverse('assent:agreement_detail', kwargs={'slug': self.slug})

    def __str__(self):
        return self.document_key
class AgreementUser(models.Model):
    """Through-model recording a user's acceptance of one agreement version."""

    user = models.ForeignKey(settings.AUTH_USER_MODEL)
    agreement_version = models.ForeignKey(
        to='assent.AgreementVersion',
        verbose_name=_('agreement_version'),
        related_name='agreement_users', blank=False)
    # Nullable: the row may exist before the user has actually accepted.
    acceptance_date = models.DateTimeField(
        _('acceptance date'), blank=True, null=True)
    # IPv4 or IPv6 address captured at acceptance time.
    ip_address = models.GenericIPAddressField(
        _('IP address'), blank=True, null=True)

    class Meta:
        verbose_name = _('agreement user')
        verbose_name_plural = _('agreement users')
        # A user accepts any given version at most once.
        unique_together = (('user', 'agreement_version', ), )

    def __str__(self):
        # NOTE(review): calls .format on a lazy translation proxy -- works on
        # the Django versions this targets, but str(_(...)).format would be
        # safer; confirm against the project's Django version.
        return _('User: {0}, agreement: {1}').format(
            self.user, self.agreement_version)
class AgreementVersion(models.Model):
    """One released revision of an Agreement's text."""

    # Users who accepted this version, linked through AgreementUser.
    users = models.ManyToManyField(
        to=settings.AUTH_USER_MODEL, through='assent.AgreementUser',
        related_name='agreement_versions', verbose_name=_('users'))
    agreement = models.ForeignKey(
        to='assent.Agreement', verbose_name=_('agreement'),
        related_name='versions', blank=False)
    short_title = models.CharField(
        _('short title'), max_length=255, blank=False, default='')
    full_title = models.CharField(
        _('full title'), max_length=1023, blank=False, default='')
    content = models.TextField(
        _('content'), blank=True, default='')
    # Format key consumed by transcode.render (e.g. 'TEXT'), max 4 chars.
    content_format = models.CharField(
        _('Content format'), max_length=4, blank=False, default='')
    # Set once on insert (auto_now_add); newest-first via Meta.ordering.
    release_date = models.DateTimeField(
        _('release date'), auto_now_add=True)

    class Meta:
        ordering = ('-release_date', )
        get_latest_by = ('release_date', )
        verbose_name = _('agreement version')
        verbose_name_plural = _('agreement versions')

    def __str__(self):
        return _('Agreement: "{0}" released: {1:%Y-%m-%d %H:%M}').format(
            self.agreement.document_key, self.release_date)

    def get_rendered_content(self):
        """Render self.content according to content_format via transcode."""
        return render(self.content, self.content_format)

    def save(self, *args, **kwargs):
        """Persist, then promote this version to Agreement.latest_version.

        NOTE(review): this runs on *every* save, not only for the newest
        version -- re-saving an old version would clobber the latest-version
        pointer; confirm versions are write-once in practice.
        """
        super(AgreementVersion, self).save(*args, **kwargs)
        if self.pk is not None and self.agreement_id:
            self.agreement.latest_version = self
            self.agreement.date_modified = self.release_date
            self.agreement.save()
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,360
|
Therapoid/django-assent
|
refs/heads/master
|
/assent/admin.py
|
# -*- coding: utf-8 -*-
from django import forms
from django.contrib import admin
from django.utils.translation import ugettext as _
from transcode.conf import get_content_formatters
from .models import Agreement, AgreementUser, AgreementVersion
ContentFormat = get_content_formatters('ASSENT_FORMATTERS')
# ===== INLINES ===============================================================
class AgreementVersionForm(forms.ModelForm):
    """Admin form that swaps the raw content_format field for a choice list."""

    content_format = forms.ChoiceField(
        label=_('Content Format'), required=True, choices=ContentFormat.CHOICES)

    class Meta:
        model = AgreementVersion
        fields = (
            'short_title',
            'full_title',
            'content',
            'content_format',
        )

    def __init__(self, *args, **kwargs):
        super(AgreementVersionForm, self).__init__(*args, **kwargs)
        # Fall back to the configured default format when the bound instance
        # is missing or has no format yet (new/blank versions).
        if not (self.instance and self.instance.content_format):
            self.fields['content_format'].initial = ContentFormat.DEFAULT
class AgreementVersionInlineAdmin(admin.StackedInline):
    """Inline editor for an agreement's versions on the Agreement page."""
    model = AgreementVersion
    extra = 0
    # release_date is auto_now_add on the model, so it is display-only here.
    readonly_fields = ('release_date', )
    form = AgreementVersionForm
    fieldsets = (
        (None, {
            'fields': (
                'short_title',
                'full_title',
                'content',
                'content_format',
                'release_date',
            )
        }),
    )
# ===== ADMINS ================================================================
class AgreementAdmin(admin.ModelAdmin):
    """Agreement admin with its versions edited inline."""
    inlines = (AgreementVersionInlineAdmin, )
    # Auto-build the slug from the document key while typing.
    prepopulated_fields = {"slug": ("document_key",)}
    fieldsets = (
        (None, {
            'fields': (
                'document_key',
                'slug',
                'description',
                'short_description',
                'latest_version',
            )
        }),
    )
admin.site.register(Agreement, AgreementAdmin)
class AgreementUserAdmin(admin.ModelAdmin):
    """Read-mostly admin for acceptance records."""
    list_display = ('user', 'agreement_version', 'acceptance_date', )
    # Acceptance metadata is evidence -- never editable in the admin.
    readonly_fields = ('acceptance_date', 'ip_address', )
    fieldsets = (
        (None, {
            'fields': (
                'user',
                'agreement_version',
                'acceptance_date',
                'ip_address',
            )
        }),
    )
admin.site.register(AgreementUser, AgreementUserAdmin)
|
{"/tests/conftest.py": ["/assent/models.py"], "/assent/urls.py": ["/assent/views/__init__.py"], "/assent/views/forms.py": ["/assent/models.py"], "/assent/views/assent.py": ["/assent/models.py", "/assent/views/forms.py"], "/assent/views/__init__.py": ["/assent/views/assent.py"], "/assent/admin.py": ["/assent/models.py"]}
|
2,362
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/app/csrf_protect.py
|
from . import app
from .utils import hash_generator
@app.before_request
def csrf_protect():
    """Reject any POST whose form token does not match the session token.

    BUG FIX: ``request``, ``session`` and ``abort`` were used without being
    imported anywhere in this module, so every POST raised NameError.
    """
    from flask import abort, request, session  # flask is already an app dep
    if request.method == "POST":
        # Tokens are single-use: pop so a replayed form fails the check.
        token = session.pop("_csrf_token", None)
        if not token or token != request.form.get("_csrf_token"):
            abort(403)
def generate_csrf_token():
    """Return the session's CSRF token, creating one on first use.

    BUG FIX: ``session`` was never imported in this module, so this raised
    NameError the first time a template called ``csrf_token()``.
    """
    from flask import session  # flask is already an app dependency
    if "_csrf_token" not in session:
        session["_csrf_token"] = hash_generator(30)
    return session["_csrf_token"]
# Expose the helper to Jinja templates as {{ csrf_token() }}.
app.jinja_env.globals['csrf_token'] = generate_csrf_token
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,363
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/app/forms.py
|
# NOTE(review): placeholder only -- no fields, no validation, and it does
# not subclass any form base class.  TODO: implement or remove.
class LoginForm():
    """
    Placeholder for the login form (not yet implemented).
    """
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,364
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/app/__init__.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
# alternatively, from flask_mongoengine import MongoEngine
import os
from config import BASEDIR
# Application factory side: build the Flask app and bind the ORM to it.
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
# db = MongoEngine(app)
# User Authentication Dictionary (would be Redis-cache in production)
authhashes = {};
# --------------------- Logging Errors
import logging
from logging.handlers import RotatingFileHandler
# Rotate the log at ~100KB, keeping one backup file; 10 == logging.DEBUG.
file_handler = RotatingFileHandler("activities.log",maxBytes=100000,backupCount=1);
file_handler.setLevel(10);
app.logger.addHandler(file_handler);
# 0 == logging.NOTSET: the logger defers filtering to its handlers.
app.logger.setLevel(level=0);
logger = app.logger
# ==================== Start the App !
"""
blueprints live just below the app/ folder as subfolders with a similar
layout to the BASEDIR/app/ folder.
>>> from app.simple_page import simple_page
>>> app.register_blueprint(simple_page)
and within BASEDIR/app/simple_page/__init__.py
>>> simple_page = Blueprint('simple_page, __name__, template_folder="templates")
Then add simple_page routing like in the BASEDIR/app/
"""
# I though below was >>> from app import views, models
# NOTE(review): implicit relative imports -- Python 2 only.  Under Python 3
# these would need to be "from . import csrf_protect, views, models".
import csrf_protect
import views, models
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,365
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/app/accounts.py
|
# 'User' Model (for MySQL)
from app import app, db
from werkzeug.security import generate_password_hash, \
check_password_hash
class User(db.Model):
    """Account record holding a username and a salted password hash."""

    # All models should have an 'id'
    id = db.Column(db.Integer, index=True, primary_key=True)
    # Standard user stuff
    username = db.Column(db.String(128), unique=True)
    # BUG FIX: the password-hash column must NOT be unique -- a unique
    # constraint here rejects legitimate users on a hash collision and buys
    # nothing, since salted hashes already differ per user.
    password = db.Column(db.String(128))

    def set_password(self, password):
        """Store a salted hash of *password* (never the plain text)."""
        self.password = generate_password_hash(password)

    def check_password(self, password):
        """Return True when *password* matches the stored hash."""
        return check_password_hash(self.password, password)

    def __repr__(self):
        # what gets printed in the console during debugging
        return "<User %r>" % (self.username)
# login_required decorator
from functools import wraps
from flask import g, request, redirect, url_for
def login_required(f):
    """Decorator that redirects anonymous visitors to the login page.

    BUG FIX: flask's ``g`` is an object with attribute access, not a
    mapping -- ``g["user"]`` raised TypeError on every request.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if getattr(g, "user", None) is None:
            return redirect(url_for("login_page"))
        return f(*args, **kwargs)
    return decorated_function
# login pages
@app.route("/login_page",methods=["GET"])
def login_page():
    """Render the login form.

    BUG FIX: ``render_template`` was never imported in this module, so the
    route raised NameError; imported locally to keep the fix self-contained.
    """
    from flask import render_template
    return render_template("login_page.html")
@app.route("/login",methods=["POST"])
def login():
    """Authenticate a user from JSON or form-encoded credentials.

    BUG FIXES relative to the original:
    * the user lookup read ``request.form['username']`` even for JSON
      requests (400 for API clients) -- it now uses the credentials
      extracted above regardless of content type;
    * ``g`` is attribute-based, not subscriptable;
    * ``render_template``, ``session`` and ``logger`` were never imported;
    * the log line read ``session['user']`` which was never set -- the
      username is now stored in the session before logging.
    """
    from flask import render_template, session
    from app import logger
    if request.headers['Content-Type'] == "application/json":
        name_of_user = request.json['username']
        pass_check = request.json['password']
        """
        Store & then return some random hash as a key for
        authentication. Then check for it as a login in a
        RESTful authentication.
        from flask import jsonify
        from app import authhashes
        from app.utils import hash_generator
        hashname = hash_generator();
        authhashes[hashname] = name_of_user;
        return jsonify(authkey=hashname);
        """
    else:
        name_of_user = request.form["username"]
        pass_check = request.form["password"]
    u = db.session.query(User).filter_by(username=name_of_user).first()
    if u is not None and u.check_password(pass_check):
        g.user = u.username
        session['user'] = u.username
        logger.info("User %s Has Logged in" % str(session['user']))
        return redirect(url_for("homepage"))
    return render_template("error.html")
@app.route("/logout",methods=['GET','POST'])
def logout():
    """Clear the user from request and session state, then go home.

    BUG FIX: ``g`` is attribute-based (``g["user"]`` raised TypeError) and
    ``session``/``logger`` were never imported; reading session['user']
    could also raise KeyError -- pop() with a default is used instead.
    """
    from flask import session
    from app import logger
    departing_user = session.pop('user', None)
    g.user = None
    logger.info("User %s Has Logged Out" % str(departing_user))
    return redirect(url_for("homepage"))
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,366
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/fabric_scripts/setup_server.py
|
from fabric.api import *
import string,random
def update_upgrade():
    """Refresh the package index, then upgrade installed packages."""
    for command in ("apt-get update", "apt-get -y upgrade"):
        sudo(command)
def install_nginx():
    """Install nginx.

    BUG FIX: added ``-y`` -- without it apt-get prompts for confirmation
    and hangs a non-interactive fabric run.
    """
    sudo("apt-get -y install nginx");
def install_python():
    """Install python, dev headers, pip and supporting tools.

    BUG FIX: added ``-y`` -- without it apt-get prompts for confirmation
    and hangs a non-interactive fabric run.
    """
    sudo("apt-get -y install python python-dev python-pip libssl-dev "\
        "libffi-dev htop munin");
def create_deploy_user():
    """Create a 'deploy' user, clone the project template, set up a venv."""
    sudo("adduser deploy")
    # Fetch the ladder template and build an isolated environment for it.
    run("git clone http://github.com/valexandersaulys/flask-ladder")
    run("virtualenv .venv")
    # Hand both directories over to the deploy user before installing deps.
    sudo("chown deploy:deploy .venv/")
    sudo("chown deploy:deploy flask-ladder/")
    run(".venv/bin/pip install -r flask-ladder/requirements.txt")
def install_mysql():
    """Install MySQL and provision a 'deploy' account with two databases.

    Generates a random password for the MySQL 'deploy' user and appends
    the resulting DATABASE_URI to the deploy user's ~/.bashrc.
    """
    sudo("apt-get install mysql-server mysql-client")
    # Draw the password from the OS entropy pool; the default random.choice
    # PRNG is predictable and unsuitable for credentials.
    rng = random.SystemRandom()
    password = ''.join(rng.choice(string.ascii_uppercase + string.digits) for _ in range(12))
    sudo("mysql --execute=\"CREATE USER 'deploy'@'localhost' IDENTIFIED BY '"+password+"';\"")
    # Run some mysql commands for setup
    sudo("mysql --execute=\"CREATE DATABASE deployment;\"")
    sudo("mysql --execute=\"CREATE DATABASE development;\"")
    sudo("mysql --execute=\"GRANT ALL PRIVILEGES ON deployment.* to 'deploy'@'localhost';\"")
    sudo("mysql --execute=\"GRANT ALL PRIVILEGES ON development.* to 'deploy'@'localhost';\"")
    # Then add the mysql URI to the bashrc for deploy user.
    bash_insert_string = "deploy:"+password+"@localhost"  # uri for flask
    # Bugfix: the old echo wrote "DATABASE_URI=x<uri>", corrupting the
    # value with a stray 'x'.
    sudo("echo 'DATABASE_URI="+bash_insert_string+"' >> /home/deploy/.bashrc")
def install_mongodb():
    """Install MongoDB 3.2 from the official repo (user setup stays manual)."""
    # Trust the MongoDB signing key, register the xenial repo, then install.
    sudo("apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927")
    repo_line = "'deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse'"
    run("echo " + repo_line + "| sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list")
    sudo("apt-get update")
    sudo("apt-get install -y mongodb-org")
    # Unfortunately, creating and configuring a user will not be a one liner...
    # Reference mongo-shell snippets for the manual step:
    """
    db.createUser({ username:"",
                    password:"",
                    roles:[ { role:"dbAdmin or readWrite",
                              db: "<name_of_database>" }, ... ]
                  } );
    db.getUsers(); # to get users
    show dbs # to show users
    db.updateUser("name_of_user", { roles: [ {role: '', db: '' },... ] });
    """
def install_redis():
    """Download, build, install and enable Redis.  Run as the main user.

    Each fabric ``run``/``sudo`` call executes in a fresh shell, so a bare
    ``run("cd ...")`` has no effect on later commands.  The previous
    version therefore ran make (and the utils install script) in the home
    directory instead of the unpacked source tree; the build steps now
    live inside ``with cd(...)`` blocks.
    """
    print("Installing some base dependencies")
    sudo("apt-get update")
    sudo("apt-get install -y build-essential tcl8.5")
    print("Download with wget the redis installers for ubuntu")
    run("wget http://download.redis.io/releases/redis-stable.tar.gz")
    run("tar xzf redis-stable.tar.gz")
    print("Download and run make for redis")
    with cd("redis-stable"):
        run("make")
        run("make test")
        sudo("make install")
        print("Run the included script to install")
        with cd("utils"):
            sudo("./install_server.sh")
            sudo("service redis_6379 start")
    print("Run setup at startup")
    sudo("update-rc.d redis_6379 defaults")
    print("then setup security so that _only_ the localhost can access")
    sudo("echo 'bind 127.0.0.1' >> /etc/redis/6379.conf")
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,367
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/app/models.py
|
from app import db
from werkzeug.security import generate_password_hash, \
check_password_hash
class SimpleModel(db.Model):
    """Minimal declarative model: a single indexed integer primary key."""

    id = db.Column(db.Integer, index=True, primary_key=True)

    def __repr__(self):
        """Debug representation showing only the row id."""
        return "<ID #%r>" % self.id
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,368
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/app/views.py
|
import os, datetime
from app import app, db, logger
from flask import render_template, flash, redirect, session, url_for, \
request, g, send_from_directory
# - - - - - - - Custom Routing
@app.errorhandler(404)
def error_404(error):
    """Render the custom 404 page.

    Flask passes the error object to registered error handlers, so the
    handler must accept it -- the previous zero-argument signature raised
    a TypeError whenever a 404 actually occurred.  The 404 status code is
    also returned explicitly; otherwise the page is served with a 200.
    """
    return render_template("404.html"), 404
# - - - - - - - Main Routes
@app.route("/")
@app.route("/index", methods=['GET'])
def main_page():
    """Render the landing page (which carries the login form)."""
    return render_template("index.html")
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,369
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/config.py
|
import os

# SECURITY: prefer an environment-supplied secret key in production; the
# literal default is kept only so local development keeps working.
SECRET_KEY = os.environ.get('SECRET_KEY', "beautiful_little_world_is_mine")
BASEDIR = os.path.abspath(os.path.dirname(__file__))
# = = = = = = = For the Database Configuration
# Separate out into a 'db_config.py' for larger projects
if os.environ.get('DATABASE_URI') is None:
    # Default to a local SQLite database next to this file.
    DATABASE_URL = 'sqlite:///' + os.path.join(BASEDIR, 'app.db')
    SQLALCHEMY_MIGRATE_REPO = os.path.join(BASEDIR, 'db_repository')
    SQLALCHEMY_TRACK_MODIFICATIONS = True
else:
    DATABASE_URL = os.environ['DATABASE_URI']  # could be mongodb
"""
Sample MongoDB Setup, if the URL is not specified as an environement variable
MONGODB_DB = 'project1'
MONGODB_HOST = '192.168.1.35'
MONGODB_PORT = 12345
MONGODB_USERNAME = 'webapp'
MONGODB_PASSWORD = 'pwd123'
"""
# - - - - - - - Put Constants here
"""
Examples can include bits like constants for folder storage
"""
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,370
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/app/utils.py
|
# Any utility functions would go here
import string, random
def hash_generator(size=8,
                   chars=string.ascii_uppercase + string.digits):
    """Return a random string of ``size`` characters drawn from ``chars``.

    These strings are minted as authentication keys, so draw from the OS
    entropy pool (``random.SystemRandom``) rather than the predictable
    default Mersenne-Twister PRNG.
    """
    rng = random.SystemRandom()
    return ''.join(rng.choice(chars) for _ in range(size))
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,371
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/run.py
|
#!.venv/bin/python
"""Development entry point; production serves ``app`` via gunicorn."""
from app import app

if __name__ == "__main__":
    # Guard the server start so importing this module (e.g. from a WSGI
    # container or a test) has no side effects.
    app.run(host='0.0.0.0') # Does __not__ run as a debug! For use with gunicorn
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,372
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/fabric_scripts/fabfile.py
|
# Python script: run with `$ fab ___command___`
# Imports from nearby files
from fabric.api import *
from setup_server import *
# Environmental Stuff
# Hosts fabric connects to; replace the placeholder with the real server
# hostname or IP before running any task.
env.hosts = [
    'server.domaind.tld', # name or ip address of server
]
env.user = 'root' # name of user, you'll have to supply password at execution
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,373
|
valexandersaulys/flask-ladder
|
refs/heads/master
|
/manage.py
|
#!.venv/bin/python
"""
This can be pretty much kept straight
"""
from app import db, app
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
# Wire Alembic-backed migrations into the app and expose them as the `db`
# subcommand of this management script (e.g. `./manage.py db migrate`).
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__=="__main__":
    manager.run()
|
{"/app/csrf_protect.py": ["/app/__init__.py", "/app/utils.py"], "/app/__init__.py": ["/config.py"], "/app/accounts.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"], "/manage.py": ["/app/__init__.py"]}
|
2,389
|
lbowenwest/connectz-rust
|
refs/heads/master
|
/connectz/__main__.py
|
import sys
from . import run_file
# Usage: python -m connectz <input-file>
if len(sys.argv) != 2:
    # A usage error should go to stderr and exit non-zero so shell
    # scripts and CI can detect the failure; a bare sys.exit() exits 0.
    print(f'{sys.argv[0]}: Provide one input file', file=sys.stderr)
    sys.exit(1)
result = run_file(sys.argv[1])
print(result)
|
{"/connectz/__main__.py": ["/connectz/__init__.py"]}
|
2,390
|
lbowenwest/connectz-rust
|
refs/heads/master
|
/connectz/__init__.py
|
from .connectz import *
|
{"/connectz/__main__.py": ["/connectz/__init__.py"]}
|
2,392
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/model_carla_det.py
|
import time
import torch
import torch.nn as nn
import hyperparams as hyp
import numpy as np
import os
from model_base import Model
from nets.feat3dnet import Feat3dNet
from nets.detnet import DetNet
import utils.vox
import utils.samp
import utils.geom
import utils.misc
import utils.improc
import utils.basic
# import utils.track
# import frozen_flow_net
import utils.eval
# from tensorboardX import SummaryWriter
# from backend import saverloader, inputs
# from torchvision import datasets, transforms
np.set_printoptions(precision=2)
np.random.seed(0)
# EPS = 1e-6
# MAX_QUEUE = 10 # how many items before the summaryWriter flushes
class CARLA_DET(Model):
    """Training harness for the CARLA detection model."""

    def initialize_model(self):
        """Build the det model and freeze any subnets hyp asks for."""
        print("------ INITIALIZING MODEL OBJECTS ------")
        self.model = CarlaDetModel()
        # (flag, subnet attribute) pairs; a frozen subnet is put in eval
        # mode and excluded from gradient updates.
        freeze_plan = [
            (hyp.do_freeze_feat3d, 'feat3dnet'),
            (hyp.do_freeze_det, 'detnet'),
        ]
        for frozen, attr in freeze_plan:
            if frozen:
                subnet = getattr(self.model, attr)
                subnet.eval()
                self.set_requires_grad(subnet, False)
class CarlaDetModel(nn.Module):
    """3d detector: voxelize RGB-D inputs, featurize with Feat3dNet, and
    predict axis-aligned 3d boxes with DetNet.

    Bugfix vs. the previous revision: ``forward`` referenced the bare
    name ``set_name`` (a NameError) instead of ``self.set_name``.
    """
    def __init__(self):
        super(CarlaDetModel, self).__init__()
        # Subnets are only instantiated when their hyperparam flag is on.
        if hyp.do_feat3d:
            self.feat3dnet = Feat3dNet(in_dim=4)
        if hyp.do_det:
            self.detnet = DetNet()
    def prepare_common_tensors(self, feed):
        """Unpack the feed, choose a scene centroid, voxelize the inputs,
        and move the gt boxes into every camera coordinate frame.

        Returns False when the sample is unusable (could not find a
        centroid with enough inbound points, or too few valid boxes);
        True otherwise.
        """
        self.summ_writer = utils.improc.Summ_writer(
            writer=feed['writer'],
            global_step=feed['global_step'],
            log_freq=feed['set_log_freq'],
            fps=8,
            just_gif=True)
        global_step = feed['global_step']
        self.B = feed['set_batch_size']
        self.S = feed['set_seqlen']
        self.set_name = feed['set_name']
        # in det mode, we do not have much reason to have S>1
        assert(self.S==1)
        # __p/__u pack/unpack the sequence dim into/out of the batch dim.
        __p = lambda x: utils.basic.pack_seqdim(x, self.B)
        __u = lambda x: utils.basic.unpack_seqdim(x, self.B)
        self.N = hyp.N
        self.Z, self.Y, self.X = hyp.Z, hyp.Y, hyp.X
        self.Z2, self.Y2, self.X2 = int(self.Z/2), int(self.Y/2), int(self.X/2)
        self.pix_T_cams = feed['pix_T_cams']
        set_data_format = feed['set_data_format']
        self.S = feed['set_seqlen']
        self.origin_T_camRs = feed['origin_T_camRs']
        self.origin_T_camXs = feed['origin_T_camXs']
        # Relative transforms between the reference (R) and per-view (X)
        # camera frames, and to the first frame (0) of each.
        self.camX0s_T_camXs = utils.geom.get_camM_T_camXs(self.origin_T_camXs, ind=0)
        self.camXs_T_camX0s = __u(utils.geom.safe_inverse(__p(self.camX0s_T_camXs)))
        self.camR0s_T_camRs = utils.geom.get_camM_T_camXs(self.origin_T_camRs, ind=0)
        self.camRs_T_camR0s = __u(utils.geom.safe_inverse(__p(self.camR0s_T_camRs)))
        self.camRs_T_camXs = __u(torch.matmul(__p(self.origin_T_camRs).inverse(), __p(self.origin_T_camXs)))
        self.camXs_T_camRs = __u(__p(self.camRs_T_camXs).inverse())
        self.xyz_camXs = feed['xyz_camXs']
        if self.set_name=='test' or self.set_name=='val':
            # Fixed centroid for eval, so metrics are comparable.
            scene_centroid_x = 0.0
            scene_centroid_y = 1.0
            scene_centroid_z = 18.0
            scene_centroid = np.array([scene_centroid_x,
                                       scene_centroid_y,
                                       scene_centroid_z]).reshape([1, 3])
            self.scene_centroid = torch.from_numpy(scene_centroid).float().cuda()
            self.vox_util = utils.vox.Vox_util(self.Z, self.Y, self.X, self.set_name, scene_centroid=self.scene_centroid, assert_cube=True)
        else:
            # randomize a bit, as a form of data aug
            all_ok = False
            num_tries = 0
            while (not all_ok) and (num_tries < 100):
                scene_centroid_x = np.random.uniform(-8.0, 8.0)
                scene_centroid_y = np.random.uniform(-1.0, 3.0)
                scene_centroid_z = np.random.uniform(10.0, 26.0)
                scene_centroid = np.array([scene_centroid_x,
                                           scene_centroid_y,
                                           scene_centroid_z]).reshape([1, 3])
                self.scene_centroid = torch.from_numpy(scene_centroid).float().cuda()
                num_tries += 1
                all_ok = True
                self.vox_util = utils.vox.Vox_util(self.Z, self.Y, self.X, self.set_name, scene_centroid=self.scene_centroid, assert_cube=True)
                # we want to ensure this gives us a few points inbound for each element
                inb = __u(self.vox_util.get_inbounds(__p(self.xyz_camXs), self.Z, self.Y, self.X, already_mem=False))
                # this is B x S x N
                num_inb = torch.sum(inb.float(), axis=2)
                # this is B x S
                if torch.min(num_inb) < 300:
                    all_ok = False
            self.summ_writer.summ_scalar('centroid_sampling/num_tries', float(num_tries))
            self.summ_writer.summ_scalar('centroid_sampling/num_inb', torch.mean(num_inb).cpu().item())
            if num_tries >= 100:
                return False # not OK; do not train on this
        self.vox_size_X = self.vox_util.default_vox_size_X
        self.vox_size_Y = self.vox_util.default_vox_size_Y
        self.vox_size_Z = self.vox_util.default_vox_size_Z
        origin_T_camRs_ = self.origin_T_camRs.reshape(self.B, self.S, 1, 4, 4).repeat(1, 1, self.N, 1, 1).reshape(self.B*self.S, self.N, 4, 4)
        boxlists = feed['boxlists']
        self.scorelist_s = feed['scorelists']
        self.tidlist_s = feed['tidlists']
        # Parse raw 9-dof boxes into 19-dim lrt (len + ref_T_obj) format.
        boxlists_ = boxlists.reshape(self.B*self.S, self.N, 9)
        lrtlist_camRs_ = utils.misc.parse_boxes(boxlists_, origin_T_camRs_)
        self.lrtlist_camRs = lrtlist_camRs_.reshape(self.B, self.S, self.N, 19)
        self.lrtlist_camR0s = __u(utils.geom.apply_4x4_to_lrtlist(__p(self.camR0s_T_camRs), __p(self.lrtlist_camRs)))
        self.lrtlist_camXs = __u(utils.geom.apply_4x4_to_lrtlist(__p(self.camXs_T_camRs), __p(self.lrtlist_camRs)))
        self.lrtlist_camX0s = __u(utils.geom.apply_4x4_to_lrtlist(__p(self.camX0s_T_camXs), __p(self.lrtlist_camXs)))
        # Zero out the scores of boxes that fall outside the voxel volume.
        inbound_s = __u(utils.misc.rescore_lrtlist_with_inbound(
            __p(self.lrtlist_camX0s), __p(self.tidlist_s), self.Z, self.Y, self.X, self.vox_util))
        self.scorelist_s *= inbound_s
        # NOTE(review): the loop body below does not use `b`, so this
        # checks the same batch-wide sum B times; possibly it was meant
        # to inspect self.scorelist_s[b,0] per element -- confirm intent.
        for b in list(range(self.B)):
            if torch.sum(self.scorelist_s[:,0]) < (self.B/2): # not worth it; return early
                return False # not OK; do not train on this
        self.rgb_camXs = feed['rgb_camXs']
        self.summ_writer.summ_rgb('2d_inputs/rgb', self.rgb_camXs[:,0])
        # get 3d voxelized inputs
        self.occ_memXs = __u(self.vox_util.voxelize_xyz(__p(self.xyz_camXs), self.Z, self.Y, self.X))
        self.unp_memXs = __u(self.vox_util.unproject_rgb_to_mem(
            __p(self.rgb_camXs), self.Z, self.Y, self.X, __p(self.pix_T_cams)))
        # these are B x C x Z x Y x X
        self.summ_writer.summ_occs('3d_inputs/occ_memXs', torch.unbind(self.occ_memXs, dim=1))
        self.summ_writer.summ_unps('3d_inputs/unp_memXs', torch.unbind(self.unp_memXs, dim=1), torch.unbind(self.occ_memXs, dim=1))
        return True # OK
    def run_train(self, feed):
        """One training step: featurize, detect, summarize, and return
        (total_loss, results, False)."""
        total_loss = torch.tensor(0.0).cuda()
        __p = lambda x: utils.basic.pack_seqdim(x, self.B)
        __u = lambda x: utils.basic.unpack_seqdim(x, self.B)
        results = dict()
        # eliminate the seq dim, to make life easier
        lrtlist_camX = self.lrtlist_camXs[:, 0]
        rgb_camX0 = self.rgb_camXs[:,0]
        occ_memX0 = self.occ_memXs[:,0]
        unp_memX0 = self.unp_memXs[:,0]
        tidlist_g = self.tidlist_s[:,0]
        scorelist_g = self.scorelist_s[:,0]
        if hyp.do_feat3d:
            # start with a 4-channel feature map;
            feat_memX0_input = torch.cat([
                occ_memX0,
                unp_memX0*occ_memX0,
            ], dim=1)
            # featurize
            feat3d_loss, feat_halfmemX0 = self.feat3dnet(
                feat_memX0_input,
                self.summ_writer)
            total_loss += feat3d_loss
            self.summ_writer.summ_feat('feat3d/feat_memX0_input', feat_memX0_input, pca=True)
            self.summ_writer.summ_feat('feat3d/feat_halfmemX0', feat_halfmemX0, pca=True)
        if hyp.do_det:
            # this detector can only handle axis-aligned boxes (like rcnn)
            # so first let's inflate the boxes to the nearest axis lines
            axlrtlist_camX = utils.geom.inflate_to_axis_aligned_lrtlist(lrtlist_camX)
            lrtlist_memX = self.vox_util.apply_mem_T_ref_to_lrtlist(lrtlist_camX, self.Z, self.Y, self.X)
            axlrtlist_memX = utils.geom.inflate_to_axis_aligned_lrtlist(lrtlist_memX)
            self.summ_writer.summ_lrtlist_bev(
                'det/boxlist_g',
                occ_memX0[0:1],
                lrtlist_memX[0:1],
                scorelist_g,
                tidlist_g,
                self.vox_util,
                already_mem=True)
            self.summ_writer.summ_lrtlist_bev(
                'det/axboxlist_g',
                occ_memX0[0:1],
                axlrtlist_memX[0:1],
                scorelist_g,
                tidlist_g,
                self.vox_util,
                already_mem=True)
            # The detector runs at half resolution, matching feat3dnet.
            lrtlist_halfmemX = self.vox_util.apply_mem_T_ref_to_lrtlist(lrtlist_camX, self.Z2, self.Y2, self.X2)
            axlrtlist_halfmemX = utils.geom.inflate_to_axis_aligned_lrtlist(lrtlist_halfmemX)
            det_loss, boxlist_halfmemX_e, scorelist_e, tidlist_e, pred_objectness, sco, ove = self.detnet(
                axlrtlist_halfmemX,
                scorelist_g,
                feat_halfmemX0,
                self.summ_writer)
            total_loss += det_loss
            # Bring predictions back to camera coords for vis and eval.
            lrtlist_halfmemX_e = utils.geom.convert_boxlist_to_lrtlist(boxlist_halfmemX_e)
            lrtlist_camX_e = self.vox_util.apply_ref_T_mem_to_lrtlist(lrtlist_halfmemX_e, self.Z2, self.Y2, self.X2)
            lrtlist_e = lrtlist_camX_e[0:1]
            # lrtlist_g = lrtlist_camX[0:1] # true boxes
            lrtlist_g = axlrtlist_camX[0:1] # axis-aligned boxes
            scorelist_e = scorelist_e[0:1]
            scorelist_g = scorelist_g[0:1]
            lrtlist_e, lrtlist_g, scorelist_e, scorelist_g = utils.eval.drop_invalid_lrts(
                lrtlist_e, lrtlist_g, scorelist_e, scorelist_g)
            lenlist_e, _ = utils.geom.split_lrtlist(lrtlist_e)
            clist_e = utils.geom.get_clist_from_lrtlist(lrtlist_e)
            lenlist_g, _ = utils.geom.split_lrtlist(lrtlist_g)
            clist_g = utils.geom.get_clist_from_lrtlist(lrtlist_g)
            _, Ne, _ = list(lrtlist_e.shape)
            _, Ng, _ = list(lrtlist_g.shape)
            # only summ if there is at least one pred and one gt
            if Ne > 0 and Ng > 0:
                # Pairwise IOUs between every pred and every gt box.
                lrtlist_e_ = lrtlist_e.unsqueeze(2).repeat(1, 1, Ng, 1).reshape(1, Ne * Ng, -1)
                lrtlist_g_ = lrtlist_g.unsqueeze(1).repeat(1, Ne, 1, 1).reshape(1, Ne * Ng, -1)
                ious, _ = utils.geom.get_iou_from_corresponded_lrtlists(lrtlist_e_, lrtlist_g_)
                ious = ious.reshape(1, Ne, Ng)
                ious_e = torch.max(ious, dim=2)[0]
                self.summ_writer.summ_lrtlist(
                    'det/boxlist_eg',
                    rgb_camX0[0:1],
                    torch.cat((lrtlist_g, lrtlist_e), dim=1),
                    torch.cat((ious_e.new_ones(1, Ng), ious_e), dim=1),
                    torch.cat([torch.ones(1, Ng).long().cuda(),
                               torch.ones(1, Ne).long().cuda()+1], dim=1),
                    self.pix_T_cams[0:1, 0])
                self.summ_writer.summ_lrtlist_bev(
                    'det/boxlist_bev_eg',
                    occ_memX0[0:1],
                    torch.cat((lrtlist_g, lrtlist_e), dim=1),
                    torch.cat((ious_e.new_ones(1, Ng), ious_e), dim=1),
                    torch.cat([torch.ones(1, Ng).long().cuda(),
                               torch.ones(1, Ne).long().cuda()+1], dim=1),
                    self.vox_util,
                    already_mem=False)
            # mAP at several IOU thresholds, in 3d and in bird's-eye view.
            ious = [0.3, 0.4, 0.5, 0.6, 0.7]
            maps_3d, maps_2d = utils.eval.get_mAP_from_lrtlist(lrtlist_e, scorelist_e, lrtlist_g, ious)
            for ind, overlap in enumerate(ious):
                self.summ_writer.summ_scalar('ap_3d/%.2f_iou' % overlap, maps_3d[ind])
                self.summ_writer.summ_scalar('ap_bev/%.2f_iou' % overlap, maps_2d[ind])
        self.summ_writer.summ_scalar('loss', total_loss.cpu().item())
        return total_loss, results, False
    def forward(self, feed):
        """Dispatch on set_name; returns (loss, results, skip_flag)."""
        data_ok = self.prepare_common_tensors(feed)
        if not data_ok:
            # return early
            total_loss = torch.tensor(0.0).cuda()
            return total_loss, None, True
        else:
            if self.set_name=='train':
                return self.run_train(feed)
            else:
                # Bugfix: previously printed the undefined bare name
                # `set_name`, raising a NameError instead of this message.
                print('not prepared for this set_name:', self.set_name)
                assert(False)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,393
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/model_carla_ego.py
|
import torch
import torch.nn as nn
import hyperparams as hyp
import numpy as np
# import imageio,scipy
from model_base import Model
from nets.feat3dnet import Feat3dNet
from nets.egonet import EgoNet
import torch.nn.functional as F
import utils.vox
import utils.samp
import utils.geom
import utils.improc
import utils.basic
import utils.eval
import utils.misc
np.set_printoptions(precision=2)
np.random.seed(0)
class CARLA_EGO(Model):
    """Training harness for the CARLA ego-motion model."""

    def initialize_model(self):
        """Build the ego model and freeze any subnets hyp asks for."""
        print('------ INITIALIZING MODEL OBJECTS ------')
        self.model = CarlaEgoModel()
        # (flag, subnet attribute) pairs; a frozen subnet is put in eval
        # mode and excluded from gradient updates.
        freeze_plan = [
            (hyp.do_freeze_feat3d, 'feat3dnet'),
            (hyp.do_freeze_ego, 'egonet'),
        ]
        for frozen, attr in freeze_plan:
            if frozen:
                subnet = getattr(self.model, attr)
                subnet.eval()
                self.set_requires_grad(subnet, False)
class CarlaEgoModel(nn.Module):
    """Ego-motion model: featurize two voxelized frames with Feat3dNet and
    regress the relative camera pose with EgoNet.

    Bugfix vs. the previous revision: ``forward`` referenced the bare
    name ``set_name`` (a NameError) instead of ``self.set_name``.
    """
    def __init__(self):
        super(CarlaEgoModel, self).__init__()
        # Subnets are only instantiated when their hyperparam flag is on.
        if hyp.do_feat3d:
            self.feat3dnet = Feat3dNet(in_dim=4)
        if hyp.do_ego:
            self.egonet = EgoNet(
                num_scales=hyp.ego_num_scales,
                num_rots=hyp.ego_num_rots,
                max_deg=hyp.ego_max_deg,
                max_disp_z=hyp.ego_max_disp_z,
                max_disp_y=hyp.ego_max_disp_y,
                max_disp_x=hyp.ego_max_disp_x)
    def prepare_common_tensors(self, feed):
        """Unpack the feed and voxelize the inputs at a fixed centroid.

        Always returns True (the ego task has no validity filtering).
        """
        results = dict()
        self.summ_writer = utils.improc.Summ_writer(
            writer=feed['writer'],
            global_step=feed['global_step'],
            log_freq=feed['set_log_freq'],
            fps=8,
            just_gif=True)
        global_step = feed['global_step']
        self.B = feed['set_batch_size']
        self.S = feed['set_seqlen']
        self.set_name = feed['set_name']
        # __p/__u pack/unpack the sequence dim into/out of the batch dim.
        __p = lambda x: utils.basic.pack_seqdim(x, self.B)
        __u = lambda x: utils.basic.unpack_seqdim(x, self.B)
        self.H, self.W, self.V, self.N = hyp.H, hyp.W, hyp.V, hyp.N
        self.PH, self.PW = hyp.PH, hyp.PW
        # Grid resolution may differ per split.
        if self.set_name=='test':
            self.Z, self.Y, self.X = hyp.Z_test, hyp.Y_test, hyp.X_test
        elif self.set_name=='val':
            self.Z, self.Y, self.X = hyp.Z_val, hyp.Y_val, hyp.X_val
        else:
            self.Z, self.Y, self.X = hyp.Z, hyp.Y, hyp.X
        self.Z2, self.Y2, self.X2 = int(self.Z/2), int(self.Y/2), int(self.X/2)
        self.Z4, self.Y4, self.X4 = int(self.Z/4), int(self.Y/4), int(self.X/4)
        self.ZZ, self.ZY, self.ZX = hyp.ZZ, hyp.ZY, hyp.ZX
        self.pix_T_cams = feed['pix_T_cams']
        self.S = feed['set_seqlen']
        # in this mode, we never use R coords, so we can drop the R/X notation
        self.origin_T_cams = feed['origin_T_camXs']
        self.xyz_cams = feed['xyz_camXs']
        # Fixed scene centroid (no augmentation in ego mode).
        scene_centroid_x = 0.0
        scene_centroid_y = 1.0
        scene_centroid_z = 18.0
        scene_centroid = np.array([scene_centroid_x,
                                   scene_centroid_y,
                                   scene_centroid_z]).reshape([1, 3])
        self.scene_centroid = torch.from_numpy(scene_centroid).float().cuda()
        self.vox_util = utils.vox.Vox_util(self.Z, self.Y, self.X, self.set_name, scene_centroid=self.scene_centroid, assert_cube=True)
        self.vox_size_X = self.vox_util.default_vox_size_X
        self.vox_size_Y = self.vox_util.default_vox_size_Y
        self.vox_size_Z = self.vox_util.default_vox_size_Z
        self.rgb_cams = feed['rgb_camXs']
        # get 3d voxelized inputs
        self.occ_mems = __u(self.vox_util.voxelize_xyz(__p(self.xyz_cams), self.Z, self.Y, self.X))
        self.unp_mems = __u(self.vox_util.unproject_rgb_to_mem(
            __p(self.rgb_cams), self.Z, self.Y, self.X, __p(self.pix_T_cams)))
        # these are B x C x Z x Y x X
        self.summ_writer.summ_occs('3d_inputs/occ_mems', torch.unbind(self.occ_mems, dim=1))
        self.summ_writer.summ_unps('3d_inputs/unp_mems', torch.unbind(self.unp_mems, dim=1), torch.unbind(self.occ_mems, dim=1))
        return True # OK
    def run_train(self, feed):
        """One training step: featurize both frames, estimate the relative
        pose, and return (total_loss, results, False)."""
        total_loss = torch.tensor(0.0).cuda()
        __p = lambda x: utils.basic.pack_seqdim(x, self.B)
        __u = lambda x: utils.basic.unpack_seqdim(x, self.B)
        results = dict()
        assert(hyp.do_ego)
        assert(self.S==2)
        # Ground-truth relative pose between the two frames.
        origin_T_cam0 = self.origin_T_cams[:, 0]
        origin_T_cam1 = self.origin_T_cams[:, 1]
        cam0_T_cam1 = utils.basic.matmul2(utils.geom.safe_inverse(origin_T_cam0), origin_T_cam1)
        feat_mems_input = torch.cat([
            self.occ_mems,
            self.occ_mems*self.unp_mems,
        ], dim=2)
        feat_loss, feat_halfmems_ = self.feat3dnet(__p(feat_mems_input), self.summ_writer)
        feat_halfmems = __u(feat_halfmems_)
        total_loss += feat_loss
        ego_loss, cam0_T_cam1_e, _ = self.egonet(
            feat_halfmems[:,0],
            feat_halfmems[:,1],
            cam0_T_cam1,
            self.vox_util,
            self.summ_writer)
        total_loss += ego_loss
        # try aligning the frames, for a qualitative result
        occ_mem0_e = self.vox_util.apply_4x4_to_vox(cam0_T_cam1_e, self.occ_mems[:,1])
        self.summ_writer.summ_occs('ego/occs_aligned', [occ_mem0_e, self.occ_mems[:,0]])
        self.summ_writer.summ_occs('ego/occs_unaligned', [self.occ_mems[:,0], self.occ_mems[:,1]])
        self.summ_writer.summ_scalar('loss', total_loss.cpu().item())
        return total_loss, results, False
    def forward(self, feed):
        """Dispatch on set_name; returns (loss, results, skip_flag)."""
        data_ok = self.prepare_common_tensors(feed)
        if not data_ok:
            # return early
            total_loss = torch.tensor(0.0).cuda()
            return total_loss, None, True
        else:
            if self.set_name=='train':
                return self.run_train(feed)
            else:
                # Bugfix: previously printed the undefined bare name
                # `set_name`, raising a NameError instead of this message.
                print('not prepared for this set_name:', self.set_name)
                assert(False)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,394
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/exp_carla_static.py
|
from exp_base import *
############## choose an experiment ##############
# The last uncommented assignment wins; earlier lines are a kept-for-the-
# record history of experiment runs (the repo's convention).
current = 'builder'
current = 'trainer'
# current = 'tester_basic'
# `mod` tags the run; each reassignment below logs what that run tried.
mod = '"sta00"' # nothing; builder
mod = '"sta01"' # just prep and return
mod = '"sta02"' # again, fewer prints
mod = '"sta03"' # run feat3d forward; drop the sparse stuff
mod = '"sta04"' # really run it
mod = '"sta05"' # again
mod = '"sta06"' # warp; show altfeat
mod = '"sta07"' # ensure either ==1 or a==b
mod = '"sta08"' # try emb
mod = '"sta09"' # train a while
mod = '"sta10"' #
mod = '"sta11"' # show altfeat input
mod = '"sta12"' #
mod = '"sta13"' # train occ
mod = '"sta14"' # move things to R
mod = '"sta14"' # do view
mod = '"sta15"' # encode in X0
mod = '"sta16"' #
mod = '"sta17"' # show rgb_camX1, so i can understand the inbound idea better
mod = '"sta18"' # show inbound separately
mod = '"sta19"' # allow 0 to 32m
mod = '"sta20"' # builder
mod = '"sta21"' # show occ_memXs
mod = '"sta22"' # wider bounds please
mod = '"sta23"' # properly combine bounds with centorid
mod = '"sta24"' # train a hwile
mod = '"sta25"' # same but encode in Xs and warp to R then X0
mod = '"sta26"' # use resnet3d
mod = '"sta27"' # skipnet; randomize the centroid a bit
mod = '"sta28"' # wider rand, and inbound check
mod = '"sta29"' # handle the false return
mod = '"sta30"' # add emb2d
mod = '"sta31"' # freeze the slow model
mod = '"sta32"' # 2d parts
mod = '"sta33"' # fewer prints
mod = '"sta34"' # nice suffixes; JUST 2d learning
mod = '"sta35"' # fix bug
mod = '"sta36"' # better summ suffix
mod = '"sta37"' # tell me about neg pool size
mod = '"sta38"' # fix small bug in the hyp lettering
mod = '"sta39"' # cleaned up hyps
mod = '"sta40"' # weak smooth coeff on feats
mod = '"sta41"' # run occnet on altfeat instead
mod = '"sta42"' # redo
mod = '"sta43"' # replication padding
mod = '"sta44"' # pret 170k 02_s2_m128x32x128_p64x192_1e-3_F2_d32_F3_d32_s.01_O_c1_s.01_V_d32_e1_E2_e.1_n4_d32_c1_E3_n2_c1_mags7i3t_sta41
mod = '"sta45"' # inspect and maybe fix the loading; log10
mod = '"sta46"' # init slow in model base after saverloader
mod = '"sta47"' # zero padding; log500
mod = '"sta48"' # replication padding; log500
mod = '"sta49"' # repeat after deleting some code
mod = '"sta50"' # pret 02_s2_m128x32x128_1e-3_F3_d32_s.01_O_c2_s.1_E3_n2_c.1_mags7i3t_sta48
mod = '"sta51"' # same deal after some cleanup
############## exps ##############
# Each exp is an ordered list of group names; the exec loop at the bottom
# of the file applies every setting from every listed group.
exps['builder'] = [
    'carla_static', # mode
    'carla_multiview_10_data', # dataset
    '16-4-16_bounds',
    '3_iters',
    'lr0',
    'B1',
    'no_shuf',
    'train_feat3d',
    # 'train_occ',
    # 'train_view',
    # 'train_emb2d',
    # 'train_emb3d',
    'log1',
]
exps['trainer'] = [
    'carla_static', # mode
    'carla_multiview_train_data', # dataset
    '16-4-16_bounds',
    '300k_iters',
    'lr3',
    'B2',
    'pretrained_feat3d',
    'pretrained_occ',
    'train_feat3d',
    'train_emb3d',
    'train_occ',
    # 'train_view',
    # 'train_feat2d',
    # 'train_emb2d',
    'log500',
]
############## groups ##############
# Each group is a list of "name = value" strings, verified by _verify_
# and exec'd into module globals by the loop at the bottom of the file.
groups['carla_static'] = ['do_carla_static = True']
groups['train_feat2d'] = [
    'do_feat2d = True',
    'feat2d_dim = 32',
    # 'feat2d_smooth_coeff = 0.1',
]
groups['train_occ'] = [
    'do_occ = True',
    'occ_coeff = 2.0',
    'occ_smooth_coeff = 0.1',
]
groups['train_view'] = [
    'do_view = True',
    'view_depth = 32',
    'view_l1_coeff = 1.0',
]
groups['train_emb2d'] = [
    'do_emb2d = True',
    # 'emb2d_smooth_coeff = 0.01',
    'emb2d_ce_coeff = 1.0',
    'emb2d_l2_coeff = 0.1',
    'emb2d_mindist = 32.0',
    'emb2d_num_samples = 4',
    # 'do_view = True',
    # 'view_depth = 32',
    # 'view_l1_coeff = 1.0',
]
groups['train_emb3d'] = [
    'do_emb3d = True',
    'emb3d_ce_coeff = 0.1',
    # 'emb3d_mindist = 8.0',
    # 'emb3d_l2_coeff = 0.1',
    'emb3d_num_samples = 2',
]
############## datasets ##############
# dims for mem
# Voxel grid resolution (Z depth, Y height, X width), derived from SIZE.
SIZE = 32
Z = int(SIZE*4)
Y = int(SIZE*1)
X = int(SIZE*4)
K = 2 # how many objects to consider
# S: sequence length; H/W: input image height/width in pixels.
S = 2
H = 128
W = 384
# H and W for proj stuff
PH = int(H/2.0)
PW = int(W/2.0)
############## verify and execute ##############
def _verify_(s):
varname, eq, val = s.split(' ')
assert varname in globals()
assert eq == '='
assert type(s) is type('')
# Resolve the chosen experiment: every group it names must exist, and
# every setting inside each group is verified and then exec'd into module
# globals -- this is how the hyperparameter overrides are applied.
print(current)
assert current in exps
for group in exps[current]:
    print("  " + group)
    assert group in groups
    for s in groups[group]:
        print("    " + s)
        _verify_(s)
        exec(s)
# Finally apply the run tag itself through the same verify/exec path.
s = "mod = " + mod
_verify_(s)
exec(s)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,395
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/model_carla_static.py
|
import torch
import torch.nn as nn
import hyperparams as hyp
import numpy as np
# import imageio,scipy
from model_base import Model
from nets.occnet import OccNet
from nets.feat2dnet import Feat2dNet
from nets.feat3dnet import Feat3dNet
from nets.emb2dnet import Emb2dNet
from nets.emb3dnet import Emb3dNet
from nets.viewnet import ViewNet
import torch.nn.functional as F
import utils.vox
import utils.samp
import utils.geom
import utils.improc
import utils.basic
import utils.eval
import utils.misc
np.set_printoptions(precision=2)
np.random.seed(0)
class CARLA_STATIC(Model):
    """Training harness for the CARLA static-scene model."""

    def initialize_model(self):
        """Build the static model and freeze the subnets hyp asks for.

        The slow (momentum-target) feature nets are frozen whenever the
        corresponding emb objective is enabled.
        """
        print('------ INITIALIZING MODEL OBJECTS ------')
        self.model = CarlaStaticModel()
        # (flag, subnet attribute) pairs; a frozen subnet is put in eval
        # mode and excluded from gradient updates.
        freeze_plan = [
            (hyp.do_freeze_feat3d, 'feat3dnet'),
            (hyp.do_freeze_view, 'viewnet'),
            (hyp.do_freeze_occ, 'occnet'),
            (hyp.do_freeze_emb2d, 'emb2dnet'),
            # the slow nets are frozen whenever their emb loss is active
            (hyp.do_emb2d, 'feat2dnet_slow'),
            (hyp.do_emb3d, 'feat3dnet_slow'),
        ]
        for frozen, attr in freeze_plan:
            if frozen:
                subnet = getattr(self.model, attr)
                subnet.eval()
                self.set_requires_grad(subnet, False)
class CarlaStaticModel(nn.Module):
    """Static-scene CARLA model: 2d/3d feature nets plus occ/view/embedding heads.

    Sub-nets are constructed only when the corresponding hyp.do_* flag is on.
    forward() prepares shared tensors from the feed dict and dispatches to
    run_train or run_test based on the feed's set_name.
    """
    def __init__(self):
        super(CarlaStaticModel, self).__init__()
        if hyp.do_occ:
            self.occnet = OccNet()
        if hyp.do_view:
            self.viewnet = ViewNet()
        if hyp.do_feat2d:
            self.feat2dnet = Feat2dNet()
        if hyp.do_emb2d:
            self.emb2dnet = Emb2dNet()
            # make a slow (target-style) copy of the 2d featurizer
            self.feat2dnet_slow = Feat2dNet(in_dim=3)
        if hyp.do_feat3d:
            self.feat3dnet = Feat3dNet(in_dim=4)
        if hyp.do_emb3d:
            self.emb3dnet = Emb3dNet()
            # make a slow (target-style) copy of the 3d featurizer
            self.feat3dnet_slow = Feat3dNet(in_dim=4)

    def prepare_common_tensors(self, feed):
        """Unpack the feed, choose a scene centroid, and build voxelized inputs.

        Returns True on success; False when no valid random centroid could be
        found (the caller should skip this iteration).
        """
        results = dict()
        self.summ_writer = utils.improc.Summ_writer(
            writer=feed['writer'],
            global_step=feed['global_step'],
            log_freq=feed['set_log_freq'],
            fps=8,
            just_gif=True)
        global_step = feed['global_step']
        self.B = feed['set_batch_size']
        self.S = feed['set_seqlen']
        self.set_name = feed['set_name']
        # pack/unpack helpers: fold/unfold the seq dim into/out of the batch dim
        __p = lambda x: utils.basic.pack_seqdim(x, self.B)
        __u = lambda x: utils.basic.unpack_seqdim(x, self.B)
        self.H, self.W, self.V, self.N = hyp.H, hyp.W, hyp.V, hyp.N
        self.PH, self.PW = hyp.PH, hyp.PW
        # if self.set_name=='test':
        #     self.Z, self.Y, self.X = hyp.Z_test, hyp.Y_test, hyp.X_test
        # elif self.set_name=='val':
        #     self.Z, self.Y, self.X = hyp.Z_val, hyp.Y_val, hyp.X_val
        # else:
        self.Z, self.Y, self.X = hyp.Z, hyp.Y, hyp.X
        self.Z2, self.Y2, self.X2 = int(self.Z/2), int(self.Y/2), int(self.X/2)
        self.Z4, self.Y4, self.X4 = int(self.Z/4), int(self.Y/4), int(self.X/4)
        self.ZZ, self.ZY, self.ZX = hyp.ZZ, hyp.ZY, hyp.ZX
        self.pix_T_cams = feed['pix_T_cams']
        set_data_format = feed['set_data_format']
        self.S = feed['set_seqlen']
        self.origin_T_camRs = feed['origin_T_camRs']
        self.origin_T_camXs = feed['origin_T_camXs']
        # rigid transforms between the reference (R), first (X0), and per-frame (X) cams
        self.camX0s_T_camXs = utils.geom.get_camM_T_camXs(self.origin_T_camXs, ind=0)
        self.camR0s_T_camRs = utils.geom.get_camM_T_camXs(self.origin_T_camRs, ind=0)
        self.camRs_T_camR0s = __u(utils.geom.safe_inverse(__p(self.camR0s_T_camRs)))
        self.camRs_T_camXs = __u(torch.matmul(__p(self.origin_T_camRs).inverse(), __p(self.origin_T_camXs)))
        self.camXs_T_camRs = __u(__p(self.camRs_T_camXs).inverse())
        self.xyz_camXs = feed['xyz_camXs']
        self.xyz_camRs = __u(utils.geom.apply_4x4(__p(self.camRs_T_camXs), __p(self.xyz_camXs)))
        self.xyz_camX0s = __u(utils.geom.apply_4x4(__p(self.camX0s_T_camXs), __p(self.xyz_camXs)))
        if self.set_name=='test' or self.set_name=='val':
            # fixed centroid, for deterministic eval
            scene_centroid_x = 0.0
            scene_centroid_y = 1.0
            scene_centroid_z = 18.0
            scene_centroid = np.array([scene_centroid_x,
                                       scene_centroid_y,
                                       scene_centroid_z]).reshape([1, 3])
            self.scene_centroid = torch.from_numpy(scene_centroid).float().cuda()
            # bugfix (review): the fixed-centroid branch previously set only the
            # three scalars, leaving self.scene_centroid/self.vox_util unset,
            # although both are used unconditionally below.
            self.vox_util = utils.vox.Vox_util(self.Z, self.Y, self.X, self.set_name, scene_centroid=self.scene_centroid, assert_cube=True)
        else:
            # randomize a bit, as a form of data aug
            all_ok = False
            num_tries = 0
            while (not all_ok) and (num_tries < 100):
                scene_centroid_x = np.random.uniform(-8.0, 8.0)
                scene_centroid_y = np.random.uniform(-1.5, 3.0)
                scene_centroid_z = np.random.uniform(10.0, 26.0)
                scene_centroid = np.array([scene_centroid_x,
                                           scene_centroid_y,
                                           scene_centroid_z]).reshape([1, 3])
                self.scene_centroid = torch.from_numpy(scene_centroid).float().cuda()
                num_tries += 1
                all_ok = True
                self.vox_util = utils.vox.Vox_util(self.Z, self.Y, self.X, self.set_name, scene_centroid=self.scene_centroid, assert_cube=True)
                # we want to ensure this gives us a few points inbound for each element
                inb = __u(self.vox_util.get_inbounds(__p(self.xyz_camX0s), self.Z, self.Y, self.X, already_mem=False))
                # this is B x S x N
                num_inb = torch.sum(inb.float(), axis=2)
                # this is B x S
                if torch.min(num_inb) < 300:
                    all_ok = False
            self.summ_writer.summ_scalar('centroid_sampling/num_tries', float(num_tries))
            self.summ_writer.summ_scalar('centroid_sampling/num_inb', torch.mean(num_inb).cpu().item())
            if num_tries >= 100:
                # could not find a centroid with enough inbound points
                return False
        self.vox_size_X = self.vox_util.default_vox_size_X
        self.vox_size_Y = self.vox_util.default_vox_size_Y
        self.vox_size_Z = self.vox_util.default_vox_size_Z
        origin_T_camRs_ = self.origin_T_camRs.reshape(self.B, self.S, 1, 4, 4).repeat(1, 1, self.N, 1, 1).reshape(self.B*self.S, self.N, 4, 4)
        boxlists = feed['boxlists']
        self.rgb_camXs = feed['rgb_camXs']
        ## get the projected depthmap and inbound mask
        self.depth_camXs_, self.valid_camXs_ = utils.geom.create_depth_image(__p(self.pix_T_cams), __p(self.xyz_camXs), self.H, self.W)
        self.dense_xyz_camXs_ = utils.geom.depth2pointcloud(self.depth_camXs_, __p(self.pix_T_cams))
        # we need to go to X0 to see what will be inbounds
        self.dense_xyz_camX0s_ = utils.geom.apply_4x4(__p(self.camX0s_T_camXs), self.dense_xyz_camXs_)
        self.inbound_camXs_ = self.vox_util.get_inbounds(self.dense_xyz_camX0s_, self.Z, self.Y, self.X).float()
        self.inbound_camXs_ = torch.reshape(self.inbound_camXs_, [self.B*self.S, 1, self.H, self.W])
        self.depth_camXs = __u(self.depth_camXs_)
        self.valid_camXs = __u(self.valid_camXs_) * __u(self.inbound_camXs_)
        self.summ_writer.summ_oned('2d_inputs/depth_camX0', self.depth_camXs[:,0], maxval=32.0)
        self.summ_writer.summ_oned('2d_inputs/valid_camX0', self.valid_camXs[:,0], norm=False)
        self.summ_writer.summ_rgb('2d_inputs/rgb_camX0', self.rgb_camXs[:,0])
        # get 3d voxelized inputs
        self.occ_memXs = __u(self.vox_util.voxelize_xyz(__p(self.xyz_camXs), self.Z, self.Y, self.X))
        self.unp_memXs = __u(self.vox_util.unproject_rgb_to_mem(
            __p(self.rgb_camXs), self.Z, self.Y, self.X, __p(self.pix_T_cams)))
        # these are B x C x Z x Y x X
        self.summ_writer.summ_occs('3d_inputs/occ_memXs', torch.unbind(self.occ_memXs, dim=1))
        self.summ_writer.summ_unps('3d_inputs/unp_memXs', torch.unbind(self.unp_memXs, dim=1), torch.unbind(self.occ_memXs, dim=1))
        return True # OK

    def run_train(self, feed):
        """One training iteration: run the enabled nets and sum their losses.

        Returns (total_loss, results, False) — the trailing flag means
        "do not skip this iteration".
        """
        results = dict()
        global_step = feed['global_step']
        total_loss = torch.tensor(0.0).cuda()
        __p = lambda x: utils.basic.pack_seqdim(x, self.B)
        __u = lambda x: utils.basic.unpack_seqdim(x, self.B)
        #####################
        ## run the nets
        #####################
        if hyp.do_feat2d:
            # NOTE(review): feat2d_loss is never added to total_loss; confirm the
            # 2d featurizer is meant to train only via the emb2d losses below.
            feat2d_loss, feat_camX0 = self.feat2dnet(
                self.rgb_camXs[:,0],
                self.summ_writer,
            )
            if hyp.do_emb2d:
                # for stability, we will also use a slow net here
                _, altfeat_camX0 = self.feat2dnet_slow(self.rgb_camXs[:,0])
        if hyp.do_feat3d:
            # start with a 4-channel feature map: occupancy + masked rgb
            feat_memXs_input = torch.cat([
                self.occ_memXs,
                self.unp_memXs*self.occ_memXs,
            ], dim=2)
            # featurize every frame except the first (the first feeds the slow net)
            feat3d_loss, feat_memXs_ = self.feat3dnet(
                __p(feat_memXs_input[:,1:]), self.summ_writer)
            feat_memXs = __u(feat_memXs_)
            total_loss += feat3d_loss
            valid_memXs = torch.ones_like(feat_memXs[:,:,0:1])
            # warp the per-frame features into the shared reference (R) frame
            feat_memRs = self.vox_util.apply_4x4s_to_voxs(self.camRs_T_camXs[:,1:], feat_memXs)
            valid_memRs = self.vox_util.apply_4x4s_to_voxs(self.camRs_T_camXs[:,1:], valid_memXs)
            # these are B x S x C x Z2 x Y2 x X2
            feat_memR = utils.basic.reduce_masked_mean(
                feat_memRs, valid_memRs, dim=1)
            valid_memR = torch.max(valid_memRs, dim=1)[0]
            # these are B x C x Z2 x Y2 x X2
            self.summ_writer.summ_feat('feat3d/feat_output_agg', feat_memR, valid_memR, pca=True)
            if hyp.do_emb3d:
                # the slow net sees frame 0, giving an independent target volume
                _, altfeat_memR = self.feat3dnet_slow(feat_memXs_input[:,0])
                altvalid_memR = torch.ones_like(altfeat_memR[:,0:1])
                self.summ_writer.summ_feat('feat3d/altfeat_input', feat_memXs_input[:,0], pca=True)
                self.summ_writer.summ_feat('feat3d/altfeat_output', altfeat_memR, pca=True)
        if hyp.do_occ:
            assert(hyp.do_feat3d)
            occ_memR_sup, free_memR_sup, _, _ = self.vox_util.prep_occs_supervision(
                self.camRs_T_camXs,
                self.xyz_camXs,
                self.Z2, self.Y2, self.X2,
                agg=True)
            occ_loss, occ_memR_pred = self.occnet(
                feat_memR,
                occ_memR_sup,
                free_memR_sup,
                valid_memR,
                self.summ_writer)
            total_loss += occ_loss
        if hyp.do_view:
            assert(hyp.do_feat3d)
            # decode the perspective volume into an image
            view_loss, rgb_camX0_e, viewfeat_camX0 = self.viewnet(
                self.pix_T_cams[:,0],
                self.camXs_T_camRs[:,0],
                feat_memR,
                self.rgb_camXs[:,0],
                self.vox_util,
                valid=self.valid_camXs[:,0],
                summ_writer=self.summ_writer)
            total_loss += view_loss
        if hyp.do_emb2d:
            assert(hyp.do_feat2d)
            if hyp.do_view:
                # anchor against the bottom-up 2d net
                valid_camX0 = F.interpolate(self.valid_camXs[:,0], scale_factor=0.5, mode='nearest')
                emb2d_loss, _ = self.emb2dnet(
                    viewfeat_camX0,
                    feat_camX0,
                    valid_camX0,
                    summ_writer=self.summ_writer,
                    suffix='_view')
                total_loss += emb2d_loss
            # anchor against the slow net
            emb2d_loss, _ = self.emb2dnet(
                feat_camX0,
                altfeat_camX0,
                torch.ones_like(feat_camX0[:,0:1]),
                summ_writer=self.summ_writer,
                suffix='_slow')
            total_loss += emb2d_loss
        if hyp.do_emb3d:
            assert(hyp.do_feat3d)
            # compute 3D ML
            emb3d_loss = self.emb3dnet(
                feat_memR,
                altfeat_memR,
                valid_memR.round(),
                altvalid_memR.round(),
                self.summ_writer)
            total_loss += emb3d_loss
        self.summ_writer.summ_scalar('loss', total_loss.cpu().item())
        return total_loss, results, False

    def run_test(self, feed):
        """One eval iteration: visualize GT boxes and measure retrieval precision.

        Returns (total_loss, None, skip_flag); skip_flag is True when an object
        mask is too small to evaluate.
        """
        results = dict()
        global_step = feed['global_step']
        total_loss = torch.tensor(0.0).cuda()
        # total_loss = torch.autograd.Variable(0.0, requires_grad=True).cuda()
        __p = lambda x: utils.basic.pack_seqdim(x, self.B)
        __u = lambda x: utils.basic.unpack_seqdim(x, self.B)
        # get the boxes
        boxlist_camRs = feed['boxlists']
        tidlist_s = feed['tidlists'] # coordinate-less and plural
        scorelist_s = feed['scorelists'] # coordinate-less and plural
        lrtlist_camRs = __u(utils.geom.convert_boxlist_to_lrtlist(__p(boxlist_camRs))).reshape(self.B, self.S, self.N, 19)
        lrtlist_camXs = __u(utils.geom.apply_4x4_to_lrtlist(__p(self.camXs_T_camRs), __p(lrtlist_camRs)))
        # these are B x S x N x 19
        self.summ_writer.summ_lrtlist('obj/lrtlist_camX0', self.rgb_camXs[:,0], lrtlist_camXs[:,0],
                                      scorelist_s[:,0], tidlist_s[:,0], self.pix_T_cams[:,0])
        # NOTE(review): self.rgb_camRs is never assigned in prepare_common_tensors,
        # so this line will raise AttributeError if reached — confirm the intended
        # image for the R-frame visualization before relying on the test path.
        self.summ_writer.summ_lrtlist('obj/lrtlist_camR0', self.rgb_camRs[:,0], lrtlist_camRs[:,0],
                                      scorelist_s[:,0], tidlist_s[:,0], self.pix_T_cams[:,0])
        mask_memXs = __u(utils.vox.assemble_padded_obj_masklist(
            __p(lrtlist_camXs), __p(scorelist_s), self.Z2, self.Y2, self.X2, coeff=1.0))
        mask_memXs = torch.sum(mask_memXs, dim=2).clamp(0, 1)
        self.summ_writer.summ_oneds('obj/mask_memXs', torch.unbind(mask_memXs, dim=1), bev=True)
        for b in list(range(self.B)):
            for s in list(range(self.S)):
                mask = mask_memXs[b,s]
                if torch.sum(mask) < 2.0:
                    # return early: too few object voxels to measure retrieval
                    return total_loss, None, True
        # next: i want to treat features differently if they are in obj masks vs not
        # in particular, i want a different kind of retrieval metric
        if hyp.do_feat3d:
            # occ_memXs is B x S x 1 x Z x Y x X; unp_memXs is B x S x 3 x Z x Y x X
            # bugfix (review): these reads were self.occXs/self.unpXs, attributes
            # that are never assigned anywhere; prepare_common_tensors stores
            # them as occ_memXs/unp_memXs.
            feat_memXs_input = torch.cat([self.occ_memXs, self.occ_memXs*self.unp_memXs], dim=2)
            feat_memXs_input_ = __p(feat_memXs_input)
            # NOTE(review): unlike run_train, this unpacks three values and passes
            # comp_mask — confirm against Feat3dNet's actual signature.
            feat_memXs_, _, _ = self.feat3dnet(
                feat_memXs_input_,
                self.summ_writer,
                comp_mask=None,
            )
            feat_memXs = __u(feat_memXs_)
            self.summ_writer.summ_feats('3d_feats/feat_memXs_input', torch.unbind(feat_memXs_input, dim=1), pca=True)
            self.summ_writer.summ_feats('3d_feats/feat_memXs_output', torch.unbind(feat_memXs, dim=1), pca=True)
            # retrieval precision across views (batch element 0, all frames) ...
            mv_precision = utils.eval.measure_semantic_retrieval_precision(feat_memXs[0], mask_memXs[0])
            self.summ_writer.summ_scalar('semantic_retrieval/multiview_precision', mv_precision)
            # ... and across scenes (frame 0, all batch elements)
            ms_precision = utils.eval.measure_semantic_retrieval_precision(feat_memXs[:,0], mask_memXs[:,0])
            self.summ_writer.summ_scalar('semantic_retrieval/multiscene_precision', ms_precision)
        return total_loss, None, False

    def forward(self, feed):
        """Prepare tensors, then dispatch to run_train/run_test by set_name."""
        data_ok = self.prepare_common_tensors(feed)
        if not data_ok:
            # return early
            total_loss = torch.tensor(0.0).cuda()
            return total_loss, None, True
        else:
            if self.set_name=='train':
                return self.run_train(feed)
            elif self.set_name=='test':
                return self.run_test(feed)
            else:
                # bugfix: this printed the bare name `set_name`, which is undefined
                # in this scope and raised NameError instead of the message
                print('weird set_name:', self.set_name)
                assert(False)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,396
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/nets/detnet.py
|
import numpy as np
import torch
import torch.nn as nn
import torchvision
import torchvision.ops as ops
import utils.basic
import utils.geom
import utils.misc
import hyperparams as hyp
import archs.encoder3d
def smooth_l1_loss(deltas, targets, sigma=3.0):
    """Elementwise smooth-L1 (Huber-style) loss with transition point 1/sigma^2.

    For residual d = deltas - targets: 0.5*sigma^2*d^2 when |d| < 1/sigma^2,
    otherwise |d| - 0.5/sigma^2. Returns a tensor of the same shape
    (no reduction is applied).
    """
    sigma_sq = sigma * sigma
    residual = deltas - targets
    abs_residual = torch.abs(residual)
    # 1.0 inside the quadratic region, 0.0 in the linear region
    in_quadratic = (abs_residual < 1.0 / sigma_sq).float()
    quadratic = 0.5 * sigma_sq * residual ** 2
    linear = abs_residual - 0.5 / sigma_sq
    return in_quadratic * quadratic + (1.0 - in_quadratic) * linear
def binarize(input, threshold):
    """Threshold a tensor to 0/1: 1 where input >= threshold, else 0.

    The result keeps the input's dtype, matching the original
    torch.where(zeros_like/ones_like) formulation.
    """
    return (input >= threshold).to(input.dtype)
def meshgrid3d_xyz(B, Z, Y, X):
    """Build a B x X x Y x Z x 3 coordinate grid, last dim in (x, y, z) order.

    utils.basic.meshgrid3d yields B x Z x Y x X component grids; each is
    transposed to B x X x Y x Z before stacking.
    """
    grid_z, grid_y, grid_x = utils.basic.meshgrid3d(B, Z, Y, X, stack=False)
    # flip the spatial axes of each component grid from ZYX to XYZ order
    to_xyz = lambda g: g.permute(0, 3, 2, 1)
    return torch.stack([to_xyz(grid_x), to_xyz(grid_y), to_xyz(grid_z)], dim=-1)
def anchor_deltas_to_bboxes(anchor_deltas, indices):
    """Decode anchor-relative deltas into 3d boxes.

    anchor_deltas is N x 6 (first 3 translation, last 3 log-scale) and
    indices is N x 3 grid cells. Returns (min/max boxes N x 3 x 2,
    center+size boxes N x 6), both scaled by hyp.det_anchor_size.
    """
    centers = indices.float() + anchor_deltas[:, :3] * hyp.det_anchor_size
    half_extents = 0.5 * torch.exp(anchor_deltas[:, 3:]) * hyp.det_anchor_size
    lo = centers - half_extents
    hi = centers + half_extents
    min_max = torch.stack([lo, hi], 2)          # N x 3 x 2
    center_size = torch.cat([centers, hi - lo], 1)  # N x 6
    return min_max, center_size
def overlap_graph(boxes1, boxes2): #tested
    """Pairwise 3d IoU between two sets of axis-aligned boxes.

    boxes1 is M x 3 x 2 and boxes2 is K x 3 x 2, each axis holding
    (min, max) in (z, y, x) order. Returns an M x K IoU matrix.
    """
    m = boxes1.shape[0]
    k = boxes2.shape[0]
    if m == 0 or k == 0:
        # repeat() below would fail on an empty set, so short-circuit
        return torch.zeros(m, k)
    flat1 = boxes1.view(-1, 6)
    flat2 = boxes2.view(-1, 6)
    # tile so every box in boxes1 is paired with every box in boxes2
    pairs1 = flat1.unsqueeze(1).repeat(1, k, 1).view(-1, 6)
    pairs2 = flat2.unsqueeze(0).repeat(m, 1, 1).view(-1, 6)
    a_z1, a_z2, a_y1, a_y2, a_x1, a_x2 = torch.chunk(pairs1, 6, dim=1)
    b_z1, b_z2, b_y1, b_y2, b_x1, b_x2 = torch.chunk(pairs2, 6, dim=1)
    # intersection extent per axis, clamped at zero when boxes are disjoint
    dz = torch.clamp(torch.min(a_z2, b_z2) - torch.max(a_z1, b_z1), min=0)
    dy = torch.clamp(torch.min(a_y2, b_y2) - torch.max(a_y1, b_y1), min=0)
    dx = torch.clamp(torch.min(a_x2, b_x2) - torch.max(a_x1, b_x1), min=0)
    intersection = dz * dy * dx
    vol1 = (a_z2 - a_z1) * (a_y2 - a_y1) * (a_x2 - a_x1)
    vol2 = (b_z2 - b_z1) * (b_y2 - b_y1) * (b_x2 - b_x1)
    iou = intersection / (vol1 + vol2 - intersection)
    return iou.view(m, k)
def box_refinement_graph(positive_rois, roi_gt_boxes):
    """Regression targets mapping proposal boxes onto ground-truth boxes.

    Both inputs are N x 3 x 2 (min/max per axis, zyx order). Returns N x 6:
    the center offset concatenated with the side-length difference.
    """
    gt_centers = torch.mean(roi_gt_boxes, dim=2)
    roi_centers = torch.mean(positive_rois, dim=2)  # both N x 3 (zyx order)
    gt_lens = roi_gt_boxes[:, :, 1] - roi_gt_boxes[:, :, 0]
    roi_lens = positive_rois[:, :, 1] - positive_rois[:, :, 0]
    return torch.cat([gt_centers - roi_centers, gt_lens - roi_lens], dim=1)
def rpn_proposal_graph(pred_objectness, pred_anchor_deltas, valid_mask, corners_min_max_g, iou_thresh=0.5): #tested
    """Turn dense RPN predictions into per-batch NMS'd box proposals.

    pred_objectness: B x X x Y x Z sigmoid scores.
    pred_anchor_deltas: B x X x Y x Z x 6 anchor-relative deltas.
    valid_mask: B x N ground-truth validity flags.
    corners_min_max_g: B x N x 3 x 2 ground-truth extents, in xyz order.
    Returns three length-B lists (center+size boxes, scores, IoU-vs-gt),
    or (None, None, None) when no cell clears the probability threshold.
    """
    ######################## ROI generation ####################
    P_THRES = 0.9
    high_prob_indices = torch.stack(torch.where(pred_objectness > P_THRES), dim=1) # this is ? x 4, last dim in bxyz order
    B = pred_objectness.shape[0]
    # build prediction target
    bs_selected_boxes_co = []
    bs_selected_scores = []
    bs_overlaps = []
    # bugfix: this condition was `len(high_prob_indices > 0)`, i.e. the length of
    # a boolean tensor, which only matched the intent by coincidence; compare
    # the number of rows itself.
    if len(high_prob_indices) > 0:
        for i in list(range(B)):
            selected_boxes, selected_boxes_scores, overlaps, selected_boxes_co = detection_target_graph(
                i, high_prob_indices, corners_min_max_g, valid_mask,
                pred_objectness, pred_anchor_deltas, iou_thresh=iou_thresh)
            bs_selected_boxes_co.append(selected_boxes_co)
            bs_selected_scores.append(selected_boxes_scores)
            bs_overlaps.append(overlaps)
        return bs_selected_boxes_co, bs_selected_scores, bs_overlaps
    else:
        return None, None, None
def detection_target_graph(i, high_prob_indices, corners_min_max_g, valid_mask, pred_objectness, pred_anchor_deltas,
                           iou_thresh=0.5): #tested
    # Decode, NMS, and score the high-probability cells of batch element i,
    # then measure their 3d IoU against that element's valid ground-truth boxes.
    # Returns (boxes ?x3x2, scores ?, overlaps ?xnum_gt, center+size boxes ?x6).
    # NOTE(review): moves tensors to CPU for torchvision's nms and back with
    # .cuda(), so this path requires a CUDA device — confirm for CPU-only runs.
    batch_i_idxs = torch.stack(torch.where(high_prob_indices[:,0] == i), dim=1) # this is (?, 1)
    batch_i_indices = high_prob_indices[batch_i_idxs.squeeze(dim=1)] # this is ? x 4
    # gather the objectness score and anchor deltas at each confident cell
    batch_i_scores = pred_objectness[batch_i_indices[:, 0], batch_i_indices[:, 1], batch_i_indices[:, 2], batch_i_indices[:, 3]] # this is (?, )
    batch_i_anchor_deltas = pred_anchor_deltas[batch_i_indices[:, 0], batch_i_indices[:, 1], batch_i_indices[:, 2], batch_i_indices[:, 3]] # this is (?, 6)
    # don't know why all out of a sudden order becomes zyx, but we follow this zyx order for the following code ...
    # co refers to center + offset parameterization
    batch_i_bboxes, batch_i_bboxes_co = anchor_deltas_to_bboxes(
        batch_i_anchor_deltas, batch_i_indices[:,1:])
    # N x 3 x 2 and N x 6
    # print(batch_i_bboxes[:, 1:, :].permute(0, 2, 1).shape)
    # run 2d NMS on two orthogonal projections of the 3d boxes, then union
    # the survivors to approximate a 3d NMS
    selected_bboxes_idx_xy = ops.nms(
        batch_i_bboxes[:, 1:, :].permute(0, 2, 1).contiguous().view(-1, 4).cpu(), # view() fails, so we introduce this contiguous()
        batch_i_scores.cpu(),
        iou_thresh).cuda()
    selected_bboxes_idx_zx = ops.nms(
        batch_i_bboxes[:, [0,2], :].permute(0, 2, 1).contiguous().view(-1, 4).cpu(),
        batch_i_scores.cpu(),
        iou_thresh).cuda()
    selected_bboxes_idx = torch.unique(torch.cat([selected_bboxes_idx_xy, selected_bboxes_idx_zx], dim=0)) # this is (selected_bbox, )
    selected_3d_bboxes = batch_i_bboxes[selected_bboxes_idx] # this is (selected_bbox, 3, 2)
    selected_3d_bboxes_co = batch_i_bboxes_co[selected_bboxes_idx] # this is (selected_bbox, 6)
    selected_3d_bboxes_scores = batch_i_scores[selected_bboxes_idx]
    valid_inds = torch.stack(torch.where(valid_mask[i, :]), dim=1).squeeze(dim=1) # this is (valid_ids, )
    corners_min_max_g_i = corners_min_max_g[i, valid_inds] # (valid_ids, 3, 2)
    # calculate overlap in 3d
    overlaps = overlap_graph(selected_3d_bboxes, corners_min_max_g_i) # this is (selected_bbox, valid_ids)
    return selected_3d_bboxes, selected_3d_bboxes_scores, overlaps, selected_3d_bboxes_co
class DetNet(nn.Module):
    """Single-conv 3d detection head (VoxelNet-style anchor RPN).

    Predicts, per voxel, one objectness logit plus six anchor-relative
    box deltas, supervised by ground-truth boxes rasterized into
    positive/negative anchor masks.
    """
    def __init__(self):
        print('DetNet...')
        super(DetNet, self).__init__()
        # 7 channels per voxel: 1 objectness logit + 6 anchor deltas
        self.pred_dim = 7
        self.net = torch.nn.Conv3d(in_channels=hyp.feat3d_dim, out_channels=self.pred_dim, kernel_size=3, stride=1, padding=1).cuda()
        print(self.net)

    def forward(self,
                lrtlist_g,
                scores_g,
                feat_zyx,
                summ_writer
    ):
        """Compute detection losses and NMS'd box proposals.

        lrtlist_g: B x N x 19 ground-truth boxes; scores_g: B x N validity;
        feat_zyx: B x C x Z x Y x X feature volume.
        Returns (total_loss, padded_boxes_e, padded_scores_e, tidlist,
        pred_objectness, bs_selected_scores, bs_overlaps).
        """
        total_loss = torch.tensor(0.0).cuda()
        B, C, Z, Y, X = feat_zyx.shape
        _, N, _ = lrtlist_g.shape
        pred_dim = self.pred_dim # total 7, 6 deltas, 1 objectness
        feat = feat_zyx.permute(0, 1, 4, 3, 2) # get feat in xyz order, now B x C x X x Y x Z
        corners = utils.geom.get_xyzlist_from_lrtlist(lrtlist_g) # corners is B x N x 8 x 3, last dim in xyz order
        corners_max = torch.max(corners, dim=2)[0] # B x N x 3
        corners_min = torch.min(corners, dim=2)[0]
        corners_min_max_g = torch.stack([corners_min, corners_max], dim=3) # this is B x N x 3 x 2
        # trim down, to save some time
        N = min(N, hyp.K)
        corners_min_max_g = corners_min_max_g[:,:N]
        scores_g = scores_g[:, :N] # B x N
        # boxes_g is [-0.5~63.5, -0.5~15.5, -0.5~63.5]
        centers_g = utils.geom.get_clist_from_lrtlist(lrtlist_g)
        # centers_g is B x N x 3
        grid = meshgrid3d_xyz(B, Z, Y, X)[0] # just one grid please, this is X x Y x Z x 3
        delta_positions_raw = centers_g.view(B, N, 1, 1, 1, 3) - grid.view(1, 1, X, Y, Z, 3)
        delta_positions = delta_positions_raw / hyp.det_anchor_size
        lengths_g = utils.geom.get_lenlist_from_lrtlist(lrtlist_g) # B x N x 3
        delta_lengths = torch.log(lengths_g / hyp.det_anchor_size)
        delta_lengths = torch.max(delta_lengths, -1e6 * torch.ones_like(delta_lengths)) # to avoid -infs turning into nans
        lengths_g = lengths_g.view(B, N, 1, 1, 1, 3).repeat(1, 1, X, Y, Z, 1) # B x N x X x Y x Z x 3
        delta_lengths = delta_lengths.view(B, N, 1, 1, 1, 3).repeat(1, 1, X, Y, Z, 1) # B x N x X x Y x Z x 3
        valid_mask = scores_g.view(B, N, 1, 1, 1, 1).repeat(1, 1, X, Y, Z, 1) # B x N x X x Y x Z x 1
        delta_gt = torch.cat([delta_positions, delta_lengths], -1) # B x N x X x Y x Z x 6
        # normalized distance from each voxel to each object center;
        # < 0.5 means inside the object, up to 0.8 is the "don't care" band
        object_dist = torch.max(torch.abs(delta_positions_raw)/(lengths_g * 0.5 + 1e-5), dim=5)[0] # B x N x X x Y x Z
        object_dist_mask = (torch.ones_like(object_dist) - binarize(object_dist, 0.5)).unsqueeze(dim=5) # B x N x X x Y x Z x 1
        object_dist_mask = object_dist_mask * valid_mask # B x N x X x Y x Z x 1
        object_neg_dist_mask = torch.ones_like(object_dist) - binarize(object_dist, 0.8)
        object_neg_dist_mask = object_neg_dist_mask * valid_mask.squeeze(dim=5) # B x N x X x Y x Z
        # accumulate per-object regression targets one object at a time
        anchor_deltas_gt = None
        for obj_id in list(range(N)):
            if anchor_deltas_gt is None:
                anchor_deltas_gt = delta_gt[:, obj_id, :, :, :, :] * object_dist_mask[:, obj_id, :, :, :, :]
                current_mask = object_dist_mask[:, obj_id, :, :, :, :]
            else:
                # don't overwrite anchor positions that are already taken
                overlap = current_mask * object_dist_mask[:, obj_id, :, :, :, :]
                anchor_deltas_gt += (torch.ones_like(overlap)- overlap) * delta_gt[:, obj_id, :, :, :, :] * object_dist_mask[:, obj_id, :, :, :, :]
                current_mask = current_mask + object_dist_mask[:, obj_id, :, :, :, :]
                current_mask = binarize(current_mask, 0.5)
        pos_equal_one = binarize(torch.sum(object_dist_mask, dim=1), 0.5).squeeze(dim=4) # B x X x Y x Z
        neg_equal_one = binarize(torch.sum(object_neg_dist_mask, dim=1), 0.5)
        neg_equal_one = torch.ones_like(neg_equal_one) - neg_equal_one # B x X x Y x Z
        pos_equal_one_sum = torch.sum(pos_equal_one, [1,2,3]) # B
        neg_equal_one_sum = torch.sum(neg_equal_one, [1,2,3])
        summ_writer.summ_occ('det/pos_equal_one', pos_equal_one.unsqueeze(1))
        # set min to one in case no object, to avoid nan
        pos_equal_one_sum_safe = torch.max(pos_equal_one_sum, torch.ones_like(pos_equal_one_sum)) # B
        neg_equal_one_sum_safe = torch.max(neg_equal_one_sum, torch.ones_like(neg_equal_one_sum)) # B
        pred = self.net(feat) # this is B x 7 x X x Y x Z
        summ_writer.summ_feat('det/feat', feat, pca=False)
        summ_writer.summ_feat('det/pred', pred, pca=True)
        pred = pred.permute(0, 2, 3, 4, 1) # B x X x Y x Z x 7
        pred_anchor_deltas = pred[..., 1:] # B x X x Y x Z x 6
        pred_objectness_logits = pred[..., 0] # B x X x Y x Z
        # fix: torch.nn.functional.sigmoid is deprecated; torch.sigmoid is the
        # documented equivalent
        pred_objectness = torch.sigmoid(pred_objectness_logits) # B x X x Y x Z
        alpha = 1.5
        beta = 1.0
        small_addon_for_BCE = 1e-6
        overall_loss = torch.nn.functional.binary_cross_entropy_with_logits(
            input=pred_objectness_logits,
            target=pos_equal_one,
            reduction='none',
        )
        # positives and negatives are averaged separately, then reweighted
        cls_pos_loss = utils.basic.reduce_masked_mean(overall_loss, pos_equal_one)
        cls_neg_loss = utils.basic.reduce_masked_mean(overall_loss, neg_equal_one)
        loss_prob = torch.sum(alpha * cls_pos_loss + beta * cls_neg_loss)
        pos_mask = pos_equal_one.unsqueeze(dim=4) # B x X x Y x Z x 1
        loss_l1 = smooth_l1_loss(pos_mask * pred_anchor_deltas, pos_mask * anchor_deltas_gt) # B x X x Y x Z x 1
        loss_reg = torch.sum(loss_l1/pos_equal_one_sum_safe.view(-1, 1, 1, 1, 1))/float(B)
        total_loss = utils.misc.add_loss('det/detect_prob', total_loss, loss_prob, hyp.det_prob_coeff, summ_writer)
        total_loss = utils.misc.add_loss('det/detect_reg', total_loss, loss_reg, hyp.det_reg_coeff, summ_writer)
        # finally, turn the preds into hard boxes, with nms
        (
            bs_selected_boxes_co,
            bs_selected_scores,
            bs_overlaps,
        ) = rpn_proposal_graph(pred_objectness, pred_anchor_deltas, scores_g, corners_min_max_g,
                               iou_thresh=0.2)
        # these are lists of length B, each one leading with dim "?", since there is a variable number of objs per frame
        N = hyp.K*2
        tidlist = torch.linspace(1.0, N, N).long().to('cuda')
        tidlist = tidlist.unsqueeze(0).repeat(B, 1)
        # pad/clip the variable-length proposals into fixed B x N outputs
        padded_boxes_e = torch.zeros(B, N, 9).float().cuda()
        padded_scores_e = torch.zeros(B, N).float().cuda()
        if bs_selected_boxes_co is not None:
            for b in list(range(B)):
                # make the boxes 1 x N x 9 (instead of B x ? x 6)
                padded_boxes0_e = bs_selected_boxes_co[b].unsqueeze(0)
                padded_scores0_e = bs_selected_scores[b].unsqueeze(0)
                padded_boxes0_e = torch.cat([padded_boxes0_e, torch.zeros([1, N, 6], device=torch.device('cuda'))], dim=1) # 1 x ? x 6
                padded_scores0_e = torch.cat([padded_scores0_e, torch.zeros([1, N], device=torch.device('cuda'))], dim=1) # pad out
                padded_boxes0_e = padded_boxes0_e[:,:N] # clip to N
                padded_scores0_e = padded_scores0_e[:,:N] # clip to N
                # append three zero rotation slots to reach the 9-dim box format
                padded_boxes0_e = torch.cat([padded_boxes0_e, torch.zeros([1, N, 3], device=torch.device('cuda'))], dim=2)
                padded_boxes_e[b] = padded_boxes0_e[0]
                padded_scores_e[b] = padded_scores0_e[0]
        return total_loss, padded_boxes_e, padded_scores_e, tidlist, pred_objectness, bs_selected_scores, bs_overlaps
if __name__ == "__main__":
    # ad-hoc smoke tests for the helpers above (most calls left commented out)
    A = torch.randn(5, 10)
    B = torch.randn(5, 10)
    # print(smooth_l1_loss(A, A+1))
    # meshgrid3d_xyz(2, 64, 64, 64)
    boxes1 = torch.randn(2, 3, 1)
    boxes1 = boxes1.repeat(1, 1, 2) #2 x 3 x 2
    boxes1[:, :, 1] += 1.0
    boxes2 = boxes1 - 0.5
    # print(overlap_graph(boxes1, boxes2))
    # print(box_refinement_graph(boxes1, boxes2))
    # boxes3d = torch.zeros(2, 2, 9).cuda()
    # one confident cell so rpn_proposal_graph takes its non-empty path
    pred_objectness = torch.zeros(2, 10, 10, 10)
    pred_objectness[0,1,1,1] = 1.0
    pred_anchor_deltas = torch.zeros(2, 10, 10, 10, 6)
    valid_mask = torch.ones(2, 1)
    corners_min_max_g = torch.tensor(np.array([[0.0, 1.5], [0.0, 1.5], [0.5, 1.5]])).view(1, 1, 3, 2).repeat(2, 1, 1, 1).float()
    bs_selected_boxes_co, bs_selected_scores, bs_overlaps = rpn_proposal_graph(pred_objectness, pred_anchor_deltas, valid_mask, corners_min_max_g)
    print(bs_overlaps)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,397
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/hyperparams.py
|
import os
# from munch import Munch
# ----- default hyperparameter values; mode-specific exp files override them -----
H = 240 # height
W = 320 # width
Z = 128
Y = 64
X = 128
Z_val = 128
Y_val = 64
X_val = 128
Z_test = 128
Y_test = 64
X_test = 128
PH = int(128/4)
PW = int(384/4)
ZY = 32
ZX = 32
ZZ = 32
N = 8 # number of boxes per npz
K = 1 # number of boxes to actually use
# S = 2 # seq length
# S_test = 3 # seq length
T = 256 # height & width of birdview map
V = 100000 # num velodyne points
# metric bounds of mem space
XMIN = -16.0 # right (neg is left)
XMAX = 16.0 # right
YMIN = -1.0 # down (neg is up)
YMAX = 3.0 # down
ZMIN = 2.0 # forward
ZMAX = 34.0 # forward
XMIN_val = -16.0 # right (neg is left)
XMAX_val = 16.0 # right
YMIN_val = -1.0 # down (neg is up)
YMAX_val = 3.0 # down
ZMIN_val = 2.0 # forward
ZMAX_val = 34.0 # forward
XMIN_test = -16.0 # right (neg is left)
XMAX_test = 16.0 # right
YMIN_test = -1.0 # down (neg is up)
YMAX_test = 3.0 # down
ZMIN_test = 2.0 # forward
ZMAX_test = 34.0 # forward
FLOOR = 2.65 # ground (2.65m downward from the cam)
CEIL = (FLOOR-2.0) # ceiling, 2m above the floor
#----------- loading -----------#
do_include_summs = False
do_include_vis = True
do_test = False
do_export_vis = False
do_export_stats = False
do_export_inds = False
# checkpoint names to initialize each sub-net from ("" = train from scratch)
emb2d_init = ""
feat2d_init = ""
feat3d_init = ""
flow_init = ""
occ_init = ""
view_init = ""
ego_init = ""
det_init = ""
total_init = ""
reset_iter = False
do_freeze_emb2d = False
do_freeze_feat2d = False
do_freeze_feat3d = False
do_freeze_occ = False
do_freeze_view = False
do_freeze_flow = False
do_freeze_ego = False
do_freeze_det = False
do_resume = False
# by default, only backprop on "train" iters
backprop_on_train = True
backprop_on_val = False
backprop_on_test = False
#----------- net design -----------#
# by default, run nothing
do_emb2d = False
do_emb3d = False
do_feat2d = False
do_feat3d = False
do_occ = False
do_view = False
do_flow = False
do_ego = False
do_det = False
#----------- general hypers -----------#
lr = 0.0
#----------- emb hypers -----------#
emb2d_ml_coeff = 0.0
emb3d_ml_coeff = 0.0
emb2d_l2_coeff = 0.0
emb3d_l2_coeff = 0.0
emb2d_mindist = 0.0
emb3d_mindist = 0.0
emb2d_num_samples = 0
emb3d_num_samples = 0
emb2d_ce_coeff = 0.0
emb3d_ce_coeff = 0.0
#----------- feat3d hypers -----------#
feat3d_dim = 32
feat3d_smooth_coeff = 0.0
#----------- feat2d hypers -----------#
feat2d_smooth_coeff = 0.0
feat2d_dim = 8
#----------- occ hypers -----------#
occ_coeff = 0.0
occ_smooth_coeff = 0.0
#----------- view hypers -----------#
view_depth = 64
view_accu_render = False
view_accu_render_unps = False
view_accu_render_gt = False
view_pred_embs = False
view_pred_rgb = False
view_l1_coeff = 0.0
#----------- det hypers -----------#
det_anchor_size = 12.0
det_prob_coeff = 0.0
det_reg_coeff = 0.0
#----------- flow hypers -----------#
flow_warp_coeff = 0.0
flow_warp_g_coeff = 0.0
flow_cycle_coeff = 0.0
flow_smooth_coeff = 0.0
flow_l1_coeff = 0.0
flow_l2_coeff = 0.0
# flow_synth_l1_coeff = 0.0
# flow_synth_l2_coeff = 0.0
flow_do_synth_rt = False
flow_heatmap_size = 4
#----------- ego hypers -----------#
ego_num_scales = 1
ego_num_rots = 0
ego_max_disp_z = 0
ego_max_disp_y = 0
ego_max_disp_x = 0
ego_max_deg = 0.0
ego_t_l2_coeff = 0.0
ego_deg_l2_coeff = 0.0
ego_synth_prob = 0.0
#----------- det hypers -----------#
# NOTE(review): this section previously re-assigned det_anchor_size,
# det_prob_coeff, and det_reg_coeff with the same values already set in the
# first "det hypers" section above; the redundant duplicates were removed so
# each hyper has a single point of definition.
#----------- mod -----------#
mod = '""'
############ slower-to-change hyperparams below here ############
## logging
log_freq_train = 100
log_freq_val = 100
log_freq_test = 100
snap_freq = 10000
max_iters = 10000
shuffle_train = True
shuffle_val = True
shuffle_test = True
trainset_format = 'seq'
valset_format = 'seq'
testset_format = 'seq'
# should the seqdim be taken in consecutive order
trainset_consec = True
valset_consec = True
testset_consec = True
trainset_seqlen = 2
valset_seqlen = 2
testset_seqlen = 2
trainset_batch_size = 2
valset_batch_size = 1
testset_batch_size = 1
dataset_name = ""
seqname = ""
ind_dataset = ''
trainset = ""
valset = ""
testset = ""
dataset_location = ""
dataset_filetype = "npz"
# mode selection
do_carla_static = False
do_carla_det = False
do_carla_ego = False
############ rev up the experiment ############
# the MODE env var picks an experiment file, which is exec'd here to
# overwrite the defaults above (this is the project's config mechanism)
mode = os.environ["MODE"]
print('os.environ mode is %s' % mode)
if mode=="CARLA_STATIC":
    exec(compile(open('exp_carla_static.py').read(), 'exp_carla_static.py', 'exec'))
elif mode=="CARLA_DET":
    exec(compile(open('exp_carla_det.py').read(), 'exp_carla_det.py', 'exec'))
elif mode=="CARLA_EGO":
    exec(compile(open('exp_carla_ego.py').read(), 'exp_carla_ego.py', 'exec'))
else:
    assert(False) # what mode is this?
############ make some final adjustments ############
trainset_path = "%s/%s.txt" % (dataset_location, trainset)
valset_path = "%s/%s.txt" % (dataset_location, valset)
testset_path = "%s/%s.txt" % (dataset_location, testset)
# bundle the per-set settings into dicts keyed by set name
data_paths = {}
data_paths['train'] = trainset_path
data_paths['val'] = valset_path
data_paths['test'] = testset_path
set_nums = {}
set_nums['train'] = 0
set_nums['val'] = 1
set_nums['test'] = 2
set_names = ['train', 'val', 'test']
log_freqs = {}
log_freqs['train'] = log_freq_train
log_freqs['val'] = log_freq_val
log_freqs['test'] = log_freq_test
shuffles = {}
shuffles['train'] = shuffle_train
shuffles['val'] = shuffle_val
shuffles['test'] = shuffle_test
data_formats = {}
data_formats['train'] = trainset_format
data_formats['val'] = valset_format
data_formats['test'] = testset_format
data_consecs = {}
data_consecs['train'] = trainset_consec
data_consecs['val'] = valset_consec
data_consecs['test'] = testset_consec
seqlens = {}
seqlens['train'] = trainset_seqlen
seqlens['val'] = valset_seqlen
seqlens['test'] = testset_seqlen
batch_sizes = {}
batch_sizes['train'] = trainset_batch_size
batch_sizes['val'] = valset_batch_size
batch_sizes['test'] = testset_batch_size
############ autogen a name; don't touch any hypers! ############
def strnum(x):
    """Render a numeric hyperparameter as a compact tag.

    Formats with '%g' and drops everything before the decimal point:
    0.5 -> '.5', 1e-4 -> '.0001', 2.0 -> '2', 100 -> '100'.
    """
    text = '%g' % x
    dot = text.find('.')
    return text[dot:] if dot >= 0 else text
# build an experiment name that encodes the active hypers: each enabled
# net appends a short tag plus its nonzero coefficients (via strnum)
if do_test:
    name = "%02d_s%d" % (testset_batch_size, trainset_seqlen)
    name += "_m%dx%dx%d" % (Z_test, Y_test, X_test)
else:
    name = "%02d_s%d" % (trainset_batch_size, trainset_seqlen)
    if do_feat3d:
        name += "_m%dx%dx%d" % (Z, Y, X)
    if do_view or do_emb2d:
        name += "_p%dx%d" % (PH,PW)
if lr > 0.0:
    # compress the learning rate, e.g. "5.0e-04" -> "504"
    lrn = "%.1e" % lr
    # e.g., 5.0e-04
    lrn = lrn[0] + lrn[3:5] + lrn[-1]
    name += "_%s" % lrn
if do_feat2d:
    name += "_F2"
    if do_freeze_feat2d:
        name += "f"  # frozen weights
    coeffs = [
        feat2d_dim,
        feat2d_smooth_coeff,
    ]
    prefixes = [
        "d",
        "s",
    ]
    for l_, l in enumerate(coeffs):
        if l > 0:
            name += "_%s%s" % (prefixes[l_],strnum(l))
if do_feat3d:
    name += "_F3"
    if do_freeze_feat3d:
        name += "f"
    coeffs = [
        feat3d_dim,
        feat3d_smooth_coeff,
    ]
    prefixes = [
        "d",
        "s",
    ]
    for l_, l in enumerate(coeffs):
        if l > 0:
            name += "_%s%s" % (prefixes[l_],strnum(l))
if do_ego:
    # ego tag encodes the search-space geometry
    name += '_G_%dx%dx%dx%dx%d' % (
        ego_num_scales,
        ego_num_rots,
        ego_max_disp_z,
        ego_max_disp_y,
        ego_max_disp_x,
    )
    if do_freeze_ego:
        name += "f"
    ego_coeffs = [
        ego_max_deg,
        ego_t_l2_coeff,
        ego_deg_l2_coeff,
        ego_synth_prob,
    ]
    ego_prefixes = [
        "r",
        "t",
        "d",
        "p",
    ]
    for l_, l in enumerate(ego_coeffs):
        if l > 0:
            name += "_%s%s" % (ego_prefixes[l_],strnum(l))
if do_det:
    name += "_D"
    name += "%d" % det_anchor_size
    if do_freeze_det:
        name += "f"
    det_coeffs = [
        det_prob_coeff,
        det_reg_coeff,
    ]
    det_prefixes = [
        "p",
        "r",
    ]
    for l_, l in enumerate(det_coeffs):
        if l > 0:
            name += "_%s%s" % (det_prefixes[l_],strnum(l))
if do_occ:
    name += "_O"
    if do_freeze_occ:
        name += "f"
    occ_coeffs = [
        occ_coeff,
        occ_smooth_coeff,
    ]
    occ_prefixes = [
        "c",
        "s",
    ]
    for l_, l in enumerate(occ_coeffs):
        if l > 0:
            name += "_%s%s" % (occ_prefixes[l_],strnum(l))
if do_view:
    name += "_V"
    if view_pred_embs:
        name += "e"
    if view_pred_rgb:
        name += "r"
    if do_freeze_view:
        name += "f"
    view_coeffs = [
        view_depth,
        view_l1_coeff,
    ]
    view_prefixes = [
        "d",
        "e",
    ]
    for l_, l in enumerate(view_coeffs):
        if l > 0:
            name += "_%s%s" % (view_prefixes[l_],strnum(l))
# (fix) A second, byte-identical `if do_det:` naming section used to live
# here — same "_D<det_anchor_size>" tag and det_prob/det_reg coefficient
# suffixes as the one earlier in this file. With do_det enabled it appended
# the det suffix to `name` twice, so the duplicate block was removed.
if do_emb2d:
    name += "_E2"
    if do_freeze_emb2d:
        name += "f"  # frozen weights
    coeffs = [
        emb2d_ml_coeff,
        emb2d_l2_coeff,
        emb2d_num_samples,
        emb2d_mindist,
        emb2d_ce_coeff,
    ]
    prefixes = [
        "m",
        "e",
        "n",
        "d",
        "c",
    ]
    for l_, l in enumerate(coeffs):
        if l > 0:
            name += "_%s%s" % (prefixes[l_],strnum(l))
if do_emb3d:
    name += "_E3"
    coeffs = [
        emb3d_ml_coeff,
        emb3d_l2_coeff,
        emb3d_num_samples,
        emb3d_mindist,
        emb3d_ce_coeff,
    ]
    prefixes = [
        "m",
        "e",
        "n",
        "d",
        "c",
    ]
    for l_, l in enumerate(coeffs):
        if l > 0:
            name += "_%s%s" % (prefixes[l_],strnum(l))
if do_flow:
    name += "_F"
    if do_freeze_flow:
        name += "f"
    else:
        # coefficients only matter (and only get encoded) when training
        flow_coeffs = [flow_heatmap_size,
                       flow_warp_coeff,
                       flow_warp_g_coeff,
                       flow_cycle_coeff,
                       flow_smooth_coeff,
                       flow_l1_coeff,
                       flow_l2_coeff,
                       # flow_synth_l1_coeff,
                       # flow_synth_l2_coeff,
        ]
        flow_prefixes = ["h",
                         "w",
                         "g",
                         "c",
                         "s",
                         "e",
                         "f",
                         # "y",
                         # "x",
        ]
        for l_, l in enumerate(flow_coeffs):
            if l > 0:
                name += "_%s%s" % (flow_prefixes[l_],strnum(l))
##### end model description
# add some training data info
# append the split names that are active, and record which splits run
sets_to_run = {}
if trainset:
    name = "%s_%s" % (name, trainset)
    sets_to_run['train'] = True
else:
    sets_to_run['train'] = False
if valset:
    name = "%s_%s" % (name, valset)
    sets_to_run['val'] = True
else:
    sets_to_run['val'] = False
if testset:
    name = "%s_%s" % (name, testset)
    sets_to_run['test'] = True
else:
    sets_to_run['test'] = False
sets_to_backprop = {}
sets_to_backprop['train'] = backprop_on_train
sets_to_backprop['val'] = backprop_on_val
sets_to_backprop['test'] = backprop_on_test
# tag non-shuffled runs
if (not shuffle_train) or (not shuffle_val) or (not shuffle_test):
    name += "_ns"
# append the free-form run tag chosen in the exp_* file
if mod:
    name = "%s_%s" % (name, mod)
if do_resume:
    # NOTE(review): indentation reconstructed — assumed both of these
    # lines belong to the do_resume branch (tag the name and resume
    # from this experiment's own checkpoints); verify against original
    name += '_gt'
    total_init = name
print(name)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,398
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/exp_carla_ego.py
|
from exp_base import *
############## choose an experiment ##############
# later assignments win: 'trainer' is the active experiment here
current = 'builder'
current = 'debugger'
current = 'trainer'
# `mod` is a short run tag appended to the auto-generated experiment
# name; the lines below are a kept history of past runs (last one wins)
mod = '"eg00"' # nothing; builder
mod = '"eg01"' # deleted junk
mod = '"eg02"' # added hyps
mod = '"eg03"' # train a while
mod = '"eg04"' # 1 scale
mod = '"eg05"' # no synth
mod = '"eg06"' # consec=True
mod = '"eg07"' # comment out the synth part < ok. but this npz has no motion
mod = '"eg08"' # second file < a bit jumpier than i would like...
mod = '"eg09"' # S = 3
mod = '"eg10"' # make my own thing; assert S==2 < ok, much cleaner, but still jumpy
mod = '"eg11"' # cleaned up summs
mod = '"eg12"' # cleaned up summs; include the occ transform
mod = '"eg13"' # removed the warp loss
mod = '"eg14"' # add summ of the gt
mod = '"eg15"' # fix the hyps
mod = '"eg16"' # renamed DHW as ZYX
mod = '"eg17"' # same, fewer prints
mod = '"eg18"' # feed rgbd input
mod = '"eg19"' # cleaned up
mod = '"eg20"' # train a while
############## exps ##############
# each exp is a list of group names; at the bottom of this file every
# group is expanded into "var = value" strings and exec'd
exps['builder'] = [
    'carla_ego', # mode
    'carla_traj_10_data', # dataset
    'carla_bounds',
    '3_iters',
    'lr0',
    'B1',
    'no_shuf',
    'train_feat3d',
    'train_ego',
    'log1',
]
exps['debugger'] = [
    'carla_ego', # mode
    'carla_traj_1_data', # dataset
    'carla_bounds',
    '1k_iters',
    'lr4',
    'B1',
    'train_feat3d',
    'train_ego',
    'no_shuf',
    'log10',
]
exps['trainer'] = [
    'carla_ego', # mode
    'carla_traj_train_data', # dataset
    'carla_bounds',
    '100k_iters',
    'lr4',
    'B2',
    'train_feat3d',
    'train_ego',
    'log50',
]
############## groups ##############
# a group is a list of hyperparameter assignment strings, verified by
# _verify_ and exec'd when its name appears in the active exp
groups['carla_ego'] = ['do_carla_ego = True']
groups['train_feat3d'] = [
    'do_feat3d = True',
    'feat3d_dim = 32',
]
groups['train_ego'] = [
    'do_ego = True',
    'ego_t_l2_coeff = 1.0',
    'ego_deg_l2_coeff = 1.0',
    'ego_num_scales = 2',
    'ego_num_rots = 11',
    'ego_max_deg = 4.0',
    'ego_max_disp_z = 2',
    'ego_max_disp_y = 1',
    'ego_max_disp_x = 2',
    'ego_synth_prob = 0.0',
]
############## datasets ##############
# dims for mem
# voxel-grid resolution of the 3D memory tensor (Z, Y, X)
SIZE = 32
Z = int(SIZE*4)
Y = int(SIZE*1)
X = int(SIZE*4)
K = 2 # how many objects to consider
N = 8 # how many objects per npz
S = 2  # sequence length per sample
H = 128  # input image height
W = 384  # input image width
# H and W for proj stuff
PH = int(H/2.0)
PW = int(W/2.0)
dataset_location = "/projects/katefgroup/datasets/carla/processed/npzs"
# dataset groups: same format, different train-split file lists
groups['carla_traj_1_data'] = [
    'dataset_name = "carla"',
    'H = %d' % H,
    'W = %d' % W,
    'trainset = "taqs100i2one"',
    'trainset_format = "traj"',
    'trainset_consec = True',
    'trainset_seqlen = %d' % S,
    'dataset_location = "%s"' % dataset_location,
    'dataset_filetype = "npz"'
]
groups['carla_traj_10_data'] = [
    'dataset_name = "carla"',
    'H = %d' % H,
    'W = %d' % W,
    'trainset = "taqs100i2ten"',
    'trainset_format = "traj"',
    'trainset_consec = True',
    'trainset_seqlen = %d' % S,
    'dataset_location = "%s"' % dataset_location,
    'dataset_filetype = "npz"'
]
groups['carla_traj_train_data'] = [
    'dataset_name = "carla"',
    'H = %d' % H,
    'W = %d' % W,
    'trainset = "taqs100i2t"',
    'trainset_format = "traj"',
    'trainset_consec = True',
    'trainset_seqlen = %d' % S,
    'dataset_location = "%s"' % dataset_location,
    'dataset_filetype = "npz"'
]
############## verify and execute ##############
def _verify_(s):
varname, eq, val = s.split(' ')
assert varname in globals()
assert eq == '='
assert type(s) is type('')
# expand the chosen experiment: verify and exec every assignment string
print(current)
assert current in exps
for group in exps[current]:
    print("  " + group)
    assert group in groups
    for s in groups[group]:
        print("    " + s)
        _verify_(s)
        # each s is "<hyperparam> = <value>"; exec into this namespace
        exec(s)
# finally set the free-form run tag itself
s = "mod = " + mod
_verify_(s)
exec(s)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,399
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/archs/encoder3d.py
|
import torch
import torch.nn as nn
import time
import torch.nn.functional as F
import archs.pixelshuffle3d
class Skipnet3d(nn.Module):
    """3D encoder-decoder with skip connections (U-Net style).

    Three stride-2 downsampling conv blocks, then two transposed-conv
    upsampling blocks; each up step concatenates the matching encoder
    feature map (skip connection) before batch-norm, and a final 1x1x1
    conv maps to `out_dim` channels. Net effect: output spatial dims are
    1/2 of the input (3 downs, 2 ups).
    """
    def __init__(self, in_dim, out_dim, chans=64):
        super(Skipnet3d, self).__init__()
        conv3d = []
        up_bn = [] # batch norm for deconv
        conv3d_transpose = []
        # note: down_out_dims has 4 entries but down_in_dims only 3, so
        # zip() below builds just 3 down blocks; the 8*chans entry is unused
        self.down_in_dims = [in_dim, chans, 2*chans]#, 4*chans]
        self.down_out_dims = [chans, 2*chans, 4*chans, 8*chans]
        self.down_ksizes = [4, 4, 4, 4]
        self.down_strides = [2, 2, 2, 2]
        padding = 1
        # print('down dims: ', self.down_out_dims)
        for i, (in_chan, out_chan, ksize, stride) in enumerate(zip(self.down_in_dims, self.down_out_dims, self.down_ksizes, self.down_strides)):
            conv3d.append(nn.Sequential(
                # replication padding + unpadded conv, instead of the
                # zero-padded conv kept commented below
                nn.ReplicationPad3d(padding),
                nn.Conv3d(in_channels=in_chan, out_channels=out_chan, kernel_size=ksize, stride=stride, padding=0),
                # nn.Conv3d(in_channels=in_chan, out_channels=out_chan, kernel_size=ksize, stride=stride, padding=padding),
                nn.LeakyReLU(),
                nn.BatchNorm3d(num_features=out_chan),
            ))
        self.conv3d = nn.ModuleList(conv3d)
        # up path: up_in_dims include the channels added by concatenating
        # the skip feature from the encoder of the previous resolution
        self.up_in_dims = [4*chans, 6*chans]
        self.up_out_dims = [4*chans, 4*chans]
        # post-concat channel counts (transpose-conv output + skip)
        self.up_bn_dims = [6*chans, 5*chans]
        self.up_ksizes = [4, 4]
        self.up_strides = [2, 2]
        padding = 1
        # print('up dims: ', self.up_out_dims)
        for i, (in_chan, bn_dim, out_chan, ksize, stride) in enumerate(zip(self.up_in_dims, self.up_bn_dims, self.up_out_dims, self.up_ksizes, self.up_strides)):
            conv3d_transpose.append(nn.Sequential(
                nn.ConvTranspose3d(in_channels=in_chan, out_channels=out_chan, kernel_size=ksize, stride=stride, padding=padding),
                nn.LeakyReLU(),
            ))
            up_bn.append(nn.BatchNorm3d(num_features=bn_dim))
        self.conv3d_transpose = nn.ModuleList(conv3d_transpose)
        self.up_bn = nn.ModuleList(up_bn)
        # final 1x1x1 conv to get our desired out_dim
        self.final_feature = nn.Conv3d(in_channels=self.up_bn_dims[-1], out_channels=out_dim, kernel_size=1, stride=1, padding=0)
    def forward(self, inputs):
        feat = inputs
        skipcons = []
        for conv3d_layer in self.conv3d:
            feat = conv3d_layer(feat)
            skipcons.append(feat)
        skipcons.pop() # we don't want the innermost layer as skipcon
        for i, (conv3d_transpose_layer, bn_layer) in enumerate(zip(self.conv3d_transpose, self.up_bn)):
            # print('feat before up', feat.shape)
            feat = conv3d_transpose_layer(feat)
            feat = torch.cat([feat, skipcons.pop()], dim=1) #skip connection by concatenation
            # print('feat before bn', feat.shape)
            feat = bn_layer(feat)
        feat = self.final_feature(feat)
        return feat
class Res3dBlock(nn.Module):
    """Residual block for 3D feature maps.

    Main path: conv3x3x3 -> BN -> ReLU -> conv3x3x3 -> BN. The shortcut
    is an identity when channel counts already match, otherwise a 1x1x1
    conv + BN projection. With padding=0 each conv shrinks every spatial
    dim by 2, so the shortcut is center-cropped to match before the add.
    """
    def __init__(self, in_planes, out_planes, padding=1):
        super(Res3dBlock, self).__init__()
        # main (residual) branch
        self.res_branch = nn.Sequential(
            nn.Conv3d(in_planes, out_planes, kernel_size=3, stride=1, padding=padding),
            nn.BatchNorm3d(out_planes),
            nn.ReLU(True),
            nn.Conv3d(out_planes, out_planes, kernel_size=3, stride=1, padding=padding),
            nn.BatchNorm3d(out_planes)
        )
        assert(padding==1 or padding==0)
        self.padding = padding
        # shortcut branch: identity if the shapes already agree
        if in_planes == out_planes:
            self.skip_con = nn.Sequential()
        else:
            self.skip_con = nn.Sequential(
                nn.Conv3d(in_planes, out_planes, kernel_size=1, stride=1, padding=0),
                nn.BatchNorm3d(out_planes)
            )
    def forward(self, x):
        main = self.res_branch(x)
        shortcut = self.skip_con(x)
        if self.padding == 0:
            # two unpadded 3x3x3 convs trimmed 2 voxels off each side in
            # the main path; crop the shortcut the same way
            shortcut = shortcut[:, :, 2:-2, 2:-2, 2:-2]
        return F.relu(main + shortcut, True)
class Conv3dBlock(nn.Module):
    """Unpadded 3x3x3 conv -> BN -> ReLU.

    With stride 1 each spatial dim shrinks by 2 (no padding).
    """
    def __init__(self, in_planes, out_planes, stride=1):
        super(Conv3dBlock, self).__init__()
        layers = [
            nn.Conv3d(in_planes, out_planes, kernel_size=3, stride=stride, padding=0),
            nn.BatchNorm3d(out_planes),
            nn.ReLU(True),
        ]
        self.conv = nn.Sequential(*layers)
    def forward(self, x):
        return self.conv(x)
class Pool3dBlock(nn.Module):
    """Non-overlapping 3D max pooling (kernel == stride == pool_size)."""
    def __init__(self, pool_size):
        super(Pool3dBlock, self).__init__()
        self.pool_size = pool_size
    def forward(self, x):
        k = self.pool_size
        return F.max_pool3d(x, kernel_size=k, stride=k)
class Deconv3dBlock(nn.Module):
    """Transposed 3D conv (k=4, s=2, p=1) + LeakyReLU.

    Exactly doubles every spatial dimension.
    """
    def __init__(self, in_planes, out_planes):
        super(Deconv3dBlock, self).__init__()
        self.deconv = nn.Sequential(
            nn.ConvTranspose3d(in_planes, out_planes, 4, stride=2, padding=1),
            nn.LeakyReLU(),
        )
    def forward(self, x):
        return self.deconv(x)
class Resnet3d(nn.Module):
    """Plain 3D residual encoder.

    A res block, then two stages of (pool /2, 3 res blocks), then one
    deconv (x2) and a 1x1x1 conv to `out_dim` channels; net spatial
    scale is 1/2 of the input.
    """
    def __init__(self, in_dim, out_dim, chans=32):
        super().__init__()
        # layers are kept as individually-named attributes so existing
        # checkpoints (state_dict keys) keep matching
        self.encoder_layer0 = Res3dBlock(in_dim, chans)
        self.encoder_layer1 = Pool3dBlock(2)
        self.encoder_layer2 = Res3dBlock(chans, chans)
        self.encoder_layer3 = Res3dBlock(chans, chans)
        self.encoder_layer4 = Res3dBlock(chans, chans)
        self.encoder_layer5 = Pool3dBlock(2)
        self.encoder_layer6 = Res3dBlock(chans, chans)
        self.encoder_layer7 = Res3dBlock(chans, chans)
        self.encoder_layer8 = Res3dBlock(chans, chans)
        self.encoder_layer9 = Deconv3dBlock(chans, chans)
        self.final_layer = nn.Conv3d(in_channels=chans, out_channels=out_dim, kernel_size=1, stride=1, padding=0)
    def forward(self, x):
        feat = x
        # run encoder_layer0 .. encoder_layer9 in order
        for idx in range(10):
            feat = getattr(self, 'encoder_layer%d' % idx)(feat)
        return self.final_layer(feat)
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
2,400
|
aharley/neural_3d_mapping
|
refs/heads/master
|
/backend/saverloader.py
|
import torch
import os,pathlib
import hyperparams as hyp
import numpy as np
def load_total(model, optimizer):
    """Restore the full model+optimizer checkpoint named by hyp.total_init.

    Returns the iteration to resume from (0 if nothing was loaded or
    hyp.total_init is unset).
    """
    if not hyp.total_init:
        return 0
    print("TOTAL INIT")
    print(hyp.total_init)
    start_iter = load(hyp.total_init, model, optimizer)
    if start_iter:
        print("loaded full model. resuming from iter %08d" % start_iter)
    else:
        print("could not find a full model. starting from scratch")
    return start_iter
def load_weights(model, optimizer):
    """Initialize model weights before training.

    If hyp.total_init is set, restore the full model+optimizer checkpoint
    and resume from its iteration. Otherwise, for every sub-network whose
    *_init hyperparameter names a checkpoint, copy that sub-network's
    weights in, and start from iteration 0. hyp.reset_iter forces the
    returned iteration back to 0 in either case.

    Returns the starting iteration.
    """
    if hyp.total_init:
        print("TOTAL INIT")
        print(hyp.total_init)
        start_iter = load(hyp.total_init, model, optimizer)
        if start_iter:
            print("loaded full model. resuming from iter %08d" % start_iter)
        else:
            print("could not find a full model. starting from scratch")
    else:
        start_iter = 0
        # map each sub-network attribute name to its init checkpoint (if any)
        inits = {"feat2dnet": hyp.feat2d_init,
                 "feat3dnet": hyp.feat3d_init,
                 "viewnet": hyp.view_init,
                 "detnet": hyp.det_init,
                 "flownet": hyp.flow_init,
                 "egonet": hyp.ego_init,
                 "occnet": hyp.occ_init,
        }
        for part, init in inits.items():
            if not init:
                continue
            # bug fix: the old if/elif chain only handled feat2dnet,
            # feat3dnet, occnet and flownet, and hit assert(False) for
            # viewnet/detnet/egonet even though their inits are listed
            # above; look the sub-module up by attribute name instead.
            assert hasattr(model, part), "model has no sub-network %r" % part
            model_part = getattr(model, part)
            if isinstance(model_part, list):
                # bug fix: each element used to be re-wrapped in a
                # one-item list, which crashed inside load_part (a list
                # has no state_dict); pass the module itself.
                for mp in model_part:
                    loaded_iter = load_part(mp, part, init)
            else:
                loaded_iter = load_part(model_part, part, init)
            if loaded_iter:
                print("loaded %s at iter %08d" % (init, loaded_iter))
            else:
                print("could not find a checkpoint for %s" % init)
    if hyp.reset_iter:
        # restart the iteration counter even though weights were loaded
        start_iter = 0
    return start_iter
def save(model, checkpoint_dir, step, optimizer, keep_latest=3):
    """Write a model+optimizer checkpoint for `step`.

    Before saving, prunes old "model-*" files (newest first by mtime) so
    that at most `keep_latest` checkpoints remain afterwards.
    """
    if not os.path.exists(checkpoint_dir):
        os.makedirs(checkpoint_dir)
    # newest first; everything beyond keep_latest-1 makes room for the new file
    existing = sorted(pathlib.Path(checkpoint_dir).glob('model-*'),
                      key=lambda p: p.stat().st_mtime,
                      reverse=True)
    for stale in existing[keep_latest-1:]:
        stale.unlink()
    path = os.path.join(checkpoint_dir, "model-%08d.pth"%(step))
    state = {
        'step': step,
        'model_state_dict': model.state_dict(),
        'optimizer_state_dict': optimizer.state_dict()
    }
    torch.save(state, path)
    print("Saved a checkpoint: %s"%(path))
def load(model_name, model, optimizer):
    """Load the newest full checkpoint under saved_checkpoints/<model_name>.

    Restores model and optimizer state in place; returns the checkpoint's
    step, or 0 when no checkpoint directory/file exists.
    """
    print("reading full checkpoint...")
    # checkpoint_dir = os.path.join("checkpoints/", model_name)
    checkpoint_dir = os.path.join("saved_checkpoints/", model_name)
    step = 0
    if not os.path.exists(checkpoint_dir):
        print("...ain't no full checkpoint here!")
        return step
    ckpt_names = os.listdir(checkpoint_dir)
    if len(ckpt_names) == 0:
        print("...ain't no full checkpoint here!")
        return step
    # file names look like "model-%08d.pth"; resume from the highest step
    steps = [int((nm.split('-')[1]).split('.')[0]) for nm in ckpt_names]
    step = max(steps)
    path = os.path.join(checkpoint_dir, 'model-%08d.pth' % (step))
    print("...found checkpoint %s"%(path))
    checkpoint = torch.load(path)
    model.load_state_dict(checkpoint['model_state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
    return step
def load_part(model, part, init):
    """Copy the weights of one sub-network out of a checkpoint.

    Looks for the newest checkpoint under saved_checkpoints/<init>.
    Checkpoint keys are "<part>.<param_name>"; only keys with the
    matching prefix are considered, and each is copied (in place) into
    `model`'s state dict when the stripped name exists there. Returns
    the checkpoint's step, or 0 when no checkpoint was found.
    """
    print("reading %s checkpoint..." % part)
    init_dir = os.path.join("saved_checkpoints", init)
    print(init_dir)
    step = 0
    if not os.path.exists(init_dir):
        print("...ain't no %s checkpoint here!"%(part))
        return step
    ckpt_names = os.listdir(init_dir)
    if len(ckpt_names) == 0:
        print("...ain't no %s checkpoint here!"%(part))
        return step
    # pick the file with the highest embedded step number
    steps = [int((nm.split('-')[1]).split('.')[0]) for nm in ckpt_names]
    step = max(steps)
    path = os.path.join(init_dir, ckpt_names[int(np.argmax(steps))])
    print("...found checkpoint %s" % (path))
    checkpoint = torch.load(path)
    model_state_dict = model.state_dict()
    prefix = part + "."
    for load_param_name, param in checkpoint['model_state_dict'].items():
        # skip params that belong to other sub-networks
        if not load_param_name.startswith(prefix):
            continue
        model_param_name = load_param_name[len(prefix):]
        if model_param_name in model_state_dict:
            # in-place copy into the live state-dict tensor
            model_state_dict[model_param_name].copy_(param.data)
        else:
            print('warning: %s is not in the state dict of the current model' % model_param_name)
    return step
|
{"/model_carla_det.py": ["/hyperparams.py", "/nets/detnet.py"], "/model_carla_ego.py": ["/hyperparams.py", "/nets/egonet.py"], "/exp_carla_static.py": ["/exp_base.py"], "/model_carla_static.py": ["/hyperparams.py", "/nets/emb2dnet.py", "/nets/emb3dnet.py", "/nets/viewnet.py"], "/nets/detnet.py": ["/hyperparams.py", "/archs/encoder3d.py"], "/exp_carla_ego.py": ["/exp_base.py"], "/archs/encoder3d.py": ["/archs/pixelshuffle3d.py"], "/backend/saverloader.py": ["/hyperparams.py"], "/main.py": ["/model_carla_static.py", "/model_carla_ego.py", "/model_carla_det.py", "/hyperparams.py"], "/nets/flownet.py": ["/hyperparams.py"], "/nets/emb2dnet.py": ["/archs/encoder2d.py", "/hyperparams.py"], "/nets/egonet.py": ["/hyperparams.py"], "/nets/viewnet.py": ["/hyperparams.py"], "/exp_base.py": ["/pretrained_nets_carla.py"], "/nets/emb3dnet.py": ["/hyperparams.py"], "/exp_carla_det.py": ["/exp_base.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.