index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
20,360
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/management/commands/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.core.management.base import BaseCommand, CommandError
from pootle_vcs.models import ProjectVCS
class SubCommand(BaseCommand):
    """Base class for the ``vcs`` management command's subcommands."""

    # Subcommands are dispatched from the parent command; skip checks.
    requires_system_checks = False

    def get_vcs(self, project):
        """Return the ProjectVCS for *project*, or raise CommandError."""
        try:
            vcs = project.vcs.get()
        except ProjectVCS.DoesNotExist:
            raise CommandError(
                "Project (%s) is not managed in VCS"
                % project.code)
        return vcs
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,361
|
phlax/pootle_vcs
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2006-2013 Translate House
#
# This file is part of Pootle_Vcs.
#
# Pootle_Vcs is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# Pootle_Vcs is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Pootle_Vcs; if not, see <http://www.gnu.org/licenses/>.
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
setup(
    name='pootle_vcs',
    version='0.0.1',
    description='Pootle VCS integration',
    long_description="Integration between Pootle and VCS backends",
    url='https://github.com/phlax/pootle_vcs',
    author='Ryan Northey',
    author_email='ryan@synca.io',
    license='GPL3',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        # Fixed: 'License :: OSI Approved :: GPL3' is not a registered
        # trove classifier; PyPI rejects uploads with unknown classifiers.
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='pootle vcs',
    packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
    install_requires=['rq_scheduler', 'pootle'],
)
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,362
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/migrations/0004_auto_20150923_2206.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
# Renames ProjectVCS.poll_frequency to pull_frequency (column rename only;
# no data transformation).
class Migration(migrations.Migration):

    dependencies = [
        ('pootle_vcs', '0003_auto_20150923_2155'),
    ]

    operations = [
        migrations.RenameField(
            model_name='projectvcs',
            old_name='poll_frequency',
            new_name='pull_frequency',
        ),
    ]
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,363
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/finder.py
|
import os
import re
class TranslationFileFinder(object):
    """Find translation files under a root path using a templated path.

    ``translation_path`` may contain the placeholders ``<lang>``,
    ``<filename>`` and ``<directory_path>``; each is translated into a
    named regex group and the resulting pattern is matched against the
    paths of files found by walking ``file_root``.
    """

    # Placeholder -> regex fragment. Fixed: patterns are now raw strings;
    # "\." and "[\w\/]*" in plain strings are invalid escape sequences
    # (deprecated, and fragile across Python versions). "." is escaped
    # first; later fragments contain no "." so ordering stays safe.
    path_mapping = (
        (".", r"\."),
        ("<lang>", r"(?P<lang>[\w]*)"),
        ("<filename>", r"(?P<filename>[\w]*)"),
        ("<directory_path>", r"(?P<directory_path>[\w\/]*)"))

    def __init__(self, translation_path):
        self.translation_path = translation_path
        self.regex = re.compile(self._parse_path())

    @property
    def file_root(self):
        """Static directory prefix of the path before any placeholder."""
        file_root = self.translation_path.split("<")[0]
        if not file_root.endswith("/"):
            # Drop a trailing partial component (e.g. a filename stub).
            file_root = "/".join(file_root.split("/")[:-1])
        return file_root

    def find(self):
        """Yield ``(file_path, groupdict)`` for each matching file.

        TODO: make sure translation_path has no ".." / validate.
        """
        for root, dirs, files in os.walk(self.file_root):
            for filename in files:
                file_path = os.path.join(root, filename)
                match = self.regex.match(file_path)
                if match:
                    yield file_path, match.groupdict()

    def _parse_path(self):
        """Translate the templated path into a regex pattern string."""
        path = self.translation_path
        for placeholder, pattern in self.path_mapping:
            path = path.replace(placeholder, pattern)
        return path
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,364
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/files.py
|
import os
from import_export.utils import import_file
from pootle_store.models import Store
from pootle_translationproject.models import TranslationProject
class RepositoryFile(object):
    """A translation file in a VCS repository, mapped onto a Pootle store."""

    def __init__(self, vcs, path, language, filename, directory_path=None):
        """
        :param vcs: the ProjectVCS this file belongs to.
        :param path: path of the file relative to the repository root.
        :param language: Language object for the file.
        :param filename: name of the target store in Pootle.
        :param directory_path: optional iterable of subdirectory names.
        """
        self.vcs = vcs
        self.language = language
        self.filename = filename
        if directory_path is not None:
            self.directory_path = '/'.join(directory_path)
        else:
            # Fixed: was ``[]`` — an empty string keeps the attribute a
            # consistent str type; both are falsy so all callers
            # (pootle_path, directory) behave identically.
            self.directory_path = ''
        self.path = path

    def __str__(self):
        # Fixed: ``self.__name__`` raised AttributeError — instances have
        # no __name__; use the class name.
        return "<%s: %s>" % (self.__class__.__name__, self.pootle_path)

    @property
    def pootle_path(self):
        """Absolute Pootle path, e.g. ``/en/project/dir/file.po``."""
        return "/".join(
            ['']
            + [x for x in
               [self.language.code,
                self.project.code,
                self.directory_path,
                self.filename]
               if x])

    @property
    def file_path(self):
        """Absolute path of the file inside the local repository clone."""
        return os.path.join(
            self.vcs.plugin.local_repo_path,
            self.path.strip("/"))

    @property
    def exists(self):
        return os.path.exists(self.file_path)

    @property
    def project(self):
        return self.vcs.project

    @property
    def translation_project(self):
        """TranslationProject for this file's language, created on demand."""
        try:
            return self.project.translationproject_set.get(
                language=self.language)
        except TranslationProject.DoesNotExist:
            return TranslationProject.objects.create(
                project=self.vcs.project,
                language=self.language)

    @property
    def directory(self):
        """Target Pootle directory, creating intermediate dirs as needed."""
        directory = self.translation_project.directory
        if self.directory_path:
            for subdir in self.directory_path.split("/"):
                (directory,
                 created) = directory.child_dirs.get_or_create(name=subdir)
        return directory

    @property
    def store(self):
        """Target Store, created on demand."""
        store, created = Store.objects.get_or_create(
            parent=self.directory,
            translation_project=self.translation_project,
            name=self.filename)
        if created:
            store.save()
        return store

    @property
    def store_vcs(self):
        # Imported here to avoid a circular import with models.
        from pootle_vcs.models import StoreVCS
        store_vcs, created = StoreVCS.objects.get_or_create(
            store=self.store, path=self.path)
        return store_vcs

    @property
    def latest_commit(self):
        """Latest VCS commit id for this file — backend must implement."""
        raise NotImplementedError

    def fetch(self):
        """Ensure a StoreVCS record exists for this file and return it."""
        return self.store_vcs

    def pull(self):
        """Import the file's units into Pootle and record the sync point."""
        with open(self.file_path) as f:
            import_file(
                f,
                pootle_path=self.pootle_path,
                rev=self.store.get_max_unit_revision())
        store_vcs = self.store_vcs
        store_vcs.last_sync_commit = self.latest_commit
        store_vcs.last_sync_revision = self.store.get_max_unit_revision()
        store_vcs.save()

    def read(self):
        """Return the raw file contents from the local clone."""
        # self.vcs.pull()
        with open(self.file_path) as f:
            return f.read()
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,365
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/plugins.py
|
import io
import logging
import os
from ConfigParser import ConfigParser
from pootle_language.models import Language
from pootle_store.models import Store
from .files import RepositoryFile
from .finder import TranslationFileFinder
logger = logging.getLogger(__name__)
class Plugin(object):
    """Base class for VCS backend plugins.

    Subclasses set ``name`` and implement ``pull``/``push`` and a
    ``file_class`` whose instances know their latest commit.
    """

    name = None
    file_class = RepositoryFile

    def __init__(self, vcs):
        self.vcs = vcs

    @property
    def is_cloned(self):
        """True if the repository exists on local disk."""
        if os.path.exists(self.local_repo_path):
            return True
        return False

    @property
    def local_repo_path(self):
        # NOTE(review): hard-coded scratch location — presumably should
        # come from settings; confirm.
        vcs_path = "/tmp"
        return os.path.join(vcs_path, self.vcs.project.code)

    @property
    def project(self):
        return self.vcs.project

    @property
    def stores(self):
        """All Pootle stores belonging to this plugin's project."""
        return Store.objects.filter(
            translation_project__project=self.project)

    @property
    def translation_files(self):
        """All StoreVCS records for this plugin's project."""
        # Imported here to avoid a circular import with models.
        from .models import StoreVCS
        return StoreVCS.objects.filter(
            store__translation_project__project=self.project)

    def fetch_translation_files(self):
        """Create StoreVCS records for every translation file found."""
        for repo_file in self.find_translation_files():
            repo_file.fetch()

    def find_translation_files(self):
        """Yield ``file_class`` instances for files matching the config."""
        config = self.read_config()
        for section in config.sections():
            if section == "default":
                section_subdirs = []
            else:
                section_subdirs = section.split("/")
            finder = TranslationFileFinder(
                os.path.join(
                    self.local_repo_path,
                    config.get(section, "translation_path")))
            for file_path, matched in finder.find():
                lang_code = matched['lang']
                try:
                    language = Language.objects.get(code=lang_code)
                except Language.DoesNotExist:
                    logger.warning(
                        "Language does not exist for %s: %s"
                        % (self.vcs, lang_code))
                    # Fixed: without this ``continue`` execution fell
                    # through with ``language`` unbound (NameError on the
                    # first miss) or stale from a previous iteration.
                    continue
                subdirs = (
                    section_subdirs
                    + [m for m in
                       matched.get('directory_path', '').strip("/").split("/")
                       if m])
                filename = (
                    matched.get("filename") or os.path.basename(file_path))
                yield self.file_class(
                    self.vcs,
                    file_path.replace(self.local_repo_path, ""),
                    language,
                    filename,
                    subdirs)

    def pull_translation_files(self):
        """Import every matching translation file into Pootle."""
        for repo_file in self.find_translation_files():
            repo_file.pull()

    def pull(self):
        """Update the local clone — backend must implement."""
        raise NotImplementedError

    def push(self):
        """Push local changes to the remote — backend must implement."""
        raise NotImplementedError

    def read(self, path):
        """Return the contents of *path* relative to the local clone."""
        target = os.path.join(self.local_repo_path, path)
        with open(target) as f:
            content = f.read()
        return content

    def read_config(self):
        """Pull the repo and parse its Pootle config file."""
        self.pull()
        config = ConfigParser()
        config.readfp(io.BytesIO(self.read(self.vcs.pootle_config)))
        return config

    def status(self):
        """Classify every tracked file by its sync state.

        Returns a dict of lists keyed by CONFLICT / VCS_ADDED /
        VCS_REMOVED / VCS_AHEAD / POOTLE_AHEAD / POOTLE_ADDED.
        """
        self.pull()
        status = dict(
            CONFLICT=[],
            VCS_ADDED=[],
            # Fixed: VCS_REMOVED was missing from the dict, so the append
            # below raised KeyError for files deleted from the repository.
            VCS_REMOVED=[],
            VCS_AHEAD=[],
            POOTLE_AHEAD=[])
        for store_vcs in self.translation_files:
            repo_file = store_vcs.repository_file
            repo_removed = not repo_file.exists
            repo_added = (
                store_vcs.last_sync_commit is None)
            repo_changed = (
                store_vcs.last_sync_commit is not None
                and (repo_file.latest_commit
                     != store_vcs.last_sync_commit))
            pootle_changed = (
                store_vcs.last_sync_commit is not None
                and (store_vcs.store.get_max_unit_revision()
                     != store_vcs.last_sync_revision))
            if repo_removed:
                status['VCS_REMOVED'].append(store_vcs)
            elif repo_added:
                status['VCS_ADDED'].append(store_vcs)
            elif repo_changed and pootle_changed:
                status['CONFLICT'].append(store_vcs)
            elif repo_changed:
                status['VCS_AHEAD'].append(store_vcs)
            elif pootle_changed:
                status['POOTLE_AHEAD'].append(store_vcs)
        status['POOTLE_ADDED'] = self.stores.filter(vcs__isnull=True)
        return status
class Plugins(object):
    """Registry mapping plugin names to plugin classes.

    Supports ``registry[name]`` lookup and ``name in registry`` tests.
    """

    def __init__(self):
        self.__plugins__ = {}

    def register(self, plugin):
        """Add *plugin* to the registry, keyed on its ``name``."""
        self.__plugins__[plugin.name] = plugin

    def __getitem__(self, key):
        return self.__plugins__[key]

    def __contains__(self, key):
        return key in self.__plugins__
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,366
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/management/commands/vcs_commands/info.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from pootle_vcs.management.commands import SubCommand
class ProjectInfoCommand(SubCommand):
    """Subcommand: print a summary of a project's VCS configuration."""

    help = "List VCS translations files managed by Pootle."

    def handle(self, project, *args, **options):
        vcs = self.get_vcs(project)
        info_lines = (
            "Project: %s" % project.code,
            "type: %s" % vcs.vcs_type,
            "URL: %s" % vcs.url,
            "enabled: %s" % vcs.enabled,
            "latest commit: %s" % vcs.get_latest_commit(),
            "fetch frequency: %s" % vcs.fetch_frequency,
            "push frequency: %s" % vcs.push_frequency)
        for info_line in info_lines:
            self.stdout.write(info_line)
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,367
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/management/commands/vcs.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import logging
import os
from optparse import NO_DEFAULT
# This must be run before importing Django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
from django.core.management.base import BaseCommand, CommandError
from pootle_project.models import Project
from pootle_vcs.models import ProjectVCS
from .vcs_commands.info import ProjectInfoCommand
from .vcs_commands.fetch_translations import FetchTranslationsCommand
from .vcs_commands.files import FilesCommand
from .vcs_commands.pull_translations import PullTranslationsCommand
from .vcs_commands.set_vcs import SetVCSCommand
from .vcs_commands.status import StatusCommand
logger = logging.getLogger('pootle.vcs')
class Command(BaseCommand):
    """``vcs`` management command: dispatches to per-action subcommands."""

    help = "Pootle VCS."

    subcommands = {
        "info": ProjectInfoCommand,
        "fetch_translations": FetchTranslationsCommand,
        "files": FilesCommand,
        "pull_translations": PullTranslationsCommand,
        "set_vcs": SetVCSCommand,
        "status": StatusCommand}

    def handle_subcommand(self, project, command, *args, **options):
        """Instantiate *command*, fill in its option defaults, execute it.

        Raises CommandError for an unknown subcommand name.
        """
        try:
            subcommand = self.subcommands[command]()
        except KeyError:
            raise CommandError("Unrecognised command: %s" % command)
        # Seed each subcommand option with its declared default so
        # execute() always receives a complete options dict.
        defaults = {}
        for opt in subcommand.option_list:
            if opt.default is NO_DEFAULT:
                defaults[opt.dest] = None
            else:
                defaults[opt.dest] = opt.default
        defaults.update(options)
        return subcommand.execute(project, *args, **defaults)

    def handle(self, *args, **kwargs):
        """With a project code: run a subcommand (default ``info``).

        With no recognised project: list all VCS-managed projects.
        """
        # Fixed: ``project`` was referenced before assignment when the
        # command was invoked with no arguments at all (NameError).
        project = None
        if args:
            project_code = args[0]
            args = args[1:]
            try:
                project = Project.objects.get(code=project_code)
            except Project.DoesNotExist:
                project = None
        if project:
            return self.handle_subcommand(
                project, *(args or ['info']), **kwargs)
        else:
            for project in Project.objects.all():
                try:
                    self.stdout.write(
                        "%s\t%s"
                        % (project.code, project.vcs.get().url))
                except ProjectVCS.DoesNotExist:
                    # Projects without VCS are silently skipped from the
                    # listing.
                    pass
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,368
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/management/commands/vcs_commands/set_vcs.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.core.management.base import CommandError
from pootle_vcs import plugins
from pootle_vcs.management.commands import SubCommand
from pootle_vcs.models import ProjectVCS
class SetVCSCommand(SubCommand):
    """Subcommand: set or update a project's VCS type and URL."""

    # Fixed: help text was copy-pasted from the status subcommand.
    help = "Set the VCS type and URL for a project."

    def handle(self, project, *args, **options):
        # Expects exactly two positional args: (vcs_type, url).
        if len(args) != 2:
            # Fixed: message was missing its verb ("You must a VCS...").
            raise CommandError("You must provide a VCS type and VCS url")
        try:
            # Existence check only — raises KeyError for unknown types.
            plugins[args[0]]
        except KeyError:
            raise CommandError("Unrecognised VCS type: %s" % args[0])
        try:
            vcs = project.vcs.get()
        except ProjectVCS.DoesNotExist:
            vcs = project.vcs.create()
        vcs.vcs_type = args[0]
        vcs.url = args[1]
        vcs.save()
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,369
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/schedule.py
|
import django_rq
import datetime
# NOTE(review): Python 2 print statement — this module is Python 2 only.
def func():
    # Placeholder job body — presumably a stand-in for real VCS sync work;
    # confirm before relying on this module.
    print "Boom!"

# Scheduling happens at import time as a module-level side effect:
# importing this module registers ``func`` to run every 5 seconds on the
# 'default' rq scheduler queue.
scheduler = django_rq.get_scheduler('default')
scheduler.schedule(datetime.datetime.utcnow(),
                   func,
                   interval=5)
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,370
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/management/commands/vcs_commands/fetch_translations.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from pootle_vcs.management.commands import SubCommand
class FetchTranslationsCommand(SubCommand):
    """Subcommand: create VCS records for translation files in Pootle."""

    help = "Fetch translations into Pootle from VCS."

    def handle(self, project, *args, **options):
        vcs = self.get_vcs(project)
        vcs.fetch_translation_files()
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,371
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/__init__.py
|
from .plugins import Plugins, Plugin
from .files import RepositoryFile
# Reference the imported names so linters do not flag them as unused —
# they are re-exported as part of the package API.
(Plugin, RepositoryFile)

# Global plugin registry; VCS backends register themselves here.
plugins = Plugins()
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,372
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/migrations/0002_projectvcs_project_type.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the ``project_type`` field to ``ProjectVCS``."""

    dependencies = [
        ('pootle_vcs', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='projectvcs',
            name='project_type',
            # Existing rows are backfilled with 'git'; preserve_default=False
            # then drops the default so new rows must supply a value.
            field=models.CharField(default='git', max_length=32),
            preserve_default=False,
        ),
    ]
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,373
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/management/commands/vcs_commands/status.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from pootle_vcs.models import ProjectVCS
from pootle_vcs.management.commands import SubCommand
class StatusCommand(SubCommand):
    """Report the synchronization status between Pootle and a project's VCS."""

    help = "Status of vcs repositories."

    def handle(self, project, *args, **options):
        """Print which stores/files differ between Pootle and VCS.

        Bug fix: the previous implementation fell back to ``vcs = None``
        when the project had no VCS and then crashed with ``AttributeError``
        on ``vcs.status()``. ``SubCommand.get_vcs`` raises a proper
        ``CommandError`` instead, matching the other vcs subcommands.
        """
        vcs = self.get_vcs(project)
        status = vcs.status()
        # All categories empty means Pootle and VCS are in sync.
        synced = not any(
            status[key]
            for key in ("CONFLICT", "POOTLE_AHEAD", "POOTLE_ADDED",
                        "VCS_ADDED", "VCS_AHEAD"))
        if synced:
            self.stdout.write("Everything up-to-date")
            return
        if status["CONFLICT"]:
            self.stdout.write("Both changed:")
            for repo_file in status["CONFLICT"]:
                self.stdout.write(repo_file)
        if status["POOTLE_ADDED"]:
            # Stores created in Pootle that have no VCS counterpart yet.
            for store in status["POOTLE_ADDED"]:
                self.stdout.write(
                    " %-50s %-50s %-20s\n"
                    % ("", store.pootle_path,
                       "Pootle added: %s" % store.get_max_unit_revision()))
        if status["POOTLE_AHEAD"]:
            self.stdout.write("Pootle changed:")
            for repo_file in status["POOTLE_AHEAD"]:
                self.stdout.write(repo_file)
        if status["VCS_ADDED"]:
            # Files added in VCS not yet known to Pootle.
            for store_vcs in status["VCS_ADDED"]:
                self.stdout.write(
                    " %-50s %-50s %-10s\n"
                    % (store_vcs.path,
                       store_vcs.store.pootle_path,
                       "VCS added: %s"
                       % store_vcs.repository_file.latest_commit[:8]))
        if status["VCS_AHEAD"]:
            # Files changed in VCS since the last sync.
            for store_vcs in status["VCS_AHEAD"]:
                self.stdout.write(
                    " %-50s %-50s %-20s\n"
                    % (store_vcs.path,
                       store_vcs.store.pootle_path,
                       "VCS updated: %s...%s"
                       % (store_vcs.last_sync_commit[:8],
                          store_vcs.repository_file.latest_commit[:8])))
        self.stdout.write("\n")
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,374
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/management/commands/vcs_commands/files.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from pootle_vcs.management.commands import SubCommand
class FilesCommand(SubCommand):
    """List the VCS-tracked translation files known to a project."""

    help = "List VCS translations files managed by Pootle."

    def handle(self, project, *args, **options):
        """Print one aligned row per tracked translation file."""
        vcs = self.get_vcs(project)
        tracked = vcs.list_translation_files().order_by("path")
        row_format = " %-50s %-50s %-12s %-12s \n"
        for vcs_store in tracked.iterator():
            short_commit = (vcs_store.last_sync_commit or '')[:8]
            self.stdout.write(
                row_format % (vcs_store.path,
                              vcs_store.store.pootle_path,
                              vcs_store.last_sync_revision,
                              short_commit))
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,376
|
weiss1217/hideTwi
|
refs/heads/master
|
/hideTwi.py
|
#-------------------------------------------------------------------------------
# Name: hideTwi
# Purpose:
#
# Author: T
#
# Created: 24/12/2020
# Copyright: (c) T 2020
# Licence: <your licence>
#-------------------------------------------------------------------------------
import json, config #標準のjsonモジュールとconfig.pyの読み込み
from requests_oauthlib import OAuth1Session #OAuthのライブラリの読み込み
from urllib.parse import parse_qsl
import requests
import codecs
import os
import sys
from time import sleep
import urllib.request as urlreq
import datetime
import threading
import base64
import webbrowser
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
import sip
import ast
# Twitter API endpoint URLs.
url1 = "https://api.twitter.com/1.1/statuses/home_timeline.json"  # home timeline
url2 = "https://api.twitter.com/1.1/statuses/update.json"  # post a tweet
url3 = "https://api.twitter.com/1.1/favorites/create.json"  # favorite a tweet
url4 = "https://upload.twitter.com/1.1/media/upload.json"  # media (image) upload
url5 = "https://api.twitter.com/oauth/request_token"
url6 = "https://api.twitter.com/oauth/authenticate"
url7 = "https://api.twitter.com/oauth/access_token"
# OAuth credentials, populated at runtime (see MainWindow.get_key / get_AT).
CK = ""
CS = ""
AT = ""
ATS = ""
twitter = ""
# Paths of images attached to the pending tweet.
image_list = []
# Display indices already deleted in the image-list window.
delete_index = []
# Number of currently attached images (max 4).
image_num = 0
# Window opacities (0.0-1.0), adjusted via the slider.
alpha_rate = config.MAIN_ALPHA
image_alpha_rate = config.IMAGE_ALPHA
class ImageWindow(QWidget):
    """Secondary window listing the images attached to the pending tweet."""

    def __init__(self, parent=None):
        super(ImageWindow, self).__init__(parent)
        # Window geometry.
        self.w = 1000
        self.h = 480
        self.resize(self.w, self.h)
        # Integer division: Qt size APIs expect ints, not floats.
        self.setMinimumSize(self.w // 2, self.h // 2)
        self.widthFactor = 1
        self.heightFactor = 1
        self.setWindowTitle('画像一覧')
        self.setStyleSheet("background-color: " + config.IMAGE_COLOR + ";")
        self.setWindowOpacity(image_alpha_rate)
        self.label_list = []
        self.button_list = []
        self.image_display()

    def image_display(self):
        """Create a path label and a delete button for each attached image."""
        if not image_list:
            return
        for i in range(len(image_list)):
            # Image path label.
            self.label_list.append(QLabel(self))
            self.label_list[i].move(50, 40 * (i + 1))
            self.label_list[i].setText('<p><font size="4" color="' + config.PHONT_COLOR + '">' + image_list[i] + '</font></p>')
            # Delete button. Binding the index as a default argument avoids
            # the late-binding closure pitfall (the original worked around it
            # with an if/elif cascade over indices 0-3); ``checked`` absorbs
            # the bool emitted by QPushButton.clicked.
            self.button_list.append(QPushButton('削除', self))
            self.button_list[i].clicked.connect(
                lambda checked=False, index=i: self.delete_image(index))
            self.button_list[i].resize(100, 30)
            self.button_list[i].setStyleSheet("background-color: #FFFFFF;")
            self.button_list[i].move(900, 40 * (i + 1))

    def delete_image(self, index: int):
        """Remove the image at display position ``index`` from the tweet.

        ``image_list`` shrinks as images are removed while the widgets keep
        their original display positions, so the list index is the display
        index minus the number of already-deleted entries before it.
        """
        global image_num
        global image_list
        print(index)
        # Equivalent to the original if/elif cascade over indices 0-3,
        # but works for any index.
        minus_index = len({deleted for deleted in delete_index
                           if deleted < index})
        image_list.pop(index - minus_index)
        delete_index.append(index)
        image_num -= 1
        self.label_list[index].hide()
        self.button_list[index].hide()
        main_window.update_image_num()
class MainWindow(QWidget):
    """Main window: compose tweets, attach images, bulk-save hashtag images."""

    # Emitted from the download worker thread with the number of saved images.
    progressChanged = pyqtSignal(int)

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.get_key()
        self.oauth()
        # Window geometry.
        self.w = 1280
        self.h = 300
        self.resize(self.w, self.h)
        # Integer division: Qt size APIs expect ints, not floats.
        self.setMinimumSize(self.w // 2, self.h // 2)
        self.widthFactor = 1
        self.heightFactor = 1
        self.setWindowTitle('ついったーするやつ')
        self.setStyleSheet("background-color: " + config.IMAGE_COLOR + ";")
        self.setWindowOpacity(alpha_rate)
        # Tweet-composition widgets.
        self.tweet_init()
        # Hashtag image-saving widgets.
        self.hash_init()
        # Window-opacity slider.
        self.alpha_change_init()

    def get_key(self):
        """Fetch the app's consumer key/secret from the helper web service."""
        global CK
        global CS
        self.get_response = requests.get('https://mythos.pythonanywhere.com/twitter/request_key')
        # The service returns a Python-literal dict, hence ast.literal_eval
        # rather than json.loads.
        key_token = ast.literal_eval(self.get_response.content.decode("utf-8"))
        CK = key_token["CK"]
        CS = key_token["CS"]

    def oauth(self):
        """Start the three-legged OAuth flow in the user's browser."""
        self.request_response = requests.get('https://mythos.pythonanywhere.com/twitter/request_token?oauth_callback=https://mythos.pythonanywhere.com/twitter/access_token')
        request_token = ast.literal_eval(self.request_response.content.decode("utf-8"))
        authenticate_endpoint = "https://api.twitter.com/oauth/authenticate?oauth_token=" + request_token["oauth_token"]
        webbrowser.open(authenticate_endpoint)

    def get_AT(self):
        """Fetch the access token/secret produced by the OAuth callback."""
        global AT
        global ATS
        self.oauth_response = requests.get('https://mythos.pythonanywhere.com/twitter/oauth')
        request_token = ast.literal_eval(self.oauth_response.content.decode("utf-8"))
        AT = request_token["AT"]
        ATS = request_token["ATS"]

    def tweet_init(self):
        """Build the tweet-composition widgets."""
        # Prompt label.
        self.lbl = QLabel(self)
        self.lbl.move(50, 10)
        self.lbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">呟く内容を書けよ</font></p>')
        # Tweet text box.
        self.textbox = QTextEdit(self)
        self.textbox.move(40, 40)
        self.textbox.setStyleSheet("background-color: #FFFFFF;")
        # Tweet button.
        self.tweetbutton = QPushButton('tweet', self)
        self.tweetbutton.clicked.connect(self.tweet)
        self.tweetbutton.resize(100, 30)
        self.tweetbutton.setStyleSheet("background-color: #FFFFFF;")
        # Attach-image button.
        self.imagebutton = QPushButton('画像添付', self)
        self.imagebutton.clicked.connect(self.add_image)
        self.imagebutton.resize(100, 30)
        self.imagebutton.setStyleSheet("background-color: #FFFFFF;")
        # Attached-image counter label.
        self.imagelbl = QLabel(self)
        self.imagelbl.move(50, 125)
        self.imagelbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">添付画像数 : ' + str(image_num) + '</font></p>')
        # Image-list button.
        self.listbutton = QPushButton('画像一覧', self)
        self.listbutton.clicked.connect(self.list_image)
        self.listbutton.resize(100, 30)
        self.listbutton.move(180, 118)
        self.listbutton.setStyleSheet("background-color: #FFFFFF;")

    def hash_init(self):
        """Build the hashtag image-saving widgets."""
        # Hashtag prompt label.
        self.hashlbl = QLabel(self)
        self.hashlbl.move(50, 170)
        self.hashlbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">保存したい画像のハッシュタグを書けよ</font></p>')
        # "Also favorite" checkbox.
        self.hashcheckbox = QCheckBox("ふぁぼりてぇCheckBox", self)
        self.hashcheckbox.move(340, 170)
        self.hashcheckbox.setChecked(False)
        # Hashtag text box.
        self.hashbox = QLineEdit(self)
        self.hashbox.move(40, 200)
        self.hashbox.setStyleSheet("background-color: #FFFFFF;")
        # Save button.
        self.savebutton = QPushButton('保存', self)
        self.savebutton.clicked.connect(self.save_hash)
        self.savebutton.resize(100, 30)
        self.savebutton.setStyleSheet("background-color: #FFFFFF;")
        # Saved-count label, hidden until a save completes.
        self.savelbl = QLabel(self)
        self.savelbl.move(50, 240)
        self.savelbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">保存件数 : </font></p>')
        self.savelbl.setVisible(False)
        # Worker thread reports completion through this signal.
        self.progressChanged.connect(self.visible_hash)

    def visible_hash(self, count):
        """Show the saved-count label, then auto-hide it after 5 seconds."""
        self.savelbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">保存件数 :' + str(count) + ' 件 </font></p>')
        self.savelbl.setVisible(True)
        t = threading.Thread(target=self.invisible_hash)
        t.start()

    def invisible_hash(self):
        """Worker: hide the saved-count label after a short delay."""
        sleep(5)
        self.savelbl.setVisible(False)

    def alpha_change_init(self):
        """Build the window-opacity slider."""
        self.slider = QSlider(Qt.Horizontal, self)
        self.slider.setFocusPolicy(Qt.NoFocus)
        self.slider.valueChanged[int].connect(self.alpha_change)

    def alpha_change(self, value):
        """Map slider value 0-100 onto an opacity in [0.2, 1.0]."""
        global alpha_rate
        # Fix: image_alpha_rate was previously a dead local assignment; the
        # global declaration lets newly opened ImageWindows pick it up.
        global image_alpha_rate
        alpha_rate = 0.2 + value / 100 * 0.8
        image_alpha_rate = 0.2 + value / 100 * 0.8
        self.setWindowOpacity(alpha_rate)

    def resizeEvent(self, event):
        """Reposition/resize widgets proportionally to the window size."""
        self.widthFactor = self.rect().width() / 1280
        self.heightFactor = self.rect().height() / 300
        # Tweet widgets.
        self.textbox.resize(self.w * self.widthFactor * 0.85, 70)
        self.tweetbutton.move(40 + 30 + self.w * self.widthFactor * 0.85, 80)
        self.imagebutton.move(40 + 30 + self.w * self.widthFactor * 0.85, 40)
        # Hashtag widgets.
        self.hashbox.resize(self.w * self.widthFactor * 0.85, 30)
        self.savebutton.move(40 + 30 + self.w * self.widthFactor * 0.85, 200)
        # Opacity slider.
        self.slider.move(self.w * self.widthFactor - 130, self.h * self.heightFactor - 40)
        super(MainWindow, self).resizeEvent(event)

    def tweet(self):
        """Post the composed tweet, uploading any attached images first."""
        global image_num
        global image_list
        image_res_list = []
        media_id_list = []
        self.get_AT()
        twitter = OAuth1Session(CK, CS, AT, ATS)  # authenticated session
        tweet = self.textbox.toPlainText()
        if image_num != 0:
            for i in range(len(image_list)):
                # Fix: base64.encodestring was removed in Python 3.9;
                # encodebytes is the same encoder. The context manager
                # closes the file (the original leaked the handle).
                with open(image_list[i], 'rb') as image_file:
                    b64 = base64.encodebytes(image_file.read())
                # Upload the image.
                files = {"media": b64}
                res_image = twitter.post(url4, params=files)
                if res_image.status_code != 200:
                    print("画像をアップロードできませんでした。: ", res_image.status_code, res_image.text)
                else:
                    image_res_list.append(res_image)
        for i in range(len(image_res_list)):
            media_id_list.append(json.loads(image_res_list[i].text)['media_id'])
        if image_num != 0:
            if len(image_res_list) == 0:
                # All uploads failed; keep image_list so the user can retry.
                print("画像投稿失敗")
                return
            else:
                params = {"status": tweet, "media_ids": media_id_list}
        else:
            params = {"status": tweet}
        res = twitter.post(url2, params=params)
        if res.status_code == 200:
            print("tweet success")
            self.textbox.setText("")
        else:
            print("Failed. : %d" % res.status_code)
        image_list.clear()
        image_num = 0
        self.update_image_num()

    def add_image(self):
        """Attach an image (max 4) chosen via a file dialog."""
        global image_num
        if image_num > 3:
            return
        path = os.getcwd()
        input_image_path = QFileDialog.getOpenFileName(
            QFileDialog(), caption="入力画像", directory=path, filter="*.png *.jpg")[0]
        if input_image_path != "":
            image_list.append(input_image_path)
            image_num += 1
            self.imagelbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">添付画像数 : ' + str(image_num) + '</font></p>')
            if image_num > 3:
                self.imagelbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">添付画像数 : ' + str(image_num) + ' (MAX) </font></p>')

    def list_image(self):
        """Open the attached-image list window."""
        # Fix: keep a reference on self. A plain local was garbage-collected
        # as soon as this method returned, so the window closed immediately.
        self.image_window = ImageWindow()
        self.image_window.show()

    def update_image_num(self):
        """Refresh the attached-image counter label."""
        self.imagelbl.setText('<p><font size="4" color="' + config.PHONT_COLOR + '">添付画像数 : ' + str(image_num) + '</font></p>')

    def save_hash(self):
        """Kick off a worker thread that downloads images for the hashtag."""
        # get_AT refreshes the global access tokens used by the worker.
        # (An unused OAuth1Session created here in the original was removed;
        # save_hash_thread builds its own session.)
        self.get_AT()
        hashtag = self.hashbox.text().strip()
        t = threading.Thread(target=self.save_hash_thread, args=(hashtag,))
        t.start()

    def save_hash_thread(self, hashtag):
        """Worker: search tweets for ``hashtag`` and save their images.

        Optionally favorites each tweet whose image is saved (checkbox).
        Emits ``progressChanged`` with the number of images saved.
        """
        if hashtag[:1] != "#":
            hashtag = "#" + hashtag
        query = hashtag + ' filter:images min_faves:0 exclude:retweets'
        hashtag = hashtag[1:]
        if config.IMAGE_DIRECTORY == "":
            save_dir = "./" + hashtag
        else:
            if not os.path.exists(config.IMAGE_DIRECTORY):
                print("指定したディレクトリは存在しません。")
                return
            else:
                save_dir = config.IMAGE_DIRECTORY + "\\" + hashtag
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        params = {"q": query, "count": 200}
        url = 'https://api.twitter.com/1.1/search/tweets.json'
        twitter = OAuth1Session(CK, CS, AT, ATS)  # authenticated session
        req = twitter.get(url, params=params)
        result = []
        if req.status_code == 200:
            tweets = json.loads(req.text)
            result = tweets['statuses']
        else:
            print("ERROR!: %d" % req.status_code)
            return
        save_count = 0
        for tweet in result:
            name = tweet['user']['screen_name']
            # Build a filesystem-safe timestamp from created_at.
            date = tweet['created_at']
            date = date.replace(" +0000", "")
            date = date.replace(" ", "-")
            date = date.replace(":", ".")
            count = 0
            try:
                media_list = tweet['extended_entities']['media']
                for img in media_list:
                    count += 1
                    img_url = img['media_url']
                    path = save_dir + "/[" + str(name) + "]_" + str(date) + "_" + str(count) + ".jpg"
                    print(path)
                    if os.path.exists(path):
                        print("重複のため保存しませんでした")
                    else:
                        tweet_id = tweet["id"]
                        params = {"id": tweet_id}
                        if self.hashcheckbox.isChecked():
                            # Favorite the tweet before saving its image.
                            res = twitter.post(url3, params=params)
                            if res.status_code == 200:
                                print("Favorite Success.")
                            else:
                                print("Failed. : %d" % res.status_code)
                        urlreq.urlretrieve(img_url, path)
                        print("画像を保存しました", img_url)
                        save_count += 1
                        print("-・" * 30)
            except Exception as e:
                # Tweets without extended media entities land here.
                print("画像を取得できませんでした")
                print(e)
                print("-・" * 30)
        self.progressChanged.emit(save_count)
if __name__ == '__main__':
    app = QApplication(sys.argv)
    # main_window must be module-global: ImageWindow.delete_image calls
    # main_window.update_image_num().
    main_window = MainWindow()
    main_window.show()
    sys.exit(app.exec_())
|
{"/hideTwi.py": ["/config.py"]}
|
20,377
|
weiss1217/hideTwi
|
refs/heads/master
|
/config.py
|
# Opacity of the main window (0.0 transparent - 1.0 opaque).
MAIN_ALPHA = 0.5
# Opacity of the image-list window.
IMAGE_ALPHA = 0.5
# Theme colour (CSS color code or color name).
#IMAGE_COLOR = "aliceblue" # or "#f0f8ff"
IMAGE_COLOR = "darkgray" # or "#a9a9a9"
# Font colour. NOTE: the identifier is spelled PHONT_COLOR throughout the
# application, so it is kept as-is.
PHONT_COLOR = "#000000"
# Directory where downloaded images are saved ("" = current directory).
IMAGE_DIRECTORY = "" # or "D:\hoge\huga\photo"
|
{"/hideTwi.py": ["/config.py"]}
|
20,382
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/views.py
|
from datetime import date
from rest_framework import generics, serializers
from rest_framework.response import Response
from politicos.models import Deputado, GastoCotaParlamentar
class GastoCotaParlamentarSerializer(serializers.ModelSerializer):
    """Serializes every field of a parliamentary-quota expense record."""

    class Meta:
        model = GastoCotaParlamentar
        fields = '__all__'
class DeputadoSerializer(serializers.ModelSerializer):
    """Serializes a deputado together with their expenses.

    ``depth = 2`` expands related objects (e.g. partido, uf, gastos)
    two relation levels deep instead of rendering primary keys.
    """

    class Meta:
        model = Deputado
        fields = [
            'id', 'nome', 'partido', 'uf', 'id_legislatura',
            'gastos'
        ]
        depth = 2
class DeputadoListView(generics.ListAPIView):
    """List deputados, prefetching expenses filtered by ``gastos_*`` params."""

    serializer_class = DeputadoSerializer

    def get_queryset(self):
        """Build the queryset, defaulting expense filters to this month."""
        base_qs = Deputado.objects.all().select_related('partido', 'uf')
        params = self.request.query_params.dict()
        today = date.today()
        # Default to the current month/year when the client sends nothing.
        params.setdefault('gastos_mes', today.month)
        params.setdefault('gastos_ano', today.year)
        # Strip the 'gastos_' marker and forward the rest to prefetch_gastos.
        expense_filters = {}
        for name, value in params.items():
            if name.startswith('gastos_'):
                expense_filters[name.replace('gastos_', '')] = value
        return base_qs.prefetch_gastos(**expense_filters)
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,383
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/comum/migrations/0001_initial.py
|
# Generated by Django 2.0.4 on 2018-05-04 18:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the ``comum`` app: create the Estado model."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Estado',
            fields=[
                # Two-letter state abbreviation used as the primary key.
                ('sigla', models.CharField(max_length=2, primary_key=True, serialize=False)),
                ('nome', models.CharField(max_length=255)),
            ],
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,384
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/management/commands/import_deputados.py
|
import requests
from django.core.management import BaseCommand
from politicos.models import Deputado
class Command(BaseCommand):
    """Import all deputados from the Câmara dos Deputados open-data API."""

    def pega_proxima_pagina(self, resposta):
        """Return the 'next' page URL, or a falsy value on the last page."""
        proximas = [
            link['href'] for link in resposta['links']
            if link['rel'] == 'next'
        ]
        return proximas and proximas[0]

    def handle(self, *args, **options):
        """Fetch every page of deputados and bulk-insert them.

        Bug fix: the previous loop only consumed a page when a *next* link
        existed, so the final page — and the whole response when there was
        a single page — was silently dropped. The loop now processes the
        current page before checking for a next one.
        """
        deputados = []
        camara_url = 'https://dadosabertos.camara.leg.br/api/v2/deputados/?formato=json&itens=100'
        resposta = requests.get(camara_url).json()
        while True:
            for deputado in resposta['dados']:
                deputados.append(Deputado(
                    id=deputado['id'],
                    nome=deputado['nome'],
                    partido_id=deputado['siglaPartido'],
                    uf_id=deputado['siglaUf'],
                    id_legislatura=deputado['idLegislatura']
                ))
            proxima_pagina = self.pega_proxima_pagina(resposta)
            if not proxima_pagina:
                break
            resposta = requests.get(proxima_pagina).json()
        Deputado.objects.bulk_create(deputados)
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,385
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/migrations/0003_auto_20180504_2040.py
|
# Generated by Django 2.0.4 on 2018-05-04 20:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('empresas', '0002_remove_empresa_unidade_federativa'),
]
operations = [
migrations.RemoveField(
model_name='socio',
name='empresa_origem',
),
migrations.AddField(
model_name='socio',
name='cpf_cnpj_socio',
field=models.CharField(db_index=True, max_length=14, null=True),
),
]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,386
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/management/commands/import_pessoas_juridicas.py
|
from datetime import datetime
import pandas as pd
from django.core.management import BaseCommand
from django.db import transaction
from empresas.models import Empresa, Sociedade
class Command(BaseCommand):
    """Import corporate partners (pessoas jurídicas) from a Receita CSV.

    Reads the CSV in chunks; for partner CNPJs not yet registered it
    creates placeholder ``Empresa`` rows, then bulk-creates the
    ``Sociedade`` links for the chunk.
    """

    def add_arguments(self, parser):
        parser.add_argument('csv', type=str)      # path to the source CSV
        parser.add_argument('uf', type=str)       # state being imported
        parser.add_argument('inicio', type=int)   # chunk index to resume from

    def handle(self, *args, **options):
        uf = options['uf']
        # Context managers fix the original resource leak: both log files
        # were opened but never closed, so buffered writes could be lost.
        with open(f'{uf}_PJ_LOG.txt', 'w') as log, \
                open('INVALIDOS_LOG.txt', 'a') as cnpjs_nao_registrados:
            log.write(f'{datetime.now().isoformat()} Abrindo CSV para PJ\n')
            # Lazy 100k-row chunks; CNPJ columns read as strings so leading
            # zeros are preserved.
            csv = pd.read_csv(
                options['csv'],
                chunksize=100000,
                dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}
            )
            cnpjs_salvos = sorted(Empresa.objects.values_list('cnpj', flat=True))
            for contador, grupo in enumerate(csv):
                # Skip chunks before 'inicio' so an interrupted import can
                # be resumed.
                if contador >= options.get('inicio', 0):
                    log.write(f'{datetime.now().isoformat()} Filtrando socios PJ do grupo {contador}\n')
                    # Keep only corporate partners (tipo 1).
                    grupo = grupo[grupo['codigo_tipo_socio'] == 1]
                    sociedades = []
                    log.write(f'{datetime.now().isoformat()} Criando empresas com cnpj invalido do grupo {contador}\n')
                    # Partners whose CNPJ is not in the registry yet.
                    invalidos = grupo[~grupo['cpf_cnpj_socio'].isin(cnpjs_salvos)]
                    for dados in invalidos.itertuples():
                        cnpjs_nao_registrados.write(f'{dados.cpf_cnpj_socio};{dados.nome_socio};{uf};{dados.cnpj_empresa};{dados.nome_empresa}]')
                    invalidos = invalidos.drop_duplicates(['cpf_cnpj_socio'])
                    empresas_invalidas = []
                    for dados in invalidos.itertuples():
                        empresas_invalidas.append(Empresa(
                            nome=f'INVALIDO {dados.nome_socio}',
                            cnpj=dados.cpf_cnpj_socio,
                            uf_id=uf,
                        ))
                        # Track it so later chunks don't recreate it.
                        cnpjs_salvos.append(dados.cpf_cnpj_socio)
                    Empresa.objects.bulk_create(empresas_invalidas)
                    log.write(f'{datetime.now().isoformat()} Importando sociedades do grupo {contador}\n')
                    for dados in grupo.itertuples():
                        sociedades.append(Sociedade(
                            tipo_socio=1,
                            qualificacao_socio=dados.codigo_qualificacao_socio,
                            empresa_id=dados.cnpj_empresa,
                            socio_pessoa_juridica_id=dados.cpf_cnpj_socio
                        ))
                    log.write(f'{datetime.now().isoformat()} Cirando Sociedades do grupo {contador}\n')
                    Sociedade.objects.bulk_create(sociedades)
            log.write(f'{datetime.now().isoformat()} Importação finalizada\n')
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,387
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/comum/models.py
|
from django.db import models
class Estado(models.Model):
    """A Brazilian federative unit, keyed by its two-letter abbreviation."""

    # Two-letter abbreviation, e.g. 'SP'; used as primary key.
    sigla = models.CharField(max_length=2, primary_key=True)
    nome = models.CharField(max_length=255)

    def __str__(self):
        return self.nome
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,388
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/models.py
|
from django.db import models
from django.db.models import Case, Count, F, OuterRef, Q, Subquery, Sum, Value, When
from django.db.models.functions import Coalesce
class EmpresaQuerySet(models.QuerySet):
    """Custom queryset with annotations linking companies to deputados."""

    def annotate_deputados(self):
        """Annotate each company with a matching deputado name (by name)."""
        # Local import avoids a circular import with politicos.models.
        from politicos.models import Deputado
        deputados_qs = Deputado.objects.values_list('nome', flat=True)
        # NOTE(review): matches partners to deputados by name only; confirm
        # the Subquery yields a single value per outer row as intended.
        return self.annotate(
            deputado=Subquery(
                deputados_qs.filter(
                    nome__in=OuterRef('sociedades__socio_pessoa_fisica__nome')
                )
            )
        )

    def annotate_deputados2(self):
        """Alternative formulation of annotate_deputados using a Q object."""
        from politicos.models import Deputado
        deputados_qs = Deputado.objects.values_list('nome', flat=True)
        return self.annotate(
            deputado=Q(sociedades__socio_pessoa_fisica__nome__in=deputados_qs)
        )

    def annotate_graus_sociedades(self, grau):
        """Annotate ``grau_1``..``grau_<grau>`` with partnership-chain counts.

        ``grau_n`` counts related companies reachable through ``n`` hops of
        the socio_pessoa_juridica relation, excluding chains that loop
        straight back (one or two hops) to the company itself.
        """
        # lookups(x) builds the x-hop relation path, e.g.
        # 'participacoes_sociedades__socio_pessoa_juridica__...' repeated x times.
        lookups = lambda x: '__'.join(['participacoes_sociedades__socio_pessoa_juridica'] * x)
        annotate_graus_sociedades = {
            f'grau_{n}': Coalesce(
                Sum(
                    Case(
                        When(
                            Q(**{f'{lookups(n)}__isnull': False})
                            &
                            # Exclude chains looping back to the previous hop
                            # (or to the company itself when n == 1).
                            ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 1)}' if n - 1 else 'cnpj')})
                            &
                            ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 2)}' if n - 2 > 0 else 'cnpj')}),
                            then=Value(1)
                        ),
                        output_field=models.IntegerField(),
                    )
                ),
                0,
            )
            for n in range(1, grau + 1)
        }
        return self.annotate(**annotate_graus_sociedades)

    def annotate_graus_sociedades2(self, grau):
        """Experimental Count-based variant of annotate_graus_sociedades."""
        # NOTE(review): ``Count`` does not accept a ``then`` keyword — this
        # variant looks unfinished and will likely raise TypeError when
        # evaluated; confirm whether it is still needed.
        lookups = lambda x: '__'.join(['participacoes_sociedades__socio_pessoa_juridica'] * x)
        annotate_graus_sociedades = {
            f'grau_{n}': Count(
                Q(**{f'{lookups(n)}__isnull': False})
                &
                ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 1)}' if n - 1 else 'cnpj')})
                &
                ~Q(**{f'{lookups(n)}': F(f'{lookups(n - 2)}' if n - 2 > 0 else 'cnpj')}),
                then=Value(1)
            )
            for n in range(1, grau + 1)
        }
        return self.annotate(**annotate_graus_sociedades)
class Empresa(models.Model):
    """A company from the Receita Federal registry, keyed by its CNPJ."""

    # 14-digit CNPJ stored as a string to preserve leading zeros.
    cnpj = models.CharField(max_length=14, primary_key=True)
    nome = models.CharField(max_length=255, null=True, db_index=True)
    uf = models.ForeignKey(
        'comum.Estado',
        db_index=True,
        related_name='empresas',
        on_delete=models.PROTECT,
    )
    # Custom queryset exposing the deputado/partnership annotations.
    objects = EmpresaQuerySet.as_manager()
class PessoaFisica(models.Model):
    """An individual (natural person) who is a company partner."""

    nome = models.CharField(max_length=255, null=True, db_index=True)
class Estrangeiro(models.Model):
    """A foreign partner, identified only by name."""

    nome = models.CharField(max_length=255, null=True, db_index=True)
class Sociedade(models.Model):
    """Link between a company (`empresa`) and one of its partners.

    Exactly one of the three `socio_*` foreign keys is expected to be set,
    matching `tipo_socio` -- NOTE(review): this is not enforced at the DB
    level; the import commands populate one FK per `tipo_socio` value.
    """
    # Discriminator telling which socio_* FK carries the partner.
    TIPOS_SOCIO = (
        (1, 'Pessoa Jurídica'),
        (2, 'Pessoa Física'),
        (3, 'Nome Exterior'),
    )
    # Partner-qualification codes as found in the source dataset
    # (codigo_qualificacao_socio column).
    QUALIFICACOES_SOCIO = (
        (5, 'Administrador'),
        (8, 'Conselheiro de Administração'),
        (10, 'Diretor'),
        (16, 'Presidente'),
        (17, 'Procurador'),
        (20, 'Sociedade Consorciada'),
        (21, 'Sociedade Filiada'),
        (22, 'Sócio'),
        (23, 'Sócio Capitalista'),
        (24, 'Sócio Comanditado'),
        (25, 'Sócio Comanditário'),
        (26, 'Sócio de Indústria'),
        (28, 'Sócio-Gerente'),
        (29, 'Sócio Incapaz ou Relat.Incapaz (exceto menor)'),
        (30, 'Sócio Menor (Assistido/Representado)'),
        (31, 'Sócio Ostensivo'),
        (37, 'Sócio Pessoa Jurídica Domiciliado no Exterior'),
        (38, 'Sócio Pessoa Física Residente no Exterior'),
        (47, 'Sócio Pessoa Física Residente no Brasil'),
        (48, 'Sócio Pessoa Jurídica Domiciliado no Brasil'),
        (49, 'Sócio-Administrador'),
        (52, 'Sócio com Capital'),
        (53, 'Sócio sem Capital'),
        (54, 'Fundador'),
        (55, 'Sócio Comanditado Residente no Exterior'),
        (56, 'Sócio Comanditário Pessoa Física Residente no Exterior'),
        (57, 'Sócio Comanditário Pessoa Jurídica Domiciliado no Exterior'),
        (58, 'Sócio Comanditário Incapaz'),
        (59, 'Produtor Rural'),
        (63, 'Cotas em Tesouraria'),
        (65, 'Titular Pessoa Física Residente ou Domiciliado no Brasil'),
        (66, 'Titular Pessoa Física Residente ou Domiciliado no Exterior'),
        (67, 'Titular Pessoa Física Incapaz ou Relativamente Incapaz (exceto menor)'),
        (68, 'Titular Pessoa Física Menor (Assistido/Representado)'),
        (70, 'Administrador Residente ou Domiciliado no Exterior'),
        (71, 'Conselheiro de Administração Residente ou Domiciliado no Exterior'),
        (72, 'Diretor Residente ou Domiciliado no Exterior'),
        (73, 'Presidente Residente ou Domiciliado no Exterior'),
        (74, 'Sócio-Administrador Residente ou Domiciliado no Exterior'),
        (75, 'Fundador Residente ou Domiciliado no Exterior'),
    )
    tipo_socio = models.PositiveSmallIntegerField(
        choices=TIPOS_SOCIO,
        null=True,
        db_index=True
    )
    qualificacao_socio = models.PositiveSmallIntegerField(
        choices=QUALIFICACOES_SOCIO,
        null=True
    )
    # The company that has this partner.
    empresa = models.ForeignKey(
        Empresa,
        related_name='sociedades',
        null=True,
        on_delete=models.PROTECT,
        db_index=True
    )
    # Partner when it is another company (tipo_socio == 1).
    socio_pessoa_juridica = models.ForeignKey(
        Empresa,
        related_name='participacoes_sociedades',
        null=True,
        on_delete=models.PROTECT,
        db_index=True
    )
    # Partner when it is an individual (tipo_socio == 2).
    socio_pessoa_fisica = models.ForeignKey(
        PessoaFisica,
        related_name='participacoes_sociedades',
        null=True,
        on_delete=models.PROTECT,
        db_index=True
    )
    # Partner when it is a foreign entity (tipo_socio == 3).
    socio_estrangeiro = models.ForeignKey(
        Estrangeiro,
        related_name='participacoes_sociedades',
        null=True,
        on_delete=models.PROTECT,
        db_index=True
    )
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,389
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/management/commands/import_empresas.py
|
import bisect
from datetime import datetime
import pandas as pd
from django.core.management import BaseCommand
from django.db import transaction
from empresas.models import Empresa
class Command(BaseCommand):
    """Import Empresa rows for one UF from a CSV, in 100k-row chunks.

    Skips CNPJs already present in the database, supports resuming from a
    chunk index (`inicio`), and writes a timestamped progress log.

    Fixed: the log file handle was never closed (leak on every run); the
    'Cirando' typo in the final log message is corrected to 'Criando'.
    """

    def add_arguments(self, parser):
        parser.add_argument('csv', type=str)      # path to the source CSV
        parser.add_argument('uf', type=str)       # UF code (Estado pk)
        parser.add_argument('inicio', type=int)   # chunk index to resume from

    def handle(self, *args, **options):
        uf = options['uf']
        inicio = options.get('inicio', 0)
        # Kept sorted so bisect.insort below stays consistent.
        cnpjs_salvos = sorted(Empresa.objects.values_list('cnpj', flat=True))
        with open(f'{uf}_EMPRESAS_LOG.txt', 'w') as log:
            log.write(f'{datetime.now().isoformat()} Abrindo CSV para {uf}\n')
            csv = pd.read_csv(
                options['csv'],
                chunksize=100000,
                dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}
            )
            for contador, grupo in enumerate(csv):
                if contador < inicio:
                    continue  # resume support: skip chunks already imported
                log.write(f'{datetime.now().isoformat()} Removendo duplicatas de empresas do grupo {contador} do {uf}\n')
                grupo = grupo.drop_duplicates(['cnpj_empresa'], keep='first')
                grupo = grupo[~grupo['cnpj_empresa'].isin(cnpjs_salvos)]
                log.write(f'{datetime.now().isoformat()} Importando dados de empresas do grupo {contador} do {uf}\n')
                empresas = []
                for dados in grupo.itertuples():
                    empresas.append(Empresa(
                        cnpj=dados.cnpj_empresa,
                        nome=dados.nome_empresa,
                        uf_id=uf
                    ))
                    # Record the new CNPJ so later chunks also treat it as
                    # a duplicate.
                    bisect.insort(cnpjs_salvos, dados.cnpj_empresa)
                log.write(f'{datetime.now().isoformat()} Criando Empresas do grupo {contador} do {uf}\n')
                Empresa.objects.bulk_create(empresas)
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,390
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/migrations/0003_deputado.py
|
# Generated by Django 2.0.4 on 2018-05-04 18:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the Deputado model, with FKs to politicos.Partido and
    comum.Estado (auto-generated; operations kept verbatim)."""
    dependencies = [
        ('comum', '0001_initial'),
        ('politicos', '0002_auto_20180504_1821'),
    ]
    operations = [
        migrations.CreateModel(
            name='Deputado',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('id_camara', models.IntegerField()),
                ('nome', models.CharField(max_length=255)),
                ('id_legislatura', models.IntegerField()),
                ('partido', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='politicos.Partido')),
                ('uf', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='comum.Estado')),
            ],
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,391
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/migrations/0006_auto_20180513_0541.py
|
# Generated by Django 2.0.4 on 2018-05-13 05:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter the through-Sociedade `empresas` M2M fields on Empresa,
    Estrangeiro and PessoaFisica (auto-generated; kept verbatim)."""
    dependencies = [
        ('empresas', '0005_empresa_uf'),
    ]
    operations = [
        migrations.AlterField(
            model_name='empresa',
            name='empresas',
            field=models.ManyToManyField(db_index=True, through='empresas.Sociedade', to='empresas.Empresa'),
        ),
        migrations.AlterField(
            model_name='estrangeiro',
            name='empresas',
            field=models.ManyToManyField(db_index=True, through='empresas.Sociedade', to='empresas.Empresa'),
        ),
        migrations.AlterField(
            model_name='pessoafisica',
            name='empresas',
            field=models.ManyToManyField(db_index=True, through='empresas.Sociedade', to='empresas.Empresa'),
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,392
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/_models.py
|
from django.db import models
class GastosDeputados(models.Model):
    """Raw, denormalized deputy-expense record; one column per source-CSV
    field. The `# Deputado` / `# Gasto` comments group fields by the entity
    they describe."""
    # Deputado
    codlegislatura = models.IntegerField(null=True)
    # Gasto
    datemissao = models.DateTimeField(null=True)
    idedocumento = models.IntegerField(null=True)
    idecadastro = models.IntegerField(null=True)
    indtipodocumento = models.IntegerField(null=True)
    # Deputado
    nucarteiraparlamentar = models.IntegerField(null=True)
    nudeputadoid = models.IntegerField(null=True)
    nulegislatura = models.IntegerField(null=True)
    # Gasto
    numano = models.IntegerField(null=True)
    numespecificacaosubcota = models.IntegerField(null=True)
    numlote = models.IntegerField(null=True)
    nummes = models.IntegerField(null=True)
    numparcela = models.IntegerField(null=True)
    numressarcimento = models.IntegerField(null=True)
    numsubcota = models.IntegerField(null=True)
    # Deputado
    sgpartido = models.CharField(max_length=18, null=True)
    sguf = models.CharField(max_length=2, null=True)
    txnomeparlamentar = models.CharField(max_length=63, null=True)
    # Gasto
    txtcnpjcpf = models.CharField(max_length=14, null=True)
    txtdescricao = models.CharField(max_length=127, null=True)
    txtdescricaoespecificacao = models.CharField(max_length=31, null=True)
    txtfornecedor = models.CharField(max_length=255, null=True)
    txtnumero = models.CharField(max_length=63, null=True)
    txtpassageiro = models.CharField(max_length=63, null=True)
    txttrecho = models.CharField(max_length=127, null=True)
    vlrdocumento = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    vlrglosa = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    vlrliquido = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    vlrrestituicao = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    class Meta:
        # One single-column index per commonly filtered/sorted column.
        indexes = [
            models.Index(fields=['-datemissao']),
            models.Index(fields=['datemissao']),
            models.Index(fields=['idedocumento']),
            models.Index(fields=['numressarcimento']),
            models.Index(fields=['sgpartido']),
            models.Index(fields=['sguf']),
            models.Index(fields=['txnomeparlamentar']),
            models.Index(fields=['txtcnpjcpf']),
            models.Index(fields=['txtdescricao']),
            models.Index(fields=['txtdescricaoespecificacao']),
            models.Index(fields=['txtfornecedor']),
            models.Index(fields=['vlrliquido']),
        ]
        ordering = ['-datemissao']
class SalariosMagistrados(models.Model):
    """Raw, denormalized magistrate-salary record; one column per source
    field. The `# Magistrado` / `# Salario` comments group the fields."""
    # Magistrado
    lotacao = models.CharField(max_length=255, null=True)
    cargo = models.CharField(max_length=63, null=True)
    cpf = models.CharField(max_length=11, null=True)
    # Salario
    data_de_publicacao = models.DateField(null=True)
    descontos_diversos = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    diarias = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    direitos_eventuais = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    direitos_pessoais = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    imposto_de_renda = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    indenizacoes = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    mesano_de_referencia = models.DateField(null=True)
    # Magistrado
    nome = models.CharField(max_length=63, null=True)
    orgao = models.CharField(max_length=63, null=True)
    # Salario
    previdencia_publica = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    remuneracao_do_orgao_de_origem = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    rendimento_liquido = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    retencao_por_teto_constitucional = models.DecimalField(max_digits=12, null=True, decimal_places=
2)
    subsidio = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    total_de_descontos = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    total_de_rendimentos = models.DecimalField(max_digits=12, null=True, decimal_places=2)
    tribunal = models.CharField(max_length=127, null=True)
    url = models.CharField(max_length=2000, null=True)
    class Meta:
        # One single-column index per commonly filtered/sorted column.
        indexes = [
            models.Index(fields=['cargo']),
            models.Index(fields=['mesano_de_referencia']),
            models.Index(fields=['nome']),
            models.Index(fields=['orgao']),
            models.Index(fields=['rendimento_liquido']),
            models.Index(fields=['retencao_por_teto_constitucional']),
            models.Index(fields=['total_de_rendimentos']),
            models.Index(fields=['tribunal']),
        ]
        ordering = ['tribunal', 'nome']
class SociosBrasil(models.Model):
    """Raw, denormalized company-partner (socio) record; one column per
    source-CSV field. The `# Empresa` / `# Socio` comments group fields by
    the entity they describe."""
    # Empresa
    cnpj_empresa = models.CharField(max_length=14, null=True)
    nome_empresa = models.CharField(max_length=255, null=True)
    codigo_tipo_socio = models.IntegerField(null=True)
    tipo_socio = models.CharField(max_length=15, null=True)
    # Socio
    cpf_cnpj_socio = models.CharField(max_length=14, null=True)
    # Empresa
    codigo_qualificacao_socio = models.IntegerField(null=True)
    qualificacao_socio = models.CharField(max_length=127, null=True)
    # Socio
    nome_socio = models.CharField(max_length=255, null=True)
    # Empresa
    unidade_federativa = models.CharField(max_length=2, null=True)
    class Meta:
        indexes = [
            models.Index(fields=['cnpj_empresa']),
            models.Index(fields=['nome_empresa']),
            models.Index(fields=['nome_socio']),
            models.Index(fields=['unidade_federativa']),
        ]
        ordering = ['cnpj_empresa', 'nome_socio']
class GastosDiretos(models.Model):
    """Raw, denormalized federal direct-spending record; one column per
    source field.

    Fixed: `models.Index` takes `fields` as a keyword-only argument, so the
    original positional `models.Index([...])` calls raised TypeError as soon
    as the module was imported; they now pass `fields=` explicitly, matching
    the other models in this file.
    """
    ano = models.IntegerField(null=True)
    codigo_acao = models.CharField(max_length=4, null=True)
    codigo_elemento_despesa = models.IntegerField(null=True)
    codigo_favorecido = models.CharField(max_length=112, null=True)
    codigo_funcao = models.IntegerField(null=True)
    codigo_grupo_despesa = models.IntegerField(null=True)
    codigo_orgao = models.IntegerField(null=True)
    codigo_orgao_superior = models.IntegerField(null=True)
    codigo_programa = models.IntegerField(null=True)
    codigo_subfuncao = models.IntegerField(null=True)
    codigo_unidade_gestora = models.IntegerField(null=True)
    data_pagamento = models.DateField(null=True)
    data_pagamento_original = models.CharField(max_length=112, null=True)
    gestao_pagamento = models.CharField(max_length=112, null=True)
    linguagem_cidada = models.CharField(max_length=199, null=True)
    mes = models.IntegerField(null=True)
    nome_acao = models.CharField(max_length=247, null=True)
    nome_elemento_despesa = models.CharField(max_length=113, null=True)
    nome_favorecido = models.CharField(max_length=208, null=True)
    nome_funcao = models.CharField(max_length=21, null=True)
    nome_grupo_despesa = models.CharField(max_length=25, null=True)
    nome_orgao = models.CharField(max_length=45, null=True)
    nome_orgao_superior = models.CharField(max_length=45, null=True)
    nome_programa = models.CharField(max_length=110, null=True)
    nome_subfuncao = models.CharField(max_length=50, null=True)
    nome_unidade_gestora = models.CharField(max_length=45, null=True)
    numero_documento = models.CharField(max_length=112, null=True)
    valor = models.DecimalField(max_digits=18, null=True, decimal_places=2)
    class Meta:
        indexes = [
            models.Index(fields=['-data_pagamento']),
            models.Index(fields=['codigo_favorecido']),
            models.Index(fields=['nome_elemento_despesa']),
            models.Index(fields=['nome_favorecido']),
            models.Index(fields=['nome_funcao']),
            models.Index(fields=['nome_grupo_despesa']),
            models.Index(fields=['nome_orgao_superior']),
            models.Index(fields=['nome_subfuncao']),
            models.Index(fields=['nome_unidade_gestora']),
            models.Index(fields=['valor']),
        ]
        ordering = ['-data_pagamento', 'nome_favorecido']
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,393
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/migrations/0005_empresa_uf.py
|
# Generated by Django 2.0.4 on 2018-05-07 20:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the `uf` FK (to comum.Estado) on Empresa; existing rows are
    backfilled with the one-off default 'ND' (auto-generated; kept verbatim)."""
    dependencies = [
        ('comum', '0001_initial'),
        ('empresas', '0004_auto_20180507_1717'),
    ]
    operations = [
        migrations.AddField(
            model_name='empresa',
            name='uf',
            field=models.ForeignKey(default='ND', on_delete=django.db.models.deletion.PROTECT, related_name='empresas', to='comum.Estado'),
            preserve_default=False,
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,394
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/management/commands/import_pessoas_fisicas.py
|
from datetime import datetime
import pandas as pd
from django.core.management import BaseCommand
from django.db import transaction
from empresas.models import PessoaFisica, Sociedade
class Command(BaseCommand):
    """Import individual partners (socios pessoa fisica) and their Sociedade
    rows from a CSV, in 100k-row chunks.

    Fixed: the log file handle was never closed (leak on every run);
    `pessoa_id` restarted at 1 on every invocation, which collides with
    PessoaFisica rows created by a previous (possibly resumed) import -- ids
    now continue after the current maximum; the unused `enumerate` index was
    dropped; 'Cirando' typos in log messages corrected to 'Criando'.
    """

    def add_arguments(self, parser):
        parser.add_argument('csv', type=str)      # path to the source CSV
        parser.add_argument('inicio', type=int)   # chunk index to resume from

    def handle(self, *args, **options):
        from django.db.models import Max
        inicio = options.get('inicio', 0)
        # Continue numbering after any rows already in the table.
        pessoa_id = (PessoaFisica.objects.aggregate(max_id=Max('id'))['max_id'] or 0) + 1
        with open('PF_LOG.txt', 'w') as log:
            log.write(f'{datetime.now().isoformat()} Abrindo CSV\n')
            csv = pd.read_csv(
                options['csv'],
                chunksize=100000,
                dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}
            )
            for contador, grupo in enumerate(csv):
                if contador < inicio:
                    continue  # resume support: skip chunks already imported
                log.write(f'{datetime.now().isoformat()} Filtrando socios PF do grupo {contador}\n')
                # codigo_tipo_socio == 2 marks individual (PF) partners.
                grupo = grupo[grupo['codigo_tipo_socio'] == 2]
                sociedades = []
                pessoas = []
                log.write(f'{datetime.now().isoformat()} Importando dados do grupo {contador}\n')
                for dados in grupo.itertuples():
                    pessoas.append(PessoaFisica(nome=dados.nome_socio, id=pessoa_id))
                    sociedades.append(Sociedade(
                        tipo_socio=2,
                        qualificacao_socio=dados.codigo_qualificacao_socio,
                        empresa_id=dados.cnpj_empresa,
                        socio_pessoa_fisica_id=pessoa_id
                    ))
                    pessoa_id += 1
                log.write(f'{datetime.now().isoformat()} Criando PFs do grupo {contador}\n')
                PessoaFisica.objects.bulk_create(pessoas)
                log.write(f'{datetime.now().isoformat()} Criando Sociedades do grupo {contador}\n')
                Sociedade.objects.bulk_create(sociedades)
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,395
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/graficos.py
|
import matplotlib.pylab as plt
from politicos.models import Deputado
def chunks(l, n):
    """Split *l* into consecutive slices of at most *n* items each.

    Returns a generator of slices; the last slice may be shorter than *n*.
    """
    return (l[start:start + n] for start in range(0, len(l), n))
def graficos_gasto_mensal():
    """Pair up 12-month windows of (labels, values) from the deputados'
    monthly spending averages, for plotting one chart per year."""
    medias = sorted(Deputado.objects.get_media_mensal().items())
    eixo_x = []
    eixo_y = []
    for chave, valor in medias:
        # Keys look like 'media_<ano>_<mes>'; label as 'ano/mes'.
        _, ano, mes = chave.split('_')[:3]
        eixo_x.append(f'{ano}/{mes}')
        eixo_y.append(valor or 0)
    return zip(chunks(eixo_x, 12), chunks(eixo_y, 12))
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,396
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/management/commands/import_estrangeiros.py
|
import bisect
from datetime import datetime
import pandas as pd
import numpy as np
import csv
from django.core.management import BaseCommand
from django.db import transaction
from empresas.models import Estrangeiro, Sociedade
class Command(BaseCommand):
    """Import foreign partners (socios estrangeiros) and their Sociedade
    rows from a CSV, in 100k-row chunks.

    Fixed: the log file handle was never closed (leak on every run); the
    local variable `csv` shadowed the imported `csv` module and is renamed;
    the 'Cirando' typo in the final log message is corrected to 'Criando'.
    """

    def add_arguments(self, parser):
        parser.add_argument('csv', type=str)      # path to the source CSV
        parser.add_argument('inicio', type=int)   # chunk index to resume from

    def handle(self, *args, **options):
        inicio = options.get('inicio', 0)
        with open('ESTRANGEIROS_LOG.txt', 'w') as log:
            log.write(f'{datetime.now().isoformat()} Abrindo CSV\n')
            leitor = pd.read_csv(
                options['csv'],
                chunksize=100000,
                dtype={'cpf_cnpj_socio': str, 'cnpj_empresa': str}
            )
            for contador, grupo in enumerate(leitor):
                if contador < inicio:
                    continue  # resume support: skip chunks already imported
                log.write(f'{datetime.now().isoformat()} Filtrando socios estrangeiros do grupo {contador}\n')
                # codigo_tipo_socio == 3 marks foreign partners.
                grupo = grupo[grupo['codigo_tipo_socio'] == 3]
                sociedades = []
                log.write(f'{datetime.now().isoformat()} Inserindo dados de estrangeiros do grupo {contador}\n')
                for dados in grupo.itertuples():
                    # NOTE(review): one INSERT per row; Estrangeiro has no
                    # visible natural key, so bulk_create would need explicit
                    # pk handling like the PF importer.
                    estrangeiro = Estrangeiro.objects.create(nome=dados.nome_socio)
                    sociedades.append(Sociedade(
                        tipo_socio=3,
                        qualificacao_socio=dados.codigo_qualificacao_socio,
                        empresa_id=dados.cnpj_empresa,
                        socio_estrangeiro=estrangeiro
                    ))
                log.write(f'{datetime.now().isoformat()} Criando Sociedades do grupo {contador}\n')
                Sociedade.objects.bulk_create(sociedades)
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,397
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/migrations/0005_auto_20180508_1559.py
|
# Generated by Django 2.0.4 on 2018-05-08 15:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create GastoCotaParlamentar, add Deputado.carteira_parlamentar, and
    wire the gasto FKs to Deputado and empresas.Empresa (auto-generated;
    kept verbatim)."""
    dependencies = [
        ('empresas', '0005_empresa_uf'),
        ('politicos', '0004_auto_20180504_1903'),
    ]
    operations = [
        migrations.CreateModel(
            name='GastoCotaParlamentar',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('legislatura', models.IntegerField(null=True)),
                ('data_emissao', models.DateTimeField(null=True)),
                ('id_documento', models.IntegerField(null=True)),
                ('tipo_documento', models.IntegerField(choices=[(0, 'Nota Fiscal'), (1, 'Recibo'), (2, 'Despesa no Exterior')], null=True)),
                ('ano', models.IntegerField(null=True)),
                ('especificacao_subcota', models.IntegerField(null=True)),
                ('lote', models.IntegerField(null=True)),
                ('mes', models.IntegerField(null=True)),
                ('parcela', models.IntegerField(null=True)),
                ('ressarcimento', models.IntegerField(null=True)),
                ('subcota', models.IntegerField(null=True)),
                ('cpf', models.CharField(max_length=14, null=True)),
                ('descricao', models.CharField(max_length=127, null=True)),
                ('descricao_especificacao', models.CharField(max_length=31, null=True)),
                ('fornecedor', models.CharField(max_length=255, null=True)),
                ('numero_documento', models.CharField(max_length=63, null=True)),
                ('nome_passageiro', models.CharField(max_length=63, null=True)),
                ('trecho_viagem', models.CharField(max_length=127, null=True)),
                ('valor_documento', models.DecimalField(decimal_places=2, max_digits=8, null=True)),
                ('valor_glosa', models.DecimalField(decimal_places=2, max_digits=8, null=True)),
                ('valor_liquido', models.DecimalField(decimal_places=2, max_digits=8, null=True)),
                ('valor_restituicao', models.DecimalField(decimal_places=2, max_digits=8, null=True)),
            ],
            options={
                'ordering': ['-data_emissao'],
            },
        ),
        migrations.AddField(
            model_name='deputado',
            name='carteira_parlamentar',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='gastocotaparlamentar',
            name='deputado',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='gastos', to='politicos.Deputado'),
        ),
        migrations.AddField(
            model_name='gastocotaparlamentar',
            name='empresa',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='gastos_deputados', to='empresas.Empresa'),
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,398
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/apps.py
|
from django.apps import AppConfig
class PoliticosConfig(AppConfig):
    """Django app configuration for the `politicos` app."""
    name = 'politicos'
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,399
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/migrations/0002_auto_20180504_1821.py
|
# Generated by Django 2.0.4 on 2018-05-04 18:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make Partido.sigla the primary key, dropping the auto `id` column
    (auto-generated; kept verbatim)."""
    dependencies = [
        ('politicos', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='partido',
            name='id',
        ),
        migrations.AlterField(
            model_name='partido',
            name='sigla',
            field=models.CharField(max_length=15, primary_key=True, serialize=False),
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,400
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/management/commands/import_partidos.py
|
import requests
from django.core.management import BaseCommand
from politicos.models import Partido
class Command(BaseCommand):
    """Import political parties from the Camara dos Deputados open-data API.

    Fixed: the HTTP request had no timeout (a hung server would block the
    command forever) and HTTP error statuses were never checked before the
    body was parsed as JSON.
    """

    def handle(self, *args, **options):
        camara_url = 'https://dadosabertos.camara.leg.br/api/v2/partidos/?formato=json&itens=100'
        resposta = requests.get(camara_url, timeout=30)
        resposta.raise_for_status()
        partidos = [
            Partido(
                id_camara=partido['id'],
                nome=partido['nome'],
                sigla=partido['sigla'],
            )
            for partido in resposta.json()['dados']
        ]
        Partido.objects.bulk_create(partidos)
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,401
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/models.py
|
from django.db import models
from django.db.models import Avg, F, FilteredRelation, Prefetch, Q, Sum
class DeputadoQuerySet(models.QuerySet):
    """Queryset helpers computing monthly spending statistics for deputados."""

    def annotate_gasto_mensal_por_deputado(self):
        """Annotate one `gastos_YYYY_MM` sum of gastos valor_liquido for
        every month from 2009-01 through 2018-12."""
        meses = range(1, 13)
        anos = range(2009, 2019)
        annotations = {
            f'gastos_{ano}_{mes:02}': Sum(
                'gastos__valor_liquido',
                filter=Q(gastos__mes=mes, gastos__ano=ano)
            )
            for ano in anos for mes in meses
        }
        return self.annotate(**annotations)

    def annotate_gasto_no_mes_por_deputado(self, mes, ano):
        """Annotate the `gastos_YYYY_MM` spending sum for one month/year."""
        annotation = {
            f'gastos_{ano}_{mes:02}': Sum(
                'gastos__valor_liquido',
                filter=Q(gastos__mes=mes, gastos__ano=ano)
            )
        }
        return self.annotate(**annotation)

    def annotate_gasto_no_mes_por_deputado2(self, mes, ano):
        """FilteredRelation-based variant of annotate_gasto_no_mes_por_deputado."""
        return self.annotate(
            gastos_filtrados=FilteredRelation(
                'gastos',
                condition=Q(gastos__mes=mes, gastos__ano=ano)
            )
        ).annotate(
            **{f'gastos_{ano}_{mes:02}': Sum('gastos_filtrados__valor_liquido')}
        )

    def get_media_mensal(self):
        """Aggregate a `media_YYYY_MM` average (across deputados) of the
        per-deputado monthly sums, for 2009-01 through 2018-12."""
        meses = range(1, 13)
        anos = range(2009, 2019)
        aggregations = {
            f'media_{ano}_{mes:02}': Avg(f'gastos_{ano}_{mes:02}')
            for ano in anos for mes in meses
        }
        return self.annotate_gasto_mensal_por_deputado().aggregate(**aggregations)

    def prefetch_gastos(self, **kwargs):
        """Prefetch `gastos` (with their empresa pre-joined), filtered by
        **kwargs."""
        # GastoCotaParlamentar is defined later in this module; the name is
        # resolved when the method runs, so the forward reference is safe.
        gastos_queryset = GastoCotaParlamentar.objects.select_related(
            'empresa'
        ).filter(**kwargs)
        prefetch = Prefetch('gastos', queryset=gastos_queryset)
        return self.prefetch_related(prefetch)

    def annotate_gastos_acima_dobro(self, descricao_gasto):
        """For one expense description, annotate how many of each deputado's
        expenses exceed twice the overall average, plus the total count.

        Fixed: `Count` was referenced without being imported (the module's
        import list has Avg/F/FilteredRelation/Prefetch/Q/Sum only), which
        raised NameError whenever this method ran; it is now accessed via
        the already-imported `models` namespace.
        """
        media = GastoCotaParlamentar.objects.filter_descricao(descricao_gasto).media()
        acima_dobro = Q(gastos__descricao=descricao_gasto, gastos__valor_liquido__gt=media * 2)
        count_acima_dobro = models.Count('pk', filter=acima_dobro)
        count_geral = models.Count('pk', filter=Q(gastos__descricao=descricao_gasto))
        return self.annotate(gastos_acima_dobro=count_acima_dobro, qtd_gastos=count_geral)
class GastoCotaParlamentarQuerySet(models.QuerySet):
    """QuerySet helpers for quota expenses."""

    def filter_descricao(self, descricao):
        """Restrict the queryset to expenses with this exact description."""
        return self.filter(descricao=descricao)

    def media(self):
        """Average ``valor_liquido`` over the current queryset (None if empty)."""
        aggregated = self.aggregate(media=Avg('valor_liquido'))
        return aggregated['media']
class Partido(models.Model):
    """Political party, keyed by its acronym."""
    # The acronym (e.g. 'PT') is used as the natural primary key.
    sigla = models.CharField(max_length=15, primary_key=True)
    nome = models.CharField(max_length=255)
    # Identifier of the party in the Chamber of Deputies (Câmara) data.
    id_camara = models.IntegerField()
    def __str__(self):
        return self.sigla
class Deputado(models.Model):
    """Federal deputy; expenses hang off the reverse 'gastos' relation."""
    nome = models.CharField(max_length=255)
    partido = models.ForeignKey(Partido, on_delete=models.PROTECT)
    # Home state; the Estado model lives in the 'comum' app.
    uf = models.ForeignKey('comum.Estado', on_delete=models.PROTECT)
    id_legislatura = models.IntegerField()
    # Parliamentary card number; missing for some imported rows.
    carteira_parlamentar = models.IntegerField(null=True)
    objects = DeputadoQuerySet.as_manager()
    def __str__(self):
        # partido_id is the party acronym (Partido's primary key).
        return f'{self.nome} - {self.partido_id}'
class GastoCotaParlamentar(models.Model):
    """One expense charged against a deputy's parliamentary quota.

    Nearly every column is nullable because rows are bulk-imported as-is
    from the Chamber's public CSV dump, which is incomplete/inconsistent.
    """
    # Document kinds as coded in the open-data dump.
    TIPOS_DOCUMENTO = (
        (0, 'Nota Fiscal'),
        (1, 'Recibo'),
        (2, 'Despesa no Exterior'),
    )
    legislatura = models.IntegerField(null=True)
    data_emissao = models.DateTimeField(null=True)
    id_documento = models.IntegerField(null=True)
    tipo_documento = models.IntegerField(choices=TIPOS_DOCUMENTO, null=True)
    ano = models.IntegerField(null=True)
    especificacao_subcota = models.IntegerField(null=True)
    lote = models.IntegerField(null=True)
    mes = models.IntegerField(null=True)
    parcela = models.IntegerField(null=True)
    ressarcimento = models.IntegerField(null=True)
    subcota = models.IntegerField(null=True)
    # Filled only when the supplier id has 11 digits (see import_gastos).
    cpf = models.CharField(max_length=14, null=True)
    descricao = models.CharField(max_length=127, null=True)
    descricao_especificacao = models.CharField(max_length=31, null=True)
    fornecedor = models.CharField(max_length=255, null=True)
    numero_documento = models.CharField(max_length=63, null=True)
    nome_passageiro = models.CharField(max_length=63, null=True)
    trecho_viagem = models.CharField(max_length=127, null=True)
    valor_documento = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    valor_glosa = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    valor_liquido = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    valor_restituicao = models.DecimalField(null=True, max_digits=8, decimal_places=2)
    deputado = models.ForeignKey(
        Deputado,
        related_name='gastos',
        db_index=True,
        on_delete=models.PROTECT,
    )
    # Null when the supplier is a natural person (CPF) or unidentified.
    empresa = models.ForeignKey(
        'empresas.Empresa',
        related_name='gastos_deputados',
        db_index=True,
        on_delete=models.PROTECT,
        null=True,
    )
    objects = GastoCotaParlamentarQuerySet.as_manager()
    def __str__(self):
        return f'{self.valor_documento} {self.mes}/{self.ano}'
    class Meta:
        # One index per commonly filtered/ordered column.
        indexes = [
            models.Index(fields=['-data_emissao']),
            models.Index(fields=['data_emissao']),
            models.Index(fields=['mes']),
            models.Index(fields=['ano']),
            models.Index(fields=['descricao']),
            models.Index(fields=['descricao_especificacao']),
            models.Index(fields=['fornecedor']),
            models.Index(fields=['valor_liquido']),
            models.Index(fields=['valor_documento']),
            models.Index(fields=['deputado']),
            models.Index(fields=['empresa']),
        ]
        ordering = ['-data_emissao']
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,402
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/comum/apps.py
|
from django.apps import AppConfig
class ComumConfig(AppConfig):
    """Django app configuration for the 'comum' (shared) app."""
    name = 'comum'
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,403
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/migrations/0004_auto_20180504_1903.py
|
# Generated by Django 2.0.4 on 2018-05-04 19:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop legacy Partido columns and add the Chamber (Câmara) id.

    Auto-generated by Django; do not edit the operations retroactively.
    """
    dependencies = [
        ('politicos', '0003_deputado'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='partido',
            name='deferimento',
        ),
        migrations.RemoveField(
            model_name='partido',
            name='legenda',
        ),
        migrations.RemoveField(
            model_name='partido',
            name='presidente_nacional',
        ),
        migrations.AddField(
            model_name='partido',
            name='id_camara',
            # Temporary default only used to backfill existing rows.
            field=models.IntegerField(default=1),
            preserve_default=False,
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,404
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/management/commands/import_gastos.py
|
from dateutil.parser import parse
from datetime import datetime
import pandas as pd
from django.core.management import BaseCommand
from empresas.models import Empresa
from politicos.models import GastoCotaParlamentar, Deputado
class Command(BaseCommand):
    """Import quota expenses (gastos da cota parlamentar) from a CSV dump.

    For each CSV chunk: create any Empresa/Deputado rows referenced by the
    chunk but absent from the database, then bulk-insert the expenses.
    Progress is appended to GASTOS_LOG.txt.
    """

    def add_arguments(self, parser):
        parser.add_argument('csv', type=str)
        # Chunk index to resume from; earlier chunks are skipped.
        parser.add_argument('inicio', type=int)

    def parse_data(self, data):
        """Parse a date string leniently; return None for absent/bad values."""
        try:
            return parse(data)
        except Exception:
            return None

    def handle(self, *args, **options):
        # BUG FIX: the log handle used to be opened and never closed; the
        # context manager guarantees it is flushed/closed even on failure.
        with open('GASTOS_LOG.txt', 'a') as log:
            cnpjs_salvos = list(Empresa.objects.values_list('cnpj', flat=True))
            deputados_salvos = list(Deputado.objects.values_list('id', flat=True))
            log.write(f'{datetime.now().isoformat()} Abrindo CSV\n')
            # chunksize bounds memory use; txtCNPJCPF must stay a string so
            # leading zeros in CNPJ/CPF are preserved.
            csv = pd.read_csv(
                options['csv'],
                chunksize=100000,
                converters={
                    'txtCNPJCPF': str,
                }
            )
            for contador, grupo in enumerate(csv):
                if contador >= options.get('inicio', 0):
                    log.write(f'{datetime.now().isoformat()} Importando dados do grupo {contador}\n')
                    # Only legislature 55 is imported.
                    grupo = grupo[grupo['codLegislatura'] == 55]
                    log.write(f'{datetime.now().isoformat()} Criando empresas não registradas do grupo {contador}\n')
                    # Suppliers with a 14-digit CNPJ not yet in the database.
                    empresas_invalidas = grupo[
                        (grupo['txtCNPJCPF'].str.len() == 14) &
                        (~grupo['txtCNPJCPF'].isin(cnpjs_salvos))
                    ]
                    empresas_invalidas = empresas_invalidas.drop_duplicates(['txtCNPJCPF'], keep='first')
                    empresas = []
                    for empresa in empresas_invalidas.itertuples():
                        empresas.append(Empresa(
                            cnpj=empresa.txtCNPJCPF,
                            nome=empresa.txtFornecedor,
                            uf_id=empresa.sgUF
                        ))
                        # Track immediately so later chunks don't re-create it.
                        cnpjs_salvos.append(empresa.txtCNPJCPF)
                    Empresa.objects.bulk_create(empresas)
                    log.write(f'{datetime.now().isoformat()} Criando deputados não registradas do grupo {contador}\n')
                    deputados = grupo[
                        (~grupo['idecadastro'].isin(deputados_salvos))
                    ]
                    deputados = deputados.drop_duplicates(['idecadastro'], keep='first')
                    deputados_novos = []
                    for dados in deputados.itertuples():
                        deputados_novos.append(Deputado(
                            id=dados.idecadastro,
                            nome=dados.txNomeParlamentar,
                            partido_id=dados.sgPartido,
                            id_legislatura=dados.codLegislatura,
                            carteira_parlamentar=dados.nuCarteiraParlamentar,
                            uf_id=dados.sgUF,
                        ))
                        deputados_salvos.append(dados.idecadastro)
                    Deputado.objects.bulk_create(deputados_novos)
                    log.write(f'{datetime.now().isoformat()} Importando gastos do grupo {contador}\n')
                    gastos = []
                    for dados in grupo.itertuples():
                        # 14 digits -> CNPJ (company), 11 digits -> CPF (person).
                        cnpj = dados.txtCNPJCPF if len(dados.txtCNPJCPF) == 14 else None
                        cpf = dados.txtCNPJCPF if len(dados.txtCNPJCPF) == 11 else None
                        data = self.parse_data(dados.datEmissao)
                        gastos.append(GastoCotaParlamentar(
                            deputado_id=dados.idecadastro,
                            empresa_id=cnpj,
                            cpf=cpf,
                            legislatura=dados.codLegislatura,
                            data_emissao=data,
                            id_documento=dados.ideDocumento,
                            tipo_documento=dados.indTipoDocumento,
                            ano=dados.numAno,
                            mes=dados.numMes,
                            subcota=dados.numSubCota,
                            especificacao_subcota=dados.numEspecificacaoSubCota,
                            lote=dados.numLote,
                            parcela=dados.numParcela,
                            descricao=dados.txtDescricao,
                            descricao_especificacao=dados.txtDescricaoEspecificacao,
                            fornecedor=dados.txtFornecedor,
                            numero_documento=dados.txtNumero,
                            nome_passageiro=dados.txtPassageiro,
                            trecho_viagem=dados.txtTrecho,
                            valor_documento=dados.vlrDocumento,
                            valor_glosa=dados.vlrGlosa,
                            valor_liquido=dados.vlrLiquido,
                            valor_restituicao=dados.vlrRestituicao,
                        ))
                    log.write(f'{datetime.now().isoformat()} Criando gastos do grupo {contador}\n')
                    GastoCotaParlamentar.objects.bulk_create(gastos)
            log.write(f'{datetime.now().isoformat()} Importação finalizada\n')
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,405
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/empresas/migrations/0007_auto_20180517_1932.py
|
# Generated by Django 2.0.4 on 2018-05-17 19:32
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the 'empresas' field from the three supplier-type models.

    Auto-generated by Django; do not edit operations retroactively.
    """
    dependencies = [
        ('empresas', '0006_auto_20180513_0541'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='empresa',
            name='empresas',
        ),
        migrations.RemoveField(
            model_name='estrangeiro',
            name='empresas',
        ),
        migrations.RemoveField(
            model_name='pessoafisica',
            name='empresas',
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,406
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/migrations/0007_auto_20180513_0241.py
|
# Generated by Django 2.0.4 on 2018-05-13 02:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make GastoCotaParlamentar.empresa nullable (an expense may have a
    CPF supplier or no identified supplier). Auto-generated."""
    dependencies = [
        ('politicos', '0006_remove_deputado_id_camara'),
    ]
    operations = [
        migrations.AlterField(
            model_name='gastocotaparlamentar',
            name='empresa',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='gastos_deputados', to='empresas.Empresa'),
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,407
|
labcodes/dados_brasil_io
|
refs/heads/master
|
/politicos/migrations/0008_auto_20180516_1511.py
|
# Generated by Django 2.0.4 on 2018-05-16 15:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add single-column indexes on the GastoCotaParlamentar fields used
    for filtering/ordering. Auto-generated by Django."""
    dependencies = [
        ('politicos', '0007_auto_20180513_0241'),
    ]
    operations = [
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['-data_emissao'], name='politicos_g_data_em_147448_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['data_emissao'], name='politicos_g_data_em_706148_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['mes'], name='politicos_g_mes_2ed906_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['ano'], name='politicos_g_ano_2863bc_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['descricao'], name='politicos_g_descric_179e32_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['descricao_especificacao'], name='politicos_g_descric_747dd9_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['fornecedor'], name='politicos_g_fornece_d50161_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['valor_liquido'], name='politicos_g_valor_l_54a491_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['valor_documento'], name='politicos_g_valor_d_77e5c1_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['deputado'], name='politicos_g_deputad_193b26_idx'),
        ),
        migrations.AddIndex(
            model_name='gastocotaparlamentar',
            index=models.Index(fields=['empresa'], name='politicos_g_empresa_edaff8_idx'),
        ),
    ]
|
{"/politicos/views.py": ["/politicos/models.py"], "/politicos/management/commands/import_deputados.py": ["/politicos/models.py"], "/empresas/management/commands/import_pessoas_juridicas.py": ["/empresas/models.py"], "/empresas/models.py": ["/politicos/models.py"], "/empresas/management/commands/import_empresas.py": ["/empresas/models.py"], "/empresas/management/commands/import_pessoas_fisicas.py": ["/empresas/models.py"], "/politicos/graficos.py": ["/politicos/models.py"], "/empresas/management/commands/import_estrangeiros.py": ["/empresas/models.py"], "/politicos/management/commands/import_partidos.py": ["/politicos/models.py"], "/politicos/management/commands/import_gastos.py": ["/empresas/models.py", "/politicos/models.py"]}
|
20,421
|
fieldhawker/akagi
|
refs/heads/master
|
/app/aimaker/views.py
|
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseRedirect
from django.urls import reverse_lazy
from django.utils import timezone
from django_filters.views import FilterView
from django.shortcuts import render
from django.views.generic import TemplateView
from app.aimaker.forms import AiMakerForm
from app.aimaker.models import Photo
from app.aimaker.utils import AiMakerRequest, AiMakerResponse
import logging
class AiMakerView(LoginRequiredMixin, TemplateView):
    """Screen that sends an uploaded image to the AI Maker classification
    API and renders the best-scoring label."""

    def __init__(self, **kwargs):
        # BUG FIX: forward **kwargs to View.__init__ so as_view(**initkwargs)
        # keeps working (the original accepted no arguments at all).
        super().__init__(**kwargs)
        self.params = {'state': "",
                       'label': "",
                       'score': "",
                       'base64': "",
                       'form': AiMakerForm()}

    def get(self, req, *args, **kwargs):
        """GET: render the upload form (index.html)."""
        return render(req, 'aimaker/index.html', self.params)

    def post(self, req, *args, **kwargs):
        """POST: classify the uploaded image and render the result page.

        Raises ValueError when the submitted form is invalid.
        """
        form = AiMakerForm(req.POST, req.FILES)
        if not form.is_valid():
            raise ValueError('invalid form')
        photo = Photo(image=form.cleaned_data['image'])
        # Encode as a data-URI: used both for the API call and to redisplay
        # the image on the result page.
        base64 = photo.image_src()
        response = AiMakerRequest(base64)
        result = AiMakerResponse(response)
        self.params['state'] = result['state']
        self.params['label'] = result['label']
        self.params['score'] = '{:.2%}'.format(result['score'])
        self.params['base64'] = base64
        return render(req, 'aimaker/result.html', self.params)
|
{"/app/aimaker/views.py": ["/app/aimaker/models.py", "/app/aimaker/utils.py"], "/app/urls.py": ["/app/views.py", "/app/views_top.py", "/app/aimaker/views.py"], "/app/views.py": ["/app/forms.py"]}
|
20,422
|
fieldhawker/akagi
|
refs/heads/master
|
/app/urls.py
|
from django.urls import path
from django.conf.urls import url, include
from .models import Item
from django.views.generic import TemplateView
from .views import ItemFilterView, ItemDetailView, ItemCreateView, ItemUpdateView, ItemDeleteView, FaceView
from .views_top import TopIndexView
from .aimaker.views import AiMakerView
# Application URL routing.
urlpatterns = [
    path('', TopIndexView.as_view(), name='top'),
    # ml5.js style-transfer demo (static template, no view logic)
    path('ml5/styletransfervideo', TemplateView.as_view(
        template_name='app/ml5/style_transfer_video.html'), name='style_transfer_video'),
    # face classification
    path('face/', FaceView.as_view(), name='face'),
    # Item CRUD
    path('data/detail/<int:pk>/', ItemDetailView.as_view(), name='detail'),
    path('data/create/', ItemCreateView.as_view(), name='create'),
    path('data/update/<int:pk>/', ItemUpdateView.as_view(), name='update'),
    path('data/delete/<int:pk>/', ItemDeleteView.as_view(), name='delete'),
    path('data/', ItemFilterView.as_view(), name='index'),
    # AI Maker classification
    path('aimaker/', AiMakerView.as_view(), name='aimaker'),
]
|
{"/app/aimaker/views.py": ["/app/aimaker/models.py", "/app/aimaker/utils.py"], "/app/urls.py": ["/app/views.py", "/app/views_top.py", "/app/aimaker/views.py"], "/app/views.py": ["/app/forms.py"]}
|
20,423
|
fieldhawker/akagi
|
refs/heads/master
|
/app/views_top.py
|
from django.contrib.auth.mixins import LoginRequiredMixin
# from django.http import HttpResponseRedirect
# from django.urls import reverse_lazy
# from django.utils import timezone
# from django.views.generic import DetailView
# from django.views.generic.edit import CreateView, UpdateView, DeleteView
# from django_filters.views import FilterView
from django.views.generic.base import TemplateView
# from .filters import ItemFilterSet
# from .forms import ItemForm
# from .models import Item
# 未ログインのユーザーにアクセスを許可する場合は、LoginRequiredMixinを継承から外してください。
#
# LoginRequiredMixin:未ログインのユーザーをログイン画面に誘導するMixin
# 参考:https://docs.djangoproject.com/ja/2.1/topics/auth/default/#the-loginrequired-mixin
class TopIndexView(LoginRequiredMixin, TemplateView):
    """
    View: top (landing) page.

    Drop LoginRequiredMixin from the bases to allow anonymous access:
    https://docs.djangoproject.com/ja/2.1/topics/auth/default/#the-loginrequired-mixin
    """
    template_name = 'app/top_index.html'
|
{"/app/aimaker/views.py": ["/app/aimaker/models.py", "/app/aimaker/utils.py"], "/app/urls.py": ["/app/views.py", "/app/views_top.py", "/app/aimaker/views.py"], "/app/views.py": ["/app/forms.py"]}
|
20,424
|
fieldhawker/akagi
|
refs/heads/master
|
/app/aimaker/utils.py
|
from logging import basicConfig, getLogger, DEBUG
from django.http import HttpResponse
import requests
import json
import io
import base64
basicConfig(level=DEBUG)
logger = getLogger(__name__)
def AiMakerRequest(base64):
    """POST a base64-encoded image to the AI Maker classification API.

    Returns the decoded JSON response, or an empty list when the request
    or JSON decoding fails (best-effort contract kept from the original).
    Note: the parameter shadows the stdlib ``base64`` module (unused here).
    """
    logger.debug('AiMakerRequest')
    # HACK: credentials belong in settings/environment variables, not in
    # source control — rotate this key and move it out of the code.
    API_Key = 'c28f3694803e7631c5feb0831f29be77670a5d6f77ede6be444afd0b6d86280d2b67b3cc50730753e7e49c2342fd5b18'
    id = 3673
    url = 'https://aimaker.io/image/classification/api'
    query = {
        'id': id,
        'apikey': API_Key,
        'base64': base64
    }
    try:
        response = requests.post(url, query)
        return response.json()
    except (requests.RequestException, ValueError) as exc:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit and
        # programming errors are no longer swallowed. json() raises a
        # ValueError subclass on malformed payloads.
        logger.debug('Except AiMakerRequest: %s', exc)
        return []
def AiMakerResponse(json):
    """Reduce the AI Maker JSON payload to the best-scoring label.

    Expected payload shape:
        {"state": 1,
         "url": "https://aimaker.io/sample.png",
         "labels": {"0": {"score": 0.997, "label": "ラベル0"},
                    "1": {"score": 0.003, "label": "ラベル1"}}}

    Returns {'state': 'OK'|'NG', 'label': str, 'score': float}; the 'NG'
    fallback is returned when the API reports state != 1.
    """
    logger.debug('AiMakerResponse')
    result = {
        'state': 'NG',
        'label': "ラベル0",
        'score': 0
    }
    if not json['state'] == 1:
        logger.debug(result)
        return result
    max_score = 0
    max_label = ''
    # BUG FIX: 'labels' is a dict keyed by "0", "1", ... — iterating the
    # dict directly yields the string keys, so label["score"] raised a
    # TypeError. Iterate the values (the per-label dicts) instead.
    for label in json['labels'].values():
        if max_score < label["score"]:
            max_score = label["score"]
            max_label = label["label"]
    result = {
        'state': 'OK',
        'label': max_label,
        'score': max_score
    }
    return result
|
{"/app/aimaker/views.py": ["/app/aimaker/models.py", "/app/aimaker/utils.py"], "/app/urls.py": ["/app/views.py", "/app/views_top.py", "/app/aimaker/views.py"], "/app/views.py": ["/app/forms.py"]}
|
20,425
|
fieldhawker/akagi
|
refs/heads/master
|
/app/views.py
|
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseRedirect
from django.urls import reverse_lazy
from django.utils import timezone
from django.views.generic import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django_filters.views import FilterView
from .filters import ItemFilterSet
from .forms import ItemForm
from .models import Item
# 顔判定
from django.shortcuts import render
from django.views.generic import TemplateView
from .forms import ImageForm
from .face import detect
import logging
# 未ログインのユーザーにアクセスを許可する場合は、LoginRequiredMixinを継承から外してください。
#
# LoginRequiredMixin:未ログインのユーザーをログイン画面に誘導するMixin
# 参考:https://docs.djangoproject.com/ja/2.1/topics/auth/default/#the-loginrequired-mixin
class ItemFilterView(LoginRequiredMixin, FilterView):
    """
    View: list/search page.

    Uses django-filter to add search to the ListView:
    https://django-filter.readthedocs.io/en/master/
    """
    model = Item
    # django-filter configuration
    filterset_class = ItemFilterSet
    # django-filter 2.0: show all records when no query is given
    strict = False
    # page size
    paginate_by = 10
    def get(self, request, **kwargs):
        """
        Handle GET.

        Session bookkeeping keeps the search query alive while the user
        moves between the list page and detail pages.
        """
        logger = logging.getLogger('django')
        logger.info('ItemFilterView.get request.')
        # Navigation inside the list page (a GET query exists): save it.
        if request.GET:
            request.session['query'] = request.GET
        # Coming back from detail/create (no GET query): restore the saved one.
        else:
            request.GET = request.GET.copy()
            if 'query' in request.session.keys():
                for key in request.session['query'].keys():
                    request.GET[key] = request.session['query'][key]
        return super().get(request, **kwargs)
    def get_queryset(self):
        """
        Default ordering: newest first (created_at descending).
        """
        return Item.objects.all().order_by('-created_at')
    def get_context_data(self, *, object_list=None, **kwargs):
        """
        Extend the template context here if extra display data is ever
        needed, e.g. kwargs['sample'] = 'sample'.
        """
        return super().get_context_data(object_list=object_list, **kwargs)
class ItemDetailView(LoginRequiredMixin, DetailView):
    """Detail page for a single Item."""
    model = Item

    def get_context_data(self, **kwargs):
        """Extend the context here if extra display data is ever needed,
        e.g. kwargs['sample'] = 'sample'."""
        context = super().get_context_data(**kwargs)
        return context
class ItemCreateView(LoginRequiredMixin, CreateView):
    """Create page for Item records."""
    model = Item
    form_class = ItemForm
    success_url = reverse_lazy('index')

    def form_valid(self, form):
        """Stamp the audit fields (creator/updater + timestamps) and save."""
        new_item = form.save(commit=False)
        new_item.created_by = self.request.user
        new_item.created_at = timezone.now()
        new_item.updated_by = self.request.user
        new_item.updated_at = timezone.now()
        new_item.save()
        return HttpResponseRedirect(self.success_url)
class ItemUpdateView(LoginRequiredMixin, UpdateView):
    """Update page for Item records."""
    model = Item
    form_class = ItemForm
    success_url = reverse_lazy('index')

    def form_valid(self, form):
        """Stamp the updater audit fields and save the edited Item."""
        edited = form.save(commit=False)
        edited.updated_by = self.request.user
        edited.updated_at = timezone.now()
        edited.save()
        return HttpResponseRedirect(self.success_url)
class ItemDeleteView(LoginRequiredMixin, DeleteView):
    """Delete page for Item records."""
    model = Item
    success_url = reverse_lazy('index')

    def delete(self, request, *args, **kwargs):
        """Delete the target Item and return to the list page."""
        target = self.get_object()
        target.delete()
        return HttpResponseRedirect(self.success_url)
class FaceView(TemplateView):
    """Face-classification screen: upload an image, run detect(), show the
    result on index.html."""

    def __init__(self, **kwargs):
        # Forward **kwargs so View.as_view(**initkwargs) keeps working.
        super().__init__(**kwargs)
        self.params = {'result_list': [],
                       'result_name': "",
                       'result_img': "",
                       'form': ImageForm()}

    def get(self, req, *args, **kwargs):
        """GET: render the upload form."""
        logger = logging.getLogger('django')
        logger.info('FaceView.get request.')
        # BUG FIX: the original caught every exception, printed a traceback
        # and fell through to `return None`, which Django rejects ("view
        # didn't return an HttpResponse"). Log and re-raise instead.
        try:
            return render(req, 'face/index.html', self.params)
        except Exception:
            logger.exception('FaceView.get failed')
            raise

    def post(self, req, *args, **kwargs):
        """POST: classify the uploaded image and render the result.

        Raises ValueError when the submitted form is invalid.
        """
        form = ImageForm(req.POST, req.FILES)
        if not form.is_valid():
            raise ValueError('invalid form')
        image = form.cleaned_data['image']
        # detect() returns (result_list, result_name, result_img).
        result = detect(image)
        self.params['result_list'], self.params['result_name'], self.params['result_img'] = result
        return render(req, 'face/index.html', self.params)
|
{"/app/aimaker/views.py": ["/app/aimaker/models.py", "/app/aimaker/utils.py"], "/app/urls.py": ["/app/views.py", "/app/views_top.py", "/app/aimaker/views.py"], "/app/views.py": ["/app/forms.py"]}
|
20,426
|
fieldhawker/akagi
|
refs/heads/master
|
/app/aimaker/models.py
|
from django.db import models
import io
import base64
class Photo(models.Model):
    """Uploaded image to be classified via the AI Maker API."""
    # Files are stored under MEDIA_ROOT/images.
    image = models.ImageField(upload_to='images')
    def image_src(self):
        """Return the image as a base64 data URI for inline <img src=...> use."""
        with self.image.open() as img:
            base64_img = base64.b64encode(img.read()).decode()
            # NOTE(review): relies on img.file.content_type, which exists for
            # in-memory uploads but not for every storage backend — confirm.
            return 'data:' + img.file.content_type + ';base64,' + base64_img
|
{"/app/aimaker/views.py": ["/app/aimaker/models.py", "/app/aimaker/utils.py"], "/app/urls.py": ["/app/views.py", "/app/views_top.py", "/app/aimaker/views.py"], "/app/views.py": ["/app/forms.py"]}
|
20,427
|
fieldhawker/akagi
|
refs/heads/master
|
/app/forms.py
|
from django import forms
from .models import Item
class ItemForm(forms.ModelForm):
    """
    Model form for Item, generated from the model.

    Official docs on building forms from models:
    https://docs.djangoproject.com/ja/2.1/topics/forms/modelforms/
    """
    class Meta:
        model = Item
        fields = '__all__'
        # Every field is shown on the input form except:
        #   AutoField
        #   auto_now=True
        #   auto_now_add=True
        #   editable=False
class ImageForm(forms.Form):
    """Single-field form for uploading the image to classify.

    The label and error messages are user-facing and intentionally Japanese.
    """
    image = forms.ImageField(label="判定する画像を選択してください",
                             error_messages={'missing' : '画像ファイルが選択されていません。',
                                             'invalid' : '分類する画像ファイルを選択してください。',
                                             'invalid_image' : '画像ファイルではないようです。'})
|
{"/app/aimaker/views.py": ["/app/aimaker/models.py", "/app/aimaker/utils.py"], "/app/urls.py": ["/app/views.py", "/app/views_top.py", "/app/aimaker/views.py"], "/app/views.py": ["/app/forms.py"]}
|
20,429
|
pawelgalka/bubbleshooter
|
refs/heads/master
|
/arrow.py
|
# coding=utf-8
#file for class of bubble and it's events
#Paweł Gałka 11.08
from settings import *
class Arrow(pygame.sprite.Sprite):
    """Aiming arrow sprite anchored at the shooter position (STARTX, STARTY)."""
    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        # Base orientation of the arrow artwork, in degrees.
        self.angle = 90
        arrowImage = pygame.image.load('arrow.png')
        arrowImage.convert_alpha()
        arrowRect = arrowImage.get_rect()
        self.image = arrowImage
        # transImage holds the rotated copy; self.image stays pristine so
        # repeated rotations don't accumulate quality loss.
        self.transImage = self.image
        self.rect = arrowRect
        self.rect.centerx = STARTX
        self.rect.centery = STARTY
    def draw(self):
        # Blit the (possibly rotated) arrow onto the global display surface.
        display.blit(self.transImage, self.rect)
    def update(self, angle, vector):
        """Rotate toward 'angle' (degrees); 'vector' is currently unused."""
        # Offset by the base angle; pygame rotates counter-clockwise.
        self.transImage = pygame.transform.rotate(self.image, -self.angle+angle)
        # Re-center on the pivot, then snap back to the fixed shooter anchor.
        self.rect = self.transImage.get_rect(center=self.rect.midbottom)
        self.rect.centerx = STARTX
        self.rect.centery = STARTY
|
{"/arrow.py": ["/settings.py"], "/board.py": ["/settings.py", "/bubble.py"], "/main.py": ["/settings.py", "/bubble.py", "/arrow.py", "/board.py"], "/bubble.py": ["/settings.py"]}
|
20,430
|
pawelgalka/bubbleshooter
|
refs/heads/master
|
/board.py
|
# coding=utf-8
from settings import *
from bubble import *
def CreateEmptyBoard():
    """Return a ROWS x COLS grid with every cell initialised to EMPTY."""
    return [[EMPTY for _ in range(COLS)] for _ in range(ROWS)]
def FillBoard(board, colorlist):
    """Populate the first STARTLAYERS rows with randomly coloured bubbles,
    then lay them out on screen via setPosition()."""
    for layer in range(STARTLAYERS):
        for cell in range(len(board[layer])):
            # Shuffle before each pick, exactly like the original.
            random.shuffle(colorlist)
            board[layer][cell] = Bubble(colorlist[0])
    setPosition(board)
def setPosition(board):
    """Lay the grid of bubbles out on screen in a honeycomb pattern.

    Three passes: place each bubble on a regular grid, shift odd rows right
    by one radius to create the staggered pattern, then pull rows upward to
    remove the vertical gaps. Finally drops bubbles pushed off-screen.
    """
    #set them in array
    for row in range(ROWS):
        for col in range(len(board[row])):
            if board[row][col]!=EMPTY:
                # 5*WIDTH/640 and 5*HEIGHT/480 are margins scaled to the
                # window size (originally tuned for 640x480).
                board[row][col].rect.x = (BALLSIZE*col)+5*WIDTH/640
                board[row][col].rect.y = (BALLSIZE*row)+5*HEIGHT/480
                #print(row,col,board[row][col].rect.x,board[row][col].rect.y)
    #make pattern - move odd rows
    for row in range (1, ROWS, 2):
        for col in range(len(board[row])):
            if board[row][col]!=EMPTY:
                board[row][col].rect.x += BALLRADIUS
    #delete empty space between balls
    for row in range(1, ROWS):
        for col in range(len(board[row])):
            if board[row][col]!=EMPTY:
                board[row][col].rect.y -= row*5*HEIGHT/480
    deleteExtraBalls(board)
def deleteExtraBalls(board):
    """Blank out any bubble whose right edge extends past the window."""
    for line in board:
        for idx in range(len(line)):
            cell = line[idx]
            if cell != EMPTY and cell.rect.right > WIDTH:
                line[idx] = EMPTY
def drawBoard(board):
    """Render every non-empty bubble in the grid."""
    for line in board:
        for cell in line:
            if cell != EMPTY:
                cell.draw()
def getVector():
    """Aim vector from the shooter position toward the mouse cursor.

    Returns (vector, angle) with angle in degrees; the angle is clamped to
    179 / 1 when the cursor is below the shooter, so the arrow can never
    point downward, and fixed at 90 for a perfectly vertical aim.
    """
    mousePos = pygame.mouse.get_pos()
    # y component is flipped because screen coordinates grow downward.
    vector = pygame.math.Vector2((mousePos[0] - STARTX, STARTY - mousePos[1]))
    if vector.x == 0 :
        return vector, 90
    if vector.y < 0 and vector.x < 0:
        return vector, 179
    if vector.y < 0 and vector.x > 0:
        return vector, 1
    angle = math.degrees(math.atan(vector.y / vector.x))
    # atan returns (-90, 90); shift negatives into the (90, 180) range.
    if angle < 0:
        angle += 180
    #print(angle)
    return vector, angle
def getBubble(colors):
    """Pick a random colour and spawn the next bubble at the right edge."""
    random.shuffle(colors)
    chosen = colors[0]
    return Bubble(chosen, x=WIDTH-BALLSIZE-10)
def stopBubble(board, ball):
    """Snap the flying ball into the grid on collision (or at the top wall).

    Scans every occupied cell; on the first hit, picks the neighbouring
    grid slot based on which corner of the hit bubble was struck and on the
    row parity (odd rows are shifted right by one radius). Then runs the
    match-3 pop, drops floaters, and re-lays-out the board.

    Returns (ball, board, checkwin): ball is None once it has been placed.
    """
    for row in range(len(board)):
        for col in range(len(board[row])):
            # print(row,col)
            if (board[row][col] != EMPTY and ball != None):
                # print(ball.rect.top)
                if (pygame.sprite.collide_rect(ball, board[row][col])) or ball.rect.top <= 0:
                    # print(pygame.sprite.collide_rect(ball, board[row][col]))
                    if ball.rect.top <= 0:
                        # Reached the ceiling: clamp into row 0 by x position.
                        newCol, newRow = addToTop(ball, board)
                        board[newRow][newCol] = copy.copy(ball)
                        board[newRow][newCol].row = newRow
                        board[newRow][newCol].col = newCol
                        # print(newRow,newCol)
                    elif ball.rect.centery>=board[row][col].rect.centery: #hitting under ball
                        # print('pod',row,col)
                        if ball.rect.centerx<board[row][col].rect.centerx: #LD corner
                            if row%2==0: #longer line
                                newRow = row + 1
                                newCol = col - 1
                            else: #shorter line
                                newRow = row + 1
                                newCol = col
                        else: #RD corner
                            if row%2==0: #longer line
                                newRow = row + 1
                                newCol = col
                            else: #shorter line
                                newRow = row + 1
                                newCol = col + 1
                        board[newRow][newCol] = copy.copy(ball)
                        # print(board[newRow][newCol] is EMPTY)
                        board[newRow][newCol].row = newRow
                        board[newRow][newCol].col = newCol
                    else: # hitting over ball
                        # print('nad',row,col)
                        if row == 0:
                            # pass
                            newCol, newRow = addToTop(ball, board)
                        elif ball.rect.centerx < board[row][col].rect.centerx: # LU corner
                            if row % 2 == 0: # longer line
                                newRow = row - 1
                                newCol = col - 1
                                # Slot taken: fall back to the row below.
                                if board[newRow][newCol] is not EMPTY:
                                    newRow += 1
                            else: # shorter line
                                newRow = row - 1
                                newCol = col
                                if board[newRow][newCol] is not EMPTY:
                                    newRow += 1
                                    newCol -= 1
                        else: # RU corner
                            if row % 2 == 0: # longer line
                                newRow = row - 1
                                newCol = col
                                if board[newRow][newCol] is not EMPTY:
                                    newRow += 1
                                    newCol += 1
                            else: # shorter line
                                newRow = row - 1
                                newCol = col + 1
                                if board[newRow][newCol] is not EMPTY:
                                    newRow += 1
                        # print(newRow, newCol)
                        board[newRow][newCol] = copy.copy(ball)
                        board[newRow][newCol].row = newRow
                        board[newRow][newCol].col = newCol
                    # Collect the same-colour cluster; pop it if it is >= 3.
                    deleteList = []
                    deleteBubbles(board, newRow, newCol, ball.color, deleteList)
                    if len(deleteList)>=3:
                        popBubbles(board,deleteList)
                        print(deleteList)
                        deleteFloaters(board)
                    ball = None
                    setPosition(board)
                    # updateColors(board,COLORS)
                    print(COLORS)
    checkwin = checkWin(board)
    return ball, board, checkwin
def deleteFloaters(board):
    # Remove "floating" bubbles that are no longer connected to the top row:
    # blank the whole board, then flood-fill back (via checkFloaters) every
    # bubble reachable from a top-row seed position.
    filledFirst = []
    pattern = [i for i in range(16)]  # all top-row column indices (hard-coded 16 == COLS)
    for col in range(len(board[0])):
        if board[0][col]!=EMPTY:
            filledFirst.append(col)
    # NOTE(review): diff(first, second) keeps items of `first` absent from
    # `second`; with filledFirst as `first` and pattern covering all columns
    # this is always empty, so the only seed is the column 0 inserted below.
    # The intended call looks like diff(pattern, filledFirst) (or seeding from
    # filledFirst directly) — confirm against gameplay before changing.
    unfilledList = diff(filledFirst,pattern)
    unfilledList.insert(0,0)
    print(unfilledList)  # debug output
    copyBoard = copy.deepcopy(board)
    # Blank the live board; checkFloaters restores connected cells from the copy.
    for row in range (len(board)):
        for col in range(len(board[row])):
            board[row][col]=EMPTY
    print(board)  # debug output
    for col in unfilledList:
        checkFloaters(board,copyBoard,0,col)
def checkFloaters(board, copyBoard, row, col):
    # Flood-fill helper for deleteFloaters(): copies every bubble reachable
    # from (row, col) in copyBoard back into the pre-blanked board.
    # Recursion stops at board edges, empty cells, and already-restored cells.
    if row<0 or row>len(board)-1 or col<0 or col>len(board[row])-1:
        print(1)  # debug: out of range
        return
    elif copyBoard[row][col]==EMPTY:
        # print(row, col, board[row][col], copyBoard[row][col], end=' ')
        print(2)  # debug: empty source cell
        return
    elif board[row][col] == copyBoard[row][col]:
        # print(row, col, board[row][col], copyBoard[row][col], end=' ')
        print(3)  # debug: already restored (acts as the visited check)
        return
    board[row][col] = copyBoard[row][col]
    # Staggered hex grid: even rows (the longer lines) are offset half a cell
    # from odd rows, so the diagonal column offsets depend on row parity.
    if row%2 == 0: #check LU,RU,L,R,LD,RD
        if row!=0:
            checkFloaters(board, copyBoard, row - 1, col - 1) # left up
            checkFloaters(board, copyBoard, row - 1, col) # right up
        checkFloaters(board, copyBoard, row, col - 1) # left
        checkFloaters(board, copyBoard, row, col + 1) # right
        checkFloaters(board, copyBoard, row + 1, col - 1) # left down
        checkFloaters(board, copyBoard, row + 1, col) # right down
    else:
        checkFloaters(board, copyBoard, row - 1, col) # left up
        checkFloaters(board, copyBoard, row - 1, col + 1) # right up
        checkFloaters(board, copyBoard, row, col - 1) # left
        checkFloaters(board, copyBoard, row, col + 1) # right
        checkFloaters(board, copyBoard, row + 1, col) # left down
        checkFloaters(board, copyBoard, row + 1, col + 1) # right down
def addToTop(ball, board):
    """Map a ball that reached the ceiling to its top-row grid cell.

    Returns (newCol, newRow) where newRow is always 0 and newCol is the
    column whose horizontal span contains the ball's centre x coordinate.
    """
    newRow = 0
    x = ball.rect.centerx
    # Clamp into [0, COLS - 1]: a centre at exactly x == WIDTH (or slightly
    # past an edge after a bounce frame) would otherwise produce column COLS
    # and index outside the board.
    newCol = min(max(math.floor(x * COLS / WIDTH), 0), COLS - 1)
    return newCol, newRow
def deleteBubbles(board, row, col, color, deleteList):
    # Depth-first search that collects into deleteList the (row, col) of
    # every bubble of the given color connected to the starting cell on the
    # staggered hex grid. deleteList doubles as the visited set.
    # print("wejscie")  # debug ("entry" in Polish)
    if row < 0 or row > len(board)-1 or col < 0 or col > len(board[row])-1: # out of range
        return
    if board[row][col] is EMPTY: # field is empty
        return
    if board[row][col].color != color: # not right color
        return
    for ball in deleteList: # check if field is not already on list to delete
        if ball[0]==row and ball[1]==col:
            return
    deleteList.append((row,col))
    # if row == 0: #check L,R,LD,RD
    #     deleteBubbles(board, row, col - 1, color, deleteList) # left
    #     deleteBubbles(board, row, col + 1, color, deleteList) # right
    #     deleteBubbles(board, row + 1, col - 1, color, deleteList) # left down
    #     deleteBubbles(board, row + 1, col, color, deleteList) # right down
    # Even rows are offset half a cell from odd rows, so diagonal neighbour
    # columns differ per row parity; row 0 has no upward neighbours.
    if row%2 == 0: #check LU,RU,L,R,LD,RD
        if row!=0:
            deleteBubbles(board, row - 1, col - 1, color, deleteList) # left up
            deleteBubbles(board, row - 1, col, color, deleteList) # right up
        deleteBubbles(board, row, col - 1, color, deleteList) # left
        deleteBubbles(board, row, col + 1, color, deleteList) # right
        deleteBubbles(board, row + 1, col - 1, color, deleteList) # left down
        deleteBubbles(board, row + 1, col, color, deleteList) # right down
    else:
        deleteBubbles(board, row - 1, col, color, deleteList) # left up
        deleteBubbles(board, row - 1, col + 1, color, deleteList) # right up
        deleteBubbles(board, row, col - 1, color, deleteList) # left
        deleteBubbles(board, row, col + 1, color, deleteList) # right
        deleteBubbles(board, row + 1, col, color, deleteList) # left down
        deleteBubbles(board, row + 1, col + 1, color, deleteList) # right down
# def updateColors(board, colorList):
# colorList.clear()
# for row in range (len(board)):
# for col in range(len(board[row])):
# if board[row][col]!=EMPTY and board[row][col].color not in colorList :
# colorList.append(color)
def popBubbles(board, deleteList):
    """Clear every matched bubble from the board after a short pause."""
    pygame.time.delay(40)
    for row, col in deleteList:
        board[row][col] = EMPTY
def checkWin(board):
    """Return True when every cell of the board is empty (board cleared)."""
    return all(cell == EMPTY for line in board for cell in line)
def checkBottom(board):
    """Return False as soon as any bubble occupies row 10 (the losing line)."""
    for cell in board[10]:
        if cell != EMPTY:
            return False
    return True
|
{"/arrow.py": ["/settings.py"], "/board.py": ["/settings.py", "/bubble.py"], "/main.py": ["/settings.py", "/bubble.py", "/arrow.py", "/board.py"], "/bubble.py": ["/settings.py"]}
|
20,431
|
pawelgalka/bubbleshooter
|
refs/heads/master
|
/settings.py
|
# coding=utf-8
#Paweł Gałka 11.08
import sys
import pygame as pygame
import pygame.gfxdraw
import random, math, time, copy
from pygame.locals import *
random.seed()  # seed the RNG from system entropy/time
#colors
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
ORANGE = (255, 128, 0)
YELLOW = (255, 255, 0)
PURPLE = (102, 0, 101)
NAVY = (13, 200, 255)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
BEIGE = (229, 255, 204)
COLORS = [RED, GREEN, BLUE, ORANGE, YELLOW, PURPLE, NAVY]  # palette bubbles are drawn from
BGCOLOR = BEIGE
#game settings
FPS = 120  # main-loop tick rate
WIDTH = 640
HEIGHT = 480
TEXT = 20
BALLRADIUS = WIDTH//32  # bubble radius in pixels (20 px at 640 width)
BALLSIZE = 2*BALLRADIUS
BALLHEIGHT = 9
STARTX = WIDTH/2  # shooter ball launch position
STARTY = HEIGHT - BALLSIZE
ROWS = 14
COLS = 16  # board dimensions in cells
EMPTY = 0
FULL = 1  # board cell markers
STARTLAYERS = 5
global display  # NOTE: `global` at module level is a no-op statement
display = pygame.display.set_mode((WIDTH, HEIGHT)) # tuple width,height
pygame.display.set_caption("BUBBLE SHOOTER") # change title of window
# NOTE(review): Surface.convert() returns a NEW converted surface; the result
# is discarded here, so this call has no lasting effect — confirm intent.
display.convert()
# help functions not related to game
def diff(first, second):
    """Return the items of *first* that do not occur in *second*, order kept."""
    excluded = set(second)
    return [item for item in first if item not in excluded]
|
{"/arrow.py": ["/settings.py"], "/board.py": ["/settings.py", "/bubble.py"], "/main.py": ["/settings.py", "/bubble.py", "/arrow.py", "/board.py"], "/bubble.py": ["/settings.py"]}
|
20,432
|
pawelgalka/bubbleshooter
|
refs/heads/master
|
/setup.py
|
# cx_Freeze build script: running `python setup.py build` produces a frozen
# executable of main.py, bundling the pygame package and the arrow.png asset.
import cx_Freeze #for now, do the wildcard import, though the bigger the script gets, I would recommend an as ... structure
executables = [cx_Freeze.Executable("main.py")]
cx_Freeze.setup(
    name="Bubble shooter",
    author = "Pawel Galka",
    options={"build_exe": {"packages":["pygame"],
                           "include_files":["arrow.png"]}},
    executables = executables,
    version = "1.0.0"
)
|
{"/arrow.py": ["/settings.py"], "/board.py": ["/settings.py", "/bubble.py"], "/main.py": ["/settings.py", "/bubble.py", "/arrow.py", "/board.py"], "/bubble.py": ["/settings.py"]}
|
20,433
|
pawelgalka/bubbleshooter
|
refs/heads/master
|
/main.py
|
# coding=utf-8
#Paweł Gałka 11.08
from settings import *
from bubble import *
from arrow import Arrow
from board import *
def main():
    """Run the bubble-shooter game loop.

    Returns 1 when the board is cleared (win) and 2 when a bubble reaches
    the bottom line (loss); exits the process on QUIT/ESC.
    """
    pygame.init()
    clock = pygame.time.Clock()
    board = CreateEmptyBoard()
    FillBoard(board,COLORS)
    launchBall = False  # True while a shot ball is in flight
    ball = getBubble(COLORS)
    ball.rect.centerx = STARTX
    nextBall = getBubble(COLORS)  # preview of the upcoming ball
    # board[0][15] = copy.deepcopy(ball)
    # setPosition(board)
    arrow = Arrow()
    while 1: # main game loop
        display.fill(BEIGE)
        vector, angle = getVector()
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == MOUSEBUTTONDOWN:
                # Only one ball may be in flight at a time.
                if not launchBall:
                    ball.shoot(angle)
                    launchBall = True
            if event.type == MOUSEMOTION:
                arrow.update(angle, vector)
        drawBoard(board)
        nextBall.draw()
        if ball is not None:
            ball.update()
            ball.draw()
            #print(ball.rect.centerx, ball.rect.centery)
            # stopBubble returns ball=None once the ball sticks to the board.
            # NOTE: checkwin is first bound here; the very first iteration
            # always takes this branch (ball starts non-None), so the later
            # `if checkwin` never sees an unbound name.
            ball, board, checkwin = stopBubble(board, ball)
        else:
            # Previous ball landed: promote the preview ball to the shooter.
            launchBall = False
            ball = Bubble(nextBall.color)
            nextBall = getBubble(COLORS)
        arrow.draw()
        if checkwin:
            return 1
        elif checkBottom(board)==False:
            return 2
        pygame.display.update()
        clock.tick(FPS)
if __name__=='__main__':
    main()
|
{"/arrow.py": ["/settings.py"], "/board.py": ["/settings.py", "/bubble.py"], "/main.py": ["/settings.py", "/bubble.py", "/arrow.py", "/board.py"], "/bubble.py": ["/settings.py"]}
|
20,434
|
pawelgalka/bubbleshooter
|
refs/heads/master
|
/bubble.py
|
# coding=utf-8
#file for class of bubble and it's events
#Paweł Gałka 11.08
from settings import *
class Bubble(pygame.sprite.Sprite):
    """A single bubble: either resting on the board grid or in flight.

    Movement is polar: `angle` (degrees, 0 = right, 90 = up) plus scalar
    `speed`; `update()` converts this to per-frame x/y deltas.
    """
    def __init__(self, color, row=0, column=0, x=STARTX, y=STARTY):
        pygame.sprite.Sprite.__init__(self)
        self.rect = pygame.Rect(0,0,30,30) #30 because balls have grid
        self.rect.centerx = x
        self.rect.centery = y
        self.radius = BALLRADIUS
        self.color = color
        self.row = row
        self.column = column
        self.speed = 0  # 0 means the bubble is at rest
        self.angle = 0
    def update(self, *args):
        # Advance one frame along the current heading; screen y grows
        # downward, hence the negated sine.
        xmove = math.cos(math.radians(self.angle))*self.speed
        ymove = -math.sin(math.radians(self.angle))*self.speed
        self.rect.centerx += xmove
        self.rect.centery += ymove
        # Horizontal reflection off the side walls.
        if self.rect.left<0 or self.rect.right>WIDTH:
            self.angle = 180-self.angle
        # Top/bottom bounce: mirroring the angle AND negating the speed
        # keeps the x velocity unchanged while flipping the y velocity
        # (cos(180-a)*(-s) == cos(a)*s, -sin(180-a)*(-s) == sin(a)*s).
        if self.rect.top<0 or self.rect.bottom>HEIGHT:
            self.angle = 180-self.angle
            self.speed *= -1
    def draw(self):
        # Filled circle plus an anti-aliased black outline.
        pygame.gfxdraw.filled_circle(display, self.rect.centerx, self.rect.centery, self.radius, self.color)
        pygame.gfxdraw.aacircle(display,self.rect.centerx, self.rect.centery, self.radius, BLACK)
    def shoot(self, angle):
        # Launch the bubble toward the aimed angle at fixed speed.
        self.angle = angle
        self.speed = 10
|
{"/arrow.py": ["/settings.py"], "/board.py": ["/settings.py", "/bubble.py"], "/main.py": ["/settings.py", "/bubble.py", "/arrow.py", "/board.py"], "/bubble.py": ["/settings.py"]}
|
20,435
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/fetcher/_repository.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import random
import time
from typing import Any, Dict, List, cast
import typepy
from .._const import DATETIME_FORMAT, RETRY_COUNT, CardType, CommonCardKey, Result
from ._base import AbstractCardDataFetcher, CardData
from ._common import dump_organization, to_chart_data
def ghc_client_thread_helper(ghc_client) -> Dict[str, Any]:
    """Pool worker: gather several repository counters in one partial dict."""
    repo_info = ghc_client.repo
    return {
        "branches_count": ghc_client.branches_count,
        "pulls_count": ghc_client.pulls_count,
        "subscribers_count": repo_info.get("subscribers_count"),
        "license": repo_info.get("license"),
    }
def get_contributors_count_helper(ghc_client) -> Dict[str, Any]:
    """Pool worker: fetch the contributor count as a partial card-data dict."""
    return dict(contributors_count=ghc_client.contributors_count)
def get_tags_count_helper(ghc_client) -> Dict[str, Any]:
    """Pool worker: fetch the tag count as a partial card-data dict."""
    return dict(tags_count=ghc_client.tags_count)
def get_open_issues_helper(repo) -> Dict[str, Dict[str, list]]:
    """Tally the repository's open issues per label and shape them as chart data.

    Issues without any label are counted under the pseudo-label "not set".
    """
    import collections

    tally = collections.Counter()
    for issue in repo.get_issues():
        names = [label.name for label in issue.labels]
        tally.update(names or ["not set"])
    return {"open_issues": to_chart_data(tally, aggregate_threshold=7)}
class RepositoryCardDataFetcher(AbstractCardDataFetcher):
    """Fetch card data for a single GitHub repository.

    Independent, slow lookups are dispatched to the shared worker pool up
    front and joined at the end of fetch().
    """
    @property
    def type(self) -> str:
        # Card-type discriminator for this fetcher.
        return CardType.REPOSITORY
    def fetch(self) -> CardData:
        self._logger.debug(f"fetching repository data: id={self.id}")
        # Start the independent API lookups first so they overlap with the
        # synchronous calls below.
        thread_list = [
            self._pool.apply_async(ghc_client_thread_helper, args=[self._ghc_client]),
            self._pool.apply_async(get_contributors_count_helper, args=[self._ghc_client]),
            self._pool.apply_async(get_tags_count_helper, args=[self._ghc_client]),
        ]
        card_data = super().fetch()
        repo = self._pygh_client.get_repo(self.id)
        thread_list.append(self._pool.apply_async(get_open_issues_helper, args=[repo]))
        card_data[CommonCardKey.AVATAR_URL] = repo.owner.avatar_url
        card_data[CommonCardKey.CARD_TYPE] = CardType.REPOSITORY
        card_data[CommonCardKey.CREATED_AT] = repo.created_at.strftime(DATETIME_FORMAT)
        card_data[CommonCardKey.DESCRIPTION] = repo.description
        card_data[CommonCardKey.EMOJIS] = self._get_emoji_mapping(repo.description)
        card_data[CommonCardKey.HTML_URL] = repo.html_url
        card_data[CommonCardKey.NAME] = repo.name
        card_data[CommonCardKey.UPDATED_AT] = repo.updated_at.strftime(DATETIME_FORMAT)
        languages = repo.get_languages()
        card_data["forks_count"] = repo.forks_count
        card_data["has_issues"] = repo.has_issues
        card_data["has_wiki"] = repo.has_wiki
        card_data["language"] = repo.language
        card_data["languages"] = to_chart_data(languages, aggregate_threshold=4)
        card_data["languages_count"] = len(languages)
        card_data["owner_name"] = repo.owner.name
        card_data["owner_html_url"] = repo.owner.html_url
        card_data["open_issues_count"] = repo.open_issues_count
        card_data["organization"] = dump_organization(repo.organization)
        card_data["repo_homepage"] = None if typepy.is_null_string(repo.homepage) else repo.homepage
        card_data["stargazers_count"] = repo.stargazers_count
        card_data["topics"] = self.__get_topics()
        # Participation stats may not be ready yet on GitHub's side; retry a
        # few times before giving up.
        for i in range(RETRY_COUNT):
            try:
                card_data["participation"] = repo.get_stats_participation().all # type: ignore
            except AttributeError:
                # NOTE(review): max_sleep_secs is computed but never used, and
                # the log message promises "5 seconds" while the actual sleep
                # is random.random() (< 1 s) — confirm the intended backoff.
                max_sleep_secs = 2**i # noqa
                self._logger.warn(
                    f"failed to get '{self.id}' participation stats. retrying in 5 seconds"
                )
                card_data["participation"] = []
                time.sleep(random.random())
                continue
            break
        else:
            # for-else: every retry raised, so record the failure in the card.
            self._logger.error("failed to get participation stats")
            card_data[CommonCardKey.RESULT] = Result.ERROR
        card_data["commits_last_year"] = sum(cast(List[int], card_data["participation"]))
        try:
            card_data["latest_tag"] = repo.get_tags()[0].name
        except IndexError:
            self._logger.debug("tag not found in the repository")
        # Join the async lookups started above and merge their partial dicts.
        for i, thread in enumerate(thread_list):
            thead_id = f"thread {i + 1:d}/{len(thread_list):d}"
            self._logger.debug(f"wait for {thead_id}")
            card_data.update(thread.get())
            self._logger.debug(f"complete {thead_id}")
        return card_data
    # def __get_releases(self):
    #     return self._ghc_client.get("/repos/{:s}/releases".format(self.id))
    def __get_topics(self) -> List[str]:
        # Topics require the "mercy" preview media type on the REST call.
        values = self._ghc_client.get(
            f"/repos/{self.id:s}",
            headers={"accept": "application/vnd.github.mercy-preview+json"},
        )
        # get topics: https://developer.github.com/v3/repos/
        return cast(List[str], values.get("topics"))
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,436
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_logger.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import logbook
import typepy
def get_logger(log_level: int, extra_name: str):
    """Build a logbook logger named after *extra_name*, disabled on NOTSET."""
    name_parts = [extra_name] if typepy.is_not_null_string(extra_name) else []
    logger = logbook.Logger(" ".join(name_parts))
    logger.level = log_level
    if log_level == logbook.NOTSET:
        logger.disable()
    return logger
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,437
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_const.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
# Package-wide constants shared across the fetchers, generator, and CLI.
PROGRAM_NAME = "ghscard"
CARD_DATA_VERSION = 2  # bump when the emitted card-data schema changes
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"  # ISO-8601 style with tz offset
MAX_PER_PAGE = 100  # page size used for GitHub API requests
RETRY_COUNT = 3
class AppConfigKey:
    # Keys of the application configuration file.
    GITHUB_API_ACCESS_TOKEN = "github_api_personal_access_token"
    OUTPUT_DIR = "output_dir"
    INDENT = "indent"
class CardType:
    # Discriminator values stored under CommonCardKey.CARD_TYPE.
    USER = "User"
    ORGANIZATION = "Organization"
    REPOSITORY = "Repository"
class CommonCardKey:
    # JSON keys common to every card type.
    AVATAR_URL = "avatar_url"
    CARD_TYPE = "card_type"
    CREATED_AT = "created_at"
    DESCRIPTION = "description"
    EMOJIS = "emojis"
    # NOTE(review): the attribute name has a typo ("FETCHD"); it is kept
    # because renaming would break importers. The stored key is correct.
    FETCHD_AT = "fetched_at"
    HTML_URL = "html_url"
    ID = "id"
    NAME = "name"
    RESULT = "result"
    UPDATED_AT = "updated_at"
    VERSION = "data_version"
class Result:
    # Values stored under CommonCardKey.RESULT.
    SUCCESS = "success"
    ERROR = "error"
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,438
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_emoji.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import re
from typing import Mapping, cast
class EmojiParser:
    """Find GitHub-style ``:shortcode:`` emojis using a name-to-URL mapping."""

    __re_emoji = re.compile(r":[\+a-zA-Z0-9_-]+:")

    def __init__(self, emoji_kv_mapping: Mapping[str, str]) -> None:
        # A falsy mapping would make every lookup fail silently; reject it.
        if not emoji_kv_mapping:
            raise ValueError("required emoji key-value mapping")
        self.__emoji_mapping = emoji_kv_mapping

    def get_url(self, emoji: str) -> str:
        """Return the image URL for *emoji* (colons optional), or None if unknown."""
        try:
            key = emoji.strip().strip(":")
        except (TypeError, AttributeError) as e:
            raise ValueError(e)
        return cast(str, self.__emoji_mapping.get(key))

    def parse(self, text: str) -> list:
        """Return every known emoji shortcode found in *text*, in order."""
        try:
            candidates = self.__re_emoji.findall(text)
        except (TypeError, AttributeError) as e:
            raise ValueError(e)
        names = (candidate.strip(":") for candidate in candidates)
        return [name for name in names if name in self.__emoji_mapping]
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,439
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/__init__.py
|
from .__version__ import __author__, __copyright__, __email__, __license__, __version__
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,440
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_error.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
class ApiStatusError(Exception):
    """
    Raised when GitHub API is in red status.

    ("Red status" here refers to the service status the application checks
    before fetching — presumably GitHub's status endpoint; confirm at the
    raise site.)
    """
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,441
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/fetcher/_common.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from typing import Counter, Dict, Union
from .._const import CommonCardKey
# Shape of the chart.js-style payload produced by to_chart_data().
ChartData = Dict[str, list]


def dump_organization(organization) -> Dict[str, str]:
    """Serialize an organization object into a small plain dict.

    Returns an empty dict for a missing organization. When the organization
    has no display name, the trailing path segment of its HTML URL is used.
    """
    if not organization:
        return {}
    display_name = organization.name or organization.html_url.split("/")[-1]
    return {
        CommonCardKey.AVATAR_URL: organization.avatar_url,
        CommonCardKey.HTML_URL: organization.html_url,
        CommonCardKey.NAME: display_name,
        "public_repos": organization.public_repos,
    }


def to_chart_data(
    label_count_mapping: Union[Counter, Dict[str, int]], aggregate_threshold: int
) -> ChartData:
    """Convert a label->count mapping into parallel labels/data lists.

    Counts are sorted descending; everything beyond *aggregate_threshold*
    entries is folded into a single trailing "others" bucket, which is
    omitted when its total is falsy (e.g. zero).
    """
    if not label_count_mapping:
        return {"labels": [], "data": []}
    ranked = sorted(label_count_mapping.items(), key=lambda kv: kv[1], reverse=True)
    kept = ranked[:aggregate_threshold]
    labels = [label for label, _ in kept]
    data = [count for _, count in kept]
    overflow = sum(count for _, count in ranked[aggregate_threshold:])
    if overflow:
        labels.append("others")
        data.append(overflow)
    return {"labels": labels, "data": data}
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,442
|
thombashi/ghscard
|
refs/heads/master
|
/test/test_emoji.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
from ghscard._emoji import EmojiParser
# Small fixed subset of GitHub's emoji name -> image URL mapping, enough to
# exercise EmojiParser in the tests below without any network access.
emojis = {
    "+1": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f44d.png?v7",
    "-1": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f44e.png?v7",
    "100": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f4af.png?v7",
    "1234": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f522.png?v7",
    "1st_place_medal": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f947.png?v7",
    "2nd_place_medal": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f948.png?v7",
    "3rd_place_medal": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f949.png?v7",
    "8ball": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f3b1.png?v7",
    "a": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f170.png?v7",
    "ab": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f18e.png?v7",
    "abc": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f524.png?v7",
    "abcd": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f521.png?v7",
    "accept": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f251.png?v7",
    "aerial_tramway": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f6a1.png?v7",
    "afghanistan": "https://assets-cdn.github.com/images/icons/emoji/unicode/1f1e6-1f1eb.png?v7",
    "airplane": "https://assets-cdn.github.com/images/icons/emoji/unicode/2708.png?v7",
}
# Fresh parser per test, built over the static mapping above.
@pytest.fixture
def emoji_parser():
    return EmojiParser(emojis)
class Test_Emoji_constructor:
    """EmojiParser must reject falsy mappings at construction time."""

    # NOTE(review): the emoji_parser fixture is requested but unused here.
    @pytest.mark.parametrize(["value", "expected"], [[None, ValueError]])
    def test_exception(self, emoji_parser, value, expected):
        with pytest.raises(expected):
            EmojiParser(value)
class Test_Emoji_parse:
    """parse() extracts known :shortcodes: and rejects non-string input."""

    @pytest.mark.parametrize(
        ["value", "expected"],
        [
            [":+1:", ["+1"]],
            ["a:-1:b", ["-1"]],
            ["a:-1:b:accept:c", ["-1", "accept"]],
            ["", []],
            [":", []],
            [":+1", []],  # unterminated shortcode is not matched
        ],
    )
    def test_normal(self, emoji_parser, value, expected):
        assert emoji_parser.parse(value) == expected

    # Non-string inputs are converted into ValueError by the parser.
    @pytest.mark.parametrize(["value", "expected"], [[None, ValueError], [1, ValueError]])
    def test_exception(self, emoji_parser, value, expected):
        with pytest.raises(expected):
            emoji_parser.parse(value)
class Test_Emoji_get_url:
    """get_url() accepts names with or without colons; non-strings raise."""

    @pytest.mark.parametrize(
        ["value", "expected"],
        [
            [
                "1st_place_medal",
                "https://assets-cdn.github.com/images/icons/emoji/unicode/1f947.png?v7",
            ],
            [
                ":1st_place_medal:",
                "https://assets-cdn.github.com/images/icons/emoji/unicode/1f947.png?v7",
            ],
        ],
    )
    def test_normal(self, emoji_parser, value, expected):
        assert emoji_parser.get_url(value) == expected

    @pytest.mark.parametrize(["value", "expected"], [[None, ValueError], [1, ValueError]])
    def test_exception(self, emoji_parser, value, expected):
        with pytest.raises(expected):
            emoji_parser.get_url(value)
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,443
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_stopwatch.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import contextlib
import datetime
from ._const import DATETIME_FORMAT
@contextlib.contextmanager
def stopwatch(logger, name):
    # Debug-log when the named operation starts and, on exit (even after an
    # exception, thanks to finally), how long it took.
    # NOTE(review): the "[sec]" label is misleading — the message interpolates
    # a timedelta (H:MM:SS.ffffff), not a seconds float.
    start_time = datetime.datetime.now()
    logger.debug(f"start {name:s}: {start_time.strftime(DATETIME_FORMAT):s}")
    try:
        yield
    finally:
        logger.debug(f"complete {name:s}: time={datetime.datetime.now() - start_time} [sec]")
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,444
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/fetcher/_organization.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from .._const import DATETIME_FORMAT, CardType, CommonCardKey
from ._base import AbstractCardDataFetcher, CardData
class OrganizationCardDataFetcher(AbstractCardDataFetcher):
    """Fetch card data for a GitHub organization."""

    @property
    def type(self) -> str:
        return CardType.ORGANIZATION

    def fetch(self) -> CardData:
        """Collect the organization's profile fields into a card-data mapping."""
        self._logger.debug(f"fetching org data: id={self.id}")

        card_data = super().fetch()
        organization = self._pygh_client.get_organization(self.id)
        description = self.__get_description()

        # insertion order is preserved so the emitted JSON keeps this key order
        card_data.update(
            {
                CommonCardKey.AVATAR_URL: organization.avatar_url,
                CommonCardKey.CARD_TYPE: CardType.ORGANIZATION,
                CommonCardKey.CREATED_AT: organization.created_at.strftime(DATETIME_FORMAT),
                CommonCardKey.DESCRIPTION: description,
                CommonCardKey.EMOJIS: self._get_emoji_mapping(description),
                CommonCardKey.HTML_URL: organization.html_url,
                CommonCardKey.NAME: self.id,
                CommonCardKey.UPDATED_AT: organization.updated_at.strftime(DATETIME_FORMAT),
                "blog": organization.blog,
                "company": organization.company,
                "email": organization.email,
                "location": organization.location,
                "public_gists": organization.public_gists,
                "public_repos": organization.public_repos,
                "public_members_count": sum(1 for _ in organization.get_public_members()),
            }
        )

        return card_data

    def __get_description(self):
        # the REST endpoint exposes the description field not present via PyGithub here
        return self._ghc_client.get(f"/orgs/{self.id:s}").get("description")
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,445
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_cache.py
|
from datetime import datetime
from functools import total_ordering
from typing import Union
from datetimerange import DateTimeRange
from path import Path
@total_ordering
class CacheTime:
    """A duration in seconds, comparable and convertible to hours."""

    def __init__(self, second: Union[int, float]):
        self.__second = second

    @property
    def second(self) -> Union[int, float]:
        """Duration in seconds as originally supplied."""
        return self.__second

    @property
    def hour(self) -> float:
        """Duration converted to hours."""
        return self.second / 3600

    def __eq__(self, other) -> bool:
        return self.second == other.second

    def __lt__(self, other) -> bool:
        return self.second < other.second
class CacheManager:
    """Decide whether a cached file is still fresh given a configured lifetime."""

    def __init__(self, logger, cache_lifetime: CacheTime) -> None:
        self.__logger = logger
        self.__cache_lifetime = cache_lifetime

    def is_cache_available(self, cache_file_path: Path) -> bool:
        """Return True when the file exists and its age is below the lifetime."""
        if not cache_file_path.isfile():
            self.__logger.debug(f"cache not found: {cache_file_path}")
            return False

        try:
            time_range = DateTimeRange(
                datetime.fromtimestamp(cache_file_path.mtime), datetime.now()
            )
        except OSError:
            # stat() on the path failed; treat as no usable cache
            return False

        if not time_range.is_valid_timerange():
            return False

        elapsed = CacheTime(time_range.get_timedelta_second())
        cache_msg = "path={path}, lifetime={lifetime:.1f}h, elapsed={elapsed:.1f}h".format(
            path=cache_file_path, lifetime=self.__cache_lifetime.hour, elapsed=elapsed.hour
        )

        if elapsed < self.__cache_lifetime:
            self.__logger.debug(f"cache available: {cache_msg}")
            return True

        self.__logger.debug(f"cache expired: {cache_msg}")
        return False
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,446
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_detector.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import github
import typepy
from github.GithubException import UnknownObjectException
from ._const import CardType
class GithubIdDetector:
    """Classify a GitHub id string as a user, an organization or a repository.

    Raises:
        ValueError: when the id is empty, not a string, or has more than one slash.
    """

    @property
    def id(self) -> str:
        """The normalized GitHub id (whitespace/slashes trimmed)."""
        return self.__id

    def __init__(self, id: str, logger, pygh_client: github.Github) -> None:
        try:
            self.__id = id.strip().strip("/")
        except (TypeError, AttributeError) as e:
            # non-string ids are reported as a ValueError, preserving the cause
            raise ValueError(e) from e

        # normalize whitespace around each path component
        id_item_list = [id_item.strip() for id_item in self.__id.split("/")]
        self.__id = "/".join(id_item_list)

        if typepy.is_null_string(self.__id):
            raise ValueError("id must not be an empty string")

        logger.debug(f"id: {id}")

        self.__pygh_client = pygh_client
        self.__card_type = self.__get_card_type()

    def is_user(self) -> bool:
        return self.get_id_type() == CardType.USER

    def is_organization(self) -> bool:
        return self.get_id_type() == CardType.ORGANIZATION

    def is_repository(self) -> bool:
        return self.get_id_type() == CardType.REPOSITORY

    def get_id_type(self) -> str:
        return self.__card_type

    def __get_card_type(self) -> str:
        id_item_list = self.id.split("/")

        if len(id_item_list) > 2:
            raise ValueError(
                "invalid format for a GitHub id: "
                "expected='<user name>' or '<user name>/<repo name>', "
                # BUGFIX: the original formatted the *builtin* ``id`` function
                # here ("<built-in function id>"); use the instance's id string.
                "actual='{}'".format(self.id)
            )

        if len(id_item_list) == 2:
            user_name, repo_name = id_item_list
            self.__validate_user_name(user_name)
            self.__validate_repo_name(repo_name)

            return CardType.REPOSITORY

        user_name = id_item_list[0]
        self.__validate_user_name(user_name)

        # a slash-less id is either an organization or a user: probe the API
        try:
            self.__pygh_client.get_organization(self.id)
            return CardType.ORGANIZATION
        except UnknownObjectException:
            pass

        return CardType.USER

    @staticmethod
    def __validate_user_name(user_name: str) -> None:
        if typepy.is_null_string(user_name):
            raise ValueError("user/organization name must not be an empty string")

    @staticmethod
    def __validate_repo_name(repo_name: str) -> None:
        if typepy.is_null_string(repo_name):
            raise ValueError("repository name must not be an empty string")
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,447
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_generator.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import errno
import os.path
from typing import Mapping
import click
import github
import msgfy
import typepy
from github.GithubException import BadCredentialsException, UnknownObjectException
from path import Path
from pathvalidate import sanitize_filename
from ._cache import CacheManager, CacheTime
from ._const import MAX_PER_PAGE, AppConfigKey
from ._detector import GithubIdDetector
from ._github_client import GitHubClient
from ._stopwatch import stopwatch
from .fetcher import (
AbstractCardDataFetcher,
OrganizationCardDataFetcher,
RepositoryCardDataFetcher,
UserCardDataFetcher,
)
try:
import simplejson as json
except ImportError:
import json # type: ignore
class CardGenerator:
    """Generate a card-data JSON file for a GitHub user/organization/repository."""

    def __init__(self, logger, app_config: Mapping[str, str], is_overwrite: bool) -> None:
        self.__logger = logger
        self.__access_token = app_config.get(AppConfigKey.GITHUB_API_ACCESS_TOKEN)
        self.__output_dir = Path(app_config.get(AppConfigKey.OUTPUT_DIR))
        self.__indent = app_config.get(AppConfigKey.INDENT)

        # card data is cached for 24 hours; a zero lifetime forces regeneration
        cache_time = CacheTime(24 * (60**2))
        if is_overwrite:
            cache_time = CacheTime(0)
        self.__cache_manager = CacheManager(logger, cache_time)

        if typepy.is_not_null_string(self.__access_token):
            logger.debug("access token found in the configuration file")

        self.__pygh_client = github.Github(self.__access_token, per_page=MAX_PER_PAGE)

    def generate_card(self, github_id: str) -> int:
        """Fetch data for ``github_id`` and write ``<id>.json`` to the output dir.

        Returns:
            0 on success (or on a still-valid cache hit); otherwise an errno value.
        """
        self.__set_github_id(github_id)

        # BUGFIX: the original passed ``null_value_handler=raise_error`` as a
        # keyword to ``str.format``; ``raise_error`` is undefined in this module,
        # so the call raised NameError on every invocation. The keyword is dropped.
        output_path = self.__output_dir.joinpath(
            "{:s}.json".format(sanitize_filename(github_id, "_"))
        )
        if self.__cache_manager.is_cache_available(output_path):
            self.__logger.notice(f"skip: valid card data already exist: {output_path}")
            return 0

        try:
            with stopwatch(self.__logger, f"fetch {github_id} {self.__data_fetcher.type}"):
                card_data = self.__data_fetcher.fetch()
        except OSError as e:
            self.__logger.error(msgfy.to_error_message(e))
            return errno.ECONNRESET
        except BadCredentialsException:
            self.__logger.error("invalid GitHub API public access token")
            return errno.EBADRQC
        except KeyboardInterrupt:
            # shut down the fetcher's worker pool before propagating
            self.terminate()
            raise
        except UnknownObjectException as e:
            if e.status == 404:
                message = "'{}' {}".format(self.__data_fetcher.id, e.data.get("message"))
            else:
                message = e.data.message  # type: ignore
            self.__logger.error(
                "{:s} failed to get GitHub data: type={}, id={}, status={}, "
                "message={}".format(
                    e.__class__.__name__,
                    self.__data_fetcher.type,
                    self.__data_fetcher.id,
                    e.status,
                    message,
                )
            )
            return errno.ENODATA

        card_data_text = json.dumps(card_data, indent=self.__indent, ensure_ascii=False)
        self.__logger.debug(f"fetched card data: {card_data_text}")

        try:
            self.__make_output_dir()
        except TypeError:
            # presumably the output dir is unset (None) — print to stdout instead
            click.echo(card_data_text)
            return 0
        except OSError as e:
            self.__logger.error(msgfy.to_error_message(e))
            return e.args[0]

        try:
            with open(output_path, "w", encoding="utf-8") as f:
                f.write(card_data_text + "\n")
        except OSError as e:
            self.__logger.error(msgfy.to_error_message(e))
            return e.args[0]

        self.__logger.info(
            f"written {self.__detector.get_id_type().lower():s} data to '{output_path:s}'"
        )

        return 0

    def terminate(self) -> None:
        """Stop the current data fetcher's worker pool."""
        self.__data_fetcher.terminate()

    def __get_data_fetcher_class(self):
        # choose the fetcher implementation matching the detected id type
        # pytype: disable=attribute-error
        if self.__detector.is_repository():
            return RepositoryCardDataFetcher
        if self.__detector.is_user():
            return UserCardDataFetcher
        if self.__detector.is_organization():
            return OrganizationCardDataFetcher
        raise ValueError(f"unknown id type: {self.__detector.id}")
        # pytype: enable=attribute-error

    def __set_github_id(self, github_id: str) -> None:
        self.__github_id = github_id
        self.__detector = GithubIdDetector(
            self.__github_id, self.__logger, pygh_client=self.__pygh_client
        )
        self.__data_fetcher = self.__create_data_fetcher()

    def __create_data_fetcher(self) -> AbstractCardDataFetcher:
        # pytype: disable=attribute-error
        return self.__get_data_fetcher_class()(
            pygh_client=self.__pygh_client,
            ghc_client=GitHubClient(
                logger=self.__logger, github_id=self.__detector.id, access_token=self.__access_token
            ),
            id=self.__detector.id,
            logger=self.__logger,
        )
        # pytype: enable=attribute-error

    def __make_output_dir(self) -> None:
        if os.path.isdir(self.__output_dir):
            return

        self.__logger.debug(f"creating directory: {self.__output_dir}")
        os.makedirs(self.__output_dir)
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,448
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/__main__.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import errno
import os
import sys
import appconfigpy
import click
import logbook
import logbook.more
import msgfy
import retryrequests
import typepy
from appconfigpy import ConfigItem, ConfigManager, DefaultDisplayStyle
from github.GithubException import RateLimitExceededException
from .__version__ import __version__
from ._const import PROGRAM_NAME, AppConfigKey
from ._generator import CardGenerator
from ._logger import get_logger
QUIET_LOG_LEVEL = logbook.NOTSET
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"], obj={})
CONFIG_ITEMS = [
ConfigItem(
name=AppConfigKey.GITHUB_API_ACCESS_TOKEN,
initial_value=None,
prompt_text="GitHub API Personal Access Token",
default_display_style=DefaultDisplayStyle.PART_VISIBLE,
),
ConfigItem(
name=AppConfigKey.OUTPUT_DIR, prompt_text="Output Directory Path", initial_value="."
),
ConfigItem(name=AppConfigKey.INDENT, prompt_text="Indent Size", initial_value=4),
]
logbook.more.ColorizedStderrHandler(
level=logbook.DEBUG, format_string="[{record.level_name}] {record.channel}: {record.message}"
).push_application()
class Context:
    """Key names for values stashed in the click context object."""

    LOG_LEVEL = "LOG_LEVEL"
def get_api_status() -> str:
    """Return the GitHub statuspage indicator string (e.g. "none"/"minor"/"major")."""
    response = retryrequests.get("https://kctbh9vrtdwd.statuspage.io/api/v2/status.json")
    response.raise_for_status()

    return response.json()["status"]["indicator"]
@click.group(context_settings=CONTEXT_SETTINGS)
@click.version_option(version=__version__, message="%(prog)s %(version)s")
@click.option("--debug", "log_level", flag_value=logbook.DEBUG, help="for debug print.")
@click.option(
    "--quiet", "log_level", flag_value=QUIET_LOG_LEVEL, help="suppress execution log messages."
)
@click.pass_context
def cmd(ctx, log_level):
    # root command group: record the requested log level (default INFO) on the context
    if log_level is None:
        ctx.obj[Context.LOG_LEVEL] = logbook.INFO
    else:
        ctx.obj[Context.LOG_LEVEL] = log_level
@cmd.command()
@click.pass_context
def configure(ctx):
    """
    Create a configuration file which includes GitHub API public access token.
    The value you provide for the GitHub API public access token written to
    the configuration file (~/.ghscard).
    Example:
    To create a new configuration:
    $ ghscard configure
    GitHub API Personal Access Token: <input access token>
    """
    appconfigpy.set_log_level(ctx.obj[Context.LOG_LEVEL])

    config_manager = ConfigManager(PROGRAM_NAME, CONFIG_ITEMS)
    sys.exit(config_manager.configure())
@cmd.command()
@click.argument("github_id_list", type=str, nargs=-1)
@click.option("--api-token", default=None, help="GitHub API access token.")
@click.option(
    "-o",
    "--output-dir",
    metavar="PATH",
    default=None,
    help="Output path of the SQLite database file.",
)
@click.option(
    "--overwrite",
    "is_overwrite",
    is_flag=True,
    help="Overwrite card data even if data already exist and not expired.",
)
@click.pass_context
def gen(ctx, github_id_list, api_token, output_dir, is_overwrite):
    """
    Generate a GitHub user/repository card data file.
    ID need to either '<user-name>' or '<user-name>/<repository-name>'.
    Example:
    $ ghscard gen thombashi/ghscard
    """
    log_level = ctx.obj[Context.LOG_LEVEL]
    logger = get_logger(log_level, f"{PROGRAM_NAME:s} gen")
    appconfigpy.set_log_level(log_level)

    # bail out early when GitHub itself reports a major outage
    if get_api_status() == "major":
        logger.error("GitHub API status is in red status")
        sys.exit(1)

    try:
        app_configs = ConfigManager(PROGRAM_NAME, CONFIG_ITEMS).load()
    except ValueError as e:
        logger.debug(msgfy.to_debug_message(e))
        app_configs = {}

    # command-line options take precedence over the configuration file
    if typepy.is_not_null_string(output_dir):
        app_configs[AppConfigKey.OUTPUT_DIR] = output_dir

    if typepy.is_not_null_string(api_token):
        app_configs[AppConfigKey.GITHUB_API_ACCESS_TOKEN] = api_token
    elif os.environ.get("GITHUB_TOKEN"):
        app_configs[AppConfigKey.GITHUB_API_ACCESS_TOKEN] = os.environ.get("GITHUB_TOKEN")

    if not github_id_list:
        logger.error(
            "command requires at least one argument: "
            "'<user-name>' or '<user-name>/<repository-name>'"
        )
        sys.exit(errno.EINVAL)

    return_codes = []
    generator = CardGenerator(logger, app_configs, is_overwrite)
    for gh_id in github_id_list:
        try:
            return_codes.append(generator.generate_card(gh_id))
        except KeyboardInterrupt:
            sys.exit(errno.EINTR)
        except RateLimitExceededException as e:
            logger.error(e)
            sys.exit(errno.ENOSR)

    # non-zero exit when any single card generation failed
    sys.exit(1 if any(return_codes) else 0)
if __name__ == "__main__":
cmd()
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,449
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/fetcher/__init__.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from ._base import AbstractCardDataFetcher
from ._organization import OrganizationCardDataFetcher
from ._repository import RepositoryCardDataFetcher
from ._user import UserCardDataFetcher
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,450
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/fetcher/_user.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import collections
from multiprocessing.pool import AsyncResult # noqa
from typing import Any, Counter, Dict, List, Optional, Union # noqa
from .._const import DATETIME_FORMAT, CardType, CommonCardKey
from ._base import AbstractCardDataFetcher, CardData
from ._common import dump_organization, to_chart_data
def ghc_starred_count_helper(ghc_client) -> Dict[str, int]:
    """Pool worker: wrap the client's starred-repository count in a result dict."""
    return {"stars": ghc_client.starred_count}
def ghc_organizations_helper(user) -> Dict[str, List[Dict[str, str]]]:
    """Pool worker: dump the user's organizations into a result dict."""
    organizations = [dump_organization(org) for org in user.get_orgs()]
    return {"organizations": organizations}
def ghc_languages_helper(user) -> Dict[str, Dict[str, list]]:
    """Pool worker: count primary languages across the user's repositories."""
    language_counter = collections.Counter()  # type: Counter
    for repository in user.get_repos():
        language_counter[repository.language] += 1

    # repositories without a detected language are counted under None; drop them
    language_counter.pop(None, None)

    return {"languages": to_chart_data(language_counter, 5)}
class UserCardDataFetcher(AbstractCardDataFetcher):
    """Fetch card data for a GitHub user account."""

    @property
    def type(self) -> str:
        return CardType.USER

    def fetch(self) -> CardData:
        """Collect the user's profile fields plus pooled aggregate queries."""
        self._logger.debug(f"fetching user data: id={self.id}")

        card_data = super().fetch()
        user = self._pygh_client.get_user(self.id)

        # start the slow aggregate queries in parallel while profile fields are copied
        async_results = [
            self._pool.apply_async(ghc_starred_count_helper, args=[self._ghc_client]),
            self._pool.apply_async(ghc_organizations_helper, args=[user]),
            self._pool.apply_async(ghc_languages_helper, args=[user]),
        ]  # type: List[AsyncResult]

        # this will raise UnknownObjectException when failed to get data
        card_data["profile_name"] = user.name

        card_data[CommonCardKey.AVATAR_URL] = user.avatar_url
        card_data[CommonCardKey.CARD_TYPE] = CardType.USER
        card_data[CommonCardKey.CREATED_AT] = user.created_at.strftime(DATETIME_FORMAT)
        card_data[CommonCardKey.DESCRIPTION] = user.bio
        card_data[CommonCardKey.EMOJIS] = self._get_emoji_mapping(user.bio)
        card_data[CommonCardKey.HTML_URL] = user.html_url
        card_data[CommonCardKey.NAME] = self.id
        card_data[CommonCardKey.UPDATED_AT] = user.updated_at.strftime(DATETIME_FORMAT)
        card_data["blog"] = user.blog
        card_data["company"] = user.company
        card_data["email"] = user.email
        card_data["followers"] = user.followers
        card_data["following"] = user.following
        card_data["location"] = user.location
        card_data["public_gists"] = user.public_gists
        card_data["public_repos"] = user.public_repos

        # merge the pooled results as they complete, in submission order
        for i, async_result in enumerate(async_results):
            worker_id = "thread {:d}/{:d}".format(i + 1, len(async_results))
            self._logger.debug("wait for {}".format(worker_id))
            card_data.update(async_result.get())
            self._logger.debug("complete {}".format(worker_id))

        return card_data
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,451
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/_github_client.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import re
from typing import Dict
import msgfy
import retryrequests
import typepy
from github.GithubException import RateLimitExceededException
from ._const import MAX_PER_PAGE
from ._emoji import EmojiParser
from ._stopwatch import stopwatch
class GitHubClient:
    """Thin client for the GitHub REST API (v3) with simple result caching.

    Paged list endpoints are counted by walking pages of up to MAX_PER_PAGE items.
    """

    @property
    def emojis(self) -> Dict[str, str]:
        # BUGFIX: the cache check was truthiness-based, so an empty response
        # caused a re-fetch on every access; compare against None instead.
        if self.__emojis is not None:  # type: ignore
            return self.__emojis  # type: ignore

        self.__emojis = self.get("/emojis")

        return self.__emojis

    @property
    def emoji_parser(self) -> EmojiParser:
        if self.__emoji_parser is not None:  # type: ignore
            return self.__emoji_parser  # type: ignore

        self.__emoji_parser = EmojiParser(self.emojis)

        return self.__emoji_parser

    @property
    def repo(self):
        if self.__repos is not None:
            return self.__repos

        # get license: https://developer.github.com/v3/licenses/
        self.__repos = self.get(
            f"/repos/{self.__github_id:s}",
            headers={"accept": "application/vnd.github.drax-preview+json"},
        )

        return self.__repos

    @property
    def branches_count(self) -> int:
        return self.__get_count("branches")

    @property
    def contributors_count(self) -> int:
        return self.__get_count("contributors")

    @property
    def pulls_count(self) -> int:
        return self.__get_count("pulls")

    @property
    def tags_count(self) -> int:
        return self.__get_count("tags")

    @property
    def starred_count(self) -> int:
        return self.__get_count("starred")

    def __init__(self, logger, github_id, access_token=None):
        self._logger = logger
        self.__github_id = github_id
        self.__access_token = access_token

        # lazily-populated caches
        self.__emojis = None
        self.__emoji_parser = None
        self.__repos = None

    def get(self, operation: str, headers: dict = None, params: dict = None) -> dict:
        """Call ``https://api.github.com<operation>`` and return the decoded JSON.

        Raises:
            OSError: when the API reports a list too large to return.
            RateLimitExceededException: on an HTTP 403 rate-limit response.
        """
        if not headers:
            headers = {}
        if not params:
            params = {}

        if typepy.is_not_null_string(self.__access_token):
            headers["authorization"] = f"token {self.__access_token:s}"

        api_url = f"https://api.github.com{operation:s}"
        response = retryrequests.get(api_url, headers=headers, params=params)
        self._logger.debug(f"API called: {response.url}")

        try:
            response_json = response.json()
        except ValueError:
            # non-JSON body: treat as an empty result
            return {}

        try:
            message = response_json.get("message")
        except AttributeError:
            # list responses have no .get(); return them unchanged
            return response_json

        if message:
            if re.search(".* list is too large to list .* via the API", message):
                raise OSError(message)

            if response.status_code == 403 and re.search("^API rate limit exceeded for ", message):
                raise RateLimitExceededException(status=response.status_code, data=message)

        return response_json

    def get_page(self, operation: str, page) -> dict:
        """Fetch one page of a paged endpoint with the maximum page size."""
        return self.get(operation, params={"per_page": str(MAX_PER_PAGE), "page": page})

    def _get_branches(self, page) -> dict:
        # https://developer.github.com/v3/repos/branches/
        return self.get_page(f"/repos/{self.__github_id:s}/branches", page=page)

    def _get_contributors(self, page) -> dict:
        return self.get_page(f"/repos/{self.__github_id:s}/contributors", page=page)

    def _get_pulls(self, page) -> dict:
        # https://developer.github.com/v3/pulls/
        return self.get_page(f"/repos/{self.__github_id:s}/pulls", page=page)

    def _get_tags(self, page) -> dict:
        # https://developer.github.com/v3/git/tags/
        return self.get_page(f"/repos/{self.__github_id:s}/tags", page=page)

    def _get_releases(self, page) -> dict:
        # https://developer.github.com/v3/repos/releases/
        return self.get_page(f"/repos/{self.__github_id:s}/releases", page=page)

    def _get_starred(self, page) -> dict:
        return self.get_page(f"/users/{self.__github_id:s}/starred", page=page)

    def __get_count(self, param_name: str) -> int:
        """Count items of a paged endpoint, caching the total on the instance."""
        attr_name = "__{:s}".format(param_name)
        method_name = "_get_{:s}".format(param_name)

        # BUGFIX: the cached value was returned only when truthy, so a count
        # of zero triggered a full recount on every access; return any cached
        # value once the attribute exists.
        try:
            return getattr(self, attr_name)
        except AttributeError:
            pass

        total_count = 0
        page = 1

        with stopwatch(self._logger, f"get {param_name:s}"):
            while True:
                with stopwatch(self._logger, f"{method_name:s} page {page:d}"):
                    try:
                        subtotal_count = len(getattr(self, method_name)(page))
                    except OSError as e:
                        # endpoint refused to enumerate (list too large); stop counting
                        self._logger.debug(msgfy.to_debug_message(e))
                        break

                if not subtotal_count:
                    break

                total_count += subtotal_count
                page += 1

        setattr(self, attr_name, total_count)

        return total_count
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,452
|
thombashi/ghscard
|
refs/heads/master
|
/ghscard/fetcher/_base.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import abc
import datetime
import multiprocessing
from typing import Dict, List, Union
import github
from .._const import CARD_DATA_VERSION, DATETIME_FORMAT, CommonCardKey, Result
from .._github_client import GitHubClient
from ._common import ChartData
# type aliases for the values stored in a card-data mapping
EmojiMap = Dict[str, str]
Topics = List[str]
CardData = Dict[str, Union[int, str, None, ChartData, EmojiMap, Topics]]


class AbstractCardDataFetcher(metaclass=abc.ABCMeta):
    """Base class for fetchers that build card data for a GitHub entity."""

    # FIX: abc.abstractproperty is deprecated since Python 3.3;
    # use @property stacked on @abc.abstractmethod instead.
    @property
    @abc.abstractmethod
    def type(self) -> str:  # pragma: no cover
        """Card type identifier implemented by each concrete fetcher."""
        pass

    @property
    def id(self) -> str:
        """The GitHub id this fetcher operates on."""
        return self.__id

    def __init__(
        self, pygh_client: github.Github, ghc_client: GitHubClient, id: str, logger
    ) -> None:
        self.__id = id
        self._logger = logger
        self._pygh_client = pygh_client
        self._ghc_client = ghc_client

        # worker pool used by subclasses to run independent queries concurrently
        self._pool = multiprocessing.Pool(processes=4)

    def fetch(self) -> CardData:
        """Return the common card-data fields; subclasses extend this mapping."""
        return {
            CommonCardKey.ID: self.id,
            CommonCardKey.FETCHD_AT: datetime.datetime.now().strftime(DATETIME_FORMAT),
            CommonCardKey.VERSION: CARD_DATA_VERSION,
            CommonCardKey.RESULT: Result.SUCCESS,
        }

    def terminate(self) -> None:
        """Terminate the worker pool immediately."""
        self._pool.terminate()

    def _get_emoji_mapping(self, text) -> EmojiMap:
        """Map each emoji name found in *text* to its image URL (empty on parse failure)."""
        try:
            emoji_list = self._ghc_client.emoji_parser.parse(text)
        except ValueError:
            emoji_list = []

        return {emoji: self._ghc_client.emoji_parser.get_url(emoji) for emoji in emoji_list}
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,453
|
thombashi/ghscard
|
refs/heads/master
|
/docs/make_readme.py
|
#!/usr/bin/env python3
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sys
from readmemaker import ReadmeMaker
PROJECT_NAME = "ghscard"
OUTPUT_DIR = ".."  # README is emitted into the repository root
def write_quickstart(maker):
    """Append the Quickstart chapter plus a pointer to the full usage docs."""
    maker.set_indent_level(0)
    maker.write_introduction_file("quickstart.txt")
    maker.inc_indent_level()
    maker.write_chapter("For more information")
    maker.write_lines(
        [
            "More examples are available at ",
            f"https://{PROJECT_NAME:s}.rtfd.io/en/latest/pages/usage/index.html",
        ]
    )
def main():
    """Assemble the project README from the docs introduction fragments.

    Returns 0 so callers can pass the value to ``sys.exit``.
    """
    maker = ReadmeMaker(
        PROJECT_NAME,
        OUTPUT_DIR,
        is_make_toc=True,
        project_url=f"https://github.com/thombashi/{PROJECT_NAME}",
    )
    # The call order below defines the final README section layout.
    maker.write_chapter("Summary")
    maker.write_introduction_file("summary.txt")
    maker.write_introduction_file("badges.txt")
    maker.write_chapter("Demo")
    maker.write_introduction_file("demo.txt")
    maker.write_introduction_file("installation.rst")
    write_quickstart(maker)
    maker.write_file(maker.doc_page_root_dir_path.joinpath("environment.rst"))
    maker.write_chapter("Documentation")
    maker.write_lines([f"https://{PROJECT_NAME:s}.rtfd.io/"])
    maker.write_file(maker.doc_page_root_dir_path.joinpath("sponsors.rst"))
    return 0
if __name__ == "__main__":
    sys.exit(main())  # propagate main()'s return code as the process exit status
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,454
|
thombashi/ghscard
|
refs/heads/master
|
/test/test_detector.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import github
import logbook
import pytest
from github.GithubException import UnknownObjectException
from ghscard._const import CardType
from ghscard._detector import GithubIdDetector
def monkey_get_organization(a, b):
    # monkeypatch stand-in for github.Github.get_organization: always behaves
    # as if the requested organization does not exist.
    raise UnknownObjectException("dummy_status", "dummy_data", None)
logger = logbook.Logger("test")  # shared logger passed to every detector
class Test_GithubIdDetector_constructor:
    """Malformed GitHub id strings must make the constructor raise ValueError."""

    @pytest.mark.parametrize(
        ["value", "expected"],
        [
            [None, ValueError],
            ["", ValueError],
            ["/", ValueError],
            ["//", ValueError],
            ["a/b/c", ValueError],  # at most one "/" (owner/repo) is valid
        ],
    )
    def test_exception(self, value, expected):
        with pytest.raises(expected):
            GithubIdDetector(value, logger, pygh_client=github.Github(None))
class Test_GithubIdDetector_id:
    """The ``id`` property normalizes surrounding slashes and whitespace."""

    @pytest.mark.parametrize(
        ["value", "expected"],
        [
            ["thombashi", "thombashi"],
            ["thombashi/", "thombashi"],
            ["/thombashi", "thombashi"],
            ["/thombashi/", "thombashi"],
            ["thombashi/ghscard", "thombashi/ghscard"],
            [" thombashi / ghscard ", "thombashi/ghscard"],
        ],
    )
    def test_normal(self, monkeypatch, value, expected):
        # Make get_organization fail so the id resolves as a user/repository.
        monkeypatch.setattr(github.Github, "get_organization", monkey_get_organization)
        detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))
        assert detector.id == expected
class Test_GithubIdDetector_is_user:
    """Single-segment ids are classified as users when no org exists."""

    @pytest.mark.parametrize(
        ["value", "expected"],
        [["thombashi", True], ["thombashi/", True], ["/thombashi", True], ["/thombashi/", True]],
    )
    def test_normal(self, monkeypatch, value, expected):
        # get_organization raising means "not an organization" -> user.
        monkeypatch.setattr(github.Github, "get_organization", monkey_get_organization)
        detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))
        assert detector.is_user() == expected
class Test_GithubIdDetector_is_organization:
    """Single-segment ids are organizations when get_organization succeeds."""

    @pytest.mark.parametrize(
        ["value", "expected"],
        [["thombashi", True], ["thombashi/", True], ["/thombashi", True], ["/thombashi/", True]],
    )
    def test_normal(self, monkeypatch, value, expected):
        # A successful (non-raising) get_organization marks the id as an org.
        monkeypatch.setattr(github.Github, "get_organization", lambda a, b: a)
        detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))
        assert detector.is_organization() == expected
class Test_GithubIdDetector_is_repository:
    """Two-segment ("owner/repo") ids are classified as repositories."""

    @pytest.mark.parametrize(
        ["value", "expected"], [["thombashi/ghscard", True], [" thombashi / ghscard ", True]]
    )
    def test_normal(self, value, expected):
        detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))
        assert detector.is_repository() == expected
class Test_GithubIdDetector_get_id_type:
    """get_id_type maps an id string to the matching CardType constant."""

    @pytest.mark.parametrize(
        ["value", "expected"],
        [["thombashi", CardType.USER], ["thombashi/ghscard", CardType.REPOSITORY]],
    )
    def test_normal(self, monkeypatch, value, expected):
        monkeypatch.setattr(github.Github, "get_organization", monkey_get_organization)
        detector = GithubIdDetector(value, logger, pygh_client=github.Github(None))
        assert detector.get_id_type() == expected
|
{"/ghscard/fetcher/_repository.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/fetcher/_common.py": ["/ghscard/_const.py"], "/test/test_emoji.py": ["/ghscard/_emoji.py"], "/ghscard/_stopwatch.py": ["/ghscard/_const.py"], "/ghscard/fetcher/_organization.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py"], "/ghscard/_detector.py": ["/ghscard/_const.py"], "/ghscard/_generator.py": ["/ghscard/_cache.py", "/ghscard/_const.py", "/ghscard/_detector.py", "/ghscard/_github_client.py", "/ghscard/_stopwatch.py", "/ghscard/fetcher/__init__.py"], "/ghscard/__main__.py": ["/ghscard/_const.py", "/ghscard/_generator.py", "/ghscard/_logger.py"], "/ghscard/fetcher/__init__.py": ["/ghscard/fetcher/_base.py", "/ghscard/fetcher/_organization.py", "/ghscard/fetcher/_repository.py", "/ghscard/fetcher/_user.py"], "/ghscard/fetcher/_user.py": ["/ghscard/_const.py", "/ghscard/fetcher/_base.py", "/ghscard/fetcher/_common.py"], "/ghscard/_github_client.py": ["/ghscard/_const.py", "/ghscard/_emoji.py", "/ghscard/_stopwatch.py"], "/ghscard/fetcher/_base.py": ["/ghscard/_const.py", "/ghscard/_github_client.py", "/ghscard/fetcher/_common.py"], "/test/test_detector.py": ["/ghscard/_const.py", "/ghscard/_detector.py"]}
|
20,507
|
pshegde96/multilayer-perceptron
|
refs/heads/master
|
/trainer.py
|
import numpy as np
import pickle,shutil
from network import Network
from cross_entropy import cross_entropy
import matplotlib.pyplot as plt
import math
import time
import argparse
from mnist import MNIST
import os
'''Parse CommandLine Arguments '''
# Single-dash long flags (e.g. -activation) are optional hyper-parameter
# overrides; model_id is the only required positional argument.
parser = argparse.ArgumentParser()
parser.add_argument('model_id',help='Enter the model number')
parser.add_argument('-activation',help='Activation in the Hidden Layers')
parser.add_argument('-layers',help='Hidden Layers, pass as string with numbers separated by commas')
parser.add_argument('-no_iter',help='Number of mini-batch iterations to train',type=int)
parser.add_argument('-batch_size',help='Batch size',type=int)
parser.add_argument('-initial_lr',help='Initial Learning Rate',type=float)
parser.add_argument('-lr_decay',help='Learning Rate Decay every 200 epochs',type=float)
parser.add_argument('-lambda_reg',help='L2 norm regularization parameter',type=float)
parser.add_argument('-momentum',help='Momentum Weight',type=float)
parser.add_argument('-savemodel',help='1 to save,default 0',type=int) #not yet implemented
parser.add_argument('-modeldir',help='Specify dir to store models with / suffixed.Default:models/') #not yet implemented
args = parser.parse_args()
'''Important Parameters'''
# Every hyper-parameter below has a default that a CLI flag may override.
# NOTE: this script uses Python 2 syntax (print statement, raw_input).
MODEL = './models/'+str(args.model_id)
BATCH_SIZE = 64
if args.batch_size:
    BATCH_SIZE = int(args.batch_size)
LAYERS_SIZE = [784,1000,500,250,10]
if args.layers:
    LAYERS_SIZE = map(int,args.layers.split(','))
LEARNING_RATE = 0.3
if args.initial_lr:
    LEARNING_RATE = float(args.initial_lr)
LR_DECAY = 1.0 #EVERY 200 ITERATIONS
if args.lr_decay:
    LR_DECAY = float(args.lr_decay)
LAMBDA_REG = 0.0
if args.lambda_reg:
    LAMBDA_REG = float(args.lambda_reg)
NO_ITER = 8000
if args.no_iter:
    NO_ITER = int(args.no_iter)
ACTIVATION = 'sigmoid'
if args.activation:
    ACTIVATION = str(args.activation)
MOMENTUM = 0.0
if args.momentum:
    MOMENTUM = float(args.momentum)
'''Print the parameters so that user can verify them '''
print 'Architecture: {}'.format(LAYERS_SIZE)
print 'Batch Size: {}'.format(BATCH_SIZE)
print 'Initial Learning Rate: {}'.format(LEARNING_RATE)
print 'Learning Rate Decay every 200 iterations: {}'.format(LR_DECAY)
print 'Momentum Weight: {}'.format(MOMENTUM)
print 'Lambda of L2 Weight Regularization: {}'.format(LAMBDA_REG)
print 'Total Number of Iterations: {}'.format(NO_ITER)
print 'Activation in Hidden Layers: {}'.format(ACTIVATION)
# Warn before overwriting a previously trained model of the same id.
if os.path.exists(MODEL):
    print '\n\n WARNING!!!: The model id that you are trying to train already exists.'
    print 'If you continue the program the existing model will be deleted \n\n\n'
    print '\n Press Enter to Continue'
    raw_input()
'''Load the Data-Set'''
data = MNIST('./data/')
X_train,Y_train = data.load_training()
X_test,Y_test = data.load_testing()
X_train = np.array(X_train)
Y_train = np.array(Y_train)
X_test = np.array(X_test)
Y_test = np.array(Y_test)
#Normalize the data
# Standardize with training-set statistics only (no test-set leakage).
X_mean = np.mean(X_train,axis=0)
X_train = X_train-X_mean
X_std = np.sqrt(np.mean(X_train**2,axis=0))
X_train = X_train/(X_std+1e-10)
# NOTE(review): train uses epsilon 1e-10 but test uses 1e-7 — confirm the
# asymmetry is intentional.
X_test = (X_test-X_mean)/(X_std+1e-7)
'''Let the training begin '''
index = 0 #start from the first element
net = Network(LAYERS_SIZE,activation=ACTIVATION)
net.init_network()
# Histories for plotting / later analysis.
loss_train = []
steps_train = []
loss_test = []
steps_test = []
accuracy_test = []
#Use try block to stop the training when Ctrl-C is pressed
try:
    for step in range(NO_ITER):
        # When the epoch wraps around, reshuffle the training set.
        if index+BATCH_SIZE >= X_train.shape[0]:
            index = 0
            #permute the data to instill a sense of random sampling
            permute = np.random.permutation(X_train.shape[0])
            X_train = X_train[permute]
            Y_train = Y_train[permute]
        X_batch = X_train[index:index+BATCH_SIZE]
        Y_batch = Y_train[index:index+BATCH_SIZE]
        Y_hat = net.forward_pass(X_batch)
        #Record the training loss
        loss = cross_entropy(Y_hat,Y_batch,one_hot='False')
        loss_train.append(loss)
        steps_train.append(step)
        #Update parameters
        # backward_pass computes the momentum-velocity terms; the weight
        # update itself is applied here by adding the velocities.
        net.backward_pass(Y_batch,LAMBDA_REG,LEARNING_RATE= LEARNING_RATE,MOMENTUM=MOMENTUM)
        for layer in net.layers:
            layer.W += layer.dW_v
            layer.b += layer.db_v
        if step%200 == 0:
            #compute test loss
            # Learning-rate decay is applied every 200 iterations.
            LEARNING_RATE *= LR_DECAY
            Y_hat_test = net.forward_pass(X_test)
            loss_test1 = cross_entropy(Y_hat_test,Y_test,one_hot='False')
            #Also compute the test accuracy
            p_test = net.forward_pass(X_test)
            Y_test_hat = np.zeros_like(p_test)
            Y_test_onehot = np.zeros_like(p_test)
            for i in range(len(Y_test)):
                Y_test_hat[i,np.argmax(p_test[i])]=1
                Y_test_onehot[i,Y_test[i]] =1
            test_accuracy = np.sum(Y_test_hat*Y_test_onehot)/Y_test.shape[0]
            #Record data
            steps_test.append(step)
            loss_test.append(loss_test1)
            accuracy_test.append(test_accuracy)
            print 'STEP: {} \t BATCH LOSS: {} \t TEST LOSS: {} \t TEST ACCURACY: {}'.format(step,loss,loss_test1,test_accuracy)
        index += BATCH_SIZE
#If Ctrl-C is pressed, exit the training
except KeyboardInterrupt:
    print '\n'
# Final test-set accuracy (argmax prediction vs one-hot labels).
p_test = net.forward_pass(X_test)
Y_test_hat = np.zeros_like(p_test)
Y_test_onehot = np.zeros_like(p_test)
for i in range(len(Y_test)):
    Y_test_hat[i,np.argmax(p_test[i])]=1
    Y_test_onehot[i,Y_test[i]] =1
print np.sum(Y_test_hat*Y_test_onehot)/Y_test.shape[0]
'''Save the model'''
# Strip cached activations and gradients so the pickle only stores weights.
for layer in net.layers:
    del layer.dW
    del layer.dW_v
    del layer.db
    del layer.db_v
    del layer.X
    del layer.Z
    del layer.A
# Replace any existing model directory for this id.
if os.path.exists(MODEL):
    shutil.rmtree(MODEL)
os.makedirs(MODEL)
with open(MODEL+'/weights.pkl','wb') as output:
    pickle.dump(net,output,pickle.HIGHEST_PROTOCOL)
#Also save the important data
with open(MODEL+'/data.pkl','wb') as output:
    pickle.dump([steps_train,loss_train,steps_test,loss_test,accuracy_test],output,pickle.HIGHEST_PROTOCOL)
|
{"/network.py": ["/layers.py", "/softmax.py"], "/layers.py": ["/activations.py"]}
|
20,508
|
pshegde96/multilayer-perceptron
|
refs/heads/master
|
/softmax.py
|
import numpy as np
'''
Computes the softmax of a matrix considering the rows as input variables
'''
def softmax(x, tmp=1):
    """Row-wise softmax with an optional temperature-like factor *tmp*.

    Each row is shifted by its maximum before exponentiation for numerical
    stability; the shift cancels out in the normalization.
    """
    shifted = x - np.max(x, axis=1).reshape(-1, 1)
    scores = np.exp(shifted * tmp)
    return scores / (np.sum(scores, axis=1)).reshape(-1, 1)
if __name__ == "__main__":
    # BUG FIX: this guard previously called main(), which is not defined
    # anywhere in the module, so running the file raised NameError.
    # Run a small smoke demonstration of softmax() instead.
    print(softmax(np.array([[1.0, 2.0, 3.0]])))
|
{"/network.py": ["/layers.py", "/softmax.py"], "/layers.py": ["/activations.py"]}
|
20,509
|
pshegde96/multilayer-perceptron
|
refs/heads/master
|
/activations.py
|
'''
Contains all the activation functions implemented along with their derivatives
'''
import numpy as np
def sigmoid_fn(X):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-X))."""
    return np.reciprocal(1 + np.exp(-X))

def sigmoid_derivative(X):
    """Derivative of the sigmoid, computed as s * (1 - s)."""
    s = sigmoid_fn(X)
    return s * (1 - s)
def relu_fn(X):
    """Element-wise rectified linear unit: max(X, 0)."""
    return np.maximum(X, 0)

def relu_derivative(X):
    """Sub-gradient of ReLU: 1 where X >= 0, else 0 (value 1 chosen at X == 0)."""
    return np.where(X >= 0, np.ones_like(X), np.zeros_like(X))
|
{"/network.py": ["/layers.py", "/softmax.py"], "/layers.py": ["/activations.py"]}
|
20,510
|
pshegde96/multilayer-perceptron
|
refs/heads/master
|
/network.py
|
import numpy as np
from layers import Layer
from softmax import softmax
class Network:
    """A simple multilayer perceptron.

    ``layers_size`` is [input_dim, hidden_1, ..., output_dim]. Hidden layers
    use ``activation``; the final layer is linear and, for classification,
    its output is passed through a softmax in forward_pass.
    """

    def __init__(self, layers_size, activation='relu', task='classification'):
        self.layers_size = layers_size
        self.activation = activation
        self.task = task

    def init_network(self):
        """Build and initialize every layer.

        BUG FIX: the hidden layers previously hard-coded activation='relu',
        silently ignoring the ``activation`` argument given to the
        constructor (e.g. trainer.py passes activation=ACTIVATION).
        """
        self.layers = []
        #initialize all layers except the last one with the specified activation
        for l in range(len(self.layers_size)-2):
            self.layers.append(Layer(activation=self.activation,
                in_dim=self.layers_size[l],
                out_dim=self.layers_size[l+1]))
            self.layers[l].init_variables() #initialize the weights of the layer
        #Now add the final softmax layer
        self.layers.append(Layer(activation='linear',
            in_dim=self.layers_size[-2],
            out_dim=self.layers_size[-1],
            posn='final'))
        self.layers[-1].init_variables() #initialize the weights of the layer

    def forward_pass(self, X):
        """Propagate X through all layers; returns class probabilities."""
        X_new = np.copy(X)
        for layer in self.layers:
            X_old = np.copy(X_new)
            X_new = layer.forward(X_old)
        if self.task == 'classification':
            self.Y_hat = softmax(X_new)
        #Yet to implement for regression
        else:
            pass
        return self.Y_hat

    def backward_pass(self, Y_vec, LAMBDA_REG=0, LEARNING_RATE=0.1, MOMENTUM=0.3):
        """Backpropagate the cross-entropy/softmax gradient.

        Y_vec is a vector of integer class labels; each layer stores its
        momentum-velocity updates (dW_v, db_v) — the caller applies them.
        """
        #encode Y_vec in one-hot form
        Y = np.zeros_like(self.Y_hat)
        Y[range(self.Y_hat.shape[0]), Y_vec] = 1
        # Gradient of mean cross-entropy w.r.t. pre-softmax scores.
        delta_plus = (self.Y_hat - Y)/self.Y_hat.shape[0]
        #process the final layer differently:
        delta_plus = self.layers[-1].backward(delta_plus=delta_plus,
                                              W_plus=None, LAMBDA_REG=LAMBDA_REG,
                                              LEARNING_RATE=LEARNING_RATE,
                                              MOMENTUM=MOMENTUM)
        #go backwards through the layers, omitting the last layer
        for i in range(len(self.layers)-1):
            delta_plus = self.layers[-2-i].backward(delta_plus=delta_plus,
                                                    W_plus=np.copy(self.layers[-1-i].W),
                                                    LAMBDA_REG=LAMBDA_REG,
                                                    LEARNING_RATE=LEARNING_RATE,
                                                    MOMENTUM=MOMENTUM)
|
{"/network.py": ["/layers.py", "/softmax.py"], "/layers.py": ["/activations.py"]}
|
20,511
|
pshegde96/multilayer-perceptron
|
refs/heads/master
|
/layers.py
|
import numpy as np
import activations as act
class Layer:
    """One fully-connected layer computing A = f(X W + b).

    ``posn='final'`` marks the last (pre-softmax) layer, whose backward pass
    receives the already-combined softmax/cross-entropy gradient.
    """

    def __init__(self, activation='relu', in_dim=1, out_dim=1, posn='hidden'):
        self.activation = activation
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.posn = posn

    def init_variables(self):
        """Xavier-style weight init; small positive bias keeps ReLUs alive."""
        fan_in = self.in_dim
        self.W = 0.1 * np.random.randn(fan_in, self.out_dim) / np.sqrt(fan_in)
        self.b = 0.1 * np.ones((1, self.out_dim))
        # Momentum velocities start at zero.
        self.dW_v = np.zeros_like(self.W)
        self.db_v = np.zeros_like(self.b)

    def forward(self, X):
        """Cache X and Z, apply the activation, and return A = f(XW + b)."""
        self.X = X
        self.Z = X.dot(self.W) + self.b
        if self.activation == 'linear':
            activated = self.Z
        elif self.activation == 'sigmoid':
            activated = act.sigmoid_fn(self.Z)
        else:
            activated = act.relu_fn(self.Z)
        self.A = activated
        return self.A

    def backward(self, delta_plus, W_plus, LAMBDA_REG=0,
                 LEARNING_RATE=0.1, MOMENTUM=0.3):
        """Compute this layer's delta and momentum-velocity updates.

        Returns delta so the previous layer can continue backpropagation;
        the weight update itself (applying dW_v/db_v) is left to the caller.
        """
        if self.posn == 'final':
            # Final layer: delta_plus already is dL/dZ.
            delta = delta_plus.copy()
        else:
            if self.activation == 'linear':
                gate = np.ones_like(self.Z)
            elif self.activation == 'sigmoid':
                gate = act.sigmoid_derivative(self.Z)
            else:
                gate = act.relu_derivative(self.Z)
            delta = delta_plus.dot(W_plus.T) * gate
        # Gradients with L2 regularization on W only.
        self.dW = self.X.T.dot(delta) + LAMBDA_REG * self.W
        self.db = np.ones((1, self.X.shape[0])).dot(delta)
        # Momentum update of the velocities.
        self.dW_v = MOMENTUM * self.dW_v - LEARNING_RATE * self.dW
        self.db_v = MOMENTUM * self.db_v - LEARNING_RATE * self.db
        return delta
|
{"/network.py": ["/layers.py", "/softmax.py"], "/layers.py": ["/activations.py"]}
|
20,512
|
pshegde96/multilayer-perceptron
|
refs/heads/master
|
/numerical_gradient.py
|
import numpy as np
import cPickle,gzip
from network import Network
from cross_entropy import cross_entropy
import copy
'''Important Parameters'''
# Python 2 script (cPickle, print statements) that checks the analytic
# gradients of Network against a central finite-difference approximation.
BATCH_SIZE = 64
LAYERS_SIZE = [784,50,20,10]
LEARNING_RATE = 0.001
LR_DECAY = 0.85 #EVERY 200 ITERATIONS
LAMBDA_REG = 0.005
NO_ITER = 8000
h = 1e-5  # finite-difference step size
'''Load the Data-Set'''
# Expects the classic gzipped MNIST pickle (train/val/test tuples).
f = gzip.open('mnist.pkl.gz','rb')
train_set,val_set,test_set = cPickle.load(f)
f.close()
X_train =train_set[0]
Y_train = train_set[1]
X_test = test_set[0]
Y_test = test_set[1]
'''Let the training begin '''
net = Network(LAYERS_SIZE,activation='sigmoid')
net.init_network()
X_batch = X_train[0:1000]
Y_batch = Y_train[0:1000]
Y_hat = net.forward_pass(X_batch)
#Calculate Numerical Gradient
# For every weight: perturb by +h and -h (on a deep copy so the reference
# network is untouched), take the central difference of the loss, and
# accumulate the squared error against the analytic gradient dW.
net.backward_pass(Y_batch)
diff = 0
count = 0
for k in range(len(net.layers)):
    for i in range(net.layers[k].W.shape[0]):
        for j in range(net.layers[k].W.shape[1]):
            net2 = copy.deepcopy(net)
            net2.layers[k].W[i,j] += h
            f1 = cross_entropy(net2.forward_pass(X_batch),Y_batch,one_hot='False')
            net2.layers[k].W[i,j] -= 2*h
            f2 = cross_entropy(net2.forward_pass(X_batch),Y_batch,one_hot='False')
            diff += (net.layers[k].dW[i,j] - (f1-f2)/2/h)**2
            count +=1
# Mean squared deviation between analytic and numerical gradients.
print count
print diff/count
|
{"/network.py": ["/layers.py", "/softmax.py"], "/layers.py": ["/activations.py"]}
|
20,513
|
pshegde96/multilayer-perceptron
|
refs/heads/master
|
/cross_entropy.py
|
import numpy as np
def cross_entropy(Y_hat, y, one_hot='True'):
    """Mean cross-entropy loss over a batch.

    Y_hat: (N, C) predicted class probabilities.
    y: an (N, C) one-hot matrix when one_hot='True' (the default), or a
       length-N vector of integer class labels when one_hot='False'.
    The flag is a *string* for compatibility with existing callers.
    """
    if one_hot == 'False':
        targets = np.zeros_like(Y_hat)
        targets[np.arange(y.shape[0]), y] = 1
    else:
        targets = y
    # Small epsilon guards against log(0).
    log_probs = targets * np.log(Y_hat + 1e-7)
    return -1.0 / targets.shape[0] * (np.sum(log_probs))
|
{"/network.py": ["/layers.py", "/softmax.py"], "/layers.py": ["/activations.py"]}
|
20,515
|
eduardogpg/important_people
|
refs/heads/master
|
/config.py
|
import os
class Config(object):
    """Base configuration shared by every environment."""
    # The `or` also replaces an *empty* SECRET_KEY env value with the default.
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'my_custome_secret_key'
class DevelopmentConfig(Config):
    """Local development settings."""
    DEBUG = True
    # NOTE(review): hard-coded root/no-password credentials — acceptable
    # locally, but confirm this config is never used outside development.
    SQLALCHEMY_DATABASE_URI = 'mysql://root:@localhost/important_people'
    SQLALCHEMY_TRACK_MODIFICATIONS = False
|
{"/forms.py": ["/models.py"], "/manage.py": ["/config.py", "/forms.py", "/models.py"]}
|
20,516
|
eduardogpg/important_people
|
refs/heads/master
|
/forms.py
|
from wtforms import Form
from wtforms import TextField
from wtforms import PasswordField
from wtforms.fields.html5 import EmailField
from wtforms import HiddenField
from wtforms import validators
from models import User
def length_honeypot(form, field):
    """WTForms inline validator: the hidden honeypot field must stay empty.

    Bots tend to fill every field, so any content here rejects the submission.
    """
    if len(field.data) != 0:
        raise validators.ValidationError('Este campo debe de estar vacio!')
class LoginForm(Form):
    """Login form; the hidden honeypot field must remain empty."""
    username = TextField('Username',[validators.Required(message = 'El username es requerido')])
    password = PasswordField('Password', [validators.Required(message='El password es requerido')])
    honeypot = HiddenField("",[ length_honeypot ])
class CreateForm(Form):
    """Registration form with honeypot and a server-side uniqueness check."""
    username = TextField('Username', [
        validators.Required(message = 'El username es requerido.'),
        # NOTE(review): lowercase `validators.length` is a deprecated alias of
        # `Length` in WTForms — confirm the pinned version still provides it.
        validators.length(min=4, max=25, message='Ingrese un username valido.') ])
    email = EmailField('Correo electronico',[
        validators.Required(message = 'El email es requerido.'),
        validators.Email(message='Ingre un email valido.'),
        validators.length(min=4, max=25, message='Ingrese un email valido.') ])
    password = PasswordField('Password', [validators.Required(message='El password es requerido')])
    honeypot = HiddenField("",[ length_honeypot ])
    def validate_username(form, field):
        # WTForms invokes validate_<fieldname> automatically during validate().
        username = field.data
        user = User.query.filter_by(username = username).first()
        if user is not None:
            raise validators.ValidationError('El username ya se encuentra registrado!')
class CreateArticleForm(Form):
    """Placeholder for the article-creation form (fields not implemented yet)."""
    pass
|
{"/forms.py": ["/models.py"], "/manage.py": ["/config.py", "/forms.py", "/models.py"]}
|
20,517
|
eduardogpg/important_people
|
refs/heads/master
|
/common/advanced_message.py
|
class AdvancedMessage(object):
    """Collects flash-style UI messages at class level.

    NOTE(review): ``messages`` is a single class-level list, so entries are
    shared process-wide and accumulate across calls — confirm intended.
    """

    messages = []

    class Message():
        """A single message with a bootstrap-style alert identifier."""

        def __init__(self, message='', type_message='', identifier=''):
            self.message = message
            self.type_message = type_message
            self.identifier = identifier
            self.create_pre_idetifier()

        def create_pre_idetifier(self):
            # Builds e.g. "alert alert-danger". The attribute name keeps the
            # original "idetifier" typo because templates/callers use it.
            self.pre_idetifier = f"alert {self.identifier}"

    @classmethod
    def add(cls, message='', type_message='', identifier=''):
        """Create a Message and append it to the shared list."""
        cls.messages.append(cls.Message(message, type_message, identifier))

    @classmethod
    def get_messages(cls):
        """Return the shared message list (the live list, not a copy)."""
        return cls.messages
|
{"/forms.py": ["/models.py"], "/manage.py": ["/config.py", "/forms.py", "/models.py"]}
|
20,518
|
eduardogpg/important_people
|
refs/heads/master
|
/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask_sqlalchemy import SQLAlchemy
from werkzeug.security import generate_password_hash
from werkzeug.security import check_password_hash
import datetime
# Single shared SQLAlchemy instance; bound to the app via db.init_app(app).
db = SQLAlchemy()
class User(db.Model):
    """Application user; the password is stored as a werkzeug hash."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(25), unique=True)
    email = db.Column(db.String(30))
    # NOTE(review): werkzeug hashes can exceed 66 characters depending on the
    # hashing method — confirm the column width is sufficient.
    password = db.Column(db.String(66))
    created_date = db.Column(db.DateTime, default=datetime.datetime.now)
    updated_date = db.Column(db.DateTime, default=datetime.datetime.now)
    def __init__(self, username, password, email):
        self.username = username
        # Stored hashed, never in plain text.
        self.password = self.__create_pasword(password)
        self.email = email
    def __create_pasword(self, password):
        # Name keeps the original "pasword" typo: it is name-mangled and
        # renaming would change the class's private attribute layout.
        return generate_password_hash(password)
    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password, password)
class Article(db.Model):
    """A published article (title + body); no author relationship yet."""
    __tablename__ = 'articles'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(50))
    content = db.Column(db.Text)
    created_date = db.Column(db.DateTime, default=datetime.datetime.now)
    updated_date = db.Column(db.DateTime, default=datetime.datetime.now)
|
{"/forms.py": ["/models.py"], "/manage.py": ["/config.py", "/forms.py", "/models.py"]}
|
20,519
|
eduardogpg/important_people
|
refs/heads/master
|
/manage.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Eduardo Ismael García Pérez'
__contact__ = '@eduardo_gpg'
from flask import Flask
from flask import render_template
from flask import request
from flask import session
from flask import redirect
from flask import url_for
from flask import flash
from flask import Markup
from flask_wtf.csrf import CsrfProtect
from config import DevelopmentConfig
from forms import LoginForm
from forms import CreateForm
from models import db as database
from models import User
from common import AdvancedMessage
# Flask application setup; csrf.init_app(app) is wired in the __main__ block.
app = Flask(__name__)
app.config.from_object(DevelopmentConfig)
csrf = CsrfProtect()
@app.after_request
def after_request(response):
    # Currently a pass-through hook; kept as an extension point.
    return response
def create_session(username, user_id):
    """Store the authenticated user's name and id in the Flask session."""
    session['username'] = username
    session['id'] = user_id
def success_authentication(request, user):
    """Open a session for *user*, flash a welcome message, go to the dashboard."""
    create_session(user.username, user.id)
    success_message = 'Bienvenido a la plataforma {}'.format(user.username)
    flash(success_message)
    return redirect(url_for('dashboard'))
@app.route('/', methods = ['GET'])
def index():
    """Public landing page."""
    return render_template('index.html')
@app.route('/user/new', methods = ['GET', 'POST'])
def user_new():
    """Registration: on a valid POST, create the user and log them in."""
    create_form = CreateForm(request.form)
    if request.method == 'POST' and create_form.validate():
        username = create_form.username.data
        password = create_form.password.data
        email = create_form.email.data
        # User.__init__ hashes the password before storage.
        user = User(username, password, email)
        database.session.add(user)
        database.session.commit()
        return success_authentication(request,user)
    return render_template('user/new.html', form = create_form)
@app.route('/login', methods = ['GET','POST'])
def login():
    """Authenticate the user; on bad credentials queue an error message."""
    login_form = LoginForm(request.form)
    if request.method == 'POST' and login_form.validate():
        username = login_form.username.data
        password = login_form.password.data
        user = User.query.filter_by(username = username).first()
        if user is not None and user.verify_password(password):
            return success_authentication(request, user)
        else:
            error_message = 'Usuario o password incorrectos.'
            # NOTE(review): AdvancedMessage.messages is a class-level list, so
            # errors accumulate across requests and users — confirm intended.
            AdvancedMessage.add(message = error_message, identifier = 'alert-danger')
    return render_template('login.html', form = login_form, messages = AdvancedMessage.get_messages())
@app.route('/logout', methods = ['GET'])
def logout():
    """Drop the session username and return to the login page."""
    # NOTE(review): session['id'] set in create_session is not removed here.
    session.pop('username', None)
    return redirect(url_for('login'))
@app.route('/dashboard', methods = ['GET'])
def dashboard():
    """Logged-in landing page.

    NOTE(review): raises KeyError when no session exists — there is no
    login-required guard; confirm access is controlled elsewhere.
    """
    username = session['username']
    is_authenticated = True
    return render_template('user/dashboard.html', username = username, is_authenticated = is_authenticated)
if __name__ == '__main__':
    # Wire extensions, create tables on first run, then start the dev server.
    csrf.init_app(app)
    database.init_app(app)
    with app.app_context():
        database.create_all()
    app.run()
|
{"/forms.py": ["/models.py"], "/manage.py": ["/config.py", "/forms.py", "/models.py"]}
|
20,545
|
ciampluca/unsupervised_counting
|
refs/heads/master
|
/train_adv.py
|
import sys
import tqdm
import os
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
from torch.utils.data.sampler import WeightedRandomSampler
from torchvision.transforms.functional import normalize
from config import Config
from models.CSRNet import CSRNet
from models.discriminator import FCDiscriminator
from utils.utils import random_seed, get_transforms, compute_discriminator_accuracy
from datasets.NDISPark import NDISPark
import warnings
warnings.filterwarnings("ignore", category=UserWarning)
# Config default values
# NOTE(review): the dataset paths are machine-specific absolute paths —
# presumably overridable via CLI arguments; verify against the arg parser.
EPOCHS = 300
BATCH_SIZE = 1
ROOT_DATASET = "/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark"
ROOT_VAL_DATASET = "/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark"
LAMBDA_ADV_LOSS = 5e-05
LAMBDA_DISC_LOSS = 0.0001
def main(args):
    """Adversarial domain-adaptation training of CSRNet for vehicle counting.

    Each batch alternates two phases: (a) the counting model is trained with
    an MSE loss on labeled *source* images plus an adversarial loss that
    pushes its *target*-domain predictions to fool the discriminator; (b) the
    discriminator is trained to separate source predictions (label 0) from
    target predictions (label 1).  After every epoch the model is validated
    on the target split, metrics are written to tensorboard, and the best
    MAE/MSE/ARE checkpoints are saved.

    Args:
        args: argparse.Namespace carrying epochs, batch_size,
            source_dataset_path, target_dataset_path, lambda_adv and
            lambda_disc (see the __main__ block).
    """
    print(args)
    # Loading configuration
    cfg = Config(
        epochs=args.epochs,
        batch_size=args.batch_size,
        root_dataset=args.source_dataset_path,
        root_val_dataset=args.target_dataset_path,
        lambda_adv_loss=args.lambda_adv,
        lambda_disc_loss=args.lambda_disc,
    )
    # Reproducibility
    seed = cfg.seed
    if torch.cuda.is_available():
        random_seed(seed, True)
    else:
        random_seed(seed, False)
    # Defining exp name
    exp_name = "_Train{}_Val{}_{}_advLoss{}_discLoss{}_lr{}_batchSize{}".\
        format(cfg.root_dataset.rsplit("/", 1)[1], cfg.root_val_dataset.rsplit("/", 1)[1], cfg.model_name,
               cfg.lambda_adv_loss, cfg.lambda_disc_loss, cfg.lr_base, cfg.batch_size)
    # Creating tensorboard writer
    tensorboard_writer = SummaryWriter(comment=exp_name)
    # Loading model
    model = CSRNet().to(cfg.device)
    # Loading discriminator
    discriminator = FCDiscriminator(num_classes=1).to(cfg.device)
    # Defining criterion and optimizer for the model
    criterion = nn.MSELoss()
    optimizer = torch.optim.Adam(
        model.parameters(),
        lr=cfg.lr_base,
    )
    # Defining criterion and optimizer for the discriminator
    discriminator_criterion = nn.BCEWithLogitsLoss()
    discriminator_optimizer = torch.optim.Adam(
        params=discriminator.parameters(),
        lr=cfg.discriminator_lr_base,
        betas=(0.9, 0.99),
    )
    # Creating datasets
    train_dataset = NDISPark(
        root_dataset=cfg.root_dataset,
        phase="source",
        transform=get_transforms(general_transforms=True, train=True),
        img_transform=get_transforms(img_transforms=True),
        target_transform=get_transforms(target_transforms=True),
    )
    val_dataset = NDISPark(
        root_dataset=cfg.root_val_dataset,
        phase="target",
        transform=get_transforms(general_transforms=True),
        img_transform=get_transforms(img_transforms=True),
        target_transform=get_transforms(target_transforms=True),
    )
    target_dataset = NDISPark(
        root_dataset=cfg.root_val_dataset,
        phase="target",
        transform=get_transforms(general_transforms=True),
        img_transform=get_transforms(img_transforms=True),
        target_transform=get_transforms(target_transforms=True),
    )
    # Creating samplers for target dataloader
    # Uniform weights + replacement so the (possibly smaller) target set can
    # be drawn len(train_dataset) times per epoch, in lockstep with source.
    weights = [1.0] * len(target_dataset)
    target_sampler = WeightedRandomSampler(
        weights=weights,
        num_samples=len(train_dataset),
        replacement=True
    )
    # Creating dataloaders
    train_dataloader = DataLoader(
        train_dataset,
        shuffle=True,
        batch_size=cfg.batch_size,
        num_workers=cfg.num_workers,
        pin_memory=torch.cuda.is_available(),
    )
    target_dataloader = DataLoader(
        target_dataset,
        batch_size=cfg.batch_size,
        sampler=target_sampler,
        pin_memory=torch.cuda.is_available(),
        num_workers=cfg.num_workers,
    )
    val_dataloader = DataLoader(
        val_dataset,
        shuffle=False,
        batch_size=1,
        num_workers=cfg.num_workers,
        pin_memory=torch.cuda.is_available(),
    )
    # Defining labels for adversarial training
    source_label = 0
    target_label = 1
    min_mae, min_mse, min_are = sys.maxsize, sys.maxsize, sys.maxsize
    min_mae_epoch, min_mse_epoch, min_are_epoch = -1, -1, -1
    # Iterating over epochs...
    # NOTE(review): range(1, cfg.epochs) runs cfg.epochs - 1 epochs; confirm
    # whether this off-by-one is intended.
    for epoch in range(1, cfg.epochs):
        model.train()
        discriminator.train()
        epoch_loss, disc_epoch_loss, model_epoch_loss, adv_epoch_loss = 0.0, 0.0, 0.0, 0.0
        epoch_mae, epoch_mse, epoch_are = 0.0, 0.0, 0.0
        epoch_disc_adv_acc, epoch_disc_1_acc, epoch_disc_2_acc = 0.0, 0.0, 0.0
        # Creating an iterator over the target dataloader
        target_iterator = iter(target_dataloader)
        # Training for one epoch
        for i, source_data in enumerate(tqdm.tqdm(train_dataloader)):
            # Setting grads to zero
            optimizer.zero_grad()
            discriminator_optimizer.zero_grad()
            ######################
            # Training the model #
            ######################
            # Don't accumulate grads in Discriminator
            for param in discriminator.parameters():
                param.requires_grad = False
            # TRAINING WITH SOURCE LABELED IMAGE
            # Retrieving source image and gt
            source_image = source_data['image'].to(cfg.device)
            source_gt_density_map = source_data['densitymap'].to(cfg.device)
            # Computing pred density map
            source_pred_density_map = model(source_image)
            # Computing loss
            source_loss = criterion(source_pred_density_map, source_gt_density_map)
            source_loss.backward()
            model_epoch_loss += source_loss.item()
            # Computing MAE, MSE and ARE
            mae = abs(source_pred_density_map.data.sum() - source_gt_density_map.data.sum())
            epoch_mae += mae.item()
            mse = (source_pred_density_map.data.sum() - source_gt_density_map.data.sum()) ** 2
            epoch_mse += mse.item()
            are = abs(source_pred_density_map.data.sum() - source_gt_density_map.data.sum()) / torch.clamp(
                source_gt_density_map.data.sum(), min=1)
            epoch_are += are.item()
            # TRAINING WITH TARGET UNLABELED IMAGE (ADV LOSS)
            # Retrieving target image
            target_data = target_iterator.__next__()
            target_image = target_data['image'].to(cfg.device)
            # Computing pred density map
            target_pred_density_map = model(target_image)
            # Computing output of the discriminator
            discriminator_pred = discriminator(target_pred_density_map)
            # Computing adv loss (between discriminator prediction and source-values label)
            # i.e. reward the model when the discriminator mistakes target
            # predictions for source ones.
            source_values_label = torch.FloatTensor(discriminator_pred.data.size()).fill_(source_label).to(cfg.device)
            adv_loss = discriminator_criterion(discriminator_pred, source_values_label)
            adv_loss = cfg.lambda_adv_loss * adv_loss
            adv_loss.backward()
            adv_epoch_loss += adv_loss.item()
            # Computing accuracy of the discriminator
            disc_adv_acc = compute_discriminator_accuracy(source_values_label, discriminator_pred, cfg)
            epoch_disc_adv_acc += disc_adv_acc
            # Computing total loss and backwarding it
            # (gradients were already accumulated by the two backward() calls
            # above, so `loss` here is tracked for logging only)
            loss = source_loss + adv_loss
            epoch_loss += loss.item()
            # loss.backward()
            optimizer.step()
            ##############################
            # Training the discriminator #
            ##############################
            # Bringing back requires_grad
            for param in discriminator.parameters():
                param.requires_grad = True
            # TRAINING WITH SOURCE LABELED IMAGE
            # Computing output of the discriminator
            # detach(): discriminator updates must not flow into the model
            source_pred_density_map = source_pred_density_map.detach()
            discriminator_pred = discriminator(source_pred_density_map)
            # Computing discriminator loss (between discriminator prediction and source-values label)
            source_values_label = torch.FloatTensor(discriminator_pred.data.size()).fill_(source_label).to(cfg.device)
            disc_loss = discriminator_criterion(discriminator_pred, source_values_label)
            disc_loss = cfg.lambda_disc_loss * disc_loss
            # Computing accuracy of the discriminator
            disc_1_acc = compute_discriminator_accuracy(source_values_label, discriminator_pred, cfg)
            epoch_disc_1_acc += disc_1_acc
            # Backwarding loss
            disc_epoch_loss += disc_loss.item()
            disc_loss.backward()
            # TRAINING WITH TARGET UNLABELED IMAGE
            # Computing output of the discriminator
            target_pred_density_map = target_pred_density_map.detach()
            discriminator_pred = discriminator(target_pred_density_map)
            # Computing discriminator loss (between discriminator prediction and target-values label)
            target_values_label = torch.FloatTensor(discriminator_pred.data.size()).fill_(target_label).to(cfg.device)
            disc_loss = discriminator_criterion(discriminator_pred, target_values_label)
            disc_loss = cfg.lambda_disc_loss * disc_loss
            # Computing accuracy of the discriminator
            disc_2_acc = compute_discriminator_accuracy(target_values_label, discriminator_pred, cfg)
            epoch_disc_2_acc += disc_2_acc
            # Backwarding loss
            disc_epoch_loss += disc_loss.item()
            disc_loss.backward()
            # Performing optimizer step
            discriminator_optimizer.step()
        # Per-epoch training metrics, normalized by the number of samples
        tensorboard_writer.add_scalar('Train/Loss', epoch_loss / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/Disc_Loss', disc_epoch_loss / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/MAE', epoch_mae / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/MSE', epoch_mse / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/ARE', epoch_are / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/Discr_Adv_Acc', epoch_disc_adv_acc / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/Discr_1_Acc', epoch_disc_1_acc / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/Discr_2_Acc', epoch_disc_2_acc / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/Model_Loss', model_epoch_loss / len(train_dataset), epoch)
        tensorboard_writer.add_scalar('Train/Adv_Loss', adv_epoch_loss / len(train_dataset), epoch)
        # Validate the epoch
        model.eval()
        with torch.no_grad():
            epoch_mae, epoch_mse, epoch_are, epoch_loss = 0.0, 0.0, 0.0, 0.0
            for i, data in enumerate(tqdm.tqdm(val_dataloader)):
                # Retrieving image and density map
                image = data['image'].to(cfg.device)
                gt_density_map = data['densitymap'].to(cfg.device)
                # Computing output and val loss
                pred_density_map = model(image)
                val_loss = criterion(pred_density_map, gt_density_map)
                epoch_loss += val_loss.item()
                pred_density_map = pred_density_map.detach()
                # Computing MAE and MSE
                mae = abs(pred_density_map.data.sum() - gt_density_map.data.sum())
                epoch_mae += mae.item()
                mse = (pred_density_map.data.sum() - gt_density_map.data.sum()) ** 2
                epoch_mse += mse.item()
                are = abs(pred_density_map.data.sum() - gt_density_map.data.sum()) / torch.clamp(
                    gt_density_map.data.sum(), min=1)
                epoch_are += are.item()
            epoch_mae /= len(val_dataset)
            epoch_mse /= len(val_dataset)
            epoch_are /= len(val_dataset)
            epoch_loss /= len(val_dataset)
            # Saving last model
            torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, "last.pth"))
            # Eventually saving best models
            if epoch_mae < min_mae:
                min_mae, min_mae_epoch = epoch_mae, epoch
                torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, str(epoch) + "_mae.pth"))
            if epoch_mse < min_mse:
                min_mse, min_mse_epoch = epoch_mse, epoch
                torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, str(epoch) + "_mse.pth"))
            if epoch_are < min_are:
                min_are, min_are_epoch = epoch_are, epoch
                torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, str(epoch) + "_are.pth"))
            print('Epoch ', epoch, ' MAE: ', epoch_mae, ' Min MAE: ', min_mae, ' Min Epoch: ', min_mae_epoch,
                  min_mae_epoch, 'MSE: ', epoch_mse, 'ARE: ', epoch_are)
            tensorboard_writer.add_scalar('Val/MAE', epoch_mae, epoch)
            tensorboard_writer.add_scalar('Val/MSE', epoch_mse, epoch)
            tensorboard_writer.add_scalar('Val/ARE', epoch_are, epoch)
            tensorboard_writer.add_scalar('Val/Loss', epoch_loss, epoch)
            # Log the last validation sample: de-normalized input image plus
            # the predicted and ground-truth density maps scaled to [0, 1]
            tensorboard_writer.add_image(str(epoch) + '/Image',
                                         normalize(image.cpu().squeeze(dim=0),
                                                   mean=[-0.5 / 0.225, -0.5 / 0.225, -0.5 / 0.225],
                                                   std=[1 / 0.225, 1 / 0.225, 1 / 0.225]))
            tensorboard_writer.add_image(
                str(epoch) + '/Pred Count:' + str('%.2f' % (pred_density_map.cpu().squeeze(dim=0).sum())),
                torch.abs(pred_density_map.squeeze(dim=0)) / torch.max(pred_density_map.squeeze(dim=0)))
            tensorboard_writer.add_image(
                str(epoch) + '/GT count:' + str('%.2f' % (gt_density_map.cpu().squeeze(dim=0).sum())),
                gt_density_map.squeeze(dim=0) / torch.max(gt_density_map.squeeze(dim=0)))
if __name__ == "__main__":
    import argparse

    # Command-line flags mirror the module-level default constants above.
    arg_parser = argparse.ArgumentParser(description=__doc__)
    arg_parser.add_argument('--source-dataset-path', default=ROOT_DATASET, help='source dataset root path')
    arg_parser.add_argument('--target-dataset-path', default=ROOT_VAL_DATASET, help='target dataset root path')
    arg_parser.add_argument('--epochs', default=EPOCHS, type=int, help='number of total epochs to run')
    arg_parser.add_argument('-b', '--batch-size', default=BATCH_SIZE, type=int, help='batch_size')
    arg_parser.add_argument('--lambda-adv', default=LAMBDA_ADV_LOSS, type=float, help='lambda for the adv loss')
    arg_parser.add_argument('--lambda-disc', default=LAMBDA_DISC_LOSS, type=float, help='lambda for the discriminator loss')
    main(arg_parser.parse_args())
|
{"/train_adv.py": ["/config.py", "/utils/utils.py", "/datasets/NDISPark.py"], "/datasets/NDISPark.py": ["/utils/utils.py"], "/test.py": ["/datasets/NDISPark.py", "/utils/utils.py"], "/utils/utils.py": ["/utils/transforms.py"]}
|
20,546
|
ciampluca/unsupervised_counting
|
refs/heads/master
|
/datasets/NDISPark.py
|
import os
from PIL import Image
import numpy as np
import torch
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from torchvision.transforms.functional import to_pil_image, normalize
from utils.utils import get_transforms
class NDISPark(Dataset):
    """NDIS Park vehicle-counting dataset: (image, density map) pairs.

    In the "test" phase only per-image vehicle counts are available, read
    from ``test_counting_gt.txt``; density maps are then blank float images.
    """

    def __init__(self, root_dataset, phase="source", transform=None, img_transform=None, target_transform=None):
        assert phase in ("source", "target", "test"), "phase not present"
        self.imgs_path = os.path.join(root_dataset, phase + '_data/images')
        self.densities_path = os.path.join(root_dataset, phase + '_data/densitymaps')
        # Keep only regular files found in the images directory.
        self.data_files = [entry for entry in os.listdir(self.imgs_path)
                           if os.path.isfile(os.path.join(self.imgs_path, entry))]
        self.transform = transform
        self.img_transform = img_transform
        self.target_transform = target_transform
        self.phase = phase
        # For the test split only the vehicle counts are needed.
        if phase == "test":
            self.gt = {}
            gt_txt_path = os.path.join(root_dataset, phase + "_data", "test_counting_gt.txt")
            with open(gt_txt_path) as gt_file:
                stripped = [raw.strip() for raw in gt_file.readlines()]
            # The final line of the file is dropped, as in the original code.
            for entry in stripped[:-1]:
                key, val = entry.split()
                self.gt[key] = float(val)

    def __len__(self):
        """Number of image files discovered on disk."""
        return len(self.data_files)

    def __getitem__(self, index):
        """Load one sample; returns a dict with image, densitymap, name (+num in test)."""
        index = index % len(self.data_files)
        fname = self.data_files[index]
        # Grayscale / RGBA inputs are normalized to RGB.
        img = Image.open(os.path.join(self.imgs_path, fname))
        if img.mode in ('L', 'RGBA'):
            img = img.convert('RGB')
        # Test phase has no real density maps: substitute a blank float image.
        if self.phase == "test":
            den_map = Image.new('F', img.size)
        else:
            den_map = Image.open(os.path.join(self.densities_path, fname.rsplit(".", 1)[0] + ".tiff"))
        if self.transform is not None:
            img, den_map = self.transform((img, den_map))
        if self.img_transform is not None:
            img = self.img_transform(img)
        if self.target_transform is not None:
            den_map = self.target_transform(den_map)
        sample = {'image': img, 'densitymap': den_map, 'name': fname}
        if self.phase == "test":
            # Attach the ground-truth vehicle count parsed in __init__.
            sample['num'] = self.gt.get(fname.rsplit(".", 1)[0])
        return sample
# # Testing code
# if __name__ == "__main__":
# root = "/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark"
# root_val = "/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark"
# phase = "target"
# DIM_RESIZE = None
#
# train_dataset = NDISPark(
# root_dataset=root,
# transform=get_transforms(general_transforms=True, train=True, dim_resize=DIM_RESIZE),
# img_transform=get_transforms(img_transforms=True),
# target_transform=get_transforms(target_transforms=True),
# )
# val_dataset = NDISPark(
# root_dataset=root_val,
# phase=phase,
# transform=get_transforms(general_transforms=True, dim_resize=DIM_RESIZE),
# img_transform=get_transforms(img_transforms=True,),
# target_transform=get_transforms(target_transforms=True),
# )
#
# train_dataloader = DataLoader(
# train_dataset,
# shuffle=False,
# batch_size=1,
# )
# val_dataloader = DataLoader(
# val_dataset,
# shuffle=False,
# batch_size=1,
# num_workers=1,
# )
#
# for i, data in enumerate(train_dataloader):
# name = data['name'][0].rsplit(".", 1)[0]
# print(name)
#
# image = data['image'].squeeze(dim=0)
# image = normalize(image, mean=[-0.5 / 0.225, -0.5 / 0.225, -0.5 / 0.225], std=[1 / 0.225, 1 / 0.225, 1 / 0.225])
# pil_image = to_pil_image(image)
# pil_image.save(os.path.join("../output_debug/", name + ".png"))
#
# if phase == "test":
# num = data['num'].cpu().item()
# print(num)
# else:
# density_map = data['densitymap'].squeeze(dim=0)
# pil_density_map = to_pil_image((density_map/torch.max(density_map))*255)
# np_density_map = density_map.cpu().detach().numpy().astype(np.float32)
# unique, counts = np.unique(np_density_map, return_counts=True)
# num = np.sum(np_density_map)
# num_double_check = len(unique)-1
# pil_density_map.save(os.path.join("../output_debug/", "density_" + name + ".tiff"))
# print(num)
# print(num_double_check)
|
{"/train_adv.py": ["/config.py", "/utils/utils.py", "/datasets/NDISPark.py"], "/datasets/NDISPark.py": ["/utils/utils.py"], "/test.py": ["/datasets/NDISPark.py", "/utils/utils.py"], "/utils/utils.py": ["/utils/transforms.py"]}
|
20,547
|
ciampluca/unsupervised_counting
|
refs/heads/master
|
/config.py
|
import os
import time
import torch
class Config:
    """Bundles every training hyper-parameter and path, with fallbacks.

    Falsy constructor arguments (None, 0, "") are replaced by the documented
    defaults; the checkpoint directory is created on construction.
    """

    def __init__(self, batch_size=None, lr_base=None, discriminator_lr_base=None, epochs=None, root_dataset=None,
                 root_val_dataset=None, checkpoint_folder=None, momentum=0.9, weight_decay=0.0001, model_name=None,
                 input_dim_resize=480, num_workers=4, lambda_adv_loss=0, lambda_disc_loss=0, dataset_random_split=None,
                 dataset_roi_masked=None, seed=10):
        def fallback(value, default):
            # Mirrors the "default when falsy" semantics of the original code.
            return default if not value else value

        self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
        self.batch_size = fallback(batch_size, 1)
        self.lr_base = fallback(lr_base, 1e-5)
        self.discriminator_lr_base = fallback(discriminator_lr_base, 1e-5)
        self.momentum = momentum
        self.dataset_random_split = dataset_random_split
        self.dataset_roi_masked = dataset_roi_masked
        self.weight_decay = weight_decay
        self.lambda_adv_loss = lambda_adv_loss
        self.lambda_disc_loss = lambda_disc_loss
        self.input_dim_resize = input_dim_resize
        self.epochs = fallback(epochs, 100)
        self.num_workers = num_workers
        self.root_dataset = fallback(root_dataset, './data/NDISPark')
        self.root_val_dataset = fallback(root_val_dataset, './data/NDISPark')
        # Last path component of the dataset root names the experiment.
        self.dataset_name = self.root_dataset.rsplit("/", 1)[1]
        self.model_name = fallback(model_name, 'CSRNet')
        self.date_and_time = time.strftime("%Y%m%d%H%M")
        self.checkpoint_folder = fallback(
            checkpoint_folder,
            os.path.join('./checkpoints', self.dataset_name, self.model_name, self.date_and_time))
        self.seed = seed
        # Ensure the checkpoint directory exists up front.
        os.makedirs(self.checkpoint_folder, exist_ok=True)
|
{"/train_adv.py": ["/config.py", "/utils/utils.py", "/datasets/NDISPark.py"], "/datasets/NDISPark.py": ["/utils/utils.py"], "/test.py": ["/datasets/NDISPark.py", "/utils/utils.py"], "/utils/utils.py": ["/utils/transforms.py"]}
|
20,548
|
ciampluca/unsupervised_counting
|
refs/heads/master
|
/utils/transforms.py
|
import random
from PIL import Image
import numpy as np
import torchvision.transforms.functional as F
class RandomHorizontalFlip(object):
    """Flip image and density map together, left-right, with probability 0.5."""

    def __call__(self, img_and_density):
        """img_and_density: (PIL.Image, PIL.Image) pair; returns the same pair, possibly flipped."""
        img, density_map = img_and_density
        if random.random() >= 0.5:
            # No flip for this sample.
            return img, density_map
        # Flip both members so image and map stay aligned.
        return img.transpose(Image.FLIP_LEFT_RIGHT), density_map.transpose(Image.FLIP_LEFT_RIGHT)
class PairedCrop(object):
    """Crop image and density map so both sides are multiples of `factor`.

    The network's maxpooling layers require input sizes divisible by the
    pooling factor, so both members of the pair are trimmed from the
    top-left corner to the nearest smaller multiple.
    """

    def __init__(self, factor=16):
        self.factor = factor

    @staticmethod
    def get_params(img, factor):
        # Returns (top, left, height, width) suitable for torchvision F.crop.
        w, h = img.size
        if w % factor or h % factor:
            return 0, 0, h - (h % factor), w - (w % factor)
        return 0, 0, h, w

    def __call__(self, img_and_density):
        """img_and_density: (PIL.Image, PIL.Image) pair; returns the cropped pair."""
        img, density_map = img_and_density
        top, left, height, width = self.get_params(img, self.factor)
        cropped_img = F.crop(img, top, left, height, width)
        cropped_map = F.crop(density_map, top, left, height, width)
        return cropped_img, cropped_map
class CustomResize(object):
    """Resize image and density map, preserving the density map's total sum."""

    def __init__(self, dim=480):
        self.dim = dim

    def __call__(self, img_and_density):
        img, density_map = img_and_density
        # Object count before resizing, used to renormalize afterwards.
        num_objs = np.sum(np.array(density_map))
        img = F.resize(img, size=self.dim, interpolation=Image.ANTIALIAS)
        density_map = F.resize(density_map, size=self.dim, interpolation=Image.NEAREST)
        resized = np.array(density_map)
        total = np.sum(resized)
        # Rescale so sum == #objects after resizing (skip all-zero maps).
        if total != 0.0:
            density_map = Image.fromarray(num_objs * resized / total, mode="F")
        return img, density_map
|
{"/train_adv.py": ["/config.py", "/utils/utils.py", "/datasets/NDISPark.py"], "/datasets/NDISPark.py": ["/utils/utils.py"], "/test.py": ["/datasets/NDISPark.py", "/utils/utils.py"], "/utils/utils.py": ["/utils/transforms.py"]}
|
20,549
|
ciampluca/unsupervised_counting
|
refs/heads/master
|
/test.py
|
import os
import tqdm
from PIL import Image
import numpy as np
import torch
from torch.utils.data import DataLoader
from models.CSRNet import CSRNet
from datasets.NDISPark import NDISPark
from utils.utils import get_transforms
# Parameters
# NOTE(review): machine-specific absolute paths — edit before running elsewhere.
ROOT_DATASET = "/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark"
PHASE = "test"
MODEL_NAME = "CSRNet"
MODEL_CHECKPOINT = "/home/luca/workspace/unsupervised_counting/checkpoints/NDISPark/CSRNet/202008041749/74_mae.pth"
# When True, metrics are computed against per-image counts from the gt txt file.
GT_TXT_FILE = True
RESULTS = "/home/luca/Downloads/temp_results/NDISPark/basic/results"
PREDS = "/home/luca/Downloads/temp_results/NDISPark/basic/preds"
def main():
    """Evaluate a saved CSRNet checkpoint on the NDISPark test split.

    Computes MAE/MSE/ARE either against the per-image counts from the
    ground-truth txt file (when GT_TXT_FILE is True) or against density-map
    sums, saves each predicted density map as a normalized PNG, and writes
    a summary line to a results file.
    """
    torch.backends.cudnn.enabled = False
    device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
    # Creating output folder
    preds_output_folder = os.path.join(PREDS, "obtained_with_best_model_mae")
    if not os.path.exists(preds_output_folder):
        os.makedirs(preds_output_folder)
    # Loading model
    model = CSRNet()
    # Loading checkpoint
    model.load_state_dict(torch.load(MODEL_CHECKPOINT))
    model.to(device)
    model.eval()
    dataset = NDISPark(
        root_dataset=ROOT_DATASET,
        phase=PHASE,
        transform=get_transforms(general_transforms=True),
        img_transform=get_transforms(img_transforms=True),
        target_transform=get_transforms(target_transforms=True),
    )
    dataloader = DataLoader(
        dataset,
        shuffle=False,
        batch_size=1,
        num_workers=1,
        pin_memory=torch.cuda.is_available(),
    )
    total_mae, total_mse, total_are = 0.0, 0.0, 0.0
    with torch.no_grad():
        for i, data in enumerate(tqdm.tqdm(dataloader)):
            # Retrieving image and density map
            image = data['image'].to(device)
            gt_density_map = data['densitymap'].to(device)
            # Computing pred density map
            pred_density_map = model(image)
            # Computing MAE, MSE and ARE
            if GT_TXT_FILE:
                # Ground truth is the per-image count from the txt file.
                gt_num = data['num'].cpu().item()
                mae = abs(pred_density_map.data.sum() - gt_num)
                total_mae += mae.item()
                mse = (pred_density_map.data.sum() - gt_num) ** 2
                total_mse += mse.item()
                are = abs(pred_density_map.data.sum() - gt_num) / gt_num
                total_are += are.item()
            else:
                mae = abs(pred_density_map.data.sum() - gt_density_map.data.sum())
                total_mae += mae.item()
                mse = (pred_density_map.data.sum() - gt_density_map.data.sum()) ** 2
                total_mse += mse.item()
                are = abs(pred_density_map.data.sum() - gt_density_map.data.sum()) / torch.clamp(
                    gt_density_map.data.sum(), min=1)
                total_are += are.item()
            # Save the predicted map as an 8-bit PNG, scaled to [0, 255].
            density_to_save = pred_density_map.detach()
            density_to_save = density_to_save.squeeze(0).squeeze(0).cpu().numpy()
            density_to_save = np.absolute(density_to_save)
            density_to_save = 255 * (density_to_save / np.max(density_to_save))
            density_to_save = density_to_save.astype(np.uint8)
            # density_to_save = (255 * (density_to_save - np.min(density_to_save)) / (
            #     np.max(density_to_save) - np.min(density_to_save))).astype(np.uint8)
            pil_density = Image.fromarray(density_to_save)
            pil_density.save(os.path.join(preds_output_folder, data['name'][0].rsplit(".", 1)[0] + ".png"))
            # pil_density.save(os.path.join(preds_output_folder, data['name'][0].rsplit(".", 1)[0] + ".tiff"))
            print("Image: {}, AE: {}, SE: {}, RE: {}".format(data['name'][0], mae.item(), mse.item(), are.item()))
    string_to_write = "Model: {}, Checkpoint: {}, MAE: {}, MSE: {}, ARE: {}".\
        format(MODEL_NAME, MODEL_CHECKPOINT, total_mae/len(dataset), total_mse/len(dataset), total_are/len(dataset))
    with open(os.path.join(RESULTS, "obtained_with_best_model_mae.txt"), "w") as result_file:
        result_file.write(string_to_write)
    print(string_to_write)
if __name__ == "__main__":
    # Script entry point: run the evaluation defined above.
    main()
|
{"/train_adv.py": ["/config.py", "/utils/utils.py", "/datasets/NDISPark.py"], "/datasets/NDISPark.py": ["/utils/utils.py"], "/test.py": ["/datasets/NDISPark.py", "/utils/utils.py"], "/utils/utils.py": ["/utils/transforms.py"]}
|
20,550
|
ciampluca/unsupervised_counting
|
refs/heads/master
|
/utils/utils.py
|
from PIL import Image
import numpy as np
import torch
from torchvision.transforms import Compose, ToTensor, Normalize
from utils.transforms import PairedCrop, RandomHorizontalFlip, CustomResize
def get_transforms(general_transforms=None, img_transforms=None, target_transforms=None, train=None, dim_resize=None):
    """Build a Compose pipeline for paired, image-only or target-only transforms.

    general_transforms: paired (image, density-map) ops — optional resize,
    optional train-time horizontal flip, then a factor-of-16 paired crop.
    img_transforms: tensor conversion + channel normalization for the image.
    target_transforms: tensor conversion for the density map.
    """
    steps = []
    if general_transforms:
        if dim_resize:
            steps.append(CustomResize(dim=dim_resize))
        if train:
            steps.append(RandomHorizontalFlip())
        steps.append(PairedCrop())
    if img_transforms:
        steps.extend([ToTensor(),
                      Normalize(mean=[0.5, 0.5, 0.5], std=[0.225, 0.225, 0.225])])
    if target_transforms:
        steps.append(ToTensor())
    return Compose(steps)
def random_seed(seed_value, use_cuda):
    """Seed the numpy and torch RNGs; make cuDNN deterministic when CUDA is used."""
    np.random.seed(seed_value)
    torch.manual_seed(seed_value)
    if use_cuda:
        # Deterministic conv algorithms, at the cost of autotuning speed.
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
def compute_discriminator_accuracy(label, pred, cfg):
    """Fraction of discriminator logits whose thresholded sigmoid matches `label`.

    `pred` holds raw logits; `label` holds 0/1 targets. Returns a scalar
    float tensor in [0, 1].
    """
    hard_pred = torch.sigmoid(pred) > .5
    target = label.type(torch.BoolTensor).to(cfg.device)
    matches = torch.eq(hard_pred, target).type(torch.FloatTensor)
    return matches.mean()
|
{"/train_adv.py": ["/config.py", "/utils/utils.py", "/datasets/NDISPark.py"], "/datasets/NDISPark.py": ["/utils/utils.py"], "/test.py": ["/datasets/NDISPark.py", "/utils/utils.py"], "/utils/utils.py": ["/utils/transforms.py"]}
|
20,565
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/tests/django/details/abstractmodels/Person.py
|
from django.db import models
from django.contrib.auth.models import User
from django.core.validators import MaxValueValidator, MinValueValidator
# Gender choices for Demographics.demographics_gender.
# Fix: the generator emitted GENDER twice — the second definition silently
# shadowed the first — and the surviving label misspelled "Male" as "Make".
# Collapsed to a single, corrected definition (same keys, same ordering as
# the definition that actually took effect).
GENDER = (
    ('M',"""Male"""),
    ('F',"""Female"""),
)
class Person(models.Model):
    # Abstract base carrying the bookkeeping columns shared by person models.
    person_id = models.AutoField("Person ID", primary_key=True)
    # Set once at INSERT time.
    person_creationDate = models.DateTimeField("Created on", auto_now_add=True)
    # Refreshed on every save().
    person_updateDate = models.DateTimeField("Updated on", auto_now=True)
    person_user = models.ForeignKey(User, verbose_name="Created by", related_name="created_by_user")
    class Meta: abstract = True
class Demographics(models.Model):
    # NOTE(review): generated code — 'demographics_weight' is assigned twice
    # below; the ForeignKey overrides the DecimalField, so the weight column
    # is silently lost. Also IntegerField takes no 'max_length' argument.
    # Both look like spreadsheet-generator bugs; fix upstream in the
    # generator rather than in this emitted file.
    demographics_gender = models.CharField("""Gender""", choices=GENDER, max_length=10)
    demographics_age = models.IntegerField("Age", max_length=3)
    demographics_weight = models.DecimalField("Weight", max_digits=5, decimal_places=3, validators=[MinValueValidator(20.000000),MaxValueValidator(200.000000)])
    demographics_weight = models.ForeignKey("Country", verbose_name="Country")
    class Meta: abstract = True
class AbstractPerson(Person,
        Demographics):
    # Generated abstract model combining the Person bookkeeping fields with
    # the Demographics section.
    def __unicode__(self): return str(self.person_id)+' - '+str(self.demographics_gender)+' - '+str(self.demographics_age)
    class Meta:
        abstract = True
        verbose_name = "Person"
        verbose_name_plural = "People"
    def ShowHideIf(self, checkingField, rules):
        # Intended: show the fields in `rules` only while checkingField holds
        # one of the ';'-separated values.
        # NOTE(review): the inner loop tests checkingField instead of the
        # loop variable `field` — looks like a generator bug; confirm.
        values, listOfFields = rules
        values = values.split(';')
        if str(self.__dict__[checkingField]) in values:
            for field in listOfFields:
                if not self.__dict__[checkingField]!=None: return False
        return True
    def ShowHideIfManyToMany(self, checkingField, rules):
        # Same intent as ShowHideIf but for many-to-many selections.
        # NOTE(review): compares related model instances (`v`) against the
        # list of string values — presumably never matches; verify upstream.
        values, listOfFields = rules
        values = values.split(';')
        selected = getattr(self,checkingField).all()
        active = False
        for v in selected:
            if v in values:
                active=True
                break
        if active:
            for field in listOfFields:
                if self.__dict__[checkingField]==None: return False
        return True
    def is_complete(self):
        # True when every demographics field is non-null ('demographics_weight'
        # is checked twice because the generator emitted it twice).
        return getattr(self,'demographics_gender')!=None and \
            getattr(self,'demographics_age')!=None and \
            getattr(self,'demographics_weight')!=None and \
            getattr(self,'demographics_weight')!=None
    is_complete.short_description="Complete"
    is_complete.boolean = True
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,566
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/tests/django/details/abstractmodels/Country.py
|
from django.db import models
from django.contrib.auth.models import User
from django.core.validators import MaxValueValidator, MinValueValidator
class Country(models.Model):
    # NOTE(review): a second class of the same name is defined immediately
    # below and shadows this one, so this definition (and country_name) is
    # never reachable — generator artifact.
    country_name = models.CharField("Name", max_length=50)
    class Meta: abstract = True
class Country(models.Model):
    # Redefinition: shadows the earlier Country class of the same name.
    country_id = models.AutoField("Id", primary_key=True)
    class Meta: abstract = True
class AbstractCountry(Country):
    """Abstract base for the concrete Country model.

    Fix: the generated code declared ``class AbstractCountry(Country,
    Country)`` — listing the same base twice raises ``TypeError: duplicate
    base class`` at import time, so this module could never load. A single
    base is equivalent and valid.
    """
    def __unicode__(self): return str(self.country_name)
    class Meta:
        abstract = True
        verbose_name = "Country"
        verbose_name_plural = "Countries"
    def ShowHideIf(self, checkingField, rules):
        # Show `listOfFields` only while checkingField holds a listed value.
        # NOTE(review): the loop tests checkingField rather than `field`;
        # preserved as-is to match the sibling generated models.
        values, listOfFields = rules
        values = values.split(';')
        if str(self.__dict__[checkingField]) in values:
            for field in listOfFields:
                if not self.__dict__[checkingField]!=None: return False
        return True
    def ShowHideIfManyToMany(self, checkingField, rules):
        # Many-to-many variant of ShowHideIf.
        values, listOfFields = rules
        values = values.split(';')
        selected = getattr(self,checkingField).all()
        active = False
        for v in selected:
            if v in values:
                active=True
                break
        if active:
            for field in listOfFields:
                if self.__dict__[checkingField]==None: return False
        return True
    def is_complete(self):
        # True when both country columns are non-null.
        return getattr(self,'country_id')!=None and \
            getattr(self,'country_name')!=None
    is_complete.short_description="Complete"
    is_complete.boolean = True
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,567
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/tests/test1.py
|
from googlespreadsheet2django.builder import export_code
if __name__ == "__main__":
    # Generate Django code from the given Google-spreadsheet key into ./django.
    export_code('1HWhdkKIHUK-tOEJWEp6gVh3evyV1YipgqV7QeTsUtYI', 'django')
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,568
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/tests/django/details/admins/PersonAdmin.py
|
from details.models import Person
from django.forms import Textarea, CheckboxSelectMultiple
from django.forms.models import ModelMultipleChoiceField
from django.utils.translation import ugettext as _
from django.contrib import admin
from django.conf import settings
from django.db import models
from common.admintools import export_xlsx, printable_html
class PersonAdminAbstract(admin.ModelAdmin):
change_form_template = 'admin/my_change_form.html'
list_display = ('person_id','demographics_gender','demographics_age','person_creationDate','person_updateDate','person_user',)
list_filter = ('demographics_gender','demographics_age',)
search_fields = ['person_id',]
readonly_fields = ('person_id', 'person_creationDate', 'person_updateDate', 'person_user',)
fieldsets = [
('Demographics',{
'classes': ('suit-tab suit-tab-2demographics',),
'fields': ['demographics_gender','demographics_age','demographics_weight','demographics_weight']
}),
]
suit_form_tabs = [
(u'2demographics', u'2. Demographics')
]
radio_fields = {
'demographics_gender': admin.VERTICAL
}
actions = [export_xlsx,]
formfield_overrides = dict((
(models.TextField,dict((( 'widget',Textarea(attrs=dict(rows=5, cols=120,style='width: 600px;') )),) )),
(models.ManyToManyField,dict((('widget',CheckboxSelectMultiple),)))
),)
class Media:
css = dict(all=['generic.css','fixadmin.css'])
js = ('generic.js','models/person.js')
def save_model(self, request, obj, form, change):
if obj.pk==None: obj.person_user = request.user
super(PersonAdminAbstract, self).save_model(request, obj, form, change)
def queryset(self, request):
qs = super(PersonAdminAbstract, self).queryset(request)
groups = request.user.groups.all()
qs = qs.filter( person_user__groups = groups ).distinct()
return qs
def get_actions(self, request):
actions = super(PersonAdminAbstract, self).get_actions(request)
user = request.user
#if not user.groups.filter(name=settings.HTML_EXPORTER_PROFILE_GROUP).exists(): del actions['printable_html']
if not user.groups.filter(name=settings.EXCEL_EXPORTER_PROFILE_GROUP).exists(): del actions['export_xlsx']
return actions
def construct_change_message(self, request, form, formsets):
message = super(PersonAdminAbstract, self).construct_change_message(request, form, formsets)
change_message = []
if form.changed_data:
values = []
for x in form.changed_data:
field = form.fields[x]
initial = form.initial[x]
value = form.cleaned_data[x]
if isinstance(field, ModelMultipleChoiceField):
value = [int(y.pk) for y in value]
initial = [int(y) for y in initial]
values.append( _("<b>%s</b>: <span style='color:#4682B4' >%s</span> -> <span style='color:#00A600' >%s</span>" % (x, str(initial), str(value)) ) )
change_message.append( '<ul><li>%s</li></ul>' % '</li><li>'.join(values) )
message += ' '.join(change_message)
return message
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,569
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/tests/django/details/admin.py
|
##### auto:start:Person #####
from models import Person
from admins.PersonAdmin import *
class PersonAdmin(PersonAdminAbstract):
pass
##### auto:end:Person #####
##### auto:start:Country #####
from models import Country
from admins.CountryAdmin import *
class CountryAdmin(CountryAdminAbstract):
pass
##### auto:end:Country #####
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
20,570
|
UmSenhorQualquer/googlespreadsheet2django
|
refs/heads/master
|
/googlespreadsheet2django/answers/choice.py
|
class Choice(object):
def __init__(self, code, label):
self._code = code
self._label = label
def __unicode__(self): return "\t('%s',\"\"\"%s\"\"\")" % (self._code, self._label)
def __str__(self): return self.__unicode__()
|
{"/tests/test1.py": ["/googlespreadsheet2django/builder.py"], "/googlespreadsheet2django/answers/answers.py": ["/googlespreadsheet2django/answers/choice.py"], "/googlespreadsheet2django/models/models_loader.py": ["/googlespreadsheet2django/models/field.py", "/googlespreadsheet2django/models/model.py"], "/googlespreadsheet2django/answers/answers_loader.py": ["/googlespreadsheet2django/answers/choice.py", "/googlespreadsheet2django/answers/answers.py"], "/googlespreadsheet2django/builder.py": ["/googlespreadsheet2django/models/models_loader.py", "/googlespreadsheet2django/answers/answers_loader.py"], "/googlespreadsheet2django/models/abstract_model.py": ["/googlespreadsheet2django/models/field.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.