index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
12,258
|
tominsam/feedify
|
refs/heads/main
|
/urls.py
|
from django.conf.urls import *
from django.contrib import admin

# Populate the admin registry before urlpatterns references admin.site.urls.
admin.autodiscover()

from flickr.feeds import FlickrPhotoFeed
from instagram.feeds import InstagramPhotoFeed

# Legacy Django URLconf: patterns('') with dotted-string view names predates
# Django 1.10; feed classes are instantiated and used directly as views.
urlpatterns = patterns('',
    (r'^feedify-admin/', include(admin.site.urls)),
    url(r'^$', "core.views.index"),
    url(r'^flickr/$', "flickr.views.index"),
    url(r'^flickr/auth/$', "flickr.views.auth"),
    # Captured group is the per-user feed secret (see AccessToken.feed_secret).
    url(r'^flickr/feed/([^/]+)/$', FlickrPhotoFeed()),
    url(r'^instagram/$', "instagram.views.index"),
    url(r'^instagram/auth/$', "instagram.views.auth"),
    url(r'^instagram/feed/([^/]+)/$', InstagramPhotoFeed()),
)
|
{"/instagram/feeds.py": ["/instagram/models.py", "/flickr/feeds.py"], "/flickr/feeds.py": ["/flickr/models.py"], "/instagram/admin.py": ["/instagram/models.py"], "/urls.py": ["/flickr/feeds.py", "/instagram/feeds.py"], "/flickr/admin.py": ["/flickr/models.py"]}
|
12,259
|
tominsam/feedify
|
refs/heads/main
|
/instagram/models.py
|
from django.db import models
from django.core.cache import cache
import urllib2
import datetime
import uuid
import json
import time
class InstagramException(Exception):
    """Raised when the Instagram API reports an error.

    Attributes:
        code: the API error code.
        message: human-readable error description.
    """
    def __init__(self, code, message):
        self.code = code
        # Store the message explicitly: relying on the implicit
        # Exception.message attribute is deprecated (and gone in Python 3),
        # so __unicode__ would otherwise break.
        self.message = message
        super(InstagramException, self).__init__(message)

    def __unicode__(self):
        return u"%s: %s"%(self.code, self.message)
class AccessToken(models.Model):
    """Instagram OAuth access token; one row per Instagram user (userid is unique)."""
    key = models.CharField(max_length=100, null=False, blank=False, unique=True)
    created = models.DateTimeField(default=datetime.datetime.utcnow, null=False, blank=False)
    # When the feed was last fetched (set by touch()); null until first fetch.
    fetched = models.DateTimeField(null=True)
    updated = models.DateTimeField(blank=False, null=False)
    username = models.CharField(max_length=100, null=False, blank=False)
    userid = models.CharField(max_length=20, null=False, blank=False, unique=True)
    # Unguessable token embedded in the public feed URL (assigned on first save).
    feed_secret = models.CharField(max_length=13, null=False, blank=False, unique=True)

    def __str__(self):
        return self.key

    @classmethod
    def from_string(cls, string):
        """Create or update an AccessToken from Instagram's JSON token response."""
        data = json.loads(string)
        properties = dict(
            key = data["access_token"],
            username = data["user"]["username"],
            userid = data["user"]["id"],
            updated = datetime.datetime.utcnow(),
        )
        token, created = cls.objects.get_or_create(userid=properties["userid"], defaults=properties)
        if not created:
            # Existing user re-authenticated: refresh the stored token/profile.
            for k, v in properties.items():
                setattr(token, k, v)
            token.save()
        return token

    def save(self, *args, **kwargs):
        # Lazily assign the feed-URL secret the first time the row is saved.
        if not self.feed_secret:
            self.feed_secret = str(uuid.uuid4())[:13]
        return super(AccessToken, self).save(*args, **kwargs)

    def get_photos(self, method="users/self/feed"):
        """Return recent photos from the given Instagram API endpoint.

        Responses are cached for 120 seconds per (user, method). Records the
        request duration in ``self.last_time`` (None on cache hit or error).
        Any fetch/parse failure yields an empty list (deliberate best effort).
        """
        cache_key = 'instagram_items_%s_%s'%(self.id, method)
        self.last_time = None
        photos = cache.get(cache_key)
        if not photos:
            url = "https://api.instagram.com/v1/%s?access_token=%s"%(method, self.key)
            start = time.time()
            try:
                conn = urllib2.urlopen(url)
                data = json.loads(conn.read())
            except Exception:
                # Best-effort: network/JSON failures are treated as "no photos".
                return []
            self.last_time = time.time() - start
            photos = data["data"]
            cache.set(cache_key, photos, 120)
        for p in photos:
            # API sends created_time as a unix-timestamp string; convert for templates.
            p["created_time"] = datetime.datetime.utcfromtimestamp(float(p["created_time"]))
            if not p["link"]:
                # private photos don't have public links. link to full-rez image instead.
                p["link"] = p["images"]["standard_resolution"]["url"]
        return photos

    def touch(self):
        """Record that the feed was just fetched."""
        self.fetched = datetime.datetime.utcnow()
        self.save()
|
{"/instagram/feeds.py": ["/instagram/models.py", "/flickr/feeds.py"], "/flickr/feeds.py": ["/flickr/models.py"], "/instagram/admin.py": ["/instagram/models.py"], "/urls.py": ["/flickr/feeds.py", "/instagram/feeds.py"], "/flickr/admin.py": ["/flickr/models.py"]}
|
12,260
|
tominsam/feedify
|
refs/heads/main
|
/session/middleware.py
|
# http://scratchpad.cmlenz.net/370f3e0d58804d38c3bc14e514272fda/
from base64 import b64decode, b64encode
import hashlib
from time import time
import zlib
import logging
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase
from django.utils.cache import patch_vary_headers
from django.utils.http import cookie_date
import json
MAX_COOKIE_SIZE = 4096
class SessionMiddleware(object):
    """Cookie-backed session middleware: the whole session lives in the cookie.

    Replaces Django's DB-backed session middleware with the SessionStore below.
    """
    def process_request(self, request):
        # Decode the session straight out of the cookie (empty dict if absent).
        cookie = request.COOKIES.get(settings.SESSION_COOKIE_NAME)
        request.session = SessionStore(cookie)

    def process_response(self, request, response):
        try:
            session = request.session
        except AttributeError:
            return response # 404 page, for instance
        if session.deleted:
            response.delete_cookie(settings.SESSION_COOKIE_NAME)
        else:
            if session.accessed:
                # Cache correctness: responses vary by cookie once read.
                patch_vary_headers(response, ('Cookie',))
            if session.modified or settings.SESSION_SAVE_EVERY_REQUEST:
                if session.get_expire_at_browser_close():
                    max_age = None
                    expires = None
                else:
                    max_age = session.get_expiry_age()
                    expires = cookie_date(time() + max_age)
                # Serialize, sign and compress the whole session dict.
                cookie = session.encode(session._session)
                if len(cookie) <= MAX_COOKIE_SIZE:
                    response.set_cookie(settings.SESSION_COOKIE_NAME, cookie,
                        max_age = max_age, expires=expires,
                        domain = settings.SESSION_COOKIE_DOMAIN,
                        path = settings.SESSION_COOKIE_PATH,
                        secure = settings.SESSION_COOKIE_SECURE or None
                    )
                else:
                    # The data doesn't fit into a cookie, not sure what's the
                    # best thing to do in this case. Right now, we just leave
                    # the old cookie intact if there was one. If Django had
                    # some kind of standard logging interface, we could also
                    # log a warning here.
                    pass
        return response
class SessionStore(SessionBase):
    """Session backend storing the session as a signed, compressed cookie value.

    Format: base64(zlib(json(session) + md5(json(session) + SECRET_KEY))).

    NOTE(review): md5(data + SECRET_KEY) is not an HMAC (length-extension
    weakness) and the ``!=`` comparison in decode() is not constant-time;
    consider hmac + compare_digest — but changing the scheme invalidates all
    existing cookies, so it must be a deliberate migration.
    """
    def __init__(self, cookie):
        SessionBase.__init__(self, 'cookie')
        self.cookie = cookie
        self.deleted = False

    def exists(self, session_key):
        return self.cookie and not self.deleted

    def create(self):
        # No server-side storage to allocate.
        pass

    def save(self, must_create=False):
        # Persistence happens in the middleware via encode(); nothing to do here.
        pass

    def delete(self, session_key=None):
        # Flag only; the middleware deletes the cookie on the response.
        self.deleted = True

    def load(self):
        if self.cookie:
            return self.decode(self.cookie)
        return {}

    def cycle_key(self):
        # Key rotation is meaningless for a cookie-held session.
        pass

    def encode(self, session_dict):
        """Serialize, sign (md5 suffix) and compress the session dict."""
        data = json.dumps(session_dict)
        json_md5 = hashlib.md5(data + settings.SECRET_KEY).hexdigest()
        try:
            return b64encode(zlib.compress(data + json_md5))
        except Exception:
            # Unencodable session: fall back to an empty cookie value.
            return ''

    def decode(self, session_data):
        """Inverse of encode(); returns {} for malformed or tampered cookies."""
        try:
            data = zlib.decompress(b64decode(session_data))
        except Exception:
            # Garbage from the client — treat as no session.
            return {}
        # Last 32 chars are the hex md5 signature appended by encode().
        data, json_md5 = data[:-32], data[-32:]
        if hashlib.md5(data + settings.SECRET_KEY).hexdigest() != json_md5:
            logging.error('User tampered with session cookie')
            return {}
        return json.loads(data)
|
{"/instagram/feeds.py": ["/instagram/models.py", "/flickr/feeds.py"], "/flickr/feeds.py": ["/flickr/models.py"], "/instagram/admin.py": ["/instagram/models.py"], "/urls.py": ["/flickr/feeds.py", "/instagram/feeds.py"], "/flickr/admin.py": ["/flickr/models.py"]}
|
12,261
|
tominsam/feedify
|
refs/heads/main
|
/instagram/views.py
|
from instagram.models import AccessToken, InstagramException
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import render
from django.conf import settings
import urllib, urllib2
import logging
def index(request):
    """Instagram landing page: anonymous pitch, or the user's recent photos.

    Session key "i" holds the AccessToken primary key for the logged-in user.
    """
    if not request.session.get("i"):
        return render(request, "instagram/anon.html", dict(title="instagram"))
    try:
        token = AccessToken.objects.get(pk=request.session["i"])
    except AccessToken.DoesNotExist:
        # Stale session reference to a deleted token: drop it and start over.
        del request.session["i"]
        return HttpResponseRedirect("/instagram/")
    try:
        photos = token.get_photos()
    except InstagramException, e:
        # API rejected the token — force the user back through auth.
        logging.error("can't talk to instagram: %s"%e)
        return HttpResponseRedirect("/instagram/auth/?logout")
    return render(request, "instagram/list.html", dict(
        title = "instagram",
        token = token,
        photos = photos,
        # Duration of the API request (None on cache hit), shown for debugging.
        time = token.last_time,
    ))
def auth(request):
if request.GET.get("logout") is not None:
del request.session["i"]
return HttpResponseRedirect("/instagram/")
redirect = "%s/instagram/auth/"%settings.SITE_URL
# bounce step 1
if not request.GET.get("code") and not request.GET.get("error"):
return HttpResponseRedirect("%s?client_id=%s&redirect_uri=%s&response_type=code"%(settings.INSTAGRAM_AUTHORIZE_URL, settings.INSTAGRAM_API_KEY, redirect))
# error in auth. Probably turned us down.
error = request.REQUEST.get("error")
if error:
messages.add_message(request, messages.INFO, "Problem talking to instagram: %s. Try re-doing auth."%error)
return HttpResponseRedirect("/instagram/")
# successful auth
code = request.REQUEST.get("code")
if code:
try:
conn = urllib2.urlopen(settings.INSTAGRAM_ACCESS_TOKEN_URL, urllib.urlencode(dict(
client_id = settings.INSTAGRAM_API_KEY,
client_secret = settings.INSTAGRAM_API_SECRET,
grant_type= "authorization_code",
redirect_uri= redirect,
code = code,
)))
except urllib2.HTTPError, e:
messages.add_message(request, messages.INFO, "Problem talking to instagram: %s. Try re-doing auth."%e.read())
return HttpResponseRedirect("/instagram/")
# saves the token as well.
token = AccessToken.from_string(conn.read())
# keep session small
request.session['i'] = token.id
return HttpResponseRedirect("/instagram/")
|
{"/instagram/feeds.py": ["/instagram/models.py", "/flickr/feeds.py"], "/flickr/feeds.py": ["/flickr/models.py"], "/instagram/admin.py": ["/instagram/models.py"], "/urls.py": ["/flickr/feeds.py", "/instagram/feeds.py"], "/flickr/admin.py": ["/flickr/models.py"]}
|
12,262
|
tominsam/feedify
|
refs/heads/main
|
/core/exception_handling.py
|
import logging
import traceback
class ExceptionMiddleware(object):
    """Django middleware that logs the full traceback of any unhandled view exception.

    Returning None lets Django's normal exception handling (500 page) proceed.
    """
    def process_exception(self, request, exception):
        formatted = traceback.format_exc()
        logging.error(formatted)
        return None
|
{"/instagram/feeds.py": ["/instagram/models.py", "/flickr/feeds.py"], "/flickr/feeds.py": ["/flickr/models.py"], "/instagram/admin.py": ["/instagram/models.py"], "/urls.py": ["/flickr/feeds.py", "/instagram/feeds.py"], "/flickr/admin.py": ["/flickr/models.py"]}
|
12,263
|
tominsam/feedify
|
refs/heads/main
|
/flickr/admin.py
|
from flickr.models import *
from django.contrib import admin

# Register the OAuth token models. admin.site.register() accepts **options
# and builds a ModelAdmin subclass from them on the fly.
admin.site.register(RequestToken,
    list_display = ("key", "created"),
    date_hierarchy = "created",
)
admin.site.register(AccessToken,
    list_display = ("key", "nsid", "fullname", "created", "fetched"),
    date_hierarchy = "created",
)
|
{"/instagram/feeds.py": ["/instagram/models.py", "/flickr/feeds.py"], "/flickr/feeds.py": ["/flickr/models.py"], "/instagram/admin.py": ["/instagram/models.py"], "/urls.py": ["/flickr/feeds.py", "/instagram/feeds.py"], "/flickr/admin.py": ["/flickr/models.py"]}
|
12,264
|
tominsam/feedify
|
refs/heads/main
|
/fabfile.py
|
import os, sys

# Make the sibling "deployinator" checkout importable (expected to live next
# to this repository).
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "deployinator"))
from deployinator.deployinator import *

# Declare the "feedify" deployment: a postgres database plus nginx/gunicorn rules.
fab_init("feedify",
    database = "feedify",
    postgresql = True,
    rules = {
        "nginx": "deploy/nginx.conf",
        "gunicorn": {
            "port": 8002,
        }
    }
)
|
{"/instagram/feeds.py": ["/instagram/models.py", "/flickr/feeds.py"], "/flickr/feeds.py": ["/flickr/models.py"], "/instagram/admin.py": ["/instagram/models.py"], "/urls.py": ["/flickr/feeds.py", "/instagram/feeds.py"], "/flickr/admin.py": ["/flickr/models.py"]}
|
12,266
|
xAngad/HyChance
|
refs/heads/main
|
/main.py
|
import os
import discord
from dotenv import load_dotenv
# Load TOKEN (and any other secrets) from .env before anything reads os.environ.
load_dotenv()
from scripts.utils import createEmbed, createErrorEmbed

# Single global Discord client; every alias triggers the same duel command.
client = discord.Client()
aliases = ["!duel", "!d", "!1v1"]
@client.event
async def on_ready():
    # Startup confirmation, plus a "Playing !duel" presence so users can
    # discover the command from the member list.
    print(f"Bot logged in as: {client.user}")
    print("Running...")
    await client.change_presence(status=discord.Status.online, activity=discord.Game("!duel"))
@client.event
async def on_message(message):
    """Handle `!duel <p1> <p2>` (and aliases): reply with a prediction embed,
    or a usage-error embed when the argument count is wrong."""
    # Ignore our own messages to avoid reply loops.
    if message.author.id == client.user.id:
        return
    words = message.content.split()
    # Fix: attachment/embed-only messages have no text content, so words[0]
    # previously raised IndexError.
    if not words:
        return
    if words[0] in aliases:
        if len(words) == 3:
            p1, p2 = words[1], words[2]
            embedStats = createEmbed(p1, p2)
            await message.channel.send(embed=embedStats)
        else:
            embedError = createErrorEmbed()
            await message.channel.send(embed=embedError)
# Blocks forever; TOKEN is supplied via the .env file loaded at startup.
client.run(os.environ["TOKEN"])
|
{"/main.py": ["/scripts/utils.py"], "/scripts/utils.py": ["/scripts/stats.py"], "/scripts/stats.py": ["/scripts/utils.py"]}
|
12,267
|
xAngad/HyChance
|
refs/heads/main
|
/scripts/utils.py
|
import discord
import numpy as np
from mojang import MojangAPI
def toPercentage(victories, defeats):
    """Return victories / (victories + defeats) as a fraction in [0, 1].

    Returns 0.5 (even odds) when both counts are zero — previously a player
    with no recorded games raised ZeroDivisionError.
    """
    total = victories + defeats
    if total == 0:
        return 0.5
    return victories / total
def winner(p1, p2):
    """Head-to-head win probabilities given each player's solo win rate.

    Uses the odds-ratio formula p1(1-p2) / (p1(1-p2) + p2(1-p1)).
    Returns [P(p1 wins), P(p2 wins)]; even odds when the denominator is zero
    (both rates 0 or both 1) — previously that raised ZeroDivisionError.
    """
    denom = p1*(1-p2) + p2*(1-p1)
    if denom == 0:
        return [0.5, 0.5]
    probability_p1 = (p1*(1-p2)) / denom
    return [probability_p1, 1 - probability_p1]
def playerStats(p):
    """Fetch and return the per-mode win-rate predictions for player ``p``.

    The import is deliberately local: utils and stats import each other, so a
    top-level import would be circular.
    """
    from scripts.stats import Player
    return Player(p).predict()
def createEmbed(p1, p2):
    """Build the duel-prediction embed comparing players ``p1`` and ``p2``.

    Resolves both names through Mojang (so the embed shows canonical IGNs),
    computes head-to-head win probabilities per game mode, and formats one
    field per mode into a Discord embed.
    """
    UUIDs = [MojangAPI.get_uuid(p1), MojangAPI.get_uuid(p2)]
    IGNs = [MojangAPI.get_username(UUID) for UUID in UUIDs]
    # players = [p1, p2]
    # Create player predictions
    p1_probs = playerStats(p1)
    p2_probs = playerStats(p2)
    # Pairwise win probabilities per mode: [P(p1 wins), P(p2 wins)].
    sw_probs = winner(p1_probs["sw"], p2_probs["sw"])
    bw_probs = winner(p1_probs["bw"], p2_probs["bw"])
    duels_probs = winner(p1_probs["duels"], p2_probs["duels"])
    # Favourite per mode (argmax over the two probabilities).
    sw_winner = IGNs[np.argmax(sw_probs)]
    bw_winner = IGNs[np.argmax(bw_probs)]
    duels_winner = IGNs[np.argmax(duels_probs)]
    # Initialization
    embed = discord.Embed(type="rich", color=discord.Color.gold())
    # Author (comment out most probably)
    embed.set_author(name="HyChance - 1v1 Win Predictor",
        icon_url="https://crafatar.com/avatars/6327b3fb426b4b6a92fba78e13173a22?size=400")
    # Thumbnail
    embed.set_thumbnail(url="https://assets.change.org/photos/8/bv/gd/yuBVGDPtWevQvmQ-800x450-noPad.jpg?1571761596")
    # Stats fields
    embed.add_field(
        name="SkyWars",
        value=f"`{sw_winner}` (`{round(max(sw_probs)*100, 2)}% chance`)",
        inline=False
    )
    embed.add_field(
        name="Bedwars",
        value=f"`{bw_winner}` (`{round(max(bw_probs)*100, 2)}% chance`)",
        inline=False
    )
    embed.add_field(
        name="Duels",
        value=f"`{duels_winner}` (`{round(max(duels_probs)*100, 2)}% chance`)",
        inline=False
    )
    # Footer w contact me
    embed.set_footer(text="See any bugs? DM me: xAngad#4229" )
    return embed
def createErrorEmbed():
    """Build the usage-error embed shown when a duel command is malformed."""
    error_embed = discord.Embed(type="rich", color=discord.Color.red())
    error_embed.set_author(
        name="HyChance - 1v1 Win Predictor",
        icon_url="https://crafatar.com/avatars/6327b3fb426b4b6a92fba78e13173a22?size=400")
    error_embed.set_thumbnail(url="https://www.freeiconspng.com/uploads/error-icon-4.png")
    error_embed.add_field(
        name="Error",
        value="Please use format: `!duel <Player #1> <Player #2>`",
        inline=False)
    error_embed.set_footer(text="See any bugs? DM me: xAngad#4229")
    return error_embed
# def swXPtoLVL(xp):
# xps = [0, 20, 70, 150, 250, 500, 1000, 2000, 3500, 6000, 10000, 15000]
# if xp >= 15000:
# return (xp - 15000)/10000 + 12
# else:
# for i in range(len(xps)):
# if xp < xps[i]:
# return i + float(xp - xps[i-1]) / (xps[i] - xps[i-1])
|
{"/main.py": ["/scripts/utils.py"], "/scripts/utils.py": ["/scripts/stats.py"], "/scripts/stats.py": ["/scripts/utils.py"]}
|
12,268
|
xAngad/HyChance
|
refs/heads/main
|
/scripts/stats.py
|
import os
from dotenv import load_dotenv
import requests
from pprint import pprint
from mojang import MojangAPI
load_dotenv()
class Player(object):
    """One Hypixel player's stats, fetched by in-game name via the Hypixel API."""

    def __init__(self, ign):
        super().__init__()
        self.uuid = MojangAPI.get_uuid(str(ign))
        self.api = os.environ["API_KEY"]
        self.link = f"https://api.hypixel.net/player?key={self.api}&uuid={self.uuid}"
        self.hydata = requests.get(self.link).json()
        self.stats = self.hydata["player"]["stats"]

    def rawStats(self):
        """Return raw kill/death/win counters per game mode.

        Missing counters default to 0. Fix: a player who has never played a
        mode has no "Bedwars"/"SkyWars"/"Duels" key at all, which previously
        raised KeyError; .get() with an empty dict covers that too.
        """
        bw = self.stats.get("Bedwars", {})
        sw = self.stats.get("SkyWars", {})
        duels = self.stats.get("Duels", {})
        return {
            "bw": {
                "kills": bw.get("kills_bedwars", 0),
                "deaths": bw.get("deaths_bedwars", 0),
                "fkills": bw.get("final_kills_bedwars", 0),
                "fdeaths": bw.get("final_deaths_bedwars", 0),
                "solo_fkills": bw.get("eight_one_final_kills_bedwars", 0),
                "solo_fdeaths": bw.get("eight_one_final_deaths_bedwars", 0),
            },
            "sw": {
                "kills": sw.get("kills", 0),
                "deaths": sw.get("deaths", 0),
            },
            "duels": {
                "wins": duels.get("wins", 0),
                "losses": duels.get("losses", 0),
            },
        }

    def predict(self):
        """Return per-mode win-rate fractions (keys: "sw", "bw", "duels")."""
        # Local import: scripts.utils imports this module, so a top-level
        # import would be circular.
        from scripts.utils import toPercentage
        raw = self.rawStats()
        return {
            "sw": toPercentage(raw["sw"]["kills"], raw["sw"]["deaths"]),
            # Bedwars uses solo ("eight_one") final kills/deaths as the signal.
            "bw": toPercentage(raw["bw"]["solo_fkills"], raw["bw"]["solo_fdeaths"]),
            "duels": toPercentage(raw["duels"]["wins"], raw["duels"]["losses"]),
        }
|
{"/main.py": ["/scripts/utils.py"], "/scripts/utils.py": ["/scripts/stats.py"], "/scripts/stats.py": ["/scripts/utils.py"]}
|
12,288
|
tdegeus/makemovie
|
refs/heads/master
|
/test/trim.py
|
import matplotlib.pyplot as plt

# Generate five trivial line plots to act as input images for makemovie.trim.
filenames = []
for i in range(5):
    filename = 'image_{0:d}.png'.format(i)
    filenames += [filename]
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    plt.savefig(filename)

import makemovie

# Smoke test: trim the batch in place (identical figures -> identical trim boxes).
makemovie.trim(filenames, verbose=False)
|
{"/test/trim.py": ["/makemovie/__init__.py"], "/makemovie/cli/trim_images.py": ["/makemovie/__init__.py"]}
|
12,289
|
tdegeus/makemovie
|
refs/heads/master
|
/setup.py
|
from setuptools import setup
from setuptools import find_packages
import re

# Single-source the version: parse it out of the package's __init__.py.
filepath = 'makemovie/__init__.py'
__version__ = re.findall(r'__version__ = \'(.*)\'', open(filepath).read())[0]

setup(
    name = 'makemovie',
    version = __version__,
    license = 'MIT',
    author = 'Tom de Geus',
    author_email = 'tom@geus.me',
    description = 'Create a movie from a bunch of images.',
    long_description = 'Create a movie from a bunch of images.',
    keywords = 'ffmpeg',
    url = 'https://github.com/tdegeus/makemovie',
    packages = find_packages(),
    install_requires = ['docopt>=0.6.2', 'click>=4.0'],
    # Console entry points: "makemovie" and "trim_images" CLI commands.
    entry_points = {
        'console_scripts': [
            'makemovie = makemovie.cli.makemovie:main',
            'trim_images = makemovie.cli.trim_images:main']})
|
{"/test/trim.py": ["/makemovie/__init__.py"], "/makemovie/cli/trim_images.py": ["/makemovie/__init__.py"]}
|
12,290
|
tdegeus/makemovie
|
refs/heads/master
|
/makemovie/cli/trim_images.py
|
'''trim_image
Trim a batch of images.
Usage:
trim_image [options] <image>...
Arguments:
The images to trim.
Options:
-a, --append=<str>
Append filenames, if empty the input files are overwritten. [default: ]
--background=<str>
Apply a background color (e.g. "none" or "white").
--flatten
Flatten input images: required for transparent PNG-files.
--temp-dir=<str>
Output directory for temporary images (deleted if not specified).
-v, --verbose
Print all executed commands.
-h, --help
Show help.
--version
Show version.
(c-MIT) T.W.J. de Geus | tom@geus.me | www.geus.me | github.com/tdegeus
'''
import docopt
from .. import __version__
from .. import trim
def main():
    """CLI entry point: parse docopt arguments from the module docstring and trim."""
    args = docopt.docopt(__doc__, version = __version__)
    trim(
        filenames = args['<image>'],
        # docopt yields None when --background is omitted; default to white here.
        background = args['--background'] if args['--background'] is not None else 'white',
        flatten = args['--flatten'],
        append = args['--append'],
        temp_dir = args['--temp-dir'],
        verbose = args['--verbose'])
|
{"/test/trim.py": ["/makemovie/__init__.py"], "/makemovie/cli/trim_images.py": ["/makemovie/__init__.py"]}
|
12,291
|
tdegeus/makemovie
|
refs/heads/master
|
/makemovie/__init__.py
|
__version__ = '0.1.0'
import tempfile
import os
import re
from subprocess import check_output, STDOUT
from shutil import which
def _exec(cmd, verbose = False):
r'''
Execute command and return output.
'''
if verbose:
print(cmd)
output = check_output(cmd, shell = True, stderr = STDOUT).decode("utf-8")
if verbose and len(output) > 0:
print(output)
return output
def _mkdir(dirname, verbose = False):
r'''
Make directory if it does not yet exist.
'''
if os.path.isdir(dirname):
return
os.makedirs(dirname)
if verbose:
print('mkdir {0:s}'.format(dirname))
def _mv(orig, dest, verbose = False):
r'''
Move file from "orig" to "dest".
'''
os.rename(orig, dest)
if verbose:
print('mv {0:s} {1:s}'.format(orig, dest))
def _check_get_abspath(filenames):
r'''
Check if files exist, and return their path as absolute file-paths.
'''
if type(filenames) == str:
filenames = [filenames]
filenames = [os.path.abspath(f) for f in filenames]
for filename in filenames:
if not os.path.isfile(filename):
raise IOError('"{0:s}" does not exist'.format(filename))
return filenames
def _make_convert_tempdir(temp_dir = None, verbose = False):
r'''
Make a temporary directory and returns its absolute file-path.
If not specified a directory-name is automatically generated.
'''
if temp_dir is None:
temp_dir = tempfile.mkdtemp()
else:
temp_dir = os.path.abspath(temp_dir)
_mkdir(temp_dir, verbose)
return temp_dir
def _convert(filenames, options, append = None, temp_dir = None, verbose = False):
    r'''
    Run ImageMagick's "convert" on a batch of files.

    Options:

    - filenames: list of filenames (assumed to exist)
    - options: the options for the convert command (string)
    - append: if specified the "filenames" are not replaced, but appended (before the extension)
    - temp_dir: temporary directory (assumed to exist)
    - verbose: if true, all commands and output are printed to the screen
    '''
    if not which('convert'):
        raise IOError('"convert" not found, please install ImageMagick')
    for filename in filenames:
        temp_file = os.path.join(temp_dir, os.path.relpath(filename))
        _exec('convert {options:s} "{old:s}" "{new:s}"'.format(
            options = options,
            old = filename,
            new = temp_file),
            verbose = verbose)
        if append:
            # Insert "append" between basename and extension:
            # "img.png" + "-trim" -> "img-trim.png".
            # Fix: os.path.join(base, append, ext) previously produced a bogus
            # nested path like "img/-trim/.png".
            base, ext = os.path.splitext(filename)
            dest = base + append + ext
        else:
            dest = filename
        _mv(temp_file, dest, verbose)
def flatten(
    filenames,
    append = False,
    temp_dir = None,
    verbose = False):
    r'''
    Flatten batch of images.

    :arguments:

        **filenames** (``<list<str>>``)
            A list of filenames.

    :options:

        **append** (``<str>``)
            If specified the original images are not overwritten. Rather the filename is
            appended with the string specified here. Note that this implies that there could be others
            files that are overwritten.

        **temp_dir** (``<str>``)
            If specified that directory is used as temporary directory. Otherwise, a directory is
            automatically selected.

        **verbose** ([``False``] | ``True``)
            If True, all commands are printed to the standard output.
    '''
    if not which('convert'):
        raise IOError('"convert" not found, please install ImageMagick')
    filenames = _check_get_abspath(filenames)
    temp_dir = _make_convert_tempdir(temp_dir, verbose)
    # Fix: "opt" was previously used (opt += ...) before being assigned,
    # raising UnboundLocalError on every call.
    opt = ['-flatten']
    # Fix: pass "temp_dir" through — it was omitted, so "verbose" landed in
    # _convert's temp_dir parameter.
    _convert(filenames, ' '.join(opt), append, temp_dir, verbose)
def set_background(
    filenames,
    background,
    flatten = False,
    append = False,
    temp_dir = None,
    verbose = False):
    r'''
    Apply a background colour to a batch of files.

    :arguments:

        **filenames** (``<list<str>>``)
            A list of filenames.

        **background** (``<str>``)
            Apply a background colour (e.g. "none" or "white").

    :options:

        **flatten** ([``False``] | ``True``)
            Flatten images: required for transparent PNG-files.

        **append** (``<str>``)
            If specified the original images are not overwritten. Rather the filename is
            appended with the string specified here. Note that this implies that there could be others
            files that are overwritten.

        **temp_dir** (``<str>``)
            If specified that directory is used as temporary directory. Otherwise, a directory is
            automatically selected.

        **verbose** ([``False``] | ``True``)
            If True, all commands are printed to the standard output.
    '''
    if not which('convert'):
        raise IOError('"convert" not found, please install ImageMagick')
    filenames = _check_get_abspath(filenames)
    temp_dir = _make_convert_tempdir(temp_dir, verbose)
    # Fix: "opt" was previously used (opt += ...) before being assigned,
    # raising UnboundLocalError on every call.
    opt = ['-background {0:s}'.format(background)]
    if background != 'none':
        opt += ['-alpha remove']
    if flatten:
        opt += ['-flatten']
    # Fix: pass "temp_dir" through — it was omitted, so "verbose" landed in
    # _convert's temp_dir parameter.
    _convert(filenames, ' '.join(opt), append, temp_dir, verbose)
def trim(
    filenames,
    background = 'none',
    flatten = False,
    append = False,
    temp_dir = None,
    verbose = False):
    r'''
    Trim a batch of files.

    :arguments:

        **filenames** (``<list<str>>``)
            A list of filenames.

    :options:

        **background** ([``'none'``] | ``<str>``)
            Apply a background colour (e.g. "none" or "white").

        **flatten** ([``False``] | ``True``)
            Flatten images: required for transparent PNG-files.

        **append** (``<str>``)
            If specified the original images are not overwritten. Rather the filename is
            appended with the string specified here. Note that this implies that there could be others
            files that are overwritten.

        **temp_dir** (``<str>``)
            If specified that directory is used as temporary directory. Otherwise, a directory is
            automatically selected.

        **verbose** ([``False``] | ``True``)
            If True, all commands are printed to the standard output.
    '''
    if not which('convert'):
        raise IOError('"convert" not found, please install ImageMagick')
    filenames = _check_get_abspath(filenames)
    temp_dir = _make_convert_tempdir(temp_dir, verbose)
    # dry run to get trim size of each image
    # Parses ImageMagick's "-verbose" geometry report ("WxH WxH+X+Y ...").
    split = lambda txt: \
        re.split(r'([0-9]*)(x)([0-9]*)(\ )([0-9]*)(x)([0-9]*)([\+][0-9]*)([\+][0-9]*)(.*)', txt)
    out = []
    for filename in filenames:
        out += [_exec('convert -trim -verbose "{old:s}" "{new:s}"'.format(
            old = filename,
            new = os.path.join(temp_dir, 'tmp.png')),
            verbose = verbose)]
    # The second line of each report describes the written (trimmed) image.
    out = [o.split('\n')[1] for o in out]
    # width of the original image
    w = [int(split(o)[1]) for o in out]
    # height of the original image
    h = [int(split(o)[3]) for o in out]
    # width of the trimmed image
    w0 = [int(split(o)[5]) for o in out]
    # height of the trimmed image
    h0 = [int(split(o)[7]) for o in out]
    # horizontal position at which the trimmed image starts
    x = [int(split(o)[8]) for o in out]
    # vertical position at which the trimmed image starts
    y = [int(split(o)[9]) for o in out]
    # All images must trim to the same size so that one crop box fits them all.
    assert min(w0) == max(w0)
    assert min(h0) == max(h0)
    # select crop dimensions, add "convert" options to apply at the same time, and run "convert"
    # NOTE(review): the crop size combines w/h with the spread of the x/y
    # offsets — confirm against a multi-image run before altering.
    dim = {
        'w': max(w) + (max(x) - min(x)),
        'h': max(h) + (max(y) - min(y)),
        'x': min(x),
        'y': min(y)}
    opt = ['-crop {w:d}x{h:d}{x:+d}{y:+d}'.format(**dim)]
    opt += ['-background {0:s}'.format(background)]
    if background != 'none':
        opt += ['-alpha remove']
    if flatten:
        opt += ['-flatten']
    _convert(filenames, ' '.join(opt), append, temp_dir, verbose)
def rsvg_convert(
    filenames,
    background = 'none',
    ext = '.png',
    verbose = False):
    r'''
    Convert SVG images to raster files using "rsvg-convert".

    :arguments:

        **filenames** (``<list<str>>``)
            A list of filenames.

    :options:

        **background** ([``'none'``] | ``<str>``)
            Apply a background colour (e.g. "none" or "white").

        **ext** ([``'.png'``] | ``<str>``)
            Extension to which to convert to.

        **verbose** ([``False``] | ``True``)
            If True, all commands are printed to the standard output.

    :returns:

        List of new filenames.
    '''
    if not which('rsvg-convert'):
        raise IOError('"rsvg-convert" not found')
    filenames = _check_get_abspath(filenames)
    out = []
    for filename in filenames:
        # Fix: the loop variable is "filename" — the old code referenced the
        # undefined name "file" (NameError) — and the destination now honours
        # the "ext" option instead of a hard-coded '.png'.
        dest = os.path.splitext(filename)[0] + ext
        out += [dest]
        _exec('rsvg-convert -b {background:s} "{old:s}" -o "{new:s}"'.format(
            background = background,
            old = filename,
            new = dest),
            verbose = verbose)
    return out
|
{"/test/trim.py": ["/makemovie/__init__.py"], "/makemovie/cli/trim_images.py": ["/makemovie/__init__.py"]}
|
12,294
|
tbarbugli/saleor
|
refs/heads/master
|
/saleor/settings.py
|
import os

# Development defaults.
# NOTE(review): this file commits SECRET_KEY and payment credentials to the
# repository — they should be rotated and loaded from the environment.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
SITE_ID = 1

# Repository root: one level above this settings module.
PROJECT_ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
ROOT_URLCONF = 'saleor.urls'
WSGI_APPLICATION = 'saleor.wsgi.application'
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
INTERNAL_IPS = ['127.0.0.1']

# SQLite for development; overridden from DATABASE_URL at the bottom of the file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'dev.sqlite',
    }
}

# Italian-only shop, naive local times (no tz support).
TIME_ZONE = 'Europe/Rome'
LANGUAGE_CODE = 'it'
LANGUAGES = [
    ('it', 'Italiano'),
]
USE_I18N = False
USE_L10N = False
USE_TZ = False

MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(PROJECT_ROOT, 'saleor', 'static'),
]
STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'django.contrib.staticfiles.finders.DefaultStorageFinder',
    # django-compressor's finder so compressed assets are collectable.
    'compressor.finders.CompressorFinder',
]
TEMPLATE_DIRS = [
    os.path.join(PROJECT_ROOT, 'templates'),
    os.path.join(PROJECT_ROOT, 'saleor', 'templates'),
]
CMS_TEMPLATES = (
    # ('content_page.html', 'Content Page'),
    ('product_page.html', 'Product page'),
)
TEMPLATE_LOADERS = [
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.eggs.Loader',
]
# Make this unique, and don't share it with anybody.
# NOTE(review): committed to version control — rotate and load from env.
SECRET_KEY = 's$au$-tl&u-5m^i5ojzx2qd=lbv=+y5ihr5@or5b(qfaw%f7$n'
MIDDLEWARE_CLASSES = [
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'cart.middleware.CartMiddleware',
    'saleor.middleware.CheckHTML',
]
DJANGO_CMS_MIDDLEWARES = [
    'cms.middleware.page.CurrentPageMiddleware',
    'cms.middleware.user.CurrentUserMiddleware',
    'cms.middleware.toolbar.ToolbarMiddleware',
    'cms.middleware.language.LanguageCookieMiddleware',
]
MIDDLEWARE_CLASSES += DJANGO_CMS_MIDDLEWARES
THUMBNAIL_PROCESSORS = (
    'easy_thumbnails.processors.colorspace',
    'easy_thumbnails.processors.autocrop',
    #'easy_thumbnails.processors.scale_and_crop',
    'filer.thumbnail_processors.scale_and_crop_with_subject_location',
    'easy_thumbnails.processors.filters',
)
TEMPLATE_CONTEXT_PROCESSORS = [
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.core.context_processors.tz',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.request',
    # NOTE(review): "googe_analytics" looks like a typo for "google_analytics",
    # but it must match the function name in saleor/context_processors.py —
    # rename both together or not at all.
    'saleor.context_processors.googe_analytics',
    'saleor.context_processors.canonical_hostname',
    'saleor.context_processors.default_currency',
]
DJANGO_CMS_TEMPLATE_CONTEXT_PROCESSORS = [
    'cms.context_processors.media',
    'sekizai.context_processors.sekizai',
]
TEMPLATE_CONTEXT_PROCESSORS += DJANGO_CMS_TEMPLATE_CONTEXT_PROCESSORS
INSTALLED_APPS = [
    # External apps that need to go before django's
    # Django modules
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.sites',
    'django.contrib.auth',
    # External apps
    'south',
    'django_prices',
    'mptt',
    'payments',
    'reversion',
    'compressor',
    # Local apps
    'saleor',
    'product',
    'cart',
    'coupon',
    'checkout',
    'registration',
    'payment',
    'delivery',
    'qrcode',
    'userprofile',
    'order',
]
DJANGO_CMS_APPS = [
    'cms',
    'menus',
    'sekizai',
    'filer',
    'easy_thumbnails',
    'cmsplugin_filer_file',
    'cmsplugin_filer_folder',
    'cmsplugin_filer_image',
    'cmsplugin_filer_teaser',
    'cmsplugin_filer_video',
    'cms.plugins.link',
    'cms.plugins.text',
]
INSTALLED_APPS += DJANGO_CMS_APPS

# Errors mail the admins in production (DEBUG off); saleor's own logger prints
# to the console during development (DEBUG on).
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '%(levelname)s %(asctime)s %(module)s '
            '%(process)d %(thread)d %(message)s'
        },
        'simple': {
            'format': '%(levelname)s %(message)s'
        },
    },
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        },
        'require_debug_true': {
            '()': 'django.utils.log.RequireDebugTrue'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'filters': ['require_debug_true'],
            'formatter': 'simple'
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'saleor': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': True,
        },
    }
}
AUTHENTICATION_BACKENDS = (
    'registration.backends.EmailPasswordBackend',
    'registration.backends.ExternalLoginBackend',
    'registration.backends.TrivialBackend',
)
AUTH_USER_MODEL = 'auth.User'
CANONICAL_HOSTNAME = 'localhost:8000'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGIN_URL = '/account/login'
WARN_ABOUT_INVALID_HTML5_OUTPUT = False
SATCHLESS_DEFAULT_CURRENCY = 'EUR'
ACCOUNT_ACTIVATION_DAYS = 3
LOGIN_REDIRECT_URL = "home"
FACEBOOK_APP_ID = "YOUR_FACEBOOK_APP_ID"
FACEBOOK_SECRET = "YOUR_FACEBOOK_APP_SECRET"
GOOGLE_CLIENT_ID = "YOUR_GOOGLE_APP_ID"
GOOGLE_SECRET = "YOUR_GOOGLE_APP_SECRET"
# Adyen settings
# admin / gIsu2ahvCY7k
# NOTE(review): payment credentials committed to the repository — rotate them
# and load from the environment.
ADYEN_MERCHANT_ACCOUNT = 'ColpaccioCOM'
ADYEN_MERCHANT_SECRET = 'c(dd)*n*n9ps-kp9+2=p-57%g9ywlgk7#vqfq-0e+%o69iqc2b'
ADYEN_DEFAULT_SKIN = 'zI79cYBf'
ADYEN_ENVIRONMENT = 'test'
PAYMENT_BASE_URL = 'http://%s/' % CANONICAL_HOSTNAME
PAYMENT_MODEL = "payment.Payment"
PAYMENT_VARIANTS = {
    'default': (
        'payments.adyen.AdyenProvider', {
            'skin_code': ADYEN_DEFAULT_SKIN, 'merchant_account': ADYEN_MERCHANT_ACCOUNT
        }
    ),
}
CHECKOUT_PAYMENT_CHOICES = [
    ('default', 'Adyen')
]

import dj_database_url
# Heroku-style deployment: DATABASE_URL overrides the sqlite default when set.
if os.environ.get('DATABASE_URL'):
    DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Django Compressor Settings
COMPRESS_ENABLED = True
COMPRESS_PRECOMPILERS = (
    ('text/coffeescript', 'coffee --compile --stdio'),
    ('text/less', 'lessc {infile} {outfile}'),
)
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,295
|
tbarbugli/saleor
|
refs/heads/master
|
/coupon/urls.py
|
from django.conf.urls import patterns, url
from .views import CouponCodeDetailView
# URLconf for the coupon app: a single public detail page, keyed by the
# coupon code itself (captured as ``slug`` for the generic DetailView).
urlpatterns = patterns(
    '',
    url(r'^code/(?P<slug>[a-zA-Z0-9-]+)/$',
        CouponCodeDetailView.as_view(),
        name='coupon-code-detail'),
)
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,296
|
tbarbugli/saleor
|
refs/heads/master
|
/utils/__init__.py
|
from compressor.templatetags.compress import CompressorNode
from django.template.base import Template
def seizaki_compress(context, data, name):
    """Run a sekizai block through django-compressor and return the HTML.

    ``data`` is the raw string collected from the template (the list of
    js/css tags in this case); ``name`` is the sekizai namespace, either
    'js' or 'css'.  We parse the string into a nodelist and hand it to
    CompressorNode exactly as the {% compress 'js' %} template tag would.

    Fix: removed a leftover ``print data`` debug statement that dumped the
    whole block to stdout on every render.
    """
    nodelist = Template(data).nodelist
    return CompressorNode(nodelist=nodelist, kind=name, mode='file').render(context=context)
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,297
|
tbarbugli/saleor
|
refs/heads/master
|
/coupon/views.py
|
from django.views.generic import DetailView
from .models import CouponCode
class CouponCodeDetailView(DetailView):
    """Public detail page for a single coupon code.

    The URL captures the code itself as ``slug``; ``slug_field`` points
    the generic view at ``CouponCode.code`` instead of a real slug column.
    """
    queryset = CouponCode.objects.all()
    slug_field = 'code'
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,298
|
tbarbugli/saleor
|
refs/heads/master
|
/delivery/models.py
|
from django.db.models.signals import post_save
from order.models import Order
def deliver_digital_on_paid_order(sender, instance, **kwargs):
    """post_save handler: ship digital deliveries as soon as an order is paid.

    Digital goods need no physical shipment, so once the order reaches
    'fully-paid' every delivery group backed by a DigitalDeliveryGroup is
    moved straight to 'shipped'.
    """
    if instance.status != 'fully-paid':
        return
    digital_groups = instance.groups.filter(digitaldeliverygroup__isnull=False)
    for group in digital_groups:
        group.change_status('shipped')


post_save.connect(deliver_digital_on_paid_order, sender=Order)
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,299
|
tbarbugli/saleor
|
refs/heads/master
|
/coupon/tests.py
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from .models import Coupon
from product.models import Category
from prices import Price
from order.models import Order
from userprofile.models import Address
class TestCoupon(TestCase):
    """Exercises Coupon creation and ordering against real ORM objects."""

    def setUp(self):
        # get_or_create returns (obj, created); the created flag is unused.
        self.category, c = Category.objects.get_or_create(
            name='test_category'
        )
        self.coupon = Coupon.objects.create(
            name='test_coupon',
            price=Price(1, currency='USD'),
            category=self.category
        )
        self.address = Address.objects.create()

    def test_auto_shipping_order(self):
        # NOTE(review): this test creates an order but asserts nothing --
        # as written it only proves the ORM calls do not raise.  Confirm
        # the intent and add explicit assertions.
        order = Order.objects.create(
            billing_address=self.address
        )
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,300
|
tbarbugli/saleor
|
refs/heads/master
|
/order/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the ``order`` app.

    Auto-generated by ``schemamigration``: creates the Order,
    DeliveryGroup (plus its Shipped/Digital multi-table children) and
    OrderedItem tables.  The ``models`` dict below is South's frozen ORM
    snapshot -- do not edit it by hand.
    """

    def forwards(self, orm):
        # Adding model 'Order'
        db.create_table(u'order_order', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('status', self.gf('django.db.models.fields.CharField')(default='new', max_length=32)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('last_status_change', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='orders', null=True, to=orm['auth.User'])),
            ('billing_address', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['userprofile.Address'])),
            ('anonymous_user_email', self.gf('django.db.models.fields.EmailField')(default='', max_length=75, blank=True)),
            ('token', self.gf('django.db.models.fields.CharField')(default='', max_length=36, blank=True)),
        ))
        db.send_create_signal(u'order', ['Order'])
        # Adding model 'DeliveryGroup'
        db.create_table(u'order_deliverygroup', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('status', self.gf('django.db.models.fields.CharField')(default='new', max_length=32)),
            ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='groups', to=orm['order.Order'])),
            ('price', self.gf('django_prices.models.PriceField')(default=0, currency='EUR', max_digits=12, decimal_places=4)),
        ))
        db.send_create_signal(u'order', ['DeliveryGroup'])
        # Adding model 'ShippedDeliveryGroup'
        db.create_table(u'order_shippeddeliverygroup', (
            (u'deliverygroup_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['order.DeliveryGroup'], unique=True, primary_key=True)),
            ('address', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['userprofile.Address'])),
        ))
        db.send_create_signal(u'order', ['ShippedDeliveryGroup'])
        # Adding model 'DigitalDeliveryGroup'
        db.create_table(u'order_digitaldeliverygroup', (
            (u'deliverygroup_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['order.DeliveryGroup'], unique=True, primary_key=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
        ))
        db.send_create_signal(u'order', ['DigitalDeliveryGroup'])
        # Adding model 'OrderedItem'
        db.create_table(u'order_ordereditem', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('delivery_group', self.gf('django.db.models.fields.related.ForeignKey')(related_name='items', to=orm['order.DeliveryGroup'])),
            ('product', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, on_delete=models.SET_NULL, to=orm['product.Product'])),
            ('product_name', self.gf('django.db.models.fields.CharField')(max_length=128)),
            ('quantity', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=4)),
            ('unit_price_net', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=4)),
            ('unit_price_gross', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=4)),
        ))
        db.send_create_signal(u'order', ['OrderedItem'])

    def backwards(self, orm):
        # Deleting model 'Order'
        db.delete_table(u'order_order')
        # Deleting model 'DeliveryGroup'
        db.delete_table(u'order_deliverygroup')
        # Deleting model 'ShippedDeliveryGroup'
        db.delete_table(u'order_shippeddeliverygroup')
        # Deleting model 'DigitalDeliveryGroup'
        db.delete_table(u'order_digitaldeliverygroup')
        # Deleting model 'OrderedItem'
        db.delete_table(u'order_ordereditem')

    # Frozen ORM snapshot (generated by South) of every model this
    # migration touches, including cross-app dependencies.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'cms.page': {
            'Meta': {'ordering': "('tree_id', 'lft')", 'object_name': 'Page'},
            'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
            'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
            'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
            'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
            'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'order.deliverygroup': {
            'Meta': {'object_name': 'DeliveryGroup'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': u"orm['order.Order']"}),
            'price': ('django_prices.models.PriceField', [], {'default': '0', 'currency': "'EUR'", 'max_digits': '12', 'decimal_places': '4'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '32'})
        },
        u'order.digitaldeliverygroup': {
            'Meta': {'object_name': 'DigitalDeliveryGroup', '_ormbases': [u'order.DeliveryGroup']},
            u'deliverygroup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['order.DeliveryGroup']", 'unique': 'True', 'primary_key': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'})
        },
        u'order.order': {
            'Meta': {'ordering': "('-last_status_change',)", 'object_name': 'Order'},
            'anonymous_user_email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
            'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['userprofile.Address']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_status_change': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '32'}),
            'token': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '36', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': u"orm['auth.User']"})
        },
        u'order.ordereditem': {
            'Meta': {'object_name': 'OrderedItem'},
            'delivery_group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': u"orm['order.DeliveryGroup']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['product.Product']"}),
            'product_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'quantity': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '4'}),
            'unit_price_gross': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '4'}),
            'unit_price_net': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '4'})
        },
        u'order.shippeddeliverygroup': {
            'Meta': {'object_name': 'ShippedDeliveryGroup', '_ormbases': [u'order.DeliveryGroup']},
            'address': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['userprofile.Address']"}),
            u'deliverygroup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['order.DeliveryGroup']", 'unique': 'True', 'primary_key': 'True'})
        },
        u'product.category': {
            'Meta': {'object_name': 'Category'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['product.Category']"}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        u'product.product': {
            'Meta': {'object_name': 'Product'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'to': u"orm['product.Category']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'price': ('django_prices.models.PriceField', [], {'currency': "'EUR'", 'max_digits': '12', 'decimal_places': '4'}),
            'product_page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
            'sku': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'userprofile.address': {
            'Meta': {'object_name': 'Address'},
            'city': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'company_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'country_area': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'street_address_1': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'street_address_2': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'})
        }
    }

    complete_apps = ['order']
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,301
|
tbarbugli/saleor
|
refs/heads/master
|
/product/admin.py
|
from django.contrib import admin
# from .models import DigitalShip, Ship, Category
from .models import Category
from mptt.admin import MPTTModelAdmin

# Categories form an MPTT tree, so register them with the tree-aware
# admin class instead of the plain ModelAdmin.
# admin.site.register(DigitalShip)
# admin.site.register(Ship)
admin.site.register(Category, MPTTModelAdmin)
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,302
|
tbarbugli/saleor
|
refs/heads/master
|
/coupon/models.py
|
import datetime
from django.core.urlresolvers import reverse
from django.db import models
from product.models import Product, StockedProduct
import random
import string
CODE_CHARS = string.letters + string.digits
CODE_LENGTH = 24
class CouponCode(models.Model):
    """A single redeemable code minted for a Coupon, bound to the order
    that purchased it."""
    coupon = models.ForeignKey('coupon.Coupon')
    order = models.ForeignKey('order.Order', blank=True, null=True)
    # Date the code was redeemed; None while still unused.
    redeemed_on = models.DateField(blank=True, null=True)
    code = models.CharField(max_length=200)

    def can_be_used(self):
        """Return True if the code is inside its validity window and not
        yet redeemed.

        Fixes relative to the original:
        * ``self.coupon_valid_from`` / ``self.coupon_valid_until`` named
          attributes that do not exist (AttributeError); the dates live
          on ``self.coupon``.
        * both comparisons were inverted -- a coupon is unusable *before*
          ``valid_from`` and *after* ``valid_until``, not the reverse.
        * ``valid_from``/``valid_until`` are DateFields (dates), so they
          are compared against ``date.today()`` rather than a datetime.
        """
        today = datetime.date.today()
        if self.coupon.valid_from is not None and self.coupon.valid_from > today:
            return False  # not yet valid
        if self.coupon.valid_until is not None and self.coupon.valid_until < today:
            return False  # expired
        return self.redeemed_on is None

    def get_absolute_url(self):
        """Public detail URL; the code itself is the slug."""
        return reverse('coupon:coupon-code-detail', kwargs={'slug': str(self.code)})

    def owner(self):
        # Orders may be anonymous; fall back to the account e-mail.
        return self.order.anonymous_user_email or self.order.user.email
class Coupon(Product, StockedProduct):
short_description = models.TextField(blank=True, null=True)
terms = models.TextField(blank=True, null=True)
valid_from = models.DateField(blank=True, null=True)
valid_until = models.DateField(blank=True, null=True)
def email_coupon_code(self, coupon_code):
print 'send email with %r' % coupon_code
def deliver(self, order):
coupon_code = self.create_code(order)
self.email_coupon_code(coupon_code)
def generate_coupon_code(self):
assert self.pk is not None
return str(self.pk) + "".join(random.choice(CODE_CHARS) for i in xrange(CODE_LENGTH))
def create_code(self, order):
code = self.generate_coupon_code()
return CouponCode.objects.create(
coupon=self,
order=order,
code=code
)
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,303
|
tbarbugli/saleor
|
refs/heads/master
|
/coupon/admin.py
|
from django.contrib import admin
from .models import Coupon, CouponCode
class CouponCodeInlineAdmin(admin.TabularInline):
    """Read-only inline listing of a coupon's minted codes."""
    model = CouponCode
    # Codes are generated, never hand-edited: all fields are read-only
    # and rows cannot be added or deleted from the admin.
    readonly_fields = ['coupon', 'owner','code', 'redeemed_on']
    can_delete = False
    extra = 0


class CouponAdmin(admin.ModelAdmin):
    """Coupon admin page with its codes shown inline."""
    inlines = [CouponCodeInlineAdmin]


admin.site.register(Coupon, CouponAdmin)
|
{"/coupon/urls.py": ["/coupon/views.py"], "/coupon/views.py": ["/coupon/models.py"], "/coupon/tests.py": ["/coupon/models.py"], "/coupon/admin.py": ["/coupon/models.py"]}
|
12,328
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/user_profile/tasks.py
|
"""
Contains all recurring tasks relevant to the user.
This includes:
* Calculating user's net worth
* Calculating data necessary for the charts shown in UI
* Scheduling scans for new trades in his exchange and wallet accounts
* ...
"""
from backend.celery import app
@app.task
def test(arg):
    """Celery smoke-test task: echoes *arg* to the worker's stdout."""
    print(arg)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,329
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/schema.py
|
import graphene
import backend.coins.schema
import backend.accounts.schema
import backend.transactions.schema
import backend.user_profile.schema
class Query(backend.coins.schema.Query, backend.user_profile.schema.Query,
            backend.accounts.schema.Query, backend.transactions.schema.Query,
            graphene.ObjectType):
    """Root GraphQL query type: merges each app's Query via MRO."""
    # This class will inherit from multiple Queries
    # as we begin to add more apps to our project
    pass


class Mutation(graphene.ObjectType):
    """Root GraphQL mutation type: one field per app-level mutation."""
    create_account = backend.accounts.schema.CreateAccountMutation.Field()
    create_crypto_address = backend.accounts.schema.CreateCryptoAddressMutation.Field(
    )
    edit_crypto_address = backend.accounts.schema.EditCryptoAddressMutation.Field(
    )
    edit_account = backend.accounts.schema.EditAccountMutation.Field()
    account_refresh_transactions = backend.accounts.schema.AccountRefreshTransactionsMutation.Field(
    )
    coins_refresh_mutation = backend.coins.schema.CoinRefreshTransactionsMutation.Field(
    )
    import_transaction = backend.transactions.schema.ImportTransactionsMutation.Field(
    )


# The executable schema wired into the GraphQL endpoint.
schema = graphene.Schema(query=Query, mutation=Mutation)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,330
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/models.py
|
'''Contains the models for this app'''
from django.db import models
class Coin(models.Model):
    '''Database model representing a coin'''

    class Meta:
        ordering = ("symbol", )

    id = models.AutoField(primary_key=True)
    # External id of this coin on the price-data provider -- presumably
    # the CryptoCompare id, TODO confirm against the fetch task.
    cc_id = models.IntegerField(unique=True)
    img_url = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    symbol = models.CharField(max_length=10)
    coin_name = models.CharField(max_length=200)
    full_name = models.CharField(max_length=200)

    def __str__(self):
        '''Assembles a string description for this object'''
        return "{} - {}".format(self.symbol, self.full_name)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,331
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/schema.py
|
import json
import graphene
import time
from django.db.models import QuerySet
from graphene_django.types import DjangoObjectType
from backend.transactions.models import Transaction
from backend.transactions.importers.livecoin import import_data_livecoin
class TransactionType(DjangoObjectType):
    """GraphQL object type exposing a Transaction to API clients."""

    class Meta:
        model = Transaction
        # Whitelist of model fields exposed through GraphQL.
        only_fields = [
            'id',
            'owner',
            'date',
            'spent_currency',
            'spent_amount',
            'source_peer',
            'acquired_currency',
            'acquired_amount',
            "target_peer",
            "fee_currency",
            "fee_amount",
            "book_price_eur",
            "book_price_btc",
            "book_price_fee_eur",
            "book_price_fee_btc",
            "icon",
        ]

    # Tags are resolved manually below instead of via the model field.
    tags = graphene.List(graphene.String)

    @staticmethod
    def resolve_tags(self: Transaction, context, **kwargs):
        """Resolve all tags associated with this object"""
        # Sorted by name so clients get a stable, deterministic order.
        return self.tags.all().order_by("name")
class Query(object):
    """Root GraphQL queries for the transactions app."""

    # Single transaction by ID
    get_transaction = graphene.Field(
        TransactionType, id=graphene.Int(required=True))

    def resolve_get_transaction(self, info, **kwargs) -> Transaction:
        """Return the transaction with the given ``id``.

        Returns None when the user is not authenticated, the id is
        missing or unknown, or the transaction belongs to another user.
        """
        if not info.context.user.is_authenticated:
            return None
        transaction_id = kwargs.get('id')
        if transaction_id is None:
            return None
        try:
            t = Transaction.objects.get(pk=transaction_id)
        except Transaction.DoesNotExist:
            # An unknown id previously raised and surfaced as a server
            # error; treat it as "not found" instead.
            return None
        if t.owner == info.context.user:
            return t
        return None

    # Get all transaction where user has access rights
    all_transactions = graphene.List(TransactionType)

    def resolve_all_transactions(self, info, **kwargs) -> QuerySet:
        """Return every transaction owned by the requesting user
        (empty queryset when unauthenticated)."""
        if not info.context.user.is_authenticated:
            return Transaction.objects.none()
        return Transaction.objects.filter(owner=info.context.user)
class TransactionData(graphene.InputObjectType):
    """Data to import from the client. This should normally be already pre-processed data.
    """
    # Date string of the transaction; parsed server-side by the importer.
    date = graphene.String()
    transaction_type = graphene.String(
        required=True,
        description="""
    Options:
    exchange - exchange between currencies on this peer
    transfer - transfer one coin from one wallet to another
    buy - buy cryptos from fiat
    sell - sell cryptos for fiat
    income - receive cryptos for a service or selling of a good (refferal bonus, selling of hardware etc)
    expense - pay for a service or a good (online subscription, buy of an hardware)
    mining - mining income
    """)
    transaction_type_raw = graphene.String(
        description=
        """The raw unprocessed transaction type coming from the data source.
        Can be different from peer to peer. This is included so the importer in the server might implement this in a non standard way"""
    )
    # What was given away and how much of it (required).
    spent_currency = graphene.String(required=True)
    spent_amount = graphene.Float(required=True)
    # Peer (account/wallet) the funds came from.
    source_peer = graphene.ID()
    # What was received in exchange, if anything.
    acquired_currency = graphene.String()
    acquired_amount = graphene.Float()
    # Peer (account/wallet) the funds went to.
    target_peer = graphene.ID()
    # Fee paid for the transaction, if any.
    fee_currency = graphene.String()
    fee_amount = graphene.Float()
    # Free-form tags to attach to the imported transaction.
    tags = graphene.List(graphene.String)
class ImportTransactionInput(graphene.InputObjectType):
    """The input type for the import mutation.
    """
    # Which service the data comes from, e.g. "livecoin"; dispatched on
    # in ImportTransactionsMutation.mutate.
    service_type = graphene.String()
    import_mechanism = graphene.String(
        required=True,
        description="""
    The mechanism of import: \n
    * csv - Import via file (csv, excel, etc)
    * manual - Import with manually entered input
    """)
    # The pre-processed transactions to import.
    transactions = graphene.List(TransactionData, required=True)
class ImportTransactionsMutation(graphene.relay.ClientIDMutation):
    """Contains the import mutations"""

    class Input:
        """The input class for the mutation"""
        data = graphene.Field(ImportTransactionInput, required=True)

    # HTTP-like status code of the import: 200 ok, 403 unauthenticated,
    # 404 unknown service type.
    status = graphene.Int()
    # Human-readable error description (camelCase kept for API clients).
    formErrors = graphene.String()
    # The transactions created by a successful import.
    transactions = graphene.List(TransactionType)

    @classmethod
    def mutate(cls, root, info, input=None):
        # NOTE: `input` shadows the builtin, but the name is fixed by the
        # graphene mutation argument contract.
        if not info.context.user.is_authenticated:
            return ImportTransactionsMutation(status=403)
        if input.data.service_type == "livecoin":
            # Currently the only supported importer backend.
            transactions = import_data_livecoin(input.data, info.context.user)
            return ImportTransactionsMutation(
                status=200, transactions=transactions)
        return ImportTransactionsMutation(
            status=404,
            formErrors="Service type {} not found".format(
                input.data.service_type))
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,332
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/admin.py
|
'''Django admin registrations for the coins app'''
from django.contrib import admin
from backend.coins.models import Coin

# Expose Coin with the default ModelAdmin in the Django admin site.
admin.site.register(Coin)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,333
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/celery.py
|
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery

# Ensure Django settings are importable before the Celery app is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")

app = Celery("backend")
# Read all CELERY_* settings from the Django settings module.
app.config_from_object("django.conf:settings", namespace='CELERY')
# Discover tasks.py modules in all installed Django apps.
app.autodiscover_tasks()

# Periodic schedule: refresh the supported-coins list once a day.
# A fixed task_id makes repeated schedules overwrite each other instead
# of piling up.
app.conf.beat_schedule = {
    'update-coins-every-24-hours': {
        'task': 'backend.coins.tasks.async_update_supported_coins',
        'schedule': 86400.0,  # 24 hours
        'options': {
            'task_id': "task_update_coins"
        },
    },
}
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,334
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/fetchers/generic_exchange.py
|
"""
Contains all functions related to importing transactions
from exchanges supported by the ccxt library
"""
import ccxt
import time
from datetime import datetime, timezone
from django.utils.timezone import now
from requests.exceptions import ReadTimeout
from dateutil import parser
from django.db.models import QuerySet
from backend.utils.utils import get_name_price
from backend.accounts.models import Account
from backend.transactions.models import Transaction
from backend.transactions.models import TransactionUpdateHistoryEntry
from ...utils.utils import exchange_can_batch
def fetch_trades_unbatched(exchange: ccxt.Exchange):
    """
    Some exchanges like Binance don't support fetching all trades at
    once and need to fetch per trading pair (market).

    Returns the concatenated list of raw ccxt trade dicts across all
    markets; markets that time out are skipped (best effort).
    """
    markets = exchange.load_markets()
    trades = []
    for market in markets:
        try:
            trades += exchange.fetch_my_trades(market)
        except ReadTimeout as err:
            # Best effort: log and move on to the next market.
            print(err)
        finally:
            # exchange.rateLimit is milliseconds but time.sleep expects seconds
            # plus add an extra 2 seconds as some exchanges like Bitfinex have varying rate limits
            # and still return rate limit exceeded errors when using the value provided by CCXT.
            # Done in `finally` so the back-off also applies after a timeout;
            # previously `continue` skipped the sleep and the next request
            # fired immediately against an already-struggling exchange.
            time.sleep((exchange.rateLimit / 1000) + 2)
    return trades
def update_exchange_trx_generic(account: Account):
    """
    Fetches all trades and if older than last check imports to database

    Builds a ccxt exchange client named by ``account.service_type``,
    fetches the account's trades (in one batch or per market), converts
    every trade newer than the last recorded update into a Transaction
    with BTC/EUR book prices, and finally stores a
    TransactionUpdateHistoryEntry stamped with the fetch start time.
    """
    exchange: ccxt.Exchange = None
    # Stamp BEFORE fetching so trades arriving during the run are not
    # skipped by the next update.
    starttime: datetime = now()
    if hasattr(ccxt, account.service_type):
        # service_type doubles as the ccxt exchange class name, e.g. "binance".
        exchange: ccxt.Exchange = getattr(ccxt, account.service_type)({
            "api_key":
            account.api_key,
            "secret":
            account.api_secret
        })
    else:
        # NOTE(review): an unknown service type leaves `exchange` as None
        # and crashes below with AttributeError — consider raising here.
        print("nope")
    # Most recent update entry for this account, newest first.
    last_update_query: QuerySet = TransactionUpdateHistoryEntry.objects.filter(
        account=account).order_by('-date')
    # Default to the Unix epoch (UTC) so a first run imports everything.
    latest_update = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc)
    if last_update_query.count():
        latest_update = last_update_query[:1][0].date
    transactions = []
    trades = []
    if exchange_can_batch(account.service_type):
        # Exchange supports fetching the full trade history in one call.
        trades = exchange.fetch_my_trades()
    else:
        # Otherwise iterate market by market (slow, rate-limited).
        trades = fetch_trades_unbatched(exchange)
    total = len(trades)
    num_imports = 0
    if trades:
        for trade in trades:
            # print(trade["symbol"] + " " + trade["datetime"])
            trade_date = parser.parse(trade["datetime"])
            if trade_date <= latest_update:
                # Already imported during an earlier run.
                print("skiping ", trade["symbol"] + " " + trade["datetime"])
                continue
            # ccxt symbols are "BASE/QUOTE", e.g. "LTC/BTC".
            split = trade["symbol"].split("/")
            trx = Transaction()
            if trade["side"] == "buy":
                # Buying base with quote: `cost` of the quote currency is spent.
                trx.spent_amount = trade["cost"]
                trx.spent_currency = split[1]
                trx.acquired_amount = trade["amount"]
                trx.acquired_currency = split[0]
            elif trade["side"] == "sell":
                # Selling base for quote: `amount` of the base currency is spent.
                trx.spent_amount = trade["amount"]
                trx.spent_currency = split[0]
                trx.acquired_amount = trade["cost"]
                trx.acquired_currency = split[1]
            # NOTE(review): assumes trade["fee"] is always a dict — ccxt may
            # return None fees for some exchanges; verify before relying on it.
            trx.fee_amount = trade["fee"]["cost"]
            trx.fee_currency = trade["fee"]["currency"]
            trx.date = trade["datetime"]
            trx.owner = account.owner
            # An on-exchange trade stays on the same peer on both sides.
            trx.source_peer = account
            trx.target_peer = account
            date = parser.parse(trx.date)
            timestamp = time.mktime(date.timetuple())
            # Historic book prices at trade time, in BTC and EUR.
            trx.book_price_btc = get_name_price(
                trx.spent_amount, trx.spent_currency, "BTC", timestamp)
            trx.book_price_eur = get_name_price(
                trx.spent_amount, trx.spent_currency, "EUR", timestamp)
            trx.book_price_fee_btc = get_name_price(
                trx.fee_amount, trx.fee_currency, "BTC", timestamp)
            trx.book_price_fee_eur = get_name_price(
                trx.fee_amount, trx.fee_currency, "EUR", timestamp)
            trx.icon = Transaction.TRX_ICON_EXCHANGE
            # Save first so the m2m-style tags manager has a pk to attach to.
            trx.save()
            trx.tags.add(account.service_type, Transaction.TRX_TAG_EXCHANGE)
            trx.save()
            num_imports += 1
            time.sleep(0.2)  # avoid hammering the API's
    print("Imported {} trades.".format(num_imports))
    entry: TransactionUpdateHistoryEntry = TransactionUpdateHistoryEntry(
        date=starttime, account=account, fetched_transactions=num_imports)
    entry.save()
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,335
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/migrations/0007_auto_20180510_1515.py
|
# Generated by Django 2.0.5 on 2018-05-10 15:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds CryptoAddress.watch flag and default ordering by id."""

    dependencies = [
        ('accounts', '0006_auto_20180506_1126'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='cryptoaddress',
            options={'ordering': ('id',)},
        ),
        migrations.AddField(
            model_name='cryptoaddress',
            name='watch',
            # Existing rows start unwatched.
            field=models.BooleanField(default=False),
        ),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,336
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/tests/test_fetcher_generic.py
|
"""Contains all tests for the generic exchange fetcher"""
import time
from datetime import datetime, timedelta
import pytest
from django.utils.timezone import now
from _pytest.monkeypatch import MonkeyPatch
from mixer.backend.django import mixer
import ccxt
import cryptocompare
from backend.accounts.models import Account
from backend.transactions.models import Transaction
from backend.transactions.models import TransactionUpdateHistoryEntry
from ..fetchers.generic_exchange import update_exchange_trx_generic
pytestmark = pytest.mark.django_db
def new_get_historical_price(coin, curr="EUR", timestamp=None):
    """Stub for cryptocompare.get_historical_price: flat 2000.0 rate.

    The original default ``timestamp=time.time()`` was evaluated once at
    import time (stale default-argument pitfall); ``None`` now means
    "now". The value is unused by the stub either way.
    """
    return {coin: {curr: 2000.0}}
def new_load_markets(self):
    """Stub replacement for ccxt's load_markets: one BTC/ETH market."""
    markets = {"BTC/ETH": {}}
    return markets
# Expected values for the Binance fixture trade with id '2' below
# (the LTC/BTC "sell" entry in new_fetch_my_trades).
BINANCE_CHECK_TRANSACTION_ID = 2
BINANCE_AMOUNT = 0.20931215
BINANCE_COST = 0.00357691
BINANCE_PRICE = 0.01708888
# Book price in EUR: the stubbed flat 2000.0 EUR rate times the amount.
BINANCE_BOOK_PRICE_EUR = \
    new_get_historical_price("BTC")["BTC"]["EUR"] * BINANCE_AMOUNT
def new_fetch_my_trades(self, symbol=None, since=None, limit=None, params=None):
    """Stub for ccxt's fetch_my_trades.

    Returns 4 fixed "cryptopia" trades when ``symbol`` is None (batched
    call) and 3 fixed "binance" trades otherwise (per-market call).
    ``params`` default changed from the mutable ``{}`` to None (unused
    either way) to avoid the shared-default pitfall.
    """
    ret_binance = [{
        'amount': 0.1,
        'cost': 0.0003,
        'datetime': '2018-01-10T06:03:29.213Z',
        'fee': {
            'cost': 0.0002,
            'currency': 'BNB'
        },
        'id': '1',
        'price': 0.1,
        'side': 'sell',
        'symbol': 'BTC/ETH',
        'timestamp': 1515564209213,
    }, {
        'amount': BINANCE_AMOUNT,
        'cost': BINANCE_COST,
        'datetime': '2017-12-28T09:26:52.249Z',
        'fee': {
            'cost': 0.011,
            'currency': 'BNB'
        },
        'id': '2',
        'price': BINANCE_PRICE,
        'side': 'sell',
        'symbol': 'LTC/BTC',
        'timestamp': 1514453212249,
    }, {
        'amount': 240.0,
        'cost': 0.01,
        'datetime': '2018-01-08T18:23:09.665Z',
        'fee': {
            'cost': 0.0037,
            'currency': 'BNB'
        },
        'id': '3',
        'price': 4.335e-05,
        'side': 'buy',
        'symbol': 'XMR/BTC',
        'timestamp': 1515694988665,
    }]
    ret_cryptopia = [{
        'amount': 7.58039241,
        'cost': 0.00356278,
        'datetime': '2018-01-16T06:04:09.889Z',
        'fee': {
            'cost': 7.13e-06,
            'currency': 'BTC'
        },
        'id': '1',
        'price': 0.00047,
        'side': 'buy',
        'symbol': 'EMC/BTC',
        'timestamp': 1516082648889,
    }, {
        'amount': 0.20931215,
        'cost': 0.00357691,
        'datetime': '2018-01-16T05:59:03.521Z',
        'fee': {
            'cost': 7.15e-06,
            'currency': 'BTC'
        },
        'id': '2',
        'price': 0.01708888,
        'side': 'sell',
        'symbol': 'LTC/BTC',
        'timestamp': 1516082342521,
    }, {
        'amount': 130.77497801,
        'cost': 0.0353184,
        'datetime': '2017-12-25T20:25:33.460Z',
        'fee': {
            'cost': 7.064e-05,
            'currency': 'LTC'
        },
        'id': '3',
        'price': 0.00027007,
        'side': 'buy',
        'symbol': 'DGB/LTC',
        'timestamp': 1514233533460,
    }, {
        'amount': 130.77497801,
        'cost': 0.0353184,
        'datetime': '2017-12-25T20:25:33.460Z',
        'fee': {
            'cost': 7.064e-05,
            'currency': 'LTC'
        },
        'id': '4',
        'price': 0.00027007,
        'side': 'buy',
        'symbol': 'DGB/LTC',
        'timestamp': 1514233533460,
    }]
    if symbol is None:
        # Batched fetch (no market given) -> cryptopia fixture.
        return ret_cryptopia
    else:
        # Per-market fetch -> binance fixture.
        return ret_binance
@pytest.fixture
def patch_ccxt(monkeypatch: MonkeyPatch):
    """Replace the network-facing ccxt/cryptocompare calls with the
    deterministic fixture stubs defined above."""
    monkeypatch.setattr(ccxt.binance, "load_markets", new_load_markets)
    monkeypatch.setattr(ccxt.binance, "fetch_my_trades", new_fetch_my_trades)
    monkeypatch.setattr(ccxt.cryptopia, "load_markets", new_load_markets)
    monkeypatch.setattr(ccxt.cryptopia, "fetch_my_trades", new_fetch_my_trades)
    monkeypatch.setattr(cryptocompare, "get_historical_price",
                        new_get_historical_price)
def test_update_exchange_trx_generic_binance(monkeypatch: MonkeyPatch):
    """Imports the stubbed trades for a batched (binance) and an
    unbatched (cryptopia) account and checks counts, amounts and the
    recorded update-history entries."""
    user = mixer.blend("auth.User")
    account_bin: Account = mixer.blend(
        "accounts.Account", owner=user, service_type="binance")
    account_crypt: Account = mixer.blend(
        "accounts.Account", owner=user, service_type="cryptopia")
    # NOTE: calls the fixture function directly instead of requesting it
    # as a pytest fixture argument.
    patch_ccxt(monkeypatch)
    update_exchange_trx_generic(account_bin)
    update_exchange_trx_generic(account_crypt)
    # Binance stub yields 3 trades, cryptopia stub yields 4.
    t = Transaction.objects.filter(target_peer=account_bin)
    assert t.count() == 3
    t = Transaction.objects.filter(target_peer=account_crypt)
    assert t.count() == 4
    # Spot-check the known "sell" trade: spent/acquired sides and the
    # EUR book price derived from the stubbed 2000.0 rate.
    t: Transaction = Transaction.objects.get(pk=BINANCE_CHECK_TRANSACTION_ID)
    assert float(t.spent_amount) == BINANCE_AMOUNT
    assert float(t.acquired_amount) == BINANCE_COST
    assert float(t.book_price_eur) == BINANCE_BOOK_PRICE_EUR
    # Each run must record how many transactions it fetched.
    update_entry = TransactionUpdateHistoryEntry.objects.get(
        account=account_bin)
    assert update_entry.fetched_transactions == 3
    update_entry = TransactionUpdateHistoryEntry.objects.get(
        account=account_crypt)
    assert update_entry.fetched_transactions == 4
def test_update_exchange_trx_generic_transaction_history(
        monkeypatch: MonkeyPatch):
    """ Test, that the update function does not import """
    # ...trades older than the account's last recorded update time.
    user = mixer.blend("auth.User")
    account_bin: Account = mixer.blend(
        "accounts.Account", owner=user, service_type="binance")
    patch_ccxt(monkeypatch)
    date: datetime = now()
    # Pretend an update already ran right now.
    mixer.blend(
        "transactions.TransactionUpdateHistoryEntry",
        date=date,
        account=account_bin,
        fetched_transactions=3)
    # Two stub trades: one dated before the last update (must be
    # skipped) and one after it (must be imported).
    monkeypatch.setattr(
        ccxt.binance, "fetch_my_trades",
        lambda self, symbol=None, since=None, limit=None, params={}:
        [
            {
                'amount': 0.3,
                'cost': 0.00032,
                'datetime': str(date + timedelta(days=-1)), # Should be discarded
                'fee': {
                    'cost': 0.00044,
                    'currency': 'BNB'
                },
                'id': '4',
                'price': 0.1,
                'side': 'sell',
                'symbol': 'BTC/ETH',
                'timestamp': 1515564209213,
            },
            {
                'amount': BINANCE_AMOUNT,
                'cost': BINANCE_COST,
                'datetime': str(date + timedelta(days=1)),
                'fee': {
                    'cost': 0.011,
                    'currency': 'BNB'
                },
                'id': '5',
                'price': BINANCE_PRICE,
                'side': 'sell',
                'symbol': 'LTC/BTC',
                'timestamp': 1514453212249,
            }
        ])
    update_exchange_trx_generic(account_bin)
    transaction = Transaction.objects.filter(target_peer=account_bin)
    assert transaction.count(
    ) == 1, "Should not import transactions older than last update time"
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,337
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/migrations/0003_peer_class_type.py
|
# Generated by Django 2.0.2 on 2018-03-17 08:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds a ``class_type`` discriminator column to the ``Peer`` model."""

    dependencies = [
        ('accounts', '0002_auto_generate_default_objects'),
    ]
    operations = [
        migrations.AddField(
            model_name='peer',
            name='class_type',
            # 'Bier' is a one-off placeholder used only to fill existing rows;
            # preserve_default=False drops it from the model afterwards.
            field=models.CharField(default='Bier', editable=False, max_length=50),
            preserve_default=False,
        ),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,338
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/schema.py
|
import json
import graphene
import ccxt
import celery
from django.db.models import ObjectDoesNotExist
from graphene_django.types import DjangoObjectType
from backend.accounts.models import Account, CryptoAddress, Peer
from backend.coins.models import Coin
from backend.accounts.tasks import async_update_account_trx
class PeerType(DjangoObjectType):
    """GraphQL type exposing the ``Peer`` model."""

    class Meta:
        model = Peer
class SupportedService(graphene.ObjectType):
    """A service the backend can import from.

    Fields are filled from the ``Account.SERVICE_TYPES`` tuples in
    ``Query.resolve_supported_services``.
    """

    short_name = graphene.String()  # SERVICE_TYPES[i][0]
    long_name = graphene.String()  # SERVICE_TYPES[i][1]
    importer = graphene.String()  # SERVICE_TYPES[i][2]
class SupportedSymbol(graphene.ObjectType):
    """A tradable market symbol, filled from a ccxt market entry
    (``symbol``/``base``/``quote``) in ``Query.resolve_supported_symbols``."""

    symbol = graphene.String()
    base = graphene.String()
    quote = graphene.String()
class AccountType(DjangoObjectType):
    """GraphQL type exposing the ``Account`` model."""

    class Meta:
        model = Account
class CryptoAddressType(DjangoObjectType):
    """GraphQL type exposing the ``CryptoAddress`` model."""

    class Meta:
        model = CryptoAddress
class Query(object):
    """Root GraphQL queries for accounts, addresses and service metadata."""

    # Single account, looked up by ID or by name.
    get_account = graphene.Field(
        AccountType,
        id=graphene.ID(required=False, description="ID of the peer"),
        name=graphene.String(required=False, description="Name of the peer"))

    def resolve_get_account(self, info, **kwargs):
        lookup_id = kwargs.get('id')
        lookup_name = kwargs.get('name')
        if lookup_id is not None:
            return Account.objects.get(pk=lookup_id)
        if lookup_name is not None:
            return Account.objects.get(name=lookup_name)

    # All accounts owned by the requesting user.
    all_accounts = graphene.List(AccountType)

    def resolve_all_accounts(self, info, **kwargs):
        user = info.context.user
        if not user.is_authenticated:
            return Account.objects.none()
        return Account.objects.filter(owner=user)

    get_crypto_addresses = graphene.List(
        CryptoAddressType,
        peer_id=graphene.ID(required=True, description="ID of the peer"),
        description="Gets all crypto addresses for a peer")

    def resolve_get_crypto_addresses(self, info, **kwargs):
        """Gets all crypto addresses for a peer"""
        user = info.context.user
        if not user.is_authenticated:
            return CryptoAddress.objects.none()
        try:
            peer = Peer.objects.get(pk=kwargs.get('peer_id'))
        except ObjectDoesNotExist:
            return CryptoAddress.objects.none()
        if not peer.owner == user:
            return CryptoAddress.objects.none()
        return CryptoAddress.objects.filter(peer=peer)

    supported_services = graphene.List(SupportedService)

    def resolve_supported_services(self, info, **kwargs):
        services = []
        for entry in Account.SERVICE_TYPES:
            service = SupportedService()
            service.short_name = entry[0]
            service.long_name = entry[1]
            service.importer = entry[2]
            services.append(service)
        return services

    supported_symbols = graphene.List(
        SupportedSymbol, service=graphene.String(required=True))

    def resolve_supported_symbols(self, info, **kwargs):
        symbols = []
        if not info.context.user.is_authenticated:
            return symbols
        try:
            exchange = getattr(ccxt, kwargs.get('service'))()
            for market in exchange.load_markets().values():
                if not market:
                    continue
                sym = SupportedSymbol()
                sym.symbol = market["symbol"]
                sym.base = market["base"]
                sym.quote = market["quote"]
                symbols.append(sym)
        except AttributeError:
            # coinbase will land here
            # it is not supported by ccxt and will receive special treatment
            pass
        return symbols
class CreateAccountMutation(graphene.relay.ClientIDMutation):
    """Creates a new :class:`Account` owned by the authenticated user.

    Status codes: 200 on success, 400 on missing name/service_type,
    403 when not authenticated, 422 when the name is already taken.
    Fix: removed a leftover debug ``print("exists")`` statement.
    """

    class Input:
        name = graphene.String()
        service_type = graphene.String()
        symbols = graphene.String()
        api_key = graphene.String()
        api_secret = graphene.String()

    status = graphene.Int()
    formErrors = graphene.String()
    account = graphene.Field(AccountType)

    @classmethod
    def mutate(cls, root, info, input: Input):
        if not info.context.user.is_authenticated:
            return CreateAccountMutation(status=403)
        name = input.get("name", "").strip()
        service_type = input.get("service_type", "").strip()
        symbols = input.get("symbols", "").strip()
        api_key = input.get("api_key", "").strip()
        api_secret = input.get("api_secret", "").strip()
        # TODO: validate input using django forms or whatnot
        if not name or not service_type:
            return CreateAccountMutation(
                status=400,
                formErrors=json.dumps({
                    "account": ["Please enter valid account data"]
                }))
        # NOTE(review): uniqueness is checked globally, not per owner —
        # confirm whether account names should instead be unique per user.
        if Account.objects.filter(name=name).exists():
            return CreateAccountMutation(
                status=422,
                formErrors=json.dumps({
                    "account": ["A account with this name exists"]
                }))
        obj = Account.objects.create(
            owner=info.context.user,
            name=name,
            slug=name,
            service_type=service_type,
            symbols=symbols,
            api_key=api_key,
            api_secret=api_secret)
        return CreateAccountMutation(status=200, account=obj)
class CreateCryptoAddressMutation(graphene.relay.ClientIDMutation):
    """Attaches a new :class:`CryptoAddress` to an account the user owns.

    Status codes: 200 on success, 403 when not authenticated or not the
    account owner, 404 when the account or coin does not exist.
    Fix: the 403 paths previously returned ``CreateAccountMutation``
    payloads, which do not match this mutation's output type.
    """

    class Input:
        account_id = graphene.ID(required=True)
        address = graphene.String(required=True)
        coin_id = graphene.ID(required=True)
        watch = graphene.Boolean()

    status = graphene.Int()
    formErrors = graphene.String()
    address = graphene.Field(CryptoAddressType)

    @classmethod
    def mutate(cls, root, info, input: Input):
        if not info.context.user.is_authenticated:
            return CreateCryptoAddressMutation(
                status=403, client_mutation_id=input['client_mutation_id'])
        account_id = input.get("account_id", -1)
        address = input.get("address", "").strip()
        coin_id = input.get("coin_id", -1)
        watch = input.get("watch", False)
        try:
            account: Account = Account.objects.get(pk=account_id)
        except ObjectDoesNotExist:
            return CreateCryptoAddressMutation(
                status=404,
                formErrors=json.dumps({
                    "account_id": ["Please enter valid account id"]
                }),
                client_mutation_id=input['client_mutation_id'])
        if not account.owner == info.context.user:
            return CreateCryptoAddressMutation(
                status=403, client_mutation_id=input['client_mutation_id'])
        try:
            coin: Coin = Coin.objects.get(pk=coin_id)
        except ObjectDoesNotExist:
            return CreateCryptoAddressMutation(
                status=404,
                formErrors=json.dumps({
                    "coin_id": ["Please enter valid coin id"]
                }),
                client_mutation_id=input['client_mutation_id'])
        crypto_address = CryptoAddress.objects.create(
            peer=account, coin=coin, address=address, watch=watch)
        return CreateCryptoAddressMutation(
            status=200,
            address=crypto_address,
            client_mutation_id=input['client_mutation_id'])
class EditCryptoAddressMutation(graphene.relay.ClientIDMutation):
    """Edits an existing :class:`CryptoAddress` owned by the user.

    Status codes: 200 on success, 403 when not authenticated or not the
    owner, 404 when the address or coin does not exist.
    Fix: corrected the user-facing error message typo "Coin not fund".
    """

    class Input:
        id = graphene.ID(required=True)
        address = graphene.String(required=True)
        coin_id = graphene.ID(required=True)
        watch = graphene.Boolean()

    status = graphene.Int()
    formErrors = graphene.String()
    address = graphene.Field(CryptoAddressType)

    @classmethod
    def mutate(cls, root, info, input: Input):
        if not info.context.user.is_authenticated:
            return EditCryptoAddressMutation(
                status=403, client_mutation_id=input['client_mutation_id'])
        object_id = input.get("id", None)
        address = input.get("address", None)
        coin_id = input.get("coin_id", None)
        watch = input.get("watch", False)
        try:
            crypto_address: CryptoAddress = CryptoAddress.objects.get(
                pk=object_id)
        except ObjectDoesNotExist:
            return EditCryptoAddressMutation(
                status=404,
                formErrors=json.dumps({
                    "id": ["Address ID not found"]
                }),
                client_mutation_id=input['client_mutation_id'])
        # Ownership check goes through the address's peer.
        if not crypto_address.peer.owner.id == info.context.user.id:
            return EditCryptoAddressMutation(
                status=403, client_mutation_id=input['client_mutation_id'])
        try:
            coin: Coin = Coin.objects.get(pk=coin_id)
        except ObjectDoesNotExist:
            return EditCryptoAddressMutation(
                status=404,
                formErrors=json.dumps({
                    "coin_id": ["Coin not found"]
                }),
                client_mutation_id=input['client_mutation_id'])
        crypto_address.address = address
        crypto_address.coin = coin
        crypto_address.watch = watch
        crypto_address.save()
        return EditCryptoAddressMutation(
            status=200,
            address=crypto_address,
            client_mutation_id=input['client_mutation_id'])
class EditAccountMutation(graphene.relay.ClientIDMutation):
    """Edits name and API credentials of an existing :class:`Account`.

    Status codes: 200 on success, 400 on invalid input, 403 when not
    authenticated or not the owner, 422 when the account does not exist.
    Fix: removed the unreachable ``if not account:`` branch —
    ``Account.objects.get`` either returns an instance (always truthy)
    or raises ``ObjectDoesNotExist``, which is already handled above.
    """

    class Input:
        account_id = graphene.Int()
        name = graphene.String()
        api_key = graphene.String()
        api_secret = graphene.String()

    status = graphene.Int()
    formErrors = graphene.String()
    account = graphene.Field(AccountType)

    @classmethod
    def mutate(cls, root, info, input: Input):
        if not info.context.user.is_authenticated:
            return EditAccountMutation(status=403)
        account_id = input.get("account_id", -1)
        name = input.get("name", "").strip()
        api_key = input.get("api_key", "").strip()
        api_secret = input.get("api_secret", "").strip()
        # TODO: validate input using django forms or whatnot
        if account_id < 0 or not name or not api_key or not api_secret:
            return EditAccountMutation(
                status=400,
                formErrors=json.dumps({
                    "account": ["Please enter valid account data"]
                }))
        try:
            account: Account = Account.objects.get(pk=account_id)
        except ObjectDoesNotExist:
            return EditAccountMutation(
                status=422,
                formErrors=json.dumps({
                    "account": ["Account does not exists"]
                }))
        if account.owner != info.context.user:
            return EditAccountMutation(status=403)
        account.name = name
        account.api_key = api_key
        account.api_secret = api_secret
        account.save()
        return EditAccountMutation(status=200, account=account)
class AccountRefreshTransactionsMutation(graphene.relay.ClientIDMutation):
    """Triggers an asynchronous refresh of an account's transactions.

    Status codes: 200 task dispatched, 202 task already running, 400 on a
    missing/non-numeric/negative id, 403 when not authenticated or not the
    owner, 500 when dispatching the celery task fails.

    NOTE(review): a well-formed id for a nonexistent account makes
    ``Account.objects.get`` raise ``DoesNotExist`` uncaught — confirm
    whether a 404-style payload is wanted here instead.
    """

    class Input:
        account_id = graphene.String()

    status = graphene.Int()
    formErrors = graphene.String()
    msg = graphene.String()

    @classmethod
    def mutate(cls, root, info, input) -> "AccountRefreshTransactionsMutation":
        if not info.context.user.is_authenticated:
            return AccountRefreshTransactionsMutation(status=403)
        if input.get("account_id", -1) == -1:
            return AccountRefreshTransactionsMutation(status=400)
        account_id = input.get("account_id", -1).strip()
        try:
            # account_id arrives as a string; reject non-numeric or negative ids.
            id_int = int(account_id)
            if id_int < 0:
                raise ValueError("Invalid input")
        except ValueError as err:
            return AccountRefreshTransactionsMutation(status=400)
        account: Account = Account.objects.get(pk=account_id)
        if account.owner != info.context.user:
            return AccountRefreshTransactionsMutation(status=403)
        # Deterministic task id, so repeated requests map to the same task.
        tid = account_id + account.name
        # celery.result will only be available until after the task has run once
        if hasattr(celery, "result") and celery.result.AsyncResult(
                tid).status == "RUNNING":
            print("skipping task")
            return AccountRefreshTransactionsMutation(
                msg="Task is already running", status=202)
        else:
            try:
                print("starting task")
                async_update_account_trx.apply_async(
                    args=[account_id], task_id=tid)
            except async_update_account_trx.OperationalError as err:
                print("Sending task raised: %r", err)
                return AccountRefreshTransactionsMutation(status=500)
        return AccountRefreshTransactionsMutation(msg="Working", status=200)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,339
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/migrations/0004_auto_20180408_1110.py
|
# Generated by Django 2.0.3 on 2018-04-08 11:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Gives the four currency/amount columns of ``Transaction`` defaults
    (0 for amounts, empty string for currencies)."""

    dependencies = [
        ('transactions', '0003_transaction_icon'),
    ]
    operations = [
        migrations.AlterField(
            model_name='transaction',
            name='acquired_amount',
            field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
        ),
        migrations.AlterField(
            model_name='transaction',
            name='acquired_currency',
            field=models.CharField(default='', max_length=10),
        ),
        migrations.AlterField(
            model_name='transaction',
            name='spent_amount',
            field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
        ),
        migrations.AlterField(
            model_name='transaction',
            name='spent_currency',
            field=models.CharField(default='', max_length=10),
        ),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,340
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/migrations/0001_initial.py
|
# Generated by Django 2.0.2 on 2018-03-15 19:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial accounts schema: ``Peer``, ``Address`` and ``Account``.

    ``Account`` extends ``Peer`` via multi-table inheritance (the
    ``peer_ptr`` one-to-one link serves as its primary key).
    """

    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('coins', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('address', models.CharField(max_length=256)),
                ('address_str', models.CharField(blank=True, max_length=300)),
                # PROTECT: a coin referenced by an address cannot be deleted.
                ('coin', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='coins.Coin')),
            ],
        ),
        migrations.CreateModel(
            name='Peer',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Account',
            fields=[
                ('peer_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='accounts.Peer')),
                ('slug', models.SlugField()),
                ('service_type', models.CharField(max_length=50)),
                ('api_key', models.CharField(max_length=100)),
                ('api_secret', models.CharField(max_length=100)),
                ('creation_date', models.DateTimeField(auto_now_add=True)),
                ('symbols', models.CharField(blank=True, max_length=1000, null=True)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            bases=('accounts.peer',),
        ),
        migrations.AddField(
            model_name='address',
            name='peer',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='accounts.Peer'),
        ),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,341
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/tests/test_models.py
|
import pytest
from mixer.backend.django import mixer
from django.core.exceptions import ObjectDoesNotExist
from .. import schema
# We need to do this so that writing to the DB is possible in our tests.
pytestmark = pytest.mark.django_db
# Great introduction to TDD with Python + Django:
# https://www.youtube.com/watch?v=41ek3VNx_6Q
def test_peer_str_func():
    """Peer.__str__ renders as "[Peer] <name>"."""
    peer_name = "test123"
    peer = mixer.blend("accounts.peer", name=peer_name)
    expected = "[Peer] {}".format(peer_name)
    assert peer.__str__() == expected, "Should be the peer's name"
def test_address_str_func():
    """CryptoAddress.__str__ renders as "<coin symbol>:<address>"."""
    addr = "test123"
    crypto_address = mixer.blend("accounts.CryptoAddress", address=addr)
    expected = "{}:{}".format(crypto_address.coin.symbol, addr)
    assert crypto_address.__str__() == expected
def test_account_creation():
    """Blending an Account persists it and assigns a primary key."""
    account = mixer.blend("accounts.Account")
    assert account.pk > 0, "Should create an Account instance"
def test_account_str_func():
    """Account.__str__ renders as "[Account] <name>"."""
    account_name = "test123"
    account = mixer.blend("accounts.Account", name=account_name)
    expected = "[Account] {}".format(account_name)
    assert account.__str__() == expected, "Should be the accounts's name"
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,342
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/importers/livecoin.py
|
"""Livecoin exchange importer functions"""
import time
from backend.utils.utils import get_name_price
from backend.transactions.models import Transaction
import arrow
from backend.accounts.models import Peer
def import_data_livecoin(data, user):
    """Import transactions from a CSV file exported from Livecoin.

    Arguments:
        data {object} -- Object with the Livecoin data
        user {user} -- Current logged in User

    Returns:
        Transaction -- List with the imported transactions

    Fixes: replaced ``is not ""`` identity comparisons with ``!= ""``
    (``is`` on string literals is a SyntaxWarning and not guaranteed to
    work); removed the unused ``peer_cache`` local; collapsed the
    repeated icon/tag branches into a dispatch table.
    """
    # Maps the input transaction type to the (icon, tag) stored on the model;
    # anything unknown falls back to the warning icon/tag.
    type_map = {
        "exchange": (Transaction.TRX_ICON_EXCHANGE, Transaction.TRX_TAG_EXCHANGE),
        "income": (Transaction.TRX_ICON_INCOME, Transaction.TRX_TAG_INCOME),
        "transfer": (Transaction.TRX_ICON_TRANSFER, Transaction.TRX_TAG_TRANSFER),
    }
    transactions = []
    for trx_input in data.transactions:  # type: TransactionData
        # Deposits are skipped entirely.
        if trx_input.transaction_type_raw == "Deposit":
            continue
        trx = Transaction()
        date = arrow.get(trx_input.date, "DD.MM.YYYY HH:mm:ss")
        timestamp = date.timestamp
        trx.date = date.datetime
        trx.owner = user
        # Book price is calculated from the spent side when present,
        # otherwise from the acquired side.
        book_price_ok = False
        if trx_input.spent_amount > 0 and trx_input.spent_currency != "":
            trx.spent_amount = trx_input.spent_amount
            trx.spent_currency = trx_input.spent_currency
            trx.book_price_btc = get_name_price(
                trx.spent_amount, trx.spent_currency, "BTC", timestamp)
            trx.book_price_eur = get_name_price(
                trx.spent_amount, trx.spent_currency, "EUR", timestamp)
            book_price_ok = True
        if trx_input.acquired_amount > 0 and trx_input.acquired_currency != "":
            trx.acquired_amount = trx_input.acquired_amount
            trx.acquired_currency = trx_input.acquired_currency
            if not book_price_ok:
                trx.book_price_btc = get_name_price(trx.acquired_amount,
                                                    trx.acquired_currency,
                                                    "BTC", timestamp)
                trx.book_price_eur = get_name_price(trx.acquired_amount,
                                                    trx.acquired_currency,
                                                    "EUR", timestamp)
        # Unsaved FK stubs; only the pk is needed for assignment.
        trx.source_peer = Peer(pk=trx_input.source_peer)
        trx.target_peer = Peer(pk=trx_input.target_peer)
        if trx_input.fee_amount > 0:
            trx.fee_amount = trx_input.fee_amount
            trx.fee_currency = trx_input.fee_currency
            trx.book_price_fee_btc = get_name_price(
                trx.fee_amount, trx.fee_currency, "BTC", timestamp)
            trx.book_price_fee_eur = get_name_price(
                trx.fee_amount, trx.fee_currency, "EUR", timestamp)
        icon, tag = type_map.get(
            trx_input.transaction_type,
            (Transaction.TRX_ICON_WARNING, Transaction.TRX_TAG_WARNING))
        trx.icon = icon
        # Save before tagging: taggit requires a primary key.
        trx.save()
        trx.tags.add(data.service_type, data.import_mechanism, tag)
        if trx_input.tags:
            for tag_name in trx_input.tags:
                trx.tags.add(tag_name)
        trx.save()
        transactions.append(trx)
    return transactions
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,343
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/models.py
|
"""Contains all models necessary for the Transactions application"""
from django.db import models
from taggit.managers import TaggableManager
class Transaction(models.Model):
    """Database model for a single transaction.

    Records value moving from ``source_peer`` (spent amount/currency)
    to ``target_peer`` (acquired amount/currency), plus the fee and the
    book prices -- valuations of the spent amount in EUR and BTC.
    """

    # Tag constants classify the transaction; the matching TRX_ICON_*
    # value is the icon name shown for that tag.
    # exchange between currencies
    TRX_TAG_EXCHANGE = "exchange"
    TRX_ICON_EXCHANGE = "shuffle"
    # transfer one coin from one wallet to another
    TRX_TAG_TRANSFER = "transfer"
    TRX_ICON_TRANSFER = "send"
    # buy cryptos from fiat
    TRX_TAG_BUY = "buy"
    TRX_ICON_BUY = "subdirectory_arrow_right"
    # sell cryptos for fiat
    TRX_TAG_SELL = "sell"
    TRX_ICON_SELL = "subdirectory_arrow_left"
    # income for a service or sell of a good (refferal bonus, selling of hardware etc)
    TRX_TAG_INCOME = "income"
    TRX_ICON_INCOME = "arrow_forward"
    # expense for a service or buy of a good (online subscription, buy of an hardware)
    TRX_TAG_EXPENSE = "expense"
    TRX_ICON_EXPENSE = "arrow_backward"
    # mining income
    TRX_TAG_MINING = "mining"
    TRX_ICON_MINING = "gavel"
    # for transactions that need attention by the user
    TRX_TAG_WARNING = "warning"
    TRX_ICON_WARNING = "warning"

    class Meta:
        # Newest transactions first.
        ordering = ('-date', )

    id = models.AutoField(primary_key=True)
    # PROTECT: a user with transactions cannot be deleted.
    owner = models.ForeignKey(
        related_name='owner',
        to='auth.user',
        on_delete=models.PROTECT,
    )
    # Date of the transaction.
    date = models.DateTimeField()
    # Spent
    spent_currency = models.CharField(max_length=10, default="---")
    spent_amount = models.DecimalField(
        max_digits=19, decimal_places=10, default=0)
    source_peer = models.ForeignKey(
        default=1,
        related_name='source_peer',
        to='accounts.Peer',
        on_delete=models.PROTECT)
    # Acquired
    acquired_currency = models.CharField(max_length=10, default="---")
    acquired_amount = models.DecimalField(
        max_digits=19, decimal_places=10, default=0)
    target_peer = models.ForeignKey(
        default=1,
        related_name='target_peer',
        to='accounts.Peer',
        on_delete=models.PROTECT)
    # Fees and book prices are calculated using the mean price of the coin at that day
    fee_currency = models.CharField(max_length=10, default="---")
    fee_amount = models.DecimalField(
        max_digits=19, default=0, decimal_places=10)
    # book price is the price of the spent amount in BTC and FIAT
    book_price_eur = models.DecimalField(max_digits=19, decimal_places=10)
    book_price_btc = models.DecimalField(max_digits=19, decimal_places=10)
    # fee price is the price of the spent amount in BTC and FIAT
    book_price_fee_eur = models.DecimalField(
        max_digits=19, default=0, decimal_places=10)
    book_price_fee_btc = models.DecimalField(
        max_digits=19, default=0, decimal_places=10)
    tags = TaggableManager()
    # Icon name for the UI; "help_outline" until the transaction is classified.
    icon = models.CharField(default="help_outline", max_length=100)

    def __str__(self):
        """Human-readable summary: spent => acquired ==> EUR book price."""
        # convertion to float removes trailing 0's
        return "{} {} => {} {} ==> {} EUR".format(
            float(self.spent_amount), self.spent_currency,
            float(self.acquired_amount), self.acquired_currency,
            float(self.book_price_eur))
class TransactionUpdateHistoryEntry(models.Model):
    """Log entry for one transaction-fetch run of an account.

    Stores when transactions were fetched for ``account`` and how many
    transactions that run retrieved.
    """
    id = models.AutoField(primary_key=True)
    # When the fetch run took place.
    date = models.DateTimeField()
    # PROTECT: an account with history entries cannot be deleted.
    account = models.ForeignKey(
        to='accounts.Account',
        on_delete=models.PROTECT,
    )
    # Number of transactions retrieved by this run.
    fetched_transactions = models.IntegerField()

    def __str__(self):
        return "{} {} {}".format(self.account.id, self.date,
                                 self.fetched_transactions)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,344
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/views.py
|
from graphene_django.views import GraphQLView
# https://github.com/graphql-python/graphene-django/issues/252
class GraphQLErrorFormatView(GraphQLView):
    """GraphQLView that surfaces the wrapped original error to clients.

    graphene normally hides the underlying exception behind a generic
    message; this view unwraps ``error.original_error`` so the response
    carries the real message plus an HTTP-like ``code`` field.
    See https://github.com/graphql-python/graphene-django/issues/252
    """

    @staticmethod
    def format_error(error):
        """Return the response dict for *error*.

        Falls back to the default ``GraphQLView.format_error`` when the
        error has no ``original_error`` attached.
        """
        # fix: removed leftover debug ``print(error)`` -- errors belong in
        # the response / logging, not on stdout.
        # NOTE(review): UnauthorizedError and PermissionDeniedError are not
        # imported anywhere in this module, so reaching these isinstance
        # checks would raise NameError -- locate their definitions and
        # import them.
        if hasattr(error, 'original_error') and error.original_error:
            formatted = {"message": str(error.original_error)}
            if isinstance(error.original_error, UnauthorizedError):
                formatted['code'] = "401"
            elif isinstance(error.original_error, PermissionDeniedError):
                formatted['code'] = "403"
            return formatted
        return GraphQLView.format_error(error)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,345
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/migrations/0001_initial.py
|
# Generated by Django 2.0.2 on 2018-03-10 18:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the ``Coin`` table.

    Auto-generated by Django 2.0.2. The fields mirror the per-coin
    attributes consumed elsewhere from the cryptocompare coin list
    (cc_id, image URL, names, ticker symbol).
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Coin',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                # cryptocompare's coin id; unique so upserts can key on it.
                ('cc_id', models.IntegerField(unique=True)),
                ('img_url', models.CharField(max_length=200)),
                ('name', models.CharField(max_length=200)),
                ('symbol', models.CharField(max_length=10)),
                ('coin_name', models.CharField(max_length=200)),
                ('full_name', models.CharField(max_length=200)),
            ],
        ),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,346
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/migrations/0005_auto_20180506_1118.py
|
# Generated by Django 2.0.5 on 2018-05-06 11:18
from django.db import migrations
class Migration(migrations.Migration):
    """Renames the ``Address`` model to ``CryptoAddress``.

    Auto-generated by Django 2.0.5; pure rename, no data change.
    """

    dependencies = [
        ('coins', '0001_initial'),
        ('accounts', '0004_auto_20180408_1110'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Address',
            new_name='CryptoAddress',
        ),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,347
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/tests/test_tasks.py
|
'''Contains all task tests for this application'''
import pytest
from django.db.models.query import QuerySet
from backend.coins.models import Coin
from ...celery import app as celery_app
from ..tasks import async_update_supported_coins
pytestmark = pytest.mark.django_db
def new_get_coin_list(format_list):  # pylint: disable=W0613
    """Fake cryptocompare.get_coin_list: two valid coins plus one whose
    ``Id`` is not an int, which the importer must skip."""
    keys = ('Id', 'ImageUrl', 'Name', 'Symbol', 'CoinName', 'FullName')
    rows = {
        'BTC': ('1000', '/media/124/btc.png', 'Bitcoin', 'BTC',
                'Bitcoin', 'Bitcoin (BTC)'),
        'LTC': ('1001', '/media/124/ltc.png', 'Litecoin', 'LTC',
                'Litecoin', 'Litecoin (LTC)'),
        # 'Except' cannot be parsed as an int -> triggers an exception
        # in the import code and must not be imported.
        'BTCC': ('Except', '/media/124/btc.png', 'BitcoinCrash', 'BTCC',
                 'Bitcoin Crash', 'Bitcoin Crash (BTCC)'),
    }
    return {sym: dict(zip(keys, vals)) for sym, vals in rows.items()}
def new_get_coin_list_updated(format_list):  # pylint: disable=W0613
    """Fake cryptocompare.get_coin_list for the second run: LTC with
    updated image/name fields plus one brand-new coin (XLM)."""
    keys = ('Id', 'ImageUrl', 'Name', 'Symbol', 'CoinName', 'FullName')
    rows = {
        'LTC': ('1001', '/media/345/ltc_updated.png', 'Litecoin', 'LTC',
                'Litecoin Updated', 'Litecoin (LTC)'),
        'XLM': ('1002', '/media/1234/xlm.png', 'Stellar Lumens', 'XLM',
                'Lumens', 'Stellar Lumens (XLM)'),
    }
    return {sym: dict(zip(keys, vals)) for sym, vals in rows.items()}
# fix: dropped the erroneous ``@celery_app.task`` decorator -- wrapping a
# pytest test in a celery Task breaks pytest collection and the injection
# of the ``monkeypatch`` fixture; a test must remain a plain function.
def test_async_update_coins(monkeypatch):
    '''Test the supported coin update function'''
    # First run: two valid coins are imported; the 'BTCC' entry carries a
    # non-integer Id and must be skipped by the task.
    monkeypatch.setattr('cryptocompare.get_coin_list', new_get_coin_list)
    async_update_supported_coins()  # pylint: disable=E1120
    all_coins: QuerySet = Coin.objects.all()
    assert all_coins.count() == 2
    coin: Coin = all_coins.first()
    assert coin.cc_id == 1000
    assert coin.img_url == '/media/124/btc.png'
    assert coin.name == 'Bitcoin'
    assert coin.symbol == 'BTC'
    assert coin.coin_name == 'Bitcoin'
    assert coin.full_name == 'Bitcoin (BTC)'
    # Second run: existing LTC row is updated in place, XLM is added.
    monkeypatch.setattr('cryptocompare.get_coin_list',
                        new_get_coin_list_updated)
    async_update_supported_coins()  # pylint: disable=E1120
    assert Coin.objects.all().count() == 3, 'Test add one new coin'
    ltc_updated: Coin = Coin.objects.get(pk=2)
    assert ltc_updated.cc_id == 1001
    assert ltc_updated.img_url == '/media/345/ltc_updated.png'
    assert ltc_updated.coin_name == 'Litecoin Updated'
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,348
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/tests/test_schema.py
|
'''Contains all schema tests for the application'''
import pytest
from mixer.backend.django import mixer
from django.contrib.auth.models import AnonymousUser
from django.test import RequestFactory
from ...test_utils.utils import mock_resolve_info
import backend
from .. import schema
pytestmark = pytest.mark.django_db
def test_coin_type():
    """The CoinType graphene type must be instantiable."""
    assert schema.CoinType()
def test_resolve_all_coins():
    '''Test allCoins Query'''
    user_a = mixer.blend('auth.User')
    req = RequestFactory().get('/')
    req.user = AnonymousUser()
    resolve_info = mock_resolve_info(req)
    # Four coins exist in the database regardless of who asks.
    mixer.blend('coins.Coin')
    mixer.blend('coins.Coin')
    mixer.blend('coins.Coin')
    mixer.blend('coins.Coin')
    query = schema.Query()
    res = query.resolve_all_coins(resolve_info)
    # fix: assertion messages said "transactions" -- this query returns coins
    assert res.count() == 0, 'User not logged in, should return 0 coins'
    req.user = user_a
    res = query.resolve_all_coins(resolve_info)
    assert res.count(
    ) == 4, 'User A is logged in, should return 4 coins'
def test_update_supported_coins(monkeypatch):
    '''Test the supported coins update mutation '''
    # Anonymous request first: the mutation must reject it.
    req = RequestFactory().get('/')
    req.user = AnonymousUser()
    resolve_info = mock_resolve_info(req)
    data = {'client_mutation_id': '1'}
    mut = schema.CoinRefreshTransactionsMutation()
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 403, 'Should return 403 if user is not logged in'
    # A logged-in but non-superuser account is rejected as well.
    req.user = mixer.blend('auth.User')
    mut = schema.CoinRefreshTransactionsMutation()
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 403, 'Should return 403 if user is not logged in but not superuser'
    # Stub the celery dispatch so no real task is queued.
    # NOTE(review): the lambda accepts a single ``task_id`` arg -- implies
    # the mutation calls apply_async with exactly one argument; confirm
    # against backend/coins/schema.py.
    monkeypatch.setattr(backend.coins.tasks.async_update_supported_coins,
                        'apply_async', lambda task_id: print("mock called"))
    req.user.is_superuser = True
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 200, 'Should return 200 as task was started'
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,349
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/migrations/0002_auto_20180510_1515.py
|
# Generated by Django 2.0.5 on 2018-05-10 15:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('coins', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='coin',
options={'ordering': ('symbol',)},
),
]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,350
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/tests/test_schema.py
|
import pytest
from mixer.backend.django import mixer
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.models import AnonymousUser
from django.test import RequestFactory
import backend.transactions.schema as schema
from backend.transactions.models import Transaction
from backend.test_utils.utils import mock_resolve_info, gen_fake_transaction
# We need to do this so that writing to the DB is possible in our tests.
pytestmark = pytest.mark.django_db
def test_transaction_type():
    """The TransactionType graphene type must be instantiable."""
    assert schema.TransactionType()
def test_resolve_get_transaction_by_id():
    """getTransaction query must enforce per-user ownership."""
    # fix: removed unused ``anonuser`` local; req.user is set directly below.
    usera = mixer.blend("auth.User")
    userb = mixer.blend("auth.User")
    gen_fake_transaction(owner=usera)
    gen_fake_transaction(owner=usera)
    gen_fake_transaction(owner=usera)
    req = RequestFactory().get("/")
    req.user = AnonymousUser()
    resolveInfo = mock_resolve_info(req)
    query = schema.Query()
    res = query.resolve_get_transaction(resolveInfo, **{"id": 1})
    # fix: ``is None`` identity check instead of ``== None`` (PEP 8)
    assert res is None, "User not logged in, should return None"
    query = schema.Query()
    req.user = usera
    res = query.resolve_get_transaction(resolveInfo, **{"id": 1})
    assert isinstance(res, Transaction), "Should return a transaction object"
    assert res.id == 1, "Should return transaction with id 1"
    res = query.resolve_get_transaction(resolveInfo, **{"id": 2})
    assert isinstance(res, Transaction), "Should return a transaction object"
    assert res.id == 2, "Should return transaction with id 2"
    req.user = userb
    res = query.resolve_get_transaction(resolveInfo, **{"id": 2})
    assert res is None, "User should not have access to another users transaction"
    # An id that does not exist propagates the ORM error.
    with pytest.raises(ObjectDoesNotExist):
        query.resolve_get_transaction(resolveInfo, **{"id": 5})
def test_resolve_all_transactions():
anonuser = AnonymousUser()
usera = mixer.blend("auth.User")
userb = mixer.blend("auth.User")
req = RequestFactory().get("/")
req.user = AnonymousUser()
resolveInfo = mock_resolve_info(req)
gen_fake_transaction(owner=usera)
gen_fake_transaction(owner=usera)
gen_fake_transaction(owner=userb)
gen_fake_transaction(owner=userb)
gen_fake_transaction(owner=userb)
query = schema.Query()
res = query.resolve_all_transactions(resolveInfo)
assert res.count() == 0, "User not logged in, should return 0 transactions"
req.user = usera
res = query.resolve_all_transactions(resolveInfo)
assert res.count(
) == 2, "User A is logged in, should return 2 transactions"
req.user = userb
res = query.resolve_all_transactions(resolveInfo)
assert res.count(
) == 3, "User B is logged in, should return 3 transactions"
def test_import_csv_data_mutation(mocker):
"""
test if user is authenticated ✓
test fail at erroneous data input ✓
test if appropriate import function is called ✓
"""
usera = mixer.blend("auth.User")
req = RequestFactory().get("/")
req.user = AnonymousUser()
resolve_info = mock_resolve_info(req)
mut = schema.ImportTransactionsMutation()
res = mut.mutate(None, resolve_info, {})
assert res.status == 403, "Should return 403 if user is not logged in"
data = schema.ImportTransactionsMutation.Input()
data.data = schema.ImportTransactionInput()
data.data.service_type = "fakeexchange"
data.data.import_mechanism = "csv"
data.data.transactions = []
req.user = usera
res = mut.mutate(None, resolve_info, data)
assert res.status == 404, "Service not found, should return 404"
assert res.formErrors == "Service type {} not found".format(
data.data.service_type
), "Service not found, send correct error message"
mocker.patch("backend.transactions.schema.import_data_livecoin")
data.data.service_type = "livecoin"
res = mut.mutate(None, resolve_info, data)
assert res.status == 200
schema.import_data_livecoin.assert_called_once() # pylint: disable=E1101
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,351
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/migrations/0001_initial.py
|
# Generated by Django 2.0.2 on 2018-03-15 19:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('accounts', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Transaction',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('date', models.DateTimeField()),
('spent_currency', models.CharField(max_length=10)),
('spent_amount', models.DecimalField(decimal_places=10, max_digits=19)),
('acquired_currency', models.CharField(max_length=10)),
('acquired_amount', models.DecimalField(decimal_places=10, max_digits=19)),
('fee_currency', models.CharField(max_length=10)),
('fee_amount', models.DecimalField(decimal_places=10, max_digits=19)),
('book_price_eur', models.DecimalField(decimal_places=10, max_digits=19)),
('book_price_btc', models.DecimalField(decimal_places=10, max_digits=19)),
('book_price_fee_eur', models.DecimalField(decimal_places=10, max_digits=19)),
('book_price_fee_btc', models.DecimalField(decimal_places=10, max_digits=19)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='owner', to=settings.AUTH_USER_MODEL)),
('source_account', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='source_account', to='accounts.Account')),
('target_account', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='target_account', to='accounts.Account')),
],
),
migrations.CreateModel(
name='TransactionUpdateHistoryEntry',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('date', models.DateTimeField()),
('fetched_transactions', models.IntegerField()),
('account', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='accounts.Account')),
],
),
]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,352
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/utils/utils.py
|
"""Contains various utility functions"""
import time
import cryptocompare as cc
from diskcache import FanoutCache
def exchange_can_batch(exchange: str) -> bool:
# For some exchanges it is impossible to get all trades for
# an account and we have to fetch each symbol individually.
# Binance, for example. Cryptopia does not have this problem.
if exchange == "binance":
return False
elif exchange == "bitfinex":
return False
return True
# use a simple cache mechanism to avoid hammering the API
CACHE = FanoutCache('/tmp/diskcache/fanoutcache')
def get_name_price(amount: float,
base: str,
target: str,
timestamp: float = time.time()) -> float:
"""
Calculated the price of one name in another name.
Returns a float with the converted value as a decimal.Decimal
Keyword arguments:
amount -- amount to convert
base -- name to convert from
target -- name to convert to
date -- historic date as a Unix Timestamp (default: time.time())
"""
key = base + target + str(timestamp)
request_res = CACHE.get(key, None)
if request_res is None:
request_res = cc.get_historical_price(base, target, timestamp)
CACHE.add(key, request_res)
val = request_res[base][target]
return amount * val
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,353
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/fetchers/coinbase.py
|
"""
Contains all functions related to importing Coinbase data
Note: We cannot use Transaction.objects.bulk_create since
django-taggit needs object id before saving.
"""
import json
import time
from requests.sessions import Session
from datetime import datetime, timezone
from collections import namedtuple
from dateutil import parser
from django.utils.timezone import now
from coinbase.wallet.client import Client, APIObject
from backend.transactions.models import Transaction, TransactionUpdateHistoryEntry
from backend.accounts.models import Account
from backend.utils.utils import get_name_price
TAG_COINBASE = "coinbase"
def process_send(cb_trx, timestamp: int, account: Account) -> Transaction:
"""Process all Coinbase send transactions
Arguments:
cb_trx {APIObject} -- the coinbase transaction to import
timestamp {float} -- timestamp of last import from coinbase
account {Account} -- the account this transaction originates from
Returns:
Transaction -- a Transaction object
"""
new_trx = Transaction()
new_trx.date = cb_trx["created_at"]
# minus on coinbase (source peer)
new_trx.spent_amount = abs(float(cb_trx["amount"]["amount"]))
new_trx.spent_currency = cb_trx["amount"]["currency"]
network = cb_trx["network"]
tag = ""
if network["status"] == "off_blockchain":
# could be a refferal bonus from Coinbase
new_trx.acquired_amount = abs(float(cb_trx["amount"]["amount"]))
new_trx.acquired_currency = cb_trx["amount"]["currency"]
tag = Transaction.TRX_TAG_INCOME
new_trx.icon = Transaction.TRX_ICON_INCOME
# a refferal bonus has no fee, so use defaults from model
else:
# amount received on target peer (spent amount with network fees deducted)
new_trx.acquired_amount = abs(
float(network["transaction_amount"]["amount"]))
new_trx.acquired_currency = network["transaction_amount"]["currency"]
# network fee for this transaction
new_trx.fee_amount = abs(float(network["transaction_fee"]["amount"]))
new_trx.fee_currency = network["transaction_fee"]["currency"]
new_trx.book_price_fee_eur = get_name_price(
new_trx.fee_amount, new_trx.fee_currency, "EUR", timestamp)
new_trx.book_price_fee_btc = get_name_price(
new_trx.fee_amount, new_trx.fee_currency, "BTC", timestamp)
tag = Transaction.TRX_TAG_TRANSFER
new_trx.icon = Transaction.TRX_ICON_TRANSFER
# calculate book prices
# number might be negative, make absolute
new_trx.book_price_eur = abs(float(cb_trx["native_amount"]["amount"]))
new_trx.book_price_btc = get_name_price(
new_trx.spent_amount, new_trx.spent_currency, "BTC", timestamp)
new_trx.owner = account.owner
new_trx.source_peer = account
# TODO: get target address and query database for known addresses
# If it exists, get the parent Peer for this address and set as target
# new_trx.target_peer = None
new_trx.save()
new_trx.tags.add(TAG_COINBASE, tag)
new_trx.save()
def process_buy_sell(cb_trx, timestamp, account: Account) -> Transaction:
"""Process all Coinbase buys and sells
Arguments:
cb_trx {APIObject} -- the coinbase transaction to import
timestamp {float} -- timestamp of last import from coinbase
account {Account} -- the account this buy or sell originates from
Raises:
ValueError -- when resource is not "buy" or "sell"
Returns:
Transaction -- a Transaction object
"""
new_trx: Transaction = Transaction()
new_trx.date = cb_trx["created_at"]
tag = "None"
if cb_trx["resource"] == "buy":
new_trx.acquired_amount = float(cb_trx["amount"]["amount"])
new_trx.acquired_currency = cb_trx["amount"]["currency"]
new_trx.spent_amount = float(cb_trx["total"]["amount"])
new_trx.spent_currency = cb_trx["total"]["currency"]
new_trx.icon = Transaction.TRX_ICON_BUY
tag = Transaction.TRX_TAG_BUY
elif cb_trx["resource"] == "sell":
new_trx.acquired_amount = float(cb_trx["total"]["amount"])
new_trx.acquired_currency = cb_trx["total"]["currency"]
new_trx.spent_amount = float(cb_trx["amount"]["amount"])
new_trx.spent_currency = cb_trx["amount"]["currency"]
new_trx.icon = Transaction.TRX_ICON_SELL
tag = Transaction.TRX_TAG_SELL
else:
raise ValueError("Type of transaction must either be buy or sell")
if new_trx.acquired_currency == "BTC":
new_trx.book_price_btc = new_trx.acquired_amount
else:
new_trx.book_price_btc = get_name_price(new_trx.acquired_amount,
new_trx.acquired_currency,
"BTC", timestamp)
new_trx.book_price_eur = abs(float(cb_trx["total"]["amount"]))
new_trx.book_price_btc = get_name_price(new_trx.book_price_eur, "EUR",
"BTC", timestamp)
new_trx.fee_amount = new_trx.book_price_fee_eur = abs(
float(cb_trx["fees"][0]["amount"]["amount"]))
new_trx.fee_currency = cb_trx["fees"][0]["amount"]["currency"]
new_trx.book_price_fee_btc = get_name_price(new_trx.book_price_fee_eur,
"EUR", "BTC", timestamp)
new_trx.owner = account.owner
new_trx.source_peer = account
new_trx.target_peer = account
new_trx.save()
new_trx.tags.add(TAG_COINBASE, tag)
new_trx.save()
def fetch_from_cb(what_to_fetch: str, cb_client: Client,
cb_account_id: str) -> []:
"""Fetch the specified data from Coinbase
buys and sells: Merchant buyouts like FIAT -> BTC etc.
transfers: Coin transfers from Coinbase to a wallet address
Arguments:
what_to_fetch {str} -- either "buys", "sells" or "transfers"
cb_client {Client} -- coinbase client object
cb_account_id {str} -- coinbase account id to use
Returns:
[] -- a list with the APIObjects from Coinbase
"""
the_list = []
data = dict()
next_uri = ""
while next_uri != None:
if what_to_fetch == "buys":
ret = cb_client.get_buys(cb_account_id, **data)
elif what_to_fetch == "sells":
ret = cb_client.get_sells(cb_account_id, **data)
elif what_to_fetch == "transfers":
ret = cb_client.get_transactions(cb_account_id, **data)
the_list.extend(ret["data"])
next_uri = ret.pagination["next_uri"]
if next_uri != None:
data["starting_after"] = ret["data"][-1]["id"]
return the_list
def update_coinbase_trx(account: Account):
"""Synchronizes all transactions from Coinbase"""
last_update_query = TransactionUpdateHistoryEntry.objects.filter(
account=account).order_by('-date')
latest_update = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc)
if last_update_query.count():
latest_update = last_update_query[:1][0].date
client: Client = Client(account.api_key, account.api_secret)
cb_accounts = client.get_accounts()
num_imports = 0
for cb_account in cb_accounts["data"]:
if cb_account["type"] == "fiat":
continue
# Unfortunately, the coinbase API only returns buys and sells
# without the fee data when fetching through get_transactions.
# For that reason we still have to use client.get_buys() and client.get_sells()
# and cannot use the data returned from client.get_transactions()
cb_transactions = fetch_from_cb("transfers", client, cb_account["id"])
for cb_trx in cb_transactions:
if cb_trx["type"] == "send":
date = parser.parse(cb_trx["created_at"])
if date <= latest_update:
continue
timestamp = time.mktime(date.timetuple())
process_send(cb_trx, timestamp, account)
num_imports += 1
time.sleep(1) # sleep to prevent api spam
buy_sell_list = []
buy_sell_list.extend(fetch_from_cb("buys", client, cb_account["id"]))
buy_sell_list.extend(fetch_from_cb("sells", client, cb_account["id"]))
for buy_sell in buy_sell_list:
if buy_sell["resource"] == "buy" or buy_sell["resource"] == "sell":
if buy_sell["status"] != "completed":
# Skip everything not completed.
# This could be created or canceled.
continue
date = parser.parse(buy_sell["created_at"])
if date <= latest_update:
continue
timestamp = time.mktime(date.timetuple())
process_buy_sell(buy_sell, timestamp, account)
num_imports += 1
time.sleep(1) # sleep to prevent api spam
entry: TransactionUpdateHistoryEntry = TransactionUpdateHistoryEntry(
date=now(), account=account, fetched_transactions=num_imports)
entry.save()
print("Imported {} transactions".format(num_imports))
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,354
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/migrations/0005_auto_20180412_1710.py
|
# Generated by Django 2.0.3 on 2018-04-12 17:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('transactions', '0004_auto_20180408_1110'),
]
operations = [
migrations.AlterField(
model_name='transaction',
name='acquired_currency',
field=models.CharField(default='---', max_length=10),
),
migrations.AlterField(
model_name='transaction',
name='spent_currency',
field=models.CharField(default='---', max_length=10),
),
]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,355
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/tests/test_schema.py
|
import pytest
import ccxt
from mixer.backend.django import mixer
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.models import AnonymousUser
from django.test import RequestFactory
from ...test_utils.utils import mock_resolve_info
from backend.accounts.models import Account, CryptoAddress
from .. import schema
# We need to do this so that writing to the DB is possible in our tests.
pytestmark = pytest.mark.django_db
def test_account_type():
instance = schema.AccountType()
assert instance
def test_resolve_get_accounts_by_id():
mixer.blend("accounts.Account")
mixer.blend("accounts.Account")
mixer.blend("accounts.Account")
query = schema.Query()
res = query.resolve_get_account(None, **{"id": 1})
assert res.id == 1, "Should return account with id 1"
res = query.resolve_get_account(None, **{"id": 2})
assert res.id == 2, "Should return account with id 2"
with pytest.raises(ObjectDoesNotExist) as excinfo:
res = query.resolve_get_account(None, **{"id": 5})
def test_resolve_get_account_by_name():
mixer.blend("accounts.Account", name="first")
mixer.blend("accounts.Account", name="second")
mixer.blend("accounts.Account", name="third")
query = schema.Query()
res = query.resolve_get_account(None, **{"name": "first"})
assert res.name == "first", "Should return account with name \"first\""
res = query.resolve_get_account(None, **{"name": "third"})
assert res.name == "third", "Should return account with name \"third\""
with pytest.raises(ObjectDoesNotExist) as excinfo:
res = query.resolve_get_account(None, **{"name": "nonexistend"})
def test_resolve_all_accounts():
anonuser = AnonymousUser()
usera = mixer.blend("auth.User")
userb = mixer.blend("auth.User")
req = RequestFactory().get("/")
req.user = AnonymousUser()
resolveInfo = mock_resolve_info(req)
mixer.blend("accounts.Account", owner=usera)
mixer.blend("accounts.Account", owner=usera)
mixer.blend("accounts.Account", owner=userb)
mixer.blend("accounts.Account", owner=userb)
mixer.blend("accounts.Account", owner=userb)
query = schema.Query()
res = query.resolve_all_accounts(resolveInfo)
assert res.count() == 0, "User not logged in, should return 0 accounts"
req.user = usera
res = query.resolve_all_accounts(resolveInfo)
assert res.count() == 2, "User A is logged in, should return 2 accounts"
req.user = userb
res = query.resolve_all_accounts(resolveInfo)
assert res.count() == 3, "User B is logged in, should return 3 accounts"
def test_resolve_get_crypto_addresses():
# 1 Should not be able to anonymously get addresses
# 2 Should not be able to get another users addresses
# 3 Should return 0 if account does not exist
# 4 Should return 0 if no peer id is passed in
# 5 Should successfully receive addresses if conditions are met
user_a = mixer.blend("auth.User")
user_b = mixer.blend("auth.User")
account_a: Account = mixer.blend("accounts.Account", owner=user_a)
account_b: Account = mixer.blend("accounts.Account", owner=user_b)
req = RequestFactory().get("/")
req.user = AnonymousUser()
resolve_info = mock_resolve_info(req)
mixer.blend("accounts.CryptoAddress", peer=account_a)
mixer.blend("accounts.CryptoAddress", peer=account_a)
mixer.blend("accounts.CryptoAddress", peer=account_a)
mixer.blend("accounts.CryptoAddress", peer=account_b)
mixer.blend("accounts.CryptoAddress", peer=account_b)
mixer.blend("accounts.CryptoAddress", peer=account_b)
query = schema.Query()
res = query.resolve_get_crypto_addresses(resolve_info,
**{"peer_id": account_a.id})
assert res.count() == 0, "User not logged in, should return 0 addresses"
req.user = user_b
res = query.resolve_get_crypto_addresses(resolve_info,
**{"peer_id": account_a.id})
assert res.count() == 0, """
User b requests addresses for account of user a, should return no addresses"""
req.user = user_a
res = query.resolve_get_crypto_addresses(resolve_info, **{"peer_id": 15})
assert res.count(
) == 0, """Non existing peer, should return no addresses"""
res = query.resolve_get_crypto_addresses(resolve_info, **{})
assert res.count() == 0, "No peer ID passed, should return Error"
res = query.resolve_get_crypto_addresses(resolve_info,
**{"peer_id": account_a.id})
assert res.count() == 3, "Valid request should return 3 addresses"
def test_resolve_supported_services():
    """The resolver must expose a non-empty list of supported services."""
    services = schema.Query().resolve_supported_services(None)
    assert len(services) > 0, "Should return more than one service"
def test_resolve_supported_symbols():
    """Trading symbols are only resolvable for authenticated users."""
    query = schema.Query()
    req = RequestFactory().get("/")
    req.user = AnonymousUser()  # unauthenticated first
    resolveInfo = mock_resolve_info(req)
    res = query.resolve_supported_symbols(resolveInfo,
                                          **{"service": "binance"})
    assert len(res) == 0, "User not logged in, should return 0 symbols"
    req.user = mixer.blend("auth.User")
    res = query.resolve_supported_symbols(resolveInfo,
                                          **{"service": "binance"})
    assert len(res) > 0, "User logged in, should return at least one symbol"
def test_create_account_mutation():
    """CreateAccountMutation: auth required, form validated, names unique."""
    mut = schema.CreateAccountMutation()
    # Valid payload, reused for the happy path and the duplicate-name case.
    data = {
        "name": "test1",
        "service_type": "binance",
        "symbols": '["ETH/BTC", "XLM/ETH"]',
        "api_key": "ateswg",
        "api_secret": "ssdge"
    }
    req = RequestFactory().get("/")
    # AnonymousUser() is equal to a not logged in user
    req.user = AnonymousUser()
    resolveInfo = mock_resolve_info(req)
    res = mut.mutate(None, resolveInfo, data)
    assert res.status == 403, "Should return 403 if user is not logged in"
    req.user = mixer.blend("auth.User")
    res = mut.mutate(None, resolveInfo, {})  # empty input -> form errors
    assert res.status == 400, "Should return 400 if there are form errors"
    assert "account" in res.formErrors, "Should have form error for account in field"
    res = mut.mutate(None, resolveInfo, data)
    assert res.status == 200, 'Should return 200 if user is logged in and submits valid data'
    assert res.account.pk == 1, 'Should create new account'
    # Same payload again -> unique-name violation
    res = mut.mutate(None, resolveInfo, data)
    assert res.status == 422, 'Should return 422 if account with this name exists'
def test_edit_account_mutation():
    """EditAccountMutation permission and validation checks.

    1 Should not be able to edit accounts when unauthenticated (status 403)
    2 Should not be able to edit other users accounts (status 403)
    3 Should return 422 when the referenced account does not exist
    4 Should return 400 with form errors when required input is missing
    5 Should return 200 and the updated fields on a valid request
    """
    mut = schema.EditAccountMutation()
    usera = mixer.blend("auth.User")
    userb = mixer.blend("auth.User")
    name_initial = "test1"
    name_updated = "test2"
    account: Account = mixer.blend(
        "accounts.Account",
        owner=usera,
        name=name_initial,
        service_type="binance",
        symbols='["ETH/BTC", "XLM/ETH"]',
        api_key="ateswg",
        api_secret="ssdge")
    data = {
        "account_id": account.pk,
        "name": name_updated,
        "api_key": "1234",
        "api_secret": "5678"
    }
    req = RequestFactory().get("/")
    # AnonymousUser() is equal to a not logged in user
    req.user = AnonymousUser()
    resolve_info = mock_resolve_info(req)
    res = mut.mutate(None, resolve_info, data)
    # Re-fetch from the DB to prove nothing was persisted.
    account = Account.objects.get(pk=account.pk)
    assert account.name == name_initial, "Should not have edited name"
    assert res.status == 403, "Should return 403 if user is not logged in"
    req.user = userb
    res = mut.mutate(None, resolve_info, data)
    account = Account.objects.get(pk=account.pk)
    assert account.name == name_initial, "Should not have edited name"
    assert res.status == 403, "Should return 403 if user is trying to modify another users account"
    req.user = usera
    # account_id 5 was never blended -> nonexistent account
    res = mut.mutate(
        None, resolve_info, {
            "account_id": 5,
            "name": name_updated,
            "api_key": "1234",
            "api_secret": "5678"
        })
    assert res.status == 422, "Should return 422 if account does not exist"
    res = mut.mutate(None, resolve_info, {})
    account = Account.objects.get(pk=account.pk)
    assert account.name == name_initial, "Should not have edited name"
    assert res.status == 400, "Should return 400 if there are form errors"
    assert "account" in res.formErrors, "Should have form error for account in field"
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 200, 'Should return 200 if user is logged in and submits valid data'
    assert res.account.name == name_updated, 'Name should match'
    assert res.account.api_key == data["api_key"], 'API Key should match'
    assert res.account.api_secret == data[
        "api_secret"], 'API secret should match'
def test_create_crypto_address_mutation():
    """CreateCryptoAddressMutation permission and validation checks.

    1 Should not be able to to trigger mutation when unauthenticated (status 403)
    2 Should return error when account does not exist (status 404)
    3 Should return error when account does not belong to the logged in user (status 403)
    4 Should return error when coin does not exist (status 404)
    5 Should return success message and address info when address was successfully added (status 200)
    6 Default value for watch should be false
    """
    user_a = mixer.blend("auth.User")
    user_b = mixer.blend("auth.User")
    account_a: Account = mixer.blend("accounts.Account", owner=user_a)
    mixer.blend("accounts.Account", owner=user_b)
    coin_a = mixer.blend("coins.Coin")
    mut = schema.CreateCryptoAddressMutation()
    req = RequestFactory().get("/")
    req.user = AnonymousUser()  # unauthenticated first
    resolve_info = mock_resolve_info(req)
    data = {
        "account_id": 199,  # non existing account id
        "address": "addr_a",
        "coin_id": coin_a.id,
        "client_mutation_id": "test"
    }
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 403, """
    Should not be able to to trigger mutation when unauthenticated"""
    assert res.client_mutation_id == "test"
    req.user = user_a
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 404, "Should return error when account does not exist"
    assert "account_id" in res.formErrors, """
    Should return an error message containing 'account_id'"""
    assert res.client_mutation_id == "test"
    # User B tries to add an address to User A's account
    req.user = user_b
    data["account_id"] = account_a.id
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 403, """
    Should return error when account does not belong to the logged in user"""
    assert res.client_mutation_id == "test"
    req.user = user_a
    data["coin_id"] = 199  # non existing coin id
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 404, "Should return error when coin does not exist"
    assert "coin_id" in res.formErrors, """
    Should return an error message containing 'account_id'"""
    assert res.client_mutation_id == "test"
    data["coin_id"] = coin_a.id
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 200, "Should return success message when update was successfully started"
    assert res.address is not None, "Address must not be None"
    assert res.client_mutation_id == "test"
    assert not res.address.watch, "Default watch should be False"
    data["watch"] = True
    res = mut.mutate(None, resolve_info, data)
    assert res.address.watch, "Watch should be True"
def test_edit_crypto_address_mutation():
    """EditCryptoAddressMutation permission and validation checks.

    1 Should not be able to to trigger mutation when unauthenticated (status 403)
    2 Should return error when address does not exist (status 404)
    3 Should return error when address does not belong to the logged in user (status 403)
    4 Should return error when coin does not exist (status 404)
    5 Should return success message and address info when address was
      successfully edited (status 200)
    6 Default value for watch should be false
    """
    user_a = mixer.blend("auth.User")
    user_b = mixer.blend("auth.User")
    account_a: Account = mixer.blend("accounts.Account", owner=user_a)
    mixer.blend("accounts.Account", owner=user_b)
    coin_a = mixer.blend("coins.Coin")
    coin_b = mixer.blend("coins.Coin")
    # The address starts on coin_b; a successful edit must move it to coin_a.
    crypto_address_a: CryptoAddress = mixer.blend(
        "accounts.CryptoAddress", peer=account_a, coin_id=coin_b.id)
    mut = schema.EditCryptoAddressMutation()
    req = RequestFactory().get("/")
    req.user = AnonymousUser()  # unauthenticated first
    resolve_info = mock_resolve_info(req)
    data = {
        "id": 199,  # non existing address
        "address": "changed_addr",
        "coin_id": coin_a.id,
        "client_mutation_id": "test",
        "watch": True
    }
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 403, """
    Should not be able to to trigger mutation when unauthenticated"""
    assert res.client_mutation_id == "test"
    req.user = user_a
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 404, "Should return error when address object does not exist"
    assert "id" in res.formErrors, """
    Should return an error message containing 'id'"""
    assert res.client_mutation_id == "test"
    # User B tries to edit an address of User A
    req.user = user_b
    data["id"] = crypto_address_a.id
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 403, """
    Should return error when address does not belong to the logged in user"""
    assert res.client_mutation_id == "test"
    req.user = user_a
    data["coin_id"] = 199  # non existing coin id
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 404, "Should return error when coin does not exist"
    assert "coin_id" in res.formErrors, """
    Should return an error message containing 'account_id'"""
    assert res.client_mutation_id == "test"
    data["coin_id"] = coin_a.id
    res = mut.mutate(None, resolve_info, data)
    assert res.status == 200, "Should return success message when update was successfully started"
    assert res.address.address == "changed_addr", "Address must be 'changed_addr'"
    assert res.client_mutation_id == "test"
    assert res.address.coin.id == coin_a.id, "Coin should be Coin A now"
    assert res.address.watch, "Watch should be True"
def test_refresh_transactions_mutation(monkeypatch):
    """AccountRefreshTransactionsMutation auth and input validation.

    1 Should not be able to to trigger mutation when unauthenticated (status 403)
    2 Should not be able to update other users accounts (status 403)
    3 Should return error message when no id or wrong data type was supplied (status 400)
    4 Should return success message when update was successfuly started (status 200)
    """
    usera = mixer.blend("auth.User")
    userb = mixer.blend("auth.User")
    mixer.blend("accounts.Account", owner=usera)  # id 1
    mixer.blend("accounts.Account", owner=userb)  # id 2
    mut = schema.AccountRefreshTransactionsMutation()
    req = RequestFactory().get("/")
    req.user = AnonymousUser()
    resolveInfo = mock_resolve_info(req)
    data = {"account_id": "1"}  # targets usera's account
    res = mut.mutate(None, resolveInfo, data)
    assert res.status == 403, 'Should not be able to to trigger mutation when unauthenticated (status 403)'
    req.user = userb
    res = mut.mutate(None, resolveInfo, data)
    assert res.status == 403, 'Should not be able to update other users accounts (status 403)'
    res = mut.mutate(None, resolveInfo, {})
    assert res.status == 400, 'Should return error status when supplied no input at all'
    data = {"account_id": "a"}  # non-numeric id
    res = mut.mutate(None, resolveInfo, data)
    assert res.status == 400, 'Should return error status when supplied incorrect input'
    data = {"account_id": "-1"}  # negative id
    res = mut.mutate(None, resolveInfo, data)
    assert res.status == 400, 'Should return error status when supplied incorrect input'
    # TODO: Find reason why this won't work
    #
    ## This prints True:
    # print(
    #     hasattr(backend.transactions.fetchers.generic_exchange,
    #             "update_exchange_trx_generic"))
    #
    ## but the Lambda is never used
    #monkeypatch.setattr(backend.transactions.fetchers.generic_exchange,
    #                    "update_exchange_trx_generic",
    #                    new_update_exchange_trx_generic)
    #req.user = usera
    #res = mut.mutate(None, resolveInfo, data)
    #assert res.status == 200, 'Should return success message when update was successfuly started (status 200)'
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,356
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/utils/tests/test_utils.py
|
"""Contains all tests for the utility functions"""
import pytest
from _pytest.monkeypatch import MonkeyPatch
import cryptocompare
from ..utils import exchange_can_batch, get_name_price
def test_exchange_can_batch():
    """Only some exchanges support batched fetching.

    Per the fixture data: binance does not support batching, cryptopia does.
    Uses plain truthiness asserts instead of the ``== False`` / ``== True``
    anti-idiom.
    """
    assert not exchange_can_batch("binance")
    assert exchange_can_batch("cryptopia")


pytestmark = pytest.mark.django_db
def new_get_historical_price(base, target, timestamp):
    """
    Replaces calls to cryptocomapere.get_historical_price function
    Names      | Date       | timestamp  |Rate
    -----------|------------|----------- |-----------
    BTC -> ETH | 2017-12-11 | 1512950400 | 32.91
    BTC -> EUR | 2017-12-11 | 1512950400 | 13006.11
    BTC -> EUR | 2018-01-05 | 1514764800 | 12268.25
    XLM -> BTC | 2018-01-02 | 1509753600 | 0.00003136
    LTC -> EUR | 2017-11-07 | 1515110400 | 48.52
    BNB -> BTC | 2017-12-28 | 1514419200 | 0.0006253
    """
    # Table of known fixtures, keyed by the full request triple.
    rates = {
        ("BTC", "ETH", 1512950400): 32.91,
        ("BTC", "EUR", 1512950400): 13006.11,
        ("BTC", "EUR", 1514764800): 12268.25,
        ("XLM", "BTC", 1509753600): 0.00003136,
        ("LTC", "EUR", 1515110400): 48.52,
        ("BNB", "BTC", 1514419200): 0.0006253,
    }
    rate = rates.get((base, target, timestamp))
    if rate is None:
        return {}  # fail since there is no data for this request
    return {base: {target: rate}}
def test_name_converter(monkeypatch: MonkeyPatch):
    """
    Tests the conversion of one name to another at a specific date
    Amount   | To  | Date       | Result
    ---------|-----|------------|---------------
    5 BTC    | ETH | 2017-12-11 | 164.55 ETH
    1 BTC    | EUR | 2017-12-11 | 13006.11 EUR
    0.1 BTC  | EUR | 2018-01-05 | 1226.825 EUR
    1500 XLM | BTC | 2018-01-02 | 0.04704 BTC
    5 LTC    | EUR | 2017-11-07 | 242.6 EUR
    300 BNB  | BTC | 2017-12-28 | 0.18759 BTC
    """
    # Replace the network-bound cryptocompare call with the local fixture.
    monkeypatch.setattr(cryptocompare, "get_historical_price",
                        new_get_historical_price)
    # Results are rounded because get_name_price multiplies floats.
    result = get_name_price(5, "BTC", "ETH", 1512950400)
    assert round(result, 2) == 164.55
    result = get_name_price(1, "BTC", "EUR", 1512950400)
    assert round(result, 2) == 13006.11
    result = get_name_price(0.1, "BTC", "EUR", 1514764800)
    assert round(result, 3) == 1226.825
    result = get_name_price(1500, "XLM", "BTC", 1509753600)
    assert round(result, 5) == 0.04704
    result = get_name_price(5, "LTC", "EUR", 1515110400)
    assert round(result, 1) == 242.6
    result = get_name_price(300, "BNB", "BTC", 1514419200)
    assert round(result, 6) == 0.18759
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,357
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/admin.py
|
from django.contrib import admin
from backend.accounts.models import CryptoAddress, Peer, Account
# Expose every account-related model in the Django admin site.
for _model in (CryptoAddress, Peer, Account):
    admin.site.register(_model)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,358
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/tests/test_importer_livecoin.py
|
"""Contains all tests for the generic exchange fetcher"""
import random
import pytest
from _pytest.monkeypatch import MonkeyPatch
from mixer.backend.django import mixer
from faker import Faker
import cryptocompare
import backend.transactions.schema as schema
from backend.accounts.models import Account
from backend.transactions.importers.livecoin import import_data_livecoin
pytestmark = pytest.mark.django_db
def make_fake_transaction_data(date=None,
                               transaction_type=None,
                               transaction_type_raw=None,
                               spent_currency=None,
                               spent_amount=None,
                               source_peer=None,
                               acquired_currency=None,
                               acquired_amount=None,
                               target_peer=None,
                               fee_currency=None,
                               fee_amount=None,
                               tags=None):
    """Generate a fake transaction data input. Mixer is unable to blend these.

    Every argument left as ``None`` is filled with a random/fake value.
    Arguments are compared against ``None`` explicitly so that falsy but
    valid values (e.g. ``spent_amount=0``, passed by the livecoin import
    test) are preserved -- the previous ``value or default`` idiom silently
    replaced them with random data.
    """
    # pylint: disable=E1101
    fake = Faker()
    transaction_data = schema.TransactionData()
    # NOTE(review): "%H:%m:%S" repeats the month directive where minutes
    # ("%M") look intended; kept as-is -- confirm against the importer's
    # expected date format before changing.
    transaction_data.date = date if date is not None else fake.date_time_between(
        start_date="-30y", end_date="now",
        tzinfo=None).strftime("%d.%m.%Y %H:%m:%S")
    transaction_data.transaction_type = (
        transaction_type if transaction_type is not None else "exchange")
    transaction_data.transaction_type_raw = (
        transaction_type_raw if transaction_type_raw is not None else "Buy")
    transaction_data.spent_currency = (
        spent_currency
        if spent_currency is not None else fake.cryptocurrency_code())
    transaction_data.spent_amount = (
        spent_amount if spent_amount is not None else random.uniform(1, 20))
    transaction_data.source_peer = source_peer if source_peer is not None else 1
    transaction_data.acquired_currency = (
        acquired_currency
        if acquired_currency is not None else fake.cryptocurrency_code())
    transaction_data.acquired_amount = (
        acquired_amount
        if acquired_amount is not None else random.uniform(0.001, 10))
    transaction_data.target_peer = target_peer if target_peer is not None else 1
    # Fee defaults to the (possibly generated) spent currency.
    transaction_data.fee_currency = (
        fee_currency
        if fee_currency is not None else transaction_data.spent_currency)
    transaction_data.fee_amount = (
        fee_amount
        if fee_amount is not None else random.uniform(0.000001, 0.001))
    transaction_data.tags = tags if tags is not None else ["tag1", "tag2"]
    return transaction_data
def new_get_historical_price(base, target, date):
    """Fake crypto compare API"""
    # Fixed conversion rate; the date argument is deliberately ignored.
    fixed_rate = 10
    return {base: {target: fixed_rate}}
def test_import_csv_livecoin(monkeypatch: MonkeyPatch):
    """End-to-end import of fake livecoin CSV rows.

    Eight rows go in; the importer returns seven (the "Deposit" row is
    expected to be skipped).
    """
    user = mixer.blend("auth.User")
    # NOTE(review): `livecoin` is unused below, but the blend's side effect
    # (a persisted livecoin Account for `user`) is presumably required by
    # the importer -- confirm before removing.
    livecoin: Account = mixer.blend(
        "accounts.Account", owner=user, service_type="livecoin")
    # Replace the network-bound cryptocompare call with the local fixture.
    monkeypatch.setattr(cryptocompare, "get_historical_price",
                        new_get_historical_price)
    data = schema.ImportTransactionInput()
    data.service_type = "livecoin"
    data.import_mechanism = "csv"
    data.transactions = [
        make_fake_transaction_data(),
        make_fake_transaction_data(),
        make_fake_transaction_data(
            transaction_type_raw="Deposit"),  # should be skipped
        make_fake_transaction_data(
            acquired_amount=10, acquired_currency="ETH"),
        make_fake_transaction_data(transaction_type="income"),
        make_fake_transaction_data(transaction_type="transfer"),
        make_fake_transaction_data(
            spent_amount=0,
            spent_currency="h",
            acquired_amount=0.01,
            acquired_currency="BTC",
            transaction_type="income"),
        make_fake_transaction_data(transaction_type="unkown type")
    ]
    res = import_data_livecoin(data, user)
    assert len(res) == 7
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,359
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/schema.py
|
'''Contains all Graphql code for the coins application'''
import graphene
import celery
from graphene_django.types import DjangoObjectType
from backend.coins.models import Coin
from backend.coins.tasks import async_update_supported_coins
class CoinType(DjangoObjectType):
    '''The coin GraphQL type'''

    class Meta:
        '''The connection between the type and the model'''
        model = Coin
class Query(object):
    '''Get all coins where user has access rights'''
    all_coins = graphene.List(CoinType)

    def resolve_all_coins(self, info):
        '''Return every Coin for authenticated users, an empty queryset otherwise.'''
        user = info.context.user
        if user.is_authenticated:
            return Coin.objects.all()
        return Coin.objects.none()
class CoinRefreshTransactionsMutation(graphene.relay.ClientIDMutation):
    '''GraphQL Mutation for refreshing supported coins'''
    status = graphene.Int()  # HTTP-like status code of the operation
    formErrors = graphene.String()
    msg = graphene.String()

    @classmethod
    def mutate(cls, root, info, input) -> "CoinRefreshTransactionsMutation":
        '''Runs the celery background task to update the coins.

        Returns status 403 for non-superusers, 202 when the task is already
        running, 500 when dispatching the task raises OperationalError, and
        200 when the task was started.
        '''
        # .get() instead of input['client_mutation_id']: a payload without
        # the key must not crash the mutation with a KeyError.
        mutation_id = input.get('client_mutation_id')
        if not info.context.user.is_superuser:
            return CoinRefreshTransactionsMutation(
                status=403, client_mutation_id=mutation_id)
        # Guard against a concurrent run: the task id is fixed, so a
        # RUNNING AsyncResult means an update is already in flight.
        if hasattr(celery, "result") and celery.result.AsyncResult(
                "task_update_coins").status == "RUNNING":
            print("skipping task")
            return CoinRefreshTransactionsMutation(
                msg="Task is already running",
                status=202,
                client_mutation_id=mutation_id)
        try:
            print("starting task")
            async_update_supported_coins.apply_async(
                task_id="task_update_coins")
        except async_update_supported_coins.OperationalError as err:
            # Broker connection problems surface as OperationalError.
            print("Sending task raised: %r", err)
            return CoinRefreshTransactionsMutation(
                status=500, client_mutation_id=mutation_id)
        return CoinRefreshTransactionsMutation(
            msg="Working",
            status=200,
            client_mutation_id=mutation_id)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,360
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/tests/test_models.py
|
'''Contains all model tests for this application'''
import pytest
from mixer.backend.django import mixer
from backend.coins.models import Coin
pytestmark = pytest.mark.django_db
def test_coin_creation():
    """A blended Coin must be persisted and receive a primary key."""
    coin = mixer.blend("coins.Coin")
    assert coin.pk > 0, "Should create a Coin instance"
def test_coin_str_func():
    """str(Coin) must render as the coin's full name."""
    expected = "BTC - Bitcoin"
    coin: Coin = mixer.blend(
        "coins.Coin", cc_id=50, symbol="BTC", full_name="Bitcoin")
    assert str(coin) == expected, "Should be the coins's name"
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,361
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/migrations/0002_auto_20180325_1753.py
|
# Generated by Django 2.0.3 on 2018-04-01 17:15
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
# Auto-generated schema migration.  It replaces Transaction's direct
# account FKs (source_account/target_account) with Peer FKs, adds taggit
# tagging support, and gives the fee/book-price columns explicit defaults.
# Do not hand-edit field definitions here; generate a follow-up migration
# instead.
class Migration(migrations.Migration):

    dependencies = [
        ('taggit', '0002_auto_20150616_2121'),
        ('accounts', '0003_peer_class_type'),
        ('transactions', '0001_initial'),
    ]

    operations = [
        # Newest transactions first by default.
        migrations.AlterModelOptions(
            name='transaction',
            options={'ordering': ('-date',)},
        ),
        # Account FKs are superseded by the Peer FKs added below.
        migrations.RemoveField(
            model_name='transaction',
            name='source_account',
        ),
        migrations.RemoveField(
            model_name='transaction',
            name='target_account',
        ),
        # default=1 points existing rows at the Peer with pk=1 -- assumes
        # such a row exists when this migration runs (TODO confirm).
        migrations.AddField(
            model_name='transaction',
            name='source_peer',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.PROTECT, related_name='source_peer', to='accounts.Peer'),
        ),
        migrations.AddField(
            model_name='transaction',
            name='tags',
            field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'),
        ),
        migrations.AddField(
            model_name='transaction',
            name='target_peer',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.PROTECT, related_name='target_peer', to='accounts.Peer'),
        ),
        # Fee and book-price columns become non-null with explicit defaults.
        migrations.AlterField(
            model_name='transaction',
            name='book_price_fee_btc',
            field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
        ),
        migrations.AlterField(
            model_name='transaction',
            name='book_price_fee_eur',
            field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
        ),
        migrations.AlterField(
            model_name='transaction',
            name='fee_amount',
            field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
        ),
        migrations.AlterField(
            model_name='transaction',
            name='fee_currency',
            field=models.CharField(default='---', max_length=10),
        ),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,362
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/coins/tasks.py
|
"""Contains all async tasks necessary for Accounts"""
from __future__ import absolute_import, unicode_literals
import cryptocompare
from backend.celery import app
from backend.coins.models import Coin
@app.task(bind=True)
def async_update_supported_coins(self):
    """Celery task: sync the local Coin table with cryptocompare's coin list.

    Creates a Coin row for every unseen coin id and refreshes the metadata
    of existing rows.  Progress is reported through the task state
    ('RUNNING' with a percentage in meta, then 'SUCCESS').
    """
    self.update_state(state='RUNNING', meta={'current': 0, 'total': 100})
    coins_list = cryptocompare.get_coin_list(False)
    new_coins = 0
    updated = 0
    length = len(coins_list)
    print_counter = 0
    # Iterate key/value pairs directly instead of re-looking up each key.
    for idx, (_key, item) in enumerate(coins_list.items()):
        try:
            _id = int(item.get("Id"))
        # BUG FIX: a missing "Id" key makes item.get() return None and
        # int(None) raises TypeError, not ValueError; catch both.
        except (TypeError, ValueError):
            continue
        try:
            coin: Coin = Coin.objects.get(cc_id=_id)
            updated += 1
        except Coin.DoesNotExist:
            coin = Coin()
            coin.cc_id = _id
            new_coins += 1
        coin.img_url = item.get('ImageUrl', '')
        coin.name = item.get('Name', '')
        coin.symbol = item.get('Symbol', '')
        coin.coin_name = item.get('CoinName', '')
        coin.full_name = item.get('FullName', '')
        coin.save()
        percent_done = int((idx + 1) / length * 100)
        self.update_state(
            state='RUNNING', meta={
                'current': percent_done,
                'total': 100
            })
        print_counter += 1
        # BUG FIX: original compared with "is 30" (object identity); that
        # relies on CPython's small-int caching and is not equality.
        if print_counter == 30:
            print("Status: {}%".format(percent_done))
            print_counter = 0
    print("new: {} updated: {}".format(new_coins, updated))
    self.update_state(state='SUCCESS', meta={'current': 100, 'total': 100})
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,363
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/tests/test_models.py
|
import pytest
from django.utils.timezone import now
from mixer.backend.django import mixer
from django.core.exceptions import ObjectDoesNotExist
from backend.accounts.models import Account
from backend.transactions.models import Transaction
from backend.transactions.models import TransactionUpdateHistoryEntry
from backend.test_utils.utils import gen_fake_transaction
# We need to do this so that writing to the DB is possible in our tests.
pytestmark = pytest.mark.django_db
# Great introduction to TDD with Python + Django:
# https://www.youtube.com/watch?v=41ek3VNx_6Q
def test_transaction_creation():
    """A generated fake transaction must be saved and get a primary key."""
    trx = gen_fake_transaction()
    assert trx.pk > 0, "Should create an Transaction instance"
def test_transaction_str_func():
    """str(Transaction) must combine spent, acquired and EUR book price."""
    expected = "50.0 BTC => 150.01 ETH ==> 300.0 EUR"
    trx = gen_fake_transaction(
        spent_amount=50.0000,
        spent_currency="BTC",
        acquired_amount=150.0100,
        acquired_currency="ETH",
        book_price_eur=300)
    assert str(trx) == expected, "Should be the transaction's name"
def test_transaction_history_entry_creation():
    """A blended TransactionUpdateHistoryEntry must get a primary key."""
    acc: Account = mixer.blend("accounts.Account")
    entry = mixer.blend(
        "transactions.TransactionUpdateHistoryEntry", account=acc)
    assert entry.pk > 0, "Should create an Transaction instance"
def test_transaction_history_entry_str_func():
    """str(entry) must render as "<id> <date> <fetched count>"."""
    account: Account = mixer.blend("accounts.Account")
    first_date = now()
    second_date = now()
    first: TransactionUpdateHistoryEntry = mixer.blend(
        "transactions.TransactionUpdateHistoryEntry",
        date=first_date,
        account=account,
        fetched_transactions=3)
    second: TransactionUpdateHistoryEntry = mixer.blend(
        "transactions.TransactionUpdateHistoryEntry",
        date=second_date,
        account=account,
        fetched_transactions=6)
    # NOTE(review): the hard-coded leading "1" looks like it is the
    # account's pk (first row in a fresh test DB) -- confirm against
    # TransactionUpdateHistoryEntry.__str__.
    assert str(first) == "{} {} {}".format(1, first_date, 3)
    assert str(second) == "{} {} {}".format(1, second_date, 6)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,364
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/tasks.py
|
"""Contains all async tasks necessary for Accounts"""
from __future__ import absolute_import, unicode_literals
from backend.celery import app
from backend.accounts.models import Account
from backend.transactions.fetchers.generic_exchange import update_exchange_trx_generic
from backend.transactions.fetchers.coinbase import update_coinbase_trx
@app.task(bind=True)
def async_update_account_trx(self, account_id):
    """Celery task: fetch and store new transactions for one Account.

    Coinbase accounts use the dedicated Coinbase fetcher; every other
    service type goes through the generic exchange fetcher.
    """
    account: Account = Account.objects.get(pk=account_id)
    print("Starting task update transactions for account: ", account.name)
    self.update_state(state='RUNNING', meta={'current': 0, 'total': 3})
    fetcher = (update_coinbase_trx
               if account.service_type == "coinbase"
               else update_exchange_trx_generic)
    fetcher(account)
    self.update_state(state='SUCCESS', meta={'current': 3, 'total': 3})
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,365
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/test_utils/utils.py
|
import pytest
import random
from faker import Faker
from mixer.backend.django import mixer
from django.utils import timezone
from graphql.execution.base import ResolveInfo
from backend.transactions.models import Transaction
pytestmark = pytest.mark.django_db
def mock_resolve_info(req) -> ResolveInfo:
    """Build a minimal graphql ResolveInfo for tests: every positional
    field is None except the last argument, which carries *req* (the
    request used as the resolver context -- order follows graphql-core's
    ResolveInfo constructor, do not reorder)."""
    return ResolveInfo(None, None, None, None, None, None, None, None, None,
                       req)
def gen_fake_transaction(owner=None,
                         date=None,
                         spent_currency=None,
                         spent_amount=None,
                         source_peer=None,
                         acquired_currency=None,
                         acquired_amount=None,
                         target_peer=None,
                         fee_currency=None,
                         fee_amount=None,
                         book_price_btc=None,
                         book_price_eur=None,
                         tags=None) -> Transaction:
    """Generate and save a fake Transaction. Mixer cannot handle this class.

    Any argument left as None is filled with random fake data.

    BUG FIX: defaults are chosen with explicit "is None" checks instead of
    the previous "value or default" pattern, which silently replaced
    explicitly passed falsy values (0, 0.0, "") with random data -- e.g. a
    deliberate zero fee_amount was randomized.
    """
    fake = Faker()

    def _pick(value, factory):
        """Return value unless it is None, else build the default."""
        return factory() if value is None else value

    transaction = Transaction()
    transaction.owner = _pick(owner, lambda: mixer.blend("auth.User"))
    transaction.date = _pick(
        date, lambda: timezone.make_aware(
            fake.date_time_between(
                start_date="-30y", end_date="now", tzinfo=None)))
    transaction.spent_currency = _pick(spent_currency,
                                       fake.cryptocurrency_code)
    transaction.spent_amount = _pick(spent_amount,
                                     lambda: random.uniform(1, 20))
    transaction.source_peer = _pick(source_peer,
                                    lambda: mixer.blend("accounts.Peer"))
    transaction.acquired_currency = _pick(acquired_currency,
                                          fake.cryptocurrency_code)
    transaction.acquired_amount = _pick(acquired_amount,
                                        lambda: random.uniform(1, 20))
    transaction.target_peer = _pick(target_peer,
                                    lambda: mixer.blend("accounts.Peer"))
    transaction.fee_currency = _pick(fee_currency, fake.cryptocurrency_code)
    transaction.fee_amount = _pick(fee_amount, lambda: random.uniform(0, 1))
    transaction.book_price_btc = _pick(book_price_btc,
                                       lambda: random.uniform(0, 20))
    transaction.book_price_eur = _pick(book_price_eur,
                                       lambda: random.uniform(0, 50))
    transaction.save()
    if tags:
        # Tags can only be attached once the row has a primary key.
        for tag in tags:
            transaction.tags.add(tag)
        transaction.save()
    return transaction
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,366
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/tests/test_fetcher_coinbase.py
|
from datetime import datetime, timedelta
import json
import pytest
from django.utils.timezone import now
from _pytest.monkeypatch import MonkeyPatch
from mixer.backend.django import mixer
import coinbase
import cryptocompare
from backend.accounts.models import Account
from backend.transactions.models import Transaction
from ..fetchers.coinbase import update_coinbase_trx
pytestmark = pytest.mark.django_db
class MockAPIObject(dict):
    """Mock of coinbase's APIObject: a dict holding response rows under
    the "data" key, plus a read-only pagination attribute."""

    def __init__(self, pagination=None, data=None):
        super(MockAPIObject, self).__init__()
        # Falsy arguments fall back to defaults, mirroring the real API's
        # "no more pages / no rows" shape.
        self.__pagination = pagination or {"next_uri": None}
        self["data"] = data or []

    @property
    def pagination(self):
        """Return the pagination data"""
        return self.__pagination
def new_get_accounts(self):
    """Fake coinbase get accounts for user"""
    # One fiat account plus two crypto wallets, matching the ids the
    # other fakes in this module key their responses on.
    accounts = [
        {"id": "fiat_id", "type": "fiat"},
        {"id": "wallet_id_btc", "type": "wallet"},
        {"id": "wallet_id_ltc", "type": "wallet"},
    ]
    return MockAPIObject(data=accounts)
def new_get_buys(self, cb_account_id):
    """Fake get buys for account.

    Returns canned coinbase "buy" records keyed on the wallet id: four
    entries for the BTC wallet (two of which the importer must skip),
    two for the LTC wallet, and an empty result for anything else.
    """
    if cb_account_id == "wallet_id_btc":
        return MockAPIObject(data=[
            {
                "created_at": "2017-12-27T15:16:22Z",
                "resource": "buy",
                "status": "completed",
                "amount": {
                    "amount": 0.04,
                    "currency": "BTC"
                },
                "total": {
                    "amount": 300,
                    "currency": "EUR"
                },
                "fees": [{
                    "amount": {
                        "amount": 4.4,
                        "currency": "EUR"
                    }
                }]
            },
            {
                "created_at": "2017-12-27T15:16:22Z",
                "resource": "buy",
                # should be skipped since it was canceled
                "status": "canceled"
            },
            {
                "created_at": "2018-01-28T13:11:35Z",
                "resource": "buy",
                "status": "completed",
                "amount": {
                    "amount": 0.05,
                    "currency": "BTC"
                },
                "total": {
                    "amount": 350,
                    "currency": "EUR"
                },
                "fees": [{
                    "amount": {
                        "amount": 4.50,
                        "currency": "EUR"
                    }
                }]
            },
            {
                "created_at": "2018-01-28T13:11:35Z",
                # should be skipped and not end up in the database (neither sell nor buy)
                # and it's status is canceled
                "resource": "should be skipped",
                "status": "canceled",
                "amount": {
                    "amount": 0.05,
                    "currency": "BTC"
                },
                "total": {
                    "amount": 350,
                    "currency": "EUR"
                },
                "fees": [{
                    "amount": {
                        "amount": 4.50,
                        "currency": "EUR"
                    }
                }]
            }
        ])
    elif cb_account_id == "wallet_id_ltc":
        # Two completed LTC buys; both should be imported.
        return MockAPIObject(
            data=[{
                "created_at": "2018-01-22T12:26:35Z",
                "resource": "buy",
                "status": "completed",
                "amount": {
                    "amount": 2.2,
                    "currency": "LTC"
                },
                "total": {
                    "amount": 260,
                    "currency": "EUR"
                },
                "fees": [{
                    "amount": {
                        "amount": 5,
                        "currency": "EUR"
                    }
                }]
            }, {
                "created_at": "2018-01-22T11:04:01Z",
                "resource": "buy",
                "status": "completed",
                "amount": {
                    "amount": 1.4,
                    "currency": "LTC"
                },
                "total": {
                    "amount": 100,
                    "currency": "EUR"
                },
                "fees": [{
                    "amount": {
                        "amount": 3,
                        "currency": "EUR"
                    }
                }]
            }])
    else:
        # Unknown wallet (e.g. the fiat account) -> empty result.
        return MockAPIObject()
def new_get_sells(self, cb_account_id):
    """Fake get sells for account.

    Returns one canned completed "sell" record per known wallet id and
    an empty result for anything else.
    """
    if cb_account_id == "wallet_id_btc":
        return MockAPIObject(
            data=[{
                "created_at": "2018-01-25T11:24:52Z",
                "resource": "sell",
                "status": "completed",
                "amount": {
                    "amount": 0.06,
                    "currency": "BTC"
                },
                "total": {
                    "amount": 800,
                    "currency": "EUR"
                },
                "fees": [{
                    "amount": {
                        "amount": 7,
                        "currency": "EUR"
                    }
                }]
            }])
    elif cb_account_id == "wallet_id_ltc":
        return MockAPIObject(
            data=[{
                "created_at": "2018-01-23T07:23:54Z",
                "resource": "sell",
                "status": "completed",
                "amount": {
                    "amount": 0.3,
                    "currency": "LTC"
                },
                "total": {
                    "amount": 80,
                    "currency": "EUR"
                },
                "fees": [{
                    "amount": {
                        "amount": 2,
                        "currency": "EUR"
                    }
                }]
            }])
    else:
        # Unknown wallet (e.g. the fiat account) -> empty result.
        return MockAPIObject()
def new_get_transactions(self, cb_account_id):
    """Fake get transactions for account.

    Returns canned coinbase "send" transactions for the LTC wallet only:
    two confirmed on-chain sends and one off-blockchain transfer.  Every
    other wallet id yields an empty result.
    """
    if cb_account_id == "wallet_id_ltc":
        return MockAPIObject(data=[{
            "id": "12234-6666-8888-0000-1111111111",
            "type": "send",
            "status": "completed",
            "amount": {
                "amount": "-0.2",
                "currency": "LTC"
            },
            "native_amount": {
                "amount": "-46.00",
                "currency": "EUR"
            },
            "description": None,
            "created_at": "2017-12-15T15:00:00Z",
            "updated_at": "2017-12-15T15:00:00Z",
            "resource": "transaction",
            "network": {
                "status": "confirmed",
                "hash": "123456789",
                "transaction_fee": {
                    "amount": "0.001",
                    "currency": "LTC"
                },
                "transaction_amount": {
                    "amount": "0.199",
                    "currency": "LTC"
                },
                "confirmations": 54000
            },
            "to": {
                "resource": "litecoin_address",
                "address": "LcnAddress1",
                "currency": "LTC"
            },
            "details": {
                "title": "Sent Litecoin",
                "subtitle": "To Litecoin address"
            }
        },
        {
            "id": "aaaaaaaaa-aaaa-aaaaaa-eeee-aaaaaa",
            "type": "send",
            "status": "completed",
            "amount": {
                "amount": "-0.4",
                "currency": "LTC"
            },
            "native_amount": {
                "amount": "-90.00",
                "currency": "EUR"
            },
            "description": None,
            "created_at": "2017-12-11T19:00:00Z",
            "updated_at": "2017-12-11T19:00:00Z",
            "resource": "transaction",
            "instant_exchange": False,
            "network": {
                "status": "confirmed",
                "hash": "123456789",
                "transaction_fee": {
                    "amount": "0.001",
                    "currency": "LTC"
                },
                "transaction_amount": {
                    "amount": "0.399",
                    "currency": "LTC"
                },
                "confirmations": 15387
            },
            "to": {
                "resource": "litecoin_address",
                "address": "LcnAddress2",
                "currency": "LTC"
            },
            "details": {
                "title": "Sent Litecoin",
                "subtitle": "To Litecoin address"
            }
        },
        {
            # Off-blockchain transfer: positive amount, no confirmations,
            # minimal network block.
            "id": "aaaaaaaaa-aaaa-aaaaaa-eeee-aaaaaa",
            "type": "send",
            "status": "completed",
            "amount": {
                "amount": "1.0",
                "currency": "LTC"
            },
            "native_amount": {
                "amount": "90.00",
                "currency": "EUR"
            },
            "description": None,
            "created_at": "2017-12-11T19:00:00Z",
            "updated_at": "2017-12-11T19:00:00Z",
            "resource": "transaction",
            "instant_exchange": False,
            "network": {
                "status": "off_blockchain",
            },
        }])
    else:
        # Unknown wallet -> empty result.
        return MockAPIObject()
def new_get_historical_price(base, target, date):
    """Fake crypto compare API: fixed rates for the pairs the tests use.

    Unknown (base, target) pairs yield None, like the original fall-through.
    The date argument is accepted but ignored.
    """
    rates = {
        ("BTC", "EUR"): {"BTC": {"EUR": 10000}},
        ("EUR", "BTC"): {"EUR": {"BTC": 0.00012}},
        ("LTC", "BTC"): {"LTC": {"BTC": 0.02}},
        ("LTC", "EUR"): {"LTC": {"EUR": 250}},
    }
    return rates.get((base, target))
def test_refresh_coinbase_trx(monkeypatch: MonkeyPatch):
    """Import coinbase transactions via update_coinbase_trx.

    Every coinbase client call plus the cryptocompare price lookup is
    monkeypatched with the module-level fakes, so no network I/O happens.
    """
    user = mixer.blend("auth.User")
    account: Account = mixer.blend(
        "accounts.Account", owner=user, service_type="coinbase", api_key="123", api_secret="456")
    monkeypatch.setattr(cryptocompare, "get_historical_price",
                        new_get_historical_price)
    monkeypatch.setattr(coinbase.wallet.client.Client, "get_accounts",
                        new_get_accounts)
    monkeypatch.setattr(coinbase.wallet.client.Client, "get_transactions",
                        new_get_transactions)
    monkeypatch.setattr(coinbase.wallet.client.Client, "get_buys",
                        new_get_buys)
    monkeypatch.setattr(coinbase.wallet.client.Client, "get_sells",
                        new_get_sells)
    update_coinbase_trx(account)
    transaction = Transaction.objects.filter(target_peer=account)
    # fix: assert message previously misspelled "transations"
    assert transaction.count() == 9, "Should import nine transactions"
def new_get_buys_transaction_history(self, cb_account):
    """Fake coinbase ``get_buys``: two completed BTC buys for the BTC wallet.

    Any wallet other than "wallet_id_btc" gets an empty response.
    """
    base_date: datetime = now()

    def make_buy(day_offset, btc_amount, fee_eur):
        # One canned "buy" entry shaped like the coinbase API response.
        return {
            "created_at": str(base_date + timedelta(days=day_offset)),
            "resource": "buy",
            "status": "completed",
            "amount": {
                "amount": btc_amount,
                "currency": "BTC"
            },
            "total": {
                "amount": btc_amount,
                "currency": "BTC"
            },
            "fees": [{
                "amount": {
                    "amount": fee_eur,
                    "currency": "EUR"
                }
            }]
        }

    if cb_account != "wallet_id_btc":
        return MockAPIObject()
    # One buy a day in the past, one a day in the future.
    return MockAPIObject(data=[make_buy(-1, 10, 1), make_buy(1, 5, 0.5)])
def test_update_trx_coinbase_transaction_history(monkeypatch: MonkeyPatch):
    """The importer must skip buys that predate the last update entry."""
    owner = mixer.blend("auth.User")
    account: Account = mixer.blend(
        "accounts.Account", owner=owner, service_type="coinbase", api_key="123", api_secret="456")
    last_update: datetime = now()
    monkeypatch.setattr(cryptocompare, "get_historical_price",
                        new_get_historical_price)
    # Replace every coinbase client call with a deterministic fake.
    client_fakes = {
        "get_accounts": new_get_accounts,
        "get_transactions": new_get_transactions,
        "get_buys": new_get_buys_transaction_history,
        "get_sells": lambda self, cb_account: MockAPIObject(),
    }
    for method_name, fake in client_fakes.items():
        monkeypatch.setattr(coinbase.wallet.client.Client, method_name, fake)
    # Pretend a previous import already ran at `last_update`.
    mixer.blend(
        "transactions.TransactionUpdateHistoryEntry",
        date=last_update,
        account=account,
        fetched_transactions=3)
    update_coinbase_trx(account)
    imported = Transaction.objects.filter(target_peer=account)
    assert imported.count() == 1, "Should not import transactions older than last update time"
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,367
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/tests/test_tasks.py
|
import pytest
from ...celery import app as celery_app
from ..tasks import async_update_account_trx
from mixer.backend.django import mixer
from backend.accounts.models import Account
pytestmark = pytest.mark.django_db  # grant every test in this module DB access
def new_update_exchange_trx_generic(account_id):
    """Stand-in for the real generic exchange fetcher; ignores *account_id*."""
    status = "running ..."
    return status
# TODO: Write a working test, update_exchange_trx_generic is not patched correctly
# NOTE(review): decorating a pytest test with @celery_app.task is suspicious —
# celery wraps the function in a Task object, which may keep pytest from
# collecting/running it as a plain test; confirm intent (see TODO above).
@celery_app.task
def test_async_update_exchange_trx_generic(monkeypatch):
    # Placeholder test: the setattr target below names the whole
    # `generic_exchange` module rather than an attribute on it, so nothing
    # meaningful is patched — the assert is a stub until the TODO is resolved.
    obj: Account = mixer.blend("accounts.Account")
    monkeypatch.setattr("backend.transactions.fetchers.generic_exchange",
                        async_update_account_trx)
    assert True
    # assert async_update_exchange_trx_generic.delay(
    #     obj.id).get(timeout=10) == "running"
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,368
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/migrations/0002_auto_generate_default_objects.py
|
# Generated by Django 2.0.2 on 2018-03-17 07:16
from django.db import migrations
def gen_default(apps, schema_editor):
    """Seed the accounts app with its default "Unknown Peer" row."""
    peer_model = apps.get_model("accounts", "Peer")
    unknown = peer_model()
    unknown.name = "Unknown Peer"
    unknown.save()
class Migration(migrations.Migration):
    """Data migration: create the default "Unknown Peer" object."""
    dependencies = [
        ("accounts", "0001_initial"),
    ]
    operations = [
        # Forward-only: no reverse function is given, so this migration
        # cannot be unapplied cleanly.
        migrations.RunPython(gen_default),
    ]
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,369
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/accounts/models.py
|
from django.db import models
from backend.coins.models import Coin
class Peer(models.Model):
    """A party that can send or receive value.

    A peer usually has one or more addresses associated with it.
    ``class_type`` records the concrete model name ("Peer", "Account", ...)
    so rows can be distinguished without extra joins.
    """
    id = models.AutoField(primary_key=True)
    owner = models.ForeignKey(to='auth.user', on_delete=models.PROTECT)
    name = models.CharField(max_length=100)
    class_type = models.CharField(max_length=50, editable=False)

    def save(self,
             force_insert=False,
             force_update=False,
             using=None,
             update_fields=None,
             class_type="Peer"):
        """Stamp the concrete class type, then delegate to Django's save."""
        self.class_type = class_type
        super().save(force_insert, force_update, using, update_fields)

    def __str__(self):
        return f"[{self.class_type}] {self.name}"
class CryptoAddress(models.Model):
    """A crypto address used to identify value flows."""

    class Meta:
        ordering = ("id", )

    id = models.AutoField(primary_key=True)
    # Owning peer and the coin this address lives on.
    peer = models.ForeignKey(Peer, on_delete=models.PROTECT)
    coin = models.ForeignKey(Coin, on_delete=models.PROTECT)
    address = models.CharField(max_length=256)
    # Cached "<SYMBOL>:<address>" form, recomputed on every save.
    address_str = models.CharField(max_length=300, blank=True)
    watch = models.BooleanField(default=False)

    def save(self,
             force_insert=False,
             force_update=False,
             using=None,
             update_fields=None):
        """Refresh the cached address string before persisting."""
        self.address_str = f"{self.coin.symbol}:{self.address}"
        super().save(force_insert, force_update, using, update_fields)

    def __str__(self):
        return self.address_str
class Account(Peer):
    """An exchange or wallet account (Binance, Coinbase, ...).

    Transactions are usually fetched via an API; some services only offer
    CSV or manual import. SERVICE_TYPES lists each supported service and
    its import mechanism.
    """
    SERVICE_TYPES = (
        ('binance', 'Binance', 'api'),
        ('bitfinex', 'Bitfinex', 'api'),
        ('coinbase', 'Coinbase', 'api'),
        ('cryptopia', 'Cryptopia', 'api'),
        ('ethereum_wallet', 'Ethereum Wallet', 'public_address_import'),
        ('kraken', 'Kraken', 'api'),
        ('livecoin', 'Livecoin', 'manual'),
    )
    slug = models.SlugField(max_length=50)
    service_type = models.CharField(max_length=50)
    api_key = models.CharField(max_length=100, blank=True, null=True)
    api_secret = models.CharField(max_length=100, blank=True, null=True)
    creation_date = models.DateTimeField(auto_now_add=True)
    symbols = models.CharField(max_length=1000, blank=True, null=True)

    def save(self,
             force_insert=False,
             force_update=False,
             using=None,
             update_fields=None):
        """Persist via Peer.save with class_type pinned to "Account"."""
        super().save(force_insert, force_update, using,
                     update_fields, "Account")
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,370
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/transactions/admin.py
|
from django.contrib import admin
from backend.transactions.models import Transaction, TransactionUpdateHistoryEntry

# Expose the transaction models in the Django admin site.
for transaction_model in (Transaction, TransactionUpdateHistoryEntry):
    admin.site.register(transaction_model)
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,371
|
fusion44/crypternity-backend
|
refs/heads/master
|
/backend/user_profile/handlers.py
|
from backend.user_profile.serializers import UserSerializer
def jwt_response_payload_handler(token, user=None, request=None):
    """Build the JWT login payload: the token plus the serialized user."""
    serialized_user = UserSerializer(user, context={'request': request}).data
    return {'token': token, 'user': serialized_user}
|
{"/backend/user_profile/tasks.py": ["/backend/celery.py"], "/backend/schema.py": ["/backend/coins/schema.py", "/backend/accounts/schema.py", "/backend/transactions/schema.py"], "/backend/transactions/schema.py": ["/backend/transactions/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/admin.py": ["/backend/coins/models.py"], "/backend/transactions/fetchers/generic_exchange.py": ["/backend/utils/utils.py", "/backend/accounts/models.py", "/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_generic.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/generic_exchange.py"], "/backend/accounts/schema.py": ["/backend/accounts/models.py", "/backend/coins/models.py", "/backend/accounts/tasks.py"], "/backend/transactions/importers/livecoin.py": ["/backend/utils/utils.py", "/backend/transactions/models.py", "/backend/accounts/models.py"], "/backend/coins/tests/test_tasks.py": ["/backend/coins/models.py", "/backend/celery.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_schema.py": ["/backend/test_utils/utils.py"], "/backend/transactions/tests/test_schema.py": ["/backend/transactions/schema.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/transactions/fetchers/coinbase.py": ["/backend/transactions/models.py", "/backend/accounts/models.py", "/backend/utils/utils.py"], "/backend/accounts/tests/test_schema.py": ["/backend/test_utils/utils.py", "/backend/accounts/models.py"], "/backend/utils/tests/test_utils.py": ["/backend/utils/utils.py"], "/backend/accounts/admin.py": ["/backend/accounts/models.py"], "/backend/transactions/tests/test_importer_livecoin.py": ["/backend/transactions/schema.py", "/backend/accounts/models.py", "/backend/transactions/importers/livecoin.py"], "/backend/coins/schema.py": ["/backend/coins/models.py", "/backend/coins/tasks.py"], "/backend/coins/tests/test_models.py": ["/backend/coins/models.py"], 
"/backend/coins/tasks.py": ["/backend/celery.py", "/backend/coins/models.py"], "/backend/transactions/tests/test_models.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/test_utils/utils.py"], "/backend/accounts/tasks.py": ["/backend/celery.py", "/backend/accounts/models.py", "/backend/transactions/fetchers/generic_exchange.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/test_utils/utils.py": ["/backend/transactions/models.py"], "/backend/transactions/tests/test_fetcher_coinbase.py": ["/backend/accounts/models.py", "/backend/transactions/models.py", "/backend/transactions/fetchers/coinbase.py"], "/backend/accounts/tests/test_tasks.py": ["/backend/celery.py", "/backend/accounts/tasks.py", "/backend/accounts/models.py"], "/backend/accounts/models.py": ["/backend/coins/models.py"], "/backend/transactions/admin.py": ["/backend/transactions/models.py"]}
|
12,373
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/utils/utils.py
|
import os
def list_dir(dirpath: str) -> (list, list):
    """Return (sorted sub-directories, sorted files) directly inside *dirpath*.

    Only the top level is inspected — nothing is recursed into. A path
    that does not exist yields two empty lists, matching os.walk.
    """
    try:
        _, subdirs, filenames = next(os.walk(dirpath))
    except StopIteration:
        return [], []
    return sorted(subdirs), sorted(filenames)
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,374
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/eval/predict.py
|
import json
import logging
import sys
import torch
import pandas as pd
import numpy as np
from seqeval.metrics import f1_score, precision_score, recall_score, accuracy_score, classification_report
from torch.utils.data import TensorDataset, DataLoader, RandomSampler
from keras.preprocessing.sequence import pad_sequences
from transformers import AutoTokenizer, AutoModelForTokenClassification
from transformers import PreTrainedModel, pipeline
from collections import defaultdict
from operator import itemgetter
from tqdm import tqdm
from src.utils.load_dataset import LoadBSNLP
# Log to stdout at INFO level so output interleaves with prints in CI/notebooks.
logging.basicConfig(
    stream=sys.stdout,
    level=logging.INFO,
    format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s'
)
logger = logging.getLogger('MakePrediction')
class ExtractPredictions:
def __init__(
self,
tag2code: dict,
code2tag: dict,
model_path: str = f'./data/models/bert-base-multilingual-cased-other',
):
"""
A class to extract all the NE predictions from a given tokens
:param model_path: path to a HuggingFace-transformers pre-trained model for the NER task, such as BERT Base Multilingual (Un)Cased
"""
self.model_path = model_path
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.model = AutoModelForTokenClassification.from_pretrained(
model_path,
output_attentions=False,
output_hidden_states=False,
num_labels=len(tag2code),
label2id=tag2code,
id2label=code2tag,
).to(self.device)
self.tokenizer = AutoTokenizer.from_pretrained(
model_path,
from_pt=True,
do_lower_case=False,
use_fast=False
)
self.BATCH_SIZE = 32
self.MAX_LENGTH = 128
def convert_input(
self,
input_data: pd.DataFrame,
tag2code: dict,
) -> (DataLoader, list):
all_ids = []
ids = [] # sentence ids
tokens = [] # sentence tokens
token_ids = [] # converted sentence tokens
tags = [] # NER tags
for (doc, sentence), data in input_data.groupby(["docId", "sentenceId"]):
sentence_tokens = []
sentence_tags = []
sentence_ids = []
for id, word_row in data.iterrows():
word_tokens = self.tokenizer.tokenize(str(word_row["text"]))
sentence_tokens.extend(word_tokens)
sentence_tags.extend([tag2code[word_row["ner"]]] * len(word_tokens))
token_id_str = f'{doc};{sentence};{word_row["tokenId"]}'
all_ids.append(token_id_str)
token_id = len(all_ids) - 1
sentence_ids.extend([token_id] * len(word_tokens))
if len(sentence_tokens) != len(sentence_tags) != len(sentence_ids):
raise Exception("Inconsistent output!")
ids.append(sentence_ids)
tokens.append(sentence_tokens)
sentence_token_ids = self.tokenizer.convert_tokens_to_ids(sentence_tokens)
token_ids.append(sentence_token_ids)
tags.append(sentence_tags)
# padding is required to spill the sentence tokens in case there are sentences longer than 128 words
# or to fill in the missing places to 128 (self.MAX_LENGTH)
ids = torch.as_tensor(pad_sequences(
ids,
maxlen=self.MAX_LENGTH,
dtype="long",
value=-1,
truncating="post",
padding="post"
)).to(self.device)
token_ids = torch.as_tensor(pad_sequences(
token_ids,
maxlen=self.MAX_LENGTH,
dtype="long",
value=0.0,
truncating="post",
padding="post"
)).to(self.device)
tags = torch.as_tensor(pad_sequences(
tags,
maxlen=self.MAX_LENGTH,
dtype="long",
value=tag2code["PAD"],
truncating="post",
padding="post"
)).to(self.device)
masks = torch.as_tensor(np.array([[float(token != 0.0) for token in sentence] for sentence in token_ids])).to(self.device)
data = TensorDataset(ids, token_ids, masks, tags)
sampler = RandomSampler(data)
return DataLoader(data, sampler=sampler, batch_size=self.BATCH_SIZE), all_ids
def translate(
self,
predictions: list,
labels: list,
tokens: list,
sent_ids: list,
tag2code: dict,
code2tag: dict,
all_ids: list
) -> (list, list, list, list):
translated_predictions, translated_labels, translated_tokens, translated_sentences = [], [], [], []
for preds, labs, toks, ids in zip(predictions, labels, tokens, sent_ids):
sentence_predictions, sentence_labels, sentence_tokens, sentence_ids = [], [], [], []
for p, l, t, i in zip(preds, labs, toks, ids):
if l == tag2code["PAD"]:
continue
if p == tag2code["PAD"]:
logger.info(f"PREDICTED `PAD`! {p}, {l}, {t}, {i}")
continue
sentence_tokens.append(t)
sentence_predictions.append(code2tag[p])
sentence_labels.append(code2tag[l])
sentence_ids.append(all_ids[i])
translated_tokens.append(sentence_tokens)
translated_predictions.append(sentence_predictions)
translated_labels.append(sentence_labels)
translated_sentences.append(sentence_ids)
return translated_predictions, translated_labels, translated_tokens, translated_sentences
def test(
self,
data: DataLoader,
all_ids: list,
tag2code: dict,
code2tag: dict,
) -> (dict, pd.DataFrame):
eval_loss = 0.
eval_steps, eval_examples = 0, 0
eval_ids, eval_tokens, eval_predictions, eval_labels = [], [], [], []
self.model.eval()
for batch in data:
batch_ids, batch_tokens, batch_masks, batch_tags = tuple(t.to(self.device) for t in batch)
with torch.no_grad():
outputs = self.model(
batch_tokens,
attention_mask=batch_masks,
labels=batch_tags
)
logits = outputs[1].detach().cpu().numpy()
label_ids = batch_tags.to('cpu').numpy()
toks = batch_tokens.to('cpu').numpy()
sentence_ids = batch_ids.to('cpu').numpy()
eval_loss += outputs[0].mean().item()
toks = [self.tokenizer.convert_ids_to_tokens(sentence) for sentence in toks]
eval_tokens.extend(toks)
eval_predictions.extend([list(p) for p in np.argmax(logits, axis=2)])
eval_labels.extend(label_ids)
eval_ids.extend(sentence_ids)
eval_examples += batch_tokens.size(0)
eval_steps += 1
eval_loss = eval_loss / eval_steps
flatten = lambda x: [j for i in x for j in i]
predicted_tags, valid_tags, tokens, sentence_ids = self.translate(eval_predictions, eval_labels, eval_tokens, eval_ids, tag2code, code2tag, all_ids)
# for st, sp, sv, vi in zip(tokens, predicted_tags, valid_tags, sentence_ids):
# for t, p, v, i in zip(st, sp, sv, vi):
# logger.info(f"row = {t}, {p}, {v}, {i}")
predicted_data = pd.DataFrame(data={
'sentence_id': flatten(sentence_ids),
'tokens': flatten(tokens),
'predicted_tag': flatten(predicted_tags),
'valid_tag': flatten(valid_tags),
})
if len([tag for sent in valid_tags for tag in sent if tag[:2] in ['B-', 'I-']]) == 0:
valid_tags.append(["O"])
predicted_tags.append(["B-ORG"])
scores = {
"loss": eval_loss,
"acc": accuracy_score(valid_tags, predicted_tags),
"f1": f1_score(valid_tags, predicted_tags),
"p": precision_score(valid_tags, predicted_tags),
"r": recall_score(valid_tags, predicted_tags),
"report": classification_report(valid_tags, predicted_tags),
}
return scores, predicted_data
def __merge_data(self,
                 data: pd.DataFrame,
                 pred_data: pd.DataFrame,
                 ) -> pd.DataFrame:
    """Write the majority predicted tag of each token group back into `data`.

    The grouping key has the form "<docId>;<sentenceId>;<tokenId>"; the most
    frequent `predicted_tag` within a group becomes the token's `calcNER`.
    """
    data['calcNER'] = ''
    for group_id, group in pred_data.groupby('sentence_id'):
        parts = group_id.split(';')
        doc_id, sent_id, tok_id = parts[0], int(parts[1]), int(parts[2])
        # value_counts() is sorted by descending frequency, so idxmax()
        # picks the majority tag (first one on ties)
        majority = group['predicted_tag'].value_counts().idxmax()
        row_mask = (
            (data['docId'] == doc_id)
            & (data['sentenceId'] == sent_id)
            & (data['tokenId'] == tok_id)
        )
        data.loc[row_mask, 'calcNER'] = majority
    return data
def predict(self,
            data: pd.DataFrame,
            tag2code: dict,
            code2tag: dict,
            ) -> (dict, pd.DataFrame):
    """Convert `data` to model input, evaluate, and merge predictions back in.

    Returns the evaluation scores and `data` augmented with a `calcNER` column.
    """
    model_input, sentence_ids = self.convert_input(data, tag2code)
    scores, predictions = self.test(model_input, sentence_ids, tag2code, code2tag)
    merged_frame = self.__merge_data(data, predictions)
    return scores, merged_frame
if __name__ == '__main__':
    # Smoke-test: run the trained SloBERTa model over the 2021 Slovene test split.
    # model_path = f'./data/models/bert-base-multilingual-cased-other'
    model_path = './data/runs/run_2021-02-17T11:42:19_slo-models/models/sloberta-1.0-bsnlp-2021-5-epochs'
    tag2code, code2tag = LoadBSNLP(lang='sl', year='2021', merge_misc=False).encoding()
    logger.info(f'{tag2code}')
    logger.info(f'{code2tag}')
    loader = LoadBSNLP(lang="sl", year='2021', merge_misc=False)
    predictor = ExtractPredictions(model_path)
    test_frame = loader.test()
    scores, pred_data = predictor.predict(test_frame, tag2code, code2tag)
    logger.info(f'{json.dumps(scores, indent=4)}')
    logger.info(f'\n{scores["report"]}')
    logger.info(f'\n{pred_data}')
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,375
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/eval/model_eval.py
|
import json
import argparse
import tqdm
import logging
import sys
import pandas as pd
import random
from collections import defaultdict
from src.eval.predict import ExtractPredictions
from src.utils.load_documents import LoadBSNLPDocuments
from src.utils.load_dataset import LoadBSNLP
from src.utils.update_documents import UpdateBSNLPDocuments
from src.utils.utils import list_dir
# Show full frames when printing pandas DataFrames (debug aid).
pd.set_option('display.max_rows', None)
pd.set_option('display.max_columns', None)
# Log everything to stdout with file/line context.
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s'
)
logger = logging.getLogger('TrainEvalModels')
# Global debug switch; unused in the code visible in this module.
DEBUG = False
def parse_args():
    """Parse CLI options: language, year, MISC-merging flag, and run directory."""
    parser = argparse.ArgumentParser()
    for flag, default in (('--lang', 'all'), ('--year', 'all')):
        parser.add_argument(flag, type=str, default=default)
    parser.add_argument('--merge-misc', action='store_true')
    parser.add_argument('--run-path', type=str, default=None)
    return parser.parse_args()
def group_sentences(document: list) -> dict:
    """Concatenate token texts per sentence id.

    Each token's text is appended with a leading space, so every resulting
    sentence string starts with one space (matching historical behaviour).
    """
    grouped = {}
    for token in document:
        sid = token['sentenceId']
        grouped[sid] = f"{grouped.get(sid, '')} {token['text']}"
    return grouped
def get_label_dicts(path: str) -> (dict, dict):
    """Load label<->id maps from a HuggingFace-style `config.json` under `path`.

    Returns (label2id, id2label) with the id2label keys coerced to int
    (JSON object keys are always strings).
    """
    config_file = f'{path}/config.json'
    with open(config_file) as handle:
        config = json.load(handle)
    id2label = {int(code): tag for code, tag in config['id2label'].items()}
    return config['label2id'], id2label
def looper(
    run_path: str,
    clang: str,
    model: str,
    year: str,
    categorize_misc: bool = False,
) -> dict:
    """Predict NER tags for every merged document with one trained model.

    Loads the merged BSNLP documents for `clang`/`year`, runs the model found
    at `{run_path}/models/{model}` over each document, and — when
    `categorize_misc` is set — re-classifies predicted MISC spans into PRO/EVT
    using the auxiliary misc model under `{run_path}/misc_models`.

    Returns:
        ({'model': <name>, 'preds': dataset->lang->docId->records}, scores):
        the nested non-"O" predictions plus one score dict per document.
    """
    loader = LoadBSNLPDocuments(lang=clang, year=year)
    model_name = model.split('/')[-1]
    logger.info(f"Predicting for {model_name}")
    model_path = f'{run_path}/models/{model}'
    tag2code, code2tag = get_label_dicts(model_path)
    misctag2code, misccode2tag = {}, {}
    logger.info(f"tag2code: {tag2code}")
    logger.info(f"code2tag: {code2tag}")
    misc_model, _ = list_dir(f'{run_path}/misc_models')
    if categorize_misc:
        logger.info(f"Using misc model: {misc_model[0]}")
        misctag2code, misccode2tag = get_label_dicts(f'{run_path}/misc_models/{misc_model[0]}')
        logger.info(f"misctag2code: {misctag2code}")
        logger.info(f"misccode2tag: {misccode2tag}")
    predictor = ExtractPredictions(model_path=model_path, tag2code=tag2code, code2tag=code2tag)
    pred_misc = None if not categorize_misc else ExtractPredictions(model_path=f'./{run_path}/misc_models/{misc_model[0]}', tag2code=misctag2code, code2tag=misccode2tag)
    updater = UpdateBSNLPDocuments(lang=clang, year=year, path=f'{run_path}/predictions/bsnlp/{model_name}')
    predictions = {}
    data = loader.load_merged()
    tdset = tqdm.tqdm(data.items(), desc="Dataset")
    scores = []
    for dataset, langs in tdset:
        tdset.set_description(f'Dataset: {dataset}')
        tlang = tqdm.tqdm(langs.items(), desc="Language")
        predictions[dataset] = {}
        for lang, docs in tlang:
            predictions[dataset][lang] = {}
            # BUG FIX: previously formatted the tqdm object itself
            # (f'Lang: {tlang}'); show the language code instead.
            tlang.set_description(f'Lang: {lang}')
            for docId, doc in tqdm.tqdm(docs.items(), desc="Docs"):
                to_pred = pd.DataFrame(doc['content'])
                if categorize_misc:
                    # categorize the PRO and EVT to MISC, as the model only knows about it
                    to_pred.loc[to_pred['ner'].isin(['B-PRO', 'B-EVT']), 'ner'] = f'B-MISC'
                    to_pred.loc[to_pred['ner'].isin(['I-PRO', 'I-EVT']), 'ner'] = f'I-MISC'
                doc_scores, pred_data = predictor.predict(to_pred, tag2code, code2tag)
                doc_scores['id'] = f'{lang};{docId}'
                scores.append(doc_scores)
                if categorize_misc and len(pred_data.loc[pred_data['calcNER'].isin(['B-MISC', 'I-MISC'])]) > 0:
                    misc_data = pd.DataFrame(doc['content'])
                    if len(misc_data.loc[~(misc_data['ner'].isin(['B-MISC', 'I-MISC']))]) > 0:
                        # randomly choose a category for (B|I)-MISC category
                        cat = random.choice(['PRO', 'EVT'])
                        misc_data.loc[(misc_data['ner'] == 'B-MISC'), 'ner'] = f'B-{cat}'
                        misc_data.loc[(misc_data['ner'] == 'I-MISC'), 'ner'] = f'I-{cat}'
                    misc_data.loc[~(misc_data['ner'].isin(['B-PRO', 'B-EVT', 'I-PRO', 'I-EVT'])), 'ner'] = 'O'
                    _, misc_pred = pred_misc.predict(misc_data, misctag2code, misccode2tag)
                    # update wherever there is misc in the original prediction
                    pred_data.loc[pred_data['calcNER'].isin(['B-MISC', 'I-MISC']), 'calcNER'] = misc_pred.loc[pred_data['calcNER'].isin(['B-MISC', 'I-MISC']), 'calcNER']
                    # update wherever the new predictor made a prediction
                    pred_data.loc[misc_pred['calcNER'].isin(['B-PRO', 'B-EVT', 'I-PRO', 'I-EVT']), 'calcNER'] = misc_pred.loc[misc_pred['calcNER'].isin(['B-PRO', 'B-EVT', 'I-PRO', 'I-EVT']), 'calcNER']
                    doc['content'] = pred_data.to_dict(orient='records')
                    # sanity check: no MISC tags may survive the re-typing step
                    miscs = [r['calcNER'] for r in doc['content'] if r['calcNER'] in ['B-MISC', 'I-MISC']]
                    if len(miscs) > 0:
                        raise Exception(f"STILL MORE MISCS??? {docId}, {miscs}")
                predictions[dataset][lang][docId] = pred_data.loc[~(pred_data['calcNER'] == 'O')].to_dict(orient='records')
    updater.update_merged(data)
    logger.info(f"Done predicting for {model_name}")
    return {
        'model': model_name,
        'preds': predictions,
    }, scores
def main():
    """Run every trained model under `run_path` and persist predictions/scores."""
    args = parse_args()
    run_path = args.run_path if args.run_path is not None else "./data/models/"
    lang = args.lang
    year = args.year
    merge_misc = args.merge_misc
    print(f"Run path: {run_path}")
    print(f"Langs: {lang}")
    print(f"Year: {year}")
    print(f"Merge misc: {merge_misc}")
    models, _ = list_dir(f'{run_path}/models')
    logger.info(f"Models to predict: {json.dumps(models, indent=4)}")
    predictions = []
    doc_scores = {}
    for model in tqdm.tqdm(models, desc="Model"):
        logger.info(f"Model: {model}")
        preds, scores = looper(run_path, lang, model, year, merge_misc)
        predictions.append(preds)
        doc_scores[model] = scores
    with open(f'{run_path}/all_predictions.json', 'w') as f:
        json.dump(predictions, f)
    # BUG FIX: all_scores.json previously dumped `predictions` a second time;
    # write the collected per-document scores instead.
    with open(f'{run_path}/all_scores.json', 'w') as f:
        json.dump(doc_scores, f)
    logger.info("Done.")


if __name__ == '__main__':
    main()
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,376
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/utils/load_dataset.py
|
import pandas as pd
import pyconll
import numpy as np
from src.utils.utils import list_dir
from typing import Union
# pd.set_option('display.max_rows', None) # only for debugging purposes
class LoadDataset:
    """Abstract base for dataset loaders.

    Subclasses override load/train/dev/test; the base implementations all
    return empty DataFrames so the interface is usable without data.
    """

    def __init__(self, base_fname: str, format: str, print_debug: bool = False):
        self.base_fname = base_fname
        self.data_format = format
        self.print_debug = print_debug

    def load(self, dset: str) -> pd.DataFrame:
        """Load a single named split; base implementation is empty."""
        return pd.DataFrame()

    def train(self) -> pd.DataFrame:
        return pd.DataFrame()

    def dev(self) -> pd.DataFrame:
        """
        This is the validation data
        """
        return pd.DataFrame()

    def test(self) -> pd.DataFrame:
        return pd.DataFrame()

    def load_all(self) -> pd.DataFrame:
        """Concatenate the train, validation and test splits."""
        splits = [self.train(), self.dev(), self.test()]
        return pd.concat(splits)

    def encoding(self) -> (dict, dict):
        """Build tag<->code maps from the training split, plus a "PAD" tag."""
        tags = np.append(self.train()["ner"].unique(), ["PAD"])
        tag2code = {tag: code for code, tag in enumerate(tags)}
        code2tag = {code: tag for tag, code in tag2code.items()}
        return tag2code, code2tag
class LoadSSJ500k(LoadDataset):
    """Loader for the ssj500k corpus in CoNLL-U format."""

    def __init__(self):
        super().__init__(
            "data/datasets/ssj500k/",
            "conll"
        )

    def load(self, dset: str) -> pd.DataFrame:
        """Read `<base>/<dset>_ner.conllu` into a flat per-token DataFrame."""
        parsed = pyconll.load_from_file(f"{self.base_fname}{dset}_ner.conllu")
        rows = []
        for sent_idx, sentence in enumerate(parsed):
            for token in sentence:
                if token.upos == 'PROPN':  # proper nouns carry the NER tag in MISC
                    # NOTE: we cannot use the just <TYPE> annotation without `B-` (begin)
                    # or `I-` (inside) `<TYPE>` because we would not be compliant with
                    # the CoNLL format
                    tag = list(token.misc.keys())[0].upper()
                else:
                    tag = "O"
                rows.append({"docId": "xxx", "text": token.form, "sentenceId": sent_idx, "ner": tag})
        return pd.DataFrame(rows)

    def train(self) -> pd.DataFrame:
        return self.load('train')

    def dev(self) -> pd.DataFrame:
        return self.load('dev')

    def test(self) -> pd.DataFrame:
        return self.load('test')
class LoadBSNLP(LoadDataset):
    """Loader for the BSNLP shared-task CSV splits.

    Selection is by language (`bg/cs/pl/ru/sl/uk` or `all`), shared-task year,
    and topic dataset; MISC handling is configurable via `merge_misc` /
    `misc_data_only`.
    """

    available_langs = ['bg', 'cs', 'pl', 'ru', 'sl', 'uk']
    datasets = {
        "2017": ["ec", "trump"],
        "2021": ["asia_bibi", "brexit", "nord_stream", "other", "ryanair"],
        "all": ["ec", "trump", "asia_bibi", "brexit", "nord_stream", "other", "ryanair"],
        "test_2021": ["covid-19", "us_election_2020"],
    }

    def __init__(
        self,
        lang: str = 'all',
        year: str = 'all',
        data_set: str = 'all',
        exclude: Union[str, None] = None,
        merge_misc: bool = True,
        misc_data_only: bool = False,
        print_debug: bool = False
    ):
        """Validate and store the language/year/dataset selection.

        Raises:
            Exception: on an unknown year, dataset, or language, or when
                `exclude` leaves no dataset selected.
        """
        super().__init__(
            "data/datasets/bsnlp",
            "csv",
            print_debug=print_debug,
        )
        # assert year
        if year not in self.datasets:
            raise Exception(f"Invalid year chosen: {year}")
        # assert dataset
        if data_set in self.datasets[year]:
            self.data_set = [data_set]
        elif data_set == 'all':
            self.data_set = self.datasets[year]
        else:
            raise Exception(f"Invalid dataset chosen: {data_set}")
        if exclude is not None:
            if print_debug: print(f"Excluding {exclude}")
            self.data_set = [ds for ds in self.data_set if ds != exclude]
        if not self.data_set:
            raise Exception(f"Empty data set chosen? {self.data_set}")
        # assert language
        if lang in self.available_langs:
            self.langs = [lang]
        elif lang == 'all':
            self.langs = self.available_langs
        else:
            raise Exception(f"Invalid language option: {lang}")
        self.random_state = 42
        self.merge_misc = merge_misc
        if merge_misc and misc_data_only:
            print("WARNING: weird combination? merge misc and misc data only?")
        self.misc_data_only = misc_data_only

    def load(self, dset: str) -> pd.DataFrame:
        """Concatenate the `dset` split CSVs of all selected datasets/languages.

        Not every dataset exists in every language, so missing or empty split
        files are skipped.
        """
        dirs, _ = list_dir(self.base_fname)
        data = pd.DataFrame()
        for dataset in dirs:
            if dataset not in self.data_set:
                continue
            for lang in self.langs:
                fname = f"{self.base_fname}/{dataset}/splits/{lang}/{dset}_{lang}.csv"
                try:
                    df = pd.read_csv(f"{fname}")
                # BUG FIX: narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt/SystemExit) to the expected
                # "split not available" failures only.
                except (FileNotFoundError, pd.errors.EmptyDataError):
                    if self.print_debug: print(f"[{dataset}] skipping {lang}.")
                    continue
                # make sentence ids globally unique across documents
                df['sentenceId'] = df['docId'].astype(str) + ';' + df['sentenceId'].astype('str')
                if self.merge_misc:
                    # collapse PRO/EVT entities into a single MISC category
                    df['ner'] = df['ner'].map(lambda x: x.replace("PRO", "MISC").replace("EVT", "MISC"))
                if self.misc_data_only:
                    # keep only MISC-like entities, blanking PER/LOC/ORG
                    df['ner'] = df['ner'].map(lambda x: "O" if x[2:] in ["PER", "LOC", "ORG"] else x)
                data = pd.concat([data, df])
        return data

    def train(self) -> pd.DataFrame:
        return self.load('train')

    def dev(self) -> pd.DataFrame:
        """
        This is the validation data
        """
        return self.load('dev')

    def test(self) -> pd.DataFrame:
        return self.load('test')
class LoadCombined(LoadDataset):
    """Loader that concatenates the splits of several underlying loaders."""

    def __init__(self, loaders: list):
        super().__init__(
            f"combined_datasets:{','.join([l.base_fname for l in loaders])}",
            "csv"
        )
        self.random_state = 42
        self.loaders = loaders

    def load(self, set: str) -> pd.DataFrame:
        return pd.DataFrame()

    def train(self) -> pd.DataFrame:
        # seed with an empty frame so concat works for an empty loader list too
        frames = [pd.DataFrame()]
        frames.extend(loader.train() for loader in self.loaders)
        return pd.concat(frames)

    def dev(self) -> pd.DataFrame:
        frames = [pd.DataFrame()]
        frames.extend(loader.dev() for loader in self.loaders)
        return pd.concat(frames)

    def test(self) -> pd.DataFrame:
        frames = [pd.DataFrame()]
        frames.extend(loader.test() for loader in self.loaders)
        return pd.concat(frames)
if __name__ == '__main__':
    # Smoke-test: load the 2021 data across all languages and print basic stats.
    loader = LoadBSNLP(lang="all", year='2021', merge_misc=False)
    # loader = LoadSSJ500k()
    # loader = LoadCombined([LoadBSNLP("sl"), LoadSSJ500k()])
    tag2code, code2tag = loader.encoding()
    print(f"tag2code: {tag2code}")
    print(f"code2tag: {code2tag}")
    train_data = loader.train()
    print(f"Train data: {train_data.shape[0]}, NERs: {train_data.loc[train_data['ner'] != 'O'].shape[0]}")
    print(train_data['ner'].value_counts())
    print(train_data.value_counts())
    dev_data = loader.dev()
    print(f"Validation data: {dev_data.shape[0]}, NERs: {dev_data.loc[dev_data['ner'] != 'O'].shape[0]}")
    print(dev_data['ner'].value_counts())
    test_data = loader.test()
    print(f"Test data: {test_data.shape[0]}, NERs: {test_data.loc[test_data['ner'] != 'O'].shape[0]}")
    print(test_data['ner'].value_counts())
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,377
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/transform/create_splits.py
|
import os
import json
import pandas as pd
import pathlib
import shutil
from src.utils.utils import list_dir
from sklearn.model_selection import train_test_split
# TODO: add different seed option
random_state = 42
# Fraction of documents assigned to the training split.
TRAIN_SIZE = 0.8
def join_docs(path: str, docs: list) -> pd.DataFrame:
    """Concatenate the merged CSVs of `docs` (located under `path`) into one frame."""
    frames = [pd.DataFrame()]  # seed keeps concat valid for an empty doc list
    frames.extend(pd.read_csv(f'{path}/{doc["merged_fname"]}') for doc in docs)
    return pd.concat(frames)
def copy_annotations(
    docs: list,
    path: str,
):
    """Copy every doc's gold-annotation file into `path`, ensuring a `.out` suffix."""
    print(f"Copying annotations to {path}")
    target_dir = pathlib.Path(path)
    target_dir.mkdir(parents=True, exist_ok=True)
    for doc in docs:
        source = pathlib.Path(doc['annotated'])
        name = doc["ann_fname"]
        if name[-4:] != '.out':
            name = f'{name}.out'
        shutil.copy(source, pathlib.Path(f'{path}/{name}'))
def join_files(files: list, docs: list) -> list:
    """Attach to each doc the merged file whose name stem matches its raw file stem.

    Docs without a match are reported on stdout and left unchanged; the (mutated)
    `docs` list is returned.
    """
    for doc in docs:
        stem = doc['raw_fname'][:-4]
        match = next((f for f in files if f[:-4] == stem), None)
        if match is None:
            print(f"[ERROR] No merged file for {doc}")
        else:
            doc['merged_fname'] = match
    return docs
def create_split(
    dataset_dir: str,
    lang: str,
    docs: list,
    split_path: str,
) -> None:
    """Split one dataset/language into disjoint train/dev/test CSVs.

    Also copies each split's gold-annotation files under `split_path`.
    """
    path = f"{dataset_dir}/merged/{lang}"
    out_path = f"{dataset_dir}/splits/{lang}/"
    dataset_name = dataset_dir.split('/')[-1]
    print(path)
    _, files = list_dir(path)
    joined = join_files(files, docs)
    train_docs, test_docs = train_test_split(
        joined,
        train_size=TRAIN_SIZE,
        random_state=random_state,
    )
    # BUG FIX: the dev split was previously drawn from `joined` (the full
    # document set), so it overlapped both train and test documents. Carve it
    # out of train_docs instead; 10% of train ≈ the 8% of the whole set the
    # old TRAIN_SIZE * 0.1 ratio targeted, and the three splits stay disjoint.
    train_docs, val_docs = train_test_split(
        train_docs,
        test_size=0.1,
        random_state=random_state,
    )
    train_data = join_docs(path, train_docs)
    val_data = join_docs(path, val_docs)
    test_data = join_docs(path, test_docs)
    if not os.path.exists(out_path):
        os.mkdir(out_path)
    print(f"Saving to: {out_path}")
    train_data.to_csv(f'{out_path}/train_{lang}.csv', index=False)
    val_data.to_csv(f'{out_path}/dev_{lang}.csv', index=False)
    test_data.to_csv(f'{out_path}/test_{lang}.csv', index=False)
    copy_annotations(train_docs, f'{split_path}/train/{dataset_name}/{lang}')
    copy_annotations(val_docs, f'{split_path}/dev/{dataset_name}/{lang}')
    copy_annotations(test_docs, f'{split_path}/test/{dataset_name}/{lang}')
def create_splits(
    datasets: dict,
    split_path: str
) -> None:
    """Run create_split for every (dataset, language) pair in `datasets`."""
    for dataset_dir, languages in datasets.items():
        for language, documents in languages.items():
            create_split(dataset_dir, language, documents, split_path)
def main():
    """Entry point: build train/dev/test splits from the merged dataset pairs."""
    split_path = './data/datasets/bsnlp_splits'
    # BUG FIX: json.load(open(...)) leaked the file handle; use a context manager.
    with open('./data/results/dataset_pairs.json') as f:
        datasets = json.load(f)
    create_splits(datasets, split_path)


if __name__ == '__main__':
    main()
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,378
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/utils/load_documents.py
|
import json
import pandas as pd
from typing import Callable
from src.utils.utils import list_dir
class LoadDocuments:
    """Minimal base class holding the root directory documents are loaded from."""

    def __init__(self, path):
        self.path = path  # root directory of the document tree
class LoadBSNLPDocuments(LoadDocuments):
    """Loads BSNLP shared-task documents (raw/merged/predicted/annotated)
    from the per-dataset, per-language directory layout under `path`."""

    def __init__(
        self,
        year: str = 'all',
        lang: str = 'all',
        path: str = './data/datasets/bsnlp',
    ) -> None:
        super(LoadBSNLPDocuments, self).__init__(
            path=path
        )
        # topic datasets available per shared-task year
        datasets = {
            "2017": ["ec", "trump"],
            "2021": ["asia_bibi", "brexit", "nord_stream", "other", "ryanair"],
            "all": ["ec", "trump", "asia_bibi", "brexit", "nord_stream", "other", "ryanair"],
            "test_2021": ["covid-19", "us_election_2020"],
        }
        if year not in datasets:
            raise Exception(f"Invalid subset chosen: {year}")
        self.dirs = datasets[year]
        available_langs = ['bg', 'cs', 'pl', 'ru', 'sl', 'uk']
        if lang in available_langs:
            self.langs = [lang]
        elif lang == 'all':
            self.langs = available_langs
        else:
            raise Exception("Invalid language option.")

    def load(
        self,
        ftype: str,
        fun: Callable  # NOTE: all functions must return `dict` type with `docId` available
    ) -> dict:
        """Apply loader `fun` to every file under `<path>/<dataset>/<ftype>/<lang>`.

        Returns a nested dict keyed as data[dataset][lang][docId]; each entry
        additionally records its source file name under 'fname'.
        """
        data = {}
        for dataset in self.dirs:
            data[dataset] = {}
            for lang in self.langs:
                data[dataset][lang] = {}
                path = f'{self.path}/{dataset}/{ftype}/{lang}'
                _, files = list_dir(path)
                for fname in files:
                    result = fun(f'{path}/{fname}')
                    result['fname'] = fname  # remember the origin file
                    data[dataset][lang][result['docId']] = result
        return data

    def load_raw(self) -> dict:
        """Load raw text documents (header lines: docId, lang, created, url, title)."""
        def raw_loader(fpath: str) -> dict:
            data = {}
            with open(fpath) as f:
                lines = f.readlines()
                data['docId'] = lines[0].strip()
                data['lang'] = lines[1].strip()
                data['created'] = lines[2].strip()
                data['url'] = lines[3].strip()
                data['title'] = lines[4].strip()
                # NOTE(review): lines[4:] re-includes the title line in the body
                # text — confirm whether lines[5:] was intended
                content = ' '.join([line.strip() for line in lines[4:]])
                data['content'] = content
            return data
        return self.load('raw', raw_loader)

    def load_merged(self) -> dict:
        """Load merged token CSVs; 'content' is a list of per-token record dicts."""
        def merged_loader(fpath: str) -> dict:
            df = pd.read_csv(fpath, dtype={'docId': str, 'clID': str}).to_dict(orient='records')
            docId = df[0]['docId']
            return {
                'docId': docId,
                'content': df
            }
        return self.load('merged', merged_loader)

    def load_predicted(self, folder: str = 'predicted') -> dict:
        """Load prediction CSVs; 'content' stays a DataFrame."""
        def predicted_loader(fpath: str) -> dict:
            df = pd.read_csv(fpath)
            docId = df.iloc[0]['docId']
            return {
                'docId': docId,
                'content': df
            }
        return self.load(folder, predicted_loader)

    def load_annotated(self):
        """Load gold annotation files: first line is the docId, the rest TSV rows."""
        def annotated_loader(fpath: str) -> dict:
            docId = open(fpath).readline().strip()
            data = pd.read_csv(
                fpath,
                header=None,
                skiprows=[0],  # skip the docId header line
                delimiter='\t',
                names=['Mention', 'Base', 'Category', 'clID']
            )
            return {
                'docId': docId,
                'content': data.to_dict(orient='records'),
            }
        return self.load('annotated', annotated_loader)
if __name__ == '__main__':
    # Smoke-test: load and pretty-print the Slovene annotated documents.
    bsnlp_loader = LoadBSNLPDocuments(lang='sl')
    annotated = bsnlp_loader.load_annotated()
    print(json.dumps(annotated, indent=4))
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,379
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/utils/update_documents.py
|
import pandas as pd
from typing import Callable
from pathlib import Path
class UpdateDocuments:
    """Minimal base class holding the root directory documents are written to."""

    def __init__(self, path):
        self.path = path  # root directory of the output tree
class UpdateBSNLPDocuments(UpdateDocuments):
    """Writes per-document prediction/clustering results back into the BSNLP
    directory layout, mirroring LoadBSNLPDocuments' dataset/lang structure."""

    def __init__(
        self,
        year: str = 'all',
        lang: str = 'all',
        path: str = './data/datasets/bsnlp',
    ) -> None:
        super(UpdateBSNLPDocuments, self).__init__(
            path=path
        )
        # topic datasets available per shared-task year
        datasets = {
            "2017": ["ec", "trump"],
            "2021": ["asia_bibi", "brexit", "nord_stream", "other", "ryanair"],
            "all": ["ec", "trump", "asia_bibi", "brexit", "nord_stream", "other", "ryanair"],
            "test_2021": ["covid-19", "us_election_2020"],
        }
        if year not in datasets:
            raise Exception(f"Invalid subset chosen: {year}")
        self.dirs = datasets[year]
        available_langs = ['bg', 'cs', 'pl', 'ru', 'sl', 'uk']
        if lang in available_langs:
            self.langs = [lang]
        elif lang == 'all':
            self.langs = available_langs
        else:
            raise Exception("Invalid language option.")

    def __update(
        self,
        ftype: str,
        data: dict,
        fun: Callable
    ) -> None:
        """Walk data[dataset][lang][docId] and apply writer `fun` to each doc,
        validating dataset/language against the configured selection and
        creating the target directories as needed."""
        for dataset, langs in data.items():
            if dataset not in self.dirs:
                raise Exception(f"Unrecognized dataset: {dataset}")
            for lang, documents in langs.items():
                if lang not in self.langs:
                    raise Exception(f"Unrecognized language: {lang}")
                path = f'{self.path}/{dataset}/{ftype}/{lang}'
                Path(path).mkdir(parents=True, exist_ok=True)
                for docId, content in documents.items():
                    fun(f'{path}/{content["fname"]}', content)

    def update_merged(self, new_data) -> None:
        """Write each doc's record-list content as a CSV under `predicted/`."""
        def update_merged(fpath: str, doc: dict) -> None:
            df = pd.DataFrame(doc['content'])
            df.to_csv(fpath, index=False)
        self.__update('predicted', new_data, update_merged)

    def update_clustered(self, new_data) -> None:
        """Write each doc's DataFrame content as a CSV under `clustered/`."""
        def update_merged(fpath: str, doc: dict) -> None:
            doc['content'].to_csv(fpath, index=False)
        self.__update('clustered', new_data, update_merged)

    def __merge_records(
        self,
        nes: pd.DataFrame
    ) -> pd.DataFrame:
        """
        Merges the NEs in the form of the expected output
        :param nes:
        :return:
        """
        nes = nes.to_dict(orient='records')
        merged = []
        for i, ne in enumerate(nes):
            # I- rows are absorbed by the preceding B- row below
            if ne['calcNER'].startswith('I-'):
                continue
            j = i + 1
            # extend the entity with following rows until the next B- tag
            while j < len(nes) and not nes[j]['calcNER'].startswith('B-'):
                ne['text'] = f'{ne["text"]} {nes[j]["text"]}'
                ne['calcLemma'] = f'{ne["calcLemma"]} {nes[j]["calcLemma"]}'
                j += 1
            ne['calcNER'] = ne['calcNER'][2:]  # strip the B- prefix
            merged.append(ne)
        return pd.DataFrame(merged)

    def update_predicted(self, new_data) -> None:
        """Write submission-format `.out` files: docId header plus one merged
        entity per line (text, lemma, NER category, cluster id), tab-separated."""
        def update_predicted(fpath: str, doc: dict) -> None:
            df = doc['content']
            # fill in placeholder columns when upstream steps did not provide them
            if 'calcLemma' not in df.columns:
                print(f"MISSING LEMMA: `{fpath}`")
                df['calcLemma'] = 'xxx'
            if 'calcClId' not in df.columns:
                print(f"MISSING caclClId in `{fpath}`")
                df['calcClId'] = 'xxx'
            if 'calcNer' in df.columns:
                df = df.rename(columns={'calcNer': 'calcNER'})
            df = df[['text', 'calcLemma', 'calcNER', 'calcClId']]
            if len(df.loc[df['calcNER'].isna()]) > 0:
                df.loc[df['calcNER'].isna(), 'calcNER'] = 'O'
            df = df.loc[~df['calcNER'].isin(['O'])]  # keep entity tokens only
            df = self.__merge_records(df)
            df = df.drop_duplicates(subset=['text'])
            with open(f'{fpath}.out', 'w') as f:
                f.write(f'{doc["docId"]}\n')
                df.to_csv(f, sep='\t', header=False, index=False)
        # empty ftype writes directly under `<path>/<dataset>//<lang>`
        self.__update('', new_data, update_predicted)
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,380
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/data/datasets/ssj500k/prep.py
|
import xml.etree.ElementTree as ET
import random
random.seed(42)
def next_sent():
    """Yield one sentence at a time from the ssj500k morphology CoNLL-U file.

    Each sentence is a list of [UPOS, FEATS] pairs (columns 3 and 5); comment
    lines are skipped and a blank line terminates a sentence.
    """
    conllu = open('ssj500k.conllu/ssj500k-ud-morphology.conllu')
    sent = []
    for line in conllu:
        if line.startswith('#'):
            continue
        if line.strip() == '':
            yield sent
            sent = []
        else:
            cols = line.split('\t')
            sent.append([cols[3], cols[5]])
# Python 2 script (uses generator .next() and str/unicode .encode writes).
# Pair the morphology generator with the TEI XML walk below.
get_next_sent=next_sent()
get_next_sent=next_sent()  # NOTE(review): overwrites the first generator; net effect is one fresh generator
tree=ET.parse('ssj500k-en.TEI/ssj500k-en.body.xml')
root=tree.getroot()
# Per-split accumulators: plain, JOS-syntax, UD-syntax, and NER variants.
train=[]
dev=[]
test=[]
train_jos=[]
dev_jos=[]
test_jos=[]
train_ud=[]
dev_ud=[]
test_ud=[]
train_ner=[]
dev_ner=[]
test_ner=[]
# Raw-text sinks per split and annotation layer.
train_text=open('train.txt','w')
dev_text=open('dev.txt','w')
test_text=open('test.txt','w')
train_jos_text=open('train_jos.txt','w')
dev_jos_text=open('dev_jos.txt','w')
test_jos_text=open('test_jos.txt','w')
train_ud_text=open('train_ud.txt','w')
dev_ud_text=open('dev_ud.txt','w')
test_ud_text=open('test_ud.txt','w')
train_ner_text=open('train_ner.txt','w')
dev_ner_text=open('dev_ner.txt','w')
test_ner_text=open('test_ner.txt','w')
do_ner=True
# Each TEI <div> (document) is randomly routed to train (80%), dev (10%) or test (10%).
for doc in root.iter('{http://www.tei-c.org/ns/1.0}div'):
    rand=random.random()
    if rand<0.8:
        pointer=train
        pointer_text=train_text
        pointer_ud=train_ud
        pointer_ud_text=train_ud_text
        pointer_jos=train_jos
        pointer_jos_text=train_jos_text
        pointer_ner_text=train_ner_text
        pointer_ner=train_ner
    elif rand<0.9:
        pointer=dev
        pointer_text=dev_text
        pointer_ud=dev_ud
        pointer_ud_text=dev_ud_text
        pointer_jos=dev_jos
        pointer_jos_text=dev_jos_text
        pointer_ner=dev_ner
        pointer_ner_text=dev_ner_text
    else:
        pointer=test
        pointer_text=test_text
        pointer_ud=test_ud
        pointer_ud_text=test_ud_text
        pointer_jos=test_jos
        pointer_jos_text=test_jos_text
        pointer_ner=test_ner
        pointer_ner_text=test_ner_text
    for p in doc.iter('{http://www.tei-c.org/ns/1.0}p'):
        #print p.attrib
        # NER annotations stop being available from this paragraph onward
        if p.attrib['{http://www.w3.org/XML/1998/namespace}id']=='ssj500.2653':
            do_ner=False
        for element in p:
            if element.tag.endswith('s'):
                # a sentence element: collect its text, tokens, and NER tags
                sent_id=element.attrib['{http://www.w3.org/XML/1998/namespace}id']
                sentence=element
                text=''
                tokens=[]
                ners=[]
                uposfeats=get_next_sent.next()  # Python 2 generator protocol
                jos=None
                ud=None
                for element in sentence:
                    if element.tag[-3:]=='seg':
                        # <seg> groups tokens of a named entity (type/subtype)
                        if element.attrib['type']=='name':
                            ner=element.attrib['subtype']
                        else:
                            ner=None
                        for idx,subelement in enumerate(element):
                            text+=subelement.text
                            if not subelement.tag.endswith('}c'):  # skip punctuation-class tokens
                                if subelement.tag.endswith('w'):
                                    lemma=subelement.attrib['lemma']
                                else:
                                    lemma=subelement.text
                                if do_ner:
                                    if ner is not None:
                                        # first token gets B-, the rest I-
                                        if idx==0:
                                            ners.append('B-'+ner)
                                        else:
                                            ners.append('I-'+ner)
                                    else:
                                        ners.append('O')
                                tokens.append([subelement.text,lemma,subelement.attrib['ana'].split(':')[1]])
                    if element.tag[-2:] not in ('pc','}w','}c'):
                        # non-token child: dependency links live in <linkGrp>
                        if element.tag[-7:]=='linkGrp':
                            if element.attrib['type']=='UD-SYN':
                                ud=[]
                                for subelement in element:
                                    label=subelement.attrib['ana'].split(':')[1]
                                    head,dep=subelement.attrib['target'].split(' ')
                                    head=head.split('.')[-1]
                                    # head ids look like 't<N>'; anything else is the root (0)
                                    if head[0]!='t':
                                        head='0'
                                    else:
                                        head=head[1:]
                                    ud.append((head,label))
                            elif element.attrib['type']=='JOS-SYN':
                                jos=[]
                                for subelement in element:
                                    label=subelement.attrib['ana'].split(':')[1]
                                    head,dep=subelement.attrib['target'].split(' ')
                                    head=head.split('.')[-1]
                                    if head[0]!='t':
                                        head='0'
                                    else:
                                        head=head[1:]
                                    jos.append((head,label))
                        continue
                    # plain token directly under the sentence
                    text+=element.text
                    if not element.tag.endswith('}c'):
                        if element.tag.endswith('w'):
                            lemma=element.attrib['lemma']
                        else:
                            lemma=element.text
                        tokens.append([element.text,lemma,element.attrib['ana'].split(':')[1]])
                        if do_ner:
                            ners.append('O')
                # append the UPOS/FEATS columns from the morphology file to each token
                tokens=[a+b for a,b in zip(tokens,uposfeats)]
                pointer.append((sent_id,text,tokens))
                pointer_text.write(text.encode('utf8'))
                if ud!=None:
                    pointer_ud.append((sent_id,text,tokens,ud))
                    pointer_ud_text.write(text.encode('utf8'))
                if jos!=None:
                    pointer_jos.append((sent_id,text,tokens,jos))
                    pointer_jos_text.write(text.encode('utf8'))
                if do_ner:
                    pointer_ner.append((sent_id,text,tokens,ners))
                    pointer_ner_text.write(text.encode('utf8'))
            else:
                # non-sentence child of the paragraph: emit its raw text
                # NOTE(review): ud/jos here refer to the previous sentence's values
                # (undefined before the first sentence) — confirm intended
                pointer_text.write(element.text.encode('utf8'))
                if ud!=None:
                    pointer_ud_text.write(element.text.encode('utf8'))
                if jos!=None:
                    pointer_jos_text.write(element.text.encode('utf8'))
                if do_ner:
                    pointer_ner_text.write(element.text.encode('utf8'))
        # paragraph boundary in the raw-text outputs
        pointer_text.write('\n')
        if ud!=None:
            pointer_ud_text.write('\n')
        if jos!=None:
            pointer_jos_text.write('\n')
        if do_ner:
            pointer_ner_text.write('\n')
        #pointer_text.write('\n')
def write_list(lst,fname,synt=False,ner=False):
    """Write a list of sentences to *fname* in CoNLL-U format.

    Each element of *lst* is a (sent_id, text, tokens) tuple, optionally
    extended with a dependency list (synt=True) or an NE tag list (ner=True).
    Python 2 script: text is written as UTF-8 encoded byte strings.
    """
    f=open(fname,'w')
    for el in lst:
        # Unpack according to the requested output flavour.
        if not synt and not ner:
            sid,text,tokens=el
        elif ner:
            sid,text,tokens,nes=el
        else:
            sid,text,tokens,dep=el
        f.write('# sent_id = '+sid+'\n')
        f.write('# text = '+text.encode('utf8')+'\n')
        for idx,token in enumerate(tokens):
            # CoNLL-U columns: ID FORM LEMMA UPOS XPOS FEATS HEAD DEPREL DEPS MISC
            if not synt and not ner:
                f.write(str(idx+1)+'\t'+token[0].encode('utf8')+'\t'+token[1].encode('utf8')+'\t'+token[3]+'\t'+token[2]+'\t'+token[4]+'\t_\t_\t_\t_\n')
            elif synt:
                # HEAD and DEPREL come from the dependency annotation.
                f.write(str(idx+1)+'\t'+token[0].encode('utf8')+'\t'+token[1].encode('utf8')+'\t'+token[3]+'\t'+token[2]+'\t'+token[4]+'\t'+dep[idx][0].encode('utf8')+'\t'+dep[idx][1].encode('utf8')+'\t_\t_\n')
            else:
                # The NE tag is stored in the MISC column.
                f.write(str(idx+1)+'\t'+token[0].encode('utf8')+'\t'+token[1].encode('utf8')+'\t'+token[3]+'\t'+token[2]+'\t'+token[4]+'\t_\t_\t_\t'+nes[idx].encode('utf8')+'\n')
        f.write('\n')
    f.close()
# Dump every split in all four flavours: plain morphosyntax, JOS and UD
# dependency trees, and NER annotations.
write_list(train,'train.conllu')
write_list(dev,'dev.conllu')
write_list(test,'test.conllu')
write_list(train_jos,'train_jos.conllu',True)
write_list(dev_jos,'dev_jos.conllu',True)
write_list(test_jos,'test_jos.conllu',True)
write_list(train_ud,'train_ud.conllu',True)
write_list(dev_ud,'dev_ud.conllu',True)
write_list(test_ud,'test_ud.conllu',True)
write_list(train_ner,'train_ner.conllu',ner=True)
write_list(dev_ner,'dev_ner.conllu',ner=True)
write_list(test_ner,'test_ner.conllu',ner=True)
# Close the raw-text streams that were opened earlier in the script.
train_text.close()
dev_text.close()
test_text.close()
train_ud_text.close()
dev_ud_text.close()
test_ud_text.close()
train_jos_text.close()
dev_jos_text.close()
test_jos_text.close()
train_ner_text.close()
dev_ner_text.close()
test_ner_text.close()
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,381
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/transform/annotate_docs.py
|
import json
import pathlib
import stanza
import classla
import pandas as pd
from fuzzywuzzy import fuzz
DOWNLOAD_RESOURCES = False
LOWEST_SIMILARITY = 85
def split_documents(dataset_files: dict, tokenizers: dict):
    """Tokenize and annotate every raw/annotated document pair.

    Writes one merged CSV per document under `<dataset>/merged/<lang>/` and
    dumps all collected merge warnings to ./data/results/merge_warnings.json.
    Only the covid-19 and us_election_2020 datasets are processed.
    """
    wanted = ('./data/datasets/bsnlp/covid-19', './data/datasets/bsnlp/us_election_2020')
    all_warnings = []
    processed = 0
    for dataset, langs in dataset_files.items():
        if dataset not in wanted:
            print(f"Skipping {dataset}")
            continue
        for lang, docs in langs.items():
            print(f'Dataset: {dataset}, Language: {lang}')
            merged_path = f'{dataset}/merged/{lang}'
            if not pathlib.Path(merged_path).exists():
                pathlib.Path(merged_path).mkdir(parents=True, exist_ok=True)
            for doc in docs:
                sentences, doc_id = split_document(doc['raw'], tokenizers[lang], lang)
                merged, warns = annotate_document(sentences, doc['annotated'], doc_id, tokenizers[lang], lang)
                all_warnings.extend(warns)
                # raw files end in ".txt" / ".out"; swap the extension for csv
                doc_name = f"{doc['raw'].split('/')[-1][:-3]}csv"
                merged.to_csv(f'{merged_path}/{doc_name}', index=False)
                processed += 1
    print(f'Files processed: {processed}.')
    print(f'Number of warnings occured: {len(all_warnings)}.')
    json.dump(all_warnings, open('./data/results/merge_warnings.json', 'w'), indent=4)
def convert_sentences(raw_sentences, lang):
    """Convert stanza/classla sentence objects into plain token dicts.

    classla (used for 'sl' and 'bg') exposes the token index via
    ``token.index``, while stanza keeps it in ``token.id`` (a tuple).
    Multi-word tokens are collapsed by joining their word attributes.

    Returns a list of sentences, each a list of dicts with keys
    id / text / calcLemma / upos / xpos.
    """
    sentences = []
    for sentence in raw_sentences:
        tokens = []
        for token in sentence.tokens:
            if len(token.words) > 1:
                print(f"MORE WORDS: {token.words}")
            tokens.append({
                "id": token.index if lang in ['sl', 'bg'] else token.id[0],
                "text": ''.join([w.text for w in token.words]),
                "calcLemma": ' '.join([w.lemma for w in token.words if w.lemma is not None]),
                # bugfix: upos/xpos were swapped — "upos" was filled from
                # w.xpos (language-specific tag) and "xpos" from w.upos.
                "upos": ' '.join([w.upos for w in token.words if w.upos is not None]),
                "xpos": ' '.join([w.xpos for w in token.words if w.xpos is not None]),
            })
        sentences.append(tokens)
    return sentences
def split_document(document_path: str, tokenizer, lang: str):
    """Read a raw BSNLP document and tokenize its body.

    The first line of the file holds the document id; everything from line
    index 4 (the title) onwards is treated as the document content.

    Returns (sentences, document_id), where *sentences* is the output of
    convert_sentences().
    """
    # `with` closes the handle promptly — the original left it to GC.
    with open(document_path, encoding='utf-8-sig') as doc_file:
        document_lines = doc_file.readlines()
    document_id = document_lines[0].strip()
    content = ' '.join(document_lines[4:])
    doc = tokenizer(content)
    sentences = convert_sentences(doc.sentences, lang)
    return sentences, document_id
def tokenize_mention(mention: str, tokenizer, lang: str) -> list:
    """Tokenize *mention* and return the list of its surface forms."""
    # just for slo
    surface_forms = []
    for sentence in convert_sentences(tokenizer(mention).sentences, lang):
        surface_forms.extend(token['text'] for token in sentence)
    return surface_forms
def sort_by_mention_length(data: pd.DataFrame) -> pd.DataFrame:
    """Return *data* reordered so that shorter mentions come first.

    The index is reset so downstream code sees a contiguous 0..n-1 range.
    """
    order = data['Mention'].str.len().sort_values().index
    reordered = data.reindex(order)
    return reordered.reset_index(drop=True)
def annotate_document(sentences: list, annotations_path: str, document_id: str, tokenizer, lang) -> (pd.DataFrame, list):
    """Project the NE annotations in *annotations_path* onto *sentences*.

    Every token gets ner/lemma/clID/sentenceId/docId fields; annotation
    mentions are matched against the token stream with fuzzy string
    similarity (threshold LOWEST_SIMILARITY). Returns the annotated tokens
    as a DataFrame plus a list of warning dicts for anything that did not
    match cleanly.
    """
    try:
        # First row of the annotation file is the document id -> skip it.
        anns = pd.read_csv(annotations_path, names=['Mention', 'Base', 'Category', 'clID'], skiprows=[0], sep='\t')
        # a hack to first look for shorter matches if mentions
        # are substrings, e.g. komisija vs Evropska Komisija
        ann_df = sort_by_mention_length(anns)
    except:
        # Missing/empty annotation file: continue with zero annotations.
        print(f"CAN'T LOAD {annotations_path}")
        anns = pd.DataFrame()
        ann_df = pd.DataFrame(columns=['Mention', 'Base', 'Category', 'clID'])
    warnings = []
    if len(ann_df['Mention'].unique()) != len(ann_df.index):
        print("Duplicate mentions!")
        warnings.append({
            "msg": "Duplicate mentions found!",
            "doc": annotations_path,
        })
    annotations = ann_df.to_dict('records')
    # Flatten the sentences into one token stream, defaulting every token
    # to the "outside" NER tag.
    annotated_tokens = []
    for sent_id, sentence in enumerate(sentences):
        for token in sentence:
            token['ner'] = 'O'
            token['lemma'] = ''
            token['clID'] = ''
            token['sentenceId'] = sent_id
            token['docId'] = document_id
            annotated_tokens.append(token)
    used_annotations = 0
    for annotation in annotations:
        ann_pieces = tokenize_mention(annotation['Mention'], tokenizer, lang)
        matched = 0
        for token_id, token in enumerate(annotated_tokens):
            # Cheap pre-filter: only try a full match where the first piece
            # of the mention is similar enough to the current token.
            first_ratio = fuzz.ratio(ann_pieces[0].lower(), token['text'].lower())
            if first_ratio >= LOWEST_SIMILARITY:
                if token_id + len(ann_pieces) > len(annotated_tokens):
                    continue
                # Every piece of the mention must align with a token.
                all_ratio = [fuzz.ratio(ann.lower(), annotated_tokens[token_id + i]['text'].lower()) for i, ann in enumerate(ann_pieces)]
                if len([r for r in all_ratio if r >= LOWEST_SIMILARITY]) != len(ann_pieces):
                    continue
                # f_ner marks the first token of the span (B- vs I- prefix).
                f_ner = True
                matched_tokens = [annotated_tokens[token_id + i]['text'] for i, _ in enumerate(ann_pieces)]
                lemma = tokenize_mention(str(annotation["Base"]), tokenizer, lang)
                for i, _ in enumerate(ann_pieces):
                    t = annotated_tokens[token_id + i]
                    t['ner'] = f"{'B' if f_ner else 'I'}-{annotation['Category']}"
                    # Base form shorter than the mention: pad so every token
                    # in the span still receives a lemma.
                    if not lemma:
                        warnings.append({
                            "msg": "BASE FORM DOES NOT MATCH MENTION",
                            "doc": annotations_path,
                            "lemma": annotation['Base'],
                            "ner": annotation['Mention'],
                            "matched": matched_tokens
                        })
                        print(f"[WARNING] LEMMA DOES NOT MATCH")
                        lemma = ['PAD']
                    t['lemma'] = lemma.pop(0)
                    t['clID'] = annotation["clID"]
                    f_ner = False if f_ner else f_ner
                matched += 1
        if matched == 0:
            warnings.append({
                "msg": "Annotation not matched!",
                "doc": annotations_path,
                "annotation": annotation,
            })
        used_annotations += 1 if matched > 0 else 0
    if used_annotations != len(annotations):
        print(f"[WARNING] UNUSED ANNOTATIONS: {used_annotations}/{len(annotations)}")
        warnings.append({
            "msg": f"ALTERED ITEMS ({used_annotations}) NOT EQUAL TO ANNOTATIONS ({len(annotations)})",
            "doc": annotations_path,
            "num_altered": used_annotations,
            "num_annotations": len(annotations)
        })
    sentence_df = pd.DataFrame(annotated_tokens)
    sentence_df = sentence_df.rename(columns={'id': 'tokenId'})
    sentence_df = sentence_df[['docId', 'sentenceId', 'tokenId', 'text', 'lemma', 'calcLemma', 'upos', 'xpos', 'ner', 'clID']] # leaving out 'misc' for now
    return sentence_df, warnings
if __name__ == '__main__':
    # Document pairs produced by src/analyze/main.py.
    datasets_files = json.load(open('./data/results/dataset_pairs.json'))
    languages = set([lang for dataset in datasets_files for lang in datasets_files[dataset].keys()])
    print(languages)
    processors = 'tokenize,pos,lemma'
    if DOWNLOAD_RESOURCES: # do it once on a new system
        for lang in languages:
            # stanza names Ukrainian 'uk', the dataset uses 'ua'
            lang = lang if lang != 'ua' else 'uk'
            print(f'Downloading {lang}...')
            stanza.download(lang, processors=processors)
        classla.download('sl')
        classla.download('bg')
    # stanza pipelines for every language; Slovene and Bulgarian are
    # overridden with classla pipelines afterwards.
    tokenizers = {lang: stanza.Pipeline(lang=lang if lang != 'ua' else 'uk', processors=processors) for lang in languages}
    tokenizers['sl'] = classla.Pipeline('sl', processors=processors)
    tokenizers['bg'] = classla.Pipeline('bg', processors=processors)
    split_documents(datasets_files, tokenizers)
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,382
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/analyze/main.py
|
import os
import json
import pandas as pd
import sys
import logging
from collections import defaultdict
# Log everything to stdout, tagged with timestamp and source location.
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s'
)
logger = logging.getLogger('main')
def list_all_files(dirpath: str) -> (list, list, dict):
    """Walk *dirpath* and gather per-split file statistics.

    Splits are inferred from the directory path ('test' / 'sample' /
    anything else -> 'train'); within a split, files are bucketed as
    annotated vs raw and keyed by the two-letter language suffix of their
    directory. Returns (sorted dirs, sorted files, stats).
    """
    files, dirs = [], []
    stats = {
        split: {
            "numFiles": 0,
            "dirs": {
                "annotated": defaultdict(list),
                "raw": defaultdict(list),
            },
        }
        for split in ('test', 'sample', 'train')
    }
    for dpath, dnames, fnames in os.walk(dirpath):
        dirs.extend(f'{dpath}/{dname}' for dname in dnames)
        full_files = [f'{dpath}/{fname}' for fname in fnames]
        files.extend(full_files)
        if not fnames:
            continue
        if 'test' in dpath:
            split = 'test'
        elif 'sample' in dpath:
            split = 'sample'
        else:
            split = 'train'
        stats[split]['numFiles'] += len(full_files)
        kind = 'annotated' if 'annotated' in dpath else 'raw'
        # the last two path characters are the language code, e.g. .../sl
        stats[split]['dirs'][kind][dpath[-2:].lower()].extend(full_files)
    return sorted(dirs), sorted(files), stats
def list_datasets(datasets: list) -> dict:
    """Pair each raw document with its annotation file, per dataset and language.

    Files are matched positionally (both listings sorted); a warning is
    printed when the digits embedded in the paired file names differ.
    Datasets without an `annotated` tree fall back to the raw listing.

    Returns {dataset: {lang: [{'raw', 'annotated', 'raw_fname', 'ann_fname'}, ...]}}.
    """
    dataset_files = {}
    for dataset in datasets:
        dataset_files[dataset] = {}
        languages_raw = sorted(os.listdir(f'{dataset}/raw'))
        try:
            languages_ann = sorted(os.listdir(f'{dataset}/annotated'))
        except OSError:  # no annotated tree for this dataset
            languages_ann = languages_raw
        for lang_id, lang in enumerate(languages_raw):
            base_raw = f'{dataset}/raw/{lang}'
            base_ann = f'{dataset}/annotated/{lang}'
            raw_files = sorted(os.listdir(base_raw))
            try:
                ann_files = sorted(os.listdir(base_ann))
            except OSError:  # no annotations for this language
                ann_files = raw_files
            for r, a in zip(raw_files, ann_files):
                digits_r = ''.join([d for d in r if d.isdigit()])
                # bugfix: was computed from `r`, which made this sanity
                # check always pass.
                digits_a = ''.join([d for d in a if d.isdigit()])
                if digits_a != digits_r:
                    print(f'NO MATCH:\n{base_raw}/{r}\n{base_ann}/{a}')
            dataset_files[dataset][languages_ann[lang_id]] = [
                {
                    'raw': f'{base_raw}/{r}',
                    'annotated': f'{base_ann}/{a}',
                    'raw_fname': r, 'ann_fname': a
                }
                for r, a in zip(raw_files, ann_files)
            ]
    return dataset_files
def aggregate_nes(stats: dict) -> dict:
    """Aggregate named-entity counts per dataset, language and attribute.

    Reads every annotated file listed in *stats*, computes value counts for
    Mention/Base/Category/clID, writes per-language and aggregated CSVs
    under ./data/stats/, and returns the per-dataset/language counts as
    JSON strings.
    """
    ne_stats = {}
    atts = ['Mention', 'Base', 'Category', 'clID']
    all_data = {att: pd.DataFrame() for att in atts}
    for dataset, data in stats.items():
        ne_stats[dataset] = {}
        for lang, files in data['dirs']['annotated'].items():
            ne_stats[dataset][lang] = {}
            lang_data = pd.DataFrame()
            for file in files:
                # First row of each annotation file is the document id -> skip.
                file_nes = pd.read_csv(file, header=None, skiprows=[0], delimiter='\t', names=['Mention', 'Base', 'Category', 'clID'])
                lang_data = pd.concat([lang_data, file_nes], ignore_index=True)
            for att in atts:
                counts = pd.DataFrame(lang_data[att].value_counts())
                ne_stats[dataset][lang][att] = counts.to_json()
                counts.reset_index(inplace=True)
                counts = counts.rename(columns={'index': att, att:'Count'})
                all_data[att] = pd.concat([all_data[att], counts], ignore_index=True)
                counts.to_csv(f'./data/stats/{dataset}-{lang}-{att}.csv', index=False)
    for att in atts:
        counts = all_data[att].groupby([att]).agg(['sum'])
        counts.reset_index(inplace=True)
        counts.columns = [att, 'Count']
        # NOTE(review): `dataset` here is the leftover loop variable from the
        # last iteration above, so the aggregated CSV is named after one
        # dataset even though it spans all of them — confirm intended name.
        counts.to_csv(f'./data/stats/{dataset}-{att}.csv', index=False)
    return ne_stats
def raw_doc_info(fname: str) -> dict:
    """Extract header metadata and size statistics from a raw BSNLP document.

    The file layout is: id, language, creation date, URL, title, then the
    document body.
    """
    with open(fname, encoding='utf-8-sig') as f:
        lines = f.readlines()
    header_keys = ('id', 'lang', 'created', 'url', 'title')
    file_info = {key: lines[pos].strip() for pos, key in enumerate(header_keys)}
    body = ' '.join(lines[5:]).strip()
    file_info['contentLength'] = len(body)
    file_info['numWords'] = len(body.split(' '))
    return file_info
def ann_doc_info(fname: str) -> dict:
    """Summarise an annotation file: per-category NE counts + unique cluster ids.

    The first line of the file holds the document id; the remaining lines are
    tab-separated Mention/Base/Category/clID rows.
    """
    ne_categories = ['PER', 'ORG', 'LOC', 'EVT', 'PRO']
    with open(fname, encoding='utf-8-sig') as f:
        first_line = f.readlines()[0]
    file_info = {'id': first_line.strip()}
    anns = pd.read_csv(fname, names=['Mention', 'Base', 'Category', 'clID'], skiprows=[0], sep='\t')
    file_info['NEcount'] = len(anns.index)
    per_category = anns['Category'].value_counts()
    for cat in ne_categories:
        # categories absent from the file get an explicit zero
        file_info[cat] = per_category[cat] if cat in per_category else 0
    file_info['UniqueCLIDs'] = len(anns['clID'].unique())
    return file_info
def get_doc_info(stats: dict) -> dict:
    """Collect per-file statistics for raw and annotated documents.

    Dumps them to ./data/results/file_raw_stats.csv and file_ne_stats.csv
    and returns both DataFrames as {'raw': ..., 'ann': ...}.
    """
    raw_rows, ann_rows = [], []
    for dataset, data in stats.items():
        # raw files first, then annotated — each tagged with its origin
        for kind, rows, extractor in (
            ('raw', raw_rows, raw_doc_info),
            ('annotated', ann_rows, ann_doc_info),
        ):
            for lang, files in data['dirs'][kind].items():
                for file in files:
                    info = extractor(file)
                    info['dataset_dir'] = dataset
                    info['lang'] = lang
                    info['fpath'] = file
                    rows.append(info)
    raw_df = pd.DataFrame(raw_rows)
    raw_df.to_csv("./data/results/file_raw_stats.csv")
    ann_df = pd.DataFrame(ann_rows)
    ann_df.to_csv("./data/results/file_ne_stats.csv")
    return {
        "raw": raw_df,
        "ann": ann_df,
    }
if __name__ == '__main__':
    # Datasets to index; the raw/annotated pairing is written to
    # ./data/results/dataset_pairs.json for the downstream merge step.
    datasets = [
        './data/datasets/bsnlp/ec',
        './data/datasets/bsnlp/trump',
        # 2019 data is updated for the 2021 challenge, so these are obsolete
        # './data/datasets/bsnlp/sample',
        # './data/datasets/bsnlp/training',
        # './data/datasets/bsnlp/nord_stream',
        # './data/datasets/bsnlp/ryanair',
        './data/datasets/bsnlp/asia_bibi',
        './data/datasets/bsnlp/brexit',
        './data/datasets/bsnlp/nord_stream',
        './data/datasets/bsnlp/other',
        './data/datasets/bsnlp/ryanair',
        './data/datasets/bsnlp/covid-19',
        './data/datasets/bsnlp/us_election_2020',
    ]
    dataset_files = list_datasets(datasets)
    logger.info('Done.')
    with open('./data/results/dataset_pairs.json', 'w') as f:
        json.dump(dataset_files, f, indent=4)
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,383
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/train/crosloeng.py
|
import pandas as pd
import numpy as np
import torch
import random
import os
import sys
import logging
import argparse
import transformers
import pathlib
from datetime import datetime
from tqdm import trange, tqdm
from torch.utils.data import TensorDataset, DataLoader, RandomSampler
from transformers import AutoTokenizer, AutoModelForTokenClassification, AdamW
from transformers import get_linear_schedule_with_warmup, PreTrainedModel
from keras.preprocessing.sequence import pad_sequences
from seqeval.metrics import f1_score, precision_score, recall_score, accuracy_score, classification_report
from matplotlib import pyplot as plt
from itertools import product
from src.train.model import Model
from src.utils.load_dataset import LoadSSJ500k, LoadBSNLP, LoadCombined
from src.utils.utils import list_dir
# Log everything to stdout, tagged with timestamp and source location.
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s'
)
logger = logging.getLogger('TrainEvalModels')
class BertModel(Model):
    """Token-classification (NER) wrapper around a HuggingFace BERT model.

    Handles tokenization/padding of DataFrame-shaped datasets, fine-tuning
    (optionally of the entire network), validation during training, and
    evaluation of saved checkpoints with seqeval metrics.
    """
    def __init__(
        self,
        tag2code,
        code2tag,
        output_model_path: str, # this is the output dir
        output_model_fname: str, # this is the output file name
        tune_entire_model: bool,
        epochs: int = 3,
        max_grad_norm: float = 1.0,
        input_model_path: str = 'data/models/cro-slo-eng-bert', # this is a directory
        use_test: bool = False
    ):
        super().__init__()
        self.input_model_path = input_model_path
        self.output_model_path = output_model_path
        self.output_model_fname = output_model_fname
        # when True, train() folds the test split into the training data
        self.use_test = use_test
        logger.info(f"Output model at: {output_model_path}")
        logger.info(f"Tuning entire model: {tune_entire_model}")
        self.tune_entire_model = tune_entire_model
        self.tokenizer = AutoTokenizer.from_pretrained(
            self.input_model_path,
            from_pt=True,
            do_lower_case=False,
            use_fast=False,
        )
        self.MAX_LENGTH = 128 # max input length (wordpiece tokens per sentence)
        self.BATCH_SIZE = 32 # sentences per batch
        self.epochs = epochs
        self.max_grad_norm = max_grad_norm
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        logger.info(f"Using device: {self.device}")
        # tag <-> integer code mappings shared by all datasets
        self.tag2code, self.code2tag = tag2code, code2tag
        logger.info(f"tags: {self.tag2code.keys()}")
        self.save_weights = False

    def convert_input(self, input_data: pd.DataFrame):
        """Tokenize a (docId, sentenceId, text, ner) DataFrame into a
        padded, masked DataLoader of (tokens, masks, tags) tensors."""
        tokens = []
        tags = [] # NER tags
        if 'docId' not in input_data.columns:
            # datasets without document ids get a dummy constant id
            input_data['docId'] = 'xxx'
        for (_, sentence), data in input_data.groupby(["docId", "sentenceId"]):
            sentence_tokens = []
            sentence_tags = []
            for id, word_row in data.iterrows():
                # each word may expand to several wordpieces; replicate its
                # NER tag onto every piece
                word_tokens = self.tokenizer.tokenize(str(word_row["text"]))
                sentence_tokens.extend(word_tokens)
                sentence_tags.extend([self.tag2code[word_row["ner"]]] * len(word_tokens))
            sentence_ids = self.tokenizer.convert_tokens_to_ids(sentence_tokens)
            tokens.append(sentence_ids)
            tags.append(sentence_tags)
        # padding is required to spill the sentence tokens in case there are sentences longer than 128 words
        # or to fill in the missing places to 128 (self.MAX_LENGTH)
        tokens = torch.as_tensor(pad_sequences(
            tokens,
            maxlen=self.MAX_LENGTH,
            dtype="long",
            value=0.0,
            truncating="post",
            padding="post"
        )).to(self.device)
        tags = torch.as_tensor(pad_sequences(
            tags,
            maxlen=self.MAX_LENGTH,
            dtype="long",
            value=self.tag2code["PAD"],
            truncating="post",
            padding="post"
        )).to(self.device)
        # attention mask: 1.0 for real tokens, 0.0 for padding
        masks = torch.as_tensor(np.array([[float(token != 0.0) for token in sentence] for sentence in tokens])).to(
            self.device)
        data = TensorDataset(tokens, masks, tags)
        sampler = RandomSampler(data)
        return DataLoader(data, sampler=sampler, batch_size=self.BATCH_SIZE)

    def convert_output(self):
        # not needed for this model; required by the Model interface
        pass

    def train(
        self,
        data_loaders: dict
    ):
        """Fine-tune the pre-trained model on each dataset in *data_loaders*
        (sequentially, i.e. iterative fine-tuning) and save the result."""
        logger.info(f"Loading the pre-trained model `{self.input_model_path}`...")
        model = AutoModelForTokenClassification.from_pretrained(
            self.input_model_path,
            num_labels=len(self.tag2code),
            label2id=self.tag2code,
            id2label=self.code2tag,
            output_attentions=False,
            output_hidden_states=False
        )
        model = model.to(self.device)
        optimizer, loss = None, None
        for dataset, dataloader in data_loaders.items():
            logger.info(f'Training on `{dataset}`')
            # hack to use entire dataset, leaving the validation data intact
            td = pd.concat([dataloader.train(), dataloader.test()]) if self.use_test else dataloader.train()
            model, optimizer, loss = self.__train(model, train_data=td,
                                                  validation_data=dataloader.dev())
        out_fname = f"{self.output_model_path}/{self.output_model_fname}"
        logger.info(f"Saving the model at: {out_fname}")
        model.save_pretrained(out_fname)
        self.tokenizer.save_pretrained(out_fname)
        logger.info("Done!")

    def __train(
        self,
        model,
        train_data: pd.DataFrame,
        validation_data: pd.DataFrame
    ):
        """One fine-tuning run: AdamW + linear warmup schedule, per-epoch
        validation, and a loss plot saved next to the model."""
        logger.info("Loading the training data...")
        train_data = self.convert_input(train_data)
        logger.info("Loading the validation data...")
        validation_data = self.convert_input(validation_data)
        if self.tune_entire_model:
            # full fine-tuning: weight decay on everything except biases
            # and LayerNorm parameters
            model_parameters = list(model.named_parameters())
            no_decay = ['bias', 'gamma', 'beta']
            optimizer_parameters = [
                {
                    'params': [p for n, p in model_parameters if not any(nd in n for nd in no_decay)],
                    'weight_decay_rate': 0.01
                },
                {
                    'params': [p for n, p in model_parameters if any(nd in n for nd in no_decay)],
                    'weight_decay_rate': 0.0
                }
            ]
        else:
            model_parameters = list(model.named_parameters())
            optimizer_parameters = [{"params": [p for n, p in model_parameters]}]
        optimizer = AdamW(
            optimizer_parameters,
            lr=3e-5,
            eps=1e-8
        )
        total_steps = len(train_data) * self.epochs
        scheduler = get_linear_schedule_with_warmup(
            optimizer,
            num_warmup_steps=0,
            num_training_steps=total_steps
        )
        # ensure reproducibility
        # TODO: try out different seed values
        seed_val = 42
        random.seed(seed_val)
        np.random.seed(seed_val)
        torch.manual_seed(seed_val)
        torch.cuda.manual_seed_all(seed_val)
        training_loss, validation_loss, loss = [], [], None
        logger.info(f"Training the model for {self.epochs} epochs...")
        for _ in trange(self.epochs, desc="Epoch"):
            model.train()
            total_loss = 0
            # train:
            for step, batch in tqdm(enumerate(train_data), desc='Batch'):
                batch_tokens, batch_masks, batch_tags = tuple(t.to(self.device) for t in batch)
                # reset the grads
                model.zero_grad()
                outputs = model(
                    batch_tokens,
                    attention_mask=batch_masks,
                    labels=batch_tags
                )
                loss = outputs[0]
                loss.backward()
                total_loss += loss.item()
                # preventing exploding gradients
                torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=self.max_grad_norm)
                # update the parameters
                optimizer.step()
                # update the learning rate (lr)
                scheduler.step()
            avg_epoch_train_loss = total_loss / len(train_data)
            logger.info(f"Avg train loss = {avg_epoch_train_loss:.4f}")
            training_loss.append(avg_epoch_train_loss)
            # validate:
            model.eval()
            val_loss, val_acc, val_f1, val_p, val_r, val_report = self.__test(model, validation_data)
            validation_loss.append(val_loss)
            logger.info(f"Validation loss: {val_loss:.4f}")
            logger.info(f"Validation accuracy: {val_acc:.4f}, P: {val_p:.4f}, R: {val_r:.4f}, F1 score: {val_f1:.4f}")
            logger.info(f"Classification report:\n{val_report}")
        # plot train/validation loss per epoch
        fig, ax = plt.subplots()
        ax.plot(training_loss, label="Traing loss")
        ax.plot(validation_loss, label="Validation loss")
        ax.legend()
        ax.set_title("Model Loss")
        ax.set_ylabel("Loss")
        ax.set_xlabel("Epoch")
        fig.savefig(f"{self.output_model_path}/{self.output_model_fname}-loss.png")
        return model, optimizer, loss

    def translate(self, predictions: list, labels: list, tokens) -> (list, list, list):
        """Map coded predictions/labels back to tag strings, dropping
        positions whose gold label is PAD."""
        translated_predictions, translated_labels, translated_tokens = [], [], []
        for preds, labs, toks in zip(predictions, labels, tokens):
            sentence_predictions, sentence_labels, sentence_tokens = [], [], []
            for p, l, t in zip(preds, labs, toks):
                if l == self.tag2code["PAD"]:
                    continue
                sentence_tokens.append(t)
                sentence_predictions.append(self.code2tag[p])
                sentence_labels.append(self.code2tag[l])
            translated_tokens.append(sentence_tokens)
            translated_predictions.append(sentence_predictions)
            translated_labels.append(sentence_labels)
        return translated_predictions, translated_labels, translated_tokens

    def __test(self, model: PreTrainedModel, data: DataLoader) -> (float, float, float, float, float, str):
        """Evaluate *model* on *data*; returns (loss, accuracy, f1,
        precision, recall, classification report)."""
        eval_loss = 0.
        eval_steps, eval_examples = 0, 0
        tokens, eval_predictions, eval_labels = [], [], []
        model.eval()
        for batch in tqdm(data):
            batch_tokens, batch_masks, batch_tags = tuple(t.to(self.device) for t in batch)
            with torch.no_grad():
                outputs = model(
                    batch_tokens,
                    attention_mask=batch_masks,
                    labels=batch_tags
                )
            logits = outputs[1].detach().cpu().numpy()
            label_ids = batch_tags.to('cpu').numpy()
            toks = batch_tokens.to('cpu').numpy()
            eval_loss += outputs[0].mean().item()
            batch_toks = [self.tokenizer.convert_ids_to_tokens(sentence) for sentence in toks]
            tokens.extend(batch_toks)
            # greedy decoding: most likely tag per token position
            eval_predictions.extend([list(p) for p in np.argmax(logits, axis=2)])
            eval_labels.extend(label_ids)
            eval_examples += batch_tokens.size(0)
            eval_steps += 1
        eval_loss = eval_loss / eval_steps
        predicted_tags, valid_tags, tokens = self.translate(eval_predictions, eval_labels, tokens)
        score_acc = accuracy_score(valid_tags, predicted_tags)
        score_f1 = f1_score(valid_tags, predicted_tags)
        score_p = precision_score(valid_tags, predicted_tags)
        score_r = recall_score(valid_tags, predicted_tags)
        report = classification_report(valid_tags, predicted_tags)
        return eval_loss, score_acc, score_f1, score_p, score_r, report

    def test(self, test_data: pd.DataFrame) -> (float, float, float)
:
        """Evaluate every saved checkpoint matching this model's output name
        on *test_data*; returns mean (precision, recall, f1)."""
        if not (os.path.exists(self.output_model_path) and os.path.isdir(self.output_model_path)):
            raise Exception(f"A model with the given parameters has not been trained yet,"
                            f" or is not located at `{self.output_model_path}`.")
        models, _ = list_dir(self.output_model_path)
        models = [model_fname for model_fname in models if model_fname.startswith(self.output_model_fname)]
        print("Models:", models)
        if not models:
            raise Exception(f"There are no trained models with the given criteria: `{self.output_model_fname}`")
        logger.info("Loading the testing data...")
        test_data = self.convert_input(test_data)
        avg_acc, avg_f1, avg_p, avg_r, reports = [], [], [], [], []
        for model_fname in models:
            logger.info(f"Loading {model_fname}...")
            model = AutoModelForTokenClassification.from_pretrained(
                f"{self.output_model_path}/{model_fname}",
                num_labels=len(self.tag2code),
                label2id=self.tag2code,
                id2label=self.code2tag,
                output_attentions=False,
                output_hidden_states=False
            )
            model = model.to(self.device)
            _, acc, f1, p, r, report = self.__test(model, test_data)
            avg_acc.append(acc)
            avg_f1.append(f1)
            avg_p.append(p)
            avg_r.append(r)
            logger.info(f"Testing P: {p:.4f}, R: {r:.4f}, F1: {f1:.4f}")
            logger.info(f"Testing classification report:\n{report}")
        logger.info(f"Average accuracy: {np.mean(avg_acc):.4f}")
        f1 = np.mean(avg_f1)
        p = np.mean(avg_p)
        r = np.mean(avg_r)
        logger.info(f"Average P: {p:.4f}, R: {r:.4f}, F1: {f1:.4f}")
        return p, r, f1
def parse_args():
    """Command-line flags for the training/evaluation driver."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--train', action='store_true')
    arg_parser.add_argument('--train-iterations', type=int, default=1)
    arg_parser.add_argument('--train-bundle', type=str, default="slo_misc-only")
    arg_parser.add_argument('--epochs', type=int, default=3)
    arg_parser.add_argument('--test', action='store_true')
    arg_parser.add_argument('--run-path', type=str, default=None)
    arg_parser.add_argument('--full-finetuning', action='store_true')
    return arg_parser.parse_args()
def main():
    """Entry point: train and/or evaluate every (model, training bundle)
    combination selected on the command line, collecting test scores into
    one CSV under the run directory."""
    args = parse_args()
    global JOB_ID
    JOB_ID = os.environ['SLURM_JOB_ID'] if 'SLURM_JOB_ID' in os.environ else None
    logger.info(f"Training new NER models")
    logger.info(f"SLURM_JOB_ID = {JOB_ID}")
    logger.info(f"Training: {args.train}")
    logger.info(f"Train iterations: {args.train_iterations}")
    logger.info(f"Train bundle: {args.train_bundle}")
    logger.info(f"Epochs: {args.epochs}")
    logger.info(f"Full finetuning: {args.full_finetuning}")
    logger.info(f"Testing: {args.test}")
    logger.info(f"Torch version {torch.__version__}")
    logger.info(f"Transformers version {transformers.__version__}")
    # Each bundle names the pre-trained models to start from, the training
    # dataset combinations ("iterative" = fine-tune on one dataset after the
    # other, "combined" = concatenate them), and the evaluation datasets.
    train_bundles = {
        "slo_misc": {
            "models": [
                "cro-slo-eng-bert",
                "bert-base-multilingual-cased",
                "bert-base-multilingual-uncased",
                "sloberta-1.0",
                "sloberta-2.0",
            ],
            "train": {
                "ssj500k-bsnlp2017-iterative": {
                    "ssj500k": LoadSSJ500k(),
                    "bsnlp-2017": LoadBSNLP(lang='sl', year='2017'),
                },
                "ssj500k-bsnlp-2017-combined": {
                    "combined": LoadCombined([LoadSSJ500k(), LoadBSNLP(lang='sl', year='2017')]),
                },
                "ssj500k-bsnlp-2021-iterative": {
                    "ssj500k": LoadSSJ500k(),
                    "bsnlp2021": LoadBSNLP(lang='sl', year='2021'),
                },
                "ssj500k-bsnlp-2021-combined": {
                    "combined": LoadCombined([LoadSSJ500k(), LoadBSNLP(lang='sl', year='2021')]),
                },
                "ssj500k-bsnlp-all-iterative": {
                    "ssj500k": LoadSSJ500k(),
                    "bsnlp2017": LoadBSNLP(lang='sl', year='all'),
                },
                "ssj500k-bsnlp-all-combined": {
                    "combined": LoadCombined([LoadSSJ500k(), LoadBSNLP(lang='sl', year='all')]),
                },
                "ssj500k": {
                    "ssj500k": LoadSSJ500k(),
                },
                "bsnlp-2017": {
                    "bsnlp-2017": LoadBSNLP(lang='sl', year='2017'),
                },
                "bsnlp-2021": {
                    "bsnlp-2021": LoadBSNLP(lang='sl', year='2021'),
                },
                "bsnlp-all": {
                    "bsnlp-all": LoadBSNLP(lang='sl', year='all'),
                },
            },
            "test": {
                "ssj500k": LoadSSJ500k(),
                "bsnlp-2017": LoadBSNLP(lang='sl', year='2017'),
                "bsnlp-2021": LoadBSNLP(lang='sl', year='2021'),
                "bsnlp-all": LoadBSNLP(lang='sl', year='all')
            },
        },
        "slo_misc-submission": {
            "models": [
                "cro-slo-eng-bert",
                "sloberta-1.0",
                "sloberta-2.0",
            ],
            "train": {
                "ssj500k-bsnlp-2021-iterative": {
                    "ssj500k": LoadSSJ500k(),
                    "bsnlp2021": LoadBSNLP(lang='sl', year='2021'),
                },
                "bsnlp-all": {
                    "bsnlp-all": LoadBSNLP(lang='sl', year='all'),
                },
            },
            "test": {
                "bsnlp-2021": LoadBSNLP(lang='sl', year='2021'),
            },
        },
        "slo_misc-only-submission": {
            "models": [
                "sloberta-1.0",
            ],
            "train": {
                "bsnlp-2021": {
                    "bsnlp-2021": LoadBSNLP(lang='sl', year='2021', merge_misc=False, misc_data_only=True),
                }
            },
            "test": {
                "bsnlp-2021": LoadBSNLP(lang='sl', year='2021', merge_misc=False, misc_data_only=True),
            },
        },
        "slo_misc-only": {
            "models": [
                "cro-slo-eng-bert",
                "bert-base-multilingual-cased",
                "bert-base-multilingual-uncased",
                "sloberta-1.0",
                "sloberta-2.0",
            ],
            "train": {
                "bsnlp-2021": {
                    "bsnlp-2021": LoadBSNLP(lang='sl', year='2021', merge_misc=False, misc_data_only=True),
                }
            },
            "test": {
                "bsnlp-2021": LoadBSNLP(lang='sl', year='2021', merge_misc=False, misc_data_only=True),
            },
        },
        "slo_all": {
            "models": [
                "cro-slo-eng-bert",
                "bert-base-multilingual-cased",
                "bert-base-multilingual-uncased",
                "sloberta-1.0",
                "sloberta-2.0",
            ],
            "train": {
                "bsnlp-2021": {
                    "bsnlp-2021": LoadBSNLP(lang='sl', year='2021', merge_misc=False),
                }
            },
            "test": {
                "bsnlp-2021": LoadBSNLP(lang='sl', year='2021', merge_misc=False),
            }
        },
        "multilang_all": {
            "models": [
                "bert-base-multilingual-cased",
            ],
            "train": {
                'bsnlp-2021-bg': {'bsnlp-2021-bg': LoadBSNLP(lang='bg', year='2021', merge_misc=False)},
                'bsnlp-2021-cs': {'bsnlp-2021-cs': LoadBSNLP(lang='cs', year='2021', merge_misc=False)},
                'bsnlp-2021-pl': {'bsnlp-2021-pl': LoadBSNLP(lang='pl', year='2021', merge_misc=False)},
                'bsnlp-2021-ru': {'bsnlp-2021-ru': LoadBSNLP(lang='ru', year='2021', merge_misc=False)},
                'bsnlp-2021-sl': {'bsnlp-2021-sl': LoadBSNLP(lang='sl', year='2021', merge_misc=False)},
                'bsnlp-2021-uk': {'bsnlp-2021-uk': LoadBSNLP(lang='uk', year='2021', merge_misc=False)},
                'bsnlp-2021-all': {'bsnlp-2021-all': LoadBSNLP(lang='all', year='2021', merge_misc=False)},
            },
            "test": {
                "bsnlp-2021-bg": LoadBSNLP(lang='bg', year='2021', merge_misc=False),
                "bsnlp-2021-cs": LoadBSNLP(lang='cs', year='2021', merge_misc=False),
                "bsnlp-2021-pl": LoadBSNLP(lang='pl', year='2021', merge_misc=False),
                "bsnlp-2021-ru": LoadBSNLP(lang='ru', year='2021', merge_misc=False),
                "bsnlp-2021-sl": LoadBSNLP(lang='sl', year='2021', merge_misc=False),
                "bsnlp-2021-uk": LoadBSNLP(lang='uk', year='2021', merge_misc=False),
                "bsnlp-2021-all": LoadBSNLP(lang='all', year='2021', merge_misc=False),
            }
        }
    }
    chosen_bundle = args.train_bundle
    if chosen_bundle not in train_bundles:
        raise Exception(f"Invalid bundle chosen: {chosen_bundle}")
    bundle = train_bundles[chosen_bundle]
    models = bundle['models']
    train_data = bundle['train']
    test_data = bundle['test']
    # Run directory: either given explicitly, or derived from the SLURM job
    # id / current timestamp.
    if not args.run_path:
        run_time = datetime.now().isoformat()[:-7] # exclude the ms
        run_path = f'./data/runs/run_{JOB_ID if JOB_ID is not None else run_time}_{chosen_bundle}'
    else:
        run_path = args.run_path
        run_time = run_path.split('/')[-1][4:]
    pathlib.Path(run_path).mkdir(parents=True, exist_ok=True)
    pathlib.Path(f'{run_path}/models').mkdir(parents=True, exist_ok=True)
    logger.info(f'Running path: `{run_path}`, run time: `{run_time}`')
    # All loaders share one tag encoding; take it from any test loader.
    tag2code, code2tag = list(test_data.values())[0].encoding()
    test_f1_scores = []
    for model_name, fine_tuning in product(models, [True, False]):
        logger.info(f"Working on model: `{model_name}`...")
        for train_bundle, loaders in train_data.items():
            bert = BertModel(
                tag2code=tag2code,
                code2tag=code2tag,
                epochs=args.epochs,
                input_model_path=f'./data/models/{model_name}',
                output_model_path=f'{run_path}/models',
                output_model_fname=f'{model_name}-{train_bundle}'
                                   f"{'-finetuned' if fine_tuning else ''}"
                                   f'-{args.epochs}-epochs',
                tune_entire_model=fine_tuning,
                use_test=True,
            )
            if args.train:
                logger.info(f"Training data bundle: `{train_bundle}`")
                bert.train(loaders)
            if args.test:
                for test_dataset, dataloader in test_data.items():
                    logger.info(f"Testing on `{test_dataset}`")
                    p, r, f1 = bert.test(test_data=dataloader.test())
                    test_f1_scores.append({
                        "model_name": model_name,
                        "fine_tuned": fine_tuning,
                        "train_bundle": train_bundle,
                        "epochs": args.epochs,
                        "test_dataset": test_dataset,
                        "precision_score": p,
                        "recall_score": r,
                        "f1_score": f1
                    })
                    logger.info(f"[{train_bundle}][{test_dataset}] P = {p:.4f}, R = {r:.4f}, F1 = {f1:.4f}")
    if args.test:
        scores = pd.DataFrame(test_f1_scores)
        scores.to_csv(f'{run_path}/training_scores-{chosen_bundle}-{JOB_ID}.csv', index=False)
    logger.info(f'Entire training suite is done.')
# Script entry point.
if __name__ == '__main__':
    main()
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,384
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/data/datasets/ssj500k/fetch.py
|
"""Fetch and unpack the English ssj500k TEI corpus from CLARIN.SI."""
import urllib.request
import zipfile

ARCHIVE_URL = 'https://www.clarin.si/repository/xmlui/bitstream/handle/11356/1210/ssj500k-en.TEI.zip'
ARCHIVE_NAME = 'ssj500k-en.TEI.zip'

# Python 3 port of the original urllib2 script (the rest of the repository is
# Python 3 / f-strings). The archive is binary, so it must be written with
# mode 'wb' — the original used text mode 'w', which corrupts the zip on
# platforms with newline translation. Context managers close all handles.
with urllib.request.urlopen(ARCHIVE_URL) as response:
    archive = response.read()
with open(ARCHIVE_NAME, 'wb') as archive_file:
    archive_file.write(archive)
with zipfile.ZipFile(ARCHIVE_NAME) as zf:
    zf.extractall('.')
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,385
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/utils/prepare_output.py
|
import argparse
import pandas as pd
from src.utils.load_documents import LoadBSNLPDocuments
from src.utils.update_documents import UpdateBSNLPDocuments
from src.utils.utils import list_dir
def parser_args():
    """Parse the command-line options for the output-conversion script."""
    arg_parser = argparse.ArgumentParser()
    for flag, default in (
        ('--lang', 'all'),
        ('--year', '2021'),
        ('--run-path', None),
    ):
        arg_parser.add_argument(flag, type=str, default=default)
    return arg_parser.parse_args()
def convert_files(
    run_path: str,
    lang: str = 'sl',
    year: str = '2021',
) -> None:
    """
    Rewrite every model's clustered BSNLP predictions under
    ``{run_path}/predictions/bsnlp`` into the submission layout at
    ``{run_path}/out``.

    :param run_path: base directory of the training/prediction run
    :param lang: language selector passed to the document loaders
    :param year: shared-task year selector
    """
    model_dirs, _ = list_dir(f'{run_path}/predictions/bsnlp')
    for model_dir in model_dirs:
        print(f"Working on {model_dir}")
        reader = LoadBSNLPDocuments(year=year, lang=lang, path=f'{run_path}/predictions/bsnlp/{model_dir}')
        writer = UpdateBSNLPDocuments(year=year, lang=lang, path=f'{run_path}/out/{model_dir}')
        predicted = reader.load_predicted(folder='clustered')
        writer.update_predicted(predicted)
if __name__ == '__main__':
    # CLI entry point: parse options, echo them, and convert every model's
    # clustered predictions into the output layout.
    args = parser_args()
    print(f'Run path: {args.run_path}')
    print(f'Lang: {args.lang}')
    print(f'Year: {args.year}')
    convert_files(args.run_path, lang=args.lang, year=args.year)
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,386
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/train/trainer.py
|
import os
import sys
import logging
import pandas as pd
import pathlib
from tqdm import tqdm
from datetime import datetime
from src.train.crosloeng import BertModel
from src.utils.load_dataset import LoadBSNLP
# Log everything to stdout so SLURM captures it in the job log.
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s'
)
logger = logging.getLogger('TrainL1OStrategy')
run_time = datetime.now().isoformat()[:-7]  # exclude the ms
# Prefer the SLURM job id for the run directory name; os.environ.get is the
# idiomatic equivalent of the original membership-test conditional.
JOB_ID = os.environ.get('SLURM_JOB_ID')
run_path = f'./data/runs/run_l1o_{JOB_ID if JOB_ID is not None else run_time}'
# Creating the nested `models` directory with parents=True also creates
# run_path itself, so the original separate mkdir of run_path was redundant.
pathlib.Path(f'{run_path}/models').mkdir(parents=True, exist_ok=True)
def main() -> None:
    """
    Train a multilingual BERT NER model in a leave-one-dataset-out setup.

    NOTE(review): the loop body hard-codes ``excluded_dataset = 'none'`` and
    ends with ``break``, so exactly one model is trained and no dataset is
    actually excluded — the exclusion/testing logic is commented out. This
    looks like a temporarily disabled debug state; confirm before relying on
    the "L1O" semantics implied by the names.
    """
    epochs = 5
    fine_tuning = True
    model_name = 'bert-base-multilingual-cased'
    test_scores = []
    for excluded_dataset in tqdm(LoadBSNLP.datasets['2021'], desc='Excluded Dataset'):
        # Debug override: discards the loop variable (see docstring).
        excluded_dataset = 'none'
        logger.info(f"Excluding {excluded_dataset}")
        train_bundle = f'bsnlp-exclude-{excluded_dataset}'
        train_datasets = {
            train_bundle: LoadBSNLP(
                lang='all',
                year='2021',
                merge_misc=False,
                # exclude=excluded_dataset
            )
        }
        # test_dataset = LoadBSNLP(
        #     lang='all',
        #     year='2021',
        #     data_set=excluded_dataset,
        #     merge_misc=False,
        # )
        tag2code, code2tag = train_datasets[train_bundle].encoding()
        bert = BertModel(
            tag2code=tag2code,
            code2tag=code2tag,
            epochs=epochs,
            input_model_path=f'./data/models/{model_name}',
            output_model_path=f'{run_path}/models',
            # Output name encodes model, bundle, tuning mode and epoch count.
            output_model_fname=f'{model_name}-{train_bundle}'
            f"{'-finetuned' if fine_tuning else ''}"
            f'-{epochs}-epochs',
            tune_entire_model=fine_tuning,
            use_test=True,
        )
        logger.info(f"Training data bundle: `{train_bundle}`")
        bert.train(train_datasets)
        # logger.info(f"Testing on `{excluded_dataset}`")
        # p, r, f1 = bert.test(test_data=test_dataset.load_all())
        # test_scores.append({
        #     "model_name": model_name,
        #     "fine_tuned": fine_tuning,
        #     "train_bundle": train_bundle,
        #     "epochs": epochs,
        #     "test_dataset": excluded_dataset,
        #     "precision_score": p,
        #     "recall_score": r,
        #     "f1_score": f1
        # })
        # logger.info(f"[{train_bundle}][{excluded_dataset}] P = {p:.4f}, R = {r:.4f}, F1 = {f1:.4f}")
        break
    # With the break above and scoring commented out, test_scores is always
    # empty, so this writes an empty CSV.
    scores = pd.DataFrame(test_scores)
    scores.to_csv(f'{run_path}/training_scores-L1O-{JOB_ID}.csv', index=False)
if __name__ == '__main__':
    # CLI entry point.
    main()
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,387
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/train/model.py
|
from typing import Any, Tuple

import pandas as pd
class Model:
    """
    Minimal base interface for NER models: input conversion, training, testing.
    Subclasses override these hooks; the base implementations are no-ops.
    """
    def __init__(self) -> None:
        pass

    def convert_input(self, input_data: pd.DataFrame) -> Any:
        """
        Convert the data to the correct input format for the model.
        By default, we assume that it is already in the correct format.

        :param input_data: raw input data
        :return: data in the model's expected format (unchanged here)
        """
        return input_data

    def train(self, data_loaders: dict) -> None:
        """Train the model on the given data loaders. No-op in the base class."""
        pass

    def test(self, test_data: pd.DataFrame) -> Tuple[float, float, float]:
        """
        Evaluate the model.

        :param test_data: evaluation data
        :return: (precision, recall, f1); the base class returns None.
        """
        # NOTE: `(float, float, float)` was previously used as the return
        # annotation, which is a tuple of classes, not a type — Tuple[...] is
        # the correct spelling for type checkers.
        pass
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,388
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/matching/match_dedupe.py
|
import sys
import os
import argparse
import json
import pathlib
import pandas as pd
import logging
from tqdm import tqdm
from dedupe import Dedupe, StaticDedupe, console_label
from fuzzywuzzy import fuzz
from datetime import datetime
from collections import defaultdict
from itertools import combinations, product
from random import choices, random
from typing import Iterable, Callable
from src.utils.utils import list_dir
# Log everything to stdout so SLURM captures it in the job log.
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s'
)
logger = logging.getLogger('DedupeMatching')
BASE_FNAME: str = "./data/deduper"
run_time = datetime.now().isoformat()[:-7]  # exclude the ms
# Prefer the SLURM job id for run naming; fall back to the start timestamp.
# os.environ.get is the idiomatic equivalent of the membership-test conditional.
JOB_ID = os.environ.get('SLURM_JOB_ID', run_time)
RUN_BASE_FNAME = f"{BASE_FNAME}/runs/run_{JOB_ID}"
DATA_PATH = "./data/datasets/bsnlp"  # plain literal — no placeholders needed
NER_FIELD = 'calcNER'
RELEVANT_LANGS: list = ['bg', 'cs', 'pl', 'ru', 'sl', 'uk']
# Dedup configuration variables (overridable from the CLI in main()).
SEARCH_CLOSEST: bool = True
CHOOSE_K: int = 2  # determines how many samples of equivalent values to choose
CLUSTER_THRESHOLD: float = 0.65
DEDUPE_CORES_USED: int = 63
dedupe_variables: list = [
    # document structure: docId,sentenceId,tokenId,text,lemma,calcLemma,upos,xpos,ner,clID
    # variables to consider:
    {"field": "text", "type": "String"},
    {"field": "calcLemma", "type": "String"},
    {"field": "upos", "type": "String"},
    {"field": "xpos", "type": "String"},
    {"field": "ner", "type": "String"},
]
def merge_nes(
    nes: list
) -> list:
    """
    Merges consecutive NE token rows (B-/I- tagged) into single multi-token
    named entities, in the form of the expected output.

    :param nes: token-level dicts carrying at least ``text``, ``lemma``,
        ``calcLemma``, ``sentenceId``, ``tokenId``, ``upos``, ``xpos``,
        ``clID`` and the ``NER_FIELD`` tag
    :return: merged entity dicts; the B-/I- prefix is stripped from the NER
        tag and ``numTokens`` records how many rows were merged
    """
    merged = []
    for i, ne in enumerate(nes):
        # I- rows are absorbed by the inner loop below; only B- rows (or
        # untagged leading rows) start a new entity.
        if ne[NER_FIELD].startswith('I-'):
            continue
        j = i + 1
        ne['numTokens'] = 1
        # Absorb the following rows until the next B- tag (or end of input).
        while j < len(nes) and not nes[j][NER_FIELD].startswith('B-'):
            ne['text'] = f'{ne["text"]} {nes[j]["text"]}'
            ne['lemma'] = f'{ne["lemma"]} {nes[j]["lemma"]}'
            ne['calcLemma'] = f'{ne["calcLemma"]} {nes[j]["calcLemma"]}'
            ne['sentenceId'] = f'{ne["sentenceId"]}:{nes[j]["sentenceId"]}'
            ne['tokenId'] = f'{ne["tokenId"]}:{nes[j]["tokenId"]}'
            ne['upos'] = f'{ne["upos"]}:{nes[j]["upos"]}'
            ne['xpos'] = f'{ne["xpos"]}:{nes[j]["xpos"]}'
            if nes[j]["clID"] != ne['clID']:
                # Consistency check only; route through the module logger
                # instead of print() so it lands in the configured log stream.
                logger.warning(f"Inconsistent cluster ids: {nes[j]['clID']} vs {ne['clID']}, NE: {ne}")
            ne['numTokens'] += 1
            j += 1
        ne[NER_FIELD] = ne[NER_FIELD][2:]
        merged.append(ne)
    return merged
def load_nes(
    datasets: list,
) -> (dict, dict):
    """
    Loads predicted NE rows for the selected datasets and merges token runs
    into full entities via merge_nes.

    Only the 'covid-19' and 'us_election_2020' datasets and the languages in
    RELEVANT_LANGS are processed; everything else is skipped with a message.

    :param datasets: list of dataset directory paths
    :return: dict with two views of the same records:
        'normal'       -> dataset -> lang -> {key: record}
        'alphabetized' -> dataset -> first letter of text -> {key: record}
    :raises Exception: when two records produce the same key (collision)
    """
    documents = {}
    doc_alphabet = {}
    # doc_alphabet = defaultdict(dict)
    for dataset in datasets:
        dataset_name = dataset.split('/')[-1]
        if dataset_name not in ['covid-19', 'us_election_2020']:
            print(f"Skipping {dataset_name}")
            continue
        documents[dataset_name] = {}
        doc_alphabet[dataset_name] = defaultdict(dict)
        langs, _ = list_dir(f'{dataset}/predicted')
        for lang in langs:
            if lang.lower() not in RELEVANT_LANGS:
                logger.info(f"Skipping {dataset_name}/{lang}")
                continue
            documents[dataset_name][lang] = {}
            logger.info(f'Extracting from: {dataset}/{lang}')
            ne_path = f'{dataset}/predicted/{lang}'
            _, files = list_dir(ne_path)
            for file in files:
                # Force string dtypes so ids are never re-interpreted as numbers.
                df = pd.read_csv(f'{ne_path}/{file}', dtype={'docId': str, 'sentenceId': str, 'tokenId': str, 'clID': str,'text': str,'lemma': str,'calcLemma': str,'upos': str,'xpos': str,'ner': str})
                df['lang'] = lang
                df = df.fillna('N/A')
                # Keep only tagged tokens and merge B-/I- runs into entities.
                records = merge_nes(df.loc[~(df[NER_FIELD] == 'O')].to_dict(orient='records'))
                for item in records:
                    # Key uniquely identifies the entity occurrence within a language.
                    dkey = f"{lang};{item['docId']};{item['sentenceId']};{item['tokenId']};{item['text']}"
                    fchar = item['text'][0].upper()
                    if dkey in doc_alphabet[dataset_name][fchar]:
                        raise Exception(f"[doc_alphabet] COLLISION!!! {dkey}")
                    doc_alphabet[dataset_name][fchar][dkey] = item
                    if dkey in documents[dataset_name][lang]:
                        raise Exception(f"[documents] COLLISION!!! {dkey}")
                    documents[dataset_name][lang][dkey] = item
    return {
        "normal": documents,
        "alphabetized": doc_alphabet,
    }
def load_data(
    clear_cache: bool = False
) -> (dict, dict):
    """
    Loads the NE documents, serving them from a JSON cache in the run
    directory when one exists (unless ``clear_cache`` forces a rebuild).

    :param clear_cache: ignore any existing cache and re-extract the data
    :return: the structure produced by load_nes
    """
    cache_path = f'{RUN_BASE_FNAME}/cached_data.json'
    cache = pathlib.Path(cache_path)
    cache_usable = (not clear_cache) and cache.exists() and cache.is_file()
    if cache_usable:
        mod_time = datetime.fromtimestamp(cache.stat().st_mtime)
        logger.info(f"Using cached data from `{cache_path}`, last modified at: `{mod_time.isoformat()}`")
        with open(cache_path) as f:
            return json.load(f)
    dataset_names, _ = list_dir(DATA_PATH)
    data = load_nes([f'{DATA_PATH}/{dataset}' for dataset in dataset_names])
    with open(cache_path, 'w') as f:
        logger.info(f"Storing cached data at: {cache_path}")
        json.dump(data, f)
    return data
def get_clustered_ids(
    clustered: Iterable
) -> list:
    """
    Flattens deduper partitions into a JSON-serializable cluster list.

    :param clustered: iterable of (record_ids, scores) pairs
    :return: one dict per cluster with its index and scored member ids
    """
    result = []
    for cluster_idx, (record_ids, confidences) in enumerate(clustered):
        members = [
            {'id': record_id, 'score': float(confidence)}
            for record_id, confidence in zip(record_ids, confidences)
        ]
        result.append({"clusterId": cluster_idx, "ners": members})
    return result
def generate_training_examples(
    data: dict,
) -> dict:
    """
    Builds labelled training pairs for Dedupe from gold cluster ids.

    Records sharing a ``clID`` yield 'match' pairs (CHOOSE_K samples per
    cluster, drawn with replacement, so a record may be paired with itself);
    records from different clusters yield 'distinct' pairs — only textual
    near-matches (fuzz ratio >= 70) when SEARCH_CLOSEST is set, otherwise a
    random ~50% sample of all cross-cluster combinations.

    :param data: {key: NE record}; each record carries 'clID' and 'text'
    :return: {'distinct': [...], 'match': [...]} pair lists
    """
    positive_examples = defaultdict(list)
    matches = []
    distinct = []
    # Group records by their gold cluster id.
    for key, value in data.items():
        positive_examples[value['clID']].append(value)
    for key, values in positive_examples.items():
        # logger.info(f"{key} ({len(values)}): {values}")
        use_items = choices(values, k=CHOOSE_K)
        for comb in combinations(use_items, 2):
            matches.append(comb)
    clids = positive_examples.keys()
    for comb in combinations(clids, 2):
        # skip some combination with a 1/2 probability
        if not SEARCH_CLOSEST and random() < 0.5: # toss a fair coin
            # logger.info("Skipping...")
            continue
        d1 = choices(positive_examples[comb[0]], k=CHOOSE_K)
        d2 = choices(positive_examples[comb[1]], k=CHOOSE_K)
        for (i1, i2) in product(d1, d2):
            if SEARCH_CLOSEST:
                # Keep only hard negatives: different cluster but similar text.
                if fuzz.ratio(i1['text'].lower(), i2['text'].lower()) >= 70:
                    # logger.info(f"Similar are: {i1['text']}, {i2['text']}")
                    distinct.append((i1, i2))
            else:
                distinct.append((i1, i2))
    return {
        'distinct': distinct,
        'match': matches
    }
def data_looper(
    data: dict,
    call_fun: Callable,
    mapper: dict,
    train_all: bool = False,
) -> Callable:
    """
    Builds a closure that walks ``data`` (dataset -> lang -> items) and applies
    ``call_fun`` to fixed-size chunks of each language's items.

    :param data: nested mapping dataset -> language -> {key: NE record}
    :param call_fun: callback with signature (dataset, lang_tag, items, mapper)
    :param mapper: dataset-name mapping forwarded to ``call_fun``
    :param train_all: additionally invoke ``call_fun`` once per dataset with
        all languages merged under the tag "all"
    :return: zero-argument function performing the traversal
    """
    chunk_size = 50
    def loop_through():
        for dataset, langs in data.items():
            for lang, items in langs.items():
                try:
                    logger.info(f"size of items for `{dataset}/{lang}`: {len(items)}")
                    keys = list(items.keys())
                    # Process items in chunks of chunk_size to bound the work per call.
                    for i, chunk_keys in enumerate([keys[x:x+chunk_size] for x in range(0, len(keys), chunk_size)]):
                        chunk = {k:items[k] for k in chunk_keys}
                        call_fun(dataset, f'{lang}-{i}', chunk, mapper)
                except Exception as e:
                    logger.error(f"ERROR OCCURED WHEN WORKING ON {dataset}/{lang}, {e}")
            if train_all:
                try:
                    # BUG FIX: the original call omitted the `mapper` argument,
                    # so every train_all invocation raised a TypeError that the
                    # except below silently logged.
                    call_fun(dataset, "all", {k:v for lang, docs in langs.items() for k, v in docs.items()}, mapper)
                except Exception as e:
                    logger.error(f"ERROR OCCURED WHEN WORKING ON {dataset}/all, {e}")
    return loop_through
def train(
    dataset: str,
    lang: str,
    items: dict,
    mapper: dict,
) -> None:
    """
    Trains a Dedupe model on one dataset/language chunk and persists both the
    generated training pairs and the learned settings under the run folder.

    :param dataset: dataset name, used for the output path
    :param lang: chunked language tag, e.g. 'sl-0'
    :param items: {key: NE record} mapping to train on
    :param mapper: unused here; accepted to match data_looper's callback signature
    """
    logger.info(f"Training on `{dataset}/{lang}`")
    # prepare training examples: generate matches and distinct cases
    td = generate_training_examples(items)
    train_path = f'{RUN_BASE_FNAME}/{dataset}'
    pathlib.Path(train_path).mkdir(parents=True, exist_ok=True)
    train_data_fname = f'{train_path}/train-{lang}.json'
    with open(train_data_fname, 'w') as tf:
        json.dump(td, tf)
    ## alternatively, manually label the training data
    ## the above code generates the training examples, so it is automating this step
    # console_label(deduper)
    # create a dedupe instance with chosen variables and number of cores to be used
    deduper = Dedupe(variable_definition=dedupe_variables, num_cores=DEDUPE_CORES_USED)
    # load the training data and prepare for training
    with open(train_data_fname) as tf:
        deduper.prepare_training(data=items, training_file=tf)
    # train the deduper
    deduper.train()
    # store the learned settings (binary; reloaded later by StaticDedupe)
    learned_settings_fname = f'{train_path}/learned_settings-{lang}.bin'
    with open(learned_settings_fname, 'wb') as ts:
        deduper.write_settings(ts)
def cluster_data(
    dataset: str,
    lang: str,
    items: dict,
    mapper: dict
) -> None:
    """
    Clusters one dataset/language chunk with a previously trained StaticDedupe
    model, writing a plain-text cluster report and a JSON cluster file.

    Skips chunks whose report already exists, and chunks for which the source
    dataset (looked up through ``mapper``) has no learned settings file.

    :param dataset: dataset name being clustered
    :param lang: chunked language tag, e.g. 'sl-0'
    :param items: {key: NE record} mapping to cluster
    :param mapper: dataset -> dataset whose learned settings should be reused
    """
    logger.info(f"Clustering `{dataset}/{lang}`")
    data_set_folder = f'{RUN_BASE_FNAME}/{dataset}/'
    pathlib.Path(data_set_folder).mkdir(parents=True, exist_ok=True)
    # Settings are trained per language, not per chunk: strip the chunk suffix.
    lang_id = lang.split('-')[0]
    clusters_report_fname = f'{RUN_BASE_FNAME}/{dataset}/clusters_report-{lang}.txt'
    if pathlib.Path(clusters_report_fname).exists():
        logger.info(f"Dataset: `{dataset}/{lang}` is already processed, skipping...")
        return
    learned_settings_fname = f'{RUN_BASE_FNAME}/{mapper[dataset]}/learned_settings-{lang_id}.bin'
    settings_file = pathlib.Path(learned_settings_fname)
    if not (settings_file.exists() or settings_file.is_file()):
        logger.info(f"Settings file `{learned_settings_fname}` does not exist or it's not a file.")
        return
    # load the learned settings
    with open(learned_settings_fname, 'rb') as f:
        deduper = StaticDedupe(f, num_cores=DEDUPE_CORES_USED)
    # cluster the data
    clustered = deduper.partition(items, threshold=CLUSTER_THRESHOLD)
    # Human-readable report: one line per cluster with its member ids.
    with open(clusters_report_fname, 'w') as f:
        for clid, (rec, score) in enumerate(clustered):
            print(f"{clid}: {','.join(rec)}", file=f)
    clustered_data_fname = f'{RUN_BASE_FNAME}/{dataset}/clusters-{lang}.json'
    clusters = get_clustered_ids(clustered)
    with open(clustered_data_fname, 'w') as f:
        json.dump(clusters, fp=f, indent=4)
def parse_args():
    """Command-line options for the Dedupe entity-matching run."""
    cli = argparse.ArgumentParser()
    for switch in ('--closest', '--train', '--train-chars', '--train-all', '--test'):
        cli.add_argument(switch, action='store_true')
    for option in ('--run-path', '--data-path'):
        cli.add_argument(option, type=str, default=None)
    cli.add_argument('--tsh', type=float, default=None)
    return cli.parse_args()
def main() -> None:
    """
    Entry point: applies CLI overrides to the module-level configuration
    globals, loads the NE data, then optionally trains Dedupe models
    (--train) and/or clusters the data (--test).
    """
    args = parse_args()
    global RUN_BASE_FNAME, SEARCH_CLOSEST, CLUSTER_THRESHOLD, JOB_ID, DATA_PATH
    RUN_BASE_FNAME = args.run_path if args.run_path is not None else RUN_BASE_FNAME
    DATA_PATH = args.data_path if args.data_path is not None else DATA_PATH
    pathlib.Path(RUN_BASE_FNAME).mkdir(parents=True, exist_ok=True)
    CLUSTER_THRESHOLD = args.tsh if args.tsh is not None else CLUSTER_THRESHOLD
    SEARCH_CLOSEST = args.closest
    logger.info("Running Dedupe Entity Matching")
    logger.info(f"SLURM_JOB_ID = {JOB_ID}")
    logger.info(f"Run path = {RUN_BASE_FNAME}")
    logger.info(f"Number of cores = {DEDUPE_CORES_USED}")
    logger.info(f"Dedupe threshold = {CLUSTER_THRESHOLD}")
    logger.info(f"Choose k = {CHOOSE_K}")
    logger.info(f"Closest string search: {SEARCH_CLOSEST}")
    logger.info(f"Train on chars: {args.train_chars}")
    logger.info(f"Train on all datasets: {args.train_all}")
    logger.info(f"Train: {args.train}")
    logger.info(f"Test: {args.test}")
    logger.info("Loading the data...")
    data = load_data()
    data = data['alphabetized'] if args.train_chars else data['normal']
    # Datasets without their own training data reuse settings learned on these.
    predict_from = {
        'covid-19': 'ryanair',
        'us_election_2020': 'brexit',
    }
    trainer = data_looper(data, train, train_all=args.train_all, mapper=predict_from)
    if args.train:
        logger.info("Training on the data...")
        trainer()
    clusterer = data_looper(data, cluster_data, mapper=predict_from)
    if args.test:
        logger.info("Clustering the data...")
        clusterer()
    logger.info("Done!")
if __name__ == '__main__':
    # CLI entry point.
    main()
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,389
|
UL-FRI-Zitnik/BSNLP-2021-Shared-Task
|
refs/heads/master
|
/src/utils/join_pred_cluster.py
|
import sys
import json
import glob
import pathlib
import logging
import argparse
import pandas as pd
from collections import defaultdict
from src.utils.load_dataset import LoadBSNLP
from src.utils.load_documents import LoadBSNLPDocuments
from src.utils.update_documents import UpdateBSNLPDocuments
from src.utils.utils import list_dir
# Log everything to stdout so SLURM captures it in the job log.
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s'
)
# NOTE(review): the logger name 'TrainEvalModels' looks copy-pasted from
# another script; consider renaming to match this module's purpose.
logger = logging.getLogger('TrainEvalModels')
# Default input locations; overridable via --pred-path / --cluster-path.
# pred_path = 'data/runs/run_2497_multilang_all'
pred_path = './data/runs/run_l1o_2551'
cluster_path = 'data/deduper/runs/run_2508'
def load_clusters(
    path: str
) -> (dict, dict):
    """
    Reads every per-language clusters-*.json file under ``path`` and builds,
    for each test dataset:
      * a DataFrame of token-level cluster assignments, and
      * a map 'lang;docId;sentenceId;tokenId' -> [cluster ids].

    Cluster ids are prefixed with a running file counter so ids coming from
    different files cannot collide.

    :param path: base directory of the dedupe run
    :return: (clusters DataFrames by dataset, ne_map by dataset)
    """
    clusters = {}
    ne_map = {}
    n_clusters = 0
    for dataset in LoadBSNLP.datasets['test_2021']:
        df_clusters = pd.DataFrame()
        ne_map[dataset] = defaultdict(list)
        for fname in glob.glob(f'{path}/{dataset}/clusters-*.json'):
            # Context manager so the handle is closed promptly — the original
            # json.load(open(fname)) leaked the file handle.
            with open(fname) as cluster_file:
                fcluster = json.load(cluster_file)
            nes = []
            for cluster in fcluster:
                for ne in cluster['ners']:
                    try:
                        ids = ne['id'].split(';')
                        # Multi-token NEs store ':'-joined sentence/token ids and
                        # space-joined text; re-expand into one row per token.
                        for sid, tid, t in zip(ids[2].split(':'), ids[3].split(':'), ids[4].split(' ')):
                            item = {
                                'clusterId': f'{n_clusters}-{cluster["clusterId"]}',
                                'lang': ids[0],
                                'docId': ids[1],
                                'sentenceId': int(sid),
                                'tokenId': int(tid),
                                'text': t,
                            }
                            ne_key = f'{ids[0]};{ids[1]};{sid};{tid}'
                            if ne_key in ne_map[dataset]:
                                logger.info(f"Double occurrence: {ne_key}")
                            ne_map[dataset][ne_key].append(f'{n_clusters}-{cluster["clusterId"]}')
                            nes.append(item)
                    except Exception as e:
                        logger.error(f"ERROR OCCURRED {ne}, {e}")
            n_clusters += 1
            df_clusters = pd.concat([df_clusters, pd.DataFrame(nes)])
        clusters[dataset] = df_clusters
    logger.info(f"Clusters: {clusters}")
    logger.info(f"Map: {ne_map}")
    return clusters, ne_map
def update_clusters(data: dict, ne_map: dict) -> dict:
    """
    Writes a 'calcClId' column into every document's content frame by looking
    each predicted NE token up in ``ne_map``; tokens with no cluster keep the
    'xxx' sentinel.

    :param data: dataset -> lang -> docId -> document (with 'content' DataFrame)
    :param ne_map: per-dataset 'lang;docId;sentenceId;tokenId' -> [cluster ids]
    :return: the same ``data`` structure, mutated in place
    """
    for dataset, langs in data.items():
        missed = 0
        all_nes = 0
        for lang, docs in langs.items():
            for docId, doc in docs.items():
                doc['content']['calcClId'] = 'xxx'
                for i, row in doc['content'].iterrows():
                    if row['calcNER'] != 'O':
                        all_nes += 1
                    ne_key = f'{lang};{row["docId"]};{row["sentenceId"]};{row["tokenId"]}'
                    if ne_key not in ne_map[dataset]:
                        if row['calcNER'] != 'O':
                            missed += 1
                        continue
                    # First assigned cluster wins when a token maps to several.
                    doc['content'].loc[i, 'calcClId'] = ne_map[dataset][ne_key][0]
        # Guard the miss-rate computation: the original raised
        # ZeroDivisionError for datasets without any predicted NEs.
        miss_rate = missed / all_nes if all_nes else 0.0
        logger.info(f"[{dataset}] Missed {missed}/{all_nes} [{miss_rate:.3f}]")
    return data
def parse_args():
    """Command-line options for merging predictions with cluster assignments."""
    cli = argparse.ArgumentParser()
    for flag, default in (
        ('--pred-path', None),
        ('--cluster-path', None),
        ('--year', '2021'),
        ('--lang', 'all'),
    ):
        cli.add_argument(flag, type=str, default=default)
    return cli.parse_args()
def main() -> None:
    """
    Entry point: merges dedupe cluster assignments into each model's predicted
    BSNLP documents and persists them via UpdateBSNLPDocuments.

    CLI options override the module-level pred_path / cluster_path defaults.

    :raises Exception: when either configured path is missing or not a directory
    """
    global pred_path, cluster_path
    args = parse_args()
    pred_path = args.pred_path if args.pred_path is not None else pred_path
    cluster_path = args.cluster_path if args.cluster_path is not None else cluster_path
    year = args.year
    lang = args.lang
    logger.info(f"Predictions path: {pred_path}")
    # BUG FIX: this line previously logged pred_path under the "Clusters path" label.
    logger.info(f"Clusters path: {cluster_path}")
    logger.info(f"Year: {year}")
    logger.info(f"Language: {lang}")
    path = pathlib.Path(pred_path)
    if not path.exists() or not path.is_dir():
        raise Exception(f"Path does not exist or is not a directory: `{pred_path}`")
    path = pathlib.Path(cluster_path)
    if not path.exists() or not path.is_dir():
        raise Exception(f"Path does not exist or is not a directory: `{cluster_path}`")
    logger.info("Loading the clusters...")
    clusters, ne_map = load_clusters(cluster_path)
    models, _ = list_dir(f'{pred_path}/predictions/bsnlp')
    for model in models:
        logger.info(f"Loading the documents for model `{model}`...")
        data = LoadBSNLPDocuments(year='test_2021', lang=lang, path=f'{pred_path}/predictions/bsnlp/{model}').load_predicted()
        logger.info(f"[{model}] Merging the cluster data into the prediction data")
        updated = update_clusters(data, ne_map)
        logger.info(f"[{model}] Persisting the changes...")
        UpdateBSNLPDocuments(year='test_2021', lang=lang, path=f'{pred_path}/predictions/bsnlp/{model}').update_clustered(updated)
    logger.info("Done.")
if __name__ == '__main__':
    # CLI entry point.
    main()
|
{"/src/eval/predict.py": ["/src/utils/load_dataset.py"], "/src/eval/model_eval.py": ["/src/eval/predict.py", "/src/utils/load_documents.py", "/src/utils/load_dataset.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/utils/load_dataset.py": ["/src/utils/utils.py"], "/src/transform/create_splits.py": ["/src/utils/utils.py"], "/src/utils/load_documents.py": ["/src/utils/utils.py"], "/src/train/crosloeng.py": ["/src/train/model.py", "/src/utils/load_dataset.py", "/src/utils/utils.py"], "/src/utils/prepare_output.py": ["/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"], "/src/train/trainer.py": ["/src/train/crosloeng.py", "/src/utils/load_dataset.py"], "/src/matching/match_dedupe.py": ["/src/utils/utils.py"], "/src/utils/join_pred_cluster.py": ["/src/utils/load_dataset.py", "/src/utils/load_documents.py", "/src/utils/update_documents.py", "/src/utils/utils.py"]}
|
12,398
|
DasGuna/armer_ur
|
refs/heads/main
|
/armer_ur/robots/__init__.py
|
"""
Robots for Armer
.. codeauthor:: Gavin Suddreys
"""
from armer_ur.robots.URROSRobot import URROSRobot
# Explicit public API of the robots subpackage.
__all__ = [
    'URROSRobot'
]
|
{"/armer_ur/robots/__init__.py": ["/armer_ur/robots/URROSRobot.py"]}
|
12,399
|
DasGuna/armer_ur
|
refs/heads/main
|
/armer_ur/robots/URROSRobot.py
|
"""
URROSRobot module defines the URROSRobot type
URROSRobot provides robot-specific callbacks for recovery and setting impedance.
.. codeauthor:: Gavin Suddreys
"""
import rospy
import actionlib
import roboticstoolbox as rtb
from armer.robots import ROSRobot
from std_srvs.srv import EmptyRequest, EmptyResponse
from std_srvs.srv import Trigger, TriggerRequest
from armer_msgs.msg import ManipulatorState
from armer_msgs.srv import \
SetCartesianImpedanceRequest, \
SetCartesianImpedanceResponse
from ur_dashboard_msgs.msg import RobotMode, SafetyMode
class URROSRobot(ROSRobot):
    """
    ROSRobot specialisation for Universal Robots arms.

    Adds UR-dashboard-based error recovery (brake release + program play) and
    maps UR robot/safety modes onto the generic ManipulatorState error flags.
    """
    def __init__(self,
                 robot: rtb.robot.Robot,
                 controller_name: str = None,
                 recover_on_estop: bool = True,
                 *args,
                 **kwargs):
        """
        :param robot: roboticstoolbox robot model to control
        :param controller_name: ros_control controller name; when omitted it is
            derived from the second segment of the joint velocity topic
        :param recover_on_estop: automatically run recovery once the e-stop is
            released (see get_state)
        """
        super().__init__(robot, *args, **kwargs)
        self.controller_name = controller_name \
            if controller_name else self.joint_velocity_topic.split('/')[1]
        self.recover_on_estop = recover_on_estop
        self.last_estop_state = 0
        # UR state subscribers
        self.robot_state_subscriber = rospy.Subscriber(
            '/ur_hardware_interface/robot_mode',
            RobotMode,
            self.ur_robot_cb
        )
        self.safety_state_subscriber = rospy.Subscriber(
            '/ur_hardware_interface/safety_mode',
            SafetyMode,
            self.ur_safety_cb
        )
        # NOTE(review): duplicate initialisation — last_estop_state was
        # already set to 0 above.
        self.last_estop_state = 0
        self.robot_state = None
        self.safety_state = None
        # Error recovery services (UR dashboard); wait_for_service blocks
        # start-up until the dashboard server is available.
        self.unlock_proxy = rospy.ServiceProxy('/ur_hardware_interface/dashboard/brake_release', Trigger)
        self.unlock_proxy.wait_for_service()
        self.reset_proxy = rospy.ServiceProxy('/ur_hardware_interface/dashboard/play', Trigger)
        self.reset_proxy.wait_for_service()
        # Run recovery once on start-up so the arm comes up released and playing.
        self.recover_cb(EmptyRequest())
    def recover_cb(self, req: EmptyRequest) -> EmptyResponse: # pylint: disable=no-self-use
        """[summary]
        ROS Service callback:
        Invoke any available error recovery functions on the robot when an error occurs

        Releases the brakes, then polls at 1 Hz until the robot reports
        RUNNING, and finally replays the loaded program. Note: blocks the
        caller for as long as the robot takes to reach RUNNING.
        :param req: an empty request
        :type req: EmptyRequest
        :return: an empty response
        :rtype: EmptyResponse
        """
        print('Recover')
        self.unlock_proxy(TriggerRequest())
        while not self.robot_state or self.robot_state.mode != RobotMode.RUNNING:
            rospy.sleep(1)
        print('Reset')
        self.reset_proxy(TriggerRequest())
        return EmptyResponse()
    def get_state(self):
        """
        Extends the base manipulator state with UR-specific error flags and,
        when enabled, triggers automatic recovery after an e-stop release or
        on non-e-stop safety violations.
        """
        state = super().get_state()
        if self.robot_state:
            # IDLE / POWER_OFF -> brakes engaged.
            state.errors |= ManipulatorState.LOCKED if self.robot_state.mode == RobotMode.IDLE or self.robot_state.mode == RobotMode.POWER_OFF else 0
        if self.safety_state:
            state.errors |= ManipulatorState.ESTOP if self.safety_state.mode == SafetyMode.ROBOT_EMERGENCY_STOP else 0
            state.errors |= ManipulatorState.JOINT_LIMIT_VIOLATION | ManipulatorState.CARTESIAN_LIMIT_VIOLATION | ManipulatorState.TORQUE_LIMIT_VIOLATION if self.safety_state.mode == SafetyMode.VIOLATION else 0
            state.errors |= ManipulatorState.OTHER if self.safety_state.mode != SafetyMode.NORMAL and self.safety_state.mode != SafetyMode.ROBOT_EMERGENCY_STOP else 0
        if self.safety_state and self.safety_state.mode == SafetyMode.NORMAL:
            # Auto-recover once after the e-stop transitions engaged -> normal.
            if self.recover_on_estop and self.last_estop_state == 1:
                self.recover_cb(EmptyRequest())
        else:
            if state.errors & ManipulatorState.OTHER == ManipulatorState.OTHER:
                self.recover_cb(EmptyRequest())
        # Remember whether the e-stop is currently engaged for the next poll.
        self.last_estop_state = 1 if self.safety_state and \
            self.safety_state.mode == SafetyMode.ROBOT_EMERGENCY_STOP else 0
        return state
    def ur_robot_cb(self, msg):
        # Cache the latest robot-mode message for get_state/recover_cb.
        self.robot_state = msg
    def ur_safety_cb(self, msg):
        # Cache the latest safety-mode message for get_state.
        self.safety_state = msg
|
{"/armer_ur/robots/__init__.py": ["/armer_ur/robots/URROSRobot.py"]}
|
12,424
|
rishibhutada/Take_Home_Test
|
refs/heads/master
|
/AvgDurationPerDifficultyLevel.py
|
from pyspark.sql import SparkSession
from pyspark.sql.functions import udf, when
from pyspark.sql import functions as F
import os, glob
import configparser
import util
def get_minutes(df_to_be_formatted,prep_time):
    '''
    Converts an ISO-8601-style duration column (e.g. 'PT5M', 'PT4H', 'PT5H25M')
    into an integer minute count so it can be used in arithmetic.

    :param df_to_be_formatted: input Spark DataFrame
    :param prep_time: name of the duration column to convert in place
    :return: DataFrame with the column replaced by total minutes

    NOTE(review): parsing is keyed on string length and assumes only the
    shapes listed below; a value like 'PT12H' (length 5, hour-suffixed) would
    be parsed as 12 minutes instead of 720 — confirm such values cannot occur
    in the data.
    '''
    #Checking for String length and deciding course of action to take as follows:
    #Length is 4 and M is last Character 'PT5M' -> 5
    #Length is 4 and H is last Character 'PT4H' -> 240
    #Length is 5 'PT25M' -> 25
    #Length is 6 'PT5H5M' -> 305 ((5*60) + 5)
    #Length is 7 'PT5H25M' -> 325 ((5*60) + 25
    #Other than that 'PT' -> 0
    df_to_be_formatted = df_to_be_formatted.withColumn(prep_time, F.when((F.length(F.col(prep_time)) == 4) &
                                                       (F.substring(F.col(prep_time), 4, 1) == 'M'),
                                                       (F.substring(F.col(prep_time), 3, 1)).cast('int'))
        .when((F.length(F.col(prep_time)) == 4) & (F.substring(F.col(prep_time), 4, 1) == 'H'),
              ((F.substring(F.col(prep_time), 3, 1)).cast('int')) * 60)
        .when((F.length(F.col(prep_time)) == 5), (F.substring(F.col(prep_time), 3, 2)).cast('int'))
        .when((F.length(F.col(prep_time)) == 6), (((F.substring(F.col(prep_time), 3, 1)).cast('int')) * 60) +
              ((F.substring(F.col(prep_time), 5, 1)).cast('int')))
        .when((F.length(F.col(prep_time)) == 7), (((F.substring(F.col(prep_time), 3, 1)).cast('int')) * 60) +
              ((F.substring(F.col(prep_time), 5, 2)).cast('int'))).otherwise(F.lit(0))
                                                       )
    return df_to_be_formatted
@udf
def duration_in_proper_format(difficulty, avg_total_cooking_time):
    '''
    Spark UDF: converts the average duration for a difficulty level into a
    human-readable string.

    :param difficulty: difficulty label ('easy', 'medium' or 'hard')
    :param avg_total_cooking_time: average duration in minutes
    :return: 'H hours & M minutes' for hard recipes, otherwise 'M minutes'
    '''
    #Check if difficulty is hard, then convert duration in Hours and minutes form
    if difficulty == 'hard':
        rounded_duration = int(round(avg_total_cooking_time))
        hours = int(rounded_duration / 60)
        minutes = rounded_duration % 60
        return f"{hours} hours & {minutes} minutes"
    else:
        #If difficulty is easy or medium, keep it in minutes
        rounded_duration = int(round(avg_total_cooking_time))
        return f"{rounded_duration} minutes"
def filename_change(output_path,output_file_name):
'''
Renames the output file to the required name
:param output_path:
'''
os.chdir(output_path) #Going tto the folder where we need to change the name
#Iterating through each file name to check if it has '.csv' as its extension
for file in glob.glob("*.csv"):
filename = file
break
new_path = output_path + filename #Creating path of the file to be renamed
rename_path = output_path + output_file_name #Creating the new name of the file with which we want to rename
#This renames the file to our specified name
os.rename(new_path, rename_path)
def get_recipes_involving_beef(input_reciepie_df):
    '''
    Keep only the recipes whose ingredients mention beef.

    :param input_reciepie_df: raw recipes dataframe
    :return: dataframe of beef recipes (name, prepTime, cookTime)
    '''
    # Keep only the columns the rest of the pipeline needs.
    trimmed_df = input_reciepie_df.select("name", "ingredients", "prepTime", "cookTime")
    # Upper-case the ingredients so the substring match is case-insensitive,
    # then flag the rows that mention beef.
    flagged_df = (
        trimmed_df
        .withColumn("ingredients_to_upper", F.upper(trimmed_df['ingredients']))
        .withColumn("contains_beef", F.col('ingredients_to_upper').contains('BEEF'))
    )
    # Keep the flagged rows and drop the helper/ingredient columns.
    return (
        flagged_df
        .filter(F.col('contains_beef') == 'true')
        .drop('ingredients', 'ingredients_to_upper', 'contains_beef')
    )
def calculate_total_cooking_time(only_beef_in_ingredients_df):
    '''
    Compute the total cooking time (prep + cook, in minutes) per recipe.

    :param only_beef_in_ingredients_df: dataframe of beef recipes
    :return: dataframe with a single 'total_cook_time' column
    '''
    # Normalise both ISO-8601-style duration columns into integer minutes.
    with_prep_minutes = get_minutes(only_beef_in_ingredients_df, 'prepTime')
    with_both_minutes = get_minutes(with_prep_minutes, 'cookTime')
    # Total time is simply the sum of the two normalised columns.
    summed_df = with_both_minutes.withColumn(
        'total_cook_time', (F.col('prepTime') + F.col('cookTime')).cast('int'))
    # Only the total is needed downstream; drop everything else.
    return summed_df.drop("prepTime", "cookTime", "name", 'contains_beef')
def calculate_average_cooking_time_per_difficulty_level(total_cook_time_df):
    '''
    Classify each recipe by difficulty (easy <= 30 min, medium <= 60 min,
    hard otherwise) and compute the average cooking time per level.

    :param total_cook_time_df: dataframe with a 'total_cook_time' column
    :return: dataframe of (difficulty, avg_total_cooking_time)
    '''
    # Bucket the recipes by their total cooking time.
    difficulty_level_classified_df = total_cook_time_df.withColumn(
        "difficulty",
        when(F.col('total_cook_time') <= 30, 'easy')
        .when(F.col('total_cook_time') <= 60, 'medium')
        .otherwise('hard'))
    # Average duration per difficulty level.
    difficulty_level_classified_df = difficulty_level_classified_df.groupby('difficulty').mean()
    # BUG FIX: coalesce() returns a NEW dataframe; the original discarded the
    # result, so the repartition to 1 never took effect.
    difficulty_level_classified_df = difficulty_level_classified_df.coalesce(1)
    # Order so the output reads easy, medium, hard (ascending avg time).
    difficulty_level_classified_df = difficulty_level_classified_df.orderBy('avg(total_cook_time)')
    # Render the duration in a readable format via the UDF.
    avg_cooking_time_df = difficulty_level_classified_df.withColumn(
        "avg_total_cooking_time",
        duration_in_proper_format(F.col('difficulty'),
                                  F.col('avg(total_cook_time)')))
    # The raw numeric average is no longer needed.
    return avg_cooking_time_df.drop('avg(total_cook_time)')
def main():
    '''
    Entry point: read the recipes JSON, compute the average cooking time per
    difficulty level for beef recipes, write the result as a CSV report and
    rename it to the deliverable file name.
    '''
    global logger
    # Load all paths and settings from the local config file.
    config = configparser.ConfigParser()
    config.read('config.ini')
    input_path_S3 = config.get('INPUT_PATH', 'input_path')
    output_path = config.get('OUTPUT_PATH', 'output_path')
    output_file_name = config.get('OUTPUT_PATH', 'output_file_name')
    log_path = config.get('LOGS', 'log_path')
    environment = config.get('ENVIRONMENT', 'environment')
    # Initializing Logger
    logger = util.init_logger('AvgDurationPerDifficultyLevel.py', log_path)
    # local[*] master only for local runs; on a cluster the master is
    # supplied by spark-submit.
    if environment == 'local':
        spark = SparkSession.builder.master('local[*]').appName('HelloFreshAssigment').getOrCreate()
    else:
        spark = SparkSession.builder.appName('HelloFreshAssigment').getOrCreate()
    logger.info("Spark Session Initialised")
    try:
        # Reading File From Specified Path
        input_recipe_df = spark.read.json(input_path_S3)
        logger.info(f"File got read successfully from the location {input_path_S3}")
    except Exception:
        # BUG FIX: the original bare except swallowed the error and then
        # crashed later with a NameError on input_recipe_df; log the full
        # traceback and abort cleanly instead.
        logger.exception(f"There is some issue with the file input path: {input_path_S3}")
        spark.stop()
        return
    # Pipeline: filter beef recipes -> total cooking time -> averages.
    only_beef_in_ingredients_df = get_recipes_involving_beef(input_recipe_df)
    total_cook_time_df = calculate_total_cooking_time(only_beef_in_ingredients_df)
    avg_cooking_time_df = calculate_average_cooking_time_per_difficulty_level(total_cook_time_df)
    try:
        # Write a single CSV (with header), overwriting any previous run.
        avg_cooking_time_df.coalesce(1) \
            .write.mode('overwrite') \
            .format("csv") \
            .option("header", "true") \
            .save(output_path)
        logger.info(f"File got written successfully at the location {output_path}")
    except Exception:
        # BUG FIX: the original logged the *input* path here; report the
        # output path and abort instead of continuing with no file written.
        logger.exception(f"There is some issue with the file output path: {output_path}")
        spark.stop()
        return
    # Stopping the Spark Session so that resources are released.
    spark.stop()
    # Spark names its output 'part-000*-...csv'; rename it to the
    # deliverable name (e.g. report.csv).
    try:
        filename_change(output_path, output_file_name)
        logger.info("Filename changed Successfully")
    except Exception:
        logger.exception(f"There is some issue with {output_path} or {output_file_name}")
#Calling the main()
if __name__ == '__main__':
    # Script entry point: run the full report-generation pipeline.
    main()
|
{"/AvgDurationPerDifficultyLevel.py": ["/util.py"], "/test_AvgDurationPerDifficultyLevel.py": ["/AvgDurationPerDifficultyLevel.py", "/util.py"]}
|
12,425
|
rishibhutada/Take_Home_Test
|
refs/heads/master
|
/util.py
|
import logging
import datetime
# import cal
# Shared log-line layout: timestamp, level, message.
formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(message)s')
# Timestamp captured once at import time; used to suffix log file names.
currtime = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
# Module-level handle to the most recently initialised logger
# (set by init_logger below).
logger = ''
def init_logger(job_name, log_path):
    """
    Create a logger named after *job_name* that writes INFO-and-above
    records to a timestamped file under *log_path*, and remember it in the
    module-level `logger` variable.

    :param job_name: name of the job; used for both logger and file name
    :param log_path: directory where the log file is created
    :return: the configured logging.Logger
    """
    global logger
    file_handler = logging.FileHandler(
        "%s/%s_%s.log" % (log_path, job_name, currtime))
    file_handler.setFormatter(formatter)
    job_logger = logging.getLogger(job_name)
    job_logger.setLevel(logging.INFO)
    job_logger.addHandler(file_handler)
    logger = job_logger
    return logger
def close_logger(logger):
    """
    Detach every handler from *logger*.

    BUG FIX: the original removed handlers from the very list it was
    iterating, which skips every other handler and can leave some attached;
    iterate over a snapshot of the list instead.
    """
    for handler in list(logger.handlers):
        logger.removeHandler(handler)
|
{"/AvgDurationPerDifficultyLevel.py": ["/util.py"], "/test_AvgDurationPerDifficultyLevel.py": ["/AvgDurationPerDifficultyLevel.py", "/util.py"]}
|
12,426
|
rishibhutada/Take_Home_Test
|
refs/heads/master
|
/test_AvgDurationPerDifficultyLevel.py
|
import unittest
from unittest import TestCase
from AvgDurationPerDifficultyLevel import get_recipes_involving_beef, calculate_total_cooking_time, \
calculate_average_cooking_time_per_difficulty_level, duration_in_proper_format
from pyspark.sql import SparkSession
import util
# Initializing Configs and Paths
# NOTE(review): hard-coded absolute Windows paths — consider reading these
# from config.ini the way the main job does.
input_path = "C://Users/Rishi/Desktop/HelloFreshAssignment/sample.json"
log_path = "C://Users/Rishi/Desktop/HelloFreshAssignment/logs/test_logs"
# Initializing Logger
logger = util.init_logger('test_AvgDurationPerDifficultyLevel.py', log_path)
# Initializing SparkSession (local master: tests always run locally)
spark = SparkSession.builder.master('local[*]').appName('HelloFreshAssigment').getOrCreate()
logger.info("Spark Session Initialised")
try:
    # Reading File From local sample Json
    input_reciepie_df = spark.read.json(input_path)
    logger.info(f"File got read successfully from the location {input_path}")
except:
    # NOTE(review): bare except hides the real failure and leaves
    # input_reciepie_df undefined for the lines below — confirm and narrow.
    logger.error(f"There is some issue with the file input path: {input_path}")
# Run the pipeline once at import time; each stage's output is shared by
# the test cases below.
only_beef_in_ingredients_df = get_recipes_involving_beef(input_reciepie_df)
total_cook_time_df = calculate_total_cooking_time(only_beef_in_ingredients_df)
avg_cooking_time_df = calculate_average_cooking_time_per_difficulty_level(total_cook_time_df)
class Test(TestCase):
    """Stage-by-stage checks for the beef-recipe duration pipeline.

    BUG FIX: the original tests called assertTrue(expected, actual) — the
    second argument of assertTrue is only the failure *message*, so they
    merely checked that the expected dataframe was truthy and could never
    fail. Each test now compares collected rows with assertEqual.
    """

    def test_get_recipies_involving_beef(self):
        expected_beef_occurences = spark.createDataFrame(
            data=[['abc', 'PT5M', 'PT2H2M'],
                  ['def', 'PT20M', 'PT3H25M'],
                  ['ijk', 'PT5H', 'PT']],
            schema=['name', 'prepTime', 'cookTime'])
        # Sort rows so the comparison is independent of partition order.
        self.assertEqual(sorted(expected_beef_occurences.collect()),
                         sorted(only_beef_in_ingredients_df.collect()))

    def test_calculate_total_cooking_time(self):
        expected_cooking_time = spark.createDataFrame(
            data=[[25], [200], [45]],
            schema=['total_cook_time'])
        self.assertEqual(sorted(expected_cooking_time.collect()),
                         sorted(total_cook_time_df.collect()))

    def test_calculate_average_cooking_time_per_difficulty_level(self):
        expected_avg_cooking_time = spark.createDataFrame(
            data=[['easy', '25 minutes'],
                  ['medium', '45 minutes'],
                  ['hard', '3 hours & 20 minutes']],
            schema=['difficulty', 'avg_total_cooking_time'])
        # BUG FIX: the original compared against total_cook_time_df, i.e.
        # the wrong pipeline stage; compare the averaged dataframe.
        self.assertEqual(sorted(expected_avg_cooking_time.collect()),
                         sorted(avg_cooking_time_df.collect()))

    def test_duration_in_proper_format(self):
        # duration_in_proper_format is a pyspark UDF; calling it on Columns
        # returns a (always-truthy) Column, so exercise the wrapped Python
        # function directly via its .func attribute.
        self.assertEqual('2 hours & 25 minutes',
                         duration_in_proper_format.func('hard', 145))
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
|
{"/AvgDurationPerDifficultyLevel.py": ["/util.py"], "/test_AvgDurationPerDifficultyLevel.py": ["/AvgDurationPerDifficultyLevel.py", "/util.py"]}
|
12,437
|
dipu2poudel/sudoku_solver
|
refs/heads/master
|
/sudoku_solver.py
|
class Sudoku_Board:
    """A 9x9 sudoku grid (0 marks an empty cell) with a backtracking solver."""

    def __init__(self, board=None):
        """
        :param board: optional 9x9 list-of-lists of ints with 0 for empty
            cells. When omitted, a built-in sample puzzle is used.
            (Generalization: the original always used the hard-coded puzzle.)
        """
        if board is None:
            board = [
                [7, 8, 0, 4, 0, 0, 1, 2, 0],
                [6, 0, 0, 0, 7, 5, 0, 0, 9],
                [0, 0, 0, 6, 0, 1, 0, 7, 8],
                [0, 0, 7, 0, 4, 0, 2, 6, 0],
                [0, 0, 1, 0, 5, 0, 9, 3, 0],
                [9, 0, 4, 0, 6, 0, 0, 0, 5],
                [0, 7, 0, 3, 0, 0, 0, 1, 2],
                [1, 2, 0, 0, 0, 7, 4, 0, 0],
                [0, 4, 9, 2, 0, 6, 0, 0, 7]
            ]
        self.board = board
        self.rows = len(self.board)
        self.columns = len(self.board[0])

    def print_board(self):
        """Pretty-print the board with separators between the 3x3 boxes."""
        board_GUI = ''
        z = 0
        for row in self.board:
            # Horizontal rule before rows 3 and 6.
            if z % 3 == 0 and not z == 0:
                board_GUI += '------------------------\n'
            # Iterate cells of the row (use the column count, not the row
            # count as the original did — identical for a square board).
            for i in range(self.columns):
                # Vertical rule before columns 3 and 6.
                if i % 3 == 0 and not i == 0:
                    board_GUI += ' | '
                board_GUI += str(row[i]) + ' '
                if i == self.columns - 1:
                    board_GUI = f'{board_GUI}\n'
            z += 1
        print(board_GUI)

    def empty_spots_finder(self):
        """Return (row, col) of the first empty cell, or None when full."""
        for i in range(self.rows):
            for j in range(self.columns):
                if self.board[i][j] == 0:
                    return i, j
        return None  # explicit: the board is completely filled

    def sudoku_solver(self):
        """Solve the board in place by backtracking; return True on success."""
        empty_spot = self.empty_spots_finder()
        if not empty_spot:
            return True  # no empty cells left -> solved
        ro, co = empty_spot
        for candidate in range(1, 10):
            if self.number_is_valid(candidate, ro, co):
                self.board[ro][co] = candidate
                if self.sudoku_solver():
                    return True
                self.board[ro][co] = 0  # undo and try the next candidate
        return False  # no candidate fits: trigger backtracking

    def number_is_valid(self, number, ro, co):
        """Return True if *number* may legally be placed at (ro, co)."""
        for i in range(self.rows):
            # Row check (skip the target cell itself).
            if self.board[ro][i] == number and co != i:
                return False
            # Column check.
            if self.board[i][co] == number and ro != i:
                return False
        # 3x3 box check: top-left corner of the enclosing box.
        box_horizontal = (ro // 3) * 3
        box_vertical = (co // 3) * 3
        for i in range(box_horizontal, box_horizontal + 3):
            for j in range(box_vertical, box_vertical + 3):
                if self.board[i][j] == number and (i, j) != (ro, co):
                    return False
        return True
|
{"/main.py": ["/sudoku_solver.py"]}
|
12,438
|
dipu2poudel/sudoku_solver
|
refs/heads/master
|
/main.py
|
from sudoku_solver import Sudoku_Board
# Build the board with its built-in sample puzzle, solve it in place,
# then print the completed grid.
board = Sudoku_Board()
board.sudoku_solver()
board.print_board()
|
{"/main.py": ["/sudoku_solver.py"]}
|
12,486
|
Geeorgee23/Socios_cooperativa_MVC
|
refs/heads/master
|
/controlador.py
|
from socios import Socios
class Controlador:
    """Controller holding the registered members (socios) and the product
    price list, and mediating every operation the CLI menu offers."""

    def __init__(self):
        # id_socio -> Socios instance
        self.listaSocios={}
        # product name -> price per kilo
        self.productos = { 'Naranja':40,
        'Oliva':10,
        'Caqui':20 }

    def numSocios(self):
        """Return how many members are registered."""
        return len(self.listaSocios)

    def addSocio(self,socio):
        """Register *socio*; return False if its id already exists."""
        if (socio.getIdSocio() not in self.listaSocios):
            self.listaSocios[socio.getIdSocio()]=socio
            return True
        return False

    def delSocio(self,id_socio):
        """Remove the member with *id_socio*; return False if unknown."""
        if id_socio in self.listaSocios:
            del self.listaSocios[id_socio]
            return True
        return False

    def listarSocios(self):
        """Return one formatted summary string per registered member."""
        lista=[]
        for clave,valor in self.listaSocios.items():
            lista.append("Id_socio: "+clave+"\n\tDni: "+valor.getDni()+"\n\tNombre: "+valor.getNombre()+"\n\tApellidos: "+valor.getApellidos()+"\n\tfecha: "+valor.getFecha()+"\n\tSaldo: "+str(valor.getSaldo()))
        return lista

    def getProductos(self):
        """Return the product names as one indented, line-separated string."""
        lista=""
        for i in self.productos:
            lista +="\t"+i+"\n"
        return lista

    def addProducto(self,id_socio,producto,kilos):
        """Record *kilos* of *producto* as pending for member *id_socio*.
        Return False when the member or the product is unknown."""
        if id_socio in self.listaSocios:
            if producto in self.productos:
                # BUG FIX: kilos arrives as a string from input(); convert it
                # so repeated deliveries accumulate numerically instead of
                # concatenating strings (e.g. "5" + "3" -> "53").
                self.listaSocios[id_socio].addProducto(producto,float(kilos))
                return True
        return False

    def actualizaSaldo(self,id_socio):
        """Convert the pending registers of *id_socio* into balance
        (kilos * price per product) and clear them; False if unknown."""
        saldo=0.0
        if id_socio in self.listaSocios:
            for clave,valor in self.listaSocios[id_socio].getRegistrosPendientes().items():
                saldo+= self.productos[clave] * float(valor)
            self.listaSocios[id_socio].actualizaSaldo(saldo)
            self.listaSocios[id_socio].delRegistros()
            return True
        return False

    def fichaSocio(self,id_socio):
        """Return the full record of *id_socio*, or '' when unknown."""
        socio=""
        if id_socio in self.listaSocios:
            # BUG FIX: the original looped over every member and returned the
            # LAST one's record regardless of the id requested; look the
            # requested member up directly instead.
            valor = self.listaSocios[id_socio]
            socio = ("Id_socio: "+id_socio+"\n\tDni: "+valor.getDni()+"\n\tNombre: "+valor.getNombre()+"\n\tApellidos: "+valor.getApellidos()+"\n\tfecha: "+valor.getFecha()+"\n\tSaldo: "+str( "{:10.2f}".format(valor.getSaldo()))+"\n\tRegistros Pendientes: "+str(valor.getRegistrosPendientes()))
        return socio
|
{"/controlador.py": ["/socios.py"], "/main.py": ["/socios.py", "/controlador.py"]}
|
12,487
|
Geeorgee23/Socios_cooperativa_MVC
|
refs/heads/master
|
/main.py
|
from socios import Socios
from controlador import Controlador
from datetime import datetime
# Single controller instance backing the whole menu loop.
controlador = Controlador()
# Main menu loop: repeats until the user picks option 7 (exit).
while True:
    print("Actualmente hay ",controlador.numSocios()," socios")
    print("1.- Añadir Socio")
    print("2.- Eliminar Socio")
    print("3.- Listar Socios")
    print("4.- Registrar Productos")
    print("5.- Actualizar Saldo")
    print("6.- Ficha de Socio")
    print("7.- Salir")
    # Keep prompting until a valid integer between 1 and 7 is entered.
    while True:
        try:
            op=int(input("Introduce opción:"))
            if op>=1 and op<=7:
                break
            else:
                print("Introduce un numero del 1 al 7!")
        except ValueError:
            print("Introduce un numero!")
    # Option 7: leave the application.
    if op==7:
        break
    # Option 1: register a new member; the join date is set to today.
    if op==1:
        print()
        id_socio=input("Introduce el id del socio: ")
        dni=input("Introduce el dni del socio: ")
        nombre=input("Introduce el nombre del socio: ")
        apellidos=input("Introduce los apellidos del socio: ")
        fecha= datetime.now()
        hoy = str(fecha.strftime("%d-%m-%Y"))
        socio = Socios(id_socio,dni,nombre,apellidos,hoy)
        if controlador.addSocio(socio):
            print("Socio añadido correctamente!")
        else:
            print("Error al añadir el socio!")
        print()
    # Option 2: delete an existing member by id.
    if op==2:
        print()
        id_socio=input("Introduce el id del socio a eliminar: ")
        if controlador.delSocio(id_socio):
            print("Socio eliminado correctamente!")
        else:
            print("Error al eliminar el socio!")
        print()
    # Option 3: list every registered member.
    if op ==3:
        print()
        print("Socios: ")
        for i in controlador.listarSocios():
            print(i)
        print()
    # Option 4: record delivered kilos of a product for a member.
    if op ==4:
        print()
        print("Registrando productos...")
        id_socio=input("Introduce el id del socio: ")
        print("Productos:")
        print(controlador.getProductos())
        producto=input("Introduce el nombre del producto: ")
        kilos=input("Introduce el numero de kilos: ")
        if controlador.addProducto(id_socio,producto,kilos):
            print("Producto añadido correctamente!")
        else:
            print("Error al añadir el producto!")
        print()
    # Option 5: settle the pending registers into the member's balance.
    if op ==5:
        print()
        id_socio=input("Introduce el id del socio: ")
        if controlador.actualizaSaldo(id_socio):
            print("Saldo actualizado correctamente!")
        else:
            print("Error al actualizar saldo!")
        print()
    # Option 6: show the full record of one member.
    if op==6:
        print()
        id_socio=input("Introduce el id del socio: ")
        print(controlador.fichaSocio(id_socio))
        print()
|
{"/controlador.py": ["/socios.py"], "/main.py": ["/socios.py", "/controlador.py"]}
|
12,488
|
Geeorgee23/Socios_cooperativa_MVC
|
refs/heads/master
|
/socios.py
|
class Socios:
    """A cooperative member: identity data, accumulated balance and the
    pending product registers awaiting settlement."""

    def __init__(self,id_socio,dni,nombre,apellidos,fecha):
        """
        :param id_socio: unique member id
        :param dni: national identity document
        :param nombre: first name
        :param apellidos: surname(s)
        :param fecha: join date (pre-formatted string)
        """
        self.id_socio=id_socio
        self.dni=dni
        self.nombre=nombre
        self.apellidos=apellidos
        self.fecha=fecha
        # Balance accumulated when pending registers are settled.
        self.saldo=0.0
        # producto -> kilos delivered but not yet converted into balance.
        self.registrosPendientes={}

    def getIdSocio(self):
        return self.id_socio

    def getDni(self):
        return self.dni

    def getNombre(self):
        return self.nombre

    def getApellidos(self):
        return self.apellidos

    def getFecha(self):
        return self.fecha

    def getSaldo(self):
        return self.saldo

    def getRegistrosPendientes(self):
        return self.registrosPendientes

    def addProducto(self,producto,kilos):
        """Accumulate *kilos* of *producto* into the pending registers."""
        # BUG FIX: kilos may arrive as a string read from input(); coerce to
        # float so repeated deliveries add up numerically — previously
        # "5" + "3" concatenated into "53".
        kilos = float(kilos)
        if producto in self.registrosPendientes:
            self.registrosPendientes[producto] += kilos
        else:
            self.registrosPendientes[producto] = kilos

    def actualizaSaldo(self,saldo):
        """Add *saldo* to the member's accumulated balance."""
        self.saldo+=saldo

    def delRegistros(self):
        """Clear all pending product registers (called after settlement)."""
        self.registrosPendientes={}
|
{"/controlador.py": ["/socios.py"], "/main.py": ["/socios.py", "/controlador.py"]}
|
12,496
|
theparadoxer02/koinex
|
refs/heads/master
|
/accounts/admin.py
|
from django.contrib import admin
from .models import Account, KYC, KYC_Document, BankDetail
# Expose every account-related model in the Django admin, preserving the
# original registration order.
for model in (Account, KYC, KYC_Document, BankDetail):
    admin.site.register(model)
|
{"/accounts/admin.py": ["/accounts/models.py"], "/accounts/api/serializers.py": ["/accounts/models.py"], "/accounts/api/views.py": ["/accounts/api/serializers.py"], "/accounts/api/urls.py": ["/accounts/api/views.py"]}
|
12,497
|
theparadoxer02/koinex
|
refs/heads/master
|
/accounts/api/serializers.py
|
from rest_framework import serializers
from accounts.models import Account, KYC, KYC_Document, BankDetail
class AccountSerializer(serializers.ModelSerializer):
    """Serializer for the Account model used by the create API endpoint."""

    class Meta:
        model = Account
        fields = [
            # NOTE(review): 'status_option' does not appear among the Account
            # columns in the initial migration — presumably a class-level
            # choices attribute; confirm it is a valid serializer field name.
            'status_option',
            'user',
            'first_name',
            'last_name',
            'email',
            'asknbid_id',
            'account_status',
            'accont_created_on',
        ]
class KYCSerializer(serializers.ModelSerializer):
    """Serializer exposing a member's KYC record for the create API."""

    class Meta:
        model = KYC
        # BUG FIX: 'kyc_status' was listed twice in the original fields list;
        # the redundant second entry has been removed.
        fields = [
            'kyc_status',
            'Account',
            'dob',
            'full_name',
            'pan_number',
            'adhaar_no',
            'gross_annual_income',
            'residential_status',
            'street_address',
            'city',
            'state',
            'country',
            'pin_code',
            'valid'
        ]
class KYC_DocumentSerializer(serializers.ModelSerializer):
    """Serializer for the uploaded KYC document images of an Account."""

    class Meta:
        model = KYC_Document
        fields = [
            'Account',
            'pan_card',
            'adhaar_card',
            'adhaar_back',
            'photograph',
            'valid',
        ]
class BankDetailSerializer(serializers.ModelSerializer):
    """Serializer for an Account's bank detail / activity record."""

    class Meta:
        model = BankDetail
        fields = [
            'Account',
            'ifsc_code',
            'time',
            'source',
            'ip',
            'activity',
            'status',
            'valid'
        ]
|
{"/accounts/admin.py": ["/accounts/models.py"], "/accounts/api/serializers.py": ["/accounts/models.py"], "/accounts/api/views.py": ["/accounts/api/serializers.py"], "/accounts/api/urls.py": ["/accounts/api/views.py"]}
|
12,498
|
theparadoxer02/koinex
|
refs/heads/master
|
/accounts/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-11-26 20:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the accounts app: Account (one per auth user) plus
    its KYC, KYC document and bank-detail records, each FK'd to Account.

    Auto-generated by Django; do not hand-edit the operations."""

    initial = True

    dependencies = [
        # Account links to the (possibly custom) user model via OneToOneField.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Account',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(default='', max_length=40)),
                ('last_name', models.CharField(default='', max_length=40)),
                ('email', models.EmailField(max_length=254)),
                ('asknbid_id', models.CharField(max_length=30, null=True, unique=True)),
                ('account_status', models.CharField(choices=[('Submitted', 'Submitted'), ('Verified', 'Verified'), ('Rejected', 'Rejected')], max_length=20)),
                ('accont_created_on', models.DateField(auto_now=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='BankDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ifsc_code', models.CharField(default='', max_length=10)),
                ('time', models.TimeField(auto_now_add=True)),
                ('source', models.CharField(default='', max_length=100)),
                ('ip', models.GenericIPAddressField()),
                ('activity', models.TextField()),
                ('status', models.BooleanField(default=False)),
                ('valid', models.BooleanField(default=False)),
                ('Account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Account')),
            ],
        ),
        migrations.CreateModel(
            name='KYC',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('dob', models.DateField()),
                ('full_name', models.CharField(max_length=100)),
                ('pan_number', models.CharField(default='', max_length=10)),
                ('adhaar_no', models.CharField(default='', max_length=12)),
                ('gross_annual_income', models.CharField(default='', max_length=12)),
                ('residential_status', models.CharField(default='', max_length=200)),
                ('street_address', models.CharField(default='', max_length=30)),
                ('city', models.CharField(default='', max_length=30)),
                ('state', models.CharField(default='', max_length=30)),
                ('country', models.CharField(default='', max_length=30)),
                ('pin_code', models.CharField(default='', max_length=10)),
                ('kyc_status', models.CharField(choices=[('Submitted', 'Submitted'), ('Pending', 'Pending'), ('Verified', 'Verified')], max_length=30)),
                ('valid', models.BooleanField(default=False)),
                ('Account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Account')),
            ],
        ),
        migrations.CreateModel(
            name='KYC_Document',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pan_card', models.ImageField(upload_to='media/upload/pancard')),
                ('adhaar_card', models.ImageField(upload_to='media/upload/adhaarcard')),
                ('adhaar_back', models.ImageField(upload_to='media/upload/adhaarback')),
                ('photograph', models.ImageField(upload_to='media/upload/photograph')),
                ('valid', models.BooleanField(default=False)),
                ('Account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Account')),
            ],
        ),
    ]
|
{"/accounts/admin.py": ["/accounts/models.py"], "/accounts/api/serializers.py": ["/accounts/models.py"], "/accounts/api/views.py": ["/accounts/api/serializers.py"], "/accounts/api/urls.py": ["/accounts/api/views.py"]}
|
12,499
|
theparadoxer02/koinex
|
refs/heads/master
|
/accounts/api/views.py
|
from rest_framework import generics
from rest_framework import permissions
from . serializers import (
BankDetailSerializer,
KYCSerializer,
KYC_DocumentSerializer,
AccountSerializer,
)
class BankDetailCreateApiView(generics.CreateAPIView):
    """POST-only endpoint creating a BankDetail; requires authentication."""
    serializer_class = BankDetailSerializer
    permission_classes = [permissions.IsAuthenticated]
class KYC_DocumentCreateApiView(generics.CreateAPIView):
    """POST-only endpoint creating a KYC_Document; requires authentication."""
    serializer_class = KYC_DocumentSerializer
    permission_classes = [permissions.IsAuthenticated]
class KYCCreateApiView(generics.CreateAPIView):
    """POST-only endpoint creating a KYC record; requires authentication."""
    serializer_class = KYCSerializer
    permission_classes = [permissions.IsAuthenticated]
class AccountCreateApiView(generics.CreateAPIView):
    """POST-only endpoint creating an Account; requires authentication."""
    serializer_class = AccountSerializer
    permission_classes = [permissions.IsAuthenticated]
|
{"/accounts/admin.py": ["/accounts/models.py"], "/accounts/api/serializers.py": ["/accounts/models.py"], "/accounts/api/views.py": ["/accounts/api/serializers.py"], "/accounts/api/urls.py": ["/accounts/api/views.py"]}
|
12,500
|
theparadoxer02/koinex
|
refs/heads/master
|
/accounts/api/urls.py
|
from django.conf.urls import url
from accounts.api.views import (
BankDetailCreateApiView,
KYCCreateApiView,
KYC_DocumentCreateApiView,
AccountCreateApiView
)
# Accounts API routes — each endpoint is a create-only (POST) view.
urlpatterns = [
    url(r'^accounts/$', AccountCreateApiView.as_view(), name='accounts'),
    url(r'^bank/$', BankDetailCreateApiView.as_view(), name='bankdetail'),
    url(r'^kyc/$', KYCCreateApiView.as_view(), name='kyc'),
    url(r'^kycdocument/$', KYC_DocumentCreateApiView.as_view(), name='kycdocument'),
]
|
{"/accounts/admin.py": ["/accounts/models.py"], "/accounts/api/serializers.py": ["/accounts/models.py"], "/accounts/api/views.py": ["/accounts/api/serializers.py"], "/accounts/api/urls.py": ["/accounts/api/views.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.