index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
57,536 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/migrations/0002_auto_20181218_2231.py | # Generated by Django 2.1.4 on 2018-12-18 22:31
from django.db import migrations, models
import pricing.models
class Migration(migrations.Migration):
dependencies = [
('pricing', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='feature',
name='status',
field=models.CharField(choices=[(pricing.models.FeatureStatus('Input'), 'Input'), (pricing.models.FeatureStatus('Target'), 'Target')], default=pricing.models.FeatureStatus('Input'), max_length=10),
),
migrations.AlterField(
model_name='model',
name='loss',
field=models.CharField(choices=[(pricing.models.LossFunction('Gaussian'), 'Gaussian'), (pricing.models.LossFunction('Logistic'), 'Logistic'), (pricing.models.LossFunction('Poisson'), 'Poisson'), (pricing.models.LossFunction('Gamma'), 'Gamma')], default=pricing.models.LossFunction('Gaussian'), max_length=10),
),
]
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,537 | juliendurand/pricing-tool | refs/heads/master | /src/pypricing/documentation.py | import os
import subprocess
import sys
import markdown
import numpy as np
import jinja2
import config
import result
def euro(n):
return '{:,.0f}'.format(n).replace(',', ' ') + " €"
def thsep(n):
return '{:,.0f}'.format(n).replace(',', ' ') + ' '
def percent(p):
return '{:,.2f}'.format(p * 100).replace(',', ' ') + '%'
def twodp(p):
return '{:,.2f}'.format(p).replace(',', ' ')
class Documentation:
'''
Create the documentation for a glm regression run, using jinja2 as a
template engine and markdown for the syntax.
'''
def __init__(self, config):
self.path = config.get_doc_path()
self.filename = config.get_doc_filename()
self.result = result.Result(config)
extensions = [
'extra',
'nl2br',
'toc',
'smarty',
'meta',
]
self.md = markdown.Markdown(extensions=extensions)
jinja2_loader = jinja2.FileSystemLoader('./templates/')
self.jinja2_env = jinja2.Environment(loader=jinja2_loader)
self.jinja2_env.filters['euro'] = euro
self.jinja2_env.filters['thsep'] = thsep
self.jinja2_env.filters['percent'] = percent
self.jinja2_env.filters['twodp'] = twodp
img_path = os.path.join(self.path, 'img')
if not os.path.exists(img_path):
os.makedirs(img_path)
self.result.write_coeffs_as_csv(self.path)
def get_metadata(self):
md_template = ''
with open('./templates/template.md', 'r') as md_file:
md_template = md_file.read()
self.md.convert(md_template)
context = self.md.Meta
for k in context:
if len(context[k]) == 1:
context[k] = context[k][0]
return context
def create_md(self):
context = self.get_metadata()
context['path'] = self.path
context['result'] = self.result
context['np'] = np
md_content = self.jinja2_env.get_template('template.md') \
.render(context)
return self.md.convert(md_content)
def create_html(self, md):
return self.jinja2_env.get_template('template.html') \
.render(content=md)
def save_html(self):
md = self.create_md()
html = self.create_html(md)
with open(self.filename, 'w') as out_file:
out_file.write(html)
if __name__ == '__main__':
if len(sys.argv) != 2:
raise Exception("Invalid number of options, expecting only one : "
"[config filename].")
config_filename = sys.argv[1]
configuration = config.Config(config_filename)
print("Generating Documentation...")
doc = Documentation(configuration)
doc.save_html()
subprocess.run('open ' + configuration.get_doc_filename(), shell=True)
print("Document Finished\n")
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,538 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/views.py | from django.forms import modelform_factory, modelformset_factory, formset_factory, inlineformset_factory, ModelForm
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse, reverse_lazy
from django.views import generic
from .models import Dataset, Feature, FeatureStatus, Model, ModelFeature, Run
import pypricing.dataset as ds
from . import tasks
def index(request):
context = {}
return render(request, 'pricing/index.html', context)
class DatasetListView(generic.ListView):
model = Dataset
nav = 'dataset'
class DatasetDetailView(generic.DetailView):
model = Dataset
nav = 'dataset'
class DatasetCreateView(generic.CreateView):
model = Dataset
fields = ['name', 'csvfile', 'fn_transform', 'fn_filter', 'fn_train']
nav = 'dataset'
def get_success_url(self):
return self.model.get_absolute_url(self.object)
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.train_size = 100
self.object.test_size = 10
self.object.save()
fields = ds.get_fields(self.object.get_data_filename())
for field, field_status in fields:
try:
status = FeatureStatus(field_status)
except Exception:
status = FeatureStatus.Ignore
f = Feature(dataset=self.object, name=field, status=status)
f.save()
return HttpResponseRedirect(self.get_success_url())
class DatasetDeleteView(generic.DeleteView):
model = Dataset
success_url = reverse_lazy('dataset_list')
def dataset_process(request, pk):
tasks.process_dataset(pk)
return HttpResponseRedirect(reverse('dataset_list'))
class FeatureUpdateView(generic.UpdateView):
model = Feature
fields = ['name', 'status']
def get_success_url(self):
return Dataset.get_absolute_url(self.object.dataset)
class ModelListView(generic.ListView):
model = Model
nav = 'model'
class ModelDetailView(generic.DetailView):
model = Model
nav = 'model'
class ModelCreateView(generic.CreateView):
model = Model
fields = ['name', 'dataset', ]
nav = 'model'
def get_success_url(self):
return reverse('model_update', args=[self.object.pk])
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.save()
for f in self.object.dataset.feature_set.filter(status=FeatureStatus.Input):
self.object.features.create(feature=f)
return HttpResponseRedirect(self.get_success_url())
class ModelForm(ModelForm):
class Meta:
model = Model
fields = ['loss', 'target', 'weight', 'max_nb_features']
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
targets = self.fields['target'].queryset.filter(
dataset=kwargs['instance'].dataset,
status=FeatureStatus.Target
)
self.fields['target'].queryset = targets
self.fields['weight'].queryset = targets
class ModelUpdateView(generic.UpdateView):
model = Model
form_class = ModelForm
nav = 'model'
def get_success_url(self):
return self.model.get_absolute_url(self)
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.save()
return HttpResponseRedirect(self.get_success_url())
def switchFeature(request, pk, mf):
instance = ModelFeature.objects.get(pk=mf)
instance.active = not instance.active
instance.save()
url = Model.objects.get(pk=pk).get_absolute_url()
return HttpResponseRedirect(url)
class ModelDeleteView(generic.DeleteView):
model = Model
success_url = reverse_lazy('model_list')
def run(request, pk):
instance = Model.objects.get(pk=pk)
run = Run.objects.create(model=instance)
tasks.run_model(run.id)
url = Model.objects.get(pk=pk).get_absolute_url()
return HttpResponseRedirect(url)
class RunDetailView(generic.DetailView):
model = Run
nav = 'model'
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,539 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/admin.py | from django.contrib import admin
from .models import Dataset, Model, Feature, Modality, Run
admin.site.register(Dataset)
admin.site.register(Model)
admin.site.register(Feature)
admin.site.register(Modality)
admin.site.register(Run)
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,540 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/migrations/0001_initial.py | # Generated by Django 2.1.4 on 2018-12-18 22:11
from django.db import migrations, models
import django.db.models.deletion
import pricing.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Dataset',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now_add=True)),
('name', models.CharField(max_length=200)),
('csvfile', models.FileField(upload_to='data/')),
('train_size', models.BigIntegerField()),
('test_size', models.BigIntegerField()),
],
),
migrations.CreateModel(
name='Feature',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('status', models.CharField(choices=[(pricing.models.FeatureStatus('Input'), 'Input'), (pricing.models.FeatureStatus('Target'), 'Target')], max_length=10)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pricing.Dataset')),
],
),
migrations.CreateModel(
name='Modality',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.CharField(max_length=50)),
('feature', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pricing.Feature')),
],
),
migrations.CreateModel(
name='Model',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now_add=True)),
('name', models.CharField(max_length=200)),
('loss', models.CharField(choices=[(pricing.models.LossFunction('Gaussian'), 'Gaussian'), (pricing.models.LossFunction('Logistic'), 'Logistic'), (pricing.models.LossFunction('Poisson'), 'Poisson'), (pricing.models.LossFunction('Gamma'), 'Gamma')], max_length=10)),
('target', models.CharField(max_length=50)),
('weight', models.CharField(max_length=50)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pricing.Dataset')),
('features', models.ManyToManyField(to='pricing.Feature')),
],
),
migrations.CreateModel(
name='Run',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now_add=True)),
('model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pricing.Model')),
],
),
]
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,541 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/migrations/0005_auto_20190108_1301.py | # Generated by Django 2.1.4 on 2019-01-08 13:01
from django.db import migrations, models
import django.db.models.functions.text
import pricing.models
class Migration(migrations.Migration):
dependencies = [
('pricing', '0004_auto_20181231_1013'),
]
operations = [
migrations.AlterModelOptions(
name='feature',
options={'ordering': [django.db.models.functions.text.Lower('name')]},
),
migrations.AddField(
model_name='dataset',
name='fn_filter',
field=models.TextField(default=("def data_filter(row):\n return row['HAB_hab'] == 'A'\n",)),
),
migrations.AddField(
model_name='dataset',
name='fn_train',
field=models.TextField(default="def data_train(row):\n return row['Random'] != '9' and row['Random'] != '10'\n"),
),
migrations.AddField(
model_name='dataset',
name='fn_transform',
field=models.TextField(default=("def data_transform(row):\n row['POL_mtcapass'] = int(math.log(float(row['POL_mtcapass'])))\n",)),
),
migrations.AlterField(
model_name='feature',
name='status',
field=models.CharField(choices=[('Input', 'Input'), ('Target', 'Target'), ('Ignore', 'Ignore')], default=pricing.models.FeatureStatus('Input'), max_length=10),
),
]
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,542 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/tasks.py | import subprocess
from background_task import background
from . import models
import pypricing.dataset as ds
@background
def process_dataset(dataset_id):
try:
dataset = models.Dataset.objects.get(pk=dataset_id)
print('Start Processing Dataset : ' + str(dataset))
config = dataset.get_config()
ds.Dataset().process(config, ds.printProgressBar)
except Exception as e:
print('Exception raised during processing...')
print(e)
return
print('Done Processing Dataset')
@background
def run_model(run_id):
try:
run = models.Run.objects.get(pk=run_id)
model = run.model
print('Start Running Model : ' + str(model) + ' ' + str(run))
config = model.get_config()
# print(config)
complete_process = subprocess.run(['./bin/glm'],
input=config,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
check=True)
print(complete_process.stdout)
except Exception as e:
print('Exception raised during processing...')
print(e)
return
print('Done Running Model')
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,543 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/migrations/0010_auto_20190108_2231.py | # Generated by Django 2.1.4 on 2019-01-08 22:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('pricing', '0009_auto_20190108_2220'),
]
operations = [
migrations.AlterField(
model_name='model',
name='target',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='target', to='pricing.Feature'),
),
migrations.AlterField(
model_name='model',
name='weight',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='weight', to='pricing.Feature'),
),
]
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,544 | juliendurand/pricing-tool | refs/heads/master | /gui/pricing/models.py | from enum import Enum
import io
from django.db import models
from django.urls import reverse
from django.utils.text import slugify
import pypricing.dataset as ds
class FeatureStatus(Enum):
Input = "Input"
Target = "Target"
Ignore = "Ignore"
def __str__(self):
return self.value
class LossFunction(Enum):
Gaussian = "Gaussian"
Logistic = "Logistic"
Poisson = "Poisson"
Gamma = "Gamma"
def __str__(self):
return self.value
fn_filter = "def data_filter(row):\n return row['HAB_hab'] == 'A'\n"
fn_transform = "def data_transform(row):\n row['POL_mtcapass'] = int(math.log(float(row['POL_mtcapass'])))\n"
fn_train = "def data_train(row):\n return row['Random'] != '9' and row['Random'] != '10'\n"
class Dataset(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
name = models.CharField(max_length=200, unique=True)
csvfile = models.FileField(upload_to='data/')
train_size = models.BigIntegerField()
test_size = models.BigIntegerField()
fn_transform = models.TextField(default=fn_transform)
fn_filter = models.TextField(default=fn_filter)
fn_train = models.TextField(default=fn_train)
def get_absolute_url(self):
return reverse("dataset_detail", kwargs={'pk': self.pk})
def get_data_filename(self):
return str(self.csvfile)
def get_number_lines(self):
return ds.count_line(self.get_data_filename())
def get_fields(self):
return ds.get_fields(self.get_data_filename())
def get_path(self):
return './dataset/' + slugify(self.name)
def get_config(self):
config = {
'filename': str(self.csvfile),
'transform': str(self.fn_transform),
'filter': str(self.fn_filter),
'train': str(self.fn_train),
'features': [f.name for f in
self.feature_set.filter(status=FeatureStatus.Input)],
'targets': [f.name for f in
self.feature_set.filter(status=FeatureStatus.Target)],
'path': self.get_path(),
}
return config
def __str__(self):
return self.name
class Feature(models.Model):
dataset = models.ForeignKey(Dataset, on_delete=models.CASCADE)
name = models.CharField(max_length=50)
status = models.CharField(
max_length=10,
blank=False,
default=FeatureStatus.Input,
choices=[(tag.name, tag.value) for tag in FeatureStatus]
)
class Meta:
ordering = [models.functions.Lower('name')]
def __str__(self):
return self.name
class Modality(models.Model):
feature = models.ForeignKey(Feature, on_delete=models.CASCADE)
value = models.CharField(max_length=50)
def __str__(self):
return self.feature.dataset.name + ' / ' + self.feature.name \
+ ' / ' + self.value
class ModelFeature(models.Model):
feature = models.ForeignKey(Feature, on_delete=models.CASCADE)
active = models.BooleanField(default=True)
class Meta:
ordering = [models.functions.Lower('feature__name')]
def __str__(self):
return self.feature.name
class Model(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
name = models.CharField(max_length=200, unique=True)
dataset = models.ForeignKey(Dataset, on_delete=models.CASCADE)
loss = models.CharField(
max_length=10,
blank=False,
default=LossFunction.Gaussian,
choices=[(tag.name, tag.value) for tag in LossFunction]
)
features = models.ManyToManyField(ModelFeature, related_name='features')
target = models.ForeignKey(Feature,
on_delete=models.CASCADE,
related_name='target',
null=True,
)
weight = models.ForeignKey(Feature,
on_delete=models.CASCADE,
related_name='weight',
null=True,
)
max_nb_features = models.PositiveSmallIntegerField(default=20)
def get_absolute_url(self):
return reverse("model_detail", kwargs={'pk': self.pk})
def get_config(self):
sb = io.StringIO()
print(self.name, file=sb)
print(self.dataset.get_path(), file=sb)
print(self.name, file=sb)
print(self.loss.lower(), file=sb)
print(self.target, file=sb)
print(self.weight, file=sb)
print(self.max_nb_features, file=sb)
for f in self.features.filter(active=False):
print(f, file=sb)
config = sb.getvalue()
return config
def __str__(self):
return self.name
class Run(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
model = models.ForeignKey(Model, on_delete=models.CASCADE)
def __str__(self):
return 'Run #' + str(self.pk)
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,545 | juliendurand/pricing-tool | refs/heads/master | /src/pypricing/config.py | import os
class Config:
'''
Configuration for a glm regression.
'''
def __init__(self, config_filename):
self.filename = config_filename
with open(config_filename) as config_file:
self.label = config_file.readline().strip()
self.dataset_filename = config_file.readline().strip()
self.name = config_file.readline().strip()
self.loss = config_file.readline().strip()
self.target = config_file.readline().strip()
self.weight = config_file.readline().strip()
self.nb_features = config_file.readline().strip()
self.excludes = [x.strip() for x in config_file.readlines()]
def get_dataset_filename(self):
return self.dataset_filename
def get_result_path(self):
return os.path.join("result", self.name)
def get_doc_path(self):
doc_path = os.path.join("doc", self.name)
if not os.path.exists(doc_path):
os.makedirs(doc_path)
return doc_path
def get_doc_filename(self):
doc_path = self.get_doc_path()
get_doc_filename = os.path.join(doc_path, self.name + '.html')
return get_doc_filename
| {"/gui/pricing/views.py": ["/gui/pricing/models.py"], "/gui/pricing/admin.py": ["/gui/pricing/models.py"]} |
57,546 | ColeFMeldorf/KNC-Live | refs/heads/main | /knc/utils.py | """
Utility functions for KNC-Live
"""
import numpy as np
def sigmoid(x : np.ndarray, a : float, b : float, c : float) -> np.ndarray :
"""
A parameterized sigmoid curve
Args:
x (np.ndarray or float): x values to evaluate the sigmoid
a (float): vertical stretch parameter
b (float): horizontal shift parameter
c (float): horizontal stretch parameter
Returns:
evalutated sigmoid curve at x values for the given parameterization
"""
return a / (b + np.exp(-1.0 * c * x))
def load(filename : str) -> object:
"""
Load a pickled file into memory. Warning: Only use on data you trust
because this function overrides the default pickling allowance in numpy,
which is there for security reasons.
Args:
filename (str): path to file containing pickled object
Returns:
unpickled object
"""
return np.load(filename, allow_pickle=True).item()
def save(filename : str, obj : object):
"""
Save an object by pickling
Args:
filename (str): path to file that will contain pickled object
obj (object): a python object you want to pickle
"""
np.save(filename, obj, allow_pickle=True)
class ArgumentError(Exception):
"""
A class to raise errors for invalid arguments
"""
pass
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,547 | ColeFMeldorf/KNC-Live | refs/heads/main | /run_knc.py | """
Run KN-Classify Live
"""
import os
import argparse
from knc import process
from knc import classify
from knc.utils import ArgumentError
def parse_args() -> argparse.ArgumentParser:
"""
Parse command line arguments to enable script-like running of KNC-Live
Returns:
arrgparser object
"""
parser = argparse.ArgumentParser(description=__doc__)
# Enable command line arguments
parser.add_argument('--process',
action='store_true',
help='Run data processing')
parser.add_argument('--classify',
action='store_true',
help='Run classification')
parser.add_argument('--lcs_file',
type=str,
help='Path to lcs file')
parser.add_argument('--datasets_file',
type=str,
help='Path to datasets file')
parser.add_argument('--mode',
type=str,
help=('Type of data to classify. r=realtime, f=full, r'
'fp=realtime+force_photo, ffp=full+force_photo'))
parser.add_argument('--results_outfile',
type=str,
help='Filename to store results',
default='KNC-Live_Results.csv')
parser.add_argument('--results_dir',
type=str,
help='Directory to save results',
default='knc_results/')
parser.add_argument('--rfc_dir',
type=str,
help='Path to directory containing classifiers',
default='classifiers/')
parser.add_argument('--id_map_file',
type=str,
help='Name of ID map file in classifier directory',
default='id_map.npy')
parser.add_argument('--verbose',
action='store_true',
help='Print status updates')
parser.add_argument('--skip_cv',
action='store_true',
help='Skip hyperparam optimization')
parser.add_argument('--distribute',
action='store_true',
help='Use multiprocessing')
return parser
if __name__ == '__main__':
# Get arguments
parser = parse_args()
args = parser.parse_args()
# Validate process arguments and run
if args.process:
process_args = process.check_args(parser)
process.process_main(process_args)
# Validate classify arguments and run
if args.classify:
classify_args = classify.check_args(parser)
classify.classify_main(classify_args)
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,548 | ColeFMeldorf/KNC-Live | refs/heads/main | /knc/train.py | """
Train a Random Forest Classifier
"""
import sys
sys.path.append('knc')
import numpy as np
import pandas as pd
pd.set_option('use_inf_as_na', True)
from scipy.optimize import curve_fit
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import roc_curve
from sklearn.metrics import roc_auc_score
from sklearn.metrics import precision_recall_curve as pr_curve
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import train_test_split
from utils import sigmoid, save
class Data:
"""
Organize all data for a ML algorithm
"""
def __init__(self, df : pd.DataFrame,
doit : bool = False,
feats: list = []):
"""
Instantiate a Data object from an input DataFrame. If doit is set to
True, then the X_train, X_test, y_train, y_test, and feats
attributes are calculated
Args:
df (pd.DataFrame): DataFrame for all objects / features
doit (bool, optional, default=False): run all data prep steps
feats (list, optional, default=[]): list of features to consider
"""
self.data = df
if doit:
self.data = self.select_feats(feats)
alats = [x for x in self.feats if x not in ['SNID', 'CID', 'OBJ']]
self.data = self.clean_data()
self.prep()
def select_feats(self, feats : list = []) -> pd.DataFrame :
"""
Select a subset of features for training. Store feats as attribute.
Args:
feats (list, optional, default=[]): features to use
Returns:
A DataFrame containing only the columns in feats
Raises:
ValueError if feats contains names not in columns of self.data
"""
# Set features to use
if len(feats) == 0 and not hasattr(self, 'feats'):
return self.data
elif len(feats) != 0:
# Overwrite self.feats if feats are passed to this function
metadata_cols = ['OBJ', 'SNID']
self.feats = [x for x in feats if x not in metadata_cols]
# Check validity of features
intersection = set(feats).intersection(self.data.columns)
if len(intersection) != len(feats):
raise ValueError("One or more features are not in the data")
return self.data[feats].copy()
def clean_data(self) -> pd.DataFrame :
"""
Remove inf and NaNs from data
Returns:
df without rows containing infs and NaNs
"""
# Force numeric features
metadata_cols = ['OBJ', 'SNID']
num_cols = [x for x in self.data.columns if x not in metadata_cols]
self.data[num_cols] = self.data[num_cols].apply(pd.to_numeric)
# Deal with NaNs and infs
nas = [np.inf, -np.inf, 'inf', 'nan', 'NaN']
df = self.data.replace(nas, np.nan).dropna(axis=0, how='any')
return df.copy().reset_index(drop=True)
def prep(self):
"""
Encode and build training and testing sets
"""
# Apply one-hot encoding
kn_truth = [1 if x == 'KN' or x == 'KN-tr' else 0
for x in self.data['OBJ'].values.astype(str)]
#print(sum(kn_truth), len(self.data))
self.data['KN'] = kn_truth
# Make training and validation sets
all_feats = [x for x in self.feats if x not in ['SNID', 'CID', 'OBJ']]
X = self.data[all_feats]
y = self.data['KN']
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.2, random_state=6, stratify=y)
# Store attributes
self.X = X
self.y = y
self.X_train = X_train
self.y_train = y_train
self.X_test = X_test
self.y_test = y_test
class Classifier:
"""
ML algorithm for classification
"""
def __init__(self,
data : Data,
doit : bool = False,
verbose : bool = False,
skip_cv : bool = False,
distribute : bool = False):
"""
Instantiate a Classifier object. If doit, the best_estimator,
feature dict, best_params, and feature_importances attirbutes are
calculated
Args:
data (Data) : A prepared instance of the Data class
doit (bool) : Train a classifier
verbose (bool, default=False): Print status updates
skip_cv (bool, default=False): Skip hyperparam optimization
distribute (bool, default=False): Use multiprocessing
"""
self.data = data
self.X = data.X
self.y = data.y
self.X_train = data.X_train
self.y_train = data.y_train
self.X_test = data.X_test
self.y_test = data.y_test
self.n_jobs = -1 if distribute else None
self.rfc = RandomForestClassifier(
n_estimators=100, max_depth=5, random_state=6, criterion='gini',
n_jobs=self.n_jobs)
# Run all steps to train, optimize, and validate classifier
if doit:
# Hyperparam optimization
if not skip_cv:
if verbose:
print("\tOptimizing hyperparameters with grid search")
self.optimize_hyperparams()
else:
self.rfc.fit(self.X_train[self.data.feats], self.y_train)
# Feature selection
if verbose:
print("\tSelecting optimal features")
self.optimize_features()
# Validation
if verbose:
print("\tValidating classifier")
self.validate()
# Fit classifier on all data
self.fit([], best=True)
def optimize_hyperparams(self):
"""
Determine best hyperparamters
"""
param_grid = {'criterion': ['gini', 'entropy'],
'n_estimators': [10, 50, 100, 500],
'max_depth': [3, 5, 10, 20],
'class_weight': ['balanced_subsample',
'balanced', {0: 1, 1: 1}, {0: 5, 1:5}]}
gs = GridSearchCV(self.rfc, param_grid, cv=5, n_jobs=self.n_jobs)
gs.fit(self.data.X_train, self.data.y_train)
self.rfc = gs.best_estimator_
def optimize_features(self):
"""
Determine best features to use
"""
feature_dict = {}
feature_names = np.array(self.data.feats)
fi = self.rfc.feature_importances_
sorted_fi = sorted(fi)
# Method 1: use only featrues above maximum gradient
cut = sorted_fi[np.argmax(np.gradient(sorted_fi))]
feats = feature_names[np.where(self.rfc.feature_importances_ > cut)]
if len(feats) > 0:
self.fit(feats)
feature_dict[1] = {'FEATURES': feats,
'SCORE': self.rfc.score(self.X_test[feats],
self.y_test),
'CUTOFF': cut}
else:
feature_dict[1] = {'FEATURES': feats,
'SCORE': 0.0,
'CUTOFF': cut}
# Method 2: use only features above a slightly lower cutoff
cut = (sorted_fi[np.argmax(np.gradient(sorted_fi))] /
(0.25 * len(feature_names)))
feats = feature_names[np.where(self.rfc.feature_importances_ > cut)]
if len(feats) > 0:
self.fit(feats)
feature_dict[2] = {'FEATURES': feats,
'SCORE': self.rfc.score(self.X_test[feats],
self.y_test),
'CUTOFF': cut}
else:
feature_dict[2] = {'FEATURES': feats,
'SCORE': 0.0,
'CUTOFF': cut}
# Method 3: use all features
self.fit(feature_names)
feature_dict[3] = {'FEATURES': feature_names,
'SCORE': self.rfc.score(self.X_test, self.y_test),
'CUTOFF': 0.0}
# Store results
self.feature_dict = feature_dict
# Establish best features
best_score = 0.0
for info in feature_dict.values():
if info['SCORE'] > best_score:
self.feats = info['FEATURES']
best_score = info['SCORE']
def validate(self):
"""
Evaluate performance on test data and determine calibration
"""
# Predict on test data
self.fit(self.feats)
scores = self.rfc.predict_proba(self.X_test[self.feats])
# Calculate basic metrics
precision, recall, pr_thresholds = pr_curve(self.y_test, scores[:,1])
f1_score = 2 * (precision * recall) / (precision + recall)
pr_threshold = pr_thresholds[np.argmax(f1_score)]
fpr, tpr, roc_thresholds = roc_curve(self.y_test, scores[:,1])
auc = roc_auc_score(self.y_test, scores[:,1])
# Determine calibration
kn_probs, centers = [], []
for i in range(len(roc_thresholds) - 1):
mask = ((scores[:,1] >= roc_thresholds[i+1]) &
(scores[:,1] < roc_thresholds[i]))
if sum(mask) == 0:
continue
centers.append(0.5 * (roc_thresholds[i] + roc_thresholds[i+1]))
num_kn = sum(self.y_test[mask] == 1)
total = sum(mask)
kn_probs.append(num_kn / total)
try:
popt, pcov = curve_fit(sigmoid, centers, kn_probs)
self.calibration_coeffs = popt
self.prob_cutoff = sigmoid(pr_threshold, *popt)
except Exception:
self.calibration_coeffs = None
self.prob_cutoff = pr_threshold
# Store metrics
roc_idx = np.argmin(np.abs(roc_thresholds - pr_threshold))
pr_idx = np.argmin(np.abs(pr_thresholds - pr_threshold))
self.metrics = {'auc': auc,
'fpr': fpr[roc_idx],
'tpr': tpr[roc_idx],
'precision': precision[pr_idx],
'recall': recall[pr_idx],
'f1': max(f1_score)}
def fit(self, feats : list, best : bool = False):
"""
Fit an optimized RFC with the training data
Args:
feats (list): features to use in the fit (ignored if best==True)
best (bool): Use all data, if false only X_train is used
Returns:
a fit RFC if best == False
"""
if best:
self.rfc.fit(self.X[self.feats], self.y)
else:
self.rfc.fit(self.X_train[feats], self.y_train)
def to_dict(self):
"""
Convert Classifier object to dictionary
Returns:
Dictionary where essential attributes of self are the keys
"""
out_dict = {'rfc': self.rfc,
'metrics': self.metrics,
'feats': self.feats,
'calibration_coeffs': self.calibration_coeffs,
'prob_cutoff': self.prob_cutoff,
'feature_dict': self.feature_dict}
return out_dict
def train_new(mode : str,
dataset_id : str,
key : str,
rfc_dir : str = 'classifiers/',
verbose : bool = False,
skip_cv : bool = False,
distribute : bool = False):
"""
Train a new classifier and return its key.
Args:
mode (str): type of classifier ('r', 'f', 'rfp', 'ffp')
dataset_id (str): ID string for the dataset
key (str): ID for the newly trained classifier
rfc_dir (str, default='classifiers/'): path to classifier directory
verbose (bool, default=False): Print status updates
skip_cv (bool, default=False): Skip hyperparam optimization
distribute (bool, default=False): Use multiprocessing
"""
# Load training data
df = pd.read_csv(f'{rfc_dir}training_data_{mode}.csv')
# Determine features based on dataset ID
with open(f"{rfc_dir}features_{mode}.txt", 'r') as f:
all_feats = [x.strip() for x in f.readlines() if x != '']
feats = [x for i, x in enumerate(all_feats) if dataset_id[i] == 'F']
# Make a Data object
if verbose:
print("Preparing training data")
training_data = Data(df, feats=feats, doit=True)
# Make a classifier object
if verbose:
print("Training classifier")
classifier = Classifier(
data=training_data, doit=True, verbose=verbose, skip_cv=skip_cv,
distribute=distribute)
# Save classifier
save(f"{rfc_dir}knclassifier_{mode}_{key}.npy", classifier.to_dict())
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,549 | ColeFMeldorf/KNC-Live | refs/heads/main | /knc/features.py | """
KN-Classify hand-engineered lightcurve features
"""
import numpy as np
import pandas as pd
class FeatureExtractor():
    """
    Container for all hand-engineered lightcurve feature methods.

    Every public method is a feature with the signature
    ``feature(lc, flt1, flt2=None)`` where ``lc`` is a lightcurve DataFrame
    whose columns (MAG, MAGERR, FLT, MJD, and for SNR features FLUXCAL /
    FLUXCALERR) hold string values. Each feature returns a number, or the
    sentinel string 'N' when it cannot be computed for the given
    lightcurve / filter(s).
    """

    def __init__(self):
        # Establish feature families. At this point dir(self) contains only
        # methods (no instance attributes are set yet), so every public
        # name is a feature method.
        self.features = [x for x in dir(self) if x[0:1] != '_']
        self.families = ['nobs_brighter_than',
                         'slope',
                         'same_nite_color_diff',
                         'total_color_diff',
                         'snr',
                         'flat',
                         'half',
                         'mag']
        self.feat_families = {fam: [x for x in self.features
                                    if x.find(fam) != -1]
                              for fam in self.families}
        # 'color' features compare two filters; everything else uses one.
        self.single_features = [x for x in self.features
                                if x.find('color') == -1]
        self.double_features = [x for x in self.features
                                if x.find('color') != -1]
        return

    # Family 1: Nobs brighter than a magnitude threshold

    def __family1(self, lc, flt, threshold):
        # Count observations brighter (numerically smaller) than threshold.
        mag_arr = self.__get_mags(lc, flt)
        if len(mag_arr) > 0:
            return sum((mag_arr < threshold))
        else:
            return 'N'

    @staticmethod
    def __get_mags(lc, flt):
        # Magnitudes as floats; flt=None means all filters.
        if flt is None:
            return lc['MAG'].values.astype(float)
        else:
            return lc['MAG'].values[lc['FLT'].values == flt].astype(float)

    def nobs_brighter_than_17(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 17.0)

    def nobs_brighter_than_18(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 18.0)

    def nobs_brighter_than_19(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 19.0)

    def nobs_brighter_than_20(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 20.0)

    def nobs_brighter_than_21(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 21.0)

    def nobs_brighter_than_215(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 21.5)

    def nobs_brighter_than_22(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 22.0)

    def nobs_brighter_than_225(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 22.5)

    def nobs_brighter_than_23(self, lc, flt1, flt2=None):
        return self.__family1(lc, flt1, 23.0)

    def nobs_brighter_than_17_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 17.0)

    def nobs_brighter_than_18_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 18.0)

    def nobs_brighter_than_19_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 19.0)

    def nobs_brighter_than_20_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 20.0)

    # BUGFIX: the five _any_flt methods below previously all used a 20.0
    # threshold (copy-paste error); they now match the thresholds promised
    # by their names, consistent with the single-filter variants.
    def nobs_brighter_than_21_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 21.0)

    def nobs_brighter_than_215_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 21.5)

    def nobs_brighter_than_22_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 22.0)

    def nobs_brighter_than_225_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 22.5)

    def nobs_brighter_than_23_any_flt(self, lc, flt1, flt2=None):
        return self.__family1(lc, None, 23.0)

    # Family 2: Slope

    @staticmethod
    def __get_mjds(lc, flt):
        # MJDs as floats, shifted so the first epoch is 0.
        if flt is None:
            mjds = lc['MJD'].values.astype(float)
        else:
            mjds = lc['MJD'].values[lc['FLT'].values == flt].astype(float)
        if len(mjds) != 0:
            return mjds - mjds.min()
        else:
            return mjds

    @staticmethod
    def __combine_nites(mags, mjds):
        # Keep only the first observation from each integer-rounded nite.
        nites = mjds.round().astype(int)
        seen = set()
        out_mjds, out_mags = [], []
        for idx, nite in enumerate(nites):
            if nite not in seen:
                seen.add(nite)
                out_mjds.append(mjds[idx])
                out_mags.append(mags[idx])
        return np.array(out_mjds), np.array(out_mags)

    def slope_average(self, lc, flt1, flt2=None):
        # End-to-end slope in mag / day over the (per-nite) lightcurve.
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        mjds, mags = self.__combine_nites(mags, mjds)
        if len(mags) > 1:
            if mjds[-1] != mjds[0]:
                return (mags[-1] - mags[0]) / (mjds[-1] - mjds[0])
            else:
                return 'N'
        else:
            return 'N'

    def slope_max(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        mjds, mags = self.__combine_nites(mags, mjds)
        if len(mags) > 1:
            return np.max(np.diff(mags) / np.diff(mjds))
        else:
            return 'N'

    def slope_min(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        mjds, mags = self.__combine_nites(mags, mjds)
        if len(mags) > 1:
            return np.min(np.diff(mags) / np.diff(mjds))
        else:
            return 'N'

    def slope_mjd_of_max(self, lc, flt1, flt2=None):
        # MJD (relative to first epoch) where the steepest rise starts.
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        mjds, mags = self.__combine_nites(mags, mjds)
        if len(mags) > 1:
            return mjds[np.argmax(np.diff(mags) / np.diff(mjds))]
        else:
            return 'N'

    def slope_mjd_of_min(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        mjds, mags = self.__combine_nites(mags, mjds)
        if len(mags) > 1:
            return mjds[np.argmin(np.diff(mags) / np.diff(mjds))]
        else:
            return 'N'

    # Family 3: Same night color difference

    def __get_nite_color(self, lc, flt1, flt2):
        # Mean flt1 - flt2 color per integer-rounded nite, considering only
        # nites observed in both filters. Groups by a label array so the
        # caller's DataFrame is no longer mutated with a NITE column.
        nite_labels = lc['MJD'].values.astype(float).round().astype(int)
        colors = []
        for nite, df in lc.groupby(nite_labels):
            mags_1 = self.__get_mags(df, flt1)
            mags_2 = self.__get_mags(df, flt2)
            if len(mags_1) == 0 or len(mags_2) == 0:
                continue
            colors.append(mags_1.mean() - mags_2.mean())
        return colors

    def same_nite_color_diff_max(self, lc, flt1, flt2):
        colors = self.__get_nite_color(lc, flt1, flt2)
        if len(colors) == 0:
            return 'N'
        else:
            return max(colors)

    def same_nite_color_diff_min(self, lc, flt1, flt2):
        colors = self.__get_nite_color(lc, flt1, flt2)
        if len(colors) == 0:
            return 'N'
        else:
            return min(colors)

    def same_nite_color_diff_average(self, lc, flt1, flt2):
        colors = self.__get_nite_color(lc, flt1, flt2)
        if len(colors) == 0:
            return 'N'
        else:
            return np.mean(colors)

    # Family 4: Total color differences (aggregate flt1 vs aggregate flt2)

    def total_color_diff_max_max(self, lc, flt1, flt2):
        mags_1 = self.__get_mags(lc, flt1)
        mags_2 = self.__get_mags(lc, flt2)
        if len(mags_1) == 0 or len(mags_2) == 0:
            return 'N'
        else:
            return mags_1.max() - mags_2.max()

    def total_color_diff_max_min(self, lc, flt1, flt2):
        mags_1 = self.__get_mags(lc, flt1)
        mags_2 = self.__get_mags(lc, flt2)
        if len(mags_1) == 0 or len(mags_2) == 0:
            return 'N'
        else:
            return mags_1.max() - mags_2.min()

    def total_color_diff_min_max(self, lc, flt1, flt2):
        mags_1 = self.__get_mags(lc, flt1)
        mags_2 = self.__get_mags(lc, flt2)
        if len(mags_1) == 0 or len(mags_2) == 0:
            return 'N'
        else:
            return mags_1.min() - mags_2.max()

    def total_color_diff_min_min(self, lc, flt1, flt2):
        mags_1 = self.__get_mags(lc, flt1)
        mags_2 = self.__get_mags(lc, flt2)
        if len(mags_1) == 0 or len(mags_2) == 0:
            return 'N'
        else:
            return mags_1.min() - mags_2.min()

    def total_color_diff_mean_mean(self, lc, flt1, flt2):
        mags_1 = self.__get_mags(lc, flt1)
        mags_2 = self.__get_mags(lc, flt2)
        if len(mags_1) == 0 or len(mags_2) == 0:
            return 'N'
        else:
            return mags_1.mean() - mags_2.mean()

    # Family 5: SNR (requires FLUXCAL / FLUXCALERR columns)

    @staticmethod
    def __get_flux_and_fluxerr(lc, flt):
        return (lc['FLUXCAL'].values[lc['FLT'].values == flt].astype(float),
                lc['FLUXCALERR'].values[lc['FLT'].values == flt].astype(float))

    def snr_max(self, lc, flt1, flt2=None):
        if not ('FLUXCAL' in lc.columns and 'FLUXCALERR' in lc.columns):
            return 'N'
        flux, fluxerr = self.__get_flux_and_fluxerr(lc, flt1)
        if len(flux) == 0:
            return 'N'
        else:
            return (flux / fluxerr).max()

    def snr_mean(self, lc, flt1, flt2=None):
        if not ('FLUXCAL' in lc.columns and 'FLUXCALERR' in lc.columns):
            return 'N'
        flux, fluxerr = self.__get_flux_and_fluxerr(lc, flt1)
        if len(flux) == 0:
            return 'N'
        else:
            return (flux / fluxerr).mean()

    def snr_mjd_of_max(self, lc, flt1, flt2=None):
        # MJD (relative to first epoch in flt1) of the peak-SNR observation.
        if not ('FLUXCAL' in lc.columns and 'FLUXCALERR' in lc.columns):
            return 'N'
        flux, fluxerr = self.__get_flux_and_fluxerr(lc, flt1)
        if len(flux) == 0:
            return 'N'
        mjds = self.__get_mjds(lc, flt1)
        return mjds[np.argmax(flux / fluxerr)]

    # Family 6: Flat line fitting

    @staticmethod
    def __get_magerrs(lc, flt):
        return lc['MAGERR'].values[lc['FLT'].values == flt].astype(float)

    def flat_reduced_chi2(self, lc, flt1, flt2=None):
        # Reduced chi^2 against a constant (unweighted mean) model.
        mags = self.__get_mags(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 3:
            return 'N'
        chi2 = sum((mags - mags.mean())**2 / magerrs**2)
        dof = len(mags) - 1
        return chi2 / dof

    def flat_reduced_chi2_weighted(self, lc, flt1, flt2=None):
        # Same as flat_reduced_chi2 but against the inverse-variance mean.
        mags = self.__get_mags(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 3:
            return 'N'
        weighted_av = sum(mags / magerrs**2) / sum(1 / magerrs**2)
        chi2 = sum((mags - weighted_av) ** 2 / magerrs ** 2)
        dof = len(mags) - 1
        return chi2 / dof

    def flat_nobs_3_sigma_from_line(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 3:
            return 'N'
        return sum(np.abs((mags - mags.mean()) / magerrs) > 3)

    def flat_nobs_3_sigma_from_line_weighted(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 3:
            return 'N'
        weighted_av = sum(mags / magerrs**2) / sum(1 / magerrs**2)
        return sum(np.abs((mags - weighted_av) / magerrs) > 3)

    def flat_nobs_2_sigma_from_line(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 3:
            return 'N'
        return sum(np.abs((mags - mags.mean()) / magerrs) > 2)

    def flat_nobs_2_sigma_from_line_weighted(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 3:
            return 'N'
        weighted_av = sum(mags / magerrs**2) / sum(1 / magerrs**2)
        return sum(np.abs((mags - weighted_av) / magerrs) > 2)

    # Family 7: Half lightcurve mags (split at the MEAN MJD, not the median;
    # observations exactly at the split are excluded from both halves)

    def half_first_average_mag(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        if len(mags) < 4:
            return 'N'
        split = mjds.mean()
        return mags[mjds < split].mean()

    def half_second_average_mag(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        if len(mags) < 4:
            return 'N'
        split = mjds.mean()
        return mags[mjds > split].mean()

    def half_first_average_mag_weighted(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 4:
            return 'N'
        mask = (mjds < mjds.mean())
        return sum(mags[mask] / magerrs[mask]**2) / sum(1 / magerrs[mask]**2)

    def half_second_average_mag_weighted(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 4:
            return 'N'
        mask = (mjds > mjds.mean())
        return sum(mags[mask] / magerrs[mask]**2) / sum(1 / magerrs[mask]**2)

    def half_split_average_mag_difference(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        if len(mags) < 4:
            return 'N'
        mask = (mjds < mjds.mean())
        return mags[mask].mean() - mags[~mask].mean()

    def half_split_average_mag_difference_weighted(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        mjds = self.__get_mjds(lc, flt1)
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) < 4:
            return 'N'
        mask = (mjds < mjds.mean())
        lav = sum(mags[mask] / magerrs[mask]**2) / sum(1 / magerrs[mask]**2)
        rav = sum(mags[~mask] / magerrs[~mask]**2) / sum(1 / magerrs[~mask]**2)
        return lav - rav

    # Family 8: Full lightcurve mags

    def mag_average(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        if len(mags) == 0:
            return 'N'
        return mags.mean()

    def mag_average_weighted(self, lc, flt1, flt2=None):
        mags = self.__get_mags(lc, flt1)
        # BUGFIX: previously fetched magnitudes again instead of the
        # magnitude errors, making the "weighted" average meaningless.
        magerrs = self.__get_magerrs(lc, flt1)
        if len(mags) == 0:
            return 'N'
        return sum(mags / magerrs**2) / sum(1 / magerrs**2)

    def mag_brightest(self, lc, flt1, flt2=None):
        # Brightest observation = smallest magnitude.
        mags = self.__get_mags(lc, flt1)
        if len(mags) == 0:
            return 'N'
        return mags.min()

    def mag_total_change(self, lc, flt1, flt2=None):
        # First minus last magnitude (positive means the object faded less
        # than it started, i.e. net brightening over the lightcurve).
        mags = self.__get_mags(lc, flt1)
        if len(mags) == 0:
            return 'N'
        return mags[0] - mags[-1]
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,550 | ColeFMeldorf/KNC-Live | refs/heads/main | /knc/__init__.py | """
KN-Classify Live
"""
# Package version string; bump on release.
__version__ = '0.0.1'
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,551 | ColeFMeldorf/KNC-Live | refs/heads/main | /make_training_data.py | """
Generate training data for KNC-Live
"""
import argparse
import glob
import pandas as pd
from knc import feature_extraction as fe
from knc import process
from knc.utils import load, ArgumentError
# Handle command line arguments
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--force_photo',
                    action='store_true',
                    help='Include forced photometry in lightcurves')
parser.add_argument('--realtime',
                    action='store_true',
                    help='Prepare for data containing recent observations')
parser.add_argument('--data_dir',
                    type=str,
                    help='Directory containing training lightcurves',
                    default='training_data/')
parser.add_argument('--rfc_dir',
                    type=str,
                    help='Directory to store featurized training data',
                    default='classifiers/')
parser.add_argument('--sample',
                    type=int,
                    help='Number of examples to keep in each class',
                    default=None)
parser.add_argument('--verbose',
                    action='store_true',
                    help='Print status updates')
args = parser.parse_args()

# NOTE(review): no argument validation happens here (e.g. that data_dir
# exists) -- confirm whether checks were intended.

# Find simulated lightcurves
lcs_files = glob.glob(f"{args.data_dir}*.npy")

# Extract features and store dfs
feature_dfs = []
for filename in lcs_files:
    # Status update
    if args.verbose:
        print(filename)
    # Parse object type from filename
    # (assumes names shaped like <...>_<obj>_<x>_<y>.npy -- TODO confirm)
    obj = filename.split('_')[-3]
    # Load lightcurves into memory
    lcs = load(filename)
    # Trim lightcurves to post discovery (skipped when forced photometry
    # is requested, since pre-discovery epochs are then meaningful)
    if not args.force_photo:
        lcs = process.trim_lcs(lcs)
    # Extract features
    feat_df = fe.extract_all(
        lcs, cut_requirement=2, obj=obj, sample=args.sample,
        verbose=args.verbose)
    feature_dfs.append(feat_df)

# Determine mode: r = realtime, f = full; 'fp' suffix = forced photometry
mode = 'r' if args.realtime else 'f'
if args.force_photo:
    mode += 'fp'

# Merge dfs
train_df = pd.concat(feature_dfs)

# Save feature list
with open(f"{args.rfc_dir}features_{mode}.txt", 'w+') as f:
    f.writelines('\n'.join(train_df.columns))

# Organize into datasets
if args.verbose:
    print("Organizing")
datasets = process.organize_datasets(train_df)

# Save the best featurized dataset: the one with the most usable ('F')
# features in its dataset ID
count = 0
best_id = None
for dataset_id, feat_df in datasets.items():
    good_feats = dataset_id.count('F')
    if good_feats > count:
        best_id = dataset_id
        count = good_feats
# NOTE(review): if every dataset ID has zero 'F' flags, best_id stays None
# and the lookup below raises KeyError -- confirm inputs guarantee a match.
datasets[best_id].to_csv(f"{args.rfc_dir}training_data_{mode}.csv",
                         index=False)
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,552 | ColeFMeldorf/KNC-Live | refs/heads/main | /knc/process.py | """
Functionalities to process a lightcurve file for KN-Classify
"""
import argparse
import os
import sys
sys.path.append('knc')
import numpy as np
import pandas as pd
import feature_extraction
from utils import ArgumentError, load, save
def trim_lcs(lcs : dict, cut_requirement : int = 0) -> dict :
    """
    Drop all epochs observed before an object's discovery.

    Discovery is the first MJD where SNR (FLUXCAL / FLUXCALERR) reaches
    3.0. Lightcurves failing the cut requirement, or never discovered,
    are omitted entirely.

    Args:
        lcs (dict): dictionary from a lightcurve file
        cut_requirement (int, default=0): cut number to require

    Returns:
        copy of lcs with pre-discovery epochs removed from each lightcurve
    """
    trimmed = {}
    for snid, info in lcs.items():
        # Skip lightcurves that would be cut during feature extraction.
        passes_cut = (info['cut'] == -1 or info['cut'] > cut_requirement)
        if not passes_cut:
            continue

        lightcurve = info['lightcurve']
        flux = lightcurve['FLUXCAL'].values.astype(float)
        flux_err = lightcurve['FLUXCALERR'].values.astype(float)
        detected = (flux / flux_err) >= 3.0
        # Never reaches SNR 3: treated as undiscovered and dropped.
        if not detected.any():
            continue

        mjds = lightcurve['MJD'].values.astype(float)
        discovery_mjd = mjds[detected].min()
        kept = lightcurve[mjds >= discovery_mjd].copy().reset_index(drop=True)
        trimmed[snid] = {'lightcurve': kept, 'cut': info['cut']}
    return trimmed
def organize_datasets(df : pd.DataFrame) -> dict :
    """
    Split a DataFrame of features into separate datasets.

    Rows are grouped by which features are usable (not the 'N' sentinel),
    so every returned dataset has a fully-populated feature set.

    Args:
        df (pandas.DataFrame): DataFrame of lightcurve features

    Returns:
        A dictionary mapping dataset identifiers to the datasets. In an
        identifier, position i is 'F' when column i holds usable data and
        'T' when it is missing (inverted boolean representation).
    """
    def bitrep(arr : list) -> str:
        """Return the inverted 'T'/'F' string representation of *arr*."""
        return ''.join('F' if val else 'T' for val in arr)

    # PERF: hoist the column list out of the loop; it was previously
    # rebuilt via list(df.columns).index(...) once per column per group.
    columns = list(df.columns)

    # Group rows by their pattern of usable features
    groups, bad_cols = {}, {}
    for _, row in df.iterrows():
        br = bitrep(row != 'N')
        if br not in groups:
            groups[br] = []
            # Columns flagged 'T' carry no usable data for this group.
            bad_cols[br] = [col for col, flag in zip(columns, br)
                            if flag == 'T']
        groups[br].append(row.values)

    # Materialize each group as a DataFrame without its unusable columns
    datasets = {key: pd.DataFrame(data=rows, columns=columns).drop(
                    labels=bad_cols[key], axis=1)
                for key, rows in groups.items()}
    return datasets
def run_processing(lcs_file : str,
                   results_dir : str = None,
                   dataset_file : str = 'KNC_datasets.npy',
                   verbose : bool = False):
    """
    Run all processing steps on a lightcurve file and save results to disk.

    Args:
        lcs_file (str): Path to lightcurve file
        results_dir (str, optional, default=None): directory to save
            results; defaults to <cwd>/knc_results and is created if absent
        dataset_file (str, default='KNC_datasets.npy'): output path for the
            organized datasets
        verbose (bool, default=False): print status updates

    NOTE(review): results_dir is created but never used by the save below;
    datasets are written to dataset_file as given (relative to the cwd).
    Confirm whether the save was meant to go inside results_dir.
    """
    # Establish the results directory
    if results_dir is None:
        results_dir = f'{os.getcwd()}/knc_results'
    if results_dir[-1] == '/':
        results_dir = results_dir[:-1]
    if not os.path.exists(results_dir):
        os.mkdir(results_dir)
    # Load data
    lcs = load(lcs_file)
    # Extract features
    feat_df = feature_extraction.extract_all(lcs, verbose=verbose)
    # Organize into datasets
    datasets = organize_datasets(feat_df)
    # Save results
    save(f'{dataset_file}', datasets)
def parse_args() -> argparse.ArgumentParser :
    """
    Build the command-line parser for script-like data processing.

    Returns:
        argparse.ArgumentParser with all processing options registered
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--lcs_file', type=str, required=True,
                        help='Path to lightcurve file')
    parser.add_argument('--results_dir', type=str, default='knc_results/',
                        help='Directory to save results')
    parser.add_argument('--datasets_file', type=str,
                        default='KNC_datasets.npy',
                        help='Path to create datasets file')
    parser.add_argument('--verbose', action='store_true',
                        help='Print status updates')
    return parser
def check_args(parser : argparse.ArgumentParser) -> argparse.Namespace :
    """
    Validate the parsed command-line arguments.

    Args:
        parser (argparse.ArgumentParser): a parser object

    Returns:
        The parsed arguments if all arguments are valid

    Raises:
        knc.ArgumentError if lcs_file is not passed as argument
        knc.ArgumentError if lcs_file is not found
        knc.ArgumentError if results_dir cannot be found or created
    """
    args = parser.parse_args()

    # The lightcurve file must be supplied and must exist on disk.
    lcs_path = args.lcs_file
    if lcs_path is None:
        raise ArgumentError("Must pass the lcs_file argument in process mode")
    if not os.path.exists(lcs_path):
        raise ArgumentError(f"{lcs_path} not found")

    # The results directory must already exist or be creatable in place.
    out_dir = args.results_dir
    if not os.path.exists(out_dir):
        try:
            os.mkdir(out_dir)
        except FileNotFoundError:
            raise ArgumentError(f"{out_dir} is not valid")

    return args
def process_main(args):
    """
    Run KNC-Live in processing mode.

    Thin wrapper forwarding validated CLI arguments to run_processing.

    Args:
        args (argparse.Namespace): parsed arguments for process.py
    """
    # Run data processing
    run_processing(
        args.lcs_file, args.results_dir, args.datasets_file, args.verbose)
if __name__ == "__main__":
    # Script entry point: parse and validate command line arguments
    args = check_args(parse_args())
    # Run data processing
    process_main(args)
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,553 | ColeFMeldorf/KNC-Live | refs/heads/main | /knc/feature_extraction.py | """
Extract features from lightcurves
"""
import sys
sys.path.append('knc')
import pandas as pd
from features import FeatureExtractor
def extract(lc : pd.DataFrame,
            extractor : 'FeatureExtractor',
            flts : str = "griz") -> dict :
    """
    Extract features from a single lightcurve.

    Args:
        lc (pandas.DataFrame): A single lightcurve
        extractor (FeatureExtractor): A features.FeatureExtractor instance
        flts (str or list, default='griz'): iterable of all possible filters

    Returns:
        dict of feature names and values ('N' when a filter is absent)
    """
    # Determine which filters are present in the lightcurve
    good_flts = set(lc['FLT'].values)
    data_dict = {}

    # Single-filter features: one value per filter, 'N' when the filter
    # has no observations.
    for flt in flts:
        flt_good = flt in good_flts
        for feat in extractor.single_features:
            if flt_good:
                # BUGFIX/idiom: dispatch via getattr instead of eval() on a
                # constructed string -- same behavior, no string execution.
                data_dict[feat + '_' + flt] = getattr(extractor, feat)(lc, flt)
            else:
                data_dict[feat + '_' + flt] = 'N'

    # Double-filter (color) features: one value per filter pair; the
    # feature methods themselves return 'N' when a filter is missing.
    for pair in ['gr', 'gi', 'gz', 'ri', 'rz', 'iz']:
        for feat in extractor.double_features:
            data_dict[feat + '_' + pair] = getattr(extractor, feat)(
                lc, pair[0], pair[1])

    return data_dict
def extract_all(lcs : dict,
                cut_requirement : int = 0,
                obj : str = 'DATA',
                return_feats : bool = False,
                sample : int = None,
                verbose : bool = False) -> pd.DataFrame:
    """
    Extract features from all lightcurves in a dictionary.

    Args:
        lcs (dict): dict of all lightcurves
        cut_requirement (int, default=0): number of cut to enforce
        obj (str, default='DATA') : label to give to object
        return_feats (bool, default=False) : return list of all features
        sample (int, default=None): max number of lightcurves to use
        verbose (bool, default=False): print progress

    Returns:
        pandas DataFrame of all extracted features,
        list of all features if return_feats=True
    """
    extractor = FeatureExtractor()

    # Extract features for everything at the desired cut level
    data = []
    count = 0
    sample = len(lcs) if sample is None else sample
    for snid, info in lcs.items():
        if info['cut'] > cut_requirement or info['cut'] == -1:
            # (an unused per-lightcurve filter-set computation was removed)
            data_dict = extract(info['lightcurve'], extractor)
            data_dict['SNID'] = snid
            data_dict['OBJ'] = obj
            data.append(data_dict)
            count += 1
            if count >= sample:
                break
            if verbose and count % 50 == 0:
                progress = str(round(count / sample * 100.0, 2))
                sys.stdout.write(f"Progress: {progress} % \r")
                sys.stdout.flush()

    # Construct and clean a DataFrame; missing features become 'N'
    df = pd.DataFrame(data)
    feats = df.columns
    df = df.dropna(how='all')
    df = df.fillna('N')

    if verbose:
        print("")
    if return_feats:
        return df, feats
    return df
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,554 | ColeFMeldorf/KNC-Live | refs/heads/main | /knc/classify.py | """
Classify datasets
"""
import argparse
import os
import sys
sys.path.append('knc')
import numpy as np
import pandas as pd
import train
from utils import sigmoid, ArgumentError, load, save
def calibrate(scores: np.ndarray, popt : list) -> np.ndarray :
    """
    Transform raw classifier output scores into probabilities by applying
    a sigmoid with previously fitted calibration coefficients.

    Args:
        scores (np.array): scores assigned by classifer
        popt (list): calibration coefficients for sigmoid function

    Returns:
        calibrated scores
    """
    # (a stale hard-coded popt that lived here as dead code was removed)
    return sigmoid(scores, *popt)
def predict(classifier_dict : dict, data : pd.DataFrame) -> pd.DataFrame :
    """
    Predict on data and add results to the dataframe (modified in place).

    Args:
        classifier_dict (dict): dictionary of classifier and features to use
        data (pd.DataFrame): featurized dataset examples

    Returns:
        `data` with an added 'PROB_KN' (score / calibrated probability)
        column and a binary 'KN' decision column at `prob_cutoff`
    """
    rfc = classifier_dict['rfc']
    feats = classifier_dict['feats']
    popt = classifier_dict['calibration_coeffs']
    # Column 1 of predict_proba is the positive (KN) class score.
    scores = rfc.predict_proba(data[feats])[:,1]
    # Idiom fix: `popt is not None` (was `not popt is None`).
    if popt is not None:
        data['PROB_KN'] = calibrate(scores, popt)
    else:
        data['PROB_KN'] = scores
    data['KN'] = [1 if x >= classifier_dict['prob_cutoff']
                  else 0 for x in data['PROB_KN'].values]
    return data
def get_classifier_filename(mode : str,
                            dataset_id : str,
                            id_map_file : str = 'id_map.npy',
                            rfc_dir : str = 'classifiers/',
                            verbose : bool = False,
                            skip_cv : bool = False,
                            distribute : bool = False) -> str:
    """
    Given a classifier ID, return the filepath to the classifier. Trains
    a new classifier if no classifiers match the ID.

    Args:
        mode (str): Type of classifier ('r', 'f', 'rfp', 'ffp')
        dataset_id (str): ID string for the dataset
        id_map_file (str, default='id_map.npy'): path to map of classifier ids
        rfc_dir (str, default='classifiers/'): path to classifier directory
        verbose (bool, default=False): Print status updates
        skip_cv (bool, default=False): Skip hyperparam optimization
        distribute (bool, default=False): Use multiprocessing

    Returns:
        filename of the classifier
    """
    if not rfc_dir.endswith('/'):
        rfc_dir += '/'
    try:
        id_map = load(f"{rfc_dir}{mode}_{id_map_file}")
        key = id_map[dataset_id]
    except FileNotFoundError:
        # No id map exists yet: initialize it and train the first classifier
        key = 10000
        id_map = {dataset_id : key}
        if verbose:
            print("No classifier found, training new classifier")
        train.train_new(
            mode, dataset_id, key, rfc_dir, verbose, skip_cv, distribute)
    except KeyError:
        if verbose:
            print("No classifier found, training new classifier")
        # Train a new classifier and update the id map
        key = max([int(x) for x in id_map.values()]) + 1
        # BUGFIX: forward skip_cv/distribute here too; they were silently
        # dropped in this branch (inconsistent with the branch above).
        train.train_new(
            mode, dataset_id, key, rfc_dir, verbose, skip_cv, distribute)
        id_map[dataset_id] = key
    # Save the (possibly updated) id map
    save(f"{rfc_dir}{mode}_{id_map_file}", id_map)
    return f"{rfc_dir}knclassifier_{mode}_{key}.npy"
def classify_datasets(mode : str,
                      data_dict : dict,
                      id_map_file : str = 'id_map.npy',
                      rfc_dir : str = 'classifiers/',
                      verbose : bool = False,
                      skip_cv : bool = False,
                      distribute : bool = False) -> pd.DataFrame :
    """
    For each dataset, load the corresponding classifier and predict.

    A matching classifier is trained on the fly (see
    get_classifier_filename) when none exists for a dataset ID yet.

    Args:
        mode (str): Type of classifier ('r', 'f', 'rfp', 'ffp')
        data_dict (dict): dictionary mapping dataset IDs to datasets
        id_map_file (str, default='id_map.npy'): path to map of classifier ids
        rfc_dir (str, default='classifiers/'): path to classifier directory
        verbose (bool, default=False): Print status updates
        skip_cv (bool, default=False): Skip hyperparam optimization
        distribute (bool, default=False): Use multiprocessing

    Returns:
        DataFrame with columns SNID, PROB_KN, and KN
    """
    out_data = []
    count = 1
    total = len(data_dict)
    for dataset_id, df in data_dict.items():
        # Update status
        if verbose:
            print(f"Classifying dataset {count} of {total}")
        count += 1
        # Resolve (training on demand) the classifier for this dataset
        classifier_name = get_classifier_filename(
            mode, dataset_id, id_map_file, rfc_dir, verbose, skip_cv,
            distribute)
        # Load classifier
        classifier_dict = load(classifier_name)
        # Remove rows with infs and NaNs
        data = train.Data(df)
        clean_df = data.clean_data()
        # Apply the classifier
        res = predict(classifier_dict, clean_df)
        out_data += [(x, y, z) for x, y, z in zip(res['SNID'].values,
                                                  res['PROB_KN'].values,
                                                  res['KN'].values)]
    return pd.DataFrame(data=out_data, columns=['SNID', 'PROB_KN', 'KN'])
def parse_args() -> argparse.ArgumentParser:
    """
    Build the command-line parser for script-like classification runs.

    Returns:
        argparse.ArgumentParser with all classification options registered
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--datasets_file', type=str, required=True,
                        help='Path to datasets file')
    parser.add_argument('--mode', type=str, required=True,
                        help=('Type of data to classify. r=realtime, f=full, r'
                              'fp=realtime+force_photo, ffp=full+force_photo'))
    parser.add_argument('--results_outfile', type=str,
                        default='KNC-Live_Results.csv',
                        help='Filename to store results')
    parser.add_argument('--results_dir', type=str, default='knc_results/',
                        help='Directory to save results')
    parser.add_argument('--rfc_dir', type=str, default='classifiers/',
                        help='Path to directory containing classifiers')
    parser.add_argument('--id_map_file', type=str, default='id_map.npy',
                        help='Name of ID map file in classifier directory')
    parser.add_argument('--verbose', action='store_true',
                        help='Print status updates')
    parser.add_argument('--skip_cv', action='store_true',
                        help='Skip hyperparam optimization')
    parser.add_argument('--distribute', action='store_true',
                        help='Use multiprocessing')
    return parser
def check_args(parser : argparse.ArgumentParser) -> argparse.Namespace :
    """
    Check the arguments for invalid values.

    Args:
        parser (argparse.ArgumentParser): a parser object

    Returns:
        The parsed arguments if all arguments are valid (rfc_dir is
        normalized to end with '/')

    Raises:
        knc.ArgumentError if mode not in ('r', 'f', 'rfp', 'ffp')
        knc.ArgumentError if rfc_dir is not found
        knc.ArgumentError if datasets_file is not found
        knc.ArgumentError if results_dir cannot be found or created
    """
    args = parser.parse_args()
    # Check that the mode is valid
    if not args.mode in ['r', 'f', 'rfp', 'ffp']:
        raise ArgumentError(f"{args.mode} must be r, f, rfp, or ffp")
    # Check that the classifiers directory exists
    if not os.path.exists(args.rfc_dir):
        raise ArgumentError(f"{args.rfc_dir} not found")
    if not args.rfc_dir.endswith('/'):
        args.rfc_dir += '/'
    # Check that the processed files exist
    if not os.path.exists(args.datasets_file):
        raise ArgumentError(f"{args.datasets_file} not found")
    # Check that the results directory can be made or exists
    if not os.path.exists(args.results_dir):
        try:
            # BUGFIX: was `os.mkdir(results_dir)` -- an undefined name that
            # raised NameError whenever the directory had to be created.
            os.mkdir(args.results_dir)
        except FileNotFoundError:
            raise ArgumentError(f"{args.results_dir} is not valid")
    return args
def classify_main(args):
    """
    Run KNC-Live in classification mode and write a CSV of results.

    Args:
        args (argparse.Namespace): parsed arguments for classify.py
    """
    # Run classification
    results = classify_datasets(
        args.mode,
        load(args.datasets_file),
        args.id_map_file,
        args.rfc_dir,
        args.verbose,
        args.skip_cv,
        # BUGFIX: --distribute was parsed but never forwarded, so the
        # multiprocessing flag silently had no effect.
        args.distribute)
    # Save results
    results.to_csv(f"{args.results_dir}{args.results_outfile}", index=False)
if __name__ == "__main__":
    # Script entry point: parse and validate the command line arguments
    args = check_args(parse_args())
    # Run classification and save results
    classify_main(args)
| {"/run_knc.py": ["/knc/__init__.py", "/knc/utils.py"], "/make_training_data.py": ["/knc/__init__.py", "/knc/utils.py"]} |
57,555 | purdue-nrl/hybrid-snn-conversion | refs/heads/master | /ann.py | #############################
# @author: Nitin Rathi #
#############################
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms, models
from torchviz import make_dot
from matplotlib import pyplot as plt
import pdb
import sys
import datetime
import os
from self_models import *
def train(epoch, loader):
    """Run one training epoch over `loader` and log a summary line to `f`.

    Relies on module-level globals: model, optimizer, args, f, and
    learning_rate (which this function updates when the schedule fires).
    """
    global learning_rate

    # Step-decay schedule: divide the learning rate by 10 at these epochs.
    if epoch in [125, 200, 250]:
        for param_group in optimizer.param_groups:
            param_group['lr'] = param_group['lr'] / 10
            learning_rate = param_group['lr']

    total_correct = 0
    model.train()

    for batch_idx, (inputs, labels) in enumerate(loader):
        start_time = datetime.datetime.now()  # per-batch timestamp; unused
        if torch.cuda.is_available() and args.gpu:
            inputs, labels = inputs.cuda(), labels.cuda()

        optimizer.zero_grad()
        output = model(inputs)
        loss = F.cross_entropy(output, labels)
        loss.backward()
        optimizer.step()

        # Accumulate correct top-1 predictions for the epoch accuracy.
        pred = output.max(1, keepdim=True)[1]
        correct = pred.eq(labels.data.view_as(pred)).cpu().sum()
        total_correct += correct.item()

    f.write('\n Epoch: {}, LR: {}, Train Loss: {:.6f}, Train accuracy: {:.4f}'.format(
        epoch,
        learning_rate,
        loss,
        total_correct/len(loader.dataset)
        )
    )
def test(loader):
    """Evaluate `model` on `loader`; checkpoint whenever best accuracy improves.

    Reads module-level globals (`model`, `optimizer`, `args`, `epoch`,
    `architecture`, `dataset`, `f`) and updates `max_correct`. On a new best,
    saves weights/optimizer state to 'ann_<arch>_<dataset>.pth'.
    """
    global max_correct, start_time
    with torch.no_grad():
        model.eval()
        total_loss = 0
        correct = 0
        for batch_idx, (inputs, labels) in enumerate(loader):
            if torch.cuda.is_available() and args.gpu:
                inputs = inputs.cuda()
                labels = labels.cuda()
            logits = model(inputs)
            total_loss += F.cross_entropy(logits, labels).item()
            top1 = logits.max(1, keepdim=True)[1]
            correct += top1.eq(labels.data.view_as(top1)).cpu().sum()
        if correct > max_correct:
            # New best accuracy: persist a full checkpoint.
            max_correct = correct
            state = {
                'accuracy': max_correct.item() / len(loader.dataset),
                'epoch': epoch,
                'state_dict': model.state_dict(),
                'optimizer': optimizer.state_dict()
            }
            filename = 'ann_' + architecture.lower() + '_' + dataset.lower() + '.pth'
            torch.save(state, filename)
        f.write(' Test Loss: {:.6f}, Current: {:.2f}%, Best: {:.2f}%, Time: {}'.format(
            total_loss / (batch_idx + 1),
            100. * correct.item() / len(loader.dataset),
            100. * max_correct.item() / len(loader.dataset),
            datetime.timedelta(seconds=(datetime.datetime.now() - start_time).seconds)
        ))
if __name__ == '__main__':
    # ---- Command-line configuration ----
    parser = argparse.ArgumentParser(description='Train ANN to be later converted to SNN', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # NOTE(review): argparse's type=bool treats ANY non-empty string as True
    # ("--gpu False" still yields True). Kept unchanged for CLI compatibility;
    # a store_true/store_false action would be the proper fix.
    parser.add_argument('--gpu', default=True, type=bool, help='use gpu')
    parser.add_argument('-s','--seed', default=0, type=int, help='seed for random number')
    parser.add_argument('--dataset', default='CIFAR10', type=str, help='dataset name', choices=['MNIST','CIFAR10','CIFAR100'])
    parser.add_argument('--batch_size', default=64, type=int, help='minibatch size')
    # FIX: the VGG19 choice previously contained a leading space (' VGG19'),
    # which made '--architecture VGG19' impossible to select.
    parser.add_argument('-a','--architecture', default='VGG16', type=str, help='network architecture', choices=['VGG5','VGG9','VGG11','VGG13','VGG16','VGG19'])
    parser.add_argument('-lr','--learning_rate', default=1e-2, type=float, help='initial learning_rate')
    parser.add_argument('--pretrained_CIFAR10', default='', type=str, help='pretrained CIFAR10 model to initialize CIFAR100 training')
    parser.add_argument('--log', action='store_true', help='to print the output on terminal or to log file')
    args = parser.parse_args()

    # Route all output either to a per-run log file or to stdout.
    if args.log:
        log_file = 'ann_'+args.architecture.lower()+'_'+args.dataset.lower()+'.log'
        f = open(log_file, 'w', buffering=1)
    else:
        f = sys.stdout

    f.write('\n Log file for \'{}\', run on {}'.format(sys.argv[0], datetime.datetime.now()))
    f.write('\n\n')
    f.write('\n Dataset:{}'.format(args.dataset))
    f.write('\n Batch size: {}'.format(args.batch_size))
    f.write('\n Architecture: {}'.format(args.architecture))

    # Training settings
    torch.manual_seed(args.seed)
    if torch.cuda.is_available() and args.gpu:
        f.write("\n \t ------- Running on GPU -------")
        torch.set_default_tensor_type('torch.cuda.FloatTensor')

    # ---- Dataset loading ----
    dataset = args.dataset
    batch_size = args.batch_size

    if dataset == 'CIFAR100':
        normalize = transforms.Normalize((0.5071,0.4867,0.4408),(0.2675,0.2565,0.2761))
        labels = 100
    elif dataset == 'CIFAR10':
        normalize = transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
        labels = 10
    elif dataset == 'MNIST':
        # FIX: 'labels' was previously never assigned for MNIST, which raised
        # a NameError at model construction below.
        labels = 10

    if dataset != 'MNIST':
        # FIX: build the CIFAR augmentation pipeline only when 'normalize'
        # exists; previously this ran unconditionally and crashed for MNIST.
        transform_train = transforms.Compose([
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize
        ])
        transform_test = transforms.Compose([transforms.ToTensor(), normalize])

    if dataset == 'CIFAR100':
        train_dataset = datasets.CIFAR100(root='~/Datasets/cifar_data', train=True, download=True, transform=transform_train)
        test_dataset = datasets.CIFAR100(root='~/Datasets/cifar_data', train=False, download=True, transform=transform_test)
    elif dataset == 'CIFAR10':
        train_dataset = datasets.CIFAR10(root='~/Datasets/cifar_data', train=True, download=True, transform=transform_train)
        test_dataset = datasets.CIFAR10(root='~/Datasets/cifar_data', train=False, download=True, transform=transform_test)
    elif dataset == 'MNIST':
        train_dataset = datasets.MNIST(root='~/Datasets/mnist/', train=True, download=True, transform=transforms.ToTensor())
        test_dataset = datasets.MNIST(root='~/Datasets/mnist/', train=False, download=True, transform=transforms.ToTensor())

    train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True)
    test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False)

    # ---- Model construction ----
    architecture = args.architecture
    model = VGG(architecture, labels)
    f.write('\n{}'.format(model))

    # CIFAR100 sometimes has problems starting training. One workaround is to
    # train CIFAR10 with the same architecture first and load that checkpoint
    # here, replacing only the final classifier layer weights.
    if dataset == 'CIFAR100' and args.pretrained_CIFAR10:
        state = torch.load(args.pretrained_CIFAR10)
        if 'classifier.6.weight' in state['state_dict']:
            state['state_dict'].pop('classifier.6.weight')
            state['state_dict']['classifier.6.weight'] = model.state_dict()['classifier.6.weight']
        elif 'module.classifier.6.weight' in state['state_dict']:
            state['state_dict'].pop('module.classifier.6.weight')
            state['state_dict']['module.classifier.6.weight'] = model.state_dict()['module.classifier.6.weight']
        model.load_state_dict(state['state_dict'])

    model = nn.DataParallel(model)
    if torch.cuda.is_available() and args.gpu:
        model.cuda()

    # ---- Optimization and training loop ----
    learning_rate = args.learning_rate
    optimizer = optim.SGD(model.parameters(), lr=learning_rate, momentum=0.9, weight_decay=5e-4)

    max_correct = 0
    for epoch in range(1, 300):
        start_time = datetime.datetime.now()
        train(epoch, train_loader)
        test(test_loader)

    f.write('Highest accuracy: {:.2f}%'.format(100 * max_correct.item() / len(test_loader.dataset)))
| {"/ann.py": ["/self_models/__init__.py"]} |
57,556 | purdue-nrl/hybrid-snn-conversion | refs/heads/master | /self_models/__init__.py | from .spiking_model import *
from .vgg import *
| {"/ann.py": ["/self_models/__init__.py"]} |
57,568 | sseaver/BaseballAPI | refs/heads/master | /app/admin.py | from django.contrib import admin
from app.models import Batting, Fielding, Master, Pitching
# Register your models here.
# Register all four ball-stats models with the default admin site so they are
# editable at /admin/ (default ModelAdmin options).
admin.site.register([Batting, Fielding, Master, Pitching])
| {"/app/admin.py": ["/app/models.py"], "/BaseballAPI/urls.py": ["/app/views.py"], "/app/serializers.py": ["/app/models.py"], "/app/views.py": ["/app/models.py", "/app/serializers.py"]} |
57,569 | sseaver/BaseballAPI | refs/heads/master | /app/models.py | from django.db import models
# Create your models here.
class Master(models.Model):
    """Biographical record for a single player.

    Every column is stored as free-form text, mirroring the raw CSV import
    performed by migration 0003; no typing or validation is applied.
    """
    # Lahman-style player identifier (e.g. 'aaronha01'); the join key used by
    # the Batting/Pitching/Fielding foreign keys.
    playerID = models.CharField(max_length=10, null=True, blank=True)
    # Birth date and place, one component per column.
    birthYear = models.CharField(max_length=50, null=True, blank=True)
    birthMonth = models.CharField(max_length=50, null=True, blank=True)
    birthDay = models.CharField(max_length=50, null=True, blank=True)
    birthCountry = models.CharField(max_length=50, null=True, blank=True)
    birthState = models.CharField(max_length=5, null=True, blank=True)
    birthCity = models.CharField(max_length=50, null=True, blank=True)
    # Death date and place (blank for living players).
    deathYear = models.CharField(max_length=50, null=True, blank=True)
    deathMonth = models.CharField(max_length=50, null=True, blank=True)
    deathDay = models.CharField(max_length=50, null=True, blank=True)
    deathCountry = models.CharField(max_length=50, null=True, blank=True)
    deathState = models.CharField(max_length=5, null=True, blank=True)
    deathCity = models.CharField(max_length=50, null=True, blank=True)
    # Names.
    nameFirst = models.CharField(max_length=50, null=True, blank=True)
    nameLast = models.CharField(max_length=50, null=True, blank=True)
    nameGiven = models.CharField(max_length=50, null=True, blank=True)
    # Physical attributes and handedness ('bats'/'throws' are L/R/B codes).
    weight = models.CharField(max_length=50, null=True, blank=True)
    height = models.CharField(max_length=50, null=True, blank=True)
    bats = models.CharField(max_length=5, null=True, blank=True)
    throws = models.CharField(max_length=5, null=True, blank=True)
    # Career span.
    debut = models.CharField(max_length=50, null=True, blank=True)
    finalGame = models.CharField(max_length=50, null=True, blank=True)
    # Cross-reference IDs for Retrosheet and Baseball-Reference.
    retroID = models.CharField(max_length=10, null=True, blank=True)
    bbrefID = models.CharField(max_length=10, null=True, blank=True)
class Batting(models.Model):
    """Season batting line for a player (one row per player/year/stint).

    All stats are stored as text, mirroring the raw CSV import.
    """
    # FIX: on_delete is mandatory from Django 2.0 onward; CASCADE matches what
    # migration 0002 already declares for this relation, so no new migration
    # is required.
    playerID = models.ForeignKey(Master, on_delete=models.CASCADE)
    yearID = models.CharField(max_length=50, null=True, blank=True)
    # Ordinal of the player's stint with a team within the season.
    stint = models.CharField(max_length=50, null=True, blank=True)
    teamID = models.CharField(max_length=5, null=True, blank=True)
    lgID = models.CharField(max_length=5, null=True, blank=True)
    # Standard batting counting stats (games, at-bats, runs, hits, ...).
    G = models.CharField(max_length=50, null=True, blank=True)
    AB = models.CharField(max_length=50, null=True, blank=True)
    R = models.CharField(max_length=50, null=True, blank=True)
    H = models.CharField(max_length=50, null=True, blank=True)
    doubles = models.CharField(max_length=50, null=True, blank=True)
    triples = models.CharField(max_length=50, null=True, blank=True)
    HR = models.CharField(max_length=50, null=True, blank=True)
    RBI = models.CharField(max_length=50, null=True, blank=True)
    SB = models.CharField(max_length=50, null=True, blank=True)
    CS = models.CharField(max_length=50, null=True, blank=True)
    BB = models.CharField(max_length=50, null=True, blank=True)
    SO = models.CharField(max_length=50, null=True, blank=True)
    IBB = models.CharField(max_length=50, null=True, blank=True)
    HBP = models.CharField(max_length=50, null=True, blank=True)
    SH = models.CharField(max_length=50, null=True, blank=True)
    SF = models.CharField(max_length=50, null=True, blank=True)
    GIDP = models.CharField(max_length=50, null=True, blank=True)
class Pitching(models.Model):
    """Season pitching line for a player (one row per player/year/stint).

    All stats are stored as text, mirroring the raw CSV import.
    """
    # FIX: on_delete is mandatory from Django 2.0 onward; CASCADE matches what
    # migration 0002 already declares for this relation.
    playerID = models.ForeignKey(Master, on_delete=models.CASCADE)
    yearID = models.CharField(max_length=50, null=True, blank=True)
    # Ordinal of the player's stint with a team within the season.
    stint = models.CharField(max_length=50, null=True, blank=True)
    teamID = models.CharField(max_length=10, null=True, blank=True)
    lgID = models.CharField(max_length=10, null=True, blank=True)
    # Standard pitching counting/rate stats (wins, losses, games, ERA, ...).
    W = models.CharField(max_length=50, null=True, blank=True)
    L = models.CharField(max_length=50, null=True, blank=True)
    G = models.CharField(max_length=50, null=True, blank=True)
    GS = models.CharField(max_length=50, null=True, blank=True)
    CG = models.CharField(max_length=50, null=True, blank=True)
    SHO = models.CharField(max_length=50, null=True, blank=True)
    SV = models.CharField(max_length=50, null=True, blank=True)
    IPouts = models.CharField(max_length=50, null=True, blank=True)
    H = models.CharField(max_length=50, null=True, blank=True)
    ER = models.CharField(max_length=50, null=True, blank=True)
    HR = models.CharField(max_length=50, null=True, blank=True)
    BB = models.CharField(max_length=50, null=True, blank=True)
    SO = models.CharField(max_length=50, null=True, blank=True)
    BAOpp = models.CharField(max_length=50, null=True, blank=True)
    ERA = models.CharField(max_length=50, null=True, blank=True)
    IBB = models.CharField(max_length=50, null=True, blank=True)
    WP = models.CharField(max_length=50, null=True, blank=True)
    HBP = models.CharField(max_length=50, null=True, blank=True)
    BK = models.CharField(max_length=50, null=True, blank=True)
    BFP = models.CharField(max_length=50, null=True, blank=True)
    GF = models.CharField(max_length=50, null=True, blank=True)
    R = models.CharField(max_length=50, null=True, blank=True)
    SH = models.CharField(max_length=50, null=True, blank=True)
    SF = models.CharField(max_length=50, null=True, blank=True)
    GIDP = models.CharField(max_length=50, null=True, blank=True)
class Fielding(models.Model):
    """Season fielding line for a player (one row per player/year/stint).

    All stats are stored as text, mirroring the raw CSV import.
    """
    # FIX: on_delete is mandatory from Django 2.0 onward; CASCADE matches what
    # migration 0002 already declares for this relation.
    playerID = models.ForeignKey(Master, on_delete=models.CASCADE)
    yearID = models.CharField(max_length=50, null=True, blank=True)
    # Ordinal of the player's stint with a team within the season.
    stint = models.CharField(max_length=50, null=True, blank=True)
    teamID = models.CharField(max_length=10, null=True, blank=True)
    lgID = models.CharField(max_length=10, null=True, blank=True)
    # Fielding position plus standard fielding counting stats.
    POS = models.CharField(max_length=50, null=True, blank=True)
    G = models.CharField(max_length=50, null=True, blank=True)
    GS = models.CharField(max_length=50, null=True, blank=True)
    InnOuts = models.CharField(max_length=50, null=True, blank=True)
    PO = models.CharField(max_length=50, null=True, blank=True)
    A = models.CharField(max_length=50, null=True, blank=True)
    E = models.CharField(max_length=50, null=True, blank=True)
    DP = models.CharField(max_length=50, null=True, blank=True)
    PB = models.CharField(max_length=50, null=True, blank=True)
    WP = models.CharField(max_length=50, null=True, blank=True)
    SB = models.CharField(max_length=50, null=True, blank=True)
    CS = models.CharField(max_length=50, null=True, blank=True)
    ZR = models.CharField(max_length=50, null=True, blank=True)
| {"/app/admin.py": ["/app/models.py"], "/BaseballAPI/urls.py": ["/app/views.py"], "/app/serializers.py": ["/app/models.py"], "/app/views.py": ["/app/models.py", "/app/serializers.py"]} |
57,570 | sseaver/BaseballAPI | refs/heads/master | /BaseballAPI/urls.py | """BaseballAPI URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from app.views import (MasterListAPIView, MasterDetailUpdateDestroyAPIView, BattingListAPIView, PitchingListAPIView,
FieldingListAPIView, BattingDetailUpdateDestroyAPIView, PitchingDetailUpdateDestroyAPIView,
FieldingDetailUpdateDestroyAPIView)
# URL routing: the Django admin plus a pair of REST endpoints per model —
# a list/create route and a pk-keyed retrieve/update/destroy route.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # Master (player biographies).
    url(r'^api/master/$', MasterListAPIView.as_view(), name='master_list_api_view'),
    url(r'^api/master/(?P<pk>\d+)/$', MasterDetailUpdateDestroyAPIView.as_view(),
        name='master_detail_update_destroy_api_view'),
    # Batting stats.
    url(r'^api/batting/$', BattingListAPIView.as_view(), name='batting_list_api_view'),
    url(r'^api/batting/(?P<pk>\d+)/$', BattingDetailUpdateDestroyAPIView.as_view(),
        name='batting_detail_update_destroy_api_view'),
    # Pitching stats.
    url(r'^api/pitching/$', PitchingListAPIView.as_view(), name='pitching_list_api_view'),
    url(r'^api/pitching/(?P<pk>\d+)/$', PitchingDetailUpdateDestroyAPIView.as_view(),
        name='pitching_detail_update_destroy_api_view'),
    # Fielding stats.
    url(r'^api/fielding/$', FieldingListAPIView.as_view(), name='fielding_list_api_view'),
    url(r'^api/fielding/(?P<pk>\d+)/$', FieldingDetailUpdateDestroyAPIView.as_view(),
        name='fielding_detail_update_destroy_api_view'),
]
| {"/app/admin.py": ["/app/models.py"], "/BaseballAPI/urls.py": ["/app/views.py"], "/app/serializers.py": ["/app/models.py"], "/app/views.py": ["/app/models.py", "/app/serializers.py"]} |
57,571 | sseaver/BaseballAPI | refs/heads/master | /app/migrations/0003_auto_20161026_2107.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-26 21:07
from __future__ import unicode_literals
import csv
from django.db import migrations
def add_info(apps, schema_editor):
    """Data migration: load the bundled CSV extracts into the database.

    Reads core/Master.csv first (player biographies), then Batting, Pitching
    and Fielding, resolving each stats row to its Master record via playerID.
    Paths are relative to the working directory, so run `manage.py migrate`
    from the project root.
    """
    # Use the historical model states, as required inside a data migration.
    Master = apps.get_model("app", "Master")
    Batting = apps.get_model("app", "Batting")
    Pitching = apps.get_model("app", "Pitching")
    Fielding = apps.get_model("app", "Fielding")

    with open('core/Master.csv') as open_file:
        reader = csv.reader(open_file)
        for row in reader:
            Master.objects.create(playerID=row[0], birthYear=row[1], birthMonth=row[2],
                                  birthDay=row[3], birthCountry=row[4], birthState=row[5],
                                  birthCity=row[6], deathYear=row[7], deathMonth=row[8],
                                  deathDay=row[9], deathCountry=row[10], deathState=row[11],
                                  deathCity=row[12], nameFirst=row[13], nameLast=row[14],
                                  nameGiven=row[15], weight=row[16], height=row[17],
                                  bats=row[18], throws=row[19], debut=row[20], finalGame=row[21],
                                  retroID=row[22], bbrefID=row[23])

    with open("core/Batting.csv") as open_file:
        reader = csv.reader(open_file)
        for row in reader:
            # Resolve the owning player; raises DoesNotExist on an unknown ID.
            master = Master.objects.get(playerID=row[0])
            Batting.objects.create(playerID=master, yearID=row[1], stint=row[2],
                                   teamID=row[3], lgID=row[4], G=row[5], AB=row[6],
                                   R=row[7], H=row[8], doubles=row[9], triples=row[10],
                                   HR=row[11], RBI=row[12], SB=row[13], CS=row[14],
                                   BB=row[15], SO=row[16], IBB=row[17], HBP=row[18],
                                   SH=row[19], SF=row[20], GIDP=row[21])

    with open("core/Pitching.csv") as open_file:
        reader = csv.reader(open_file)
        for row in reader:
            master = Master.objects.get(playerID=row[0])
            Pitching.objects.create(playerID=master, yearID=row[1], stint=row[2],
                                    teamID=row[3], lgID=row[4], W=row[5], L=row[6],
                                    G=row[7], GS=row[8], CG=row[9], SHO=row[10],
                                    SV=row[11], IPouts=row[12], H=row[13], ER=row[14],
                                    HR=row[15], BB=row[16], SO=row[17], BAOpp=row[18],
                                    ERA=row[19], IBB=row[20], WP=row[21], HBP=row[22],
                                    BK=row[23], BFP=row[24], GF=row[25], R=row[26],
                                    SH=row[27], SF=row[28], GIDP=row[29])

    with open("core/Fielding.csv") as open_file:
        reader = csv.reader(open_file)
        for row in reader:
            master = Master.objects.get(playerID=row[0])
            Fielding.objects.create(playerID=master, yearID=row[1], stint=row[2],
                                    teamID=row[3], lgID=row[4], POS=row[5], G=row[6],
                                    GS=row[7], InnOuts=row[8], PO=row[9], A=row[10],
                                    E=row[11], DP=row[12], PB=row[13], WP=row[14],
                                    SB=row[15], CS=row[16], ZR=row[17])

    # FIX: a leftover debugging guard `raise Exception("BOOMSHAKALAKA!")` used
    # to sit here, which aborted (and rolled back) this migration every time it
    # ran, so the data could never actually be committed. Removed.
class Migration(migrations.Migration):
    """Data migration: populate the tables (altered by 0002) from the CSVs."""

    # Must run after the field alterations in 0002.
    dependencies = [
        ('app', '0002_auto_20161026_2145'),
    ]

    # NOTE(review): no reverse callable is given, so this migration cannot be
    # unapplied (RunPython without reverse_code is irreversible).
    operations = [
        migrations.RunPython(add_info)
    ]
| {"/app/admin.py": ["/app/models.py"], "/BaseballAPI/urls.py": ["/app/views.py"], "/app/serializers.py": ["/app/models.py"], "/app/views.py": ["/app/models.py", "/app/serializers.py"]} |
57,572 | sseaver/BaseballAPI | refs/heads/master | /app/migrations/0002_auto_20161026_2145.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-26 21:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 1.10.2, 2016-10-26).

    Re-declares every column on the four models as a nullable, blank-able
    CharField and pins the three Master foreign keys to CASCADE deletes.
    Only field options change here; no data is rewritten.
    """

    dependencies = [
        ('app', '0001_initial'),
    ]

    operations = [
        # --- Batting ---
        migrations.AlterField(
            model_name='batting',
            name='AB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='BB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='CS',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='G',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='GIDP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='H',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='HBP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='HR',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='IBB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='R',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='RBI',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='SB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='SF',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='SH',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='SO',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='doubles',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='lgID',
            field=models.CharField(blank=True, max_length=5, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='playerID',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.Master'),
        ),
        migrations.AlterField(
            model_name='batting',
            name='stint',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='teamID',
            field=models.CharField(blank=True, max_length=5, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='triples',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='batting',
            name='yearID',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        # --- Fielding ---
        migrations.AlterField(
            model_name='fielding',
            name='A',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='CS',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='DP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='E',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='G',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='GS',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='InnOuts',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='PB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='PO',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='POS',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='SB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='WP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='ZR',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='lgID',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='playerID',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.Master'),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='stint',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='teamID',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='fielding',
            name='yearID',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        # --- Master ---
        migrations.AlterField(
            model_name='master',
            name='bats',
            field=models.CharField(blank=True, max_length=5, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='bbrefID',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='birthCity',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='birthCountry',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='birthDay',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='birthMonth',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='birthState',
            field=models.CharField(blank=True, max_length=5, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='birthYear',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='deathCity',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='deathCountry',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='deathDay',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='deathMonth',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='deathState',
            field=models.CharField(blank=True, max_length=5, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='deathYear',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='debut',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='finalGame',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='height',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='nameFirst',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='nameGiven',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='nameLast',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='playerID',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='retroID',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='throws',
            field=models.CharField(blank=True, max_length=5, null=True),
        ),
        migrations.AlterField(
            model_name='master',
            name='weight',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        # --- Pitching ---
        migrations.AlterField(
            model_name='pitching',
            name='BAOpp',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='BB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='BFP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='BK',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='CG',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='ER',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='ERA',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='G',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='GF',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='GIDP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='GS',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='H',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='HBP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='HR',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='IBB',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='IPouts',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='L',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='R',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='SF',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='SH',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='SHO',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='SO',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='SV',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='W',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='WP',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='lgID',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='playerID',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.Master'),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='stint',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='teamID',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='pitching',
            name='yearID',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
    ]
| {"/app/admin.py": ["/app/models.py"], "/BaseballAPI/urls.py": ["/app/views.py"], "/app/serializers.py": ["/app/models.py"], "/app/views.py": ["/app/models.py", "/app/serializers.py"]} |
57,573 | sseaver/BaseballAPI | refs/heads/master | /app/serializers.py | from rest_framework import serializers
from app.models import Batting, Fielding, Master, Pitching
class MasterSerializer(serializers.ModelSerializer):
    """Serialize every field of Master (player biographies)."""
    class Meta:
        model = Master
        fields = '__all__'
class BattingSerializer(serializers.ModelSerializer):
    """Serialize every field of Batting (season batting stats)."""
    class Meta:
        model = Batting
        fields = '__all__'
class FieldingSerializer(serializers.ModelSerializer):
    """Serialize every field of Fielding (season fielding stats)."""
    class Meta:
        model = Fielding
        fields = '__all__'
class PitchingSerializer(serializers.ModelSerializer):
    """Serialize every field of Pitching (season pitching stats)."""
    class Meta:
        model = Pitching
        fields = '__all__'
| {"/app/admin.py": ["/app/models.py"], "/BaseballAPI/urls.py": ["/app/views.py"], "/app/serializers.py": ["/app/models.py"], "/app/views.py": ["/app/models.py", "/app/serializers.py"]} |
57,574 | sseaver/BaseballAPI | refs/heads/master | /app/views.py | from django.shortcuts import render
from app.models import Batting, Fielding, Master, Pitching
from app.serializers import MasterSerializer, BattingSerializer, FieldingSerializer, PitchingSerializer
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
# Create your views here.
class MasterListAPIView(ListCreateAPIView):
    """GET: list all Master rows; POST: create one (/api/master/)."""
    queryset = Master.objects.all()
    serializer_class = MasterSerializer
class MasterDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE one Master row by pk (/api/master/<pk>/)."""
    queryset = Master.objects.all()
    serializer_class = MasterSerializer
class BattingListAPIView(ListCreateAPIView):
    """GET: list all Batting rows; POST: create one (/api/batting/)."""
    queryset = Batting.objects.all()
    serializer_class = BattingSerializer
class BattingDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE one Batting row by pk (/api/batting/<pk>/)."""
    queryset = Batting.objects.all()
    serializer_class = BattingSerializer
class FieldingListAPIView(ListCreateAPIView):
    """GET: list all Fielding rows; POST: create one (/api/fielding/)."""
    queryset = Fielding.objects.all()
    serializer_class = FieldingSerializer
class FieldingDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE one Fielding row by pk (/api/fielding/<pk>/)."""
    queryset = Fielding.objects.all()
    serializer_class = FieldingSerializer
class PitchingListAPIView(ListCreateAPIView):
    """GET: list all Pitching rows; POST: create one (/api/pitching/)."""
    queryset = Pitching.objects.all()
    serializer_class = PitchingSerializer
class PitchingDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE one Pitching row by pk (/api/pitching/<pk>/)."""
    queryset = Pitching.objects.all()
    serializer_class = PitchingSerializer
| {"/app/admin.py": ["/app/models.py"], "/BaseballAPI/urls.py": ["/app/views.py"], "/app/serializers.py": ["/app/models.py"], "/app/views.py": ["/app/models.py", "/app/serializers.py"]} |
57,583 | ocolisanrares97/Equation-System-Solver-with-Python | refs/heads/master | /helperFunctions.py | import numpy as np
import random
from parsing_functions import *
#global_filename = "sistem.txt"
# equations = getFileContent(global_filename)
#
# coefficients, freeTerms = getCoefficientsAndFreeTerms(equations)
#
# population_size = 8
# no_of_variables = len(coefficients[0])
'''
- calcFitness calculates the fitness of a solution
- we compute the left-hand side of the equation by adding together (solution*coefficient)
- then we subtract from this the free term: fi = lhsEvaluation - freeTerm
- we add abs(fi) to F, which has to be as small as possible for the approximation to be close
to the real value of the solution
- the resulting value is the fitness value of the solution set
'''
def calcFitnessAbsValue(coefficients, freeTerms, variables):
    """Sum-of-absolute-residuals fitness for a candidate solution.

    For every equation, evaluate the left-hand side as the dot product of
    its coefficient row with ``variables``, subtract the free term and
    accumulate the absolute residual.  Lower is better; 0.0 means the
    candidate solves the system exactly.

    coefficients -- list of coefficient rows, one per equation
    freeTerms    -- right-hand-side constants, parallel to ``coefficients``
    variables    -- candidate values for the unknowns
    """
    F = 0.0
    for row, freeTerm in zip(coefficients, freeTerms):
        # Dot product of the equation's coefficients with the candidate.
        lhsEvaluation = sum(c * v for c, v in zip(row, variables))
        F += abs(lhsEvaluation - freeTerm)
    return F
def calcFitnessSquare(coefficients, freeTerms, variables):
    """Sum-of-squared-residuals fitness, truncated to three decimals.

    Same residuals as calcFitnessAbsValue but squared, which penalises
    large errors more heavily.  The total is truncated (not rounded) to
    3 decimal places via ``truncate`` before being returned as a float.
    """
    eq_index = 0
    total = 0.0
    for row in coefficients:
        lhs = 0.0
        for col in range(len(row)):
            lhs += row[col] * variables[col]
        residual = lhs - freeTerms[eq_index]
        eq_index += 1
        total += residual ** 2
    return float(truncate(total, 3))
'''Truncates/pads a float f to n decimal places without rounding'''
def truncate(f, n):
    """Truncate (never round) ``f`` to ``n`` decimal places.

    Returns a string zero-padded to exactly ``n`` decimals.  Values whose
    repr is in scientific notation fall back to fixed-point formatting.
    """
    text = '{}'.format(f)
    if 'e' in text or 'E' in text:
        # Scientific-notation repr: format as fixed-point instead.
        return '{0:.{1}f}'.format(f, n)
    whole, _, frac = text.partition('.')
    return whole + '.' + (frac + '0' * n)[:n]
'''
- initializePopulation generates the first population of solutions
- popoulation_size = the number of potential solutions in the population
- no_of_variables = number of unknowns matched to the coefficients
'''
def initializePopulation(population_size, no_of_variables):
    """Create the initial random population of candidate solutions.

    Each candidate is a list of ``no_of_variables`` floats drawn uniformly
    from [-11, 11).

    population_size -- number of candidate solutions to generate
    no_of_variables -- number of unknowns in the system

    The original contained a no-op loop (``solution[j] = solution[j]``);
    it has been removed.
    """
    population = []
    for _ in range(population_size):
        # np.random.uniform returns an ndarray; store plain lists so the
        # GA operators can mutate candidates element-wise.
        population.append(list(np.random.uniform(-11, 11, no_of_variables)))
    return population
'''
Computes the distance between two points in the Euclidean Space
'''
def euclideanDistance(x1, y1, x2, y2):
    """Return the straight-line distance between (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return (dx * dx + dy * dy) ** 0.5
'''
Weighted distance used in the mating selection process
DW(p)
'''
def DW(pop, index, fitList1, fitList2):
    """Weighted distance of solution ``index`` to all other solutions.

    Used as a tie-breaker during mating selection: each neighbour's
    distance in fitness space (abs fitness vs squared fitness) is weighted
    by the reciprocal of its deviation from the average distance,
    normalised so the weights sum to 1.

    pop      -- current population (only its length is used)
    index    -- index of the solution being scored
    fitList1 -- first fitness objective per solution
    fitList2 -- second fitness objective per solution

    Raises ZeroDivisionError when a neighbour's distance equals the
    average distance exactly (inherited from the original formula).

    The original recomputed every pairwise distance in three identical
    loops; they are now computed once.
    """
    k = len(pop)
    p_x = fitList1[index]
    p_y = fitList2[index]
    # Euclidean distances from solution ``index`` to every other solution.
    distances = [
        ((p_x - fitList1[i]) ** 2 + (p_y - fitList2[i]) ** 2) ** 0.5
        for i in range(k)
        if i != index
    ]
    # NOTE(review): the mean divides by k although only k-1 distances are
    # summed -- preserved from the original; confirm this is intentional.
    mean_dist = sum(distances) / k
    r_sum = sum(1 / abs(d - mean_dist) for d in distances)
    return sum(((1 / abs(d - mean_dist)) / r_sum) * d for d in distances)
'''
-mating_selection using tournament criterion
'''
def mating_selection(population, set_of_KneePoints, pop_size, fitnessListAbs, fitnessListSquare):
    """Fill a mating pool of ``pop_size`` parents via binary tournaments.

    Two candidates are drawn uniformly at random (with replacement); the
    one with the lower absolute fitness wins.  Ties are broken first by
    knee-point membership, then by the weighted distance DW (larger wins),
    and finally by a fair coin flip.

    Returns the list of selected parents (duplicates are expected).
    """
    pool = []
    while len(pool) < pop_size:
        i = np.random.randint(0, pop_size)
        j = np.random.randint(0, pop_size)
        a, b = population[i], population[j]
        a_fit, b_fit = fitnessListAbs[i], fitnessListAbs[j]
        if a_fit < b_fit:
            pool.append(a)
        elif b_fit < a_fit:
            pool.append(b)
        elif a in set_of_KneePoints and b not in set_of_KneePoints:
            pool.append(a)
        elif b in set_of_KneePoints and a not in set_of_KneePoints:
            pool.append(b)
        else:
            # DW is deterministic, so compute each score once (the
            # original evaluated both up to twice).
            dw_a = DW(population, i, fitnessListAbs, fitnessListSquare)
            dw_b = DW(population, j, fitnessListAbs, fitnessListSquare)
            if dw_a > dw_b:
                pool.append(a)
            elif dw_b > dw_a:
                pool.append(b)
            # BUG FIX: np.random.rand(0, 1) built an empty (0, 1) array
            # whose truth value is ill-defined; use a scalar draw instead.
            elif np.random.random() < 0.5:
                pool.append(a)
            else:
                pool.append(b)
    return pool
'''
Non dominated sort
Sorts the population based on the dominantion criteria
(if we have solutions 'a' and 'b' and the fitness of solution 'a' < fitness of sol 'b', then
a dominates b)
'''
def non_dominated_sort(population, fitnessList):
    """Return ``population`` reordered by ascending fitness.

    Despite the name this is a plain single-objective sort: lower fitness
    (better solution) comes first.  The sort is stable.

    BUG FIX: the original matched sorted fitness values back to solutions
    by equality in a nested loop, which duplicated some solutions and
    dropped others whenever two candidates shared a fitness value, and ran
    in O(n^2).  Sorting indices by fitness is O(n log n) and handles ties
    correctly.
    """
    order = sorted(range(len(population)), key=lambda idx: fitnessList[idx])
    return [population[idx] for idx in order]
'''
Computes the distance from point C(x3,y3) to the line determined by points A(x1,y1) and B(x2,y2)
'''
def distanceToLine(x1, y1, x2, y2, x3, y3):
    """Distance from point C(x3, y3) to the line through A(x1, y1), B(x2, y2).

    Uses the standard cross-product formula |AB x AC| / |AB|.

    BUG FIX: the original divided by the length of segment BC instead of
    the length of AB, so the result was only correct when |BC| happened to
    equal |AB|.
    """
    numerator = np.abs((x3 - x2) * (y2 - y1) - (x2 - x1) * (y3 - y2))
    return numerator / np.sqrt(np.square(x2 - x1) + np.square(y2 - y1))
'''
Find best solution from current population
'''
def find_best_solution(population, coefficients, freeTerms):
    """Return the population member with the lowest absolute fitness.

    When several members tie for the best fitness, the last one in the
    population order is returned (matching the original scan order).
    """
    scores = [calcFitnessAbsValue(coefficients, freeTerms, member)
              for member in population]
    best_score = min(scores)
    best_member = None
    for member, score in zip(population, scores):
        if score == best_score:
            best_member = member
    return best_member
'''
-Compute the probability that each solution will be selected for the next generation
based on the fitness value
-by generating random values between 0 and 1, and taking into account the values of the cumulative
probabilites we select the best solutions for the next generation
'''
def roulette_wheel_selection(fitnessList, next_generation):
    """Fitness-proportionate (roulette-wheel) selection.

    Each solution gets weight 1/(1 + fitness), so a lower (better) fitness
    means a larger slice of the wheel.  One uniform draw per output slot
    picks the solution whose cumulative-probability interval contains it.

    fitnessList     -- fitness value per solution in ``next_generation``
    next_generation -- candidate solutions to select from

    Returns the selected solutions (duplicates expected).  Due to
    floating-point rounding a draw can in principle fall past the last
    interval, in which case that slot is skipped -- behaviour preserved
    from the original.

    Fixes: cumulative probabilities are now built with a running sum in
    O(n) (the original rebuilt every prefix, O(n^2)) and the builtin
    ``sum`` is no longer shadowed.
    """
    weights = [1.0 / (1.0 + fit) for fit in fitnessList]
    total = sum(weights)
    probabilities = [w / total for w in weights]
    # Running cumulative sum; same left-to-right addition order as before,
    # so the float values are identical.
    cumulative = []
    running = 0.0
    for p in probabilities:
        running += p
        cumulative.append(running)
    selected = []
    for _ in range(len(next_generation)):
        draw = random.random()
        if draw < cumulative[0]:
            selected.append(next_generation[0])
        else:
            for j in range(len(cumulative) - 1):
                if cumulative[j] <= draw < cumulative[j + 1]:
                    selected.append(next_generation[j + 1])
    return selected
'''
Crossover function
'''
def crossover(selected_chromosomes, crossover_rate, no_of_variables, coefficients, freeTerms):
    """Single-point crossover over a randomly chosen subset of parents.

    Each chromosome is picked for crossover with probability
    ``crossover_rate``.  Picked parent ``i`` is recombined with the picked
    parent mirrored from the other end of the picked list: the child takes
    the head of one parent and the tail of the other, split at a random
    point in [1, no_of_variables).  The children overwrite the *last*
    ``len(picked)`` entries of ``selected_chromosomes`` in place, and the
    (mutated) list is returned.

    ``coefficients`` and ``freeTerms`` are kept for interface
    compatibility only: the fitness values the original computed from them
    were never used, so that dead code has been removed.
    """
    parents = []
    for chromosome in selected_chromosomes:
        if random.random() <= crossover_rate:
            parents.append(chromosome)
    children = []
    for parent_idx in range(len(parents)):
        cut = random.randrange(1, no_of_variables)
        head_parent = parents[parent_idx]
        tail_parent = parents[len(parents) - parent_idx - 1]
        children.append(head_parent[:cut] + tail_parent[cut:no_of_variables])
    # Overwrite the tail of the pool with the children, as the original did.
    offset = len(selected_chromosomes) - len(children)
    for child_idx, child in enumerate(children):
        selected_chromosomes[offset + child_idx] = child
    return selected_chromosomes
'''
Mutation function
'''
def mutate(population_after_crossover, mutation_rate, population_size, no_of_variables):
    """Randomly mutate genes across the population, in place.

    The number of mutations is ``mutation_rate`` times the total gene
    count, rounded.  Each mutation picks a random chromosome and a random
    gene and replaces that gene with a uniform value in [-10, 10].

    BUG FIX: the original always mutated gene index 1, which both ignored
    every other gene during mutation and raised IndexError for
    single-variable systems; the gene index is now chosen at random.
    """
    total_genes = population_size * no_of_variables
    mutations = round(mutation_rate * total_genes)
    for _ in range(mutations):
        chromosome = random.randrange(0, len(population_after_crossover))
        gene = random.randrange(0, no_of_variables)
        population_after_crossover[chromosome][gene] = random.uniform(-10, 10)
    return population_after_crossover
'''
Function to improve the fitness of a solution by adding/substracting a small value
'''
def imporve_solution(sol, coefficients, freeTerms, prcecisionUsed):
    """Greedy local search: nudge each unknown by +/-``prcecisionUsed``
    and keep the nudge whenever it lowers the absolute fitness; mutates
    ``sol`` in place.

    NOTE(review): the misspelled names ``imporve_solution`` and
    ``prcecisionUsed`` are part of the public interface used by main.py,
    so they are kept as-is.
    NOTE(review): all loops run over ``len(coefficients)`` (the number of
    equations) while indexing the unknowns -- this assumes a square
    system; confirm against the callers.
    """
    precision = prcecisionUsed #0.003 #we use this as the value that will be added/substracted to fine tune the solution
    fitness = calcFitnessAbsValue(coefficients,freeTerms,sol) #calc the current fitness of our population
    new_sol = []
    for i in range(0,len(coefficients)):
        new_sol.append(sol[i])
    for i in range(0,len(coefficients)):
        # Work on a fresh copy of ``sol`` so each variable is tested
        # independently against the original fitness.
        auxSolution = []
        for j in range(0, len(coefficients)):
            auxSolution.append(sol[j])
        auxSolution[i] = auxSolution[i] + precision #add the precision to each variable and check
        #the fitness of the pop after
        tempFitness = calcFitnessAbsValue(coefficients,freeTerms,auxSolution)
        if tempFitness < fitness: #if the new fitness is better we leave it like this
            new_sol[i] = auxSolution[i]
        else: #otherwise we subtract the precision from the variable
            #and we check again if the new fitness is better
            auxSolution[i] = auxSolution[i] - 2*(precision)
            tempFitness = calcFitnessAbsValue(coefficients,freeTerms,auxSolution)
            if tempFitness < fitness:
                new_sol[i] = auxSolution[i]
    for i in range(0,len(new_sol)):
        sol[i] = new_sol[i]
    newFit = calcFitnessAbsValue(coefficients,freeTerms,sol)
    #because there are cases when the solution won't improve (the fitness will be larger)
    # we approach a method that uses a coin-flip principle in order to variate the values of
    # our variables
    if newFit >= fitness:
        #if we reached an impass, we generate a random number between 0 and 1 for each variable of
        # our system
        # NOTE(review): this loop stops at len(coefficients)-1, so the
        # last unknown is never perturbed by the coin flip -- confirm
        # whether that is intentional.
        for i in range(0,len(coefficients)-1):
            RandNR = random.random()
            if RandNR < 0.5: #if the random nr is smaller than 0.5 we add the "precision" to the current
                # variable
                new_sol[i] = new_sol[i] + precision
            else: #otherwise we subtract the precision
                new_sol[i] = new_sol[i] - precision
        for i in range(0, len(new_sol)):
            sol[i] = new_sol[i]
    #by using this coin flip principle of adding or subtracting the precision, I found out that the
    #algorithm exits the potential loop when it gets stuck at the same fitness value and stops improving
    #print("{0} --> {1}".format(fitness,newFit))
| {"/helperFunctions.py": ["/parsing_functions.py"], "/main.py": ["/helperFunctions.py", "/generator.py"], "/gui.py": ["/main.py"]} |
57,584 | ocolisanrares97/Equation-System-Solver-with-Python | refs/heads/master | /parsing_functions.py |
''' PARSING STAGE '''
#function that returns a list of strings, each one representing an equation
#read contents of the file
def getFileContent(filename):
    """Read ``filename`` and return its lines, whitespace-stripped.

    On any OS-level failure (missing file, permissions, ...) a message is
    printed and an empty list is returned instead of raising.
    """
    lines = []
    try:
        with open(filename) as source:
            lines = source.readlines()
    except OSError:
        print("Invalid filename/file does not exist")
    stripped = []
    for raw_line in lines:
        stripped.append(raw_line.strip())
    return stripped
'''
getCoefficients extracts the coefficients, free terms and variables from the strings
representing the equations
'''
def getCoefficientsAndFreeTerms(equations):
    """Parse equation strings like "+3*x0 -2*x1 = 5" into the coefficient
    matrix and the free-term vector.

    equations -- list of strings, one equation per entry (the format
                 produced by generator.Generator.print_sistem)

    Returns (coefficients, freeTerms) where ``coefficients[i]`` is the
    list of floats for equation ``i`` and ``freeTerms[i]`` its RHS.

    NOTE(review): the ``except OSError`` can never fire here -- malformed
    input raises ValueError/IndexError, which propagate; and if OSError
    were ever caught, the final return would raise UnboundLocalError.
    Presumably the intent was to catch parse errors; confirm before
    changing.
    """
    try:
        # leftHandSide and rightHandSide of the equations
        leftHandSides = []
        rightHandSides = []
        # we split the entire equation based on the "=" sign into lefths and righths
        for eq in equations:
            a = eq.split("=")
            leftHandSides.append(a[0])
            rightHandSides.append(a[1])
        #eliminate white space from the strings representing the free terms
        for i in range(0,len(rightHandSides)):
            rightHandSides[i] = rightHandSides[i].strip(" ")
        #in the Lhs we split the coefficients from the variables
        for i in range(0, len(leftHandSides)):
            leftHandSides[i] = leftHandSides[i].replace("*", " ").split(" ")
            del leftHandSides[i][-1] #eliminates the trailing empty string from the split
        #we extract each coefficient of the variables, convert it to float and add it to the
        #matrix; tokens alternate coefficient/variable, hence the step of 2
        coeff = []
        coefficients = []
        for i in range(0, len(leftHandSides)):
            eq = leftHandSides[i]
            coeff = []
            for i in range(0, len(eq), 2):
                singleCoeff = eq[i]
                coeff.append(float(singleCoeff))
            coefficients.append(coeff)
        #get free terms
        freeTerms = []
        for i in range(0,len(rightHandSides)):
            freeTerms.append(float(rightHandSides[i]))
    except OSError:
        print("Cannot divide lists because of the invalid equations format")
    return coefficients,freeTerms
| {"/helperFunctions.py": ["/parsing_functions.py"], "/main.py": ["/helperFunctions.py", "/generator.py"], "/gui.py": ["/main.py"]} |
57,585 | ocolisanrares97/Equation-System-Solver-with-Python | refs/heads/master | /main.py | from helperFunctions import *
from generator import *
import time
#++++++++++++++++++++++++++++++++++++++++ IMPROVED GENETIC ALGORITHM PROCEDURE++++++++++++++++++++++++++++++++++
def bootImproved(filename,pop_size,crossRate,mutationRate,fitTreshold,prec ):
    """Run the "improved" GA: evolve until the best fitness drops to or
    below ``fitTreshold``, then locally fine-tune the best solution with
    imporve_solution.

    filename     -- path of the equation-system file
    pop_size     -- number of candidate solutions per generation
    crossRate    -- probability a chromosome is picked for crossover
    mutationRate -- fraction of all genes mutated per generation
    fitTreshold  -- stop evolving once the best fitness <= this value
    prec         -- step size used by the local improvement phase

    Returns (solution values, best fitness, wall-clock seconds).
    """
    startTime = time.time() #get the starting time of the function
    equations = getFileContent(filename)
    coefficients, freeTerms = getCoefficientsAndFreeTerms(equations)
    population_size = pop_size #1000
    crossover_rate = crossRate #0.50 #25% chance to use current member for crossover
    mutation_rate = mutationRate#0.2 #10% chance to mutate
    no_of_variables = len(coefficients[0])
    population = initializePopulation(population_size , no_of_variables)
    next_generation = population
    #
    # gen = Generator()
    # gen.print_sistem()
    bestFit = 3000
    while bestFit > fitTreshold:
        previous_generation = next_generation.copy()
        ''' compute the fitness value for each solution '''
        fitnessList = []
        for pop in next_generation:
            fitnessList.append(calcFitnessAbsValue(coefficients, freeTerms, pop))
        fitList2 = []
        for pop in next_generation:
            fitList2.append(calcFitnessSquare(coefficients,freeTerms,pop))
        #K =[]
        #selected_chromosomes = mating_selection(population,K,population_size,fitnessList,fitList2)
        ''' select the best solutions for the next generation '''
        selected_chromosomes = roulette_wheel_selection(fitnessList, next_generation)
        ''' apply the crossover on the selected chromosomes '''
        population_after_crossover = crossover(selected_chromosomes,crossover_rate,no_of_variables,coefficients,freeTerms)
        ''' mutate the population after crossover '''
        next_generation = mutate(population_after_crossover, mutation_rate,population_size,no_of_variables)
        fit_Previous_gen = []
        for sol in previous_generation:
            fit_Previous_gen.append(calcFitnessAbsValue(coefficients, freeTerms, sol))
        fit_Next_gen = []
        for sol in next_generation:
            fit_Next_gen.append(calcFitnessAbsValue(coefficients, freeTerms, sol))
        # best_sol_previous = find_best_solution(previous_generation)
        # best_sol_next = find_best_solution(next_generation)
        bestFitPrev = min(fit_Previous_gen)
        bestFitNew = min(fit_Next_gen)
        bestFit = bestFitNew
        # Elitism: if the new generation regressed, roll back to the
        # previous generation and keep its best fitness.
        if bestFitPrev < bestFitNew:
            for i in range(0,len(next_generation)):
                next_generation[i] = previous_generation[i]
            bestFit = bestFitPrev
    # Local fine-tuning phase on the best solution found by the GA.
    best_sol = find_best_solution(next_generation, coefficients,freeTerms)
    for i in range(0,2000):
        imporve_solution(best_sol,coefficients,freeTerms,prec)
    # NOTE(review): final_bestFitness is the pre-fine-tuning value;
    # confirm whether the post-improvement fitness should be reported.
    final_bestFitness = bestFit
    print(bestFit)
    #print(find_best_solution(next_generation))
    sol = find_best_solution(next_generation, coefficients, freeTerms)
    print(sol)
    final_solution = []
    for i in range(0,len(sol)):
        final_solution.append(sol[i])
    endTime = time.time()
    totalTime = endTime - startTime
    return final_solution, final_bestFitness, totalTime
#++++++++++++++++++++++++++++++++++++++++ BASIC GENETIC ALGORITHM PROCEDURE++++++++++++++++++++++++++++++++++
def bootBasic(filename,pop_size,crossRate,mutationRate,fitTreshold,prec):
    """Run the basic GA: identical to bootImproved but without the local
    fine-tuning phase (the imporve_solution loop is commented out).

    filename     -- path of the equation-system file
    pop_size     -- number of candidate solutions per generation
    crossRate    -- probability a chromosome is picked for crossover
    mutationRate -- fraction of all genes mutated per generation
    fitTreshold  -- stop evolving once the best fitness <= this value
    prec         -- unused here; kept so both boot functions share a
                    signature

    Returns (solution values, best fitness, wall-clock seconds).
    """
    startTime = time.time() # get the starting time of the function
    equations = getFileContent(filename)
    coefficients, freeTerms = getCoefficientsAndFreeTerms(equations)
    population_size = pop_size #1000
    crossover_rate = crossRate #0.50 #25% chance to use current member for crossover
    mutation_rate = mutationRate#0.2 #10% chance to mutate
    no_of_variables = len(coefficients[0])
    population = initializePopulation(population_size , no_of_variables)
    next_generation = population
    #
    # gen = Generator()
    # gen.print_sistem()
    bestFit = 3000
    while bestFit > fitTreshold:
        previous_generation = next_generation.copy()
        ''' compute the fitness value for each solution '''
        fitnessList = []
        for pop in next_generation:
            fitnessList.append(calcFitnessAbsValue(coefficients, freeTerms, pop))
        fitList2 = []
        for pop in next_generation:
            fitList2.append(calcFitnessSquare(coefficients,freeTerms,pop))
        #K =[]
        #selected_chromosomes = mating_selection(population,K,population_size,fitnessList,fitList2)
        ''' select the best solutions for the next generation '''
        selected_chromosomes = roulette_wheel_selection(fitnessList, next_generation)
        ''' apply the crossover on the selected chromosomes '''
        population_after_crossover = crossover(selected_chromosomes,crossover_rate,no_of_variables,coefficients,freeTerms)
        ''' mutate the population after crossover '''
        next_generation = mutate(population_after_crossover, mutation_rate,population_size,no_of_variables)
        fit_Previous_gen = []
        for sol in previous_generation:
            fit_Previous_gen.append(calcFitnessAbsValue(coefficients, freeTerms, sol))
        fit_Next_gen = []
        for sol in next_generation:
            fit_Next_gen.append(calcFitnessAbsValue(coefficients, freeTerms, sol))
        # best_sol_previous = find_best_solution(previous_generation)
        # best_sol_next = find_best_solution(next_generation)
        bestFitPrev = min(fit_Previous_gen)
        bestFitNew = min(fit_Next_gen)
        bestFit = bestFitNew
        # Elitism: if the new generation regressed, roll back to the
        # previous generation and keep its best fitness.
        if bestFitPrev < bestFitNew:
            for i in range(0,len(next_generation)):
                next_generation[i] = previous_generation[i]
            bestFit = bestFitPrev
    # best_sol = find_best_solution(next_generation, coefficients,freeTerms)
    # for i in range(0,2000):
    #     imporve_solution(best_sol,coefficients,freeTerms,prec)
    final_bestFitness = bestFit
    print(bestFit)
    #print(find_best_solution(next_generation))
    sol = find_best_solution(next_generation, coefficients, freeTerms)
    print(sol)
    final_solution = []
    for i in range(0,len(sol)):
        final_solution.append(sol[i])
    endTime = time.time()
    totalTime = endTime - startTime
    return final_solution, final_bestFitness, totalTime
| {"/helperFunctions.py": ["/parsing_functions.py"], "/main.py": ["/helperFunctions.py", "/generator.py"], "/gui.py": ["/main.py"]} |
57,586 | ocolisanrares97/Equation-System-Solver-with-Python | refs/heads/master | /gui.py | from main import *
from tkinter import *
import threading
# NOTE(review): ``dead`` and ``thread`` are never used anywhere in this
# module -- presumably leftovers from an abandoned background-worker idea.
dead = False
thread = threading.Thread()
#____BUTTON COMMANDS________
def _selected_algorithm():
    """Return the boot function picked via the radio buttons, or None."""
    if radioButtuonValue.get() == 1:
        return bootBasic
    if radioButtuonValue.get() == 2:
        return bootImproved
    return None


def _show_algorithm_prompt():
    """Ask the user to pick an algorithm in the results box."""
    resultsTextField.delete("1.0", END)
    resultsTextField.insert(END, "Please select which algorithm you wish to use.")


def _render_results(final_solution, fit, execTime):
    """Write fitness, per-variable values and runtime into the results box."""
    resultsTextField.delete("1.0", END)
    resultsTextField.insert(END, "Fitness values of the solution is: ")
    resultsTextField.insert(END, fit)
    resultsTextField.insert(END, "\n")
    for i in range(0, len(final_solution)):
        resultsTextField.insert(END, "x")
        resultsTextField.insert(END, i + 1)
        resultsTextField.insert(END, " = ")
        resultsTextField.insert(END, final_solution[i])
        resultsTextField.insert(END, "\n")
    resultsTextField.insert(END, "Execution time: ")
    resultsTextField.insert(END, execTime)
    del final_solution[:]


def _render_failure():
    """Show the generic failure message in the results box."""
    resultsTextField.delete("1.0", END)
    resultsTextField.insert(END, "Error. Something went wrong when proccesing the solution"
                                 " \nMake sure that you have succesfully read the file."
                                 "\nCheck if you have filled the paramaters fields")


def _fill_default_parameter_fields():
    """Echo the hard-coded default GA parameters into the entry fields."""
    # NOTE(review): 0.1 is shown as the fitness threshold although the run
    # actually used 0.4 -- preserved from the original; confirm intent.
    crossoverRateTextField.delete("1.0", END)
    crossoverRateTextField.insert(END, 0.5)
    mutationRateTextField.delete("1.0", END)
    mutationRateTextField.insert(END, 0.2)
    fitnessTextField.delete("1.0", END)
    fitnessTextField.insert(END, 0.1)
    populationSizeTextField.delete("1.0", END)
    populationSizeTextField.insert(END, 1000)
    precisionTextField.delete("1.0", END)
    precisionTextField.insert(END, 0.003)


def startComputation():
    """Handler for the "Find Solutions" button.

    Reads the filename and (unless "Use default parameter values" is
    checked) the GA parameters from the GUI, runs the selected algorithm
    and renders the solution, its fitness and the execution time.

    Refactor notes: the two nearly identical branches of the original now
    share the rendering/error helpers above, the shadowed builtin ``str``
    was removed and the bare ``except:`` clauses were narrowed to
    ``except Exception:``.
    """
    filename = inputTextField.get("1.0", END).strip()
    if checkBoxValue.get() == 1:
        # Default-parameter path: run with hard-coded settings and echo
        # them back into the parameter fields.
        try:
            algorithm = _selected_algorithm()
            if algorithm is None:
                _show_algorithm_prompt()
                return
            final_solution, fit, execTime = algorithm(filename, 1000, 0.5, 0.2, 0.4, 0.003)
            _fill_default_parameter_fields()
            _render_results(final_solution, fit, execTime)
        except Exception:
            _render_failure()
    else:
        # User-supplied parameters: validate the entry fields first.
        try:
            crossRate = float(crossoverRateTextField.get("1.0", END))
            mutationRate = float(mutationRateTextField.get("1.0", END))
            fitness = float(fitnessTextField.get("1.0", END))
            popSize = int(populationSizeTextField.get("1.0", END))
            precision = float(precisionTextField.get("1.0", END))
        except Exception:
            resultsTextField.delete("1.0", END)
            resultsTextField.insert(END, "Please make sure that you completed the parameter fields.")
            return
        try:
            algorithm = _selected_algorithm()
            if algorithm is None:
                _show_algorithm_prompt()
                return
            final_solution, fit, execTime = algorithm(filename, popSize, crossRate, mutationRate, fitness, precision)
            _render_results(final_solution, fit, execTime)
        except Exception:
            _render_failure()
def displaySystem():
    """Handler for "Read File": show the equation file in the preview box."""
    path = inputTextField.get("1.0", END).strip()
    try:
        with open(path) as source:
            lines = source.readlines()
        showSystemTextField.delete("1.0", END)
        for line in lines:
            showSystemTextField.insert(END, line)
    except OSError:
        showSystemTextField.delete("1.0", END)
        showSystemTextField.insert("1.0", "Invalid filename/file does not exist")
#++++++++++++++++++++++++++++ GUI LAYOUT +++++++++++++++++++++++++++
# Root window and the Tk variables bound to the checkbox/radio buttons.
top = Tk()
top.title("Equations System Solver")
top.geometry("1600x500")
checkBoxValue = IntVar()
radioButtuonValue = IntVar()
#++++++++++++ WIDGETS +++++
#___LABLES__
entryLable = Label(top,text="File name:")
# NOTE(review): this scrollbar is created but never attached to a widget
# or placed in the grid.
S = Scrollbar(top)
#____TEXT FIELDS____
inputTextField = Text(top, height=1, bd=3,width=60)
resultsTextField = Text(top, height=20,bd=3, width=60)
showSystemTextField = Text(top,height=20,bd=3,width =60)
#____ENTRIES_____
crossoverRateTextField = Text(top,bd = 2,width=10 ,height = 1)
mutationRateTextField = Text(top,bd = 2,width=10,height = 1)
precisionTextField = Text(top,bd = 2,width=10,height = 1)
populationSizeTextField = Text(top, bd = 2,width=10,height = 1)
fitnessTextField = Text(top,bd = 2,width=10,height = 1)
#_____BUTTONS____
readFileButton = Button(top, text="Read File", command= displaySystem)
startButton = Button(top, text ="Find Solutions", command = startComputation)
#stopButton = Button(top,text = "Stop",width=10)
defaultValuesCheckbox = Checkbutton(top, text ="Use default parameter values", variable = checkBoxValue,height = 4)
normalGA_RadioButton = Radiobutton(top,text = "Use Basic Genetic Algorithm", value=1, variable = radioButtuonValue)
improvedGA_RadioButton = Radiobutton(top,text = "Use Improved Genetic Algorithm", value=2, variable = radioButtuonValue)
#-------------LAYOUT------------
# Row 0: filename entry; column 1: parameter entries; column 3: system
# preview; column 5: results; row 6: mode toggles.
entryLable.grid(row=0,column=2, sticky=W,pady=10,padx = 5)
inputTextField.grid(row=0,column=3)
readFileButton.grid(row=0,column=4,sticky=W)
Label(top,text="Population Size:").grid(row=1,sticky=E,pady=5,padx = 5)
populationSizeTextField.grid(row=1,column =1)
Label(top,text="Fitness treshold:").grid(row=2,sticky=E,pady=5,padx = 5)
fitnessTextField.grid(row=2,column =1)
Label(top,text="Crossover Rate:").grid(row=3,sticky=E,pady=5,padx = 5)
crossoverRateTextField.grid(row=3,column =1)
Label(top,text="Mutation Rate:").grid(row=4,sticky=E,pady=5,padx = 5)
mutationRateTextField.grid(row=4,column =1)
Label(top,text="Precision:").grid(row=5,sticky=E,pady=5,padx = 10)
precisionTextField.grid(row=5,column =1)
showSystemTextField.grid(row=1,column=3,rowspan=4)
Label(top,text="System of equations to be solved").grid(row=5,column=3,pady=5)
startButton.grid(row=2,column = 4,padx = 20)
resultsTextField.grid(row=1,column = 5, rowspan=4)
Label(top,text="Solutions").grid(row=5,column=5,pady=5)
defaultValuesCheckbox.grid(row=6,column=1,pady=10)
normalGA_RadioButton.grid(row=6, column = 3, sticky=E)
improvedGA_RadioButton.grid(row=6, column = 5, sticky =W)
#stopButton.grid(row=3,column=4,padx=20)
# Blocks until the window is closed.
top.mainloop()
| {"/helperFunctions.py": ["/parsing_functions.py"], "/main.py": ["/helperFunctions.py", "/generator.py"], "/gui.py": ["/main.py"]} |
57,587 | ocolisanrares97/Equation-System-Solver-with-Python | refs/heads/master | /generator.py | import random
class Generator:
    """Random test-system generator.

    Prompts for a size ``n`` on construction, then builds an ``n``-unknown
    linear system with integer coefficients and solutions in [-10, 10],
    so the GA's answer can be checked against a known solution.
    """

    def __init__(self):
        self.n = int(input("Give number of equations/unknowns"))
        self.solutions = []
        self.coef = []
        self.results = []
        self.boot()

    def boot(self):
        """Draw random solutions and coefficients, then derive the RHS."""
        for _ in range(self.n):
            self.solutions.append(random.randint(-10, 10))
        for _ in range(self.n):
            row = [random.randint(-10, 10) for _ in range(self.n)]
            self.coef.append(row)
        for row in self.coef:
            rhs = sum(c * s for c, s in zip(row, self.solutions))
            self.results.append(rhs)

    def print_sistem(self):
        """Pretty-print the generated system and its known solution."""
        for i in range(self.n):
            for j in range(self.n):
                if self.coef[i][j] > 0:
                    print("+" + str(self.coef[i][j]) + "*x" + str(j), end=" ")
                else:
                    print(str(self.coef[i][j]) + "*x" + str(j), end=" ")
            print("= " + str(self.results[i]))
        print(self.solutions)
57,590 | Eugene208029488/FullStackProject-Movie-Trailer | refs/heads/master | /media.py | class Movie():
def __init__(self, movie_title, movie_storyline, poster_image
, trailer_youtube):
self.title=movie_title
self.sotryline=movie_storyline
self.poster_image_url = poster_image
self.trailer_youtube_url = trailer_youtube
| {"/entertainment_center.py": ["/media.py"]} |
57,591 | Eugene208029488/FullStackProject-Movie-Trailer | refs/heads/master | /entertainment_center.py | import media
import fresh_tomatoes
# Catalogue of Movie instances rendered into the trailer page below.
toy_story = media.Movie("Toy Story",
                        "A story about a toy",
                        "http://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg",
                        "https://www.youtube.com/watch?v=vwyZH85NQC4")
avatar = media.Movie("Avatar",
                     "A marine in an alien planet",
                     "http://upload.wikimedia.org/wikipedia/id/b/b0/Avatar-Teaser-Poster.jpg",
                     "https://www.youtube.com/watch?v=5PSNL1qE6VY")
civil_war = media.Movie("Marvel: Civil War",
                        "Fight between 2 groups of superheroes.",
                        "https://upload.wikimedia.org/wikipedia/en/5/53/Captain_America_Civil_War_poster.jpg",
                        "https://www.youtube.com/watch?v=43NWzay3W4s")
bvs = media.Movie("Batman vs Superman",
                  "Batman vs Superman",
                  "https://upload.wikimedia.org/wikipedia/en/2/20/Batman_v_Superman_poster.jpg",
                  "https://www.youtube.com/watch?v=x177jhcMSqg")
martian = media.Movie("Martian",
                      "Astronout stuck in Mars.",
                      "https://upload.wikimedia.org/wikipedia/en/c/cd/The_Martian_film_poster.jpg",
                      "https://www.youtube.com/watch?v=ej3ioOneTy8")
deadpool = media.Movie("Deadpool",
                       "A former Special Forces operative turned mercenary "
                       " is subjected to a rogue experiment that leaves him "
                       "with accelerated healing powers, adopting the alter ego Deadpool.",
                       "https://upload.wikimedia.org/wikipedia/en/4/46/Deadpool_poster.jpg",
                       "https://www.youtube.com/watch?v=8wrdUol28hM")
# movies is an array with the Movie objects.
movies = [toy_story, avatar, civil_war, bvs, martian, deadpool]
# Calls open_movies_page() function and passing movies list of Movie objects.
# This function will generate a HTML based on the list of Movie objects.
fresh_tomatoes.open_movies_page(movies)
| {"/entertainment_center.py": ["/media.py"]} |
57,592 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/models/abstract_model.py | import uuid as uuid
from django.utils import timezone
from django.db import models
from django.utils.translation import gettext_lazy as _
class AbstractModelManager(models.Manager):
    """Default manager for AbstractModel: hides soft-deleted rows."""
    def get_queryset(self):
        # Soft-delete support: rows flagged ``is_deleted`` are excluded
        # from every default queryset instead of being removed from the DB.
        return super(AbstractModelManager, self).get_queryset().exclude(is_deleted=True)
class AbstractModel(models.Model):
    """Abstract base model shared by the project's concrete models.

    Adds a UUID, a creation timestamp and a soft-delete flag; the default
    manager (AbstractModelManager) hides rows where ``is_deleted`` is True.
    """
    # Stable external identifier, generated once and never editable.
    uuid = models.UUIDField(default=uuid.uuid4, editable=False, verbose_name=_(u'UUID'))
    # Creation timestamp; defaults to "now" rather than auto_now_add so it
    # can be set explicitly when needed.
    created_at = models.DateTimeField(default=timezone.now, verbose_name=_(u'Время создания'))
    # Soft-delete flag consumed by AbstractModelManager.get_queryset().
    is_deleted = models.BooleanField(default=False, verbose_name=_(u'Удалена'))
    objects = AbstractModelManager()
    class Meta:
        db_table = u'abstract'
        verbose_name = u'Абстрактная модель'
        verbose_name_plural = u'Абстрактные модели'
        abstract = True
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,593 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/models/log.py | from .abstract_model import AbstractModel
from django.db import models
from django.utils.translation import gettext_lazy as _
class Log(AbstractModel):
    """Audit-trail entry: free-form text tied to the user who acted.

    Inherits uuid/created_at/soft-delete behaviour from AbstractModel;
    newest entries come first (see Meta.ordering).
    """
    text = models.TextField(verbose_name=_(u'Текст'))
    # Deleting the user cascades and removes their log entries.
    user = models.ForeignKey('users.User', on_delete=models.CASCADE, related_name='log_user',
                             verbose_name=_(u'Пользователь'))
    class Meta:
        db_table = u'logs'
        verbose_name = u'История действия'
        verbose_name_plural = u'История действий'
        ordering = ['-created_at']
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,594 | matrixalex/DemoDjango | refs/heads/master | /src/apps/users/models/user.py | from django.contrib.auth.models import AbstractUser, BaseUserManager, Permission
from src.apps.core.models import AbstractModel, AbstractModelManager
from django.db import models
from typing import List
class UserManager(AbstractModelManager, BaseUserManager):
    """Manager that creates regular users and superusers keyed by email."""

    def create_user(self, email, first_name, last_name, password=None):
        """Create and save a User with the given email, name and password.

        :param email: login email; also mirrored into ``username``
        :param first_name: given name
        :param last_name: family name
        :param password: plaintext password to hash; ``None`` yields an unusable one
        :raises ValueError: if ``email`` is empty
        :return: the saved User instance
        """
        if not email:
            raise ValueError('Users must have an email address')
        email = self.normalize_email(email)
        user = self.model(
            email=email,
            username=email,  # username mirrors email (see User.save)
            first_name=first_name,
            last_name=last_name,
        )
        user.set_password(password)
        # Use the manager's database alias for consistency with create_superuser.
        user.save(using=self._db)
        return user

    def create_superuser(self, email, password, first_name, last_name):
        """Create and save a superuser with every permission granted.

        Fix: the original saved the user three times and hashed the password
        twice; flags are now set once, followed by a single extra save.
        """
        user = self.create_user(
            email,
            password=password,
            first_name=first_name,
            last_name=last_name,
        )
        user.is_superuser = True
        user.is_staff = True
        user.save(using=self._db)
        # Grant every permission explicitly (User.has_perm also short-circuits
        # on is_superuser, so this is belt-and-braces).
        user.user_permissions.set(Permission.objects.all())
        return user

    def get_users(self):
        """Return all non-superuser accounts."""
        return self.get_queryset().exclude(is_superuser=True)
class User(AbstractModel, AbstractUser):
    """
    Application user; authentication is by email (``USERNAME_FIELD``).
    """
    MAX_LENGTH = 100  # shared cap for the name/email columns
    first_name = models.CharField(default=u'Имя', max_length=MAX_LENGTH,
                                  blank=False, null=False, verbose_name=u'Имя')
    last_name = models.CharField(default=u'Фамилия', max_length=MAX_LENGTH,
                                 blank=False, null=False, verbose_name=u'Фамилия')
    middle_name = models.CharField(default='', max_length=MAX_LENGTH,
                                   blank=True, null=False, verbose_name=u'Отчество')
    email = models.EmailField(unique=True, null=False, max_length=MAX_LENGTH, verbose_name=u'Email пользователя')
    REQUIRED_FIELDS = ['first_name', 'last_name']
    USERNAME_FIELD = 'email'
    objects = UserManager()

    class Meta:
        db_table = u'users'
        verbose_name = u'Пользователь'
        verbose_name_plural = u'Пользователи'
        ordering = ['-created_at']
        # Fix: Django expects (codename, human_name) pairs; the original had
        # them swapped, making the Russian labels the codenames. Requires a
        # migration to take effect in the DB.
        permissions = (
            ('user_change', 'Редактирование пользователя'),
            ('user_add', 'Добавление пользователя'),
            ('user_delete', 'Удаление пользователя')
        )

    def __str__(self):
        """Full name: "Last First [Middle]"."""
        result = self.last_name + ' ' + self.first_name
        if self.middle_name:
            result += ' ' + self.middle_name
        return result

    def save(self, *args, **kwargs):
        # Keep username in lockstep with email, since email is the login field.
        if self.username != self.email:
            self.username = self.email
        super(User, self).save(*args, **kwargs)

    def has_perm(self, perm, obj=None):
        """Superusers can do everything; others need an explicit permission."""
        if self.is_superuser:
            return True
        return perm in get_user_permissions(self)

    def has_module_perms(self, app_label):
        """Superusers see every app; others need at least one permission in it."""
        if self.is_superuser:
            return True
        return app_label in get_user_module_permissions(self)

    def __eq__(self, other):
        # Strict type check (not isinstance) preserved from the original.
        if type(other) != User:
            return False
        return self.id == other.id

    # Fix: defining __eq__ implicitly sets __hash__ to None, which made users
    # unhashable (breaks membership in sets / use as dict keys, e.g. in the
    # admin); restore the model hash based on the primary key.
    __hash__ = AbstractUser.__hash__
def get_user_permissions(user: User) -> List[str]:
    """Return the user's explicit permissions as ``"app_label.codename"`` strings.

    :param user: User whose permission set is inspected
    :return: List[str]
    """
    return [
        perm.content_type.app_label + '.' + perm.codename
        for perm in user.user_permissions.all()
    ]
def get_user_module_permissions(user: User) -> List[str]:
    """Return the app labels in which the user holds at least one permission.

    :param user: User whose permission set is inspected
    :return: List[str] (may contain duplicates, as in the original)
    """
    return [perm.content_type.app_label for perm in user.user_permissions.all()]
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,595 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/apps.py | from django.apps import AppConfig
class Config(AppConfig):
    """App configuration for the core app."""
    name = 'src.apps.core'
    label = 'core'
    def ready(self):
        # NOTE(review): verbose_name is normally declared as a class attribute;
        # assigning it in ready() works but is unconventional — confirm intent.
        self.verbose_name = 'Ядро'
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,596 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/views.py | from django.shortcuts import render, redirect
from django.views import View
from . import services
class MyDebtsView(View):
    """
    Show the authenticated user's debt dashboard (data built by the service layer).
    """
    def get(self, request):
        """Render the dashboard, bouncing anonymous visitors to the login page."""
        if not request.user.is_authenticated:
            return redirect('/auth/login')
        data = {'debts': services.get_user_debts_data(request.user), 'user': request.user}
        # Fix: removed leftover debug print(data) that dumped the full template
        # context to stdout on every request.
        return render(request, 'index.html', context=data)
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,597 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/admin/models/__init__.py | from .base_admin import BaseModelAdmin
from .log_admin import LogAdmin
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,598 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/models/__init__.py | from .abstract_model import AbstractModel, AbstractModelManager
from .log import Log
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,599 | matrixalex/DemoDjango | refs/heads/master | /src/apps/users/admin/__init__.py | from django.contrib.admin import site
from django.contrib.auth.models import Group
from ..models import User
from .models import UserAdmin
site.register(User, UserAdmin)  # expose users in the admin via the custom ModelAdmin
site.unregister(Group)  # hide Django's built-in Group model from the admin
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,600 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/admin/models/__init__.py | from .debt_admin import DebtAdmin
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,601 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/models/__init__.py | from .debt import Debt
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,602 | matrixalex/DemoDjango | refs/heads/master | /src/apps/users/services.py | from typing import Union, Optional, NamedTuple
from .models import User
from django.contrib.auth import authenticate as auth, login
from ..core.errors import ErrorMessages
class UserAuthData(NamedTuple):
    """Result of an authentication attempt."""
    status: bool  # True when login succeeded
    message: str  # error text on failure, empty string on success
    user: Optional[User]  # the authenticated user, or None on failure
def get_user_by_unique_field(user_id: Union[str, int] = None, email: str = None) -> Optional[User]:
    """Fetch a user by primary key or by email.

    ``user_id`` takes precedence when both are supplied; a missing record (or
    neither argument given) yields ``None`` rather than raising.

    :param user_id: str, int
    :param email: str
    :return: User, None
    """
    if user_id:
        lookup = {'pk': user_id}
    elif email:
        lookup = {'email': email}
    else:
        return None
    try:
        return User.objects.get(**lookup)
    except User.DoesNotExist:
        return None
def authenticate(request, email: str, password: str) -> UserAuthData:
    """Log a user into the session by email/password.

    :param request: current HTTP request (needed by Django's login machinery)
    :param email: str
    :param password: str
    :return: UserAuthData — status False with an error message on rejection,
        status True with the user on success
    """
    candidate = auth(request, email=email, password=password)
    if candidate is None:
        # Credentials rejected: report without revealing which field was wrong.
        return UserAuthData(status=False, message=ErrorMessages.WRONG_EMAIL_OR_PASSWORD, user=None)
    login(request, candidate)
    return UserAuthData(status=True, message='', user=candidate)
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,603 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/admin/models/debt_admin.py | from src.apps.core.admin import BaseModelAdmin
from src.apps.debts.models import Debt
from django import forms
class DebtChangeForm(forms.ModelForm):
    """Admin form for creating/editing a Debt."""
    def __init__(self, *args, **kwargs):
        super(DebtChangeForm, self).__init__(*args, **kwargs)
    class Meta:
        model = Debt
        fields = ['text', 'price', 'user', 'to_user']
    def save(self, *args, **kwargs):
        # Default a blank creditor to the acting user.
        debt = super(DebtChangeForm, self).save(*args, **kwargs)
        if not debt.to_user:
            # NOTE(review): relies on `current_user` being attached to the form
            # externally; nothing in this file's visible code sets it, so a
            # blank to_user would raise AttributeError — confirm the admin
            # injects it before save.
            debt.to_user = self.current_user
        debt.save()
        return debt
class DebtAdmin(BaseModelAdmin):
    """Admin for Debt: defaults the creditor to the acting admin and hides
    other users' debts from non-superusers."""
    list_display = ('to_user', 'user', 'price', 'text', 'created_at')
    form = DebtChangeForm
    add_form = DebtChangeForm

    def get_form(self, request, obj=None, change=False, **kwargs):
        """Return the form class with the acting user attached.

        Fix: DebtChangeForm.save reads ``self.current_user`` when ``to_user``
        is blank, but nothing ever set it (AttributeError). get_form returns a
        per-request form class, so attaching the attribute here is safe.
        """
        form = super(DebtAdmin, self).get_form(request, obj, change, **kwargs)
        form.current_user = request.user
        return form

    def get_queryset(self, request):
        """Superusers see every debt; others only debts they take part in."""
        user = request.user
        if user.is_superuser:
            return Debt.objects.all()
        # Fix: the original called `.objects` on a QuerySet (AttributeError);
        # use the manager's helper directly.
        return Debt.objects.filter_by_user(user)
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,604 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/admin/models/log_admin.py | from .base_admin import BaseModelAdmin
class LogAdmin(BaseModelAdmin):
    """Admin listing for audit-log entries."""
    list_display = ('user', 'text', 'created_at')
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,605 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/errors.py | from django.utils.translation import gettext_lazy as _
class ErrorMessages:
    """User-facing error strings (lazily translated)."""
    # Russian: "Incorrect email or password entered".
    WRONG_EMAIL_OR_PASSWORD = _(u'Неправильно введен email или пароль')
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,606 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/services.py | from .models import Debt
from src.apps.users.models import User
from django.db.models import Q
class DebtData:
    """Flat view-model for one debt row shown on the dashboard."""
    def __init__(self, debt: Debt, is_positive: bool):
        self.text = debt.text
        self.price = debt.price  # absolute amount; direction is carried by is_positive
        # NOTE(review): computed by UserDebtsData.add_debt as False when the
        # viewer is the creditor (to_user) — that looks inverted relative to
        # the field names; confirm the intended semantics.
        self.is_positive = is_positive
        self.created_at = debt.created_at
class UserDebtsData:
    """Accumulates the debts shared with one counterparty.

    ``summary`` is the signed running total from ``user``'s perspective and
    ``is_positive`` tracks its current sign.
    """
    def __init__(self, user: User):
        self.summary = 0          # signed running total
        self.is_positive = True   # sign of `summary` (True while >= 0)
        self.debts = []           # list of DebtData rows
        self.user = user          # perspective user for sign computation

    def add_debt(self, debt: Debt):
        """Fold one debt into the group, updating the total and its sign."""
        # NOTE(review): to_user is the creditor, yet the flag goes False when
        # self.user IS the creditor — semantics look inverted vs. the field
        # names; behavior preserved, confirm intent.
        is_positive = debt.to_user != self.user
        self.debts.append(DebtData(debt, is_positive))
        if is_positive:
            self.summary += debt.price
        else:
            self.summary -= debt.price
        # Fix: the flag previously latched False forever once the total dipped
        # below zero, even if later debts brought it back positive.
        self.is_positive = self.summary >= 0
class SummaryDebtData:
    """Top-level dashboard data: grand total, debt count and per-user groups."""
    def __init__(self):
        self.summary = 0      # signed grand total across all counterparties
        self.debts_count = 0  # total number of debt rows
        self.users = []       # list of UserDebtsData, one per counterparty
def get_user_debts_data(user: User) -> SummaryDebtData:
    """
    Build the summary of all debts *user* participates in, grouped by counterparty.
    :param user: User whose dashboard is being built
    :return: SummaryDebtData (empty when the user has no debts)
    """
    result = SummaryDebtData()
    debts = Debt.objects.filter_by_user(user)
    if len(debts) == 0:
        return result
    # Collect every distinct participant across the user's debts.
    unique_users = []
    for debt in debts:
        if debt.user not in unique_users:
            unique_users.append(debt.user)
        if debt.to_user not in unique_users:
            unique_users.append(debt.to_user)
    # `user` is on one side of every debt (filter_by_user guarantees it), so
    # this remove() cannot raise here. NOTE(review): debt.user/to_user are
    # nullable, so None can land in unique_users and get its own group —
    # confirm that is intended.
    unique_users.remove(user)
    for unique_user in unique_users:
        # NOTE(review): the group is keyed by `unique_user` but stores the
        # requesting `user` (add_debt needs it for sign computation); callers
        # therefore cannot read the counterparty off the group — confirm.
        user_debt_data = UserDebtsData(user)
        user_debts = debts.filter(Q(user=unique_user) | Q(to_user=unique_user))
        for debt in user_debts:
            user_debt_data.add_debt(debt)
        result.users.append(user_debt_data)
    for user_debt_data in result.users:
        result.summary += user_debt_data.summary
        result.debts_count += len(user_debt_data.debts)
        # Per-group amounts are displayed as magnitudes; direction stays in is_positive.
        user_debt_data.summary = abs(user_debt_data.summary)
    return result
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,607 | matrixalex/DemoDjango | refs/heads/master | /src/apps/users/views.py | from django.contrib.auth import logout
from django.views import View
from django.shortcuts import render, redirect
from . import services as user_services
class LoginView(View):
    """
    Email/password sign-in.
    """

    def get(self, request):
        """Show the login form; already-authenticated users go straight to /debts."""
        if request.user.is_authenticated:
            return redirect('/debts')
        return render(request, 'login.html')

    def post(self, request):
        """Validate credentials and route by role (superusers to the admin)."""
        if request.user.is_authenticated:
            return redirect('/debts')
        email = request.POST.get('email', '')
        password = request.POST.get('password', '')
        data = user_services.authenticate(request, email, password)
        if not data.status:
            return render(request, 'login.html', context={'error': data.message})
        return redirect('/admin' if data.user.is_superuser else '/debts')
class LogoutView(View):
    """
    Log the user out of the system.
    """
    def get(self, request):
        # Clears the session, then returns to the login page.
        logout(request)
        return redirect('/auth/login')
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,608 | matrixalex/DemoDjango | refs/heads/master | /src/apps/urls.py | from django.urls import path, include
urlpatterns = [
    path('', include('src.apps.core.urls')),        # root redirector
    path('auth/', include('src.apps.users.urls')),  # login / logout
    # NOTE(review): no trailing slash, unlike 'auth/'; the views redirect to
    # '/debts' so this is self-consistent — confirm intended.
    path('debts', include('src.apps.debts.urls')),
]
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,609 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/models/debt.py | from django.db.models import Q
from src.apps.core.models import AbstractModel, AbstractModelManager
from django.db import models
from django.utils.translation import gettext_lazy as _
class DebtManager(AbstractModelManager):
    """Manager with lookups that span both sides of a debt."""

    def filter_by_user(self, user):
        """Return debts where *user* is either the debtor or the creditor.

        :param user: User
        :return: QuerySet
        """
        involves_user = Q(user=user) | Q(to_user=user)
        return self.get_queryset().filter(involves_user)
class Debt(AbstractModel):
    """
    A debt: ``user`` (debtor) owes ``price`` to ``to_user`` (creditor).
    """
    text = models.TextField(verbose_name=_(u'Описание'), blank=True)  # optional description
    price = models.PositiveIntegerField(verbose_name=_(u'Сумма'))  # amount owed; non-negative whole units
    user = models.ForeignKey('users.User', on_delete=models.CASCADE, related_name='debt_user',
                             blank=True, null=True, verbose_name=_(u'Должник'))  # debtor (nullable)
    to_user = models.ForeignKey('users.User', on_delete=models.CASCADE, related_name='debt_to_user',
                                blank=True, null=True, verbose_name=_(u'Кредитор'))  # creditor (nullable)
    objects = DebtManager()
    class Meta:
        db_table = 'debts'
        verbose_name = _('Долг')
        verbose_name_plural = _('Долги')
        ordering = ['-created_at']  # newest first
    def __str__(self):
        # Russian: "Debt of <debtor> to <creditor>" — runtime string, kept verbatim.
        return 'Долг ' + str(self.user) + ' пользователю ' + str(self.to_user)
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,610 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/views.py | from django.shortcuts import redirect
def index(request):
    """
    Site root: route the visitor to the right landing page.
    :param request: current HTTP request
    :return: HttpResponse (always a redirect)
    """
    current = request.user
    if not current.is_authenticated:
        return redirect('/auth/login')
    # Superusers land in the admin; everyone else on their debt dashboard.
    return redirect('/admin' if current.is_superuser else '/debts')
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,611 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/admin/actions.py | from ..models import Log
def delete_selected(modeladmin, request, queryset):
    """Admin action: soft-delete every selected object and log each deletion.

    The requesting admin's own User row is skipped so an admin cannot
    soft-delete their own account.
    """
    for obj in queryset:
        # The original compared str(type(obj)) against the class repr string
        # "<class 'src.apps.users.models.user.User'>"; build the same fully
        # qualified name explicitly so the check no longer depends on repr
        # formatting.
        obj_cls = type(obj)
        qualified_name = f"{obj_cls.__module__}.{obj_cls.__qualname__}"
        if qualified_name == 'src.apps.users.models.user.User' and obj.id == request.user.id:
            continue
        # Soft delete: flag the row instead of removing it.
        obj.is_deleted = True
        obj.save()
        Log.objects.create(user=request.user, text=f'Пользователь {request.user} удалил {obj}')
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,612 | matrixalex/DemoDjango | refs/heads/master | /src/apps/core/admin/__init__.py | from .models import BaseModelAdmin, LogAdmin
from django.contrib.admin import site
from ..models import Log
# Make the Log model manageable in the Django admin via its custom LogAdmin.
site.register(Log, LogAdmin)
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,613 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/__init__.py | default_app_config = 'src.apps.debts.apps.Config'
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,614 | matrixalex/DemoDjango | refs/heads/master | /src/apps/users/urls.py | from django.urls import path, include
from . import views
# Authentication entry points for the users app.
urlpatterns = [
    path('login', views.LoginView.as_view()),
    path('logout', views.LogoutView.as_view())
]
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,615 | matrixalex/DemoDjango | refs/heads/master | /src/apps/debts/admin/__init__.py | from ..models import Debt
from .models import DebtAdmin
from django.contrib.admin import site
# Register the Debt model with its custom admin configuration.
site.register(Debt, DebtAdmin)
| {"/src/apps/core/models/log.py": ["/src/apps/core/models/abstract_model.py"], "/src/apps/users/models/user.py": ["/src/apps/core/models/__init__.py"], "/src/apps/debts/views.py": ["/src/apps/debts/__init__.py"], "/src/apps/core/admin/models/__init__.py": ["/src/apps/core/admin/models/log_admin.py"], "/src/apps/core/models/__init__.py": ["/src/apps/core/models/abstract_model.py", "/src/apps/core/models/log.py"], "/src/apps/debts/admin/models/__init__.py": ["/src/apps/debts/admin/models/debt_admin.py"], "/src/apps/debts/models/__init__.py": ["/src/apps/debts/models/debt.py"], "/src/apps/users/services.py": ["/src/apps/core/errors.py"], "/src/apps/debts/admin/models/debt_admin.py": ["/src/apps/core/admin/__init__.py", "/src/apps/debts/models/__init__.py"], "/src/apps/debts/services.py": ["/src/apps/debts/models/__init__.py"], "/src/apps/debts/models/debt.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/actions.py": ["/src/apps/core/models/__init__.py"], "/src/apps/core/admin/__init__.py": ["/src/apps/core/admin/models/__init__.py", "/src/apps/core/models/__init__.py"], "/src/apps/debts/admin/__init__.py": ["/src/apps/debts/models/__init__.py", "/src/apps/debts/admin/models/__init__.py"]} |
57,616 | grobalex/msgraph | refs/heads/main | /config.py | CLIENT_ID = ''
CLIENT_SECRET = ''
REDIRECT_URI = 'http://localhost:5000/login/authorized'
AUTHORITY_URL = 'https://login.microsoftonline.com/common'
AUTH_ENDPOINT = '/oauth2/v2.0/authorize'
TOKEN_ENDPOINT = '/oauth2/v2.0/token'
RESOURCE = 'https://graph.microsoft.com/'
API_VERSION = 'v1.0'
SCOPES = ['User.Read'] # Add other scopes/permissions as needed.

# This code can be removed after configuring CLIENT_ID and CLIENT_SECRET above.
# NOTE(review): with the credentials initialised to '', the substring test
# below can never fire, so empty credentials pass silently — confirm whether
# the guard should also reject empty values.
if 'ENTER_YOUR' in CLIENT_ID or 'ENTER_YOUR' in CLIENT_SECRET:
    print('ERROR: config.py does not contain valid CLIENT_ID and CLIENT_SECRET')
    import sys
    sys.exit(1)
| {"/app.py": ["/config.py"]} |
57,617 | grobalex/msgraph | refs/heads/main | /app.py | import os
import uuid

import bottle
import requests_oauthlib

import config

# Single module-level OAuth2 session shared by every route; it also carries
# the CSRF `state` between /login and /login/authorized via `auth_state`.
MSGRAPH = requests_oauthlib.OAuth2Session(config.CLIENT_ID,
                                          scope=config.SCOPES,
                                          redirect_uri=config.REDIRECT_URI)

# Enable non-HTTPS redirect URI for development/testing.
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'

# Allow token scope to not match requested scope. (Other auth libraries allow
# this, but Requests-OAuthlib raises exception on scope mismatch by default.)
os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = '1'
os.environ['OAUTHLIB_IGNORE_SCOPE_CHANGE'] = '1'

bottle.TEMPLATE_PATH = ['./bbtl_static/templates']

@bottle.route('/')
@bottle.view('homepage.html')
def homepage():
    """Render the home page."""
    return {'sample': 'Requests-OAuthlib'}

@bottle.route('/login')
def login():
    """Prompt user to authenticate: redirect to the identity provider."""
    auth_base = config.AUTHORITY_URL + config.AUTH_ENDPOINT
    authorization_url, state = MSGRAPH.authorization_url(auth_base)
    # Remember the state so the callback can verify it (CSRF protection).
    MSGRAPH.auth_state = state
    return bottle.redirect(authorization_url)

@bottle.route('/login/authorized')
def authorized():
    """Handler for the application's Redirect Uri."""
    # Reject callbacks whose state does not match the one issued in /login.
    if bottle.request.query.state != MSGRAPH.auth_state:
        raise Exception('state returned to redirect URL does not match!')
    token = MSGRAPH.fetch_token(config.AUTHORITY_URL + config.TOKEN_ENDPOINT,
                                client_secret=config.CLIENT_SECRET,
                                authorization_response=bottle.request.url)
    return token

@bottle.route('/bbtl_static/<filepath:path>')
def server_static(filepath):
    """Handler for static files, used with the development server."""
    root_folder = os.path.abspath(os.path.dirname(__file__))
    return bottle.static_file(filepath, root=os.path.join(root_folder, 'bbtl_static'))

if __name__ == '__main__':
    bottle.run(app=bottle.app(), server='wsgiref', host='localhost', port=5000)
| {"/app.py": ["/config.py"]} |
57,623 | venglov/forta-utilization-rate-agent | refs/heads/master | /src/agent.py | import json
from forta_agent import Finding, FindingType, FindingSeverity, get_json_rpc_url
from web3 import Web3
from src.constants import CTOKEN_CONTRACTS, ABI, TIME_WINDOW, UTILIZATION_RATE_TH
class UtilizationRate:
    """Tracks the utilization-rate history of one cToken inside TIME_WINDOW."""

    def __init__(self, token):
        self.token = token
        self.rates = []  # RateTimestamped samples in arrival order

    def add_rate(self, rate, timestamp):
        """Record a single utilization-rate observation."""
        self.rates.append(RateTimestamped(rate, timestamp))

    def analyze(self):
        """Prune samples older than TIME_WINDOW (relative to the newest one)
        and report whether the remaining max-min spread exceeds
        UTILIZATION_RATE_TH.

        Returns:
            tuple(bool, float): (threshold exceeded, spread)

        Raises:
            ValueError: if called with no recorded samples (unchanged from
            the original behaviour).
        """
        # Hoist the newest timestamp out of the filter: the original
        # recomputed max() over all samples for every element, making the
        # prune step quadratic.
        latest = max(entry.timestamp for entry in self.rates)
        self.rates = [entry for entry in self.rates
                      if entry.timestamp + TIME_WINDOW > latest]
        values = [entry.rate for entry in self.rates]
        dif = max(values) - min(values)
        return dif > UTILIZATION_RATE_TH, dif
return False, dif
class RateTimestamped:
    """A single utilization-rate sample paired with its observation time."""

    def __init__(self, rate, timestamp):
        self.rate, self.timestamp = rate, timestamp
def get_utilization_rate(ctoken_address, block_number):
    """Read a cToken's borrows and cash at a given block and return the
    utilization rate borrows / (cash + borrows)."""
    block_id = int(block_number)
    contract = web3.eth.contract(address=Web3.toChecksumAddress(ctoken_address), abi=abi)
    borrows = contract.functions.totalBorrowsCurrent().call(block_identifier=block_id)
    cash = contract.functions.getCash().call(block_identifier=block_id)
    return borrows / (cash + borrows)
def get_severity(dif):
    """Map an absolute utilization-rate change to a Forta severity level."""
    # Highest threshold first; falls through to Info for anything below 0.1
    # (including values that compare False against every bound).
    thresholds = (
        (0.2, FindingSeverity.Critical),
        (0.15, FindingSeverity.High),
        (0.1, FindingSeverity.Medium),
    )
    for bound, level in thresholds:
        if dif >= bound:
            return level
    return FindingSeverity.Info
def provide_handle_block(_):
    """Build the per-block handler closure used by the Forta runtime."""
    def handle_block(block_event):
        """Sample every tracked cToken's utilization rate for this block and
        emit a finding for each token whose rate swung past the threshold."""
        findings = []
        moment = block_event.block.timestamp
        for token_name, token_address in CTOKEN_CONTRACTS.items():
            rate = get_utilization_rate(token_address, block_event.block_number)
            tracker = token_rates[token_name]
            tracker.add_rate(rate, moment)
            changed, dif = tracker.analyze()
            if not changed:
                continue
            findings.append(Finding({
                'name': 'CToken Utilization Rate Significantly Changed',
                'description': f'cToken {token_name} Utilization Rate changed for {dif}',
                'alert_id': f'{token_name.capitalize()}_UT_RATE_ALERT',
                'type': FindingType.Suspicious,
                'severity': get_severity(dif),
                'metadata': {
                    'timestamp': moment,
                    'utilization_rate': rate,
                    'difference': dif,
                }
            }))
        return findings
    return handle_block
def handle_block(block_event):
    # Module-level entry point required by the Forta SDK; delegates to the
    # closure created by provide_handle_block at import time.
    return real_handle_block(block_event)
# Module-level wiring: executed once when the Forta runtime imports the agent.
web3 = Web3(Web3.HTTPProvider(get_json_rpc_url()))
abi = json.loads(ABI)
# One UtilizationRate tracker per cToken. Only the token names are needed,
# so iterate keys directly instead of .items() with a discarded value, and
# build the dict in one comprehension rather than repeated |= merges.
token_rates = {token: UtilizationRate(token) for token in CTOKEN_CONTRACTS}
real_handle_block = provide_handle_block(web3)
| {"/src/agent.py": ["/src/constants.py"], "/src/agent_test.py": ["/src/agent.py"]} |
57,624 | venglov/forta-utilization-rate-agent | refs/heads/master | /src/constants.py | CTOKEN_CONTRACTS = {
"cAAVE": "0xe65cdb6479bac1e22340e4e755fae7e509ecd06c",
"cBAT": "0x6c8c6b02e7b2be14d4fa6022dfd6d75921d90e4e",
"cCOMP": "0x70e36f6bf80a52b3b46b3af8e106cc0ed743e8e4",
"cDAI": "0x5d3a536e4d6dbd6114cc1ead35777bab948e3643",
"cETH": "0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5",
"cLINK": "0xface851a4921ce59e912d19329929ce6da6eb0c7",
"cMKR": "0xface851a4921ce59e912d19329929ce6da6eb0c7",
"cREP": "0x158079ee67fce2f58472a96584a73c7ab9ac95c1",
"cSAI": "0xf5dce57282a584d2746faf1593d3121fcac444dc",
"cSUSHI": "0x4b0181102a0112a2ef11abee5563bb4a3176c9d7",
"cTUSD": "0x12392f67bdf24fae0af363c24ac620a2f67dad86",
"cUNI": "0x35a18000230da775cac24873d00ff85bccded550",
"cUSDC": "0x39aa39c021dfbae8fac545936693ac917d5e7563",
"cUSDT": "0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9",
"cWBTC": "0xc11b1268c1a384e55c48c2391d8d480264a3a7f4",
"cWBTC2": "0xccf4429db6322d5c611ee964527d42e5d685dd6a",
"cYFI": "0x80a2ae356fc9ef4305676f7a3e2ed04e12c33946",
"cZRX": "0xb3319f5d18bc0d84dd1b4825dcde5d5f7266d407",
}
TIME_WINDOW = 3600 # in seconds
UTILIZATION_RATE_TH = 0.1
ABI = '''[
{
"constant": false,
"inputs": [],
"name": "totalBorrowsCurrent",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "getCash",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
}]''' | {"/src/agent.py": ["/src/constants.py"], "/src/agent_test.py": ["/src/agent.py"]} |
57,625 | venglov/forta-utilization-rate-agent | refs/heads/master | /src/agent_test.py | from src.agent import UtilizationRate
class TestUtilizationRate:
    """Unit tests for UtilizationRate's sliding-window spread analysis."""

    def test_removes_old_logs(self):
        """Samples older than TIME_WINDOW relative to the newest are pruned."""
        tracker = UtilizationRate("SMILE")
        for value, moment in [(0.1, 50), (0.1, 100), (0.11, 120),
                              (0.12, 140), (0.12, 160), (0.12, 3701)]:
            tracker.add_rate(value, moment)
        _, _ = tracker.analyze()
        assert len(tracker.rates) == 4

    def test_returns_false_if_rate_dif_below_th(self):
        """A spread at or under the threshold must not flag a change."""
        tracker = UtilizationRate("HAPPY")
        for value, moment in [(0.1, 50), (0.1, 100), (0.12, 120),
                              (0.12, 140), (0.1, 160), (0.12, 3701)]:
            tracker.add_rate(value, moment)
        changed, _ = tracker.analyze()
        assert not changed

    def test_returns_true_if_rate_dif_above_th(self):
        """A spread above the threshold must flag a change."""
        tracker = UtilizationRate("SUNNY")
        for value, moment in [(0.12, 50), (0.1, 100), (0.2, 120),
                              (0.13, 140), (0.12, 160), (0.23, 3701)]:
            tracker.add_rate(value, moment)
        changed, _ = tracker.analyze()
        assert changed
| {"/src/agent.py": ["/src/constants.py"], "/src/agent_test.py": ["/src/agent.py"]} |
57,626 | niki4smirn/test | refs/heads/master | /main.py | import numpy as np
import cv2
import trajectory_builder as tb
class Nail:
    """A nail on the board: position, index and physical parameters taken
    from a `param` dict with keys "r", "head_r", "length", "depth"."""

    def __init__(self, x, y, idx, param):
        self._x = x
        self._y = y
        self._idx = idx
        self._r = param["r"]
        self._head_r = param["head_r"]
        self._length = param["length"]
        self._depth = param["depth"]
        self._strings_number = 0  # number of strings currently wound on this nail

    def get_x(self):
        """X coordinate of the nail."""
        return self._x

    def get_y(self):
        """Y coordinate of the nail."""
        return self._y

    def get_idx(self):
        """Index of the nail on the board."""
        return self._idx

    def get_strings_number(self):
        """Current number of strings attached to this nail."""
        return self._strings_number

    def set_strings_number(self, strings_number):
        """Set the number of strings attached to this nail."""
        self._strings_number = strings_number

    def distance_to(self, nail):
        """Euclidean distance from this nail to another nail."""
        dx = self._x - nail.get_x()
        dy = self._y - nail.get_y()
        return (dx * dx + dy * dy) ** 0.5
# nails_list = []
# Physical nail parameters in model units (converted to pixels via `ratio`).
nail_params = {
    "r": 0.06,
    "head_r": 2,
    "length": 2,
    "depth": 1
}

# Blank grayscale canvas onto which nail heads and thread segments are drawn.
image = np.zeros((500, 500, 1), dtype="uint8")
ratio = 12  # pixels per model unit
real_head_r = int(nail_params["head_r"] * ratio)  # nail head radius in pixels
# nails_list.append(Nail(100, 100, 0, nail_params))
# nails_list.append(Nail(100, 400, 1, nail_params))
# nails_list.append(Nail(400, 400, 2, nail_params))
cv2.circle(image, (100, 100), real_head_r, 255, -1)
cv2.circle(image, (400, 100), real_head_r, 255, -1)
cv2.circle(image, (400, 400), real_head_r, 255, -1)

# Closed thread path as a list of (start, end) nail-centre pairs.
strings_list = [((100, 100), (400, 400)),
                ((400, 400), (400, 100)),
                ((400, 100), (100, 100))]

prev_line = None
# Seed the drawing point: intersect the first tangent line of the LAST
# segment with the first tangent line of the FIRST segment, so the thread
# path closes cleanly at the end.
segment1 = strings_list[-1]
line1 = tb.lines_from_circle_to_circle(segment1[0][0], segment1[0][1],
                                       real_head_r,
                                       segment1[1][0], segment1[1][1],
                                       real_head_r)[0]
segment2 = strings_list[0]
line2 = tb.lines_from_circle_to_circle(segment2[0][0], segment2[0][1],
                                       real_head_r,
                                       segment2[1][0], segment2[1][1],
                                       real_head_r)[0]
prev_dot = tb.lines_intersection(line1, line2)
first_dot = prev_dot

i = 0
for string in strings_list:
    ((x1, y1), (x2, y2)) = string
    # Four common tangents between the two nail heads: two outer, two inner.
    (line1, line2, line3, line4) = \
        tb.lines_from_circle_to_circle(x1, y1, real_head_r,
                                       x2, y2, real_head_r)
    if prev_line is None:
        prev_line = line1
    elif tb.on_same_side(prev_line, x1, y1, x2, y2):
        # Choose whichever outer tangent keeps the thread wrapped around the
        # nail: the intersection farther from the previous touch point.
        dot1 = tb.lines_intersection(prev_line, line1)
        dot2 = tb.lines_intersection(prev_line, line2)
        dist1 = tb.dist(prev_dot[0], prev_dot[1], dot1[0], dot1[1])
        dist2 = tb.dist(prev_dot[0], prev_dot[1], dot2[0], dot2[1])
        if dist1 > dist2:
            cv2.line(image, prev_dot, dot1, 128, 3)
            prev_dot = dot1
            prev_line = line1
        else:
            cv2.line(image, prev_dot, dot2, 128, 3)
            prev_dot = dot2
            prev_line = line2
    else:
        # NOTE(review): stub — segments whose endpoints straddle prev_line
        # are silently skipped; confirm whether inner tangents (line3/line4)
        # were meant to be handled here.
        pass
    cv2.imshow(str(i), image)
    i += 1

# Close the loop back to the starting point.
cv2.line(image, prev_dot, first_dot, 128, 3)

cv2.imshow("final", image)
cv2.waitKey(0)
| {"/main.py": ["/trajectory_builder.py"]} |
57,627 | niki4smirn/test | refs/heads/master | /trajectory_builder.py | def dist(x1, y1, x2, y2):
return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
def circles_intersection(x1, y1, r1, x2, y2, r2):
    """Intersection points (x4, y4, x5, y5) of two circles, or None when the
    circles do not intersect (disjoint, one strictly inside the other, or
    concentric). For tangent circles both points coincide.
    """
    # Distance between centres (inlined so the function is self-contained).
    d = ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
    # abs() fixes the containment test: with r2 > r1 the original compared
    # `d < r1 - r2` against a negative number, never fired, and the sqrt for
    # h below went complex for nested circles.
    if d > r1 + r2 or d < abs(r1 - r2) or not d:
        return None
    # Standard two-circle construction: a = distance from centre 1 to the
    # radical line, h = half the chord length.
    a = (r1 ** 2 - r2 ** 2 + d ** 2) / (2 * d)
    h = (r1 ** 2 - a ** 2) ** 0.5
    x3 = x1 + a * (x2 - x1) / d
    y3 = y1 + a * (y2 - y1) / d
    x4 = x3 + h * (y2 - y1) / d
    y4 = y3 - h * (x2 - x1) / d
    x5 = x3 - h * (y2 - y1) / d
    y5 = y3 + h * (x2 - x1) / d
    return x4, y4, x5, y5
def lines_intersection(line1, line2):
    """Intersection point of two lines given as a*x + b*y + c = 0 coefficient
    objects; returns a rounded (x, y) tuple, or None for parallel or
    degenerate input."""
    a1, b1, c1 = line1.get_coefficients()
    a2, b2, c2 = line2.get_coefficients()
    denom = b2 * a1 - b1 * a2
    if denom == 0:
        return None
    x = (b1 * c2 - b2 * c1) / denom
    # Solve for y from whichever equation has a non-zero b (line1 first,
    # matching the original preference order).
    for a, b, c in ((a1, b1, c1), (a2, b2, c2)):
        if b != 0:
            return round(x), round(-(a * x + c) / b)
    return None
def circle_and_line_intersection(x, y, r, line):
    """Intersection of the circle centred at (x, y) with radius r and a line
    given as a*x + b*y + c = 0 coefficients.

    Implements the previously empty stub. Returns (x4, y4, x5, y5) — the two
    intersection points (identical for a tangent line) — or None when the
    line misses the circle, mirroring circles_intersection's convention.
    """
    a, b, c = line.get_coefficients()
    norm_sq = a * a + b * b
    if norm_sq == 0:
        return None  # degenerate "line" with a zero normal vector
    # Signed offset of the centre from the line, scaled by 1 / |n|^2.
    t = (a * x + b * y + c) / norm_sq
    dist_sq = t * t * norm_sq  # squared distance from centre to line
    if dist_sq > r * r:
        return None  # line passes outside the circle
    # Foot of the perpendicular from the centre onto the line.
    x0 = x - a * t
    y0 = y - b * t
    # Half-chord length, applied along the line direction (-b, a) / |n|.
    h = (r * r - dist_sq) ** 0.5
    n = norm_sq ** 0.5
    dx = -b / n * h
    dy = a / n * h
    return x0 + dx, y0 + dy, x0 - dx, y0 - dy
def on_same_side(line, x1, y1, x2, y2):
    """True when both points lie strictly on the same side of the line.

    A point lying exactly on the line makes the product zero, which counts
    as NOT same-side.
    """
    a, b, c = line.get_coefficients()
    side1 = a * x1 + b * y1 + c
    side2 = a * x2 + b * y2 + c
    return side1 * side2 > 0
def lines_from_point_to_circle(x1, y1, x2, y2, r):
    """Both tangent lines from the point (x1, y1) to the circle (x2, y2, r).

    Uses the Thales-circle construction: the tangent points are where the
    circle built on the midpoint of the point-centre segment meets the
    target circle. Returns a (Line, Line) pair, or None when no tangent
    exists.
    """
    mid_x = (x1 + x2) / 2
    mid_y = (y1 + y2) / 2
    half = 0.5 * dist(x1, y1, x2, y2)
    touch = circles_intersection(x2, y2, r, mid_x, mid_y, half)
    if touch is None:
        return None
    return (Line(x1=x1, y1=y1, x2=touch[0], y2=touch[1]),
            Line(x1=x1, y1=y1, x2=touch[2], y2=touch[3]))
def move_to_circle(line1, x, y, r):
    # Translate line1 (keeping its direction) so it becomes tangent to the
    # circle centred at (x, y) with radius r. Of the two candidate tangents
    # (one on each side of the circle) the one whose constant term is closest
    # to the original line's is returned. Returns None for None input.
    if line1 is None:
        return None
    (a, b, c) = line1.get_coefficients()
    # Tangency condition: |a*x + b*y + c'| = r * sqrt(a^2 + b^2);
    # c1 and c2 are the two solutions for the new constant term.
    c1 = (r * r * (a * a + b * b)) ** 0.5
    c2 = -c1
    c1 -= (a * x) + (b * y)
    c2 -= (a * x) + (b * y)
    # Keep the tangent nearest the original line (same side of the circle).
    if abs(c1 - c) < abs(c2 - c):
        return Line(a=a, b=b, c=c1)
    else:
        return Line(a=a, b=b, c=c2)
def lines_from_circle_to_circle(x1, y1, r1, x2, y2, r2):
    # Common tangent lines of two circles: returns (outer1, outer2, inner1,
    # inner2). Built by taking tangents from the second centre to a shrunken
    # (r1 - r2) / grown (r1 + r2) copy of circle 1, then shifting each line
    # out to be tangent to circle 1 itself.
    # outside
    (line1, line2) = lines_from_point_to_circle(x2, y2, x1, y1, r1 - r2)
    # inside
    (line3, line4) = lines_from_point_to_circle(x2, y2, x1, y1, r1 + r2)
    line1 = move_to_circle(line1, x1, y1, r1)
    line2 = move_to_circle(line2, x1, y1, r1)
    line3 = move_to_circle(line3, x1, y1, r1)
    line4 = move_to_circle(line4, x1, y1, r1)
    temp1 = line1.get_coefficients()
    temp2 = line2.get_coefficients()
    # NOTE(review): when both outer tangents collapse onto the same line
    # (presumably the equal-radius case), mirror one of them across the
    # circle centre — confirm the intended geometry.
    if temp1 == temp2:
        line2.set_c(-temp1[2] - 2 * ((temp1[0] * x1) + (temp1[1] * y1)))
    return line1, line2, line3, line4
class Line:
    """A 2-D line stored as the coefficients (a, b, c) of a*x + b*y + c = 0.

    Construct either from two points (x1, y1, x2, y2) or directly from
    coefficients (a, b, c). When both complete sets are supplied, the
    explicit coefficients win (they are applied last), preserving the
    original precedence.

    Raises:
        ValueError: when neither complete keyword set is supplied. The
        original silently produced an object with no a/b/c attributes and a
        confusing AttributeError later; fail fast instead.
    """

    def __init__(self, **kwargs):
        y1 = kwargs.get("y1")
        y2 = kwargs.get("y2")
        x1 = kwargs.get("x1")
        x2 = kwargs.get("x2")
        a = kwargs.get("a")
        b = kwargs.get("b")
        c = kwargs.get("c")
        initialised = False
        if x1 is not None and \
                x2 is not None and \
                y1 is not None and \
                y2 is not None:
            # Two-point form converted to the general form.
            self.a = y2 - y1
            self.b = x1 - x2
            self.c = x2 * y1 - x1 * y2
            initialised = True
        if a is not None and \
                b is not None and \
                c is not None:
            self.a = a
            self.b = b
            self.c = c
            initialised = True
        if not initialised:
            raise ValueError("Line requires either x1/y1/x2/y2 or a/b/c keyword arguments")

    def get_coefficients(self):
        """Return the (a, b, c) coefficient triple."""
        return self.a, self.b, self.c

    def set_c(self, c):
        """Replace the constant term (used to translate the line)."""
        self.c = c
| {"/main.py": ["/trajectory_builder.py"]} |
57,669 | dreipoe/RecipeBook | refs/heads/master | /app/main/admin.py | from django.contrib import admin
# Register your models here.
from main.models import *

# Expose all RecipeBook models in the Django admin with the default ModelAdmin.
admin.site.register(Measure)
admin.site.register(Ingredient)
admin.site.register(Receipt)
admin.site.register(ReceiptItem)
| {"/app/main/views.py": ["/app/main/models.py"]} |
57,670 | dreipoe/RecipeBook | refs/heads/master | /app/main/models.py | from django.core.paginator import Paginator, Page
from django.db import models
class NamedModel(models.Model):
    """Abstract base model with a `name` field and name-based __str__."""

    class Meta:
        abstract = True

    # Concrete subclasses must override this with a CharField.
    name = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The original used `assert(cond, TypeError(...))`, which asserts a
        # non-empty tuple and therefore ALWAYS passes; perform the intended
        # validation explicitly.
        if not isinstance(self.name, str):
            raise TypeError("self.name is not char field")

    def __str__(self):
        return self.name
class Measure(NamedModel):
    """Unit of measure with Russian plural-declension variants."""

    class Meta:
        app_label = "main"
        verbose_name = "Единица измерения"
        verbose_name_plural = "Единицы измерения"

    # There was an idea to use pymorphy2 (for declension), but figuring it
    # out would have taken too long and time was short — the declined forms
    # are stored explicitly instead.
    name = models.CharField(max_length=32, unique=True, verbose_name='Название ')
    name_case_1 = models.CharField(max_length=32, null=True, blank=True, verbose_name='Название (например, 2 ложки)')
    name_case_2 = models.CharField(
        max_length=32, null=True, blank=True, verbose_name='Название (например, 5 ложек)'
    )

    def get_inflected_name(self, n: int):
        """Declension of the unit name after the number *n* (Russian numeral
        agreement: 1 → nominative, 2-4 → paucal, otherwise plural genitive).
        Missing declined forms fall back to `name`."""
        name = self.name_case_2 if self.name_case_2 is not None else self.name
        # Numbers 11-14 always take the case_2 form, hence the tens check.
        if n // 10 % 10 != 1:
            last_digit = n % 10
            if last_digit == 1:
                name = self.name
            elif last_digit in range(2, 5):
                name = self.name_case_1 if self.name_case_1 is not None else self.name
        return name
class Ingredient(NamedModel):
    """Ingredient with an optional genitive-case name for Russian phrasing."""

    class Meta:
        app_label = "main"
        verbose_name = "Ингредиент"
        verbose_name_plural = "Ингредиенты"

    name = models.CharField(max_length=32, unique=True, verbose_name='Название')
    name_genitive = models.CharField(max_length=32, null=True, blank=True, verbose_name='Название в родительном падеже')

    def get_genitive(self):
        """Genitive form if provided, otherwise fall back to the base name."""
        return self.name_genitive if self.name_genitive is not None else self.name
class Receipt(NamedModel):
    """Recipe: a unique name plus free-text preparation instructions."""

    class Meta:
        app_label = "main"
        verbose_name = "Рецепт"
        verbose_name_plural = "Рецепты"

    name = models.CharField(max_length=16, unique=True, verbose_name='Название')
    definition = models.TextField(max_length=4000, verbose_name='Описание')

    @staticmethod
    def get_page(page) -> Page:
        """Return one page (20 items) of recipes ordered by primary key.

        :param page: page number
        :return: a Page object, or None when there are no recipes at all
        """
        items = None
        objects = Receipt.objects.order_by('pk')
        if objects.count() > 0:
            paginator = Paginator(objects, 20)
            items = paginator.page(page)
        return items

    def cut_definition(self):
        """Truncate the description (500 chars + ellipsis) for list cards."""
        n = 500
        return f"{self.definition[0:n]}..." if len(self.definition) > n else self.definition

    def get_ingredients(self):
        """All ReceiptItem rows (ingredient lines) belonging to this recipe."""
        return ReceiptItem.objects.filter(receipt=self.id)
class ReceiptItem(models.Model):
    """One ingredient line of a recipe: quantity, unit and ingredient."""

    class Meta:
        app_label = "main"
        verbose_name = "Ингредиент в рецепте"
        verbose_name_plural = "Ингредиенты в рецепте"

    n = models.IntegerField(verbose_name='Кол-во')
    measure = models.ForeignKey(to=Measure, on_delete=models.RESTRICT, verbose_name='Единица измерения')
    ingredient = models.ForeignKey(to=Ingredient, on_delete=models.RESTRICT, verbose_name='Ингредиент')
    receipt = models.ForeignKey(to=Receipt, on_delete=models.RESTRICT, verbose_name='Рецепт')

    def __str__(self):
        # E.g. "2 ложки сахара": number, declined unit, genitive ingredient.
        n = self.n
        return f"{n} {self.measure.get_inflected_name(n)} {self.ingredient.get_genitive()}"
| {"/app/main/views.py": ["/app/main/models.py"]} |
57,671 | dreipoe/RecipeBook | refs/heads/master | /app/main/views.py | from django.db.models import Q
from django.core.handlers.wsgi import WSGIRequest
from django.shortcuts import render, get_object_or_404
from .models import Receipt, ReceiptItem, Ingredient
def index(request: WSGIRequest):
    """Recipe list page with optional filtering by recipe name and/or
    ingredient name, both read from the query string ('receipt' and
    'ingredient' parameters)."""
    receipt_name = request.GET.get("receipt")
    ingredient_name = request.GET.get("ingredient")
    receipts = Receipt.objects.all()
    if ingredient_name not in (None, ''):
        # Keep only recipes containing at least one matching ingredient.
        receipts = receipts.filter(
            pk__in=ReceiptItem.objects.filter(
                ingredient__in=Ingredient.objects.filter(name__icontains=ingredient_name)
            ).values_list("receipt_id")
        )
    if receipt_name not in (None, ''):
        receipts = receipts.filter(name__icontains=receipt_name)
    return render(request, "main.html", {
        'title': "Книга рецептов",
        'items': receipts,
        'receipt_name': receipt_name,
        'ingredient_name': ingredient_name
    })
def detail(request: WSGIRequest, idx: int):
    """Recipe detail page; returns 404 when the primary key does not exist."""
    item = get_object_or_404(Receipt, pk=idx)
    ingredients = item.get_ingredients()
    return render(request, "detail.html", {"item": item, "ingredients": ingredients})
| {"/app/main/views.py": ["/app/main/models.py"]} |
57,672 | dreipoe/RecipeBook | refs/heads/master | /app/main/migrations/0003_auto_20211104_1439.py | # Generated by Django 3.2.9 on 2021-11-04 09:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration (2021-11-04): adds genitive name
    fields to Measure and retitles Ingredient.name. Generated code — avoid
    editing by hand; schema history depends on it."""

    dependencies = [
        ('main', '0002_auto_20211102_1305'),
    ]

    operations = [
        migrations.AddField(
            model_name='measure',
            name='name_genitive',
            field=models.CharField(blank=True, max_length=32, null=True, verbose_name='Название в родительном падеже'),
        ),
        migrations.AddField(
            model_name='measure',
            name='name_genitive_plural',
            field=models.CharField(blank=True, max_length=32, null=True, verbose_name='Название в родительном падеже множественного числа'),
        ),
        migrations.AlterField(
            model_name='ingredient',
            name='name',
            field=models.CharField(max_length=32, unique=True, verbose_name='Название в родительном падеже'),
        ),
    ]
| {"/app/main/views.py": ["/app/main/models.py"]} |
57,673 | TheoKlein/snake-core | refs/heads/master | /snake/core/route_support.py | """The module containing support for router modules.
This contains functions that are shared between the routes modules.
"""
import asyncio
import os
import shutil
import zipfile
from datetime import datetime
from snake import db
from snake import enums
from snake import error
from snake import schema
from snake import utils
from snake.config import snake_config
from snake.core import celery
from snake.managers import scale_manager
async def execute_autoruns(sha256_digest, file_type, mime_type):
    """Find and queue autoruns for a given file (sha256_digest).

    If autoruns are enabled in the snake configuration, every autorun
    applicable to the file's type (and, when the autorun specifies one, its
    mime type) is queued for asynchronous execution.

    Args:
        sha256_digest (str): The hash of the file to execute the autoruns on.
        file_type (:obj:`FileType`): The file type used to select autoruns.
        mime_type (str): The mime type used to further filter autoruns.
    """
    if not snake_config['command_autoruns']:
        return
    for mod, cmd, mime in scale_manager.get_autoruns(file_type=file_type):
        # An autorun with a mime restriction only applies to matching files.
        if mime and mime != mime_type:
            continue
        args = schema.CommandSchema().load({
            'sha256_digest': sha256_digest,
            'scale': mod,
            'command': cmd,
            'asynchronous': True
        })
        await queue_command(args)
async def queue_command(data):
    """Queue commands for execution

    This will queue commands for execution on the celery workers.

    Note:
        The returned command schema will reflect the status of the queued
        command.

    Args:
        data (:obj:`CommandSchema`): The command to queue for execution.

    Returns:
        :obj:`CommandSchema`: The command schema with updates

    Raises:
        SnakeError: when a synchronous command's worker task fails.
    """
    # The latest execution always wins, thus we replace the current one in the db
    document = await db.async_command_collection.select(data['sha256_digest'], data['scale'], data['command'], data['args'])
    if document:
        if 'status' in document and document['status'] == enums.Status.RUNNING:
            # Already running: report the stored state instead of re-queueing.
            return schema.CommandSchema().dump(schema.CommandSchema().load(document))
        else:
            # Remember the old output document so it can be cleaned up after
            # the command record has been replaced.
            _output_id = None
            if '_output_id' in document:
                _output_id = document['_output_id']
            data['timestamp'] = datetime.utcnow()
            data = schema.CommandSchema().dump(data)
            await db.async_command_collection.replace(data['sha256_digest'], data['scale'], data['command'], data['args'], data)
            # NOTE: We delete after the replace to try and prevent concurrent
            # reads to a file while it is being deleted
            if _output_id:
                await db.async_command_output_collection.delete(_output_id)
    else:
        # Save the command, this will be in a pending state
        data['timestamp'] = datetime.utcnow()
        data = schema.CommandSchema().dump(data)
        await db.async_command_collection.insert(data)
    data = schema.CommandSchema().load(data)
    if data['asynchronous'] is True:
        # Fire-and-forget: the caller polls for status later.
        celery.execute_command.apply_async(args=[data], time_limit=data['timeout'] + 30, soft_time_limit=data['timeout'])
    else:
        # Synchronous: block until the worker finishes (or times out).
        task = celery.execute_command.apply_async(args=[data], time_limit=data['timeout'] + 30, soft_time_limit=data['timeout'])
        result = await celery.wait_for_task(task)
        if not task.successful():
            # Mark the stored command as failed and attach a stub error
            # output, replacing (and then deleting) any previous output.
            document = await db.async_command_collection.select(data['sha256_digest'], data['scale'], data['command'], data['args'])
            _output_id = None
            if '_output_id' in document:
                _output_id = document['_output_id']
            _new_output_id = await db.async_command_output_collection.put(document['command'], b"{'error': 'worker failed please check log'}")
            document['_output_id'] = _new_output_id
            document['status'] = enums.Status.FAILED
            await db.async_command_collection.update(document['sha256_digest'], document['scale'], document['command'], data['args'], document)
            if _output_id:
                await db.async_command_output_collection.delete(_output_id)
            raise error.SnakeError(result)
    return await db.async_command_collection.select(data['sha256_digest'], data['scale'], data['command'], data['args'])
async def store_file(sha256_digest, file_path, file_type, data):
    """Store a file to disk.

    Uses file storage to store the new file to disk. Upon success insert the
    metadata into the database.

    Args:
        sha256_digest (str): The hash of the file to store.
        file_path (str): The location of the file to move into the store.
        file_type (:obj:`FileType`): The type of the file being stored.
        data (:obj:`CommandSchema`): The metadata for the file.

    Returns:
        :obj:`CommandSchema`: The updated document metadata.

    Raises:
        SnakeError: When the file cannot be stored on disk or the metadata
        cannot be inserted into the database.
    """
    # Save the file to the 'filedb' and add it to the database
    file_storage = utils.FileStorage()
    file_storage.create(file_path, sha256_digest)
    if not file_storage.save(move=True):
        raise error.SnakeError("Failed to store file on disk")
    # Merge storage metadata (paths, mime, size, ...) into the caller's data.
    data.update(file_storage.to_dict())
    data['name'] = strip_extensions(data['name'])
    data['timestamp'] = datetime.utcnow()
    data = schema.FileSchema().dump(data)
    data['file_type'] = file_type  # load_only=True
    document = await db.async_file_collection.insert(data)
    if not document:
        # Keep disk and database consistent: drop the stored file on failure.
        file_storage.delete()
        raise error.SnakeError("Failed to insert document")
    document = await db.async_file_collection.select(file_storage.sha256_digest)
    # Run any autoruns, if allowed
    await execute_autoruns(sha256_digest, file_type, file_storage.mime)
    return document
def strip_extensions(name):
    """Strip the trailing extension from *name* when it appears in the
    snake configuration's 'strip_extensions' list.

    Only the final extension is considered — this does not recurse.

    Args:
        name (str): The name to strip.

    Returns:
        str: The stripped name (or the original when nothing matches).
    """
    # Strip annoying malware extensions
    configured = snake_config['strip_extensions']
    if not configured:
        return name
    stem, sep, ext = name.rpartition(".")
    if sep and ext in configured:
        return stem
    return name
async def unzip_file_python(file_path, file_name, output_dir, protected=False, password=None):
    """Unzip file using ZipFile.

    Uses ZipFile to extract a file from a zip into a given directory. It will
    handle password protected folders and if no password is presented then it
    will loop through a list of passwords stored in the snake configuration.

    Note:
        Only zips with a single file are supported.

    Args:
        file_path (str): The path to the zipped file.
        file_name (str): The name of the file to extract from the zip.
        output_dir (str): The directory to extract the file to.
        protected (bool, optional): Is the zip password protected. Defaults to False.
        password (str, optional): The password for the zip. Defaults to None.

    Returns:
        str: The path of the extracted file.

    Raises:
        RuntimeError: For any error that is not related to a Bad Password.
        SnakeError: When extraction of the file has failed.
    """
    zip_file = zipfile.ZipFile(file_path)
    new_path = None
    if protected:
        if password:
            try:
                new_path = zip_file.extract(file_name, output_dir, bytes(password, 'utf-8'))
            except RuntimeError as err:
                # Swallow 'Bad password' errors; they surface as SnakeError below
                if 'Bad password' not in str(err):
                    raise
        else:
            # No password supplied: try each configured password in turn
            for passwd in snake_config['zip_passwords']:
                try:
                    new_path = zip_file.extract(file_name, output_dir, bytes(passwd, 'utf-8'))
                except RuntimeError as err:
                    if 'Bad password' not in str(err):
                        raise
                if new_path:
                    break
        if not new_path:
            raise error.SnakeError('ZipError: incorrect password')
    else:
        new_path = zip_file.extract(file_name, output_dir, None)
    return new_path
async def unzip_file_unix(file_path, file_name, output_dir, protected=False, password=None):  # pylint: disable=too-many-branches
    """Unzip file using unzip.

    Uses unzip binary to extract a file from a zip into a given directory. It
    will handle password protected folders and if no password is presented then
    it will loop through a list of passwords stored in the snake configuration.

    Note:
        Only zips with a single file are supported.

    Args:
        file_path (str): The path to the zipped file.
        file_name (str): The name of the file to extract from the zip.
        output_dir (str): The directory to extract the file to.
        protected (bool, optional): Is the zip password protected. Defaults to False.
        password (str, optional): The password for the zip. Defaults to None.

    Returns:
        str: The path of the extracted file.

    Raises:
        SnakeError: When extraction of the file has failed.
    """
    err = ''
    new_path = None
    if protected:
        if password:
            proc = await asyncio.create_subprocess_exec(
                *["unzip", "-P", bytes(password, "utf-8"), "-j", file_path, file_name, "-d", output_dir],
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE)
            _stdout, stderr = await proc.communicate()
            if not proc.returncode:
                # NOTE: We flatten dirs so we must strip dirs from file_name if present
                new_path = os.path.join(output_dir, file_name.split('/')[-1])
            else:
                err = str(stderr, encoding='utf-8')
        else:
            # No password supplied: try each configured password in turn
            for passwd in snake_config['zip_passwords']:
                proc = await asyncio.create_subprocess_exec(
                    *["unzip", "-P", bytes(passwd, "utf-8"), "-j", file_path, file_name, "-d", output_dir],
                    stdout=asyncio.subprocess.PIPE,
                    stderr=asyncio.subprocess.PIPE)
                _stdout, stderr = await proc.communicate()
                if not proc.returncode:
                    # NOTE: We flatten dirs so we must strip dirs from file_name if present
                    new_path = os.path.join(output_dir, file_name.split('/')[-1])
                else:
                    err = str(stderr, encoding='utf-8')
                if new_path:
                    break
    else:
        proc = await asyncio.create_subprocess_exec(
            *["unzip", "-j", file_path, file_name, "-d", output_dir],
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
        _stdout, stderr = await proc.communicate()
        if not proc.returncode:
            # NOTE: We flatten dirs so we must strip dirs from file_name if present
            new_path = os.path.join(output_dir, file_name.split('/')[-1])
        else:
            err = str(stderr, encoding='utf-8')
    if not new_path:
        # Distinguish a wrong password from any other unzip failure
        if 'incorrect password' in err:
            raise error.SnakeError('ZipError: incorrect password')
        else:
            raise error.SnakeError('ZipError: {}'.format(err))
    return new_path
async def unzip_file(file_path, password=None):
    """Unzip a file.

    Unzips a file using unzip or ZipFile. For speed reasons if unzip is
    installed it will be used in favour of the ZipFile library. It will extract
    the file to the same directory as that of the zip folder.

    Note:
        The zip file must contain only one file.

    Args:
        file_path (str): The zip file to unzip.
        password (str): The password for the zip. Defaults to None.

    Returns:
        str: The path to the extracted file.

    Raises:
        SnakeError: When the zip file contains more than one file.
            When the extraction fails.
    """
    zip_file = zipfile.ZipFile(file_path)
    info_list = zip_file.infolist()
    if len(info_list) != 1:
        raise error.SnakeError('ZipError: only one file is allowed in the container')
    i = info_list[0]
    working_dir = os.path.dirname(file_path)
    new_path = None
    # Low bit of flag_bits marks the zip entry as encrypted
    protected = i.flag_bits & 0x1
    # NOTE: ZipFile is slow as balls so we outsource to unzip if installed
    outsource = shutil.which('unzip')
    if outsource:
        new_path = await unzip_file_unix(file_path, i.filename, working_dir, protected, password)
    else:
        new_path = await unzip_file_python(file_path, i.filename, working_dir, protected, password)
    if not new_path:
        raise error.SnakeError('ZipError: failed to extract file')
    return new_path
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,674 | TheoKlein/snake-core | refs/heads/master | /tests/core/test_route_support.py | # pylint: disable=missing-docstring
import pytest
from snake import error
from snake import schema
from snake.config import snake_config
from snake.core import route_support
@pytest.mark.asyncio
async def test_execute_autoruns(mocker):
    """
    Test execute_autoruns function

    Covers: autoruns disabled, no autoruns registered, autoruns without a
    mime restriction, and a mime restricted autorun firing on a match.
    """
    state = {
        'queue': []
    }
    def get_autoruns(self, file_type): # pylint: disable=unused-argument
        # (command, scale, mime) triples; only the last entry is mime restricted
        return [
            ('1', '1', None),
            ('2', '2', None),
            ('3', '3', None),
            ('4', '4', '4')
        ]
    def get_autoruns_empty(self, file_type): # pylint: disable=unused-argument
        return []
    async def queue_command(data): # pylint: disable=unused-argument
        state['queue'] += [data]
    mocker.patch('snake.core.route_support.queue_command', queue_command)
    snake_config['command_autoruns'] = False
    # Test disabled
    await route_support.execute_autoruns('abcd', None, None)
    assert len(state['queue']) == 0 # pylint: disable=len-as-condition
    snake_config['command_autoruns'] = True
    # Test no autoruns
    mocker.patch('snake.core.scale_manager.ScaleManager.get_autoruns', get_autoruns_empty)
    state['queue'] = []
    await route_support.execute_autoruns('abcd', None, None)
    assert len(state['queue']) == 0 # pylint: disable=len-as-condition
    mocker.patch('snake.core.scale_manager.ScaleManager.get_autoruns', get_autoruns_empty)
    # Test autoruns
    mocker.patch('snake.core.scale_manager.ScaleManager.get_autoruns', get_autoruns)
    state['queue'] = []
    await route_support.execute_autoruns('abcd', None, None)
    assert len(state['queue']) == 3
    mocker.patch('snake.core.scale_manager.ScaleManager.get_autoruns', get_autoruns)
    # Test mime
    mocker.patch('snake.core.scale_manager.ScaleManager.get_autoruns', get_autoruns)
    state['queue'] = []
    await route_support.execute_autoruns('abcd', None, '4')
    assert len(state['queue']) == 4
    mocker.patch('snake.core.scale_manager.ScaleManager.get_autoruns', get_autoruns)
@pytest.mark.asyncio
async def test_queue_command(mocker):
    """
    Test queue_command function

    Exercises the already-running, replace, new-command, asynchronous and
    failure paths using mocked celery tasks and database collections.
    """
    base_data = schema.CommandSchema().load({'sha256_digest': 'abcd', 'scale': 'abcd', 'command': 'abcd'})
    state = {
        'data': {}
    }
    def apply_async(*args, **kwargs): # pylint: disable=unused-argument
        class Task: # pylint: disable=too-few-public-methods
            def successful(self): # pylint: disable=no-self-use
                return True
        state['data'] = kwargs['args'][0]
        state['data']['status'] = 'running'
        return Task()
    def apply_async_fail(*args, **kwargs): # pylint: disable=unused-argument
        class Task: # pylint: disable=too-few-public-methods
            def successful(self): # pylint: disable=no-self-use
                return False
        state['data'] = kwargs['args'][0]
        state['data']['status'] = 'running'
        return Task()
    async def insert(self, *args): # pylint: disable=unused-argument
        state['data'] = args
    async def put(self, *args): # pylint: disable=unused-argument
        pass
    async def replace(self, *args): # pylint: disable=unused-argument
        return state['data']
    async def update_fail(self, *args): # pylint: disable=unused-argument
        state['data']['status'] = 'failed'
    async def select(self, *args): # pylint: disable=unused-argument
        return state['data']
    async def wait_for_task(self, *args, **kwargs): # pylint: disable=unused-argument
        return
    mocker.patch('snake.core.celery.execute_command.apply_async', apply_async)
    mocker.patch('snake.core.celery.wait_for_task', wait_for_task)
    mocker.patch('snake.db.async_command_collection.insert', insert)
    mocker.patch('snake.db.async_command_collection.replace', replace)
    mocker.patch('snake.db.async_command_collection.select', select)
    # Test Status running
    data = base_data
    data['status'] = 'running'
    state['data'] = data
    state['data'] = await route_support.queue_command(data)
    assert state['data']['status'] == 'running'
    # Test replace branch
    data = base_data
    data['status'] = 'success'
    state['data'] = data
    state['data'] = await route_support.queue_command(data)
    assert state['data']['status'] == 'running'
    # Test new command
    state['data'] = {}
    data = base_data
    data['status'] = 'success'
    state['data'] = await route_support.queue_command(data)
    assert state['data']['status'] == 'running'
    # Test async
    state['data'] = {}
    data = base_data
    data['asynchronous'] = 'true'
    data['status'] = 'success'
    state['data'] = await route_support.queue_command(data)
    assert state['data']['status'] == 'running'
    # Test failure
    mocker.patch('snake.core.celery.execute_command.apply_async', apply_async_fail)
    mocker.patch('snake.db.async_command_collection.update', update_fail)
    mocker.patch('snake.db.async_command_output_collection.put', put)
    state['data'] = {}
    data = base_data
    data['asynchronous'] = 'false'
    data['status'] = 'success'
    with pytest.raises(error.SnakeError):
        state['data'] = await route_support.queue_command(data)
    assert state['data']['status'] == 'failed'
@pytest.mark.asyncio
async def test_store_file(mocker):
    """
    Test store_file function

    Covers the success path, failure to store the file on disk, and failure
    to insert the document into the database.
    """
    base_data = schema.FileSchema().load({'name': 'abcd'})
    state = {
        'data': {}
    }
    async def execute_autoruns(self, *args, **kwargs): # pylint: disable=unused-argument
        pass
    class FileStorage: # pylint: disable=too-few-public-methods
        def create(*args, **kwargs): # pylint: disable=unused-argument, no-method-argument, no-self-use
            return False
        def save(*args, **kwargs): # pylint: disable=unused-argument, no-method-argument, no-self-use
            return False
    class AsyncFileCollection:
        # XXX: Don't add self it breaks the mocks?!
        async def insert(data): # pylint: disable=unused-argument, no-self-argument
            state['data'] = data
            return state['data']
        async def select(*args, **kwargs): # pylint: disable=unused-argument, no-method-argument
            return state['data']
    class AsyncFileCollectionFail:
        # XXX: Don't add self it breaks the mocks?!
        async def insert(data): # pylint: disable=unused-argument, no-self-argument
            return None
        async def select(*args, **kwargs): # pylint: disable=unused-argument, no-method-argument
            return state['data']
    mocker.patch('snake.core.route_support.db.async_file_collection', AsyncFileCollection)
    mocker.patch('snake.core.route_support.execute_autoruns', execute_autoruns)
    mocker.patch('snake.core.route_support.utils.FileStorage')
    # Test success
    state['data'] = {}
    data = base_data
    document = await route_support.store_file('abcd', 'file', 'abcd', data)
    assert document['name'] == 'abcd'
    assert document['file_type'] == 'abcd'
    # Test failing to create file
    mocker.patch('snake.core.route_support.utils.FileStorage', FileStorage)
    state['data'] = {}
    data = base_data
    with pytest.raises(error.SnakeError):
        await route_support.store_file('abcd', 'file', 'abcd', data)
    # Test failed insert
    mocker.patch('snake.core.route_support.utils.FileStorage')
    mocker.patch('snake.core.route_support.db.async_file_collection', AsyncFileCollectionFail)
    state['data'] = {}
    data = base_data
    with pytest.raises(error.SnakeError):
        await route_support.store_file('abcd', 'file', 'abcd', data)
@pytest.mark.asyncio
async def test_strip_extensions():
    """
    Test strip_extensions function
    """
    # With no configured extensions nothing is stripped
    snake_config['strip_extensions'] = []
    assert route_support.strip_extensions('abcd.zip') == 'abcd.zip'
    snake_config['strip_extensions'] = ['blah', 'zip']
    # No extension present: name is unchanged
    assert route_support.strip_extensions('abcd') == 'abcd'
    # Single extension: the trailing extension is removed
    assert route_support.strip_extensions('abcd.zip') == 'abcd'
    # Two extensions: only the trailing one is removed
    assert route_support.strip_extensions('abcd.blah.zip') == 'abcd.blah'
@pytest.mark.asyncio
async def test_unzip_file_python(mocker):
    """
    Test unzip_file_python function

    Uses a stubbed ZipFile where password b'bad' raises a 'Bad password'
    RuntimeError and b'incorrect' raises a generic RuntimeError.
    """
    class ZipFile: # pylint: disable=too-few-public-methods
        def __init__(self, path):
            pass
        def extract(self, name, directory, password): # pylint: disable=unused-argument, no-self-use
            if password == b'incorrect':
                raise RuntimeError
            if password == b'bad':
                raise RuntimeError('Bad password')
            return "{}/{}".format(directory, name)
    mocker.patch("snake.core.route_support.zipfile.ZipFile", ZipFile)
    # Test normal unzip
    path = await route_support.unzip_file_python('path', 'name', 'output_dir')
    assert path == 'output_dir/name'
    # Test password unzip
    path = await route_support.unzip_file_python('path', 'name', 'output_dir', True, 'password')
    assert path == 'output_dir/name'
    # Test bad password unzip
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_python('path', 'name', 'output_dir', True, 'bad')
    # Test unzip error
    with pytest.raises(RuntimeError):
        await route_support.unzip_file_python('path', 'name', 'output_dir', True, 'incorrect')
    # Test auto password unzip
    snake_config['zip_passwords'] = ['bad', 'password']
    path = await route_support.unzip_file_python('path', 'name', 'output_dir', True)
    assert path == 'output_dir/name'
    # Test auto bad password unzip
    snake_config['zip_passwords'] = ['bad']
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_python('path', 'name', 'output_dir', True)
    # Test auto password unzip error
    snake_config['zip_passwords'] = ['bad', 'incorrect']
    with pytest.raises(RuntimeError):
        await route_support.unzip_file_python('path', 'name', 'output_dir', True)
    # Test auto password unzip no passwords
    snake_config['zip_passwords'] = []
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_python('path', 'name', 'output_dir', True)
@pytest.mark.asyncio
async def test_unzip_file_unix(mocker):
    """
    Test unzip_file_unix function

    Stubs asyncio.create_subprocess_exec; the '-P' password argument
    (args[2]) controls whether the fake unzip reports success or failure.
    """
    class Proc: # pylint: disable=too-few-public-methods
        returncode = 0
        def __init__(self, ret=0):
            self.returncode = ret
        async def communicate(self):
            return b'', b''
    async def create_subprocess_exec(*args, **kwargs): # pylint: disable=unused-argument
        ret = 0
        if args[2] == b'bad':
            ret = 1
        if args[2] == b'incorrect':
            ret = 1
        return Proc(ret)
    mocker.patch("snake.core.route_support.asyncio.create_subprocess_exec", create_subprocess_exec)
    # Test normal unzip
    path = await route_support.unzip_file_unix('path', 'name', 'output_dir')
    assert path == 'output_dir/name'
    # Test password unzip
    path = await route_support.unzip_file_unix('path', 'name', 'output_dir', True, 'password')
    assert path == 'output_dir/name'
    # Test bad password unzip
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_unix('path', 'name', 'output_dir', True, 'bad')
    # Test unzip error
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_unix('path', 'name', 'output_dir', True, 'incorrect')
    # Test auto password unzip
    snake_config['zip_passwords'] = ['bad', 'password']
    path = await route_support.unzip_file_unix('path', 'name', 'output_dir', True)
    assert path == 'output_dir/name'
    # Test auto bad password unzip
    snake_config['zip_passwords'] = ['bad']
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_unix('path', 'name', 'output_dir', True)
    # Test auto password unzip error
    snake_config['zip_passwords'] = ['bad', 'incorrect']
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_unix('path', 'name', 'output_dir', True)
    # Test auto password unzip no passwords
    snake_config['zip_passwords'] = []
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file_unix('path', 'name', 'output_dir', True)
@pytest.mark.asyncio
async def test_unzip_file(mocker):
    """
    Test unzip_file function

    Covers dispatch to the external/builtin unzip helpers, extraction
    failure, and rejection of zips containing more than one file.
    """
    def no(*args, **kwargs): # pylint: disable=unused-argument, invalid-name
        return False
    async def dummy(*args, **kwargs): # pylint: disable=unused-argument
        return 'output_dir/name'
    async def dummy_1(*args, **kwargs): # pylint: disable=unused-argument
        return None
    class ZipFile: # pylint: disable=too-few-public-methods
        class Item:
            filename = 'file_name'
            flag_bits = 1
        def __init__(self, path):
            pass
        def infolist(self): # pylint: disable=no-self-use
            return [self.Item()]
    class ZipFileMulti: # pylint: disable=too-few-public-methods
        def __init__(self, path):
            pass
        def infolist(self): # pylint: disable=no-self-use
            return [1, 2]
    mocker.patch("snake.core.route_support.shutil.which")
    mocker.patch("snake.core.route_support.unzip_file_python", dummy)
    mocker.patch("snake.core.route_support.unzip_file_unix", dummy)
    # Test unzip external
    mocker.patch("snake.core.route_support.zipfile.ZipFile", ZipFile)
    path = await route_support.unzip_file('file_path')
    assert path == 'output_dir/name'
    # Test unzip builtin
    mocker.patch("snake.core.route_support.shutil.which", no)
    path = await route_support.unzip_file('file_path')
    assert path == 'output_dir/name'
    # Test unzip failure
    mocker.patch("snake.core.route_support.unzip_file_python", dummy_1)
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file('file_path')
    # Test multiple files
    mocker.patch("snake.core.route_support.zipfile.ZipFile", ZipFileMulti)
    with pytest.raises(error.SnakeError):
        await route_support.unzip_file('file_path')
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,675 | TheoKlein/snake-core | refs/heads/master | /snake/managers.py | """The managers module.
This contains all instantiated managers for uses around snake.
Attributes:
scale_manager (:obj:`ScaleManager`): The scale manager.
"""
from snake.core import scale_manager
# Module-level singleton: shadows the imported module name on purpose.
scale_manager = scale_manager.ScaleManager() # pylint: disable=invalid-name
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,676 | TheoKlein/snake-core | refs/heads/master | /snake/snaked.py | #!/usr/bin/python3
"""The snake daemon.
This is snake. This is how snake is loaded, usually this file is called through
a service but it can also be called from the command line (usually for
debug/development purposes).
Examples:
snaked # Run snake (if installed with pip).
python -m snake.snaked -d # Run snake and output log to console.
Attributes:
BANNER (str): The snake banner, used in the logs and console.
"""
import argparse
import logging
import os
from os import path
import shutil
import sys
import time
from celery.task.control import inspect # pylint: disable=import-error, no-name-in-module
from tornado.options import options
from tornado import ioloop, web
from tornado.log import enable_pretty_logging
from snake.config import constants
from snake.config import config_parser
from snake.config import snake_config
# pylint: disable=too-many-locals
BANNER = """
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
|_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_|
_ _ _ _ _ _ _ _
|_| |_| |_| |_| |_| |_| |_| |_|
_ _ _ _ _ _ _ _
|_| |_| |_| |_| |_| |_| |_| |_|
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
|_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_|
_ _ _ _ _ _ _ _
|_| |_| |_| |_| |_| |_| |_| |_|
_ _ _ _ _ _ _ _
|_| |_| |_| |_| |_| |_| |_| |_|
_ _ _ _ _ _ _ _ _ _ _ _ _ _
|_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_| |_|
By Countercept, Version {}
""".format(constants.VERSION)
def main(config_file=None, debug=False, pidfile=None): # pylint: disable=too-many-branches, too-many-statements
    """The main function for snake.

    Loads configuration, validates the required directories, configures
    logging, verifies the mongo and celery backends, clears the cache and
    then starts the tornado IO loop.

    Args:
        config_file (str, optional): Path to a custom configuration file.
            Defaults to None.
        debug (bool): Whether to debug or not. Defaults to False.
            This will direct log output to console.
        pidfile (str, optional): Path to pidfile. Defaults to None.
            This is used by the systemd snake service.
    """
    # Allow PID file creation for use by systemd
    if pidfile:
        if path.isfile(pidfile):
            print('Snake server is already running')
            sys.exit(1)
        # NOTE(review): the PID file is never removed here on shutdown -
        # presumably the systemd unit cleans it up; confirm before relying on it.
        with open(pidfile, 'w') as pid:
            pid.write(str(os.getpid()))
    # If user specified reload config file
    if config_file:
        config_parser.load_config(config_file)
    # Check all dirs exist otherwise give up
    keys = ['cache_dir', 'file_db', 'log_dir']
    for key in keys:
        directory = path.abspath(path.expanduser(config_parser.snake_config[key]))
        if not path.exists(directory):
            print("Directory for '{}' does not exist: {}".format(key, directory))
            sys.exit(1)  # was exit(1): use sys.exit consistently (exit() is a site builtin)
    # Log to console or file
    if not debug:
        log_dir = path.abspath(path.expanduser(config_parser.snake_config['log_dir']))
        options.log_file_prefix = path.join(log_dir, 'snake.log')
        with open(path.join(log_dir, 'snake.log'), 'w+') as f:
            f.write(BANNER)
    else:
        print(BANNER)
    # Bring in all the snake imports after config is setup
    from snake.core.celery import celery
    from snake.core.route_manager import generate_routes
    from snake.core.snake_handler import DefaultHandler
    from snake import db
    # Logging
    enable_pretty_logging()
    app_log = logging.getLogger("tornado.application")
    # Test mongo connection
    if not db.test_connection():
        app_log.error('failed to connect to mongo server')
        sys.exit(1)
    # Test celery: poll for workers a few times before giving up
    try:
        insp = inspect(app=celery, timeout=1.0)
        count = 0
        fail = True
        while count < 4:
            if insp.stats():
                fail = False
                break
            time.sleep(5)
            count += 1
        if fail:
            app_log.error('failed to find any running Celery workers')
            sys.exit(1)  # was exit(1): use sys.exit consistently
    except IOError as err:
        app_log.error('failed to connect to backend - %s', err)
        sys.exit(1)
    # Run DB command cleaning
    db.command_collection.clean()
    # Clear the cache
    cache_dir = path.abspath(path.expanduser(snake_config['cache_dir']))
    if path.exists(cache_dir):
        for i in os.listdir(cache_dir):
            f = path.join(cache_dir, i)
            if path.isfile(f):
                os.unlink(f)
            else:
                shutil.rmtree(f)
    # Routes
    routes = generate_routes()
    # Spin up
    ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOMainLoop')
    application = web.Application(
        routes,
        debug=debug,
        autoreload=debug,
        default_handler_class=DefaultHandler
    )
    application.listen(snake_config['port'], address=snake_config['address'], max_buffer_size=10485760000)  # Set a 10GB limit
    ioloop.IOLoop.current().start()
def __main__():
    """Parse command line arguments and start the snake daemon via main()."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config_file", dest="config_file", default=None, help="custom config path")
    parser.add_argument("-d", "--debug", action='store_true', dest="debug", default=False, help="enable debug mode")
    parser.add_argument("--pidfile", dest="pidfile", default=None, help="path to PID file")
    args = parser.parse_args()
    main(config_file=args.config_file, debug=args.debug, pidfile=args.pidfile)
if __name__ == "__main__":
    __main__()
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,677 | TheoKlein/snake-core | refs/heads/master | /snake/routes/memory.py | """ The memory route module.
Attributes:
MemoryRoute (tuple): The MemoryRoute.
MemoriesRoute (tuple): The MemoriesRoute.
"""
from tornado import escape
from webargs import tornadoparser
from snake import db
from snake import enums
from snake import error
from snake import fields
from snake import schema
from snake import utils
from snake.core import snake_handler
# pylint: disable=abstract-method
# pylint: disable=arguments-differ
class MemoryHandler(snake_handler.SnakeHandler):
    """Extends `SnakeHandler`."""
    async def get(self, sha256_digest):
        """Return metadata for a memory sample; 404 if missing or not MEMORY."""
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.MEMORY:
            self.write_warning("memory - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        # Round-trip through the schema to validate/normalise the document
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'memory': document})
        self.finish()
    async def delete(self, sha256_digest):
        """Delete a memory sample from disk and the database."""
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.MEMORY:
            self.write_warning("memory - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        try:
            file_storage = utils.FileStorage(sha256_digest)
            file_storage.delete()
        except error.SnakeError:
            # Best effort: still remove the database entry if the file is gone
            pass
        await db.async_file_collection.delete(sha256_digest)
        self.set_status(200)
        self.jsonify(None)
        self.finish()
    async def patch(self, sha256_digest):
        """Partially update description/name/tags of a memory sample."""
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.MEMORY:
            self.write_warning("memory - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        if not self.request.body:
            self.write_warning("memory - no request body found", 422, sha256_digest)
            self.finish()
            return
        data = escape.json_decode(self.request.body)
        # Only mutable fields may be patched
        data = schema.FileSchema(only=('description', 'name', 'tags'), partial=True).load(data)
        data = schema.FileSchema(only=('description', 'name', 'tags')).dump(data)
        if data.keys():
            await db.async_file_collection.update(sha256_digest, data)
            document = await db.async_file_collection.select(sha256_digest)
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'memory': document})
        self.finish()
    async def put(self, sha256_digest):
        """Replace description/name/tags; fields missing from the body are cleared."""
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.MEMORY:
            self.write_warning("memory - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        if not self.request.body:
            self.write_warning("memory - no request body found", 422, sha256_digest)
            self.finish()
            return
        data = escape.json_decode(self.request.body)
        # PUT semantics: absent fields are reset to empty strings
        if 'description' not in data.keys():
            data['description'] = ''
        if 'name' not in data.keys():
            data['name'] = ''
        if 'tags' not in data.keys():
            data['tags'] = ''
        data = schema.FileSchema(only=('description', 'name', 'tags'), partial=True).load(data)
        data = schema.FileSchema(only=('description', 'name', 'tags')).dump(data)
        await db.async_file_collection.update(sha256_digest, data)
        document = await db.async_file_collection.select(sha256_digest)
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'memory': document})
        self.finish()
class MemoriesHandler(snake_handler.SnakeHandler):
    """Extends `SnakeHandler`.

    Lists memory samples, honouring the filter/sort/limit/order query
    arguments parsed by webargs.
    """
    @tornadoparser.use_args({
        'limit': fields.Str(required=False),
        'operator': fields.Str(required=False, missing='and'),
        'order': fields.Int(required=False, missing=-1),
        'sort': fields.Str(required=False),
    })
    async def get(self, data):
        """Return all (or up to `limit`) memory sample documents."""
        documents = []
        sort = data.get('sort')
        # FIX: parse the limit once instead of re-running int() on every
        # cursor iteration; this also fails fast on a malformed limit.
        limit = int(data['limit']) if 'limit' in data else None
        # Restrict any user-supplied filter to memory-type files only.
        filter_ = self.create_filter(self.request.arguments, data['operator'])
        if filter_:
            filter_ = {
                '$and': [
                    {'file_type': enums.FileType.MEMORY},
                    filter_
                ]
            }
        else:
            filter_ = {'file_type': enums.FileType.MEMORY}
        cursor = db.async_file_collection.select_all(filter_, data['order'], sort)
        while await cursor.fetch_next:
            if limit is not None and len(documents) >= limit:
                break
            documents += [cursor.next_object()]
        documents = schema.FileSchema(many=True).dump(schema.FileSchema(many=True).load(documents))
        self.jsonify({'memories': documents})
        self.finish()
# URL routes for the handlers above; the digest segment is optional so a
# missing value reaches the handler and 404s cleanly.
MemoryRoute = (r"/memory/(?P<sha256_digest>[a-zA-Z0-9]+)?", MemoryHandler)  # pylint: disable=invalid-name
MemoriesRoute = (r"/memories", MemoriesHandler)  # pylint: disable=invalid-name
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,678 | TheoKlein/snake-core | refs/heads/master | /snake/routes/download.py | """ The download route module.
Attributes:
DownloadRoute (tuple): The DownloadRoute.
"""
from snake.core import snake_handler
from snake.db import async_file_collection
from snake.utils import file_storage as fs
from urllib.parse import quote
# pylint: disable=abstract-method
# pylint: disable=arguments-differ
class DownloadHandler(snake_handler.SnakeHandler):
"""Extends `SnakeHandler`."""
async def get(self, sha256_digest):
document = await async_file_collection.select(sha256_digest)
if not document:
self.write_warning("download - no sample for given sha256 digest", 404, sha256_digest)
self.finish()
return
file_storage = fs.FileStorage(sha256_digest)
buf_size = 4096
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Disposition', 'attachment; filename="' + quote(document['name']) + '.inactive"')
with open(file_storage.file_path, 'rb') as f:
while True:
data = f.read(buf_size)
if not data:
break
self.write(data)
self.finish()
DownloadRoute = (r"/download/(?P<sha256_digest>[a-zA-Z0-9]+)?", DownloadHandler) # pylint: disable=invalid-name
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,679 | TheoKlein/snake-core | refs/heads/master | /snake/db.py | """The database module.
This contains all the databse objects leveraging the engines.
Attributes:
command_collection (:obj:`CommandCollection`): The synchronous command collection.
file_collection (:obj:`FileCollection`): The synchronous file collection.
note_collection (:obj:`NoteCollection`): The synchronous note collection.
file_output_collection (:obj:`FileOutputCollection`): The synchronous file output collection.
async_command_collection (:obj:`AsyncCommandCollection`): The asynchronous command collection.
async_file_collection (:obj:`AsyncFileCollection`): The asynchronous file collection.
async_note_collection (:obj:`AsyncNoteCollection`): The asynchronous note collection.
async_file_output_collection (:obj:`AsyncFileOutputCollection`): The asynchronous file output collection.
"""
# pylint: disable=invalid-name
# pylint: disable=unused-import
import pymongo
from motor import motor_asyncio
from snake.config import snake_config
from snake.engines.mongo import command
from snake.engines.mongo import file
from snake.engines.mongo import note
# Two handles onto the same 'snake' database: a blocking pymongo client and
# a motor client for use on the tornado event loop.
__db__ = pymongo.MongoClient(snake_config['mongodb']).snake
__async_db__ = motor_asyncio.AsyncIOMotorClient(snake_config['mongodb']).snake
# Synchronous collections.
command_collection = command.CommandCollection(__db__)
file_collection = file.FileCollection(__db__)
note_collection = note.NoteCollection(__db__)
command_output_collection = command.CommandOutputCollection(__db__)
# Asynchronous collections.
async_command_collection = command.AsyncCommandCollection(__async_db__)
async_file_collection = file.AsyncFileCollection(__async_db__)
async_note_collection = note.AsyncNoteCollection(__async_db__)
async_command_output_collection = command.AsyncCommandOutputCollection(__async_db__)
def test_connection():
    """Check whether the configured mongodb instance is reachable.

    Returns:
        bool: True if the server answered, False if selection timed out.
    """
    client = pymongo.MongoClient(snake_config['mongodb'])
    try:
        client.server_info()
    except pymongo.errors.ServerSelectionTimeoutError:
        return False
    return True
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,680 | TheoKlein/snake-core | refs/heads/master | /snake/core/route_manager.py | """The module that supplies router management.
Currently this only hosts one function that is used to centralise the
generation of routes.
"""
def generate_routes():  # pylint: disable=too-many-locals
    """Generate a list of routes.

    Returns:
        list: The list of API routes.
    """
    # Route modules are imported lazily so they only load when routes
    # are actually generated.
    from snake.routes import api
    from snake.routes import command
    from snake.routes import download
    from snake.routes import file
    from snake.routes import memory
    from snake.routes import scale
    from snake.routes import note
    from snake.routes import store
    from snake.routes import upload
    routes = [api.APIRoute]
    routes += [command.CommandRoute, command.CommandsRoute]
    routes += [download.DownloadRoute]
    routes += [file.FileRoute, file.FilesRoute, file.FileHexRoute]
    routes += [memory.MemoryRoute, memory.MemoriesRoute]
    routes += [scale.ScaleRoute, scale.ScaleCommandsRoute, scale.ScaleInterfaceRoute,
               scale.ScaleUploadRoute, scale.ScalesRoute]
    routes += [note.NoteRoute, note.NotePostRoute, note.NotesRoute]
    routes += [store.StoreSampleRoute, store.StoreRoute]
    routes += [upload.UploadFileRoute, upload.UploadFilesRoute, upload.UploadMemoryRoute]
    return routes
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,681 | TheoKlein/snake-core | refs/heads/master | /snake/enums.py | """The enums module.
This contains all the enums used in snake.
"""
# pylint: disable=too-few-public-methods
class IterableType(type):
    """Base enum class.

    This allows for iteration through the class variables. A little crude but
    works for what is required.
    """
    def __iter__(cls):
        # NOTE(review): dir() on a class also lists metaclass attributes
        # (e.g. 'mro'), so iteration can yield more than the declared string
        # constants. Membership tests on string values are unaffected since
        # the extra objects never compare equal to a string - confirm before
        # tightening this to vars(cls).
        for attr in dir(cls):
            if not attr.startswith("__"):
                yield cls.__getattribute__(cls, attr)
class FileType(metaclass=IterableType):
    """FileType enum.

    Distinguishes the kind of sample stored in the file database: a plain
    file or a memory image.
    """
    FILE = 'file'
    MEMORY = 'memory'
    def __new__(cls, value):
        # Guard clause: anything outside the declared constants is invalid.
        # Note: returns the validated raw string, not a FileType instance.
        if value not in FileType:
            raise ValueError('%s is not a valid FileType' % value)
        return value
class Format(metaclass=IterableType):
    """Format enum.

    This is used to handle the supported output types for response data.
    """
    JSON = 'json'
    MARKDOWN = 'markdown'
    PLAINTEXT = 'plaintext'
    def __new__(cls, value):
        # Acts as a validator: returns the raw string, not a Format instance.
        if value in Format:
            return value
        raise ValueError('%s is not a valid Format' % value)
class InterfaceType(metaclass=IterableType):
    """Interface command type enum.

    The two flavours of interface command: pull-based and push-based.
    """
    PULL = 'pull'
    PUSH = 'push'
    def __new__(cls, value):
        # Guard clause: reject anything outside the declared constants.
        # Note: returns the validated raw string, not an instance.
        if value not in InterfaceType:
            raise ValueError('%s is not a valid InterfaceType' % value)
        return value
class ScaleComponent(metaclass=IterableType):
    """Scales components enum.

    This is used to handle the supported components in scales.
    """
    COMMANDS = 'commands'
    INTERFACE = 'interface'
    UPLOAD = 'upload'
    def __new__(cls, value):
        # Acts as a validator: returns the raw string, not an instance.
        if value in ScaleComponent:
            return value
        raise ValueError('%s is not a valid ScaleComponent' % value)
class Status(metaclass=IterableType):
    """Command status enum.

    This is used to handle the current status of scale commands.
    """
    ERROR = 'error'
    FAILED = 'failed'
    PENDING = 'pending'
    RUNNING = 'running'
    SUCCESS = 'success'
    def __new__(cls, value):
        # Acts as a validator: returns the raw string, not a Status instance.
        if value in Status:
            return value
        raise ValueError('%s is not a valid Status' % value)
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,682 | TheoKlein/snake-core | refs/heads/master | /snake/core/scale_manager.py | """The scale manager module.
This module provides scale management, it does all things scales. i.e. it is
responsible for loading, storing and providing access to scales.
"""
import logging
import os
import pkgutil
import sys
from importlib import util
import pkg_resources
from snake import enums
from snake import error
from snake import scales as core_scales
from snake.config import config_parser
from snake.config import snake_config
app_log = logging.getLogger("tornado.application") # pylint: disable=invalid-name
class ScaleManager():
    """The scale manager.
    This manages everything to do with scales to make life easier in other parts of snake.
    Attributes:
        scales (dict): A dictionary of loaded scales, keyed by scale name.
    """
    def __init__(self, scales=None):
        """Initialise the scale manager.
        Attempts to load in all installed scales, and will gracefully handle
        any errors encountered when trying to load a scale and present it in
        the log.
        Args:
            scales (list, optional): A list of specific scales to load. Defaults to None.
        """
        self.scales = {}
        self.__load_scales(scales)
    def __load_scale(self, scale_name, scale_path):
        """Load a scale.
        Will attempt to load in a scale. It is added to the scale dictionary on success.
        Note:
            A scale with the same name as another will be replaced.
            Load failures (SnakeError/ImportError) are caught and logged,
            not raised to the caller.
        Args:
            scale_name (str): The scale to load.
            scale_path (str): The path to the scale's __init__.py.
        """
        try:
            namespace = "snake.scales.{}".format(scale_name)
            spec = util.spec_from_file_location(namespace, scale_path)
            mod = util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            sys.modules[namespace] = mod  # Allow for import by name
            config_parser.load_scale_config(scale_name)  # TODO: Handle me better?!
            # The scale package must expose a module-level `__scale__` object.
            scale = mod.__scale__
            scale.load_components()
            self.scales[scale_name] = scale
        except error.SnakeError as err:
            app_log.error('%s - %s', scale_name, err)
        except ImportError as err:
            app_log.error('%s - %s', scale_name, err)
    def __load_scales(self, scales=None):
        """Load scales.
        Loads the scales using all three supported methods. This will load the
        core scales, followed by the pip installed scales, and wrapped up by
        loading an user specified scales. Later sources replace earlier ones
        on a name clash.
        Notes:
            An empty list will result in no scales being loaded.
        Args:
            scales (list, optional): A list of specific scales to load. Defaults to None.
        """
        # Core: packages shipped inside snake.scales itself.
        for _imp, mod_name, is_pkg in pkgutil.iter_modules(core_scales.__path__):
            if is_pkg:
                if isinstance(scales, list) and mod_name not in scales:
                    continue
                scale_path = os.path.join(core_scales.__path__[0], mod_name)
                self.__load_scale(mod_name, os.path.join(scale_path, "__init__.py"))
        # Pip: packages advertising the 'snake.scales' entry point.
        for entry_point in pkg_resources.iter_entry_points(group='snake.scales'):
            if isinstance(scales, list) and entry_point.name not in scales:
                continue
            loader = pkgutil.get_loader(entry_point.module_name)
            self.__load_scale(entry_point.name, loader.path)
        # User: directories listed in the snake configuration.
        for directory in snake_config['snake_scale_dirs']:
            directory = os.path.abspath(os.path.expanduser(directory))
            if not os.path.isdir(directory):
                app_log.error("snake scale directory provided is not a directory: %s", directory)
                continue
            # Get the first layer of directories and give these to `iter_modules`
            scale_dirs = [os.path.join(directory, x) for x in os.listdir(directory) if os.path.isdir(os.path.join(directory, x))]
            for imp, mod_name, is_pkg in pkgutil.iter_modules(scale_dirs):
                if is_pkg:
                    if isinstance(scales, list) and mod_name not in scales:
                        continue
                    scale_path = os.path.join(imp.path, mod_name)
                    self.__load_scale(mod_name, os.path.join(scale_path, "__init__.py"))
    # General
    @staticmethod
    def get_component(scale, component):
        """Gets the component for a scale.
        This will return the specified component for the given scale if supported.
        Args:
            scale (:obj:`Scale`): The scale.
            component (:obj:`ScaleComponent`): The component to get.
        Returns:
            obj: The requested component.
        Raises:
            ScaleError: If the component is not supported or provided.
        """
        if component == enums.ScaleComponent.COMMANDS:
            if component not in scale.components:
                raise error.ScaleError("scale does not provide commands: %s" % scale.name)
            return scale.components[enums.ScaleComponent.COMMANDS]
        if component == enums.ScaleComponent.INTERFACE:
            if component not in scale.components:
                raise error.ScaleError("scale does not provide interface: %s" % scale.name)
            return scale.components[enums.ScaleComponent.INTERFACE]
        if component == enums.ScaleComponent.UPLOAD:
            if component not in scale.components:
                raise error.ScaleError("scale does not provide upload: %s" % scale.name)
            return scale.components[enums.ScaleComponent.UPLOAD]
        raise error.ScaleError("component is not supported")
    def get_scale(self, scale, file_type=None):
        """Get a scale.
        Gets the scale for a given name. If the `file_type` is given then scales will be restricted to those supported.
        Args:
            scale (str): The name of the scale.
            file_type (:obj:`FileType`, optional): The type of the file. Defaults to None.
        Returns:
            `Scale`: The requested scale.
        Raises:
            ScaleError: If it is not supported for the given `FileType` or if the scale is not found.
        """
        if scale in self.scales.keys():
            _scale = self.scales[scale]
            # An empty/None `supports` list means the scale supports all types.
            if file_type and _scale.supports and file_type not in _scale.supports:
                raise error.ScaleError("scale does not support file type: %s" % file_type)
            return _scale
        raise error.ScaleError('scale not found')
    def get_scales(self, file_type=None):
        """Get scales information.
        Gets the information about all loaded scales. If the `file_type` is
        given then any unsupported scales are ignored.
        Args:
            file_type (:obj:`FileType`, optional): The type of the file. Defaults to None.
        Returns:
            list: A list of dictionaries containing scale information.
        """
        scales = []
        for _, v in self.scales.items():
            if file_type and v.supports and file_type not in v.supports:
                continue
            scales += [v.info()]
        return scales
    def reload_scales(self):
        """Reloads the scales.
        Just calls `__load_scales` to reload them.
        NOTE(review): called with no arguments, so this reloads *all*
        discoverable scales even if the manager was constructed with a
        restricted list - confirm this is intended.
        """
        self.__load_scales()
    # Command
    def get_autoruns(self, file_type=None):
        """Get autoruns.
        Gets all autorun commands. If the `file_type` is given then this is
        restricted to those supported.
        Args:
            file_type (:obj:`FileType`, optional): The type of the file. Defaults to None.
        Returns:
            list: A list of autorun tuples (scale, command, mime).
        """
        autoruns = []
        for k, v in self.scales.items():
            if enums.ScaleComponent.COMMANDS not in v.components:
                continue
            if file_type and v.supports and file_type not in v.supports:
                continue
            cmd = v.components['commands']
            for i in cmd.command_list:
                if i.__autorun__:
                    autoruns += [(k, i.__name__, i.cmd_opts.mime)]
        return autoruns
    # Interface
    @staticmethod
    def get_interface_command(interface, interface_type, command):
        """Get interface command.
        Gets the command from an interface, these are either push or pull based commands.
        Args:
            interface (:obj:`Interface`): The interface.
            interface_type (:obj:`InterfaceType`): The interface command type.
            command (str): The command.
        Returns:
            func: The requested command.
        """
        # InterfaceType() validates the string and raises ValueError if it
        # is neither 'pull' nor 'push'.
        i_type = enums.InterfaceType(interface_type)
        if i_type == enums.InterfaceType.PULL:
            return interface.snake.puller(command)
        if i_type == enums.InterfaceType.PUSH:
            return interface.snake.pusher(command)
        return None  # XXX: Should never get here.
    # Upload
    # None
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,683 | TheoKlein/snake-core | refs/heads/master | /tests/test_scale.py | import pytest
from snake import scale
from snake.error import ScaleError
def test_command_decorator():
    """
    Test the command decorator: with no options, with empty options, and
    with an invalid option key.
    """
    def f(args, file, opts):  # pylint: disable=unused-argument
        return 'abcd'
    # Check no cmd_opts
    cmd = scale.command()(f)
    assert cmd.__command__ is True
    assert cmd.__wrapped__ is f
    assert isinstance(cmd.cmd_opts, scale.CommandOptions)
    # assert cmd(None, None, None) is 'abcd'
    # Check cmd_opts
    cmd = scale.command({})(f)
    assert cmd.__command__ is True
    assert cmd.__wrapped__ is f
    assert isinstance(cmd.cmd_opts, scale.CommandOptions)
    # assert cmd(None, None, None) is 'abcd'
    # Check invalid cmd_opts
    with pytest.raises(TypeError):
        cmd = scale.command({'invalid': None})(f)
def test_scales():
    """
    Test the Scale class: mandatory fields, attribute storage and info().
    """
    # A scale config missing its mandatory fields must be rejected.
    with pytest.raises(ScaleError):
        scale.Scale({})
    mod = scale.Scale({
        'name': 'abcd',
        'description': '1234',
        'author': '1234',
        'version': '1.0',
        'supports': [
            'binary',
            'memory'
        ]
    })
    assert mod.name == 'abcd'
    # FIX: use '==', not 'is' - identity comparison with an int literal is a
    # CPython caching artefact and a SyntaxWarning since Python 3.8.
    assert len(mod.components) == 0
    assert mod.description == '1234'
    assert mod.author == '1234'
    assert mod.version == '1.0'
    assert mod.supports == ['binary', 'memory']
    assert mod.info() == {
        "name": mod.name,
        "description": mod.description,
        "author": mod.author,
        "version": mod.version,
        "components": [],
        "supports": mod.supports
    }
def test_command_options():
    """
    Check the defaults of a freshly constructed CommandOptions.
    """
    opts = scale.CommandOptions()
    assert opts.args == {}
    assert opts.info == 'No help available!'
    assert opts.mime is None
def test_commands():
    """
    Test the Commands class
    """
    # Commands is abstract - instantiating it directly must fail.
    with pytest.raises(TypeError):
        scale.Commands()  # pylint: disable=abstract-class-instantiated
    class Commands(scale.Commands):
        def check(self):
            pass
        @scale.command()
        def test(self, args, file, opts):  # pylint: disable=unused-argument, no-self-use
            return 'abcd'
    cmds = Commands()
    # The snake helper exposes metadata for every @command-decorated method.
    assert cmds.snake.info() == [{
        'command': 'test',
        'args': None,
        'info': 'No help available!',
        'formats': ['json']
    }]
    assert cmds.snake.command('test').__name__ == 'test'
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,684 | TheoKlein/snake-core | refs/heads/master | /snake/utils/submitter.py | """The submitter module.
This is used to submit new samples to snake without using the API. This is
useful for scales that wish to upload as part of their workflow.
"""
from datetime import datetime
from snake import db
from snake import error
from snake import schema
from snake.utils import file_storage as fs
def submit(file_schema, file_type, file, parent, scale_name):  # pylint: disable=too-many-branches
    """Submit a new file to Snake.

    This is used generally by the command component of scales to submit a new
    file into snake. The parent/child relationship between the new file and
    `parent` is recorded on both documents.

    Args:
        file_schema (dict): Metadata for the new file (validated via FileSchema).
        file_type: The type of the new file (stored on the document).
        file (:obj:`FileStorage`): The new file to submit.
        parent (:obj:`FileStorage`): The already stored file that produced it.
        scale_name (str): The submitting scale; recorded as submission type
            'scale:<scale_name>'.

    Returns:
        dict: The stored document for the submitted file.

    Raises:
        TypeError: If `file_schema`, `file` or `parent` have the wrong type.
        SnakeError: If the file cannot be saved or submitted.
    """
    # We need to be safe here so instance check the above
    if not isinstance(file_schema, dict):
        raise TypeError("file_schema must be of type dict")
    if not isinstance(file, fs.FileStorage):
        # FIX: message previously said 'FileSchema' but the check is FileStorage.
        raise TypeError("file must be of type FileStorage")
    if not isinstance(parent, fs.FileStorage):
        raise TypeError("parent must be of type FileStorage")
    # If the hashes are the same, just stop
    if file.sha256_digest == parent.sha256_digest:
        return db.file_collection.select(file.sha256_digest)
    # Create submission type
    submission_type = 'scale:{}'.format(scale_name)
    # Check if the file to submit is already in Snake, if not lets add it
    document = db.file_collection.select(file.sha256_digest)
    if not document:
        # Validate
        data = schema.FileSchema().dump(schema.FileSchema().load(file_schema))
        # Save the file
        if not file.save(move=True):
            raise error.SnakeError("could not save new file to disk for hash {}".format(file.sha256_digest))
        data.update(file.to_dict())
        # NOTE: Don't set the parent we will do this later, so blank them out
        # if the scale tried to be smart
        data['children'] = {}
        data['parents'] = {}
        data['submission_type'] = submission_type
        data['timestamp'] = datetime.utcnow()
        data = schema.FileSchema().dump(data)
        data['file_type'] = file_type  # load_only=True
        # Save
        db.file_collection.insert(data)
    # Update the parent child relationships
    document = db.file_collection.select(file.sha256_digest)
    if document:
        # HACK: This is needed to get submission_type of parent
        p = db.file_collection.select(parent.sha256_digest)
        # Check if the parent and type already exist
        if 'parents' not in document:
            document['parents'] = {}
        if parent.sha256_digest in document['parents']:
            if submission_type in document['parents'][parent.sha256_digest]:
                return document
            else:
                # NOTE(review): the membership test above uses `submission_type`
                # but the value appended is the parent's submission type -
                # confirm this asymmetry is intended.
                document['parents'][parent.sha256_digest] += [p["submission_type"]]
        else:
            document['parents'][parent.sha256_digest] = [p["submission_type"]]
        # Validate
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        # Update
        db.file_collection.update(file.sha256_digest, document)
        # Update the parents children
        document = db.file_collection.select(parent.sha256_digest)
        if not document:  # Parent does not exist it has been delete, don't update it
            return db.file_collection.select(file.sha256_digest)
        if 'children' not in document:
            document['children'] = {}
        if file.sha256_digest in document['children']:
            if submission_type in document['children'][file.sha256_digest]:
                return db.file_collection.select(file.sha256_digest)
            else:
                document['children'][file.sha256_digest] += [submission_type]
        else:
            document['children'][file.sha256_digest] = [submission_type]
        # Validate
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        # Update
        db.file_collection.update(parent.sha256_digest, document)
    else:
        raise error.SnakeError("could not submit new file for hash {}".format(file.sha256_digest))
    return db.file_collection.select(file.sha256_digest)
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,685 | TheoKlein/snake-core | refs/heads/master | /snake/scales/strings/__init__.py | # pylint: disable=missing-docstring
from snake.config import constants
from snake.scale import scale, FileType
# Scale descriptor read by the ScaleManager when it loads this module.
__scale__ = scale(
    name='strings',
    description='a module to extract strings from files',
    version=constants.VERSION,
    author="Countercept",
    supports=[
        FileType.FILE  # operates on regular file samples only
    ]
)
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,686 | TheoKlein/snake-core | refs/heads/master | /snake/utils/markdown.py | """The markdown module."""
# pylint: disable=invalid-name
def bold(text):
    """Wrap *text* in markdown bold markers.

    Args:
        text (str): text to make bold.

    Returns:
        str: bold text.
    """
    return "**{}**".format(text)
def code(text, inline=False, lang=''):
    """Format *text* as markdown code.

    Args:
        text (str): text to format as code.
        inline (bool, optional): produce inline code, ignoring the lang argument. Defaults to False.
        lang (str, optional): language tag for the fenced block. Defaults to ''.

    Returns:
        str: code text.
    """
    if inline:
        return "`{0}`".format(text)
    return "```{0}\r\n{1}\r\n```".format(lang, text)
def cr():
    """Carriage Return (Line Break).

    Returns:
        str: a CRLF line-break sequence.
    """
    return "\r\n"
def _heading(level, text):
    """Render *text* as a markdown heading of the given *level*.

    Shared implementation for h1-h4, which previously duplicated the
    same concatenation four times.
    """
    return '#' * level + ' ' + text + '\r\n'


def h1(text):
    """Heading 1.

    Args:
        text (str): text to make heading 1.

    Returns:
        str: heading 1 text.
    """
    return _heading(1, text)


def h2(text):
    """Heading 2.

    Args:
        text (str): text to make heading 2.

    Returns:
        str: heading 2 text.
    """
    return _heading(2, text)


def h3(text):
    """Heading 3.

    Args:
        text (str): text to make heading 3.

    Returns:
        str: heading 3 text.
    """
    return _heading(3, text)


def h4(text):
    """Heading 4.

    Args:
        text (str): text to make heading 4.

    Returns:
        str: heading 4 text.
    """
    return _heading(4, text)
def newline():
    """New Line.

    Returns:
        str: a CRLF new-line sequence.
    """
    return "\r\n"
def paragraph(text):
    """Terminate *text* with a markdown line break, forming a paragraph.

    Args:
        text (str): text to make into a paragraph.

    Returns:
        str: paragraph text.
    """
    return "{}\r\n".format(text)
def sanitize(text):
    """Sanitize text.

    This attempts to remove formatting that could be mistaken for markdown.

    Args:
        text (str): text to sanitise.

    Returns:
        str: sanitised text.
    """
    # str.replace is already a no-op when the needle is absent, so the
    # previous `if needle in text` guards were redundant.  Replacement
    # order matters only in that none of the substitutions reintroduce
    # a later needle ('(3xbacktick)' and '(pipe)' contain no '|' or '_').
    return (text
            .replace('```', '(3xbacktick)')
            .replace('|', '(pipe)')
            .replace('_', r'\_'))
def table_header(columns=None):
    """Table header.

    Creates markdown table headings.

    Args:
        columns (iterable, optional): column headings. Defaults to None,
            which is treated as an empty header (previously this raised
            TypeError when iterating None).

    Returns:
        str: markdown table header (heading row plus separator row).
    """
    # Materialise so any iterable works and len() is safe.
    columns = tuple(columns) if columns else ()
    heading = '|' + ''.join(' ' + c + ' |' for c in columns) + '\r\n'
    separator = '|' + ' --- |' * len(columns) + '\r\n'
    return heading + separator
def table_row(columns=None):
    """Table row.

    Creates markdown table row.

    Args:
        columns (iterable, optional): column data. Defaults to None, which
            is treated as an empty row (previously this raised TypeError
            when iterating None).

    Returns:
        str: markdown table row.
    """
    columns = tuple(columns) if columns else ()
    return '|' + ''.join(' ' + c + ' |' for c in columns) + '\r\n'
def url(text, url_):
    """Build a markdown link.

    Args:
        text (str): text for the link.
        url_ (str): target of the link.

    Returns:
        str: markdown url.
    """
    return "[{0}]({1})".format(text, url_)
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,687 | TheoKlein/snake-core | refs/heads/master | /snake/snake_utility.py | #!/usr/bin/python3
"""The snake command line utility.
This file exists to try and make the management of Snake easier. Currently its
only capability is to try and ease installation of Scales. But it will probably
do more at some point.
Examples:
snake install SCALE_NAME
snake install https://pypi.python.org/pypi/snake_SCALE_NAME
snake install git+https://github.com/countercept/snake-scales/SCALE_NAME
snake check SCALE_NAME
"""
import argparse
import imp
import os
from os import path
import shutil
import subprocess
import sys
import pkg_resources
from snake.config import constants
from snake.core import scale_manager as sm
# pylint: disable=missing-docstring
# Repository used when a bare scale name is given (pip VCS "subdirectory" syntax).
DEFAULT_REPO = "git+https://github.com/countercept/snake-scales#subdirectory="
# Allow pip to install into the user's python path.
# NOTE(review): the original comment said "first path" but the code takes the
# LAST colon-separated entry of SNAKE_PYTHON_DIR - confirm which is intended.
TARGET_DIR = os.getenv("SNAKE_PYTHON_DIR", None)
if TARGET_DIR:
    TARGET_DIR = TARGET_DIR.split(":")[-1]
def check(scale):
    """Report whether *scale* can be loaded by the ScaleManager.

    Args:
        scale (str): name of the scale to verify.
    """
    print("Checking: {}".format(scale))
    scale_manager = sm.ScaleManager([])
    # Deliberate call to the name-mangled private loader so only this one
    # scale is loaded for the check.
    scale_manager._ScaleManager__load_scales([scale])  # pylint: disable=no-member, protected-access
    if scale not in scale_manager.scales:
        print("Scale not loadable: {}".format(scale))
    else:
        print("Scale loadable: {}".format(scale))
def install(scales, upgrade=False):
    """Install scales with pip3, deploy their example configs and verify them.

    Args:
        scales (list): scale names, pip package URLs or VCS URLs.
        upgrade (bool, optional): pass ``--upgrade`` to pip. Defaults to False.
    """
    # Check for pip3
    pip3 = shutil.which('pip3')
    if not pip3:
        print("Could not find 'pip3'!")
        sys.exit(1)
    # Pre process: split each argument into a (repository prefix, scale name)
    # pair; a bare name falls back to the default scales repository.
    scales_ = []
    for scale in scales:
        # TODO: Make robust cater for all scenarios
        if len(scale.rsplit('=', 1)) > 1:  # XXX: Handle subdirectory, don't assume it is last arg!
            r, s = scale.rsplit('=', 1)  # pylint: disable=invalid-name
            scales_ += [("{}=".format(r), s)]
        elif len(scale.rsplit('/', 1)) > 1:
            r, s = scale.rsplit('/', 1)  # pylint: disable=invalid-name
            scales_ += [("{}/".format(r), s)]
        else:
            scales_ += [(DEFAULT_REPO, scale)]
    # Install scales
    for repo, scale in scales_:  # pylint: disable=invalid-name
        print("Installing: {}".format(scale))
        args = [pip3, 'install']
        if TARGET_DIR:
            args += ['--prefix', TARGET_DIR]
        if upgrade:
            args += ['--upgrade']
        proc = subprocess.run([*args, '{}{}'.format(repo, scale)])
        if proc.returncode:
            print("Failed to install: {}".format(scale))
            sys.exit(1)
        # Copy config if present: ship the packaged config as an .example and
        # seed the live config from it only if one does not already exist.
        # NOTE(review): `imp` is deprecated since Python 3.4 - consider importlib.
        scale_path = imp.find_module('snake_{}'.format(scale))[1]
        if path.exists(path.join(scale_path, '{}.conf'.format(scale))):
            scales_dir = path.join(constants.ETC_DIR, 'scales')
            if not path.exists(scales_dir):
                os.makedirs(scales_dir, exist_ok=True)
            shutil.copy(path.join(scale_path, '{}.conf'.format(scale)), path.join(scales_dir, '{}.conf.example'.format(scale)))
            if not path.exists(path.join(scales_dir, '{}.conf'.format(scale))):
                shutil.copy(path.join(scales_dir, '{}.conf.example'.format(scale)), path.join(scales_dir, '{}.conf'.format(scale)))
    # Reload pkg_resources so the freshly installed packages are visible
    imp.reload(pkg_resources)
    # Check installed scales
    for _repo, scale in scales_:
        check(scale)
def main():
    """Parse the command line and dispatch to the requested sub-command."""
    # NOTE: subparsers bug: https://bugs.python.org/issue9253#msg186387
    parser = argparse.ArgumentParser(prog='snake')
    subparsers = parser.add_subparsers(help='command help')
    subparsers.required = True
    subparsers.dest = 'command'
    check_parser = subparsers.add_parser('check', help='check the status of a scale')
    check_parser.add_argument('scale', nargs=1, help='a scale')
    install_parser = subparsers.add_parser('install', help='install a scale or multiple scales')
    install_parser.add_argument('scale', nargs='+', help='a scale or multiple scales')
    upgrade_parser = subparsers.add_parser('upgrade', help='upgrade a scale or multiple scales')
    upgrade_parser.add_argument('scale', nargs='+', help='a scale or multiple scales')
    args = parser.parse_args()
    if args.command == 'check':
        check(args.scale[0])  # XXX: Hmm, namespace clash? nargs is 1
    elif args.command == 'install':
        install(args.scale)
    elif args.command == 'upgrade':
        install(args.scale, upgrade=True)
# Script entry point when run directly (also exposed as a console script).
if __name__ == "__main__":
    main()
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,688 | TheoKlein/snake-core | refs/heads/master | /snake/utils/file_storage.py | """The file storage module."""
import hashlib
import os
from os import path
import shutil
import string
import magic
from snake import error
from snake.config import snake_config
class FileStorage():
    """File storage, used to manage files through snake.

    Attributes:
        sha256_digest (str): The hash of the file.
        file_path (str): The path to the file.
        magic (str): The magic of the file.
        mime (str): The mime of the file.
        size (int): The size of the file.
    """

    def __init__(self, sha256_digest=None):
        """Initialise, optionally binding to a file already in the store.

        Args:
            sha256_digest (str, optional): hash of an existing stored file;
                when given, the file's metadata is loaded from disk.
                Defaults to None.

        Raises:
            error.ServerError: if a hash is given but the file is missing.
        """
        self.sha256_digest = sha256_digest
        if self.sha256_digest:
            try:
                self.file_path = self.path()
                self.magic = magic.from_file(self.file_path)
                self.mime = magic.from_file(self.file_path, mime=True)
                self.size = self._size()
            except FileNotFoundError as err:
                # Chain the original exception for a useful traceback.
                raise error.ServerError('File not found on disk') from err

    def to_dict(self):
        """Dictionary representation of the class.

        Returns:
            dict: A dictionary containing: magic, mime, sha256_digest and size.
        """
        return {
            'magic': self.magic,
            'mime': self.mime,
            'sha256_digest': self.sha256_digest,
            'size': self.size
        }

    def save(self, move=False):
        """Save storage file.

        This will only save the file if the paths don't match. By default it
        copies so the original file remains intact.

        Args:
            move (bool, optional): Whether to move or just copy the file.
                Defaults to False.

        Returns:
            bool: True on success, False on failure.
        """
        # Only save if the path does NOT match the canonical store path
        true_path = self.path()
        if self.file_path == true_path:
            return True
        directory, _ = path.split(true_path)
        os.makedirs(directory, exist_ok=True)
        if move:
            if not shutil.move(self.file_path, true_path):
                return False
        else:
            if not shutil.copy(self.file_path, true_path):
                return False
        self.file_path = true_path
        return True

    def create(self, file_path, sha256_digest=None):
        """Create storage file.

        This function is used to create a file and should only be called on a
        clean FileStorage instance. It will not save the object to disk, the
        save function must be called to do this. If the sha256_digest is not
        provided it will be calculated.

        Args:
            file_path (str): The path to the file.
            sha256_digest (str, optional): The hash for the file.
                Defaults to None.

        Returns:
            bool: True on success.
        """
        self.file_path = file_path
        self.sha256_digest = sha256_digest
        if not self.sha256_digest:
            # Hash in 4 KiB chunks so large files don't need to fit in memory.
            sha2 = hashlib.sha256()
            with open(self.file_path, 'rb') as f:
                chunk = f.read(4096)
                while chunk:
                    sha2.update(chunk)
                    chunk = f.read(4096)
            self.sha256_digest = sha2.hexdigest()
        self.magic = magic.from_file(self.file_path)
        self.mime = magic.from_file(self.file_path, mime=True)
        self.size = self._size()
        return True

    def delete(self):
        """Delete storage file.

        Returns:
            bool: True on success, False on failure.
        """
        if self.file_path == self.path():
            # BUG FIX: os.remove() returns None, so the previous
            # `return os.remove(...)` always returned a falsy value even on
            # success, contradicting the documented contract.
            os.remove(self.file_path)
            return True
        return False

    def directory(self):
        """Get the directory for the storage file.

        Files are fanned out two levels deep by hash prefix (ab/cd/abcd...).

        Returns:
            str: Directory to the storage file.
        """
        return path.join(path.abspath(path.expanduser(snake_config['file_db'])), self.sha256_digest[0:2], self.sha256_digest[2:4])

    def path(self):
        """Get the path for the storage file.

        Returns:
            str: Path to the storage file.
        """
        return path.join(path.abspath(path.expanduser(snake_config['file_db'])), self.sha256_digest[0:2], self.sha256_digest[2:4], self.sha256_digest)

    def _size(self):
        # Refresh and cache the on-disk size of the managed file.
        self.size = path.getsize(self.file_path)
        return self.size

    def to_hexdump(self, lines=16):
        """Create a hex dump for the storage file.

        Args:
            lines (int): Number of 16-byte lines to hexdump. Defaults to 16.

        Returns:
            str: hexdump of storage file.
        """
        buf = ''
        with open(self.file_path, 'rb') as f:
            counter = 0
            for chunk in iter(lambda: f.read(16), b""):
                if counter >= lines:
                    break
                _hex = [('%02x' % i) for i in chunk]
                buf += '%07x: %-39s %s\n' % (
                    (counter * 16),
                    (' '.join([''.join(_hex[i:i + 2]) for i in range(0, len(_hex), 2)])),
                    (''.join([chr(c) if chr(c) in string.printable[:-5] else '.' for c in chunk])))
                counter += 1
        return buf
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,689 | TheoKlein/snake-core | refs/heads/master | /tests/test_error.py | import pytest
from snake import error
def test_snake_error():
    """
    Test the class SnakeError
    """
    # Constructing with no message must fail.
    with pytest.raises(TypeError):
        error.SnakeError()  # pylint: disable=no-value-for-parameter
    err = error.SnakeError('hello')
    assert 'hello' in err.message
    assert err.status_code is None
    assert err.payload is None
    err = error.SnakeError('hello', 500)
    assert 'hello' in err.message
    # BUG FIX: the original used `500 is err.status_code` / `'extra' is
    # err.payload`; identity comparison with literals is a SyntaxWarning on
    # Python 3.8+ and only worked via CPython constant interning.
    assert err.status_code == 500
    assert err.payload is None
    err = error.SnakeError('hello', 500, 'extra')
    assert 'hello' in err.message
    assert err.status_code == 500
    assert err.payload == 'extra'
def test_command_error():
    """
    Test the class CommandError
    """
    exc = error.CommandError('hello')
    assert 'hello' in exc.message
    assert exc.status_code == 500
    assert exc.payload is None
def test_scale_error():
    """
    Test the class ScaleError
    """
    err = error.ScaleError('hello')
    assert 'hello' in err.message
    assert err.status_code == 500
    assert None is err.payload
def test_mongo_error():
    """
    Test the class MongoError
    """
    err = error.MongoError('hello')
    assert 'hello' in err.message
    assert err.status_code == 500
    assert None is err.payload
def test_server_error():
    """
    Test the class ServerError
    """
    err = error.ServerError('hello')
    assert 'hello' in err.message
    assert err.status_code == 500
    assert None is err.payload
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,690 | TheoKlein/snake-core | refs/heads/master | /snake/config/config.py | """The configuration module.
This module contains all the classes and code in order to share configuration
information across snake.
"""
import sys
from os import path
import pkg_resources
import yaml
from snake import error
from snake.config import constants
class Config:
    """The config class passed around snake to share configuration variables.

    The config class should really only be instantiated once and then passed
    around as required by other files. This means that any updates to this
    object will be shared across without the need to restart snake.

    Attributes:
        scale_configs (dict): Configuration parameters for snake scales,
            keyed by scale name.
        snake_config (dict): Configuration parameters for snake itself.
    """
    def __init__(self, config_file=None):
        """Initialise the config object

        This will clear out the dictionaries and load in the base configuration
        for snake. If a configuration file is supplied this will be loaded
        'ontop' of the base configuration.

        Args:
            config_file (:obj:`str`, optional): The path to an external
                configuration file. Defaults to None.
        """
        self.scale_configs = {}
        self.snake_config = {}
        self.load_config(config_file)
    def load_config(self, config_file=None):
        """Load the snake configuration files.

        This loads the base configuration and along with an external
        configuration if supplied. Later loads overlay (dict.update) earlier
        ones, so user settings win over packaged defaults. Any parse failure
        is fatal: the error is printed and the process exits.

        Args:
            config_file (str, optional): The path to an external
                configuration file. Defaults to None.

        Raises:
            Exception: When any error occurs in loading a configuration from
                file.
        """
        # Load base config shipped inside the snake package
        config_path = pkg_resources.resource_filename("snake", "data/config/snake.conf")
        try:
            with open(config_path, 'rb') as stream:
                base_config = yaml.safe_load(stream)
                self.snake_config.update(base_config)
        except Exception as err:
            print("Failed to parse base config file: %s" % err)
            sys.exit(1)
        # If user specified config file then use that otherwise try and load
        # from etc config
        if config_file:
            if not path.exists(config_file):
                print("Not a valid config_file: %s" % config_file)
                sys.exit(1)
            try:
                with open(config_file, 'rb') as stream:
                    snake_config = yaml.safe_load(stream)
                    self.snake_config.update(snake_config)
            except Exception as err:
                print("Failed to parse user config file: %s" % err)
                sys.exit(1)
        else:
            # /etc/snake (constants.ETC_DIR) - optional system-wide override
            etc_conf = path.join(path.abspath(path.expanduser(constants.ETC_DIR)), 'snake.conf')
            if path.exists(etc_conf):
                try:
                    etc_config = {}
                    with open(etc_conf, 'rb') as stream:
                        etc_config = yaml.safe_load(stream)
                        self.snake_config.update(etc_config)
                except Exception as err:
                    print("Failed to parse etc config file: %s" % err)
                    sys.exit(1)
    def load_scale_config(self, scale_name):
        """Load a scale configuration from file

        This loads the scale configuration files based on the scale name
        passed. It will load the base config along with the etc configuration
        if present; the etc config overlays the packaged one.

        Args:
            scale_name (str): The name of the scale to load the configuration
                for.

        Raises:
            SnakeError: When the external configuration file fails to load.
        """
        self.scale_configs[scale_name] = {}
        # Load base if we need one (a .conf packaged with the scale)
        config_path = pkg_resources.resource_filename("snake.scales.{}".format(scale_name), "{}.conf".format(scale_name))
        if path.exists(config_path):
            with open(config_path, 'rb') as stream:
                base_config = yaml.safe_load(stream)
                self.scale_configs[scale_name].update(base_config)
        # Try and load from etc config
        etc_conf = path.join(path.abspath(path.expanduser(constants.ETC_DIR)), "scales", "{}.conf".format(scale_name))
        if path.exists(etc_conf):
            try:
                etc_config = {}
                with open(etc_conf, 'rb') as stream:
                    etc_config = yaml.safe_load(stream)
                    if etc_config is None:  # The config file is empty this is fine
                        etc_config = {}
                    self.scale_configs[scale_name].update(etc_config)
            except Exception as err:
                raise error.SnakeError('failed to load config: {}: {} - {}'.format(etc_conf, err.__class__, err))
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,691 | TheoKlein/snake-core | refs/heads/master | /snake/routes/upload.py | """ The file route module.
Attributes:
UploadFileRoute (tuple): The UploadFileRoute.
UploadFilesRoute (tuple): The UploadFilesRoute.
UploadMemoryRoute (tuple): The UploadMemoryRoute.
"""
import hashlib
from os import path
from tornado import escape
from webargs import tornadoparser
from snake import db
from snake import enums
from snake import error
from snake import fields
from snake import schema
from snake.core import route_support
from snake.core import snake_handler
# pylint: disable=arguments-differ
class UploadFileSchema(schema.FileSchema):
    """Extends `FileSchema` with upload-only options."""
    name = fields.Str(required=False)  # Override: filled from the uploaded filename if absent
    extract = fields.Bool(missing=False)  # Extract a zip archive before storing
    password = fields.Str(missing=None)  # Optional password for the archive
class UploadFileHandler(snake_handler.StreamHandler):
    """Extends `SnakeHandler`."""
    @tornadoparser.use_args(UploadFileSchema())
    async def post(self, data):
        """Store a single uploaded file sample, rejecting duplicates (409)."""
        if data == []:
            self.write_warning("upload/file - no request body found", 422)
            self.finish()
            return
        if 'file' not in self.request.files:
            self.write_warning("upload/file - no 'file' in part", 422)
            self.finish()
            return
        # Set name if missing
        if 'name' not in data:
            data['name'] = self.request.files['file'][0]['filename']
        # NOTE(review): StreamHandler appears to place a temp-file *path* in
        # 'body' (hence the decode) - the original "offset and size" comment
        # looks stale; confirm against StreamHandler.
        f_path = self.request.files['file'][0]['body'].decode('utf-8')
        # Extract if required, zip only
        if data['extract']:
            try:
                f_path = await route_support.unzip_file(f_path, data['password'])
            except error.SnakeError as err:
                self.write_warning("upload/file - {}".format(err), 422)
                self.finish()
                return
        # Update name if not overriden
        if data['name'] == self.request.files['file'][0]['filename']:
            data['name'] = path.basename(f_path)
        # Set submission type
        data['submission_type'] = 'upload:file'
        # Hash the file in 4 KiB chunks
        sha2 = hashlib.sha256()
        with open(f_path, 'rb') as f:
            chunk = f.read(4096)
            while chunk:
                sha2.update(chunk)
                chunk = f.read(4096)
        sha256_digest = sha2.hexdigest()
        # Check if the file already exists
        document = await db.async_file_collection.select(sha256_digest)
        if document:
            document = schema.FileSchema().dump(schema.FileSchema().load(document))
            self.write_warning("upload/file - file already exists for given sha256 digest", 409, {'sample': document})
            self.finish()
            return
        # Save the file and add it to the database
        document = await route_support.store_file(sha256_digest, f_path, enums.FileType.FILE, data)
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'sample': document})
        self.finish()
class UploadFilesHandler(snake_handler.StreamHandler):
    """Extends `SnakeHandler`."""
    async def post(self):
        """Store multiple uploaded file samples described by a 'data' JSON part."""
        # XXX: Does not support extraction atm
        # curl 'http://127.0.0.1:5000/upload/files' -F '0=@./file1' -F '1=@./file2' -F 'data={0:{"name": "file1"}, 1:{"name": "file2"}}'
        #
        data = {}
        try:
            data = self.get_argument('data')
        except Exception:  # noqa
            data = {}
        if data == {}:
            missing_fields = {}
            missing_fields['data'] = ["Missing data for required field."]
            self.write_warning(missing_fields, 422)
            self.finish()
            return
        try:
            data = escape.json_decode(data)
        except Exception:  # noqa
            self.write_warning("upload/files - must be content type application/json", 422, data)
            self.finish()
            return
        # Data is optional we do not check that it keys correctly, to avoid
        # some errors later down the line prevalidate the data dictionaries
        data_arrays = []
        for k, v in data.items():
            if 'name' not in v:
                v['name'] = self.request.files[k][0]['filename']
            data_arrays += [v]
        # Validate with discard we need the keys
        data_arrays = schema.FileSchema(many=True).load(data_arrays)
        schema.FileSchema(many=True).dump(data_arrays)
        # Upload the files
        documents = []
        for k, v in data.items():
            # Set submission type
            v['submission_type'] = 'upload:file'
            # NOTE(review): 'body' appears to hold a temp-file path written by
            # StreamHandler - confirm; the original comment mentioned
            # "offset and size".
            f_path = self.request.files[k][0]['body'].decode('utf-8')
            # Hash the file in 4 KiB chunks
            sha2 = hashlib.sha256()
            with open(f_path, 'rb') as f:
                chunk = f.read(4096)
                while chunk:
                    sha2.update(chunk)
                    chunk = f.read(4096)
            sha256_digest = sha2.hexdigest()
            # Check if the file already exists, if so add to documents, but there is no need to upload it
            document = await db.async_file_collection.select(sha256_digest)
            if document:
                documents += [document]
            else:
                documents += [await route_support.store_file(sha256_digest, f_path, enums.FileType.FILE, v)]
        documents = schema.FileSchema(many=True).dump(schema.FileSchema(many=True).load(documents))
        self.jsonify({'samples': documents})
        self.finish()
class UploadMemoryHandler(snake_handler.StreamHandler):
    """Extends `SnakeHandler`."""
    @tornadoparser.use_args(UploadFileSchema())
    async def post(self, data):
        """Store a single uploaded memory sample, rejecting duplicates (409)."""
        if data == []:
            self.write_warning("upload/memory - no request body found", 422)
            self.finish()
            return
        if 'file' not in self.request.files:
            self.write_warning("upload/memory - no 'file' in part", 422)
            self.finish()
            return
        # Set name if missing
        if 'name' not in data:
            data['name'] = self.request.files['file'][0]['filename']
        # NOTE(review): 'body' appears to hold a temp-file path written by
        # StreamHandler - confirm; the original comment mentioned
        # "offset and size".
        f_path = self.request.files['file'][0]['body'].decode('utf-8')
        # Extract if required, zip only
        if data['extract']:
            try:
                f_path = await route_support.unzip_file(f_path, data['password'])
            except error.SnakeError as err:
                self.write_warning("upload/memory - {}".format(err), 422)
                self.finish()
                return
        # Update name if not overriden
        if data['name'] == self.request.files['file'][0]['filename']:
            data['name'] = path.basename(f_path)
        # Set submission type
        data['submission_type'] = 'upload:memory'
        # Hash the file in 4 KiB chunks
        sha2 = hashlib.sha256()
        with open(f_path, 'rb') as f:
            chunk = f.read(4096)
            while chunk:
                sha2.update(chunk)
                chunk = f.read(4096)
        sha256_digest = sha2.hexdigest()
        # Check if the file already exists
        document = await db.async_file_collection.select(sha256_digest)
        if document:
            document = schema.FileSchema().dump(schema.FileSchema().load(document))
            self.write_warning("upload/memory - memory already exists for given sha256 digest", 409, {'sample': document})
            self.finish()
            return
        # Save the file and add it to the database
        document = await route_support.store_file(sha256_digest, f_path, enums.FileType.MEMORY, data)
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'sample': document})
        self.finish()
# Route tuples consumed by the route manager: (url pattern, handler class).
UploadFileRoute = (r"/upload/file", UploadFileHandler)  # pylint: disable=invalid-name
UploadFilesRoute = (r"/upload/files", UploadFilesHandler)  # pylint: disable=invalid-name
UploadMemoryRoute = (r"/upload/memory", UploadMemoryHandler)  # pylint: disable=invalid-name
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,692 | TheoKlein/snake-core | refs/heads/master | /snake/core/celery.py | """All things celery.
This module contains everything needed for snake to communicate with celery.
This means that there are functions that will be shared by snake along with the
snake workers in the snake_pit.
Notes:
* The code in this file is a bit messy due to some issues that were met
when using celery. To work around these some assumptions and hacks have
been used. These have been marked with XXX.
Attributes:
celery (:obj:`Celery`): The celery object used by snake and celery.
Todo:
* Address the XXXs found within this file
"""
import asyncio
import logging
import json
import os
import signal
import subprocess
import time
from datetime import datetime
import pymongo
from celery import Celery
from celery import exceptions
from celery.app import task
from celery.worker import request
from snake import enums
from snake import error
from snake import schema
from snake.config import snake_config
from snake.core import scale_manager
from snake.engines.mongo import command
# pylint: disable=abstract-method
# pylint: disable=invalid-name
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
# pylint: disable=reimported
# XXX: There are some super awful assumptions here to deal with 'stuck' Popen
# processes from causing defunct processes once killed. We add in a sleep to
# hope that the Popen objects will communicate or wait! Killing pgid is not a
# good idea as the worker will die too. A smarter person will be able to fix
# this :)
app_log = logging.getLogger("tornado.application")
class SnakeRequest(request.Request):
    """Extend `Request`.
    This is done to allow us to kill errant child processes as celery does not
    seem to supply this out of the box.
    """
    def kill_child_processes(self, parent_pid, sig=signal.SIGKILL): # pylint: disable=no-self-use
        """Kill child processes for the PID supplied
        This will try to look for any children and kill them with the signal
        supplied.
        Args:
            parent_pid (int): The process id for which the children will be
                killed.
            sig (:obj:`int`): The signal used to kill the child processes.
        """
        # List the direct children of parent_pid. '--ppid'/'--noheaders' are
        # GNU procps options, so this is Linux-specific.
        proc = subprocess.run(
            ['ps', '-o', 'pid', '--ppid', '%d' % parent_pid, '--noheaders'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        if proc.returncode: # Non-zero exit: no children found (or ps failed).
            return
        # split() leaves a trailing empty string after the final newline,
        # hence the [:-1].
        for pid_str in proc.stdout.decode('utf-8').split("\n")[:-1]:
            try:
                os.kill(int(pid_str), sig)
                time.sleep(2) # Allow subprocess tasks to flush or we will have defunct until the worker exits
            except OSError: # Process already exited; nothing to do.
                pass
    def on_timeout(self, soft, timeout):
        """Extend `on_timeout`.
        This overrides `on_timeout` so that when timeout limits are met
        children processes can be killed
        """
        # XXX: The correct way?!
        # super().on_timeout(self, soft, timeout)
        # _on_timeout is the original request.Request.on_timeout, stashed by
        # the module-level monkey patch further down this file.
        self._on_timeout(soft, timeout)
        if soft:
            # Soft limit hit: give children a chance to terminate cleanly.
            self.kill_child_processes(self.worker_pid, signal.SIGTERM)
        else:
            # Hard limit hit: default signal (SIGKILL).
            self.kill_child_processes(self.worker_pid)
class SnakeTask(task.Task):
    """Extend `Task`.
    This is a simple extension just used to set the extended Request class.
    """
    # Point celery at the child-killing Request subclass.
    # NOTE(review): the @celery.task decorator below does not pass
    # base=SnakeTask (see the XXX comments); the module-level monkey patch is
    # what actually applies the behaviour - confirm this class is still needed.
    Request = SnakeRequest
# XXX: Monkey patch because the recommended method below base=... does not seem
# to be working!
# Stash the original on_timeout as _on_timeout so SnakeRequest.on_timeout can
# delegate to it, then graft the child-killing behaviour onto every Request
# celery creates.
request.Request._on_timeout = request.Request.on_timeout
request.Request.kill_child_processes = SnakeRequest.kill_child_processes
request.Request.on_timeout = SnakeRequest.on_timeout
# The snake config dict doubles as the celery configuration.
celery = Celery('snake', **snake_config)
# Hard overrides
# Force pickle serialisation regardless of configuration.
# NOTE(review): unpickling executes arbitrary code - the broker must be trusted.
celery.conf.update(accept_content=['pickle'])
celery.conf.update(result_serializer='pickle')
celery.conf.update(task_serializer='pickle')
# XXX: The correct way?!
# @celery.task(base=SnakeTask, time_limit=630, soft_time_limit=600)
@celery.task(time_limit=630, soft_time_limit=600)
def execute_command(command_schema):
    """Execute the command on the celery worker
    This is the task used by celery for the workers. It will execute the
    command and update the database as required.
    Args:
        command_schema (:obj:`CommandSchema`): The command schema to execute.
    Note:
        The command is marked RUNNING before execution and the final status
        (SUCCESS/FAILED) plus the command output are always persisted in the
        `finally` block, even when the command raises.
    """
    from snake.config import snake_config # XXX: Reload config, bit hacky but required
    with pymongo.MongoClient(snake_config['mongodb']) as connection:
        try:
            # NOTE: We assume the _output_id is always NULL!
            command_collection = command.CommandCollection(connection.snake)
            command_output_collection = command.CommandOutputCollection(connection.snake)
            # Persist the RUNNING state before doing any work so observers
            # see accurate status while the command executes.
            command_schema['start_time'] = datetime.utcnow()
            command_schema['status'] = enums.Status.RUNNING
            command_schema = schema.CommandSchema().dump(command_schema)
            command_collection.update(command_schema['sha256_digest'], command_schema['scale'], command_schema['command'], command_schema['args'], command_schema)
            command_schema = schema.CommandSchema().load(command_schema)
            # Resolve the scale's command component and run the command.
            scale_manager_ = scale_manager.ScaleManager([command_schema['scale']])
            scale = scale_manager_.get_scale(command_schema['scale'])
            commands = scale_manager_.get_component(scale, enums.ScaleComponent.COMMANDS)
            cmd = commands.snake.command(command_schema['command'])
            output = cmd(args=command_schema['args'], sha256_digest=command_schema['sha256_digest'])
            command_schema['status'] = enums.Status.SUCCESS
        except error.CommandWarning as err:
            # Expected, recoverable failure raised by the command itself.
            output = {'error': str(err)}
            command_schema['status'] = enums.Status.FAILED
            app_log.warning(err)
        except (error.SnakeError, error.MongoError, TypeError) as err:
            output = {'error': str(err)}
            command_schema['status'] = enums.Status.FAILED
            app_log.error(err)
        except (exceptions.SoftTimeLimitExceeded, exceptions.TimeLimitExceeded, BrokenPipeError) as err:
            # Raised when the task exceeds the limits declared in the
            # decorator above.
            output = {'error': 'time limit exceeded'}
            command_schema['status'] = enums.Status.FAILED
            app_log.exception(err)
        except Exception as err:
            # Catch-all boundary: never leak internals to the caller.
            output = {'error': 'a server side error has occurred'}
            command_schema['status'] = enums.Status.FAILED
            app_log.exception(err)
        else:
            # Test serialising of scale output as it could fail and we need to catch that
            try:
                json.dumps(output)
            except TypeError as err:
                output = {'error': 'failed to serialize scale output - {}'.format(err)}
        finally:
            # Always record the end time and the (possibly error) output.
            # NOTE(review): if the collection constructors above raise, the
            # locals used here are unbound and this block will itself fail -
            # confirm whether that can happen in practice.
            command_schema['end_time'] = datetime.utcnow()
            command_schema = schema.CommandSchema().dump(command_schema)
            _output_id = command_output_collection.put(command_schema['command'], bytes(json.dumps(output), 'utf-8'))
            command_schema['_output_id'] = _output_id
            command_collection.update(command_schema['sha256_digest'], command_schema['scale'], command_schema['command'], command_schema['args'], command_schema)
async def wait_for_task(task):
    """Poll a synchronous celery task until it completes.

    Yields control back to the event loop between polls, so waiting on a
    synchronous task does not block other coroutines.

    Args:
        task: (:obj:`Task`): The celery task to wait on.

    Returns:
        The task's result once it is ready.
    """
    while not task.ready():
        await asyncio.sleep(1)
    return task.result
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,693 | TheoKlein/snake-core | refs/heads/master | /snake/scales/hashes/commands.py | # pylint: disable=missing-docstring
# pylint: disable=no-self-use
# pylint: disable=unused-argument
import hashlib
import logging
import shutil
from snake import db
from snake import enums
from snake import error
from snake import scale
from snake.utils import markdown as md
app_log = logging.getLogger("tornado.application") # pylint: disable=invalid-name
# pydeep (ssdeep bindings) is an optional dependency; when it is missing the
# ssdeep-based commands are simply not defined (see the has_ssdeep guards in
# the Commands class below).
try:
    import pydeep
    has_ssdeep = True # pylint: disable=invalid-name
except ImportError as err:
    app_log.warning('fuzzy search disabled - optional dependencies not met: %s', err)
    has_ssdeep = False # pylint: disable=invalid-name
class Commands(scale.Commands):
    """Hash commands for the hashes scale.

    Each digest command computes a hash of the stored sample and caches it
    on the sample's mongo document, so later calls just read the stored
    value. The ssdeep commands are only defined when the optional pydeep
    dependency imported successfully (see `has_ssdeep` above).
    """
    def check(self):
        # Only warn (do not fail) when the 'ssdeep' binary is absent.
        if has_ssdeep:
            ssdeep = shutil.which('ssdeep')
            if not ssdeep:
                app_log.warning('binary \'ssdeep\' not found')
    @scale.command({
        'info': 'list all calculated hashes'
    })
    def all(self, args, file, opts):
        """Collect every supported digest for the sample into one dict."""
        # NOTE(review): the digest methods are invoked as
        # method(None, sha256_digest); the @scale.command decorator evidently
        # rebinds their signatures - hence the pylint disables.
        if has_ssdeep:
            fuzzy = self.ssdeep(None, file.sha256_digest) # pylint: disable=no-value-for-parameter
        else:
            fuzzy = None
        md5_digest = self.md5_digest(None, file.sha256_digest) # pylint: disable=no-value-for-parameter
        sha1_digest = self.sha1_digest(None, file.sha256_digest) # pylint: disable=no-value-for-parameter
        sha512_digest = self.sha512_digest(None, file.sha256_digest) # pylint: disable=no-value-for-parameter
        # Fall back to 'n/a' for any digest that could not be calculated.
        output = {
            'md5_digest': md5_digest['md5_digest'] if md5_digest is not None else 'n/a',
            'sha1_digest': sha1_digest['sha1_digest'] if sha1_digest is not None else 'n/a',
            'sha256_digest': file.sha256_digest,
            'sha512_digest': sha512_digest['sha512_digest'] if sha512_digest is not None else 'n/a',
            'ssdeep': fuzzy['ssdeep'] if fuzzy is not None else 'n/a'
        }
        return output
    @staticmethod
    def all_markdown(json):
        """Render the `all` output as a markdown table."""
        output = md.table_header(('Hash Type', 'Hash'))
        output += md.table_row(('MD5', json['md5_digest']))
        output += md.table_row(('SHA1', json['sha1_digest']))
        output += md.table_row(('SHA256', json['sha256_digest']))
        output += md.table_row(('SHA512', json['sha512_digest']))
        output += md.table_row(('SSDEEP', json['ssdeep']))
        return output
    @scale.autorun
    @scale.command({
        'info': 'calculates the md5 hash for the file'
    })
    def md5_digest(self, args, file, opts):
        """Return the sample's MD5 digest, computing and caching it if absent."""
        document = db.file_collection.select(file.sha256_digest)
        if 'md5_digest' not in document:
            md5_hash = hashlib.md5()
            # Hash in 4KiB chunks so large samples are never fully in memory.
            with open(file.file_path, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    md5_hash.update(chunk)
            md5_digest = md5_hash.hexdigest()
            data = {'md5_digest': md5_digest}
            if not db.file_collection.update(file.sha256_digest, data):
                raise error.MongoError('error adding md5_digest into file document %s' % file.sha256_digest)
            # Re-select so the returned value reflects what is stored.
            document = db.file_collection.select(file.sha256_digest)
        return {'md5_digest': document['md5_digest']}
    @staticmethod
    def md5_digest_plaintext(json):
        """Plaintext rendering of the `md5_digest` output."""
        return json['md5_digest']
    @scale.autorun
    @scale.command({
        'info': 'calculates the sha1 hash for the file'
    })
    def sha1_digest(self, args, file, opts):
        """Return the sample's SHA1 digest, computing and caching it if absent."""
        document = db.file_collection.select(file.sha256_digest)
        if 'sha1_digest' not in document:
            sha1_hash = hashlib.sha1()
            # Same chunked hashing pattern as md5_digest.
            with open(file.file_path, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    sha1_hash.update(chunk)
            sha1_digest = sha1_hash.hexdigest()
            data = {'sha1_digest': sha1_digest}
            if not db.file_collection.update(file.sha256_digest, data):
                raise error.MongoError('error adding sha1_digest into file document %s' % file.sha256_digest)
            document = db.file_collection.select(file.sha256_digest)
        return {'sha1_digest': document['sha1_digest']}
    @staticmethod
    def sha1_digest_plaintext(json):
        """Plaintext rendering of the `sha1_digest` output."""
        return json['sha1_digest']
    @scale.autorun
    @scale.command({
        'info': 'calculates the sha512 hash for the file'
    })
    def sha512_digest(self, args, file, opts):
        """Return the sample's SHA512 digest, computing and caching it if absent."""
        document = db.file_collection.select(file.sha256_digest)
        if 'sha512_digest' not in document:
            sha512_hash = hashlib.sha512()
            # Same chunked hashing pattern as md5_digest.
            with open(file.file_path, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    sha512_hash.update(chunk)
            sha512_digest = sha512_hash.hexdigest()
            data = {'sha512_digest': sha512_digest}
            if not db.file_collection.update(file.sha256_digest, data):
                raise error.MongoError('error adding sha512_digest into file document %s' % file.sha256_digest)
            document = db.file_collection.select(file.sha256_digest)
        return {'sha512_digest': document['sha512_digest']}
    @staticmethod
    def sha512_digest_plaintext(json):
        """Plaintext rendering of the `sha512_digest` output."""
        return json['sha512_digest']
    if has_ssdeep: # Optional Dependency
        @scale.autorun
        @scale.command({
            'info': 'calculates the ssdeep hash for the file'
        })
        def ssdeep(self, args, file, opts):
            """Return the sample's ssdeep fuzzy hash, computing/caching if absent."""
            document = db.file_collection.select(file.sha256_digest)
            if 'ssdeep' not in document:
                fuzzy = str(pydeep.hash_file(file.file_path), encoding="utf-8")
                data = {'ssdeep': fuzzy}
                if not db.file_collection.update(file.sha256_digest, data):
                    raise error.MongoError('error adding ssdeep hash into file document %s' % file.sha256_digest)
                document = db.file_collection.select(file.sha256_digest)
            return {'ssdeep': document['ssdeep']}
        @staticmethod
        def ssdeep_plaintext(json):
            """Plaintext rendering of the `ssdeep` output."""
            return json['ssdeep']
        @scale.command({
            'info': 'perform fuzzy hash search on the file passed'
        })
        def fuzzy_search(self, args, file, opts):
            """Find stored files whose ssdeep hash is similar to this sample's.

            Returns a list of (name, sha256, store path, score) tuples for
            matches scoring above the threshold.
            """
            results = []
            fuzzy = bytes(self.ssdeep(None, file.sha256_digest)['ssdeep'], 'utf-8') # pylint: disable=no-value-for-parameter
            documents = db.file_collection.select_all({'file_type': enums.FileType.FILE}) # Only get file type 'file'
            for document in documents:
                if 'ssdeep' not in document:
                    # Lazily compute (and cache) missing ssdeep hashes.
                    _ssdeep = self.ssdeep(None, document['sha256_digest'])['ssdeep'] # pylint: disable=no-value-for-parameter
                else:
                    _ssdeep = document['ssdeep']
                _ssdeep = bytes(_ssdeep, 'utf-8')
                if _ssdeep == fuzzy: # Skip identical hashes (the sample itself).
                    continue
                score = pydeep.compare(fuzzy, _ssdeep)
                if score > 40: # Similarity threshold.
                    # NOTE(review): pydeep.compare is invoked a second time
                    # here; 'score' could simply be reused.
                    results.append((document['name'],
                                    document['sha256_digest'],
                                    document['file_type'],
                                    pydeep.compare(fuzzy, _ssdeep)))
            output = []
            for result in results:
                output += [(str(result[0]), result[1], '/store/' + result[1], str(result[3]))]
            return output
        @staticmethod
        def fuzzy_search_markdown(json):
            """Render the `fuzzy_search` output as a markdown table."""
            output = md.table_header(('File Name', 'SHA256', 'Match (%)'))
            count = 0
            for j in json:
                output += md.table_row((md.url(str(j[0]), 'samples/' + str(j[1])),
                                        str(j[1]),
                                        str(j[3])))
                count += 1
            if count:
                output += md.paragraph(md.bold('Hits:') + str(count))
            else:
                # No matches: emit a placeholder row so the table renders.
                output += md.table_row(('-', '-', '-'))
                output += md.paragraph(md.bold('Hits:') + '0')
            return output
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,694 | TheoKlein/snake-core | refs/heads/master | /snake/scales/strings/commands.py | # pylint: disable=missing-docstring
# pylint: disable=no-self-use
# pylint: disable=unused-argument
import shutil
import subprocess
from snake import error
from snake import fields
from snake import scale
from snake.scales.strings import regex
# Shell metacharacters used when validating unix-path matches; note the last
# entry is the two-character string '\\' (an escaped backslash), not a single
# backslash.
SPECIAL_CHARS = [" ", "'", '(', '"', '|', '&', '<', '`', '!', '>', ';', '$', ')', '\\\\']
class Commands(scale.Commands):
    """String extraction commands for the strings scale.

    Wraps the system `strings` binary. `interesting` additionally filters
    the extracted strings against a table of indicator regexes (IPs,
    emails, URLs, MACs, dates, Windows/unix paths).
    """

    # (rule name, compiled pattern) pairs that all share the same
    # match-and-minimum-length check; applied in this order.
    _SIMPLE_RULES = (
        ('IPV4_REGEX', regex.IPV4_REGEX),
        ('IPV6_REGEX', regex.IPV6_REGEX),
        ('EMAIL_REGEX', regex.EMAIL_REGEX),
        ('URL_REGEX', regex.URL_REGEX),
        ('DOMAIN_REGEX', regex.DOMAIN_REGEX),
        ('WINDOWS_PATH_REGEX', regex.WINDOWS_PATH_REGEX),
        ('MAC_REGEX', regex.MAC_REGEX),
        ('DATE1_REGEX', regex.DATE1_REGEX),
        ('DATE2_REGEX', regex.DATE2_REGEX),
        ('DATE3_REGEX', regex.DATE3_REGEX),
    )

    def check(self):
        """Raise a CommandWarning when the `strings` binary is missing."""
        strings = shutil.which('strings')
        if not strings:
            raise error.CommandWarning("Binary 'strings' not found")

    @scale.command({
        'info': 'This function will return strings found within the file'
    })
    def all_strings(self, args, file, opts):
        """Run `strings` on the sample and return one list entry per line."""
        return str(subprocess.check_output(["strings", file.file_path]), encoding="utf-8").split('\n')

    @staticmethod
    def all_strings_plaintext(json):
        """Plaintext rendering of the `all_strings` output."""
        return '\n'.join(json)

    @staticmethod
    def _unix_path_is_valid(match_str):
        """Heuristic validation of a unix-path regex hit.

        Fully quoted paths are accepted outright, as are paths containing
        no shell special characters. Otherwise, special characters must be
        escaped with a backslash.

        NOTE(review): behaviour preserved from the original implementation -
        only the first occurrence of each special character is inspected,
        and the last special character checked decides the outcome,
        overriding earlier failures. Confirm before tightening.
        """
        if ((match_str.startswith("'") and match_str.endswith("'")) or (match_str.startswith('"') and match_str.endswith('"'))):
            return True
        if not any(char in SPECIAL_CHARS for char in match_str):
            return True
        valid_path = True
        for special in SPECIAL_CHARS:
            if special in match_str:
                index = match_str.index(special)
                if index > 0 and match_str[index - 1] != "\\":
                    valid_path = False
                else:
                    valid_path = True
        return valid_path

    @scale.command({
        'args': {
            'min_length': fields.Int(default=5)
        },
        'info': 'This function will return interesting strings found within the file'
    })
    def interesting(self, args, file, opts):
        """Return strings matching at least one indicator regex.

        A rule only fires when the matched text is longer than the
        `min_length` argument. Each entry is formatted as
        '<string> (<rule>, <rule>, ...)'.

        NOTE(review): as in the original, a unix-path match at or below
        min_length drops the string entirely, even when other rules have
        already matched - confirm this is intended before changing.
        """
        strings = str(subprocess.check_output(["strings", file.file_path]), encoding="utf-8").split('\n')
        min_length = args['min_length']
        output = []
        for string in strings:
            rules = []
            # Table-driven checks replace ten copy-pasted match blocks.
            for name, pattern in self._SIMPLE_RULES:
                match = pattern.search(string)
                if match and len(match.group()) > min_length:
                    rules.append(name)
            # Unix paths need extra validation to cut down false positives.
            match = regex.UNIX_PATH_REGEX.search(string)
            if match:
                match_str = match.group()
                if len(match_str) <= min_length:
                    continue
                if self._unix_path_is_valid(match_str):
                    rules.append('UNIX_PATH_REGEX')
            if rules:
                output.append('{} ({})'.format(string, ', '.join(rules)))
        return output

    @staticmethod
    def interesting_plaintext(json):
        """Plaintext rendering of the `interesting` output."""
        return '\n'.join(json)
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,695 | TheoKlein/snake-core | refs/heads/master | /snake/routes/file.py | """ The file route module.
Attributes:
FileHexRoute (tuple): The FileHexRoute.
FileRoute (tuple): The FileRoute.
FilesRoute (tuple): The FilesRoute.
"""
from tornado import escape
from webargs import tornadoparser
from snake import db
from snake import enums
from snake import error
from snake import fields
from snake import schema
from snake import utils
from snake.core import snake_handler
# pylint: disable=abstract-method
# pylint: disable=arguments-differ
class FileHexHandler(snake_handler.SnakeHandler):
    """Extends `SnakeHandler`."""
    async def get(self, sha256_digest):
        """Return a hex dump of the stored sample.

        Responds 404 unless the digest maps to a document of type FILE.
        """
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.FILE:
            self.write_warning("file/hex - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        file_storage = utils.FileStorage(sha256_digest)
        # 16 bytes per hexdump row.
        data = file_storage.to_hexdump(16)
        self.jsonify({'hex': data})
        self.finish()
class FileHandler(snake_handler.SnakeHandler):
    """Extends `SnakeHandler`."""
    async def get(self, sha256_digest):
        """Return the sample's metadata document, or 404 if absent."""
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.FILE:
            self.write_warning("file - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        # Round-trip through the schema to normalise the stored document.
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'file': document})
        self.finish()
    async def delete(self, sha256_digest):
        """Delete the sample's stored bytes and its metadata document."""
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.FILE:
            self.write_warning("file - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        try:
            file_storage = utils.FileStorage(sha256_digest)
            file_storage.delete()
        except error.SnakeError: # Means the file is missing so no harm in removal from db
            pass
        await db.async_file_collection.delete(sha256_digest)
        self.set_status(200)
        self.jsonify(None)
        self.finish()
    async def patch(self, sha256_digest):
        """Partially update the sample's mutable metadata.

        Only 'description', 'name' and 'tags' can be changed; anything else
        in the body is dropped by the schema whitelist.
        """
        # NOTE: We only allow updating of 'description', 'name' and 'tags'
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.FILE:
            self.write_warning("file - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        if not self.request.body:
            self.write_warning("file - no request body found", 422, sha256_digest)
            self.finish()
            return
        data = escape.json_decode(self.request.body)
        # load/dump through the whitelisted schema strips disallowed fields.
        data = schema.FileSchema(only=('description', 'name', 'tags'), partial=True).load(data)
        data = schema.FileSchema(only=('description', 'name', 'tags')).dump(data)
        if data.keys():
            await db.async_file_collection.update(sha256_digest, data)
            document = await db.async_file_collection.select(sha256_digest)
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'file': document})
        self.finish()
    async def put(self, sha256_digest):
        """Replace the sample's mutable metadata.

        Mutable fields missing from the body are reset to empty strings;
        fixed fields cannot be cleared (hence "pseudo PUT").
        """
        # NOTE: This is a pseudo PUT as we won't allow clearing of fixed fields
        document = await db.async_file_collection.select(sha256_digest)
        if not document or document['file_type'] != enums.FileType.FILE:
            self.write_warning("file - no sample for given sha256 digest", 404, sha256_digest)
            self.finish()
            return
        if not self.request.body:
            self.write_warning("file - no request body found", 422, sha256_digest)
            self.finish()
            return
        data = escape.json_decode(self.request.body)
        if 'description' not in data.keys():
            data['description'] = ''
        if 'name' not in data.keys():
            data['name'] = ''
        if 'tags' not in data.keys():
            # NOTE(review): tags defaults to '' like the string fields -
            # confirm the schema expects a string rather than a list here.
            data['tags'] = ''
        data = schema.FileSchema(only=('description', 'name', 'tags'), partial=True).load(data)
        data = schema.FileSchema(only=('description', 'name', 'tags')).dump(data)
        await db.async_file_collection.update(sha256_digest, data)
        document = await db.async_file_collection.select(sha256_digest)
        document = schema.FileSchema().dump(schema.FileSchema().load(document))
        self.jsonify({'file': document})
        self.finish()
class FilesHandler(snake_handler.SnakeHandler):
    """Extends `SnakeHandler`."""
    @tornadoparser.use_args({
        'limit': fields.Str(required=False),
        'operator': fields.Str(required=False, missing='and'),
        'order': fields.Int(required=False, missing=-1),
        'sort': fields.Str(required=False)
    })
    async def get(self, data):
        """List stored samples of type FILE.

        Query parameters (parsed by webargs into `data`) control filtering
        ('operator' combines the remaining raw arguments), sort field,
        sort order, and an optional result limit.
        """
        documents = []
        sort = None
        if 'sort' in data.keys():
            sort = data['sort']
        # Build a user filter from the raw query arguments, then AND it with
        # the fixed file-type restriction so only FILE documents are listed.
        filter_ = self.create_filter(self.request.arguments, data['operator'])
        if filter_:
            filter_ = {
                '$and': [
                    {'file_type': enums.FileType.FILE},
                    filter_
                ]
            }
        else:
            filter_ = {'file_type': enums.FileType.FILE}
        cursor = db.async_file_collection.select_all(filter_, data['order'], sort)
        index = 0
        # Async cursor iteration (fetch_next/next_object - presumably a
        # motor cursor; confirm against db.async_file_collection).
        while await cursor.fetch_next:
            if 'limit' in data.keys():
                if index >= int(data['limit']):
                    break
            index += 1
            documents += [cursor.next_object()]
        documents = schema.FileSchema(many=True).dump(schema.FileSchema(many=True).load(documents))
        self.jsonify({'files': documents})
        self.finish()
# (url pattern, handler) route tuples exported by this module.
FileHexRoute = (r"/file/(?P<sha256_digest>[a-zA-Z0-9]+)?/hex", FileHexHandler) # pylint: disable=invalid-name
FileRoute = (r"/file/(?P<sha256_digest>[a-zA-Z0-9]+)?", FileHandler) # pylint: disable=invalid-name
FilesRoute = (r"/files", FilesHandler) # pylint: disable=invalid-name
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,696 | TheoKlein/snake-core | refs/heads/master | /snake/config/__init__.py | """This module exposes the initialised config object.
Attributes:
config_parser (:obj:`Config`): The configuration parser for snake.
scale_configs (dict): Convenient access to the scale_configs dictionary.
snake_config (dict): Convenient access to the snake_config dictionary.
"""
from snake.config import config
# pylint: disable=invalid-name
# Initialise the configuration parser once at import time; the names below
# are convenience aliases so callers can import the dicts directly.
config_parser = config.Config()
scale_configs = config_parser.scale_configs
snake_config = config_parser.snake_config
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,697 | TheoKlein/snake-core | refs/heads/master | /snake/routes/api.py | """ The API route module.
Attributes:
APIRoute (tuple): The APIRoute.
"""
from snake.config import constants
from snake.core import snake_handler
# pylint: disable=abstract-method
# pylint: disable=arguments-differ
class APIHandler(snake_handler.SnakeHandler):
    """Extends `SnakeHandler`."""
    async def get(self):
        """Report the API version supported by this server."""
        self.jsonify({'api_version': constants.API_VERSION})
        self.finish()
APIRoute = (r"/api", APIHandler) # pylint: disable=invalid-name
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,698 | TheoKlein/snake-core | refs/heads/master | /snake/scale.py | """The scale module.
This contains most if not all the code required to create a scale for snake.
"""
import abc
import copy
import functools
import importlib
import json as j
import logging
import pkgutil
import sys
from snake import enums
from snake import error
from snake import schema
from snake import utils
from snake.enums import FileType
# pylint: disable=too-few-public-methods

# Module-level logger shared by all scale machinery; routed through
# tornado's application logger so scale errors appear in the main log.
app_log = logging.getLogger("tornado.application")  # pylint: disable=invalid-name

# Public re-export so scales can do `from snake.scale import FileType`.
__all__ = [
    "FileType"
]
class BaseOptions():
    """Shared options container for scale component functions.

    Attributes:
        args (dict, optional): Valid arguments for a function. Defaults to None.
        info (str, optional): The information about a function. Defaults to 'No help available!'
        mime (str, optional): The mime restriction on the function. Defaults to None.
    """

    def __init__(self, args=None, info='No help available!', mime=None):
        # A missing or empty `args` collapses to a fresh empty dict.
        self.args = args if args else {}
        self.info = info
        self.mime = mime
class CommandOptions(BaseOptions):
    """Extends `BaseOptions`.

    Options attached to command functions in the command component.
    """
class PullOptions(BaseOptions):
    """Extends `BaseOptions`.

    Options attached to pull functions in the interface component.
    """
class PushOptions(BaseOptions):
    """Extends `BaseOptions`.

    Options attached to push functions in the interface component.
    """
class Commands(metaclass=abc.ABCMeta):
    """The command component.

    This is used to create a command component for a scale. The command
    component is used to run commands on a sample (it does what it says on the
    tin). The commands are run on the sample and the output is stored into the
    database in json format. Supporting functions can be created that will
    convert the json to additional formats if so desired.

    This class defines abstract functions that must be overriden in order for a
    scale to support a command component.

    Attributes:
        command_list (list): A list of all the command functions.
        snake (:obj:`Snake`): For use internally by snake. (Do not touch)!
    """

    class Snake:
        """A collection of private methods for use by snake.

        Attributes:
            __cmd (:obj:`Commands`): The parent `Commands` object.
        """

        def __init__(self, cmd):
            self.__cmd = cmd

        def __formats(self, cmd):
            """A list of supported formats for a command.

            This automatically finds the supporting formatting functions for a
            command by probing the parent for `<command>_<format>` methods.

            Args:
                cmd (str): The name of the command in question.

            Returns:
                list: The list of formatting functions for the command.
            """
            # JSON is always supported - it is the storage format.
            fmts = [enums.Format.JSON]
            for f in enums.Format:
                if f == enums.Format.JSON:
                    continue
                # e.g. 'info_markdown' advertises markdown support for 'info'.
                # NOTE(review): assumes Format members format as their plain
                # string value - confirm against snake.enums.
                func = '{}_{}'.format(cmd, f)
                if hasattr(self.__cmd, func):
                    fmts += [f]
            return fmts

        def command(self, command_name):
            """Get a command function.

            Get the function for a given command name.

            Args:
                command_name (str): The name of the command to get.

            Returns:
                func: The command function requested.

            Raises:
                CommandError: If the command is not supported.
            """
            for i in self.__cmd.command_list:
                if i.__name__ == command_name:
                    return i
            raise error.CommandError('commands does not support command: {}'.format(command_name))

        def command_info(self, cmd):
            """Get the information for a command.

            This extracts and returns the 'useful' information for a command.

            Args:
                cmd (fun): The command function to extract the information from.

            Returns:
                dict: A dictionary containing the: command, args, formats, and info.
            """
            return {
                'command': cmd.__name__,
                'args': {k: v.to_dict() for k, v in cmd.cmd_opts.args.items()} if cmd.cmd_opts.args else None,
                'formats': self.__formats(cmd.__name__),
                'info': cmd.cmd_opts.info
            }

        def format(self, fmt, cmd, json):
            """Format a commands output.

            This formats the commands json output into a alternative but
            supported format. So if the command supports the requested format
            the json will be converted to this and returned. This never alters
            the data in the database.

            Args:
                fmt (str): The format to convert the output to.
                cmd (func): The command.
                json (str): The json to convert.

            Returns:
                str: The formatted output.

            Raises:
                TypeError: If the format is not supported by the enum.
            """
            # Output read from storage may still be raw bytes.
            if isinstance(json, bytes):
                json = j.loads(json.decode('utf-8'))
            if fmt not in enums.Format:
                raise TypeError('format not supported')
            if fmt == enums.Format.JSON:
                return json
            if fmt == enums.Format.MARKDOWN:
                func = '%s_markdown' % cmd
            elif fmt == enums.Format.PLAINTEXT:
                func = '%s_plaintext' % cmd
            else:
                raise TypeError('format not supported')
            if not hasattr(self.__cmd, func):
                raise TypeError('format not supported')
            if json is None:  # Running or pending
                return json
            if isinstance(json, dict) and 'error' in json:  # Handle error message formating
                # NOTE: fmt == JSON is unreachable here (already returned
                # above), so only the two branches below can fire.
                if fmt == enums.Format.JSON:
                    return json
                if fmt == enums.Format.MARKDOWN:
                    return '**' + json['error'] + '**'
                elif fmt == enums.Format.PLAINTEXT:
                    return json['error']
            return self.__cmd.__getattribute__(func)(json)

        def info(self):
            """A list of information for commands.

            A list of dictionaries containing the information about all the supported commands.

            Returns:
                list: list of `command_info` dictionaries.
            """
            commands = []
            for cmd in self.__cmd.command_list:
                commands.append(self.command_info(cmd))
            return commands

    def __init__(self):
        # Fail fast if the component cannot run at all.
        self.check()
        # Collect every method flagged by the @command decorator.
        self.command_list = []
        for i in dir(self):
            f = self.__getattribute__(i)
            if hasattr(f, '__command__'):
                self.command_list.append(f)
        if not self.command_list:
            raise error.CommandError('commands has no commands defined!')
        self.snake = self.Snake(self)

    @abc.abstractmethod
    def check(self):
        """The basic check command.

        This is used by snake to check if it can successfully run commands
        within the Commands component. If this check fails the component will
        fail.
        """
        pass
class Interface(metaclass=abc.ABCMeta):
    """The interface component.

    This is used to add interface support to a scale. An interface acts as a
    middleware layer between the user and another 3rd party api. In general no
    information is stored in snake and the queries are always live to the
    interfaced system, but cache can be used if required.

    This class defines abstract functions that must be overriden in order for a
    scale to support a interface component.

    Attributes:
        pull_list (list): A list of all the pull functions.
        push_list (list): A list of all the push functions.
        snake (:obj:`Snake`): For use internally by snake. (Do not touch)!
    """

    class Snake:
        """A collection of private methods for use by snake.

        Attributes:
            __intf (:obj:`Interface`): The parent `Interface` object.
        """

        def __init__(self, intf):
            self.__intf = intf

        def __formats(self, cmd):
            """A list of supported formats for a command.

            This automatically finds the supporting formatting functions for a
            command by probing the parent for `<command>_<format>` methods.

            Args:
                cmd (str): The name of the command in question.

            Returns:
                list: The list of formatting functions for the command.
            """
            # JSON is always supported - it is the native output format.
            fmts = [enums.Format.JSON]
            for f in enums.Format:
                if f == enums.Format.JSON:
                    continue
                func = '{}_{}'.format(cmd, f)
                if hasattr(self.__intf, func):
                    fmts += [f]
            return fmts

        def format(self, fmt, cmd, json):
            """Format a pull/push command output.

            This formats the commands json output into a alternative but
            supported format. So if the command supports the requested format
            the json will be converted to this and returned. This never alters
            the data in the database. Unlike `Commands.Snake.format`, the
            output here is live (not stored), so there is no bytes decoding or
            pending/error handling.

            Args:
                fmt (str): The format to convert the output to.
                cmd (func): The command.
                json (str): The json to convert.

            Returns:
                str: The formatted output.

            Raises:
                TypeError: If the format is not supported by the enum.
            """
            if fmt not in enums.Format:
                raise TypeError('format not supported')
            if fmt == enums.Format.JSON:
                return json
            if fmt == enums.Format.MARKDOWN:
                func = '%s_markdown' % cmd
            elif fmt == enums.Format.PLAINTEXT:
                func = '%s_plaintext' % cmd
            else:
                raise TypeError('format not supported')
            if not hasattr(self.__intf, func):
                raise TypeError('format not supported')
            return self.__intf.__getattribute__(func)(json)

        def info(self):
            # TODO: Update doc
            """A list of information for commands.

            A single list containing the info dictionaries for every pull and
            push command (pull commands first).

            Returns:
                list: list of `puller_info`/`pusher_info` dictionaries.
            """
            commands = []
            for i in self.__intf.pull_list:
                commands.append(self.puller_info(i))
            for i in self.__intf.push_list:
                commands.append(self.pusher_info(i))
            return commands

        def puller(self, puller):
            """Get a pull command function.

            Get the function for a given command name.

            Args:
                puller (str): The name of the command to get.

            Returns:
                func: The command function requested.

            Raises:
                InterfaceError: If the command is not supported.
            """
            for i in self.__intf.pull_list:
                if i.__name__ == puller:
                    return i
            raise error.InterfaceError('interface does not support puller: %s' % puller)

        def puller_info(self, cmd):
            """Get the information for a pull command.

            This extracts and returns the 'useful' information for a pull command.

            Args:
                cmd (fun): The command function to extract the information from.

            Returns:
                dict: A dictionary containing the: command, args, formats, info and type.
            """
            return {
                'command': cmd.__name__,
                'args': {k: v.to_dict() for k, v in cmd.pull_opts.args.items()} if cmd.pull_opts.args else None,
                'formats': self.__formats(cmd.__name__),
                'info': cmd.pull_opts.info,
                'type': 'pull'
            }

        def pusher(self, pusher):
            """Get a push command function.

            Get the function for a given command name.

            Args:
                pusher (str): The name of the command to get.

            Returns:
                func: The command function requested.

            Raises:
                InterfaceError: If the command is not supported.
            """
            for i in self.__intf.push_list:
                if i.__name__ == pusher:
                    return i
            raise error.InterfaceError('interface does not support pusher: %s' % pusher)

        def pusher_info(self, cmd):
            """Get the information for a push command.

            This extracts and returns the 'useful' information for a push command.

            Args:
                cmd (fun): The command function to extract the information from.

            Returns:
                dict: A dictionary containing the: command, args, formats, info and type.
            """
            return {
                'command': cmd.__name__,
                'args': {k: v.to_dict() for k, v in cmd.push_opts.args.items()} if cmd.push_opts.args else None,
                'formats': self.__formats(cmd.__name__),
                'info': cmd.push_opts.info,
                'type': 'push'
            }

    def __init__(self):
        # Fail fast if the component cannot run at all.
        self.check()
        # Collect methods flagged by the @pull/@push decorators. Note that
        # unlike Commands, an interface with no pull/push methods is allowed.
        self.pull_list = []
        self.push_list = []
        for i in dir(self):
            f = self.__getattribute__(i)
            if hasattr(f, '__pull__') and f.__pull__:
                self.pull_list.append(f)
            elif hasattr(f, '__push__') and f.__push__:
                self.push_list.append(f)
        self.snake = self.Snake(self)

    @abc.abstractmethod
    def check(self):
        """The basic check command.

        This is used by snake to check if it can successfully run commands
        within the Interface component. If this check fails the component will
        fail.
        """
        pass
class Scale:  # pylint: disable=too-many-instance-attributes
    """A snake scale.

    The class required to create a scale. This should never be subclassed just
    called with the correct attributes.

    Attributes:
        name (str): name of scale.
        description (str): description of scale.
        version (str): version number.
        author (str): author of scale.
        supports (list): supported file types (`FileType`).
        components (dict): loaded components, keyed by component name.
    """

    # Component module name -> class name expected inside that module.
    _COMPONENTS = (
        ('commands', 'Commands'),
        ('interface', 'Interface'),
        ('upload', 'Upload'),
    )

    def __init__(self, attrs):
        """Initialise the scale from an attribute dictionary.

        Args:
            attrs (dict): Scale attributes; 'name', 'description', 'version'
                and 'author' are mandatory.

        Raises:
            ScaleError: If a mandatory attribute is missing.
        """
        for field in ('name', 'description', 'version', 'author'):
            if field not in attrs:
                raise error.ScaleError('scale requires {} field'.format(field))
        self.name = attrs['name']
        self.description = attrs['description']
        self.version = attrs['version']
        self.author = attrs['author']
        # Default to supporting every known file type when not restricted.
        self.supports = attrs.get('supports') or [x for x in enums.FileType]
        self.components = {}
        self.caveats = attrs.get('caveats')  # TODO: Remove
        self.scale_requires = attrs.get('scale_requires')  # TODO: Remove
        self.system_requires = attrs.get('system_requires')  # TODO: Remove

    def info(self):
        """Scale information.

        Reports information about the scale, usually just the attibutes
        defined on the scale.

        Returns:
            dict: A dictionary of information about the scale.
        """
        dictionary = {
            "name": self.name,
            "description": self.description,
            "version": self.version,
            "author": self.author,
            "supports": self.supports,
            "components": [x for x in self.components]
        }
        return dictionary

    def load_components(self):
        """Load the scales components.

        This searches through the scales folder and attempts to import its
        components (commands, interface, upload). A ScaleError raised by a
        component is logged and that component is skipped; any other
        exception propagates.

        Raises:
            ScaleError: If the scale's module cannot be located.
        """
        # A little bit assumptive but we can get the already imported module
        # in order to parse its components.
        mod = sys.modules.get("snake.scales.{}".format(self.name))
        if not mod:
            raise error.ScaleError("failed to locate module: snake.scales.{}".format(self.name))
        for _imp, mod_name, _is_pkg in pkgutil.iter_modules(mod.__path__):
            for comp_name, class_name in self._COMPONENTS:
                if mod_name != comp_name:
                    continue
                try:
                    comp_mod = importlib.import_module('snake.scales.' + self.name + '.' + comp_name)
                    if hasattr(comp_mod, class_name):
                        # Instantiating the component runs its check().
                        self.components[comp_name] = getattr(comp_mod, class_name)()
                except error.ScaleError as err:  # TODO: Handle warnings somehow?
                    app_log.error('%s: %s', self.name, err)
class Upload(metaclass=abc.ABCMeta):
    """The upload component.

    Adds upload support to a scale so that snake can learn new ways to
    ingest files. Subclasses must implement every abstract method below.

    Attributes:
        snake (:obj:`Snake`): For use internally by snake. (Do not touch)!
    """

    class Snake:
        """Private helpers used by snake itself.

        Attributes:
            __component (:obj:`Upload`): The parent `Upload` object.
        """

        def __init__(self, upld):
            self.__component = upld

        def info(self):
            """Describe the uploader.

            Returns:
                dict: The uploader's argument schema (or None) and help text.
            """
            serialised = None
            if self.__component.arguments():
                serialised = {key: value.to_dict()
                              for key, value in self.__component.arguments().items()}
            return {
                'args': serialised,
                'info': self.__component.info()
            }

    def __init__(self):
        self.snake = self.Snake(self)

    @abc.abstractmethod
    def arguments(self):
        """Supported arguments to upload.

        Returns:
            dict: A dictionary of supported arguments.
        """

    @abc.abstractmethod
    def info(self):
        """Information about the uploader.

        Returns:
            str: Information about the upload component.
        """

    @abc.abstractmethod
    def upload(self, args, working_dir):
        """The upload function.

        This handles the custom uploader so that snake can ingest.

        Note:
            This must drop a file into the `working_dir` and return the name
            of the file dropped.

        Args:
            args (dict): The populated arguments built from `arguments`.
            working_dir (str): Path to the working directory.

        Returns:
            str: The name of the file dropped into the working_dir.
        """
# TODO: Allow decorator without args...
def autorun(func):
    """Commands: Autorun decorator.

    Flags a command for automatic execution: a function carrying this marker
    is run (mime permitting) whenever a file is uploaded to snake.

    Returns:
        func: The autorun enabled function.
    """
    setattr(func, '__autorun__', True)
    return func
def command(cmd_dict=None):
    """Commands: Command decorator.

    Marks a function as a command function.

    Note:
        The following prototype must be followed:
            func(self, args, file, opts)

    Args:
        cmd_dict (:obj:`CommandOptions`, optional): The additional information for a command.

    Returns:
        func: The wrapped command function.
    """
    def decorator(func):
        """Decorates the function."""
        # Load the attached dictionary if there is one, otherwise create
        # the default
        if cmd_dict:
            cmd_opts = CommandOptions(**cmd_dict)
        else:
            cmd_opts = CommandOptions()
        func.cmd_opts = cmd_opts
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            """Wraps the function."""
            # 'args' may arrive positionally (args[0]) or as a keyword,
            # but never both.
            if args and 'args' in kwargs:
                raise TypeError("%s got multiple values for argument 'args'" % func.__name__)
            elif 'args' in kwargs:
                args_ = kwargs['args']
            else:
                # NOTE(review): raises IndexError if neither form is given -
                # assumes snake always supplies it.
                args_ = args[0]
            # Deep copy so default filling / schema coercion below never
            # mutates the caller's dictionary.
            args_ = copy.deepcopy(args_)
            # Same positional-or-keyword resolution for the sample digest.
            if len(args) > 1 and 'sha256_digest' in kwargs:
                raise TypeError("%s got multiple values for argument 'sha256_digest'" % func.__name__)
            elif 'sha256_digest' in kwargs:
                file_storage = utils.FileStorage(kwargs['sha256_digest'])
            else:
                file_storage = utils.FileStorage(args[1])
            opts = func.cmd_opts
            # Fill in declared defaults the caller omitted, then validate
            # and coerce through the schema (copied so load() can't mutate
            # the shared option fields).
            for k, v in cmd_opts.args.items():
                if k not in args_ and v.has_default():
                    args_[k] = v.default
            if cmd_opts.args.keys():
                args_ = schema.Schema(fields=copy.deepcopy(cmd_opts.args)).load(args_)
            # Component self-check runs before every command execution.
            self.check()
            output = func(args=args_, file=file_storage, opts=opts, self=self)
            if not isinstance(output, dict) and not isinstance(output, list):
                raise TypeError("%s failed to return a dictionary or list" % func.__name__)
            return output
        wrapper.__wrapped__ = func
        # Marker consumed by Commands.__init__ when collecting commands.
        wrapper.__command__ = True
        # functools.wraps copies func.__dict__, so an @autorun flag set on
        # func is already present here; otherwise default it to False.
        if not hasattr(wrapper, '__autorun__'):
            wrapper.__autorun__ = False
        return wrapper
    return decorator
def pull(pull_dict=None):
    """Interface: Pull decorator.

    Marks a function as a pull command function.

    Note:
        The following prototype must be followed:
            func(self, args, file, opts)

    Args:
        pull_dict (:obj:`PullOptions`, optional): The additional information for a command.

    Returns:
        func: The wrapped command function.
    """
    def decorator(func):
        """Decorates the function."""
        # Load the attached dictionary if there is one, otherwise create
        # the default
        if pull_dict:
            pull_opts = PullOptions(**pull_dict)
        else:
            pull_opts = PullOptions()
        func.pull_opts = pull_opts
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            """Wraps the function."""
            # 'args' may arrive positionally (args[0]) or as a keyword,
            # but never both.
            if args and 'args' in kwargs:
                raise TypeError("%s got multiple values for argument 'args'" % func.__name__)
            elif 'args' in kwargs:
                args_ = kwargs['args']
            else:
                # NOTE(review): raises IndexError if neither form is given -
                # assumes snake always supplies it.
                args_ = args[0]
            # Deep copy so default filling / schema coercion below never
            # mutates the caller's dictionary.
            args_ = copy.deepcopy(args_)
            # Same positional-or-keyword resolution for the sample digest.
            if len(args) > 1 and 'sha256_digest' in kwargs:
                raise TypeError("%s got multiple values for argument 'sha256_digest'" % func.__name__)
            elif 'sha256_digest' in kwargs:
                file_storage = utils.FileStorage(kwargs['sha256_digest'])
            else:
                file_storage = utils.FileStorage(args[1])
            opts = func.pull_opts
            # Fill in declared defaults the caller omitted, then validate
            # and coerce through the schema.
            for k, v in pull_opts.args.items():
                if k not in args_ and v.has_default():
                    args_[k] = v.default
            if pull_opts.args.keys():
                args_ = schema.Schema(fields=copy.deepcopy(pull_opts.args)).load(args_)
            # Component self-check runs before every pull execution.
            self.check()
            output = func(args=args_, file=file_storage, opts=opts, self=self)
            if not isinstance(output, dict) and not isinstance(output, list):
                raise TypeError("%s failed to return a dictionary or list" % func.__name__)
            return output
        wrapper.__wrapped__ = func
        # Marker consumed by Interface.__init__ when building pull_list.
        wrapper.__pull__ = True
        return wrapper
    return decorator
def push(push_dict=None):
    """Interface: Push decorator.

    Marks a function as a push command function.

    Note:
        The following prototype must be followed:
            func(self, args, file, opts)

    Args:
        push_dict (:obj:`PushOptions`, optional): The additional information for a command.

    Returns:
        func: The wrapped command function.
    """
    def decorator(func):
        """Decorates the function."""
        # Load the attached dictionary if there is one, otherwise create
        # the default
        if push_dict:
            push_opts = PushOptions(**push_dict)
        else:
            push_opts = PushOptions()
        func.push_opts = push_opts
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            """Wraps the function."""
            # 'args' may arrive positionally (args[0]) or as a keyword,
            # but never both.
            if args and 'args' in kwargs:
                raise TypeError("%s got multiple values for argument 'args'" % func.__name__)
            elif 'args' in kwargs:
                args_ = kwargs['args']
            else:
                # NOTE(review): raises IndexError if neither form is given -
                # assumes snake always supplies it.
                args_ = args[0]
            # Deep copy so default filling / schema coercion below never
            # mutates the caller's dictionary.
            args_ = copy.deepcopy(args_)
            # Same positional-or-keyword resolution for the sample digest.
            if len(args) > 1 and 'sha256_digest' in kwargs:
                raise TypeError("%s got multiple values for argument 'sha256_digest'" % func.__name__)
            elif 'sha256_digest' in kwargs:
                file_storage = utils.FileStorage(kwargs['sha256_digest'])
            else:
                file_storage = utils.FileStorage(args[1])
            opts = func.push_opts
            # Fill in declared defaults the caller omitted, then validate
            # and coerce through the schema.
            for k, v in push_opts.args.items():
                if k not in args_ and v.has_default():
                    args_[k] = v.default
            if push_opts.args.keys():
                args_ = schema.Schema(fields=copy.deepcopy(push_opts.args)).load(args_)
            # Component self-check runs before every push execution.
            self.check()
            output = func(args=args_, file=file_storage, opts=opts, self=self)
            if not isinstance(output, dict) and not isinstance(output, list):
                raise TypeError("%s failed to return a dictionary or list" % func.__name__)
            return output
        wrapper.__wrapped__ = func
        # Marker consumed by Interface.__init__ when building push_list.
        wrapper.__push__ = True
        return wrapper
    return decorator
def scale(**attrs):
    """Scale creation function.

    Builds an initialised `Scale` from the supplied keyword attributes.

    Returns:
        :obj:`Scale`: An initialised scale.
    """
    scale_obj = Scale(attrs)
    return scale_obj
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
from snake.utils import markdown as md
def test_bold():
    """bold() wraps text in spaced double asterisks."""
    assert md.bold('abcd') == '** abcd **'
def test_code():
    """code() wraps text in a fenced code block."""
    assert md.code('abcd') == '```\r\nabcd\r\n```'
def test_cr():
    """cr() yields a CRLF pair."""
    assert md.cr() == '\r\n'
def test_h1():
    """h1() prefixes one hash and ends with CRLF."""
    assert md.h1('abcd') == '# abcd\r\n'
def test_h2():
    """h2() prefixes two hashes and ends with CRLF."""
    assert md.h2('abcd') == '## abcd\r\n'
def test_h3():
    """h3() prefixes three hashes and ends with CRLF."""
    assert md.h3('abcd') == '### abcd\r\n'
def test_h4():
    """h4() prefixes four hashes and ends with CRLF."""
    assert md.h4('abcd') == '#### abcd\r\n'
def test_newline():
    """newline() yields a CRLF pair."""
    assert md.newline() == '\r\n'
def test_paragraph():
    """paragraph() appends a trailing CRLF to the text."""
    assert md.paragraph('abcd') == 'abcd\r\n'
def test_sanitize():
    """sanitize() neutralises markdown control sequences."""
    assert md.sanitize('```') == '(3xbacktick)'
    assert md.sanitize('|') == '(pipe)'
    assert md.sanitize('_') == r'\_'
def test_table_header():
    """table_header() emits the header row followed by a separator row."""
    assert md.table_header(('a', 'b')) == '| a | b |\r\n| --- | --- |\r\n'
def test_table_row():
    """table_row() emits one pipe-delimited row ending with CRLF."""
    assert md.table_row(('a', 'b')) == '| a | b |\r\n'
def test_url():
    """url() builds an inline markdown link."""
    assert md.url('a', 'b') == '[a](b)'
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
"""The Mongo Command Collection Module.
This module provides everything required to communicate with the Mongo CommandCollection.
"""
import bson
import gridfs
from motor import motor_asyncio
import pymongo
from pymongo import collation
from snake import enums
class CommandCollection():
    """Synchronous Command Collection.

    Thin synchronous wrapper around the `commands` collection of a mongo
    database.

    Attributes:
        db (obj): The database object
    """

    def __init__(self, db):
        self.db = db

    def clean(self):
        """Clean lost commands from the database.

        This marks any pending or running commands as failed. It is used when
        snake is restarted to clean out commands that will never run.

        Uses a single `update_many` instead of a find + per-document
        `update_one` loop, so the whole sweep is one round trip.
        """
        self.db.commands.update_many(
            {'$or': [{'status': enums.Status.PENDING}, {'status': enums.Status.RUNNING}]},
            {'$set': {'status': enums.Status.FAILED}})

    def delete(self, sha256_digest, scale, command, args):
        """Delete command.

        Args:
            sha256_digest (str): The hash of the file.
            scale (str): The scale.
            command (str): The command.
            args (dict): The arguments.

        Returns:
            :obj:`DeleteResult`: The result of the delete.
        """
        return self.db.commands.delete_many({"sha256_digest": sha256_digest, "scale": scale, "command": command,
                                             "args": args})

    def insert(self, document):
        """Insert command.

        Args:
            document (:obj:CommandSchema): The command to insert.

        Returns:
            :obj:`CommandSchema`: The inserted command.
        """
        return self.db.commands.insert_one(document)

    def select(self, sha256_digest, scale, command, args):
        """Select command.

        Args:
            sha256_digest (str): The hash of the file.
            scale (str): The scale.
            command (str): The command.
            args (dict): The arguments.

        Returns:
            :obj:`CommandSchema`: The selected command.
        """
        return self.db.commands.find_one({"sha256_digest": sha256_digest, "scale": scale, "command": command,
                                          "args": args})

    def select_many(self, sha256_digest=None, scale=None, command=None, args=None):
        """Select commands.

        Args:
            sha256_digest (str, optional): The hash of the file. Defaults to None.
            scale (str, optional): The scale. Defaults to None.
            command (str, optional): The command. Defaults to None.
            args (dict, optional): The arguments. Defaults to None.

        Returns:
            :obj:`Cursor`: The mongodb cursor.
        """
        # Only filter on the fields that were actually supplied.
        data = {"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args}
        keys = [k for k, v in data.items() if v is None]
        for k in keys:
            del data[k]
        return self.db.commands.find(data)

    def select_all(self):
        """Select all commands.

        Returns:
            :obj:`Cursor`: The mongodb cursor.
        """
        return self.db.commands.find()

    def update(self, sha256_digest, scale, command, args, data):
        """Update command.

        Args:
            sha256_digest (str): The hash of the file.
            scale (str): The scale.
            command (str): The command.
            args (dict): The arguments.
            data (:obj:`CommandSchema): The update data.

        Returns:
            :obj:`CommandSchema`: The updated command.
        """
        return self.db.commands.update_one({"sha256_digest": sha256_digest, "scale": scale, "command": command,
                                            "args": args}, {'$set': data})
class AsyncCommandCollection():
"""Asynchronous Command Collection.
Attributes:
db (obj): The database object
"""
def __init__(self, db):
self.db = db
def delete(self, sha256_digest, scale, command, args, callback=None):
"""Delete command.
Args:
sha256_digest (str): The hash of the file.
scale (str): The scale.
command (str): The command.
args (dict): The arguments.
callback (func, optional): The callback function. Defaults to None.
"""
future = self.db.commands.delete_many({"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args})
if callback:
future.add_done_callback(callback)
return future
def insert(self, document, callback=None):
"""Insert command.
Args:
document (:obj:CommandSchema): The command to insert.
callback (func, optional): The callback function. Defaults to None.
Returns:
:obj:`CommandSchema`: The inserted command.
"""
future = self.db.commands.insert_one(document)
if callback:
future.add_done_callback(callback)
return future
def select(self, sha256_digest, scale=None, command=None, args=None, callback=None):
"""Select command.
Args:
sha256_digest (str): The hash of the file.
scale (str): The scale.
command (str): The command.
args(dict): The arguments.
callback (func, optional): The callback function. Defaults to None.
Returns:
:obj:`CommandSchema`: The selected command.
"""
future = self.db.commands.find_one({"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args})
if callback:
future.add_done_callback(callback)
return future
def select_many(self, sha256_digest=None, scale=None, command=None, args=None, order=pymongo.DESCENDING, sort=None):
"""Select commands.
Args:
sha256_digest (str, optional): The hash of the file. Defaults to None.
scale (str, optional): The scale. Defaults to None.
command (str, optional): The command. Defaults to None.
args (dict, optional): The arguments. Defaults to None.
callback (func, optional): The callback function. Defaults to None.
Returns:
:obj:`Cursor`: The mongodb cursor.
"""
data = {"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args}
keys = [k for k, v in data.items() if v is None]
for k in keys:
del data[k]
cursor = self.db.commands.find(data)
if sort:
cursor = cursor.sort([(sort, order)]).collation(collation.Collation(locale="en"))
return cursor
def select_all(self, filter_=None, order=pymongo.DESCENDING, sort=None):
"""Select all commands.
Returns:
:obj:`Cursor`: The mongodb cursor.
"""
if filter_:
documents = self.db.commands.find(filter_)
else:
documents = self.db.commands.find()
if sort:
documents = documents.sort([(sort, order)]).collation(collation.Collation(locale="en"))
return documents
def update(self, sha256_digest, scale, command, args, data, callback=None): # pylint: disable=too-many-arguments
"""Update command.
Args:
sha256_digest (str): The hash of the file.
scale (str): The scale.
command (str): The command.
args (dict): The arguments.
data (:obj:`CommandSchema): The update data.
callback (func, optional): The callback function. Defaults to None.
Returns:
:obj:`CommandSchema`: The updated command.
"""
future = self.db.commands.update_one({"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args}, {'$set': data})
if callback:
future.add_done_callback(callback)
return future
def replace(self, sha256_digest, scale, command, args, data, callback=None):  # pylint: disable=too-many-arguments
    """Replace command.

    Args:
        sha256_digest (str): The hash of the file.
        scale (str): The scale.
        command (str): The command.
        args (dict): The arguments.
        data (:obj:`CommandSchema`): The replace data.
        callback (func, optional): The callback function. Defaults to None.

    Returns:
        :obj:`CommandSchema`: The replaced command.
    """
    selector = {
        "sha256_digest": sha256_digest,
        "scale": scale,
        "command": command,
        "args": args,
    }
    # Unlike update(), this swaps the whole document for `data`.
    future = self.db.commands.replace_one(selector, data)
    if callback:
        future.add_done_callback(callback)
    return future
# pylint: disable=missing-docstring
class CommandOutputCollection():
    """Synchronous GridFS-backed storage for command output blobs."""

    def __init__(self, db):
        self.db = gridfs.GridFSBucket(db)

    def delete(self, file_id):
        # Accept either a string id or an ObjectId.
        oid = bson.ObjectId(file_id) if isinstance(file_id, str) else file_id
        self.db.delete(oid)

    def get(self, file_id):
        oid = bson.ObjectId(file_id) if isinstance(file_id, str) else file_id
        return self.db.open_download_stream(oid).read()

    def put(self, file_name, data):
        return self.db.upload_from_stream(file_name, data)
class AsyncCommandOutputCollection():
    """Asynchronous (motor) GridFS-backed storage for command output blobs."""

    def __init__(self, db):
        self.db = motor_asyncio.AsyncIOMotorGridFSBucket(db)

    async def delete(self, file_id):
        # Accept either a string id or an ObjectId.
        oid = bson.ObjectId(file_id) if isinstance(file_id, str) else file_id
        await self.db.delete(oid)

    async def get(self, file_id):
        oid = bson.ObjectId(file_id) if isinstance(file_id, str) else file_id
        stream = await self.db.open_download_stream(oid)
        return await stream.read()

    async def put(self, file_name, data):
        return await self.db.upload_from_stream(file_name, data)
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,701 | TheoKlein/snake-core | refs/heads/master | /tests/config/test_config.py | import pytest
from snake import error
from snake.config import config
from snake.config import constants
# pylint: disable=too-many-statements
def test_config(mocker):
    """Exercise config.Config loading paths: default config, custom config
    files, scale configs, ETC_DIR overrides, and the failure mode of each.

    Note: uses '== 0' / '!= 0' rather than 'is 0' / 'is not 0' -- identity
    comparison with int literals is a CPython small-int caching artifact
    and a SyntaxWarning since Python 3.8.
    """
    def blank_file(*args, **kwargs):  # pylint: disable=unused-argument
        return './tests/files/blank.conf'

    def invalid_file(*args, **kwargs):  # pylint: disable=unused-argument
        return './tests/files/invalid.conf'

    def load_config_fake(self, config_file):  # pylint: disable=unused-argument
        pass

    def resource_filename_snake(self, config_file):  # pylint: disable=unused-argument
        # NOTE(review): unused helper -- kept for parity, candidate for removal.
        return './snake/data/config/snake.conf'

    def resource_filename(package_or_requirement, resource_name):  # pylint: disable=unused-argument
        return './tests/files/test.conf'

    def resource_filename_err(package_or_requirement, resource_name):  # pylint: disable=unused-argument
        return './tests/files/test_fail.conf'

    # Test initialisation
    Config = config.Config  # pylint: disable=invalid-name
    load_config = Config.load_config
    Config.load_config = load_config_fake  # Monkey patch
    cfg = Config()
    assert len(cfg.scale_configs) == 0
    assert len(cfg.snake_config) == 0
    # Test full initialisation, but lazily
    Config.load_config = load_config  # Un-Monkey patch
    cfg = Config()
    assert len(cfg.scale_configs) == 0
    assert len(cfg.snake_config) != 0
    # Test custom load file
    cfg = Config(config_file='./tests/files/test.conf')
    assert len(cfg.scale_configs) == 0
    assert len(cfg.snake_config) != 0
    # Test snake load scale file
    mocker.patch('pkg_resources.resource_filename', resource_filename)
    cfg = Config()
    cfg.load_scale_config('test')
    assert len(cfg.scale_configs) != 0
    assert len(cfg.snake_config) != 0
    # Cause load_config to fail
    mocker.patch('pkg_resources.resource_filename', resource_filename_err)
    with pytest.raises(SystemExit):
        cfg = Config()
    # Remove patch
    mocker.stopall()
    # Pass missing config file
    with pytest.raises(SystemExit):
        cfg = Config(config_file='./tests/files/test_fail.conf')
    # Fake etc to test ETC support
    constants.ETC_DIR = './tests/files'
    # Test ETC_DIR config loading
    cfg = Config()
    assert len(cfg.scale_configs) == 0
    assert len(cfg.snake_config) != 0
    # Test ETC_DIR failed config loading
    mocker.patch('os.path.join', blank_file)
    with pytest.raises(SystemExit):
        cfg = Config()
    mocker.stopall()
    # Test ETC_DIR blank config loading for scale
    cfg = Config()
    mocker.patch('os.path.join', blank_file)
    mocker.patch('pkg_resources.resource_filename', resource_filename)
    cfg.load_scale_config('test')
    assert len(cfg.scale_configs) != 0
    assert len(cfg.snake_config) != 0
    mocker.stopall()
    # Test ETC_DIR invalid config loading for scale
    cfg = Config()
    mocker.patch('os.path.join', invalid_file)
    mocker.patch('pkg_resources.resource_filename', resource_filename)
    with pytest.raises(error.SnakeError):
        cfg.load_scale_config('test')
    assert len(cfg.scale_configs) != 0
    assert len(cfg.snake_config) != 0
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
57,702 | TheoKlein/snake-core | refs/heads/master | /snake/config/constants.py | """The constants used throughout snake
Attributes:
API_VERSION (str): The API version number.
USER_AGENT (str): The User Agent string. This should be used whenever snake
performs requests.
VERSION (str): The snake version number.
ETC_DIR (str): The path to the etc folder for snake.
"""
API_VERSION = '1.0'  # The API version number.
USER_AGENT = 'Snake, a binary analysis platform by Countercept'  # Sent whenever snake performs requests.
VERSION = '1.0.1'  # The snake version number.
ETC_DIR = '/etc/snake'  # Path to the etc folder for snake (overridable, e.g. by tests).
| {"/snake/core/route_support.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/tests/core/test_route_support.py": ["/snake/config/__init__.py"], "/snake/snaked.py": ["/snake/config/__init__.py", "/snake/core/celery.py", "/snake/core/route_manager.py", "/snake/core/snake_handler.py"], "/snake/routes/download.py": ["/snake/db.py", "/snake/utils/__init__.py"], "/snake/db.py": ["/snake/config/__init__.py"], "/snake/core/scale_manager.py": ["/snake/config/__init__.py"], "/tests/test_scale.py": ["/snake/error.py"], "/snake/utils/submitter.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"], "/snake/snake_utility.py": ["/snake/config/__init__.py"], "/snake/utils/file_storage.py": ["/snake/config/__init__.py"], "/snake/config/config.py": ["/snake/config/__init__.py"], "/snake/core/celery.py": ["/snake/config/__init__.py"], "/snake/scales/hashes/commands.py": ["/snake/utils/__init__.py"], "/snake/scales/strings/commands.py": ["/snake/scales/strings/__init__.py"], "/snake/routes/api.py": ["/snake/config/__init__.py"], "/snake/scale.py": ["/snake/enums.py"], "/tests/utils/test_markdown.py": ["/snake/utils/__init__.py"], "/tests/config/test_config.py": ["/snake/config/__init__.py"], "/snake/utils/__init__.py": ["/snake/utils/file_storage.py"], "/snake/core/snake_handler.py": ["/snake/config/__init__.py"], "/snake/worker.py": ["/snake/config/__init__.py", "/snake/core/celery.py"], "/snake/routes/scale.py": ["/snake/config/__init__.py", "/snake/managers.py"], "/snake/routes/command.py": ["/snake/error.py", "/snake/managers.py"], "/snake/scales/hashes/__init__.py": ["/snake/config/__init__.py", "/snake/scale.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.