index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
19,224
|
roman-karpovich/drf-batch-requests
|
refs/heads/master
|
/drf_batch_requests/backends/sync.py
|
from django.core.handlers.base import BaseHandler
from rest_framework.status import is_success
from drf_batch_requests.backends.base import RequestsConsumeBaseBackend
class SyncRequestsConsumeBackend(RequestsConsumeBaseBackend):
    """Consume batch requests synchronously, one at a time, in-process.

    Each request is pushed through the full Django middleware/view stack
    via ``BaseHandler`` and the resulting response is cached on
    ``self.responses`` keyed by the request object.
    """

    def __init__(self):
        # Maps each consumed request object to the response produced for it.
        self.responses = {}

    # todo: from this point i think we can consume requests pack
    def consume_request(self, request, start_callback=None, success_callback=None, fail_callback=None):
        """Run *request* through Django's handler, firing lifecycle callbacks.

        ``start_callback`` fires before processing; ``success_callback`` or
        ``fail_callback`` fires afterwards depending on the response status.
        Always returns True.
        """
        # Idiom fix: the original used expression statements of the form
        # ``cb() if cb else None``; a plain ``if`` states the intent.
        if start_callback:
            start_callback()

        handler = BaseHandler()
        handler.load_middleware()
        response = handler.get_response(request)

        if is_success(response.status_code):
            if success_callback:
                success_callback()
        else:
            if fail_callback:
                fail_callback()

        self.responses[request] = response
        return True
|
{"/tests/test_view.py": ["/tests/mixins.py"], "/drf_batch_requests/request.py": ["/drf_batch_requests/exceptions.py", "/drf_batch_requests/serializers.py", "/drf_batch_requests/utils.py"], "/drf_batch_requests/views.py": ["/drf_batch_requests/exceptions.py", "/drf_batch_requests/graph.py", "/drf_batch_requests/request.py", "/drf_batch_requests/response.py", "/drf_batch_requests/utils.py"], "/drf_batch_requests/serializers.py": ["/drf_batch_requests/utils.py"], "/tests/test_request.py": ["/drf_batch_requests/request.py"], "/drf_batch_requests/backends/sync.py": ["/drf_batch_requests/backends/base.py"]}
|
19,225
|
roman-karpovich/drf-batch-requests
|
refs/heads/master
|
/tests/views.py
|
from django.http import JsonResponse
from django.http.response import HttpResponse as DjangoResponse
from django.views.generic import View
from rest_framework.response import Response
from rest_framework.views import APIView
class TestAPIView(APIView):
    """Minimal DRF endpoint used by the test suite."""

    def get(self, request, *args, **kwargs):
        # Echo the query string back alongside a fixed, single-page payload.
        payload = {
            'data': [
                {'id': 1, 'some_data': 'foo'},
                {'id': 2, 'some_data': 'bar'},
                {'id': 3, 'some_data': 'baz'},
            ],
            'page': 1,
            'get': request.query_params,
        }
        return self.finalize_response(request, Response(payload))

    def post(self, request, *args, **kwargs):
        # Echo whatever was posted under the "data" key.
        body = {'data': request.data.get('data')}
        return self.finalize_response(request, Response(body))
def test_fbv(request):
    """Function-based test view: echoes POST data, or fixed fields otherwise."""
    if request.method != 'POST':
        return JsonResponse({'field1': 'field1_value', 'field2': 'field2_value'})
    return JsonResponse(request.POST)
class TestFilesAPIView(APIView):
    """Endpoint reporting the name and size of every uploaded file."""

    def post(self, request, *args, **kwargs):
        files_info = {}
        for key, attachment in request.FILES.items():
            files_info[key] = {
                'name': attachment.name,
                'size': attachment.size,
            }
        return self.finalize_response(request, Response({'files': files_info}))
class SimpleView(View):
    """Plain Django view returning a non-JSON body (used to test pass-through)."""

    def get(self, request):
        response = DjangoResponse('test non-json output')
        return response
|
{"/tests/test_view.py": ["/tests/mixins.py"], "/drf_batch_requests/request.py": ["/drf_batch_requests/exceptions.py", "/drf_batch_requests/serializers.py", "/drf_batch_requests/utils.py"], "/drf_batch_requests/views.py": ["/drf_batch_requests/exceptions.py", "/drf_batch_requests/graph.py", "/drf_batch_requests/request.py", "/drf_batch_requests/response.py", "/drf_batch_requests/utils.py"], "/drf_batch_requests/serializers.py": ["/drf_batch_requests/utils.py"], "/tests/test_request.py": ["/drf_batch_requests/request.py"], "/drf_batch_requests/backends/sync.py": ["/drf_batch_requests/backends/base.py"]}
|
19,226
|
roman-karpovich/drf-batch-requests
|
refs/heads/master
|
/drf_batch_requests/graph.py
|
class RequestGraphNode(object):
    """A single request in a batch dependency graph.

    Tracks execution status plus the parent/child links used to decide when
    the request may run (all parents completed) and to cascade failures down
    to not-yet-started children.
    """

    STATUS_FAILED = -1
    STATUS_FAILED_PARENT = -2
    STATUS_NOT_STARTED = 0
    STATUS_IN_PROGRESS = 1
    STATUS_COMPLETED = 2

    def __init__(self, request=None):
        # NOTE(review): a None request would crash on .get() below; the
        # default exists for interface compatibility only.
        self.request = request
        self.name = self.request.get('name')
        self.parents = set()
        self.children_set = set()
        self.status = self.STATUS_NOT_STARTED

    def start(self):
        """Mark the node as currently executing."""
        self.status = RequestGraphNode.STATUS_IN_PROGRESS

    def complete(self):
        """Mark the node as successfully finished."""
        self.status = RequestGraphNode.STATUS_COMPLETED

    def fail(self, own_fail=True):
        """Mark the node failed and cascade the failure to idle children.

        ``own_fail=False`` records the failure as inherited from a parent.
        """
        self.status = RequestGraphNode.STATUS_FAILED if own_fail else RequestGraphNode.STATUS_FAILED_PARENT
        for child_node in self.children_set:
            # Only untouched children inherit the failure; running or
            # finished children keep their own status.
            if child_node.status == RequestGraphNode.STATUS_NOT_STARTED:
                child_node.fail(own_fail=False)

    @property
    def can_be_performed(self):
        """True when the node has not started and every parent completed."""
        # Idiom fix: ``!=`` instead of ``not ... ==``; generator expression
        # instead of map/lambda.
        if self.status != RequestGraphNode.STATUS_NOT_STARTED:
            return False
        return all(
            parent.status == RequestGraphNode.STATUS_COMPLETED
            for parent in self.parents
        )

    def __str__(self):
        return self.name or super(RequestGraphNode, self).__str__()
class RequestGraph(object):
    """Dependency graph over a batch of request dicts.

    Each request may carry a ``name`` and a ``depends_on`` list naming other
    requests in the batch; parent/child edges are wired at construction time.

    Raises:
        ValueError: when a ``depends_on`` entry names an unknown request.
    """

    def __init__(self, requests):
        self.nodes = [RequestGraphNode(request) for request in requests]
        # Only named requests can be referenced as dependencies.
        self._named_requests = {
            node.request['name']: node
            for node in self.nodes
            if node.request.get('name')
        }
        for node in self.nodes:
            for parent_name in node.request.get('depends_on', []):
                parent = self._named_requests.get(parent_name)
                if not parent:
                    # ValueError is more precise than the bare Exception the
                    # original raised, and remains catchable as Exception.
                    raise ValueError('Wrong parent {} in node.'.format(parent_name))
                node.parents.add(parent)
                parent.children_set.add(node)

    def get_node_order(self, node):
        """Return the positional index of *node* in the original batch."""
        return self.nodes.index(node)

    def get_not_failed_nodes(self):
        """Iterate nodes that have neither failed nor inherited a failure."""
        return filter(
            lambda node: node.status not in (
                RequestGraphNode.STATUS_FAILED,
                RequestGraphNode.STATUS_FAILED_PARENT,
            ),
            self.nodes
        )

    def get_current_available_nodes(self):
        """Iterate nodes whose dependencies are all satisfied and may run now."""
        return filter(lambda node: node.can_be_performed, self.get_not_failed_nodes())

    def is_completed(self):
        """True when every node reached a terminal state (completed or failed)."""
        return all(
            node.status in (
                RequestGraphNode.STATUS_FAILED,
                RequestGraphNode.STATUS_FAILED_PARENT,
                RequestGraphNode.STATUS_COMPLETED,
            )
            for node in self.nodes
        )
|
{"/tests/test_view.py": ["/tests/mixins.py"], "/drf_batch_requests/request.py": ["/drf_batch_requests/exceptions.py", "/drf_batch_requests/serializers.py", "/drf_batch_requests/utils.py"], "/drf_batch_requests/views.py": ["/drf_batch_requests/exceptions.py", "/drf_batch_requests/graph.py", "/drf_batch_requests/request.py", "/drf_batch_requests/response.py", "/drf_batch_requests/utils.py"], "/drf_batch_requests/serializers.py": ["/drf_batch_requests/utils.py"], "/tests/test_request.py": ["/drf_batch_requests/request.py"], "/drf_batch_requests/backends/sync.py": ["/drf_batch_requests/backends/base.py"]}
|
19,236
|
ivan-shishkov/29_phones
|
refs/heads/master
|
/db.py
|
import os
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import create_engine, Column, Integer, String
Base = declarative_base()

# Robustness fix: fail fast with a clear message instead of passing None to
# create_engine when the environment variable is missing.
_database_uri = os.environ.get('DATABASE_URI')
if not _database_uri:
    raise RuntimeError('DATABASE_URI environment variable is not set')

engine = create_engine(_database_uri)
db_session = scoped_session(sessionmaker(bind=engine))


class Order(Base):
    """An order row; the contact phone is stored raw and in normalized form."""

    __tablename__ = 'orders'

    id = Column(Integer, primary_key=True)
    contact_phone = Column(String(100))
    # Indexed so the normalization job can quickly find NULL (unprocessed) rows.
    contact_phone_normalized = Column(String(100), index=True)
|
{"/normalize_phones.py": ["/db.py"]}
|
19,237
|
ivan-shishkov/29_phones
|
refs/heads/master
|
/normalize_phones.py
|
import time
import re
from sqlalchemy.exc import OperationalError
import phonenumbers
from phonenumbers.phonenumberutil import NumberParseException
from db import db_session, Order
TIMEOUT_BETWEEN_PHONES_NORMALIZATION_CYCLES = 5 * 60
TIMEOUT_WHEN_ERROR = 10
TIMEOUT_BETWEEN_TRANSACTIONS = 5
def get_normalized_phone_number(source_phone_number, region='RU'):
    """Return the national number for *source_phone_number*, or '' on failure.

    Digits are extracted from the raw string; when the number starts with
    '8' the region's country code is prepended before parsing.
    NOTE(review): the leading '8' itself is kept in place -- confirm this is
    what the phonenumbers parser expects for trunk-prefixed numbers.
    """
    if source_phone_number is None:
        return ''

    digits = ''.join(re.findall(r'\d+', source_phone_number))

    if digits.startswith('8'):
        country_code = phonenumbers.country_code_for_valid_region(region)
        digits = '{}{}'.format(country_code, digits)

    try:
        parsed = phonenumbers.parse(digits, region)
    except NumberParseException:
        return ''
    return str(parsed.national_number)
def normalize_contact_phones(orders):
    """Normalize the contact phone of every order, then commit once."""
    for current_order in orders:
        normalized = get_normalized_phone_number(current_order.contact_phone)
        current_order.contact_phone_normalized = normalized
    db_session.commit()
def run_phones_normalization_cycle(count_rows_per_transaction=100):
    """Process unnormalized orders in batches until none remain.

    Rows with a NULL contact_phone_normalized are considered unprocessed.
    Sleeps between transactions to keep database load low.
    """
    while True:
        batch = db_session.query(Order).filter(
            Order.contact_phone_normalized.is_(None),
        ).limit(count_rows_per_transaction).all()
        if not batch:
            return
        normalize_contact_phones(batch)
        time.sleep(TIMEOUT_BETWEEN_TRANSACTIONS)
def main():
    """Run normalization cycles forever, recovering from database outages."""
    while True:
        try:
            run_phones_normalization_cycle()
        except OperationalError:
            # Connection hiccup: roll back the session and retry soon.
            db_session.rollback()
            time.sleep(TIMEOUT_WHEN_ERROR)
        else:
            time.sleep(TIMEOUT_BETWEEN_PHONES_NORMALIZATION_CYCLES)


if __name__ == '__main__':
    main()
|
{"/normalize_phones.py": ["/db.py"]}
|
19,238
|
ivan-shishkov/29_phones
|
refs/heads/master
|
/alembic/versions/ece6c9ebeb97_add_normalized_contact_phone_column.py
|
"""add normalized contact phone column
Revision ID: ece6c9ebeb97
Revises:
Create Date: 2018-12-09 21:46:17.286164
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ece6c9ebeb97'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Add the indexed contact_phone_normalized column to orders."""
    column = sa.Column(
        'contact_phone_normalized',
        sa.String(length=100),
        index=True,
    )
    op.add_column('orders', column)
def downgrade():
    """Drop the column added by upgrade()."""
    op.drop_column('orders', 'contact_phone_normalized')
|
{"/normalize_phones.py": ["/db.py"]}
|
19,239
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/migrations/0006_product_number_of_units.py
|
# Generated by Django 2.2.1 on 2019-05-31 07:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the non-null number_of_units field to Product."""

    dependencies = [
        ('accountant', '0005_auto_20190526_1836'),
    ]

    operations = [
        migrations.AddField(
            model_name='product',
            name='number_of_units',
            # default=12 backfills existing rows; preserve_default=False
            # keeps the field default-less afterwards.
            field=models.IntegerField(default=12),
            preserve_default=False,
        ),
    ]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,240
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/models.py
|
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import models
# Create your models here.
class Accountant(get_user_model()):
    """Accountant profile; multi-table inheritance from the active user model."""
    location = models.CharField(max_length=100, null=True, blank=True)

    class Meta:
        verbose_name = 'Accountant'


class Admin_User(get_user_model()):
    """Administrator profile; multi-table inheritance from the active user model."""
    location = models.CharField(max_length=100, null=True, blank=True)

    class Meta:
        verbose_name = 'Admin'


class Stock(models.Model):
    """A named inventory grouping with audit timestamps."""
    name = models.CharField(max_length=250)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Stock'


class Product(models.Model):
    """An item held in a Stock, priced per unit."""
    name = models.CharField(max_length=250)
    number_of_units = models.IntegerField()
    stock = models.ForeignKey(Stock, on_delete=models.CASCADE)
    buying_price = models.IntegerField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Product'


class Customer(models.Model):
    """A buyer identified by name and phone number."""
    first_name = models.CharField(max_length=250)
    last_name = models.CharField(max_length=250)
    phone_number = models.IntegerField()


class Sale(models.Model):
    """A sale of a product to a customer, recorded by a user.

    NOTE(review): references User directly while the profile models use
    get_user_model() -- confirm AUTH_USER_MODEL is the default User.
    """
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    customer = models.ForeignKey(Customer, on_delete=models.CASCADE)
    product = models.ForeignKey(Product, on_delete=models.CASCADE)
    units_sold = models.IntegerField()
    price_per_unit = models.IntegerField()
    total = models.IntegerField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Sale'
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,241
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/forms.py
|
from django.contrib.auth.forms import UserCreationForm
from django.forms import ModelForm
from accountant.models import Accountant, Product, Sale, Stock, Customer
class SignUpForm(UserCreationForm):
    """Registration form creating an Accountant with a location."""
    class Meta:
        model = Accountant
        fields = ('username', 'first_name', 'last_name', 'email', 'location')


class AddStockForm(ModelForm):
    """Create a Stock from its name only."""
    class Meta:
        model = Stock
        fields = ('name',)


class AddCustomerForm(ModelForm):
    """Create a Customer from name and phone number."""
    class Meta:
        model = Customer
        fields = ('first_name', 'last_name', 'phone_number')


class AddProductForm(ModelForm):
    """Create a Product; number_of_units is not exposed on this form."""
    class Meta:
        model = Product
        fields = ('name', 'stock', 'buying_price')


class AddSaleForm(ModelForm):
    """Record a Sale; the total is entered manually, not computed."""
    class Meta:
        model = Sale
        fields = ('user', 'customer', 'product', 'price_per_unit', 'units_sold', 'total')
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,242
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/migrations/0008_auto_20190531_0752.py
|
# Generated by Django 2.2.1 on 2019-05-31 07:52
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the Customer model and turn Sale.customer_name into a FK to it."""

    dependencies = [
        ('accountant', '0007_sale_customer_name'),
    ]

    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=250)),
                ('last_name', models.CharField(max_length=250)),
                ('phone_number', models.IntegerField()),
            ],
        ),
        migrations.AlterField(
            model_name='sale',
            name='customer_name',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accountant.Customer'),
        ),
    ]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,243
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/migrations/0009_auto_20190531_0845.py
|
# Generated by Django 2.2.1 on 2019-05-31 08:45
from django.db import migrations
class Migration(migrations.Migration):
    """Rename Sale.customer_name (now a FK to Customer) to Sale.customer."""

    dependencies = [
        ('accountant', '0008_auto_20190531_0752'),
    ]

    operations = [
        migrations.RenameField(
            model_name='sale',
            old_name='customer_name',
            new_name='customer',
        ),
    ]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,244
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/views.py
|
from django.contrib import messages
from django.contrib.auth import authenticate, login
from django.shortcuts import render, redirect
# Create your views here.
from accountant.forms import SignUpForm, AddProductForm, AddStockForm, AddCustomerForm, AddSaleForm
from accountant.models import Accountant, Admin_User, Stock, Product, Customer, Sale
app_name = 'Accountant'
def landing(request):
    """Render the public landing page."""
    return render(request, 'accountant/landing_page.html')
def sign_up(request):
    """Register a new Accountant, log them in, and redirect to sales.

    On invalid form data the user is bounced back to the signup route.
    """
    if request.method != 'POST':
        form = SignUpForm()
        return render(request, 'registration/sign_up.html', {'form': form})

    form = SignUpForm(request.POST)
    if not form.is_valid():
        messages.error(request, 'Form Invalid')
        # NOTE(review): 'Accounts:signup' does not match this app's
        # 'Accountant' namespace in urls.py -- verify the route exists.
        return redirect('Accounts:signup')

    form.save()
    username = form.cleaned_data.get('username')
    # BUG FIX: UserCreationForm exposes the raw password as 'password1';
    # the original read 'password', got None, so authenticate() returned
    # None and login(request, None) failed.
    raw_password = form.cleaned_data.get('password1')
    user = authenticate(username=username, password=raw_password)
    login(request, user)
    return redirect('Farmer:my_sales')
def sign_in(request):
    """Authenticate a user and route them to the dashboard.

    Both Accountant and Admin_User accounts land on the same dashboard;
    failures re-render the form with an error list.
    """
    errors = []
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(username=username, password=password)
        if user is None:
            errors.append('Invalid login')
        elif not user.is_active:
            # Typo fixed: "You account" -> "Your account".
            errors.append('Your account has been deactivated!')
        else:
            login(request, user)
            if Accountant.objects.filter(user_ptr_id=user.id).exists():
                return redirect('Accountant:dashboard')
            elif Admin_User.objects.filter(user_ptr_id=user.id).exists():
                return redirect('Accountant:dashboard')
    return render(request, 'registration/sign_in.html', {'errors': errors})
def stocks(request):
    """List every stock."""
    all_stocks = Stock.objects.all()
    return render(request, 'accountant/view_stocks.html', {'stocks': all_stocks})
def new_stock(request):
    """Create a stock from POSTed data; re-render the form otherwise.

    Invalid submissions fall through to the form page with an error message.
    """
    if request.method == 'POST':
        form = AddStockForm(request.POST)
        # Removed stray debug print(form) from the original.
        if form.is_valid():
            form.save()
            messages.success(request, 'Stock Added Successfully')
            return redirect('Accountant:stocks')
        messages.error(request, 'Invalid Details Please Try Again')
    return render(request, 'accountant/add_stock.html')
def products(request, stock_id):
    """List all products belonging to one stock."""
    stock = Stock.objects.get(id=stock_id)
    stock_products = Product.objects.filter(stock=stock)
    return render(
        request,
        'accountant/view_products.html',
        {'products': stock_products, 'stock': stock},
    )
def new_product(request, stock_id):
    """Create a product under *stock_id*; show an unbound form on GET.

    Fixed: the original bound the form to request.POST before checking the
    method, producing a bound (and discarded) form on every GET; the debug
    print(form) was also removed.
    """
    stock = Stock.objects.get(id=stock_id)
    if request.method == 'POST':
        form = AddProductForm(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Product Added Successfully')
            return redirect('Accountant:products', stock.id)
        messages.error(request, 'Failed to add the product')
        return redirect('Accountant:new_product', stock.id)
    form = AddProductForm()
    return render(request, 'accountant/add_product.html', {'stock': stock, 'form': form})
def dashboard(request):
    """Render the base dashboard layout."""
    return render(request, 'layouts/base.html')
def customers(request):
    """List all customers."""
    return render(
        request,
        'accountant/customers.html',
        {'customers': Customer.objects.all()},
    )
def add_customer(request):
    """Create a customer from POSTed data; show the form on GET.

    Both the success and failure paths redirect back to the customer list.
    """
    if request.method == 'POST':
        form = AddCustomerForm(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Customer Added Successfully')
        else:
            messages.error(request, 'Form Validation Failed')
        return redirect('Accountant:customers')
    return render(request, 'accountant/add_customer.html')
def sales(request, customer_id):
    """List all sales recorded for one customer."""
    customer = Customer.objects.get(id=customer_id)
    context = {
        'customer': customer,
        'customer_sales': Sale.objects.filter(customer=customer),
    }
    return render(request, 'accountant/sales.html', context)
def add_sale(request, customer_id):
    """Record a sale for *customer_id*; show the sale form on GET.

    Both POST outcomes redirect back to the customer's sales list.
    NOTE(review): Admin_User.objects.get raises DoesNotExist when the
    logged-in user is not an admin -- confirm only admins reach this view.
    """
    admin = Admin_User.objects.get(user_ptr_id=request.user.id)
    customer = Customer.objects.get(id=customer_id)
    if request.method == 'POST':
        form = AddSaleForm(request.POST)
        # Removed stray debug print(form) from the original.
        if form.is_valid():
            form.save()
            messages.success(request, 'Sale added successfully')
        else:
            messages.error(request, 'Form Validation Failed')
        return redirect('Accountant:sales', customer_id)
    context = {
        'admin': admin,
        'customer': customer,
        'products': Product.objects.all(),
    }
    return render(request, 'accountant/add_sale.html', context)
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,245
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/migrations/0003_auto_20190526_1816.py
|
# Generated by Django 2.2.1 on 2019-05-26 18:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the Stock, Product and Sales models and set verbose names."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accountant', '0002_admin'),
    ]

    operations = [
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
                ('buying_price', models.IntegerField()),
            ],
            options={
                'verbose_name': 'Product',
            },
        ),
        migrations.CreateModel(
            name='Stock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
            ],
            options={
                'verbose_name': 'Stock',
            },
        ),
        migrations.AlterModelOptions(
            name='accountant',
            options={'verbose_name': 'Accountant'},
        ),
        migrations.AlterModelOptions(
            name='admin',
            options={'verbose_name': 'Admin'},
        ),
        # 'Sales' is renamed to 'Sale' in migration 0004.
        migrations.CreateModel(
            name='Sales',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('units_sold', models.IntegerField()),
                ('price_per_unit', models.IntegerField()),
                ('total', models.IntegerField()),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accountant.Product')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Sale',
            },
        ),
        migrations.AddField(
            model_name='product',
            name='stock',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accountant.Stock'),
        ),
    ]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,246
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/urls.py
|
from django.urls import path
from accountant import views
app_name = 'Accountant'

# URL routes for the accountant app.
# NOTE(review): views.sign_up redirects to 'Accounts:signup' and
# 'Farmer:my_sales', neither of which is defined in this 'Accountant'
# namespace -- verify those namespaces exist elsewhere in the project.
urlpatterns = [
    path('landing_page', views.landing, name='landing_page'),
    path('sign_up', views.sign_up, name='sign_up'),
    path('sign_in', views.sign_in, name='sign_in'),
    path('dashboard', views.dashboard, name='dashboard'),
    path('stocks', views.stocks, name='stocks'),
    path('products/<int:stock_id>', views.products, name='products'),
    path('new/product/<int:stock_id>', views.new_product, name='new_product'),
    path('new/stock', views.new_stock, name='new_stock'),
    path('customers', views.customers, name='customers'),
    path('customer/sales/<int:customer_id>', views.sales, name='sales'),
    path('add/customer', views.add_customer, name='add_customer'),
    path('add/customer/sale/<int:customer_id>', views.add_sale, name='add_sale')
]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,247
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/migrations/0004_auto_20190526_1821.py
|
# Generated by Django 2.2.1 on 2019-05-26 18:21
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the Sales model (created in 0003) to Sale."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accountant', '0003_auto_20190526_1816'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Sales',
            new_name='Sale',
        ),
    ]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,248
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/migrations/0007_sale_customer_name.py
|
# Generated by Django 2.2.1 on 2019-05-31 07:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a customer_name text field to Sale (converted to a FK in 0008)."""

    dependencies = [
        ('accountant', '0006_product_number_of_units'),
    ]

    operations = [
        migrations.AddField(
            model_name='sale',
            name='customer_name',
            # default='simo' backfills existing rows; preserve_default=False
            # keeps the field default-less afterwards.
            field=models.CharField(default='simo', max_length=250),
            preserve_default=False,
        ),
    ]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,249
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/admin.py
|
from django.contrib import admin
# Register your models here.
from accountant.models import Admin_User, Accountant, Stock
# Expose the account and inventory models in the Django admin site.
for _model in (Admin_User, Accountant, Stock):
    admin.site.register(_model)
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,250
|
kiamasimon/efarmer
|
refs/heads/master
|
/accountant/migrations/0005_auto_20190526_1836.py
|
# Generated by Django 2.2.1 on 2019-05-26 18:36
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Add created_at/updated_at audit timestamps to Product, Sale and Stock."""

    dependencies = [
        ('accountant', '0004_auto_20190526_1821'),
    ]

    operations = [
        # For each model: created_at uses a one-off now() default to backfill
        # existing rows (preserve_default=False drops it afterwards), while
        # updated_at is maintained automatically via auto_now.
        migrations.AddField(
            model_name='product',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='product',
            name='updated_at',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name='sale',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='sale',
            name='updated_at',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name='stock',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='stock',
            name='updated_at',
            field=models.DateTimeField(auto_now=True),
        ),
    ]
|
{"/accountant/forms.py": ["/accountant/models.py"], "/accountant/views.py": ["/accountant/forms.py", "/accountant/models.py"], "/accountant/admin.py": ["/accountant/models.py"]}
|
19,263
|
Kes-trel/Iris-Flower-Cluster-Analysis
|
refs/heads/main
|
/app.py
|
import pandas as pd
import streamlit as st
import seaborn as sns
sns.set()
from functions import screen_data, scater_plot, elbow_method, cluster_data, scale_cluster_data
# Streamlit app: KMeans species segmentation of the iris dataset.
# Load the raw iris measurements once at startup.
raw_data = pd.read_csv("iris_with_species.csv")

st.set_page_config(
    page_title="Species Segmentation with Cluster Analysis",
    page_icon="🌼",
    layout="wide",
    initial_sidebar_state="auto",
)

# Header row: title on the left, scikit-learn logo on the right.
# NOTE(review): st.beta_columns/st.beta_expander are deprecated aliases in
# newer streamlit releases -- confirm the pinned streamlit version.
col_a, col_b = st.beta_columns([4, 1])
col_a.title("Iris flower data set")
col_a.header("Species Segmentation with Cluster Analysis (KMeans)")
col_b.image("https://upload.wikimedia.org/wikipedia/commons/thumb/0/05/Scikit_learn_logo_small.svg/1200px-Scikit_learn_logo_small.svg.png")

# Sidebar controls.
show_real_data = st.sidebar.checkbox("Show real data", help="Test some clusters yourself first")
width = st.sidebar.slider("Plot width", min_value=1, max_value=25, step=1, value=12, format="%i")
height = st.sidebar.slider("Plot height", min_value=1, max_value=25, step=1, value=5, format="%i")
plot_size = (width, height)
petals_or_sepals = st.sidebar.radio("Select data for analysis", ["sepal", "petal"])
scale_data_box = st.sidebar.checkbox("Scale Data", help="Standardize features by removing the mean and scaling to unit variance. For more info read here: https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#")
test_clusters = st.sidebar.number_input("Test clusters", value=1, min_value=1, step=1, format="%i")

# Cluster the selected (sepal/petal) columns, optionally standardized first.
data_screen = screen_data(data=raw_data, petal_sepal=petals_or_sepals)
if scale_data_box:
    data = scale_cluster_data(data=data_screen, number_of_clusters=test_clusters)
else:
    data = cluster_data(data=data_screen, number_of_clusters=test_clusters)

# Ground truth: map the three species onto reference cluster labels 0..2.
real_results = raw_data.copy()
real_results["clusters"] = real_results["species"].map({"setosa": 0, "versicolor": 1, "virginica": 2})

if show_real_data:
    col1, col2 = st.beta_columns(2)
    col1.subheader("Your analysis")
    col1.pyplot(scater_plot(data=data, value=petals_or_sepals, size=plot_size))
    col2.subheader("Real data")
    col2.pyplot(scater_plot(data=real_results, value=petals_or_sepals, size=plot_size))
else:
    st.pyplot(scater_plot(data=data, value=petals_or_sepals, size=plot_size))

if st.sidebar.checkbox("Get a hint with Elbow Method"):
    number_of_cluster_elbow = st.sidebar.number_input("Select number of cluster", value=5, min_value=1, step=1, format="%i", help="""Cluster number keeps track the highest number of clusters we want to use the WCSS method for.
More info https://en.wikipedia.org/wiki/Elbow_method_(clustering)""")
    st.pyplot(elbow_method(data=data, clusters=number_of_cluster_elbow, size=plot_size))

# Typo fixed in the displayed text: "Eblow" -> "Elbow".
conclusion = """
The original dataset has 3 sub-species of the Iris flower. Therefore, the number of clusters is 3.
Read more here: https://en.wikipedia.org/wiki/Iris_flower_data_set
This shows us that:
* the Elbow method is imperfect (we might have opted for 2 or even 4 clusters)
* k-means is very useful in moments where we already know the number of clusters - in this case: 3
* biology cannot be always quantified
"""

iris = "https://upload.wikimedia.org/wikipedia/commons/thumb/4/41/Iris_versicolor_3.jpg/1280px-Iris_versicolor_3.jpg"

if show_real_data:
    with st.beta_expander("Iris flower data set"):
        col_1, col_2 = st.beta_columns([2, 1])
        col_1.write(conclusion)
        col_2.image(iris)
|
{"/app.py": ["/functions.py"]}
|
19,264
|
Kes-trel/Iris-Flower-Cluster-Analysis
|
refs/heads/main
|
/functions.py
|
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn import preprocessing
def screen_data(petal_sepal, data):
    """Return the sepal or petal measurement columns of *data*.

    'sepal' selects everything but the last three columns (petal measures
    and species); 'petal' selects columns 2..-2 (the petal measures).
    Any other value returns None (original fall-through behaviour, now
    explicit). Also removed a dead ``data.copy()`` that was immediately
    overwritten in the original.
    """
    if petal_sepal == "sepal":
        return data.iloc[:, :-3]
    if petal_sepal == "petal":
        return data.iloc[:, 2:-1]
    return None
def scater_plot(data, value, size):
    """Scatter length vs width for *value* ('sepal'/'petal').

    Points are coloured by the 'clusters' column when present. Returns the
    matplotlib figure so the caller (streamlit) can render it.
    """
    x_col = f"{value}_length"
    y_col = f"{value}_width"
    fig, ax = plt.subplots(figsize=size)
    if "clusters" in data.columns:
        ax.scatter(data[x_col], data[y_col], c=data["clusters"], cmap="rainbow")
    else:
        ax.scatter(data[x_col], data[y_col])
    plt.xlabel(f"Length of {value}")
    plt.ylabel(f"Width of {value}")
    return fig
def elbow_method(clusters, data, size):
    """Plot WCSS (inertia) for k = 1..*clusters* so the user can spot the elbow."""
    # KMeans.fit returns the fitted estimator, so inertia_ can be chained.
    wcss = [KMeans(k).fit(data).inertia_ for k in range(1, clusters + 1)]
    fig, ax = plt.subplots(figsize=size)
    ax.plot(range(1, clusters + 1), wcss)
    plt.title('The Elbow Method')
    plt.xlabel('Number of clusters')
    plt.ylabel('Within-cluster Sum of Squares')
    return fig
def cluster_data(number_of_clusters, data):
    """Return a copy of *data* with a 'clusters' column assigned by KMeans.

    Fixed: the original called fit() and then fit_predict(), fitting the
    model twice; a single fit_predict() is sufficient and equivalent.
    """
    data_c = data.copy()
    kmeans = KMeans(number_of_clusters)
    data_c["clusters"] = kmeans.fit_predict(data_c)
    return data_c
def scale_cluster_data(number_of_clusters, data):
    """Like cluster_data(), but standardizes features before clustering.

    Fixed: uses StandardScaler.fit_transform() and a single
    KMeans.fit_predict() instead of fitting the scaler and the model
    twice each, as the original did.
    """
    data_scaled = preprocessing.StandardScaler().fit_transform(data)
    data_c = data.copy()
    kmeans = KMeans(number_of_clusters)
    # Cluster on the scaled values; labels are attached to the raw copy.
    data_c["clusters"] = kmeans.fit_predict(data_scaled)
    return data_c
|
{"/app.py": ["/functions.py"]}
|
19,265
|
marticongost/woost.extensions.nocaptcha
|
refs/heads/master
|
/woost/extensions/nocaptcha/__init__.py
|
#-*- coding: utf-8 -*-
"""
.. moduleauthor:: Martí Congost <marti.congost@whads.com>
"""
from woost import app
from . import settings, admin
from .member import NoCaptcha, NoCaptchaValidationError
from .form import add_nocaptcha, requires_nocaptcha
|
{"/woost/extensions/nocaptcha/__init__.py": ["/woost/extensions/nocaptcha/member.py", "/woost/extensions/nocaptcha/form.py"], "/woost/extensions/nocaptcha/form.py": ["/woost/extensions/nocaptcha/member.py"]}
|
19,266
|
marticongost/woost.extensions.nocaptcha
|
refs/heads/master
|
/woost/extensions/nocaptcha/settings.py
|
#-*- coding: utf-8 -*-
"""
.. moduleauthor:: Martí Congost <marti.congost@whads.com>
"""
from cocktail import schema
from cocktail.translations import translations
from woost.models import add_setting, Configuration
# Make the extension's translated setting labels available.
translations.load_bundle("woost.extensions.nocaptcha.settings")

# Register both reCAPTCHA keys as site-wide configurable string settings,
# excluded from full-text search.
for _key in ("x_nocaptcha_public_key", "x_nocaptcha_private_key"):
    add_setting(
        schema.String(
            _key,
            text_search=False,
        )
    )
|
{"/woost/extensions/nocaptcha/__init__.py": ["/woost/extensions/nocaptcha/member.py", "/woost/extensions/nocaptcha/form.py"], "/woost/extensions/nocaptcha/form.py": ["/woost/extensions/nocaptcha/member.py"]}
|
19,267
|
marticongost/woost.extensions.nocaptcha
|
refs/heads/master
|
/setup.py
|
#-*- coding: utf-8 -*-
"""
.. moduleauthor:: Martí Congost <marti.congost@whads.com>
"""
from setuptools import setup
setup(
name = "woost.extensions.nocaptcha",
version = "0.0b1",
author = "Whads/Accent SL",
author_email = "tech@whads.com",
maintainer = "Marti Congost",
maintainer_email = "marti.congost@whads.com",
url = "http://woost.info",
description =
"""
Woost extension to integrate NoCaptcha controls.
""",
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: ZODB",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Natural Language :: Catalan",
"Natural Language :: Spanish",
"Programming Language :: Python :: 2",
"Topic :: Internet :: WWW/HTTP :: Site Management"
],
install_requires = [
"woost>=3.0b1,<3.1"
],
packages = ["woost.extensions.nocaptcha"],
include_package_data = True,
zip_safe = False
)
|
{"/woost/extensions/nocaptcha/__init__.py": ["/woost/extensions/nocaptcha/member.py", "/woost/extensions/nocaptcha/form.py"], "/woost/extensions/nocaptcha/form.py": ["/woost/extensions/nocaptcha/member.py"]}
|
19,268
|
marticongost/woost.extensions.nocaptcha
|
refs/heads/master
|
/woost/extensions/nocaptcha/form.py
|
#-*- coding: utf-8 -*-
"""
.. moduleauthor:: Martí Congost <marti.congost@whads.com>
"""
from cocktail.events import when
from .member import NoCaptcha
def add_nocaptcha(form, **member_kwargs):
member_kwargs.setdefault("name", "nocaptcha")
member_kwargs.setdefault("member_group", "nocaptcha")
member = NoCaptcha(**member_kwargs)
form.schema.add_member(member, append = True)
if member.member_group and form.schema.groups_order:
if not isinstance(form.schema.groups_order, list):
form.schema.groups_order = list(form.schema.groups_order)
form.schema.groups_order.append(member.member_group)
form.adapter.exclude(member.name)
return member
def requires_nocaptcha(form_class, **member_kwargs):
@when(form_class.declared)
def handler(e):
add_nocaptcha(e.source, **member_kwargs)
return form_class
|
{"/woost/extensions/nocaptcha/__init__.py": ["/woost/extensions/nocaptcha/member.py", "/woost/extensions/nocaptcha/form.py"], "/woost/extensions/nocaptcha/form.py": ["/woost/extensions/nocaptcha/member.py"]}
|
19,269
|
marticongost/woost.extensions.nocaptcha
|
refs/heads/master
|
/woost/extensions/nocaptcha/admin/sections.py
|
#-*- coding: utf-8 -*-
"""
.. moduleauthor:: Martí Congost <marti.congost@whads.com>
"""
from cocktail.events import when
from cocktail.translations import translations
from woost.admin.sections import Settings
from woost.admin.sections.contentsection import ContentSection
translations.load_bundle("woost.extensions.nocaptcha.admin.sections")
class NoCaptchaSection(Settings):
icon_uri = "woost.extensions.nocaptcha.admin.ui://images/nocaptcha.svg"
members = [
"x_nocaptcha_public_key",
"x_nocaptcha_private_key",
]
@when(ContentSection.declared)
def fill(e):
e.source.append(NoCaptchaSection("nocaptcha"))
|
{"/woost/extensions/nocaptcha/__init__.py": ["/woost/extensions/nocaptcha/member.py", "/woost/extensions/nocaptcha/form.py"], "/woost/extensions/nocaptcha/form.py": ["/woost/extensions/nocaptcha/member.py"]}
|
19,270
|
marticongost/woost.extensions.nocaptcha
|
refs/heads/master
|
/woost/extensions/nocaptcha/member.py
|
#-*- coding: utf-8 -*-
"""
.. moduleauthor:: Martí Congost <marti.congost@whads.com>
"""
import cherrypy
from json import loads
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
from cocktail.translations import translations
from cocktail import schema
from cocktail.html.uigeneration import default_edit_control
from cocktail.schema.exceptions import ValidationError
from woost.models import get_setting
translations.load_bundle("woost.extensions.nocaptcha.member")
class NoCaptcha(schema.String):
"""A member that handles noCaptcha values."""
VERIFY_SERVER = "https://www.google.com/recaptcha/api/siteverify"
def __init__(self, name = None, *args, **kwargs):
kwargs.setdefault("parameter_name", "g-recaptcha-response")
if not name:
name = "nocaptcha"
schema.String.__init__(self, name, *args, **kwargs)
def _default_validation(self, context):
"""Validation rule for noCaptcha. Checks that the L{response}
member is valid for the L{public_key} and L{private_key}
constraints.
"""
for error in schema.Member._default_validation(self, context):
yield error
value = context.value
if value:
params = urllib.parse.urlencode({
"secret" : get_setting("x_nocaptcha_private_key"),
"response" : value,
"remoteip" : cherrypy.request.remote.ip
}).encode("utf-8")
request = urllib.request.Request(
url = self.VERIFY_SERVER,
data = params,
headers = {
"Content-type" : "application/x-www-form-urlencoded",
"User-agent" : "Woost noCAPTCHA extension"
}
)
httpresp = urllib.request.urlopen(request)
return_values = httpresp.read()
httpresp.close()
response_json = loads(return_values)
if not response_json['success']:
yield NoCaptchaValidationError(context)
else:
yield NoCaptchaValidationError(context)
class NoCaptchaValidationError(ValidationError):
"""A validation error produced when the user fails a NoCaptcha
validation.
"""
default_edit_control.set_member_type_display(
NoCaptcha,
"woost.extensions.nocaptcha.NoCaptchaBox"
)
|
{"/woost/extensions/nocaptcha/__init__.py": ["/woost/extensions/nocaptcha/member.py", "/woost/extensions/nocaptcha/form.py"], "/woost/extensions/nocaptcha/form.py": ["/woost/extensions/nocaptcha/member.py"]}
|
19,298
|
Architect0711/PythonReference
|
refs/heads/master
|
/OOP/my_module_folder/__init__.py
|
__all__ = [ "my_submodule_1" ]
from .my_submodule_1 import my_submodule_1
|
{"/OOP/my_module_folder/__init__.py": ["/OOP/my_module_folder/my_submodule_1.py"]}
|
19,299
|
Architect0711/PythonReference
|
refs/heads/master
|
/OOP/my_module_folder/my_submodule_1.py
|
class my_submodule_1():
def do_something(self):
print("my_submodule_1 does something")
|
{"/OOP/my_module_folder/__init__.py": ["/OOP/my_module_folder/my_submodule_1.py"]}
|
19,300
|
Architect0711/PythonReference
|
refs/heads/master
|
/OOP/my_module.py
|
def method_1():
print("my_module.method_1()")
def method_2():
print("my_module.method_2()")
|
{"/OOP/my_module_folder/__init__.py": ["/OOP/my_module_folder/my_submodule_1.py"]}
|
19,304
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/feature_generation/generate_ground_truth.py
|
import csv
import os
from subprocess import run
# Execute after generating feature values. (generate_feature_values)
def prepare_files_oneset(snapshot_date, classification_types, input_folder, output_folder, with_classification=True):
classification = {}
if os.path.exists("{}/{}/domains_classification.csv".format(input_folder, snapshot_date)):
with open("{}/{}/domains_classification.csv".format(input_folder, snapshot_date)) as classification_file:
classification_csvr = csv.reader(classification_file)
for row in classification_csvr:
classification[row[0]] = row[1:]
for classification_type in classification_types:
with open("{}/{}/feature_values_{}.csv".format(output_folder, snapshot_date, classification_type)) as values_file:
csvr = csv.reader(values_file)
header = next(csvr)
domain_idx = header.index("domain")
with open("{}/{}/weka_output_{}.csv".format(output_folder, snapshot_date, classification_type), "w") as weka_output:
csvw = csv.writer(weka_output)
csvw.writerow(["domain"] + header[:domain_idx] + header[domain_idx + 1:] + ["class"])
for row in csvr:
domain = row[domain_idx]
if with_classification:
if domain not in classification:
print("Not classified:", domain)
continue
domain_class_row = classification.get(domain)
if domain_class_row[2] == "undetermined":
continue
domain_class = "malicious" if domain_class_row[3] == "True" else "benign"
else:
domain_class = None
csvw.writerow([domain] + row[:domain_idx] + row[domain_idx + 1:] + [domain_class])
cmd = '''head -n 1 weka_output_{}.csv > use_in_weka.csv; for f in '''.format(classification_types[0])
cmd += " ".join(["weka_output_{}.csv".format(t) for t in classification_types])
cmd += '''; do tail -n +2 $f | sed 's/"Limited Liability Company ""Registrar of domain names REG.RU"""/"Limited Liability Company Registrar of domain names REG.RU"/g' >> use_in_weka.csv; done;'''
run(cmd,
cwd=os.path.join(os.path.dirname(os.path.realpath(__file__)), output_folder, snapshot_date), shell=True )
def prepare_files_multiplesets_split_by_features_all_instances(snapshot_date, classification_types, input_folder, output_folder, with_classification=True):
with open("{}/{}/feature_values_{}.csv".format(output_folder, snapshot_date, classification_types[0])) as values_file:
csvr = csv.reader(values_file)
header = next(csvr)
domain_idx = header.index("domain")
dnsdb_idx = header.index("dnsdb_available")
whois_idx = header.index("whois_available")
# openintel_idx = header.index("openintel_available")
output_files = {}
for available in ["dnsdb", "whois", "none"]: #"openintel",
output_file = open("{}/{}/weka_multi_output_features_all_instances_{}.csv".format(output_folder, snapshot_date, available), "w")
output_csvw = csv.writer(output_file)
output_header = header.copy()
idxes_to_keep = set()
# if available == "none":
idxes_to_keep.update({header.index(f) for f in header if not f.startswith("dnsdb") and not f.startswith("whois") and f != "domain" and f != "suffix_type"})
if available == "dnsdb":
idxes_to_keep.update({header.index(f) for f in header if f.startswith("dnsdb_") and f.split("_")[-1] not in "CAA HINFO PTR RP SPF".split()})
idxes_to_keep -= {dnsdb_idx}
elif available == "whois":
idxes_to_keep.update({header.index(f) for f in header if f.startswith("whois_") }) # and f != "whois_registrar"
idxes_to_keep -= {whois_idx}
# elif available == "openintel":
# idxes_to_keep.update({header.index(f) for f in header if f.startswith("openintel_") }) # and f != "whois_registrar"
# idxes_to_keep -= {openintel_idx}
output_header = [el for idx, el in enumerate(output_header) if idx in idxes_to_keep]
output_files[available] = (output_csvw, idxes_to_keep)
output_csvw.writerow(["domain"] + output_header +["class"])
classification = {}
if os.path.exists("{}/{}/domains_classification.csv".format(input_folder, snapshot_date)):
with open("{}/{}/domains_classification.csv".format(input_folder, snapshot_date)) as classification_file:
classification_csvr = csv.reader(classification_file)
for row in classification_csvr:
classification[row[0]] = row[1:]
for classification_type in classification_types:
with open("{}/{}/feature_values_{}.csv".format(output_folder, snapshot_date, classification_type)) as values_file:
csvr = csv.reader(values_file)
next(csvr)
for row in csvr:
domain = row[domain_idx]
if with_classification:
if domain not in classification:
print("Not classified:", domain)
continue
domain_class_row = classification.get(domain)
if domain_class_row[2] == "undetermined":
continue
domain_class = "malicious" if domain_class_row[3] == "True" else "benign"
else:
domain_class = None
dnsdb_available = row[dnsdb_idx]
whois_available = row[whois_idx]
# openintel_available = row[openintel_idx]
# While passive DNS is considered a different data set in terms of cost/...,
# the absence of data from passive DNS can be considered equal to having 0 queries.
if True or dnsdb_available == "True":
output_dnsdb, idxes_to_keep_dnsdb = output_files["dnsdb"]
dnsdb_row = [el for idx, el in enumerate(row) if idx in idxes_to_keep_dnsdb]
output_dnsdb.writerow([domain] + dnsdb_row + [domain_class])
if whois_available == "True":
output_whois, idxes_to_keep_whois = output_files["whois"]
whois_row = [el for idx, el in enumerate(row) if idx in idxes_to_keep_whois]
output_whois.writerow([domain] + whois_row + [domain_class])
# if openintel_available == "True":
# output_openintel, idxes_to_keep_openintel = output_files["openintel"]
# openintel_row = [el for idx, el in enumerate(row) if idx in idxes_to_keep_openintel]
# output_openintel.writerow([domain] + openintel_row + [domain_class])
output_none, idxes_to_keep_none = output_files["none"]
none_row = [el for idx, el in enumerate(row) if idx in idxes_to_keep_none]
output_none.writerow([domain] + none_row + [domain_class])
if __name__ == '__main__':
input_tuples = [("20171129", ["no_action", "action_seize"]),
("20181129", ["no_action", "action_seize"]),
("20191129", ["no_action", "action_seize"])]
input_folder = "input_data"
output_folder = "output_data"
for snapshot_date, classification_types in input_tuples:
prepare_files_multiplesets_split_by_features_all_instances(snapshot_date, classification_types, input_folder, output_folder, with_classification=True)
prepare_files_oneset(snapshot_date, classification_types, input_folder, output_folder, with_classification=True)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,305
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/dataprocessing/sampleselection.py
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from evaluation.postanalysis import workReducedPostDomains
from joblib import load
import random as rand
def random(x, y, **kwargs):
'''randomly pick domains'''
try:
fraction = kwargs['fraction']
except KeyError:
fraction = 0.1
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=1-fraction, shuffle=True)
return x_train, x_test, y_train, y_test
def practical(x, y, **kwargs):
'''pick domains in the most practical manner, those that are most likely to have to be classified manually'''
sourcepattern = kwargs['code']
clf = load('models/2017/model' + sourcepattern + '.joblib')
scores = clf.predict_proba(x)
negative_pred_ind, no_action_pred_ind, positive_pred_ind = workReducedPostDomains('2017', sourcepattern, scores)
positive_pred = x.loc[positive_pred_ind]
negative_pred = x.loc[negative_pred_ind]
x_train = x.loc[no_action_pred_ind]
y_train = y[no_action_pred_ind]
print('benign', len(y_train)-sum(y_train), 'malicious', sum(y_train))
positive_pred_labels = y[positive_pred_ind]
negative_pred_labels = y[negative_pred_ind]
x_test = pd.concat([positive_pred, negative_pred])
y_test = np.concatenate((positive_pred_labels, negative_pred_labels))
return x_train, x_test, y_train, y_test
def createTrueFalseList(length, true_indices):
out = []
for i in range(length):
if i in true_indices:
out.append(True)
else:
out.append(False)
return out
def practicalFraction(x,y, **kwargs):
try:
fraction = kwargs['fraction']
except KeyError:
fraction = 0.5
sourcepattern = kwargs['code']
clf = load('models/2017/model' + sourcepattern + '.joblib')
scores = clf.predict_proba(x)
negative_pred_ind, no_action_pred_ind, positive_pred_ind = workReducedPostDomains('2017', sourcepattern, scores)
ind_where_true = [i for i, b in zip(range(len(no_action_pred_ind)), no_action_pred_ind) if b]
if fraction <= 1:
amount_of_train_domains = int(fraction*len(ind_where_true))
else:
amount_of_train_domains = fraction
ind_where_true_train = rand.sample(ind_where_true, amount_of_train_domains)
ind_where_true_test = [i for i in ind_where_true if i not in ind_where_true_train]
no_action_pred_ind_train = createTrueFalseList(len(no_action_pred_ind), ind_where_true_train)
no_action_pred_ind_test = createTrueFalseList(len(no_action_pred_ind), ind_where_true_test)
positive_pred = x.loc[positive_pred_ind]
negative_pred = x.loc[negative_pred_ind]
no_action_test = x.loc[no_action_pred_ind_test]
x_train = x.loc[no_action_pred_ind_train]
y_train = y[no_action_pred_ind_train]
no_action_test_labels = y[no_action_pred_ind_test]
print('benign', len(y_train) - sum(y_train), 'malicious', sum(y_train))
positive_pred_labels = y[positive_pred_ind]
negative_pred_labels = y[negative_pred_ind]
x_test = pd.concat([positive_pred, negative_pred, no_action_test])
y_test = np.concatenate((positive_pred_labels, negative_pred_labels, no_action_test_labels))
print('practical')
return x_train, x_test, y_train, y_test
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,306
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/production_train.py
|
import argparse
import pandas as pd
import numpy as np
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import GridSearchCV
from joblib import dump, load
from dataprocessing.preprocessing import loadAndCleanDataMaxDom
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Train classifier in one year, tune its hyperparameters with 10 fold cross validation')
parser.add_argument('--sources', '-s',
type=str,
default="1111",
help='what datasets to use in a binary pattern, reputation + lexicographic, passivedns, whois, activedns')
parser.add_argument('--tuning', '-t',
type=bool,
default=False,
help='Whether to tune or take hyperparameters of past')
parser.add_argument('--year', '-y',
type=str,
default='2017',
help='year to consider')
args = parser.parse_args()
sourcepattern = args.sources
tune = args.tuning
year = args.year
features, labels, post_analysis_labels = loadAndCleanDataMaxDom(sourcepattern, False, year)
print(labels.sum())
metrics = {'f1': [], 'precision': [], 'recall': [], 'auc': [], 'acc_train': [], 'acc_test': [], 'eer': []}
data = {'x_test': np.empty((0, features.shape[1])), 'y_test': np.empty((0,)), 'y_pred': np.empty((0,)),
'importance': np.zeros(len(features.columns)), 'agg_scores_train': [], 'agg_scores_test': [],
'labels_train': [], 'labels_test': [], 'estimators': [],
'y_post': np.empty((0, post_analysis_labels.shape[1])),
'domainname_test': []}
param_grid = [
{'loss': ['deviance', 'exponential'], 'learning_rate': [2 ** x for x in range(-5, 2, 1)], \
'n_estimators': [2 ** x for x in range(4, 10)], 'min_samples_split': [2, 3, 4, 6], \
'min_samples_leaf': [2, 3, 4, 6], 'max_features': ['auto', 'log2', 'sqrt'], 'max_depth': [3, 6, 12]}
]
if tune:
gbc = GradientBoostingClassifier()
clf = GridSearchCV(gbc, param_grid, cv=10, scoring='f1', n_jobs=4)
clf.fit(features, labels)
params = clf.best_params_
else:
clf_tuned = load('models/2017/model' + sourcepattern + '.joblib')
if isinstance(clf_tuned, GradientBoostingClassifier):
params = clf_tuned.get_params()
clf = GradientBoostingClassifier(**params)
else:
params = clf_tuned.best_params_
clf = GradientBoostingClassifier(**params, random_state=44)
clf.fit(features, labels)
dump(clf, 'models/' + year + '/model' + sourcepattern + '.joblib')
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,307
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/ensemble_evaluation.py
|
import pandas as pd
import numpy as np
import utils
import argparse
from joblib import load
import itertools
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import KFold, StratifiedKFold
from sklearn.metrics import confusion_matrix, f1_score, roc_auc_score, precision_score, recall_score, accuracy_score
from evaluation.metrics import workReducedPostLoadThr, workReducedPostDetermineThrOneGo, workReducedPostDetermineThr
import dataprocessing.preprocessing as pre
import macroify
import bob.measure
'''Evaluates the ensemble when training and testing on the same year. Thus, executes experiment.py for every model.'''
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Do the avalanche experiments')
parser.add_argument('--year', '-y',
type=str,
default='2018',
help='year to consider')
args = parser.parse_args()
year = args.year
available, reputation, dns, whois, openintel, label = pre.loadAndCleanDataPerDataSet(False, year)
total_fp = 0
total_fn = 0
total_manual = 0
total_pred = 0
total_amount_of_domains = len(available.index)
classDictionary = {'malicious': 1, 'benign': 0}
labelzsss = label.map(classDictionary)
total_amount_positive = labelzsss.sum()
total_amount_negative = len(labelzsss.index) - labelzsss.sum()
l = [False, True]
dfs = []
codesz = []
ensemble_predictions = []
ensemble_labels = []
ensemble_scores_pos = []
ensemble_scores_neg = []
ensemble_predictions_priori = []
ensemble_labels_priori = []
metrics = {'f1': [], 'precision': [], 'recall': [], 'acc_train': [], 'acc_test': [], 'eer': [],
'fnr_work_reduced': [],
'fpr_work_reduced': [], 'work_reduced': [], 'work_reduced_negative': [], 'work_reduced_positive': []}
i = 1
kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=44)
for train_index, test_index in kf.split(available.values, label):
# Df index.
df_train_ind, df_test_ind = available.iloc[train_index].index, available.iloc[test_index].index
for x in itertools.product(l, repeat=4):
code = ''.join(['1' if i else '0' for i in x])
if code != '0000':
features_maxdata, labelzz_max_data, _ = pre.loadAndCleanDataMaxDom(code, False, year)
labelzz_max_data = pd.Series(labelzz_max_data, index=features_maxdata.index)
tr_index = df_train_ind.intersection(features_maxdata.index)
te_index = df_test_ind.intersection(features_maxdata.index)
x_train, x_test = features_maxdata.loc[tr_index], features_maxdata.loc[te_index]
y_train, y_test = labelzz_max_data.loc[tr_index], labelzz_max_data.loc[te_index]
clf_tuned = load('models/'+ year +'/model' + code + '.joblib')
if isinstance(clf_tuned, GradientBoostingClassifier):
params = clf_tuned.get_params()
clf = GradientBoostingClassifier(**params)
else:
params = clf_tuned.best_params_
clf = GradientBoostingClassifier(**params, random_state=42)
clf.fit(x_train, y_train)
y_pred = clf.predict(x_test)
scores = clf.predict_proba(x_test)
features, labelzz = pre.loadAndCleanDataExactPattern(x, available, reputation, dns, whois, openintel,
label)
ind_now_in_test_set = features.index.intersection(df_test_ind)
features = features.loc[ind_now_in_test_set]
labelzz = labelzz.loc[ind_now_in_test_set]
amount_of_domains = len(features.index)
codesz.append(code)
print(amount_of_domains, 'domains to classify for sourcepattern', code)
if len(labelzz.index != 0):
scores = clf.predict_proba(features)
predictions = clf.predict(features)
positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
work_reduced_positive, _, _ = workReducedPostDetermineThrOneGo(x_train, y_train, code, scores, labelzz)
total_fp += (len(positive_pred) - sum(positive_pred))
total_fn += sum(negative_pred)
total_manual += len(no_action_pred)
total_pred += (len(positive_pred) + len(negative_pred))
ensemble_predictions = ensemble_predictions + [1] * len(positive_pred) + [0] * len(
negative_pred) + no_action_pred
ensemble_labels = ensemble_labels + positive_pred + negative_pred + no_action_pred
ensemble_predictions_priori = ensemble_predictions_priori + predictions.tolist()
ensemble_labels_priori = ensemble_labels_priori + labelzz.values.tolist()
ensemble_scores_neg = ensemble_scores_neg + scores[:, 1][labelzz == 0].tolist()
ensemble_scores_pos = ensemble_scores_pos + scores[:, 1][labelzz == 1].tolist()
print('Makes a prediction for', (len(positive_pred) + len(negative_pred)), 'domains')
print('Would predict', np.sum(predictions), 'domains malicious')
print('Total work reduced', (total_amount_of_domains-total_manual)/total_amount_of_domains)
print('Total FNR', total_fn/total_amount_positive)
print('Total FPR', total_fp/total_amount_negative)
print('Accuracy', accuracy_score(ensemble_labels, ensemble_predictions))
print('F1', f1_score(ensemble_labels, ensemble_predictions))
print('Precision', precision_score(ensemble_labels, ensemble_predictions))
print('Recall', recall_score(ensemble_labels, ensemble_predictions))
print('Little check', total_amount_positive+total_amount_negative == total_amount_of_domains)
print('Little check', total_pred+total_manual == total_amount_of_domains)
results = {}
y = utils.translateyear(year)
z = utils.translateyear(year)
results[y+z+'workreduced'+ 'posteriori'] = (total_amount_of_domains-total_manual)/total_amount_of_domains *100
results[y+z+'fnr'+ 'posteriori'] = total_fn/total_amount_positive *100
results[y+z+'fpr'+ 'posteriori'] = total_fp/total_amount_negative *100
results[y+z+'accuracy'+ 'posteriori'] = accuracy_score(ensemble_labels, ensemble_predictions) *100
results[y+z+'fone'+ 'posteriori'] = f1_score(ensemble_labels, ensemble_predictions) *100
results[y+z+'precision'+ 'posteriori'] = precision_score(ensemble_labels, ensemble_predictions) *100
results[y+z+'recall' + 'posteriori'] = recall_score(ensemble_labels, ensemble_predictions) *100
results[y+z+'accuracy'] = accuracy_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
results[y+z+'fone'] = f1_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
results[y+z+'precision'] = precision_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
results[y+z+'recall'] = recall_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
results[y+z+'eer'] = bob.measure.eer(ensemble_scores_neg,ensemble_scores_pos) *100
macroify.append_file(results)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,308
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/feature_generation/generate_feature_values.py
|
import csv
import datetime
import os
import re
import traceback
from feature_generation.features import FeatureSet
def traverse_file(file, input_new_domain):
orig_new_domain = input_new_domain
new_domain = re.sub(r'[^\w]', '', input_new_domain) if input_new_domain else input_new_domain
return_line = None
if file not in file_traversal_cache:
file_traversal_cache[file] = {"next_domain": None, "next_record": None, "finished": False}
orig_next_domain = file_traversal_cache[file]["next_domain"]
if orig_next_domain:
next_domain = re.sub(r'[^\w]', '', orig_next_domain)
else:
next_domain = orig_next_domain
while not file_traversal_cache[file]["finished"] and (not orig_next_domain or not new_domain or next_domain < new_domain
or ((next_domain == new_domain) and (orig_next_domain != input_new_domain))) :
try:
next_record = next(file)
file_traversal_cache[file]["next_record"] = next_record
orig_next_domain = next_record[0].lower()
next_domain = re.sub(r'[^\w]', '', orig_next_domain)
file_traversal_cache[file]["next_domain"] = orig_next_domain
if not new_domain:
new_domain = next_domain
except StopIteration:
file_traversal_cache[file]["next_record"] = None
file_traversal_cache[file]["finished"] = True
if new_domain and next_domain == new_domain and orig_new_domain == orig_next_domain:
next_record = file_traversal_cache[file]["next_record"]
return_line = next_record.copy() if next_record else None
return return_line
def generate(formatted_snapshot_date, classification_types, input_folder, output_folder):
snapshot_date = datetime.datetime.strptime(formatted_snapshot_date[:8], "%Y%m%d")
feature_names = FeatureSet.get_feature_names()
remove_sinkholed = True
sinkholed_index = feature_names.inpredex("known_sinkhole")
if remove_sinkholed:
del feature_names[sinkholed_index]
sinkholed_removed_count = 0
total_count = 0
classes_counts = {c:0 for c in classification_types}
abridged = False
if abridged:
dataset_check_descriptors = "DNSDB WHOIS Renewal Validity OpenIntel".split(" ")
else:
dataset_check_descriptors = "DNSDB WHOIS Alexa Umbrella Majestic Quantcast Suffix Renewal Validity Wordlist Wayback CT OpenIntel".split(
" ")
for classification_type in classification_types:
agd_path = "{}/{}/{}.csv".format(input_folder, formatted_snapshot_date, classification_type)
try:
feature_output_file = open("{}/{}/feature_values_{}.csv".format(output_folder, formatted_snapshot_date, classification_type), "w")
feature_output = csv.writer(feature_output_file)
# alphabetically ordered data sets
agd_file = open(agd_path)
agd_csvr = csv.reader(agd_file)
# _header = next(malware_csvr) -- header stripped
dnsdb_pdns_file = open("{}/{}/dnsdb_results_snapshot.csv".format(input_folder, formatted_snapshot_date))
dnsdb_pdns_csvr = csv.reader(dnsdb_pdns_file)
whois_file = open("{}/{}/whois_data_snapshot.csv".format(input_folder, formatted_snapshot_date))
whois_csvr = csv.reader(whois_file)
whois_header = next(whois_csvr)
if not whois_header[0].startswith("domain"):
raise ValueError("Incorrect header on WHOIS file!")
all_toplists_data = {}
for provider in "alexa majestic quantcast umbrella".split(" "):
all_toplists_data[provider] = {}
with open("{}/{}/topsites_results_{}.csv".format(input_folder, formatted_snapshot_date, provider)) as toplists_data:
toplists_csvr = csv.reader(toplists_data)
for row in toplists_csvr:
domain, occurrences, ranksum = row
all_toplists_data[provider][domain] = (int(occurrences), float(ranksum) if ranksum else None)
suffix_file = open("{}/{}/suffix_results.csv".format(input_folder, formatted_snapshot_date))
suffix_csvr = csv.reader(suffix_file)
if os.path.exists("{}/{}/renewal_results.csv".format(input_folder, formatted_snapshot_date)):
renewal_file = open("{}/{}/renewal_results.csv".format(input_folder, formatted_snapshot_date))
renewal_csvr = csv.reader(renewal_file)
else:
renewal_file = None
renewal_csvr = None
if os.path.exists("{}/{}/whois_validity_data.csv".format(input_folder, formatted_snapshot_date)):
whois_validity_file = open("{}/{}/whois_validity_data.csv".format(input_folder, formatted_snapshot_date))
whois_validity_csvr = csv.reader(whois_validity_file)
else:
whois_validity_file = None
whois_validity_csvr = None
wordlist_based_file = open("{}/{}/wordlist_based_results.csv".format(input_folder, formatted_snapshot_date))
wordlist_based_csvr = csv.reader(wordlist_based_file)
feature_output.writerow(feature_names)
dataset_check_results = []
for domain_data in agd_csvr:
domain = domain_data[0]
whois_line = traverse_file(whois_csvr, domain)
if whois_line:
if domain != whois_line[0]:
print(domain, whois_line)
whois_data = dict(zip(whois_header, whois_line)) if whois_line else None
dnsdb_pdns_data = traverse_file(dnsdb_pdns_csvr, domain)
toplists_data = {}
for provider in "alexa majestic quantcast umbrella".split(" "):
toplists_data[provider] = all_toplists_data[provider].get(domain, None)
suffix_data = traverse_file(suffix_csvr, domain)
renewal_data = traverse_file(renewal_csvr, domain) if renewal_csvr else None
whois_validity_data = traverse_file(whois_validity_csvr, domain) if whois_validity_csvr else None
wordlist_based_data = traverse_file(wordlist_based_csvr, domain)
# openintel_adns_data = traverse_file(openintel_adns_csvr, domain)
fs = FeatureSet(domain, snapshot_date, domain_data, dnsdb_pdns_data, whois_data, toplists_data, suffix_data, renewal_data, whois_validity_data, wordlist_based_data)
try:
fs.generate_feature()
exported_feature = fs.export()
if remove_sinkholed:
if exported_feature[sinkholed_index] != None:
sinkholed_removed_count += 1
continue
else:
total_count += 1
classes_counts[classification_type] += 1
del exported_feature[sinkholed_index]
feature_output.writerow(exported_feature)
datasets_available = fs.check_datasets(abridged=abridged)
dataset_check_results.append(datasets_available)
except: # feature generation failed
traceback.print_exc()
continue
finally:
agd_file.close()
dnsdb_pdns_file.close()
whois_file.close()
suffix_file.close()
if renewal_file: renewal_file.close()
if whois_validity_file: whois_validity_file.close()
wordlist_based_file.close()
print(classification_type, snapshot_date, "(stats after sinkholing)")
for idx, results_row in enumerate(zip(*dataset_check_results)):
print(dataset_check_descriptors[idx].ljust(15), str(len([r for r in results_row if r is not False])).rjust(6), str(len([r for r in results_row if r is False])).rjust(6))
print("Sinkholed domains", sinkholed_removed_count)
print("Retained domains", total_count)
print("Counts per class", classes_counts)
if __name__ == '__main__':
input_tuples = [
("20171129", ["no_action", "action_seize"]),
("20181129", ["no_action", "action_seize"]),
("20191129", ["no_action", "action_seize"])
]
input_folder = "input_data"
output_folder = "output_data"
for snapshot_date, classification_types in input_tuples:
file_traversal_cache = {}
generate(snapshot_date, classification_types, input_folder, output_folder)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,309
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/evaluation/preanalysis.py
|
import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from sklearn.covariance import empirical_covariance
from scipy.ndimage import gaussian_filter
def pcaAnalysis(features, labels, path):
'''
Do a PCA.
:param features: the features of the data points
:param labels: labels for data points
:param path: paht to save to
'''
#### PCA
pca = PCA()
scl = StandardScaler()
standardized = scl.fit_transform(features,labels)
reduced = pca.fit_transform(standardized, labels)
sum = np.zeros(pca.components_.shape[0])
for row, sv in zip(pca.components_, pca.singular_values_):
sum = sum + np.abs(row * sv)
for i, v in enumerate(sorted(range(len(sum)), key=lambda k: sum[k])):
print(str(i), '. ', features.columns[v])
sns.heatmap(pca.inverse_transform(np.eye(features.shape[1])), cmap='hot', cbar=False)
plt.xlabel('feature index')
plt.ylabel('principal component')
plt.savefig('figures/' + path + 'pcaheatmap.pdf')
plt.close()
def covMatrix(features, labels, path):
    '''
    Calculate feature correlations and save several covariance heatmaps.

    Produces under figures/<path>:
      - covmatrix.pdf: empirical covariance of the standardized features
      - filtered.pdf: Gaussian-smoothed absolute covariance
      - negativecorrelated.pdf: smoothed negative part of the covariance
      - positivecorrelatedcov.pdf: smoothed positive part of the covariance
      - averageperdatasetcov.pdf: mean absolute covariance aggregated per
        feature-source block (joint / passive DNS / WHOIS / active DNS)

    :param features: feature values of the data points
    :param labels: labels of the data points
    :param path: path prefix to save to
    '''
    scl = StandardScaler()
    standardized = scl.fit_transform(features, labels)
    corr = empirical_covariance(standardized)
    # mask = np.zeros_like(corr, dtype=np.bool)
    # mask[np.triu_indices_from(mask)] = True
    f, ax = plt.subplots(figsize=(11, 9))
    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    sns.heatmap(corr, cmap=cmap, center=0,
                square=True, linewidths=.5, cbar_kws={"shrink": .5})
    plt.savefig('figures/' + path + 'covmatrix.pdf')
    plt.close()
    # Smooth the absolute covariance to bring out coarse block structure.
    filtered = gaussian_filter(np.abs(corr), sigma=2)
    # mask = np.zeros_like(corr, dtype=np.bool)
    # mask[np.triu_indices_from(mask)] = True
    f, ax = plt.subplots(figsize=(11, 9))
    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    sns.heatmap(filtered, cmap=cmap, center=0,
                square=True, linewidths=.5, cbar_kws={"shrink": .5})
    plt.savefig('figures/' + path + 'filtered.pdf')
    plt.close()
    # Keep only the negative correlations before smoothing.
    filtered = gaussian_filter(np.clip(corr, a_min=-1, a_max=0), sigma=2)
    # mask = np.zeros_like(corr, dtype=np.bool)
    # mask[np.triu_indices_from(mask)] = True
    f, ax = plt.subplots(figsize=(11, 9))
    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    sns.heatmap(filtered, cmap=cmap, center=0,
                square=True, linewidths=.5, cbar_kws={"shrink": .5})
    plt.savefig('figures/' + path + 'negativecorrelated.pdf')
    plt.close()
    # Keep only the positive correlations before smoothing.
    filtered = gaussian_filter(np.clip(corr, a_min=0, a_max=1), sigma=2)
    # mask = np.zeros_like(corr, dtype=np.bool)
    # mask[np.triu_indices_from(mask)] = True
    f, ax = plt.subplots(figsize=(11, 9))
    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    sns.heatmap(filtered, cmap=cmap, center=0,
                square=True, linewidths=.5, cbar_kws={"shrink": .5})
    plt.savefig('figures/' + path + 'positivecorrelatedcov.pdf')
    plt.close()
    # Mean absolute covariance within/between feature-source blocks.
    # The hard-coded column ranges (0:10, 10:21, 21:36, 36:) presumably map to
    # the joint, passive-DNS, WHOIS and active-DNS feature groups, matching
    # the axis labels below — TODO confirm against the feature ordering.
    abs_corr = np.abs(corr)
    mat = abs_corr[0:10,0:10]
    jj = np.sum(mat)/(mat.shape[0]*mat.shape[1])
    mat = abs_corr[0:10,10:21]
    jp = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[0:10, 21:36]
    jw = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[0:10, 36:]
    ja = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[10:21, 10:21]
    pp = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[10:21, 21:36]
    pw = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[10:21, 36:]
    pa = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[21:36, 21:36]
    ww = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[21:36, 36:]
    wa = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    mat = abs_corr[36:, 36:]
    aa = np.sum(mat) / (mat.shape[0] * mat.shape[1])
    # Symmetric 4x4 matrix of the per-block averages.
    average = np.array([[jj, jp, jw, ja], [jp, pp, pw, pa], [jw, pw, ww, wa], [ja, pa, wa, aa]])
    f, ax = plt.subplots(figsize=(4,1.8))
    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    sns.heatmap(average, cmap=cmap, center=0, vmax=0.2, annot=True,
                square=False, linewidths=.5, cbar_kws={"shrink": 1},
                xticklabels=["Joint", "Passive\nDNS", "WHOIS", "Active\nDNS"],
                yticklabels=["Joint", "Passive DNS", "WHOIS", "Active DNS"])
    plt.tight_layout()
    plt.savefig('figures/' + path + 'averageperdatasetcov.pdf',bbox_inches='tight', dpi=600)
    plt.close()
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,310
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/evaluation/metrics.py
|
from matplotlib import pyplot as plt
import matplotlib.ticker
import matplotlib.patches as patches
import pandas as pd
import numpy as np
from sklearn.model_selection import KFold, StratifiedKFold
from sklearn.ensemble import GradientBoostingClassifier
from joblib import load
import warnings
warnings.filterwarnings("ignore")
# import bob.measure
def workreduced(scores, labels, costs, plot, savemetrics, path):
    """For each error budget in `costs`, compute the fraction of domains that
    could be classified automatically while staying under that budget.

    Returns two lists (one entry per cost): the work reduced on the negative
    side (bounded by the false-negative rate) and on the positive side
    (bounded by the false-positive rate). Optionally renders the bin plot
    and/or persists the metrics via saveBinMetrics.
    """
    frame = pd.DataFrame({
        'scores': scores,
        'labels': labels,
        'inverse_labels': np.bitwise_xor(labels.astype(int), np.ones(len(labels), dtype=int)),
    })

    # Negative side: sweep scores from low to high; the cumulative count of
    # malicious domains passed so far is the number of false negatives.
    by_score_asc = frame.sort_values('scores', ascending=True)
    asc_cum = np.cumsum(by_score_asc['labels'])
    total_malicious = asc_cum.iloc[-1]
    by_score_asc['cumsum'] = asc_cum
    by_score_asc.index = range(0, len(by_score_asc.index))
    by_score_asc['malrate'] = by_score_asc['cumsum'] / by_score_asc.index

    metricsfnr = []
    thresholdsfnr = []
    for budget in costs:
        admissible = by_score_asc[by_score_asc['cumsum'] / total_malicious < budget]
        if admissible.empty:
            metricsfnr.append(0)
            thresholdsfnr.append(0)
            print('For cost', budget, 'df fnr is empty')
        else:
            metricsfnr.append(admissible.index[-1] / len(by_score_asc.index))
            thresholdsfnr.append(admissible['scores'].iloc[-1])

    # Positive side: sweep scores from high to low; the cumulative count of
    # benign domains passed so far is the number of false positives.
    by_score_desc = frame.sort_values('scores', ascending=False)
    desc_cum = np.cumsum(by_score_desc['inverse_labels'])
    total_benign = desc_cum.iloc[-1]
    by_score_desc['cumsum'] = desc_cum
    by_score_desc.index = range(0, len(by_score_desc.index))
    by_score_desc['benignrate'] = by_score_desc['cumsum']

    metricsfpr = []
    thresholdsfpr = []
    for budget in costs:
        admissible = by_score_desc[by_score_desc['cumsum'] / total_benign < budget]
        if admissible.empty:
            metricsfpr.append(0)
            thresholdsfpr.append(1)
            print('For cost', budget, 'df fpr is empty')
        else:
            metricsfpr.append(admissible.index[-1] / len(by_score_desc.index))
            thresholdsfpr.append(admissible['scores'].iloc[-1])

    if plot:
        plotBins(costs, by_score_desc, total_benign, by_score_asc, total_malicious,
                 metricsfnr, metricsfpr, path, scores, labels)
    if savemetrics:
        saveBinMetrics(metricsfnr, metricsfpr, costs, thresholdsfnr, thresholdsfpr, path)
    return metricsfnr, metricsfpr
def workreducedThrBis(scores, labels, costs, plot, savemetrics, path):
    """Like workreduced, but additionally returns the score thresholds that
    realize each cost budget (needed to apply the cut-offs to new data).

    Returns (metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr), each a
    list with one entry per cost in `costs`.
    """
    frame = pd.DataFrame({
        'scores': scores,
        'labels': labels,
        'inverse_labels': np.bitwise_xor(labels.astype(int), np.ones(len(labels), dtype=int)),
    })

    def sweep(ordered, total, fallback_threshold, side):
        # For each budget: fraction of domains below the admissible cut-off,
        # and the score realizing that cut-off.
        fractions, cutoffs = [], []
        for budget in costs:
            eligible = ordered[ordered['cumsum'] / total < budget]
            if len(eligible.index) == 0:
                fractions.append(0)
                cutoffs.append(fallback_threshold)
                print('For cost', budget, 'df', side, 'is empty')
            else:
                fractions.append(eligible.index[-1] / len(ordered.index))
                cutoffs.append(eligible.loc[:, 'scores'].iloc[-1])
        return fractions, cutoffs

    # Negative side (false-negative budget).
    sorted_asc = frame.sort_values('scores', ascending=True)
    cum_mal = np.cumsum(sorted_asc['labels'])
    total_malicious = cum_mal.iloc[-1]
    sorted_asc['cumsum'] = cum_mal
    sorted_asc.index = range(0, len(sorted_asc.index))
    sorted_asc['malrate'] = sorted_asc['cumsum'] / sorted_asc.index
    metricsfnr, thresholdsfnr = sweep(sorted_asc, total_malicious, 0, 'fnr')

    # Positive side (false-positive budget).
    sorted_desc = frame.sort_values('scores', ascending=False)
    cum_ben = np.cumsum(sorted_desc['inverse_labels'])
    total_benign = cum_ben.iloc[-1]
    sorted_desc['cumsum'] = cum_ben
    sorted_desc.index = range(0, len(sorted_desc.index))
    sorted_desc['benignrate'] = sorted_desc['cumsum']
    metricsfpr, thresholdsfpr = sweep(sorted_desc, total_benign, 1, 'fpr')

    if plot:
        plotBins(costs, sorted_desc, total_benign, sorted_asc, total_malicious,
                 metricsfnr, metricsfpr, path, scores, labels)
    if savemetrics:
        saveBinMetrics(metricsfnr, metricsfpr, costs, thresholdsfnr, thresholdsfpr, path)
    return metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr
def workreducedThr(scores, labels, c):
    """Single-cost variant of workreduced.

    For one maximum error budget `c`, return the fraction of domains that can
    be classified automatically on the negative side (metricsfnr) and on the
    positive side (metricsfpr), together with the score thresholds realizing
    them (0/0 resp. 0/1 fallbacks when no cut-off satisfies the budget).

    :param scores: per-domain maliciousness scores (1-D array-like)
    :param labels: binary ground truth (1 = malicious)
    :param c: maximum tolerated error rate (fraction, e.g. 0.02)
    :return: (metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr)
    """
    df = pd.DataFrame({'scores': scores, 'labels': labels,
                       'inverse_labels': np.bitwise_xor(labels.astype(int), np.ones(len(labels), dtype=int))})
    # FNR side: sweep from the lowest scores upwards; the cumulative count of
    # malicious domains passed so far is the number of false negatives.
    # (The unused 'malrate'/'benignrate' columns of the original were dropped;
    # 'malrate' divided by index 0 and only produced a spurious warning.)
    sorted_asc = df.sort_values('scores', ascending=True)
    cumsum = np.cumsum(sorted_asc['labels'])
    total_malicious = cumsum.iloc[-1]
    sorted_asc['cumsum'] = cumsum
    sorted_asc.index = range(0, len(sorted_asc.index))
    filtered = sorted_asc[sorted_asc['cumsum'] / total_malicious < c]
    if len(filtered.index) != 0:
        ind = filtered.index[-1]
        metricsfnr = ind / len(sorted_asc.index)
        thresholdsfnr = filtered.loc[:, 'scores'].iloc[-1]
    else:
        metricsfnr = 0
        thresholdsfnr = 0
        print('For cost', c, 'df fnr is empty')
    # FPR side: sweep from the highest scores downwards; the cumulative count
    # of benign domains passed so far is the number of false positives.
    sorted_desc = df.sort_values('scores', ascending=False)
    cumsum = np.cumsum(sorted_desc['inverse_labels'])
    total_benign = cumsum.iloc[-1]
    sorted_desc['cumsum'] = cumsum
    sorted_desc.index = range(0, len(sorted_desc.index))
    filtered = sorted_desc[sorted_desc['cumsum'] / total_benign < c]
    if len(filtered.index) != 0:
        ind = filtered.index[-1]
        metricsfpr = ind / len(sorted_desc.index)
        thresholdsfpr = filtered.loc[:, 'scores'].iloc[-1]
    else:
        metricsfpr = 0
        thresholdsfpr = 1
        print('For cost', c, 'df fpr is empty')
    return metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr
def plotBins(costs, sorted_desc, total_benign, sorted_asc, total_malicious, metricsfnr, metricsfpr, path, scores, labels):
    """Render the FNR/FPR-vs-work-reduced figure and save it to
    figures/<path>bins.pdf.

    The ascending-sorted frame yields the false-negative curve, the
    descending-sorted one (reversed) the false-positive curve; only the last
    entry of `costs` is annotated with arrows marking the achievable work
    reduction, and the region above the estimated equal error rate is hatched.
    """
    figsize = (6.4, 3.2)
    f = plt.figure(figsize=figsize)
    # FNR curve: fraction of domains auto-classified benign (x) vs the
    # resulting false-negative rate (y, log scale).
    plt.semilogy(sorted_asc.index/len(sorted_asc.index)*100, sorted_asc['cumsum']/total_malicious *100, label='False negative rate')
    # FPR curve, reversed so both curves share the same x-axis orientation.
    plt.semilogy((sorted_desc.index/len(sorted_desc.index)*100), (sorted_desc['cumsum']/total_benign *100)[::-1], label='False positive rate')
    plt.legend()
    plt.gca().yaxis.set_major_formatter(matplotlib.ticker.ScalarFormatter())
    # find intersection of two curves
    isec = ((sorted_asc['cumsum']/total_malicious *100) - (sorted_desc['cumsum']/total_benign *100)[::-1]).abs()
    # plt.semilogy((isec.index/len(isec.index)*100), 100-isec)
    # NOTE(review): argsort of (100-isec) selects the LARGEST curve
    # differences; for an intersection one would expect isec.argsort() —
    # confirm against the intended EER definition.
    idxmin = ((100-isec).argsort()[0:2])
    print(isec[idxmin[0]-2:idxmin[0]+2])
    eer = ((sorted_asc['cumsum']/total_malicious *100).iloc[idxmin]).mean()
    print("eer", eer)
    # scores_neg/scores_pos are only consumed by the commented-out bob.measure
    # EER computation below.
    scores_neg = scores[labels == 0].tolist()
    scores_pos = scores[labels == 1].tolist()
    # eer = bob.measure.eer(scores_neg, scores_pos)*100
    plt.gca().yaxis.set_ticks([0.1,1,10,100,0.5,2])
    plt.gca().yaxis.set_ticklabels(["0.1"]+["1.0"]+["10.0"]+["100.0"]+["0.5"]+["2.0"])
    plt.ylim((0.05,100))
    plt.xlim((0,100))
    plt.xlabel('Fraction of domains (%)')
    plt.ylabel('Error rate (%)')
    # Transforms between data coordinates and axes fractions for the arrows.
    axis_to_data = plt.gca().transAxes + plt.gca().transData.inverted()
    data_to_axis = axis_to_data.inverted()
    half_spacing_between_arrows = 0.00
    # Annotate only the last (largest) cost budget.
    for c, max_fnr, max_fpr in zip([costs[-1]],[metricsfnr[-1]], [metricsfpr[-1]]):
        # points_data = axis_to_data.transform([(0, c*100), (max_fnr*100, c*100)])
        print(data_to_axis.transform([(0, c*100), (max_fnr*100, c*100)]))
        # plt.hlines(c*100, 0, max*100, linestyles='dashed', colors="black" if c == 0.02 else "grey")
        # Horizontal arrows from each edge towards the achievable cut-off.
        plt.annotate('', xytext=data_to_axis.transform([(0, c*100)])[0] + [0, half_spacing_between_arrows], textcoords='axes fraction',
                     xy=data_to_axis.transform([(max_fnr*100, c*100)])[0] + [0, half_spacing_between_arrows], xycoords='axes fraction',
                     arrowprops={'arrowstyle': '-|>', 'color': "C2" if c == 0.02 else "lightgrey", "linestyle": "--", "linewidth":1, "shrinkA": 0, "shrinkB": 0})
        plt.annotate('', xytext=data_to_axis.transform([(100, c*100)])[0] - [0, half_spacing_between_arrows], textcoords='axes fraction',
                     xy=data_to_axis.transform([(100 - max_fpr*100, c*100)])[0] - [0, half_spacing_between_arrows], xycoords='axes fraction',
                     arrowprops={'arrowstyle': '-|>', 'color': "C3" if c == 0.02 else "lightgrey", "linestyle": "--", "linewidth":1, "shrinkA": 0, "shrinkB": 0})
        if c == 0.02:
            # Vertical guide arrows dropping down to the x axis at the cut-offs.
            plt.annotate('', xytext=data_to_axis.transform([(max_fnr * 100, c * 100)])[0],
                         textcoords='axes fraction',
                         xy=[data_to_axis.transform([(max_fnr * 100, 1)])[0][0], 0],
                         xycoords='axes fraction',
                         arrowprops={'arrowstyle': '-|>', 'color': "C2" , "linestyle": "--",
                                     "linewidth": 1, "shrinkA": 0, "shrinkB": 0})
            plt.annotate('', xytext=data_to_axis.transform([(100 - max_fpr * 100, c * 100)])[0],
                         textcoords='axes fraction',
                         xy=[data_to_axis.transform([(100 - max_fpr * 100, 1)])[0][0], 0],
                         xycoords='axes fraction',
                         arrowprops={'arrowstyle': '-|>', 'color': "C3" , "linestyle": "--",
                                     "linewidth": 1, "shrinkA": 0, "shrinkB": 0})
        # Replace the 60% tick with the exact work-reduced fractions.
        ticks_list = list(plt.xticks()[0])
        ticks_list.remove(60)
        plt.xticks(ticks_list + [max_fnr * 100, 100 - max_fpr * 100])
    # Hatch the region above the EER: full automation is possible there.
    p = patches.Rectangle((0,eer), 100, 100-eer, linewidth=0, fill=None, hatch='///', color='lightgrey') # data_to_axis.transform([(5.1 * 100, 0)])[0]
    plt.gca().add_patch(p)
    bbox_props = dict(boxstyle="rarrow", fc="white", ec="C0", lw=1)
    plt.text(50, eer, "Equal error rate", ha="center", va="center", rotation=0,
             size=10,
             bbox=bbox_props)
    plt.text(50, 25, "Above equal error rate: use 100% of automated classification", size=10, rotation=0,
             ha="center", va="center",
             bbox=dict(boxstyle="round",
                       ec="white",
                       facecolor="white",
                       )
             )
    f.savefig('figures/' + path + 'bins.pdf',bbox_inches='tight', dpi=600)
    plt.close()
def saveBinMetrics(metricsfnr, metricsfpr, costs, thresholdsfnr, thresholdsfpr, path):
    """Persist per-cost work-reduced metrics to dfs/<path>workreduced.csv.

    Fractions are converted to percentages; the CSV is indexed by the cost
    (as a percentage) with columns fnr, fpr, thresholds_fnr, thresholds_fpr
    and sum (total work reduced on both sides).
    """
    metricsfnr = [mfnr * 100 for mfnr in metricsfnr]
    metricsfpr = [mfpr * 100 for mfpr in metricsfpr]
    costs = [cost * 100 for cost in costs]
    # Total fraction handled automatically at each cost.
    # (Renamed from `sum`, which shadowed the builtin.)
    totals = [x + y for x, y in zip(metricsfnr, metricsfpr)]
    df = pd.DataFrame({'fnr': metricsfnr, 'fpr': metricsfpr, 'thresholds_fnr': thresholdsfnr,
                       'thresholds_fpr': thresholdsfpr, 'sum': totals}, index=costs)
    df.to_csv('dfs/' + path + 'workreduced.csv')
def workReducedPost(lower, upper, scores, y_true):
    """Apply fixed lower/upper score thresholds to classifier scores.

    Domains scoring below `lower` are auto-classified benign, above `upper`
    auto-classified malicious, and the rest are left for manual review.
    Returns the ground-truth labels of each bucket plus the resulting FNR,
    FPR and work-reduced fractions (total / negative side / positive side).
    """
    below, between, above = [], [], []
    for prob, truth in zip(scores[:, 1], y_true):
        if prob < lower:
            below.append(truth)
        elif prob > upper:
            above.append(truth)
        else:
            between.append(truth)
    n_total = len(y_true)
    n_malicious = y_true.sum()
    n_benign = n_total - n_malicious
    # Malicious domains auto-classified benign are the false negatives;
    # benign domains auto-classified malicious are the false positives.
    fnr = sum(below) / n_malicious
    fpr = (len(above) - sum(above)) / n_benign
    work_reduced_negative = len(below) / n_total
    work_reduced_positive = len(above) / n_total
    work_reduced = work_reduced_negative + work_reduced_positive
    return above, below, between, fnr, fpr, work_reduced, work_reduced_negative, work_reduced_positive
def workReducedPostLoadThr(trainyear, code, scores, y_true):
    """Load the previously saved thresholds for model `code` (trained on
    `trainyear`) from dfs/<trainyear>/<code>_workreduced.csv and apply them
    via workReducedPost.

    Row 3 of the saved table is used — presumably the 2% cost budget row;
    verify against the cost list used when the CSV was written.
    Returns the workReducedPost tuple extended with (lower, upper).
    """
    threshold_table = pd.read_csv('dfs/' + trainyear + '/' + code + '_workreduced.csv', index_col=0)
    threshold_table = threshold_table.loc[:, ['thresholds_fnr', 'thresholds_fpr']]
    lower = threshold_table.iloc[3, 0]
    upper = threshold_table.iloc[3, 1]
    outcome = workReducedPost(lower, upper, scores, y_true)
    positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, \
        work_reduced_negative, work_reduced_positive = outcome
    return (positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced,
            work_reduced_negative, work_reduced_positive, lower, upper)
def workReducedPostDetermineThr(features, labels, code, scoreszz, y_true):
    """Determine lower/upper score thresholds via 10-fold CV over
    (features, labels) at the 2% cost budget, average them across folds, and
    apply them to the evaluation scores `scoreszz` with ground truth `y_true`.

    `code` selects which tuned model's hyperparameters to reuse (loaded from
    models/2017/). Returns the workReducedPost tuple extended with
    (lower, upper).
    """
    fnr = []
    fpr = []
    thr_fnr = []
    thr_fpr = []
    kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=44)
    for train_index, test_index in kf.split(features.values, labels):
        # Split the training and testing data
        X_train, X_test = features.values[train_index], features.values[test_index]
        y_train, y_test = labels[train_index], labels[test_index]
        # Load parameters of the hyperparameter tuned model.
        clf_tuned = load('models/2017/model' + code + '.joblib')
        if isinstance(clf_tuned, GradientBoostingClassifier):
            params = clf_tuned.get_params()
            clf = GradientBoostingClassifier(**params)
        else:
            # A saved search object: reuse its best hyperparameters.
            params = clf_tuned.best_params_
            clf = GradientBoostingClassifier(**params, random_state=44)
        clf.fit(X_train, y_train)
        scores = clf.predict_proba(X_test)
        metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr = workreducedThr(scores[:,1], y_test, 0.02)
        fnr.append(metricsfnr)
        fpr.append(metricsfpr)
        thr_fnr.append(thresholdsfnr)
        thr_fpr.append(thresholdsfpr)
    fnr = np.array(fnr)
    fpr = np.array(fpr)
    thr_fnr = np.array(thr_fnr)
    thr_fpr = np.array(thr_fpr)
    print('FNR work reduced', fnr.mean(), '+/-', fnr.std())
    print('FPR work reduced', fpr.mean(), '+/-', fpr.std())
    print('Total work reduced', fnr.mean() + fpr.mean())
    print('Lower thr', thr_fnr.mean(), '+/-', thr_fnr.std())
    # Fixed: this line originally printed fpr instead of the upper thresholds.
    print('Upper thr', thr_fpr.mean(), '+/-', thr_fpr.std())
    print()
    lower, upper = thr_fnr.mean(), thr_fpr.mean()
    # lower, upper = thr_fnr.mean() - thr_fnr.std(), thr_fpr.mean() + thr_fpr.std()
    positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive = workReducedPost(lower, upper, scoreszz, y_true)
    return positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive, lower, upper
def workReducedPostDetermineThrOneGo(features, labels, code, scoreszz, y_true):
    """Pool out-of-fold scores from one 10-fold CV over (features, labels),
    derive the 2%-budget lower/upper thresholds once on the pooled scores,
    and apply them to the evaluation scores `scoreszz` with labels `y_true`.

    `code` selects which tuned model's hyperparameters to reuse (loaded from
    models/2017/). Returns the workReducedPost tuple extended with
    (lower, upper).
    """
    scores = []
    labelsz = []
    kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=44)
    for train_index, test_index in kf.split(features.values, labels):
        # Split the training and testing data
        X_train, X_test = features.values[train_index], features.values[test_index]
        y_train, y_test = labels[train_index], labels[test_index]
        # Load parameters of the hyperparameter tuned model.
        clf_tuned = load('models/2017/model' + code + '.joblib')
        if isinstance(clf_tuned, GradientBoostingClassifier):
            params = clf_tuned.get_params()
            clf = GradientBoostingClassifier(**params)
        else:
            # A saved search object: reuse its best hyperparameters.
            params = clf_tuned.best_params_
            clf = GradientBoostingClassifier(**params, random_state=44)
        clf.fit(X_train, y_train)
        s = clf.predict_proba(X_test)
        # Accumulate out-of-fold scores/labels so thresholds are set in one go.
        scores = np.append(scores, s[:,1])
        labelsz = np.append(labelsz, y_test)
    metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr = workreducedThr(scores, labelsz, 0.02)
    lower, upper = thresholdsfnr, thresholdsfpr
    # lower, upper = thr_fnr.mean() - thr_fnr.std(), thr_fpr.mean() + thr_fpr.std()
    positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive = workReducedPost(lower, upper, scoreszz, y_true)
    return positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive, lower, upper
def workReducedPostDetermineThrOneGoBis(features, labels, code, scoreszz, y_true, stratify_classes, costs, plot=False,
                                        savemetrics=False, path=''):
    """Variant of ...OneGo: folds are stratified by `stratify_classes` rather
    than by the labels, and thresholds are computed for every budget in
    `costs` via workreducedThrBis (the LAST cost is the one actually applied
    to `scoreszz`/`y_true`).

    Returns the workReducedPost tuple extended with the full per-cost
    threshold lists (thresholdsfnr, thresholdsfpr).
    """
    scores = []
    labelsz = []
    kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=44)
    for train_index, test_index in kf.split(features.values, stratify_classes):
        # Split the training and testing data
        X_train, X_test = features.values[train_index], features.values[test_index]
        y_train, y_test = labels[train_index], labels[test_index]
        # Load parameters of the hyperparameter tuned model.
        clf_tuned = load('models/2017/model' + code + '.joblib')
        if isinstance(clf_tuned, GradientBoostingClassifier):
            params = clf_tuned.get_params()
            clf = GradientBoostingClassifier(**params)
        else:
            # A saved search object: reuse its best hyperparameters.
            params = clf_tuned.best_params_
            clf = GradientBoostingClassifier(**params, random_state=44)
        clf.fit(X_train, y_train)
        s = clf.predict_proba(X_test)
        # Accumulate out-of-fold scores/labels so thresholds are set in one go.
        scores = np.append(scores, s[:,1])
        labelsz = np.append(labelsz, y_test)
    metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr = workreducedThrBis(scores, labelsz, costs,
                                                                             plot=plot, savemetrics=savemetrics, path=path)
    lower, upper = thresholdsfnr[-1], thresholdsfpr[-1]
    positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive = workReducedPost(lower, upper, scoreszz, y_true)
    return positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive, thresholdsfnr, thresholdsfpr
def workReducedPostDetermineThrOneGoOneYear(features, labels, additional_features, addtional_labels, code, scoreszz, y_true):
    """Only look for thresholds on the additional dataset.

    The additional dataset is split 10-fold; in each fold the classifier is
    trained on the full base dataset (features/labels) plus the additional
    training split, and scored on the additional test split. The pooled
    out-of-fold scores then yield the 2%-budget thresholds, which are applied
    to `scoreszz`/`y_true`.

    NOTE: the parameter name `addtional_labels` (sic) is kept as-is because
    callers may pass it by keyword.
    Returns the workReducedPost tuple extended with (lower, upper).
    """
    scores = []
    labelsz = []
    kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=44)
    for train_index, test_index in kf.split(additional_features.values, addtional_labels):
        # Split the training and testing data
        X_train_add, X_test = additional_features.values[train_index], additional_features.values[test_index]
        y_train_add, y_test = addtional_labels[train_index], addtional_labels[test_index]
        # Train on the base dataset plus the additional training split.
        X_train = np.concatenate((features.values, X_train_add))
        y_train = np.concatenate((labels, y_train_add))
        # Load parameters of the hyperparameter tuned model.
        clf_tuned = load('models/2017/model' + code + '.joblib')
        if isinstance(clf_tuned, GradientBoostingClassifier):
            params = clf_tuned.get_params()
            clf = GradientBoostingClassifier(**params)
        else:
            # A saved search object: reuse its best hyperparameters.
            params = clf_tuned.best_params_
            clf = GradientBoostingClassifier(**params, random_state=44)
        clf.fit(X_train, y_train)
        s = clf.predict_proba(X_test)
        scores = np.append(scores, s[:,1])
        labelsz = np.append(labelsz, y_test)
    metricsfnr, metricsfpr, thresholdsfnr, thresholdsfpr = workreducedThr(scores, labelsz, 0.02)
    # print('Total work reduced', metricsfnr, metricsfpr, metricsfnr + metricsfpr)
    # print('Lower thr', thresholdsfnr)
    # print('Upper thr', thresholdsfpr)
    # print()
    lower, upper = thresholdsfnr, thresholdsfpr
    # lower, upper = thr_fnr.mean() - thr_fnr.std(), thr_fpr.mean() + thr_fpr.std()
    positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive = workReducedPost(lower, upper, scoreszz, y_true)
    return positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
        work_reduced_positive, lower, upper
def plotBinsGreyScale(costs, sorted_desc, total_benign, sorted_asc, total_malicious, metricsfnr, metricsfpr, path, scores, labels):
    """Near-verbatim duplicate of plotBins (saved to figures/<path>bins.pdf).

    NOTE(review): despite the GreyScale name, the arrow colors C2/C3 are the
    same as in plotBins — confirm whether a greyscale palette was intended.
    """
    figsize = (6.4, 3.2)
    f = plt.figure(figsize=figsize)
    # FNR curve (ascending sweep) and reversed FPR curve (descending sweep).
    plt.semilogy(sorted_asc.index/len(sorted_asc.index)*100, sorted_asc['cumsum']/total_malicious *100, label='False negative rate')
    plt.semilogy((sorted_desc.index/len(sorted_desc.index)*100), (sorted_desc['cumsum']/total_benign *100)[::-1], label='False positive rate')
    plt.legend()
    plt.gca().yaxis.set_major_formatter(matplotlib.ticker.ScalarFormatter())
    # find intersection of two curves
    isec = ((sorted_asc['cumsum']/total_malicious *100) - (sorted_desc['cumsum']/total_benign *100)[::-1]).abs()
    # plt.semilogy((isec.index/len(isec.index)*100), 100-isec)
    # NOTE(review): same as plotBins — argsort of (100-isec) selects the
    # LARGEST differences, not the intersection; confirm.
    idxmin = ((100-isec).argsort()[0:2])
    print(isec[idxmin[0]-2:idxmin[0]+2])
    eer = ((sorted_asc['cumsum']/total_malicious *100).iloc[idxmin]).mean()
    print("eer", eer)
    # Only consumed by the commented-out bob.measure call below.
    scores_neg = scores[labels == 0].tolist()
    scores_pos = scores[labels == 1].tolist()
    # eer = bob.measure.eer(scores_neg, scores_pos)*100
    plt.gca().yaxis.set_ticks([0.1,1,10,100,0.5,2])
    plt.gca().yaxis.set_ticklabels(["0.1"]+["1.0"]+["10.0"]+["100.0"]+["0.5"]+["2.0"])
    plt.ylim((0.05,100))
    plt.xlim((0,100))
    plt.xlabel('Fraction of domains (%)')
    plt.ylabel('Error rate (%)')
    # Transforms between data coordinates and axes fractions for the arrows.
    axis_to_data = plt.gca().transAxes + plt.gca().transData.inverted()
    data_to_axis = axis_to_data.inverted()
    half_spacing_between_arrows = 0.00
    # Annotate only the last (largest) cost budget.
    for c, max_fnr, max_fpr in zip([costs[-1]],[metricsfnr[-1]], [metricsfpr[-1]]):
        print(data_to_axis.transform([(0, c*100), (max_fnr*100, c*100)]))
        plt.annotate('', xytext=data_to_axis.transform([(0, c*100)])[0] + [0, half_spacing_between_arrows], textcoords='axes fraction',
                     xy=data_to_axis.transform([(max_fnr*100, c*100)])[0] + [0, half_spacing_between_arrows], xycoords='axes fraction',
                     arrowprops={'arrowstyle': '-|>', 'color': "C2" if c == 0.02 else "lightgrey", "linestyle": "--", "linewidth":1, "shrinkA": 0, "shrinkB": 0})
        plt.annotate('', xytext=data_to_axis.transform([(100, c*100)])[0] - [0, half_spacing_between_arrows], textcoords='axes fraction',
                     xy=data_to_axis.transform([(100 - max_fpr*100, c*100)])[0] - [0, half_spacing_between_arrows], xycoords='axes fraction',
                     arrowprops={'arrowstyle': '-|>', 'color': "C3" if c == 0.02 else "lightgrey", "linestyle": "--", "linewidth":1, "shrinkA": 0, "shrinkB": 0})
        if c == 0.02:
            # Vertical guide arrows down to the x axis at the cut-offs.
            plt.annotate('', xytext=data_to_axis.transform([(max_fnr * 100, c * 100)])[0],
                         textcoords='axes fraction',
                         xy=[data_to_axis.transform([(max_fnr * 100, 1)])[0][0], 0],
                         xycoords='axes fraction',
                         arrowprops={'arrowstyle': '-|>', 'color': "C2" , "linestyle": "--",
                                     "linewidth": 1, "shrinkA": 0, "shrinkB": 0})
            plt.annotate('', xytext=data_to_axis.transform([(100 - max_fpr * 100, c * 100)])[0],
                         textcoords='axes fraction',
                         xy=[data_to_axis.transform([(100 - max_fpr * 100, 1)])[0][0], 0],
                         xycoords='axes fraction',
                         arrowprops={'arrowstyle': '-|>', 'color': "C3" , "linestyle": "--",
                                     "linewidth": 1, "shrinkA": 0, "shrinkB": 0})
        # Replace the 60% tick with the exact work-reduced fractions.
        ticks_list = list(plt.xticks()[0])
        ticks_list.remove(60)
        plt.xticks(ticks_list + [max_fnr * 100, 100 - max_fpr * 100])
    # Hatch the region above the EER: full automation is possible there.
    p = patches.Rectangle((0,eer), 100, 100-eer, linewidth=0, fill=None, hatch='///', color='lightgrey') # data_to_axis.transform([(5.1 * 100, 0)])[0]
    plt.gca().add_patch(p)
    bbox_props = dict(boxstyle="rarrow", fc="white", ec="C0", lw=1)
    plt.text(50, eer, "Equal error rate", ha="center", va="center", rotation=0,
             size=10,
             bbox=bbox_props)
    plt.text(50, 25, "Above equal error rate: use 100% of automated classification", size=10, rotation=0,
             ha="center", va="center",
             bbox=dict(boxstyle="round",
                       ec="white",
                       facecolor="white",
                       )
             )
    f.savefig('figures/' + path + 'bins.pdf',bbox_inches='tight', dpi=600)
    plt.close()
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,311
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/production_evaluation.py
|
import datetime
import os
import argparse
import json
import matplotlib.ticker
import matplotlib.patches as patches
import pandas as pd
import numpy as np
import utils
from sklearn.preprocessing import StandardScaler, Binarizer, LabelEncoder, LabelBinarizer, OneHotEncoder
from sklearn.pipeline import Pipeline
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.impute import SimpleImputer
from joblib import load
import itertools
from sklearn.metrics import confusion_matrix, f1_score, roc_auc_score, precision_score, recall_score, accuracy_score
from evaluation.metrics import workReducedPostLoadThr
import dataprocessing.preprocessing as pre
import macroify
# import bob.measure
import warnings
warnings.filterwarnings("ignore")
def saveimportance(importances, featurenames):
    """Rank features by importance score (descending), print the ranking and
    persist it to dfs/importance1.csv."""
    ranking = pd.DataFrame({'featurename': featurenames, 'score': importances})
    ranking = ranking.sort_values('score', ascending=False)
    print(ranking)
    ranking.to_csv('dfs/importance1.csv')
if __name__ == "__main__":
    # Production-style evaluation: models trained on --trainyear are applied
    # to --testyear domains, routed per data-source availability pattern, and
    # aggregated into ensemble-level metrics.
    parser = argparse.ArgumentParser(description='Do the avalanche experiments')
    parser.add_argument('--trainyear', '-tr',
                        type=str,
                        default='2017',
                        help='year to consider')
    parser.add_argument('--testyear', '-te',
                        type=str,
                        default='2018',
                        help='year to consider')
    args = parser.parse_args()
    testyear = args.testyear
    trainyear = args.trainyear
    results = {}
    y = utils.translateyear(trainyear)
    z = utils.translateyear(testyear)
    available, reputation, dns, whois, openintel, label = pre.loadAndCleanDataPerDataSet(False, testyear)
    # Running totals accumulated across all source patterns.
    total_fp = 0
    total_fn = 0
    total_manual = 0
    total_pred = 0
    total_amount_of_domains = len(available.index)
    classDictionary = {'malicious': 1, 'benign': 0}
    labelzsss = label.map(classDictionary)
    total_amount_positive = labelzsss.sum()
    total_amount_negative = len(labelzsss.index) - labelzsss.sum()
    # Each 4-bit pattern encodes which sources are available:
    # reputation+lexicographic, passive DNS, WHOIS, active DNS.
    flags = [False, True]
    dfs = []
    codesz = []
    ensemble_scores_pos = []
    ensemble_scores_neg = []
    ensemble_predictions = []
    ensemble_predictions_priori = []
    ensemble_labels_priori = []
    ensemble_labels = []
    metrics = { 'f1': [], 'precision': [], 'recall': [], 'acc_train': [], 'acc_test': [], 'eer': [], 'fnr_work_reduced': [],
                'fpr_work_reduced': [], 'work_reduced': [], 'work_reduced_negative': [], 'work_reduced_positive': []}
    for x in itertools.product(flags, repeat=4):
        code = ''.join(['1' if i else '0' for i in x])
        if code != '0000':  # code[0] != '0'
            clf = load('models/' + trainyear + '/model' + code + '.joblib')
            features_maxdata, labelzz_max_data, _ = pre.loadAndCleanDataMaxDom(code, False, testyear)
            # Evaluate model performance on max domains
            predictions = clf.predict(features_maxdata)
            metrics['acc_test'].append(accuracy_score(labelzz_max_data, predictions))
            metrics['f1'].append(f1_score(labelzz_max_data, predictions))
            metrics['precision'].append(precision_score(labelzz_max_data, predictions))
            metrics['recall'].append(recall_score(labelzz_max_data, predictions))
            # Evaluate model performance work reduced
            scores = clf.predict_proba(features_maxdata)
            positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
                work_reduced_positive, _, _ = workReducedPostLoadThr(trainyear, code, scores, labelzz_max_data)
            metrics['work_reduced_negative'].append(work_reduced_negative)
            metrics['work_reduced_positive'].append(work_reduced_positive)
            metrics['work_reduced'].append(work_reduced)
            metrics['fnr_work_reduced'].append(fnr)
            metrics['fpr_work_reduced'].append(fpr)
            # Construct domains that should be classified by this model
            features, labelzz = pre.loadAndCleanDataExactPattern(x, available, reputation, dns, whois, openintel, label)
            amount_of_domains = len(features.index)
            codesz.append(code)
            print(amount_of_domains, 'domains to classify for sourcepattern', code)
            # Fixed typo: was `len(labelzz.index != 0)` (len of a boolean
            # array), which happened to be truthy iff non-empty as well.
            if len(labelzz.index) != 0:
                print('With', amount_of_domains-labelzz.sum(), 'negative domains and', labelzz.sum(), 'positive domains')
                index = features.index
                scores = clf.predict_proba(features)
                predictions = clf.predict(features)
                df = pd.DataFrame(list(zip(predictions, scores[:,1], len(predictions)*[code])),
                                  index=features.index, columns=['classification 0=benign, 1=malicious', 'score', 'model code'])
                positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
                    work_reduced_positive, _, _ = workReducedPostLoadThr(trainyear, code, scores, labelzz)
                total_fp += (len(positive_pred) - sum(positive_pred))
                total_fn += sum(negative_pred)
                total_manual += len(no_action_pred)
                total_pred += (len(positive_pred) + len(negative_pred))
                # Posteriori: manual (no-action) domains are assumed to be
                # labeled correctly by the human analyst.
                ensemble_predictions = ensemble_predictions + [1]*len(positive_pred) + [0]*len(negative_pred) + no_action_pred
                ensemble_labels = ensemble_labels + positive_pred + negative_pred + no_action_pred
                ensemble_predictions_priori = ensemble_predictions_priori + predictions.tolist()
                ensemble_labels_priori = ensemble_labels_priori + labelzz.values.tolist()
                dfs.append(df)
                ensemble_scores_neg = ensemble_scores_neg + scores[:, 1][labelzz == 0].tolist()
                ensemble_scores_pos = ensemble_scores_pos + scores[:, 1][labelzz == 1].tolist()
                print('Makes a prediction for', (len(positive_pred) + len(negative_pred)), 'domains')
                print('Would predict', np.sum(predictions), 'domains malicious')
    # Save predictions
    df = pd.concat(dfs)
    print(len(df.index), " predictions made")
    df.to_csv('dfs/predictions.csv')
    # Print performance per model
    print('===============================================================================')
    for key, value in metrics.items():
        if value:
            print('========== %s ============' % (key))
            for i, v in enumerate(value):
                print('Model %s: %.3f' % (codesz[i], v))
                # codestr = utils.translatecode(code)
                # results[y+z+key + codestr] = v
    print('===============================================================================')
    print('Total work reduced', (total_amount_of_domains-total_manual)/total_amount_of_domains)
    print('Total FNR', total_fn/total_amount_positive)
    print('Total FPR', total_fp/total_amount_negative)
    print('Accuracy', accuracy_score(ensemble_labels, ensemble_predictions))
    print('F1', f1_score(ensemble_labels, ensemble_predictions))
    print('Precision', precision_score(ensemble_labels, ensemble_predictions))
    print('Recall', recall_score(ensemble_labels, ensemble_predictions))
    print('Little check', total_amount_positive+total_amount_negative == total_amount_of_domains)
    print('Little check', total_pred+total_manual == total_amount_of_domains)
    results[y+z+'workreduced'+ 'posteriori'] = (total_amount_of_domains-total_manual)/total_amount_of_domains *100
    results[y+z+'fnr'+ 'posteriori'] = total_fn/total_amount_positive *100
    results[y+z+'fpr'+ 'posteriori'] = total_fp/total_amount_negative *100
    results[y+z+'accuracy'+ 'posteriori'] = accuracy_score(ensemble_labels, ensemble_predictions) *100
    results[y+z+'fone'+ 'posteriori'] = f1_score(ensemble_labels, ensemble_predictions) *100
    results[y+z+'precision'+ 'posteriori'] = precision_score(ensemble_labels, ensemble_predictions) *100
    results[y+z+'recall'+ 'posteriori'] = recall_score(ensemble_labels, ensemble_predictions) *100
    results[y + z + 'accuracy'] = accuracy_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    results[y + z + 'fone'] = f1_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    results[y + z + 'precision'] = precision_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    results[y + z + 'recall'] = recall_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    # Fixed: the bob.measure import is commented out at the top of this file,
    # so the unconditional call here raised a NameError. Compute the EER only
    # when bob is actually installed.
    try:
        import bob.measure
        results[y + z + 'eer'] = bob.measure.eer(ensemble_scores_neg, ensemble_scores_pos) * 100
    except ImportError:
        pass
    # fpr, fnr = bob.measure.farfrr(ensemble_scores_neg, ensemble_scores_pos, 0.5)
    # NOTE(review): with farfrr commented out, fpr/fnr below are leftovers
    # from the last loop iteration — presumably not intended; verify.
    results[y + z + 'fpr'] = fpr*100
    results[y + z + 'fnr'] = fnr*100
    macroify.append_file(results)
    print('Little check 2', len(ensemble_scores_neg) + len(ensemble_scores_pos) == total_amount_of_domains)
    np.savez('dfs/' + trainyear + '_' + testyear + 'ensemble_det_curve.npz', pos=ensemble_scores_pos, neg=ensemble_scores_neg)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,312
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/experiment.py
|
import argparse
import json
import pandas as pd
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.model_selection import train_test_split, KFold, StratifiedKFold
from sklearn.metrics import confusion_matrix, f1_score, roc_auc_score, precision_score, recall_score
from joblib import dump, load
import evaluation.metrics as m
import evaluation.postanalysis as postan
import evaluation.preanalysis as prean
from dataprocessing.preprocessing import loadAndCleanDataMaxDom
from matplotlib import pyplot as plt
import seaborn as sns
import warnings
warnings.filterwarnings("ignore")
'''This script runs the experiments within one year. This allows to compute the estimated total work reduced'''
if __name__ == "__main__":
    # Entry point: 10-fold stratified cross-validation of a gradient-boosting
    # classifier on one year of Avalanche data, followed by work-reduced
    # estimates, FP/FN dumps, feature importances and per-feature threshold
    # distributions, all written under dfs/<year>/.
    parser = argparse.ArgumentParser(description='Do the avalanche experiments within one year')
    parser.add_argument('--sources', '-s',
                        type=str,
                        default="0111",
                        help='what datasets to use in a binary pattern, reputation + lexicographic, passivedns, whois, activedns')
    parser.add_argument('--year', '-y',
                        type=str,
                        default='2017',
                        help='year to consider')
    args = parser.parse_args()
    sourcepattern = args.sources
    year = args.year
    # Prefix shared by all output artefacts, e.g. "2017/0111_".
    path = year + '/' + sourcepattern + '_'
    features, labels, post_analysis_labels = loadAndCleanDataMaxDom(sourcepattern, False, year)
    # Exploratory pre-analysis plots: PCA projection and covariance matrix.
    prean.pcaAnalysis(features,labels, path)
    prean.covMatrix(features, labels, path)
    print("Input sizes:")
    print("Total", len(labels), "Negative (0, benign)", (labels == 0).sum(), "Positive (1, malicious)", (labels == 1).sum())
    # pipeline
    i = 1  # NOTE(review): unused variable
    kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=44)
    # Per-fold metric lists. NOTE(review): 'eer' is declared but never appended
    # to below; the `if value:` guard in the summary loop silently skips it.
    metrics = {'f1': [], 'precision': [], 'recall': [], 'auc': [], 'acc_train': [], 'acc_test': [], 'eer': []}
    # Aggregates across folds: test rows/labels/predictions, summed feature
    # importances, per-fold probability scores, and the fitted tree ensembles.
    data = {'x_test': np.empty((0, features.shape[1])), 'y_test': np.empty((0,)), 'y_pred': np.empty((0,)),
            'importance': np.zeros(len(features.columns)), 'agg_scores_train': [], 'agg_scores_test': [],
            'labels_train': [], 'labels_test': [], 'estimators':[], 'y_post': np.empty((0, post_analysis_labels.shape[1])),
            'domainname_test':[]}
    for train_index, test_index in kf.split(features.values, labels):
        # Split the training and testing data
        X_train, X_test = features.values[train_index], features.values[test_index]
        y_train, y_test = labels[train_index], labels[test_index]
        y_post = post_analysis_labels.iloc[test_index].values
        domainname_test = features.index[test_index]
        # Load parameters of the hyperparameter tuned model. Note that we do not tune in each iteration.
        # It is possible that tuning within the split leads to other hyperparameters, however, the hyperparameters
        # should transfer quite well as the problem and data remains the same. At worst performance could be slightly better.
        # NOTE(review): the path hardcodes 2017 instead of using `year` --
        # presumably the 2017-tuned hyperparameters are reused across years;
        # confirm this is intended.
        clf_tuned = load('models/2017/model' + sourcepattern + '.joblib')
        if isinstance(clf_tuned, GradientBoostingClassifier):
            params = clf_tuned.get_params()
            pipe = Pipeline([('clf', GradientBoostingClassifier(**params))])  # ('scl', StandardScaler()),
        else:
            # Otherwise the artefact is a search object (e.g. GridSearchCV):
            # rebuild a fresh classifier from its best parameters.
            params = clf_tuned.best_params_
            pipe = Pipeline([('clf', GradientBoostingClassifier(random_state=44, **params))]) #('scl', StandardScaler()),
        # Train the model
        pipe.fit(X_train, y_train)
        # Calculate metrics for this split
        metrics['acc_train'].append(pipe.score(X_train, y_train))
        metrics['acc_test'].append(pipe.score(X_test, y_test))
        y_pred = pipe.predict(X_test)
        metrics['f1'].append(f1_score(y_test, y_pred))
        metrics['auc'].append(roc_auc_score(y_test, y_pred))
        metrics['precision'].append(precision_score(y_test, y_pred))
        metrics['recall'].append(recall_score(y_test, y_pred))
        # Some post processing information for this split
        data['x_test'] = np.append(data['x_test'], X_test, axis=0)
        data['y_test'] = np.append(data['y_test'], y_test)
        data['y_pred'] = np.append(data['y_pred'], y_pred)
        data['y_post'] = np.append(data['y_post'], y_post, axis=0)
        data['importance'] = np.sum([data['importance'], pipe.named_steps['clf'].feature_importances_], axis=0)
        data['estimators'].append(pipe.named_steps['clf'].estimators_)
        data['domainname_test'] = np.append(data['domainname_test'], domainname_test)
        malicious = X_test[y_test == 1]
        benign = X_test[y_test == 0]
        # NOTE(review): `negatives`/`positives` are computed but never used.
        negatives = pipe.predict_proba(benign)[:, 1]
        positives = pipe.predict_proba(malicious)[:, 1]
        scores_test = pipe.predict_proba(X_test)[:, 1]
        scores_train = pipe.predict_proba(X_train)[:, 1]
        data['agg_scores_train'] = np.append(data['agg_scores_train'], scores_train)
        data['agg_scores_test'] = np.append(data['agg_scores_test'], scores_test)
        data['labels_train'] = np.append(data['labels_train'], y_train)
        data['labels_test'] = np.append(data['labels_test'], y_test)
    # Summarize per-fold metrics (mean +- std) and persist them as CSV.
    ind = []
    mean = []
    std = []
    print('===============================================================================')
    for key, value in metrics.items():
        if value:  # skips 'eer', whose list stays empty (see above)
            print('GBC pipeline test %s and std: %.3f +- %.3f' % (key, np.array(value).mean(), np.array(value).std()))
            ind.append(key)
            mean.append(np.array(value).mean())
            std.append(np.array(value).std())
    print('===============================================================================')
    df = pd.DataFrame({'mean': mean, 'std': std}, index=ind)
    df.to_csv('dfs/' + year + '/' + sourcepattern + '_' + 'performance_metrics.csv')
    # Estimated manual work saved at several acceptable error-rate budgets.
    costs = [0.001, 0.005, 0.01, 0.02]
    metricsfnr, metricsfpr = m.workreduced(data['agg_scores_test'], data['labels_test'], costs, plot= True, savemetrics=True, path=path)
    postan.saveFpFnDf(data['x_test'], data['y_test'], data['y_pred'], features.columns, data['domainname_test'], path)
    postan.saveimportance(data['importance'] / kf.n_splits, features.columns, path)
    for c, vfnr, vfpr in zip(costs, metricsfnr, metricsfpr):
        print('Testing: When a fnr and fpr of', c*100 , '% is acceptable, work saved is', vfnr , vfpr ,
              'total', vfnr + vfpr )
    print('===============================================================================')
    # Collect the decision thresholds each feature is split on, across all
    # folds' fitted trees, and dump them as JSON for later analysis.
    distributions = dict()
    for name in features.columns:
        distributions[name] = []
    for estims in data['estimators']:
        postan.featureDistribution(features.columns, estims, distributions)
    with open('dfs/' + year + '/' + sourcepattern + '_' + 'thresholds.json', 'w') as fp:
        json.dump(distributions, fp)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,313
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/feature_generation/features.py
|
import csv
import datetime
import json
import os
import traceback
import dateutil.parser as dateparser
# Reminder: this only applies to already registered domains, e.g. these domains would have to be seized if they turn out
# to be malicious.
import feature_generation.retrieve_sinkhole_data as retrieve_sinkhole_data
# Malware families labelled in the Avalanche takedown ground truth.
families = ['Andromeda', 'Bolek', 'Citadel', 'CoreBot', 'Gozi2', 'Goznym', 'Goznym Stage 1', 'KINS', 'MS-Andromeda',
            'Marcher', 'Matsnu', 'Nymaim', 'Pandabanker', 'Ranbyus', 'Rovnix', 'Smart App', 'Smoke Loader / Dofoil',
            'TeslaCrypt', 'Tiny Banker', 'Trusteer App', 'Unknown', 'UrlZone', 'Vawtrak', 'Xswkit']
# source: DGArchive https://dgarchive.caad.fkie.fraunhofer.de/site/families.html with manual corrections of family names
# Maps malware family name -> first-activity date ("YYYY-MM-DD" string), or None when unknown.
# NOTE(review): 'PandaBanker' here vs 'Pandabanker' in `families` -- a
# case-sensitive lookup by the latter spelling misses this entry; confirm
# which casing the input data actually uses.
malware_family_validities = {'CoreBot': '2015-01-01',
                             'Gozi2': '2010-01-01',  # 'Gozi (Days+Monthly+Seasonal)': '2010-01-01', cf. https://malpedia.caad.fkie.fraunhofer.de/details/win.isfb
                             'Goznym': '2016-01-01', 'Goznym Stage 1': '2016-01-01',  # 'GozNym 2nd Stage': '2016-01-01',
                             'Matsnu': '2014-01-01', 'Nymaim': '2014-01-01', 'PandaBanker': '2016-08-01', 'Ranbyus': '2015-01-01', 'Rovnix': '2015-01-01',
                             'Tiny Banker': '2014-01-01', 'UrlZone': '2014-01-01', 'Vawtrak': '2016-01-01',
                             'Bobax': '2008-01-01', 'BeeBone': None, 'Blackhole': '2012-06-01', 'Bedep': '2015-01-01',
                             'Banjori': '2013-01-01', 'Bamital': '2010-11-01', 'Cryptolocker': '2013-01-01',
                             'CCleaner DGA': '2017-01-01', 'Conficker': '2008-11-01',
                             'Chinad': None, 'Chir': '2011-01-01', 'Darkshell': None, 'Dyre': '2014-01-01',
                             'DNS Changer': '2011-01-01', 'DiamondFox': '2015-01-01', 'DirCrypt': '2013-01-01',
                             'Emotet.C': '2014-10-01', 'EKforward': '2014-01-01', 'Feodo': '2012-02-01',
                             'Fobber': '2015-01-01',
                             'Gameover P2P': '2011-01-01', 'Gameover DGA': '2014-01-01',
                             'Gspy': None, 'Hesperbot': '2013-01-01', 'Infy': '2015-01-01', 'Locky': '2016-01-01',
                             'ModPack (Andromeda?)': '2016-01-01', 'Murofet': '2010-01-01',
                             'Mirai': '2016-12-01', 'MadMax DGA': '2015-01-01', 'Necurs': '2013-01-01',
                             'Omexo': None, 'Oderoor': '2013-01-01', 'Pushdo (TID version)': '2011-01-01',
                             'Pykspa 2': '2013-04-01', 'Proslikefan DGA': '2016-01-01',
                             'Pykspa': '2009-10-01', 'Pushdo': '2013-01-01', 'PadCrypt': '2016-01-01', 'QakBot': '2013-01-01',
                             'Qadars': '2016-01-01', 'Ramdo': '2013-01-01', 'Redyms': '2012-01-01',
                             'Ramnit': '2012-01-01', 'Symmi': '2014-01-01', 'SuppoBox': '2013-01-01',
                             'Sisron': '2013-01-01', 'Sphinx Zeus DGA': '2016-09-01', 'Szribi': '2007-01-01', 'Shifu': '2015-01-01',
                             'Sutra TDS': '2012-01-01', 'Simda': '2012-01-01', 'Tsifiri': None, 'Tempedreve': '2014-01-01',
                             'Tempedreve TDD': '2015-01-01', 'Torpig': '2007-01-01',
                             'Tofsee DGA': '2016-01-01', 'UD4': '2016-01-01', 'VolatileCedar': '2014-01-01',
                             'Vidro(TID)': None, 'Virut': '2011-08-01', 'WD': '2017-01-01',
                             'XxHex DGA': '2016-01-01'}
# Families whose DGA concatenates dictionary words (higher collision chance with benign names).
wordlist_families = {"Matsnu", "Gozi2", "SuppoBox",  # https://dgarchive.caad.fkie.fraunhofer.de/site/families.html, paper Plohmann (TDD-W type)
                     "Banjori", "Rovnix",  # https://arxiv.org/abs/1810.02023 (high 'smashword' score), in addition to previous sources
                     "Pizd",  # https://osint.bambenekconsulting.com/feeds/pizd-domlist.txt
                     }
# Known sinkhole nameservers (matched against DNS NS records and WHOIS nameserver fields).
shadowserver_sinkholes_ns = ["ns1.kryptoslogicsinkhole.com", "ns2.kryptoslogicsinkhole.net", "ns3.kryptoslogicsinkhole.org", "ns4.kryptoslogicsinkhole.me",
                             "b66.microsoftinternetsafety.net", "b67.microsoftinternetsafety.net",
                             'ns1.markmonitor.com', 'ns2.markmonitor.com', 'ns3.markmonitor.com', 'ns4.markmonitor.com', 'ns5.markmonitor.com', 'ns6.markmonitor.com', 'ns7.markmonitor.com',
                             'ns1.i56a4c1dlzcdsohkwr.biz', 'ns2.i56a4c1dlzcdsohkwr.biz', 'ns3.i56a4c1dlzcdsohkwr.biz', 'ns4.i56a4c1dlzcdsohkwr.biz',
                             "ns1.honeybot.us", "ns2.honeybot.us",
                             "sc-a.sinkhole.shadowserver.org", "sc-b.sinkhole.shadowserver.org", "sc-c.sinkhole.shadowserver.org", "sc-d.sinkhole.shadowserver.org",
                             'ns1.csof.net', 'ns2.csof.net', 'ns3.csof.net', 'ns4.csof.net',
                             "ns1.arbors1nkh0le.com", "ns1.arbor-sinkhole.net", "ns2.arbor-sinkhole.net", "ns1.asertdns.com", "ns2.asertdns.com"]
# Known sinkhole A-record IP addresses.
shadowserver_sinkholes_a = ["82.112.184.197"]
# NOTE(review): `initialized` is never read or written; snapshot loading is
# guarded by membership checks on `auxiliary_data` in FeatureSet.__init__.
initialized = False
# Snapshot date ("YYYYMMDD") -> dict of auxiliary datasets loaded by initialize().
auxiliary_data = {}
def initialize(formatted_snapshot_date):
    """Load all snapshot-scoped auxiliary inputs from input_data/<date>/ into
    the module-level `auxiliary_data` cache: disposable-mail lists, sinkhole
    scan verdicts, Wayback Machine and CT lookups, and OpenIntel counters."""
    store = {}
    auxiliary_data[formatted_snapshot_date] = store
    # Disposable e-mail lists are optional; record None when not collected.
    if os.path.exists("input_data/{}/disposable_email_addresses_exact.json".format(formatted_snapshot_date)):
        with open("input_data/{}/disposable_email_addresses_exact.json".format(formatted_snapshot_date)) as tempmail_exact_json:
            exact_domains = json.load(tempmail_exact_json)
        with open("input_data/{}/disposable_email_addresses_wildcard.json".format(formatted_snapshot_date)) as tempmail_wildcard_json:
            wildcard_domains = json.load(tempmail_wildcard_json)
        store["tempmail_data"] = (exact_domains, wildcard_domains)
    else:
        store["tempmail_data"] = None
    # domain -> True / False / None (unknown) sinkhole verdict.
    with open("input_data/{}/sinkhole_results.csv".format(formatted_snapshot_date)) as sinkhole_csv:
        store["sinkhole_data"] = {
            row[0]: True if row[1] == "True" else (False if row[1] == "False" else None)
            for row in csv.reader(sinkhole_csv)
        }
    # domain -> remaining Wayback Machine columns.
    with open("input_data/{}/wayback_results_domain.csv".format(formatted_snapshot_date)) as wayback_domain_csv:
        store["wayback_domain_data"] = {row[0]: row[1:] for row in csv.reader(wayback_domain_csv)}
    # domain -> certificate-transparency columns.
    with open("input_data/{}/ct_results.txt".format(formatted_snapshot_date)) as ct_csv:
        store["ct_data"] = {row[0]: row[1:] for row in csv.reader(ct_csv)}
    # OpenIntel active-DNS counters are optional; default to an empty mapping.
    if os.path.exists("input_data/{}/openintel_results.csv".format(formatted_snapshot_date)):
        with open("input_data/{}/openintel_results.csv".format(formatted_snapshot_date)) as openintel_csv:
            store["openintel_data"] = {row[0]: row[1:] for row in csv.reader(openintel_csv)}
    else:
        store["openintel_data"] = {}
openintel_cap = 333  # nb days between 1 Jan and 29 Nov (inclusive): upper bound for OpenIntel day counters (the observation window length)
class FeatureSet:
@classmethod
def get_feature_names(cls):
return [func[2:] for func in dir(cls) if callable(getattr(cls, func)) and func.startswith("f_")]
def __init__(self, domain, snapshot_date, malware_data, pdns_data, whois_data, topsites_data, suffix_data, renewal_data, whois_validity_data, wordlist_based_data):#, adns_data):
    """Bundle all per-domain raw data sources needed to compute features.

    :param domain: registered domain name.
    :param snapshot_date: datetime of the data snapshot.
    :param malware_data: malware row (index 1: family name, 2: validity start, 3: validity end).
    :param pdns_data: passive-DNS (DNSDB) CSV row, or None when absent.
    :param whois_data: WHOIS record mapping, or None.
    :param topsites_data: per-list mapping with keys "alexa", "umbrella", "majestic", "quantcast".
    :param suffix_data: public-suffix split row; indices 4 and 5 hold the
        registrable-name parts -- TODO confirm exact column layout.
    :param renewal_data: renewal-info row, or None.
    :param whois_validity_data: phone/e-mail validation row, or None.
    :param wordlist_based_data: row flagging wordlist-based DGA families.
    """
    self.domain = domain
    self.snapshot_date = snapshot_date
    self.formatted_snapshot_date = snapshot_date.strftime("%Y%m%d")
    # Lazily load snapshot-wide auxiliary inputs (sinkhole, wayback, CT,
    # OpenIntel, disposable-mail lists) the first time a snapshot is seen.
    if self.formatted_snapshot_date not in auxiliary_data:
        initialize(self.formatted_snapshot_date)
    self.malware_data = malware_data
    self.pdns_data = pdns_data
    self.whois_data = whois_data
    self.topsites_data = topsites_data
    self.suffix_data = suffix_data
    self.renewal_data = renewal_data
    self.whois_validity_data = whois_validity_data
    self.wordlist_based_data = wordlist_based_data
    # Conditional parses as: ([domain] + openintel_row) if present else None.
    self.adns_data = [self.domain] + auxiliary_data[self.formatted_snapshot_date]["openintel_data"][self.domain] if self.domain in auxiliary_data[self.formatted_snapshot_date]["openintel_data"] else None
    # feature name -> computed value, filled by generate_feature().
    self.features = {}
def check_datasets(self, abridged=True):
if abridged:
datasets_to_check = [self.pdns_data, self.whois_data,
self.renewal_data, self.whois_validity_data,
self.adns_data]
else:
datasets_to_check = [self.pdns_data, self.whois_data,
self.topsites_data["alexa"], self.topsites_data["umbrella"],
self.topsites_data["majestic"], self.topsites_data["quantcast"], self.suffix_data,
self.renewal_data, self.whois_validity_data,
self.wordlist_based_data,
auxiliary_data[self.formatted_snapshot_date]["wayback_domain_data"].get(self.domain, None),
auxiliary_data[self.formatted_snapshot_date]["ct_data"].get(self.domain, None),
self.adns_data]
result = [not dataset for dataset in datasets_to_check]
return result
def export(self):
return [self.features[k] for k in FeatureSet.get_feature_names()]
def generate_feature(self):
    """Populate self.features by invoking every f_<name> method on this instance."""
    for name in FeatureSet.get_feature_names():
        feature_fn = getattr(FeatureSet, "f_" + name)
        self.features[name] = feature_fn(self)
def f_domain(self):
    """Identity 'feature': the raw domain name (used as a key, not for learning)."""
    return self.domain

### Malware-based features ###
def f_malware_family(self):
    """
    Type: categorical
    Indicates the family of malware that generated the DGA domain.
    Intuition: Some DGAs generate random strings, while others concatenate words from a wordlist. There is a higher
    chance that the latter collides with a benign domain.
    :return: family name string (column 1 of the malware row).
    """
    return self.malware_data[1]

def f_malware_validity_start(self):
    """
    Type: numeric
    Start of validity of the AGD. (only for post-analysis)
    :return: POSIX timestamp (float) parsed from the validity-start field.
    """
    return dateparser.parse(self.malware_data[2]).timestamp()

def f_malware_validity_end(self):
    """
    Type: numeric
    End of validity of the AGD. (only for post-analysis)
    :return: POSIX timestamp (float) parsed from the validity-end field.
    """
    return dateparser.parse(self.malware_data[3]).timestamp()

def f_malware_validity_length(self):
    """
    Type: numeric
    Length in days of the period of validity of the AGD.
    Intuition: An AGD that is valid for a short period of time is potentially less likely to be registered by the
    malicious party upfront.
    :return: inclusive day count (end - start + 1).
    """
    return (dateparser.parse(self.malware_data[3]) - dateparser.parse(self.malware_data[2])).days + 1
def f_whois_registration_date(self):
    """
    Type: date
    Creation date of the domain. Do not use in model.
    :return: POSIX timestamp, or None when missing or unparseable.
    """
    if not self._is_whois_available("created_date"):
        return None
    try:
        return self._parse_whois_date(self.whois_data["created_date"]).timestamp()
    except:
        # Unparseable WHOIS date -> treat as unavailable.
        return None

def f_whois_registration_and_family_start_date(self):
    """
    Type: numeric
    Difference between start date of malware and creation date of the domain.
    Intuition: Sites with registration dates a long time
    before the malware started operating could be more likely to be benign.
    NOTE(review): the lookup is a case-sensitive exact match against
    malware_family_validities (e.g. 'Pandabanker' vs 'PandaBanker' would
    miss); confirm against the input data's spelling.
    :return: days between family-start date and domain creation, or None.
    """
    family = self.malware_data[1]
    if family not in malware_family_validities or not malware_family_validities[family]:
        return None
    else:
        if not self._is_whois_available("created_date"):
            return None
        try:
            return (self._parse_whois_date(self.whois_data["created_date"]) -
                    datetime.datetime.strptime(malware_family_validities[family], "%Y-%m-%d")).days
        except:
            return None

def f_whois_registration_and_validity_start_date(self):
    """
    Type: numeric
    Difference between start date of validity of the AGD and creation date of the domain.
    Intuition: Combining with the registration date of the AGD, sites with registration dates a long time
    before the validity of the AGD could be more likely to be benign.
    :return: days between AGD validity start and domain creation, or None.
    """
    if not self._is_whois_available("created_date"):
        return None
    try:
        return (self._parse_whois_date(self.whois_data["created_date"]) -
                dateparser.parse(self.malware_data[2])).days
    except:
        return None

def f_malware_wordlist_based_dga(self):
    """
    Type: categorical
    Indicates whether the DGA uses a wordlist to generate domains.
    Intuition: AGDs based on wordlists can resemble regular phrases and are therefore more likely to collide with legitimate domains.
    :return: bool from the precomputed wordlist flag, or None when absent.
    """
    if not self.wordlist_based_data:
        return None
    return self.wordlist_based_data[1] == "True"  # self.malware_data[1] in wordlist_families

### Domain name features ###
def f_domain_length(self):
"""
Type: numeric
Length of the domain (without the suffix).
Intuition: Shorter domains have a higher chance of collision with a benign domain.
Source: FANCI; PREDATOR; Liu2017CCS; ?
:return:
"""
if not self.suffix_data:
return None
return len(self.suffix_data[5] + self.suffix_data[4])
def f_domain_digit_ratio(self):
"""
Type: numeric
Proportion of digits over all characters (for the domain without the suffix).
Intuition: Malicious domains / AGDs are more likely to contain digits.
Source: EXPOSURE < ? ; FANCI
:return:
"""
if not self.suffix_data:
return None
return sum(list(map(lambda x: 1 if x.isdigit() else 0, self.suffix_data[5] + self.suffix_data[4])))/len(self.suffix_data[5] + self.suffix_data[4])
### DNS features ###
def f_known_sinkhole(self):
    """
    Type: categorical (sinkdb|email|stamparm|none)
    Indicates whether the domain belongs to a known sinkhole (Evaluation Scheme - 4).
    Based on: A record + listing in SinkDB, whois email, A record + listing in Miroslav Stampar's sinkhole list.
    Intuition: Sinkholed domains shouldn't be seized.
    :return: label of the first matching sinkhole source, or None.
    """
    # NOTE(review): eval() parses the list-valued CSV fields; this is only
    # safe if the input CSVs are fully trusted (ast.literal_eval would be
    # the safer choice).
    try:
        if self.pdns_data and self.pdns_data[4] and any(auxiliary_data[self.formatted_snapshot_date]["sinkhole_data"].get(ip_address, False) for ip_address in eval(self.pdns_data[4])):  # A records
            result = "dns_a_sinkdb"
        elif self.pdns_data and self.pdns_data[4] and any(ip_address in shadowserver_sinkholes_a for ip_address in eval(self.pdns_data[4])):
            result = "dns_a_shadowserver"
        # NOTE(review): this branch matches shadowserver_sinkholes_ns but
        # returns "dns_ns_stamparm" -- the same label as the stamparm NS
        # branch below; likely a copy-paste slip ("dns_ns_shadowserver"?).
        # Confirm before renaming: the label is a feature category value.
        elif self.pdns_data and self.pdns_data[5] and any(ns.strip(".") in shadowserver_sinkholes_ns for ns in eval(self.pdns_data[5])):
            result = "dns_ns_stamparm"
        elif self._is_whois_available("nameserver") and any(
                ns.strip(".") in shadowserver_sinkholes_ns for ns in (
                    eval(self.whois_data["nameserver"]) if self.whois_data["nameserver"].startswith("[") else [
                        self.whois_data["nameserver"]])):
            result = "whois_ns_stamparm"
        elif self.pdns_data and self.pdns_data[4] and any(retrieve_sinkhole_data.check_against_stamparm_ip(ip_address) for ip_address in eval(self.pdns_data[4])):
            result = "dns_a_stamparm"
        elif self.pdns_data and self.pdns_data[5] and any(retrieve_sinkhole_data.check_against_stamparm_ns(ns.strip(".")) for ns in eval(self.pdns_data[5])):
            result = "dns_ns_stamparm"
        elif self._is_whois_available("nameserver") and any(retrieve_sinkhole_data.check_against_stamparm_ns(ns.strip(".")) for ns in (eval(self.whois_data["nameserver"]) if self.whois_data["nameserver"].startswith("[") else [self.whois_data["nameserver"]])):
            result = "whois_ns_stamparm"
        elif self.pdns_data and self.pdns_data[5] and any(
                retrieve_sinkhole_data.check_against_alowaisheq_ns(ns.strip(".")) for ns in eval(self.pdns_data[5])):
            result = "dns_ns_alowaisheq"
        elif self._is_whois_available("nameserver") and any(
                retrieve_sinkhole_data.check_against_alowaisheq_ns(ns.strip(".")) for ns in (
                    eval(self.whois_data["nameserver"]) if self.whois_data["nameserver"].startswith("[") else [
                        self.whois_data["nameserver"]])):
            result = "whois_ns_alowaisheq"
        elif self._is_whois_available("reg_email") and retrieve_sinkhole_data.check_against_sinkhole_emails(self.whois_data["reg_email"]):
            result = "whois_email"
        else:
            result = None
        return result
    except:
        # Any malformed field falls through to "no sinkhole evidence".
        traceback.print_exc()
        return None
def f_dnsdb_available(self):
    """Whether a passive-DNS (DNSDB) record exists for this domain."""
    return self.pdns_data is not None

def f_dnsdb_nb_queries(self):
    """
    Type: numeric
    Number of DNS queries observed for the domain. (from DNSDB)
    Intuition: Benign sites will actually receive (more) queries.
    Source: Lison2017BIGDATA
    :return: raw CSV field (presumably a numeric string -- not cast here; TODO confirm).
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[3]
def f_dnsdb_active_period(self):
"""
Type: numeric
Time between last seen query and first seen query. (from DNSDB)
Intuition: Sites active for longer are more likely to be benign.
:return:
"""
if not self.pdns_data:
return None
return (datetime.datetime.strptime(self.pdns_data[2], "%Y-%m-%d %H:%M:%S") - datetime.datetime.strptime(self.pdns_data[1], "%Y-%m-%d %H:%M:%S")).seconds
def f_dnsdbwhois_first_seen_after_registration(self):
    """
    Type: numeric
    Time between first seen query and domain creation date. (from DNSDB + WHOIS)
    Intuition: Sites active quickly after registration are less likely to be dormant malicious domains.
    :return: seconds (float) between creation date and first observed query, or None.
    """
    if not self.pdns_data or not self._is_whois_available("created_date"):
        return None
    first_seen = datetime.datetime.strptime(self.pdns_data[1], "%Y-%m-%d %H:%M:%S")
    created = self._parse_whois_date(self.whois_data["created_date"])
    # Bug fix: timedelta.seconds drops the .days component (and is always
    # non-negative even for negative deltas); total_seconds() preserves both
    # magnitude and sign.
    return (first_seen - created).total_seconds()
def f_dnsdb_first_seen_before_validity(self):
    """
    Type: numeric
    Time between first seen query and AGD validity date. (from DNSDB)
    Intuition: Sites registered a long time before validity are more likely to be benign.
    :return: seconds (float) between first observed query and AGD validity start, or None.
    """
    if not self.pdns_data:
        return None
    validity_start = dateparser.parse(self.malware_data[2])
    first_seen = datetime.datetime.strptime(self.pdns_data[1], "%Y-%m-%d %H:%M:%S")
    # Bug fix: timedelta.seconds drops the .days component and is always
    # non-negative even when first_seen is after validity_start;
    # total_seconds() preserves the full signed duration.
    return (validity_start - first_seen).total_seconds()
    # return (datetime.datetime.strptime(self.malware_data[2], "%Y-%m-%d %H:%M:%S") - datetime.datetime.strptime(self.pdns_data[1], "%Y-%m-%d %H:%M:%S")).seconds
def f_dnsdb_first_seen_before_now(self):
"""
Type: numeric
Time between first seen query and domain creation date. (from DNSDB + WHOIS)
Intuition: Sites active quickly after registration are less likely to be dormant malicious domains.
:return:
"""
if not self.pdns_data:
return None
return (self.snapshot_date - datetime.datetime.strptime(self.pdns_data[1], "%Y-%m-%d %H:%M:%S")).seconds
# The twelve f_dnsdb_record_* accessors below expose columns 6-17 of the
# DNSDB CSV row: per-record-type "seen" flags. Values are returned as the
# raw CSV fields (presumably "True"/"False" strings -- not parsed to bool
# here; TODO confirm).
def f_dnsdb_record_A(self):
    """
    Type: categorical (true|false)
    Record type A seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[6]

def f_dnsdb_record_AAAA(self):
    """
    Type: categorical (true|false)
    Record type AAAA seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[7]

def f_dnsdb_record_CAA(self):
    """
    Type: categorical (true|false)
    Record type CAA seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[8]

def f_dnsdb_record_CNAME(self):
    """
    Type: categorical (true|false)
    Record type CNAME seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[9]

def f_dnsdb_record_HINFO(self):
    """
    Type: categorical (true|false)
    Record type HINFO seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[10]

def f_dnsdb_record_MX(self):
    """
    Type: categorical (true|false)
    Record type MX seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[11]

def f_dnsdb_record_NS(self):
    """
    Type: categorical (true|false)
    Record type NS seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[12]

def f_dnsdb_record_PTR(self):
    """
    Type: categorical (true|false)
    Record type PTR seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[13]

def f_dnsdb_record_RP(self):
    """
    Type: categorical (true|false)
    Record type RP seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[14]

def f_dnsdb_record_SOA(self):
    """
    Type: categorical (true|false)
    Record type SOA seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[15]

def f_dnsdb_record_SPF(self):
    """
    Type: categorical (true|false)
    Record type SPF seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[16]

def f_dnsdb_record_TXT(self):
    """
    Type: categorical (true|false)
    Record type TXT seen on this domain (from DNSDB).
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.pdns_data:
        return None
    return self.pdns_data[17]
def f_openintel_available(self):
    """Whether OpenIntel active-DNS counters exist for this domain."""
    return self.adns_data is not None

def f_openintel_first_seen_before_now(self):
    """
    Type: numeric
    Largest of the per-record-type day counters (columns 1-4 and 15), capped
    at openintel_cap -- presumably days since the domain was first seen in
    OpenIntel; TODO confirm the column semantics.
    (Docstring corrected: the old text was copy-pasted from the DNSDB
    active-period feature.)
    Intuition: Sites active for longer are more likely to be benign.
    :return:
    """
    if not self.adns_data:
        return None
    # NOTE(review): the last operand parenthesizes as int(x if x else 0)
    # instead of (int(x) if x else 0) like its siblings; the resulting value
    # is the same either way.
    return ( min(openintel_cap,
                 max(int(self.adns_data[1]) if self.adns_data[1] else 0,
                     int(self.adns_data[2]) if self.adns_data[2] else 0,
                     int(self.adns_data[3]) if self.adns_data[3] else 0,
                     int(self.adns_data[4]) if self.adns_data[4] else 0,
                     int(self.adns_data[15] if self.adns_data[15] else 0))
                 ))

def f_openintel_first_seen_before_validity(self):
    """
    Type: numeric
    Capped first-seen day counter shifted by the (negative or positive) gap
    in days between the snapshot date and the AGD validity start --
    presumably days between first OpenIntel observation and validity start;
    TODO confirm the intended semantics.
    Intuition: Sites active for longer (before validity) are more likely to be benign.
    :return:
    """
    if not self.adns_data:
        return None
    if not self.adns_data[1] and not self.adns_data[2] and not self.adns_data[3] and not self.adns_data[4] and not self.adns_data[15]:
        return 0
    return (min(openintel_cap,
                max(int(self.adns_data[1]) if self.adns_data[1] else 0,
                    int(self.adns_data[2]) if self.adns_data[2] else 0,
                    int(self.adns_data[3]) if self.adns_data[3] else 0,
                    int(self.adns_data[4]) if self.adns_data[4] else 0,
                    int(self.adns_data[15] if self.adns_data[15] else 0))) +
            (dateparser.parse(self.malware_data[2]) - self.snapshot_date).days)

def f_openintel_nb_days_seen_A(self):
    """
    Type: numeric
    Number of days an A record was seen on this domain (from OpenIntel), capped at openintel_cap.
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.adns_data:
        return None
    return min(openintel_cap,int(self.adns_data[5])) if self.adns_data[5] else 0

def f_openintel_nb_days_seen_AAAA(self):
    """
    Type: numeric
    Number of days an AAAA record was seen on this domain (from OpenIntel), capped at openintel_cap.
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.adns_data:
        return None
    return min(openintel_cap,int(self.adns_data[6])) if self.adns_data[6] else 0

def f_openintel_nb_days_seen_MX(self):
    """
    Type: numeric
    Number of days an MX record was seen on this domain (from OpenIntel), capped at openintel_cap.
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.adns_data:
        return None
    return min(openintel_cap,int(self.adns_data[8])) if self.adns_data[8] else 0

def f_openintel_nb_days_seen_NS(self):
    """
    Type: numeric
    Number of days an NS record was seen on this domain (from OpenIntel), capped at openintel_cap.
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.adns_data:
        return None
    return min(openintel_cap,int(self.adns_data[7])) if self.adns_data[7] else 0

def f_openintel_nb_days_seen_SOA(self):
    """
    Type: numeric
    Number of days an SOA record was seen on this domain (from OpenIntel), capped at openintel_cap.
    Intuition: Benign sites may have certain 'rarer' record types.
    Source: Fraunhofer report
    """
    if not self.adns_data:
        return None
    return min(openintel_cap,int(self.adns_data[14])) if self.adns_data[14] else 0
### Registration/WHOIS features ###
def f_whois_available(self):
    """Whether a WHOIS record exists for this domain."""
    return self.whois_data is not None

def f_whois_registrar(self):
    """
    Type: categorical
    The registrar used for the latest registration of the domain.
    Prefers the stable IANA registrar id ("reg-<id>") over the free-text name.
    Intuition: Malicious parties may prefer certain registrars e.g. due to low prices or few validity checks.
    Source: PREDATOR paper < Felegyhazi2010 + Hao2013
    :return:
    """
    if not self._is_whois_available("registrar"):
        return None
    if "registrar_iana_id" in self.whois_data and self.whois_data["registrar_iana_id"]:
        return "reg-{}".format(self.whois_data["registrar_iana_id"])
    else:
        return self.whois_data["registrar"]

def f_whois_registration_age(self):
    """
    Type: numeric
    Length in days of the period between the date of registration and today. (~ Evaluation Scheme - 7)
    Intuition: Domains that have been registered a long time ago are more likely to be 'real' benign sites.
    Source: PREDENTIFIER
    :return: days, or None when missing or unparseable.
    """
    if not self._is_whois_available("created_date"):
        return None
    try:
        return (self.snapshot_date - self._parse_whois_date(self.whois_data["created_date"])).days
    except:
        # Unparseable WHOIS date -> treat as unavailable.
        return None

def f_whois_registration_period(self):
    """
    Type: numeric
    Length in days of the period for which a domain is registered. (~ Evaluation Scheme - 7)
    Intuition: Malicious domains will be registered for short periods (e.g. 1 year), while domains registered for
    a longer time are more likely to be benign.
    Source: PREDATOR
    Keep in mind (from "WHOIS Lost In Translation"):
    When a registrar does not renew or delete a domain before its expiration date, the registry automatically
    extends the registration by one year by moving the domain into the auto-renew state.
    :return: days, or None when either date is missing or unparseable.
    """
    if (not self._is_whois_available("expired_date")) or (not self._is_whois_available("created_date")):
        return None
    try:
        return (self._parse_whois_date(self.whois_data["expired_date"]) -
                self._parse_whois_date(self.whois_data["created_date"])).days
    except:
        return None

def f_whois_has_been_renewed(self):
    """
    Type: categorical (true|false)
    Indicates whether a domain has been renewed.
    Intuition: Malicious domains are short-lived and therefore unlikely to be renewed.
    :return: renewal flag from the precomputed renewal row, or None when absent.
    """
    return self.renewal_data[1] if self.renewal_data else None
def f_whois_privacy(self):
"""
Type: categorical (true|false)
The WHOIS privacy used for the domain, or None if no privacy service is used.
Intuition: abusive domains tend to use Privacy and Proxy services
(but using a WHOIS Privacy and Proxy is not a reliable indicator of malicious activity)
~ not using privacy/proxy -> rather benign; using it -> unknown
Source: Cybercrime gTLDs Korczynski
:return:
"""
for property in ["reg_org", "reg_name", "reg_street", "reg_city", "reg_state", "reg_postal", "reg_country", "reg_email", "reg_phone", "reg_fax", "reg_id"]:
if self._is_whois_available(property):
value = self.whois_data[property]
for keyword in ["privacy", "private", "proxy", "protect", "redacted"]: # actively using privacy service
if keyword in value.lower():
return True
return None
def f_whois_temporary_mail(self):
    """
    Type: categorical (true|false)
    Whether the registrant mail address belongs to a temporary (disposable)
    mail service, based on the data collected by `disposable_email_service.py`.
    Intuition: malicious actors may not bother setting up 'real' mail addresses.
    :return: True/False when the address could be checked, otherwise None.
    """
    if not self._is_whois_available("reg_email"):
        return None
    email = self.whois_data["reg_email"]
    if "@" not in email:
        return None
    parts = email.split("@")
    if len(parts) != 2:
        # invalid email address (multiple '@') -- not checked
        return None
    host = parts[1].lower()
    tempmail_data = auxiliary_data[self.formatted_snapshot_date]["tempmail_data"]
    # tempmail_data[0]: exact disposable domains; tempmail_data[1]: wildcard suffixes.
    return host in tempmail_data[0] or any(d.endswith(host) for d in tempmail_data[1])
def f_whois_valid_phone(self):
    """
    Type: categorical (true|false)
    Whether the phone number provided in WHOIS is valid: True for status
    "VALID", False for "INVALID", None for any other status or when no WHOIS
    validity data is available.
    (BUG FIX in docs: the previous docstring described 0/1/2 return codes,
    but the code returns True/False/None.)
    :return:
    """
    if not self.whois_validity_data:
        return None
    # The phone-validity status string is stored at index 3 of the record.
    status = self.whois_validity_data[3]
    return True if status == "VALID" else (False if status == "INVALID" else None)
### Top websites lists features (~ Evaluation Scheme - 1) ###
def f_topsites_alexa_presence(self):
    """
    Numeric feature: number of days the domain appeared in Alexa's top
    websites list (presence over a long period suggests genuine popularity
    and benignness). Returns None when no Alexa data is recorded.
    """
    entry = self.topsites_data["alexa"]
    return entry[0] if entry else None
def f_topsites_alexa_average_rank(self):
    """
    Numeric feature: average rank over all of the domain's appearances in
    Alexa's top websites list (better ranks suggest genuine popularity).
    Source: Lison2017BIGDATA
    Returns None when no Alexa data or no meaningful rank sum is recorded.
    """
    entry = self.topsites_data["alexa"]
    if not entry:
        return None
    days, rank_sum = entry[0], entry[1]
    average_rank = round(rank_sum / days) if rank_sum else 0
    return average_rank if average_rank > 0 else None
def f_topsites_umbrella_presence(self):
    """
    Numeric feature: number of days the domain appeared in Umbrella's top
    websites list (longer presence suggests genuine popularity and
    benignness). Returns None when no Umbrella data is recorded.
    """
    entry = self.topsites_data["umbrella"]
    return entry[0] if entry else None
def f_topsites_umbrella_average_rank(self):
    """
    Numeric feature: average rank over all of the domain's appearances in
    Umbrella's top websites list (better ranks suggest genuine popularity).
    Returns None when no Umbrella data or no meaningful rank sum is recorded.
    """
    entry = self.topsites_data["umbrella"]
    if not entry:
        return None
    days, rank_sum = entry[0], entry[1]
    average_rank = round(rank_sum / days) if rank_sum else 0
    return average_rank if average_rank > 0 else None
def f_topsites_majestic_presence(self):
    """
    Numeric feature: number of days the domain appeared in Majestic's top
    websites list (longer presence suggests genuine popularity and
    benignness). Returns None when no Majestic data is recorded.
    """
    entry = self.topsites_data["majestic"]
    return entry[0] if entry else None
def f_topsites_majestic_average_rank(self):
    """
    Numeric feature: average rank over all of the domain's appearances in
    Majestic's top websites list (better ranks suggest genuine popularity).
    Returns None when no Majestic data or no meaningful rank sum is recorded.
    """
    entry = self.topsites_data["majestic"]
    if not entry:
        return None
    days, rank_sum = entry[0], entry[1]
    average_rank = round(rank_sum / days) if rank_sum else 0
    return average_rank if average_rank > 0 else None
def f_topsites_quantcast_presence(self):
    """
    Numeric feature: number of days the domain appeared in Quantcast's top
    websites list (longer presence suggests genuine popularity and
    benignness). Returns None when no Quantcast data is recorded.
    """
    entry = self.topsites_data["quantcast"]
    return entry[0] if entry else None
def f_topsites_quantcast_average_rank(self):
    """
    Numeric feature: average rank over all of the domain's appearances in
    Quantcast's top websites list (better ranks suggest genuine popularity).
    Returns None when no Quantcast data or no meaningful rank sum is recorded.
    """
    entry = self.topsites_data["quantcast"]
    if not entry:
        return None
    days, rank_sum = entry[0], entry[1]
    average_rank = round(rank_sum / days) if rank_sum else 0
    return average_rank if average_rank > 0 else None
### Content-based features ###
def f_search_pages_found_wayback_machine(self):
    """
    Numeric feature: number of scraped pages for this domain on the Wayback
    Machine. Many crawled pages suggest real content (malicious actors often
    do not bother setting up a real website). Returns None when the domain is
    absent from the Wayback data for this snapshot.
    """
    wayback = auxiliary_data[self.formatted_snapshot_date]["wayback_domain_data"]
    if self.domain not in wayback:
        return None
    return wayback[self.domain][1]
def f_search_wayback_machine_first_seen_before_now(self):
    """
    Type: numeric
    Difference (in seconds) between the snapshot date and when the site was
    first seen on the Wayback Machine.
    Intuition: existing for longer time -> more likely to be benign
    :return: elapsed seconds as a float, or None when no Wayback record/timestamp exists.
    """
    wayback = auxiliary_data[self.formatted_snapshot_date]["wayback_domain_data"]
    if self.domain not in wayback:
        return None
    wayback_timestamp = wayback[self.domain][2]
    if wayback_timestamp == "-1":  # sentinel: no first-seen timestamp recorded
        return None
    first_seen = datetime.datetime.strptime(wayback_timestamp, "%Y%m%d%H%M%S")
    # BUG FIX: the original used timedelta.seconds, which only yields the
    # sub-day remainder (0..86399) and silently discards whole days;
    # total_seconds() measures the full elapsed time.
    return (self.snapshot_date - first_seen).total_seconds()
def f_search_wayback_machine_first_seen_before_validity(self):
    """
    Type: numeric
    Difference (in seconds) between the validity start date and when the site
    was first seen on the Wayback Machine.
    Intuition: existing for longer time -> more likely to be benign
    :return: elapsed seconds as a float, or None when no Wayback record/timestamp exists.
    """
    wayback = auxiliary_data[self.formatted_snapshot_date]["wayback_domain_data"]
    if self.domain not in wayback:
        return None
    wayback_timestamp = wayback[self.domain][2]
    if wayback_timestamp == "-1":  # sentinel: no first-seen timestamp recorded
        return None
    first_seen = datetime.datetime.strptime(wayback_timestamp, "%Y%m%d%H%M%S")
    # malware_data[2] holds the validity start date string -- TODO confirm.
    # BUG FIX: timedelta.seconds only yields the sub-day remainder (0..86399)
    # and silently discards whole days; total_seconds() measures the full span.
    return (dateparser.parse(self.malware_data[2]) - first_seen).total_seconds()
### Certificate transparency logs ###
def f_ct_has_certificate(self):
    """
    Type: binary
    Whether the domain had a certificate according to the certificate
    transparency data (acquiring a certificate requires setup effort,
    indicating benignness). Returns None when no CT record exists.
    """
    record = auxiliary_data[self.formatted_snapshot_date]["ct_data"].get(self.domain)
    return record[0] == "True" if record else None
### Helper methods ###
def _is_whois_available(self, field):
return self.whois_data and field in self.whois_data and self.whois_data[field]
def _parse_whois_date(self, whois_date):
    """
    Parse a WHOIS date string (stripping stray "[]':"-style decoration) into a
    naive datetime, or return None when the value cannot be parsed.
    """
    try:
        parsed = dateparser.parse(whois_date.strip("[]':"))
    except Exception:  # BUG FIX: narrowed from a bare `except:` (which also swallowed SystemExit/KeyboardInterrupt)
        return None
    # dateparser.parse returns None for unparseable input; the original relied
    # on the subsequent .replace() raising AttributeError to hit its except.
    if parsed is None:
        return None
    return parsed.replace(tzinfo=None)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,314
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/ensemble_2019_evaluation.py
|
import datetime
import os
import argparse
import json
import matplotlib.ticker
import matplotlib.patches as patches
import pandas as pd
import numpy as np
import utils
from sklearn.preprocessing import StandardScaler, Binarizer, LabelEncoder, LabelBinarizer, OneHotEncoder
from sklearn.pipeline import Pipeline
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.impute import SimpleImputer
from joblib import load
import itertools
from sklearn.metrics import confusion_matrix, f1_score, roc_auc_score, precision_score, recall_score, accuracy_score
from evaluation.metrics import workReducedPostDetermineThrOneGoBis
import evaluation.preanalysis as prean
import dataprocessing.preprocessing as pre
import macroify
if __name__ == "__main__":
    # Evaluate 2019 data with per-source-pattern models trained on 2017+2018:
    # each combination of available data sources ("code", e.g. '1011') gets its
    # own classifier, and per-pattern predictions are merged into one ensemble.
    results = {}
    features_2017, labelzz_2017, _ = pre.loadAndCleanDataMaxDom('1111', False, '2017', whoisdatacompl=True)
    features_2018, labelzz_2018, _ = pre.loadAndCleanDataMaxDom('1111', False, '2018', whoisdatacompl=True)
    x_train = pd.concat([features_2017, features_2018])
    y_train = np.concatenate([labelzz_2017, labelzz_2018])
    prean.covMatrix(x_train,y_train,'extended/')
    available, reputation, dns, whois, openintel, label = pre.loadAndCleanDataPerDataSet(False, '2019', whoisdatacompl=False)
    # Running totals across all pattern-specific models.
    total_fp = 0  # BUG FIX: total_fp was never initialized (nor accumulated) but is printed below -> NameError
    total_fn = 0
    total_manual = 0
    total_pred = 0
    total_amount_of_domains = len(available.index)
    classDictionary = {'malicious': 1, 'benign': 0}
    labelzsss = label.map(classDictionary)
    total_amount_positive = labelzsss.sum()
    total_amount_negative = len(labelzsss.index) - labelzsss.sum()
    l = [False,True]
    dfs = []
    codesz = []
    ensemble_scores_pos = []
    ensemble_scores_neg = []
    ensemble_predictions = []
    ensemble_predictions_priori = []
    ensemble_labels_priori = []
    ensemble_labels = []
    metrics = { 'f1': [], 'precision': [], 'recall': [], 'acc_train': [], 'acc_test': [], 'eer': [], 'fnr_work_reduced': [],
                'fpr_work_reduced': [], 'work_reduced': [], 'work_reduced_negative': [], 'work_reduced_positive': []}
    for x in itertools.product(l,repeat=4):
        code = ''.join(['1' if i else '0' for i in x])
        if code != '0000': # code[0] != '0'
            features_2017, labelzz_2017, _ = pre.loadAndCleanDataMaxDom(code, False, '2017', whoisdatacompl=False)
            features_2018, labelzz_2018, _ = pre.loadAndCleanDataMaxDom(code, False, '2018', whoisdatacompl=False)
            x_train = pd.concat([features_2017, features_2018])
            y_train = np.concatenate([labelzz_2017, labelzz_2018])
            # Category labels distinguish 2017 (1) from 2018 (2) positives for threshold tuning.
            y_train_category = np.concatenate([labelzz_2017, labelzz_2018*2])
            clf_tuned = load('models/2017/model' + code + '.joblib')
            # Re-fit a fresh classifier with the tuned hyperparameters on 2017+2018 data.
            if isinstance(clf_tuned, GradientBoostingClassifier):
                params = clf_tuned.get_params()
                clf = GradientBoostingClassifier(**params)
            else:
                params = clf_tuned.best_params_
                clf = GradientBoostingClassifier(**params, random_state=42)
            clf.fit(x_train, y_train)
            # Construct domains that should be classified by this model
            features, labelzz = pre.loadAndCleanDataExactPattern(x, available, reputation, dns, whois, openintel, label)
            amount_of_domains = len(features.index)
            codesz.append(code)
            print(amount_of_domains, 'domains to classify for sourcepattern', code)
            if len(labelzz.index) != 0:  # BUG FIX: was `len(labelzz.index != 0)` -- same truthiness, but built a throwaway boolean array
                print(features.columns)
                print('With', amount_of_domains-labelzz.sum(), 'negative domains and', labelzz.sum(), 'positive domains')
                scores = clf.predict_proba(features)
                predictions = clf.predict(features)
                df = pd.DataFrame(list(zip(predictions, scores[:,1], len(predictions)*[code])),
                                  index=features.index, columns=['classification 0=benign, 1=malicious', 'score', 'model code'])
                positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
                work_reduced_positive, _, _ = workReducedPostDetermineThrOneGoBis(x_train, y_train, code, scores,
                                                                                 labelzz, y_train_category, [0.02])
                total_fn += sum(negative_pred)
                total_fp += (len(positive_pred) - sum(positive_pred))  # BUG FIX: accumulate false positives, as in the sibling evaluation scripts
                total_manual += len(no_action_pred)
                total_pred += (len(positive_pred) + len(negative_pred))
                ensemble_predictions = ensemble_predictions + [1]*len(positive_pred) + [0]*len(negative_pred) + no_action_pred
                ensemble_labels = ensemble_labels + positive_pred + negative_pred + no_action_pred
                ensemble_predictions_priori = ensemble_predictions_priori + predictions.tolist()
                ensemble_labels_priori = ensemble_labels_priori + labelzz.values.tolist()
                dfs.append(df)
                ensemble_scores_neg = ensemble_scores_neg + scores[:, 1][labelzz == 0].tolist()
                ensemble_scores_pos = ensemble_scores_pos + scores[:, 1][labelzz == 1].tolist()
                print('Makes a prediction for', (len(positive_pred) + len(negative_pred)), 'domains')
                print('Would predict', np.sum(predictions), 'domains malicious')
    # Save predictions
    df = pd.concat(dfs)
    print(len(df.index)," predictions made")
    df.to_csv('dfs/2019/predictions.csv')
    # NOTE(review): the 0.15 term presumably discounts the share of domains that
    # always needs manual handling -- confirm against the paper/methodology.
    print('Total work reduced', (total_amount_of_domains-total_manual - total_amount_of_domains*0.15)/total_amount_of_domains)
    print('Total FNR', total_fn/total_amount_positive)
    print('Total FPR', total_fp/total_amount_negative)
    print('Accuracy', accuracy_score(ensemble_labels, ensemble_predictions))
    print('F1', f1_score(ensemble_labels, ensemble_predictions))
    print('Precision', precision_score(ensemble_labels, ensemble_predictions))
    print('Recall', recall_score(ensemble_labels, ensemble_predictions))
    print('Little check', total_amount_positive+total_amount_negative == total_amount_of_domains)
    print('Little check', total_pred+total_manual == total_amount_of_domains)
    results['Cworkreducedextended'] = (total_amount_of_domains-total_manual)/total_amount_of_domains *100
    results['Cworkreduced'] = (total_amount_of_domains-total_manual - total_amount_of_domains*0.15)/total_amount_of_domains*100
    macroify.append_file(results)
    print('Little check 2', len(ensemble_scores_neg) + len(ensemble_scores_pos) == total_amount_of_domains)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,315
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/dataset_impact_evaluation_extended.py
|
import datetime
import os
import argparse
import json
import matplotlib.ticker
import matplotlib.patches as patches
import pandas as pd
import numpy as np
import utils
from sklearn.preprocessing import StandardScaler, Binarizer, LabelEncoder, LabelBinarizer, OneHotEncoder
from sklearn.pipeline import Pipeline
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.impute import SimpleImputer
from joblib import load
import itertools
from sklearn.metrics import confusion_matrix, f1_score, roc_auc_score, precision_score, recall_score, accuracy_score
from evaluation.metrics import workReducedPostLoadThr
import dataprocessing.preprocessing as pre
import macroify
import bob.measure
import warnings
warnings.filterwarnings("ignore")
if __name__ == "__main__":
    # Dataset-impact evaluation of the extended model: repeatedly drop one data
    # source ('None' = baseline, nothing dropped) and measure how the ensemble's
    # performance degrades on the 2018 evaluation data.
    results_posteriori = {'work_reduction_metric':[], 'fnr_metric': [], 'fpr_metric': [], 'accuracy_metric': [], 'f1_metric': [],
                          'precision_metric': [],'recall_metric': [], 'eer_metric':[]}
    results_priori = {'work_reduction_metric': [], 'fnr_metric': [], 'fpr_metric': [], 'accuracy_metric': [], 'f1_metric': [],
                      'precision_metric': [], 'recall_metric': [], 'eer_metric': []}
    missing_column = []
    for to_drop in ['None','reputation_available', 'dnsdb_available', 'whois_available', 'openintel_available']:
        available, reputation, dns, whois, openintel, label = pre.loadAndCleanDataPerDataSet(False, '2018')
        available['reputation_available'] = [True] * len(available.index)
        # real amount of labels with extra_train
        total_amount_of_2018_domains = len(available.index)
        classDictionary = {'malicious': 1, 'benign': 0}
        labelzsss = label.map(classDictionary)
        total_amount_2018_positive = labelzsss.sum()
        total_amount_2018_negative = len(labelzsss.index) - labelzsss.sum()
        # dropping train labels
        ind_extra_train = load('models/' + 'extended' + '/additionaltrainindices.joblib')
        manual_added_to_trainingset = len(ind_extra_train)
        available = available.drop(ind_extra_train)
        reputation = reputation.drop(ind_extra_train)
        dns = dns.drop(ind_extra_train)
        whois = whois.drop(ind_extra_train)
        openintel = openintel.drop(ind_extra_train)
        label = label.drop(ind_extra_train)
        # amount of evaluation data
        total_amount_of_domains = len(available.index)
        classDictionary = {'malicious': 1, 'benign': 0}
        labelzsss = label.map(classDictionary)
        total_amount_positive = labelzsss.sum()
        total_amount_negative = len(labelzsss.index) - labelzsss.sum()
        # NOTE(review): 'activeandpassive' is not in the loop list above, so this
        # branch is currently dead -- confirm whether it should be added.
        if to_drop == 'activeandpassive':
            available['dnsdb_available'] = [False] * len(available.index)
            available['openintel_available'] = [False] * len(available.index)
        elif not to_drop == 'None':
            available[to_drop] = [False]*len(available.index)
        # keeping track of results
        total_fp = 0
        total_fn = 0
        total_manual = 0
        total_pred = 0
        total_amount_of_domains = len(available.index)
        dfs = []
        codesz = []
        ensemble_predictions = []
        ensemble_labels = []
        ensemble_scores_pos = []
        ensemble_scores_neg = []
        ensemble_predictions_priori = []
        ensemble_labels_priori = []
        metrics = { 'f1': [], 'precision': [], 'recall': [], 'acc_train': [], 'acc_test': [], 'eer': [], 'fnr_work_reduced': [],
                    'fpr_work_reduced': [], 'work_reduced': [], 'work_reduced_negative': [], 'work_reduced_positive': []}
        l = [False, True]
        for x in itertools.product(l,repeat=4):
            code = ''.join(['1' if i else '0' for i in x])
            features, labelzz = pre.loadAndCleanDataExactPatternAlt(x, available, reputation, dns, whois, openintel,
                                                                    label)
            amount_of_domains = len(features.index)
            print(amount_of_domains, 'domains to classify for sourcepattern', code)
            if code != '0000': # code[0] != '0'
                clf = load('models/' + 'extended' + '/model' + code + '.joblib')
                # Construct domains that should be classified by this model
                if len(labelzz.index) != 0:  # BUG FIX: was `len(labelzz.index != 0)` -- same truthiness, but built a throwaway boolean array
                    print('With', amount_of_domains-labelzz.sum(), 'negative domains and', labelzz.sum(), 'positive domains')
                    index = features.index
                    scores = clf.predict_proba(features)
                    predictions = clf.predict(features)
                    df = pd.DataFrame(list(zip(predictions, scores[:,1], len(predictions)*[code])),
                                      index=features.index, columns=['classification 0=benign, 1=malicious', 'score', 'model code'])
                    positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
                    work_reduced_positive, _, _ = workReducedPostLoadThr('extended', code, scores, labelzz)
                    total_fp += (len(positive_pred) - sum(positive_pred))
                    total_fn += sum(negative_pred)
                    total_manual += len(no_action_pred)
                    total_pred += (len(positive_pred) + len(negative_pred))
                    ensemble_predictions = ensemble_predictions + [1]*len(positive_pred) + [0]*len(negative_pred) + no_action_pred
                    ensemble_labels = ensemble_labels + positive_pred + negative_pred + no_action_pred
                    ensemble_predictions_priori = ensemble_predictions_priori + predictions.tolist()
                    ensemble_labels_priori = ensemble_labels_priori + labelzz.values.tolist()
                    ensemble_scores_neg = ensemble_scores_neg + scores[:, 1][labelzz == 0].tolist()
                    ensemble_scores_pos = ensemble_scores_pos + scores[:, 1][labelzz == 1].tolist()
                    print('Makes a prediction for', (len(positive_pred) + len(negative_pred)), 'domains')
                    print('Would predict', np.sum(predictions), 'domains malicious')
            else:
                # No data sources available: all of these domains go to manual review.
                total_manual += len(labelzz.index)
                ensemble_predictions = ensemble_predictions + labelzz.values.tolist()
                ensemble_labels = ensemble_labels + labelzz.values.tolist()
        print('Total work reduced', (total_amount_of_domains-total_manual)/total_amount_of_domains)
        print('Total FNR', total_fn/total_amount_positive)  # BUG FIX: FNR/FPR expressions were swapped (cf. the correct 'real' prints below)
        print('Total FPR', total_fp/total_amount_negative)
        print('Total work reduced real', (total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains)
        print('Total FNR real', total_fn / total_amount_2018_positive)
        print('Total FPR real', total_fp / total_amount_2018_negative)
        print('Accuracy', accuracy_score(ensemble_labels, ensemble_predictions))
        print('F1', f1_score(ensemble_labels, ensemble_predictions))
        print('Precision', precision_score(ensemble_labels, ensemble_predictions))
        print('Recall', recall_score(ensemble_labels, ensemble_predictions))
        print('Little check', total_amount_positive+total_amount_negative == total_amount_of_domains)
        print('Little check', total_pred+total_manual == total_amount_of_domains)
        print('Little check', len(ensemble_scores_pos) + len(ensemble_scores_neg) == total_amount_of_domains)
        print('Little check', len(ensemble_scores_pos) == total_amount_positive)
        print('Little check', len(ensemble_scores_neg) == total_amount_negative)
        print('Little check', total_amount_of_domains + manual_added_to_trainingset == total_amount_of_2018_domains)
        results_posteriori['work_reduction_metric'].append((total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains)
        results_posteriori['fnr_metric'].append(total_fn / total_amount_2018_positive)
        results_posteriori['fpr_metric'].append(total_fp / total_amount_2018_negative)
        results_posteriori['accuracy_metric'].append(accuracy_score(ensemble_labels, ensemble_predictions))
        results_posteriori['f1_metric'].append(f1_score(ensemble_labels, ensemble_predictions))
        results_posteriori['precision_metric'].append(precision_score(ensemble_labels, ensemble_predictions))
        results_posteriori['recall_metric'].append(recall_score(ensemble_labels, ensemble_predictions))
        results_posteriori['eer_metric'].append(bob.measure.eer(ensemble_scores_neg,ensemble_scores_pos))
        # NOTE(review): the priori EER reuses the posteriori score lists -- confirm intended.
        results_priori['eer_metric'].append(bob.measure.eer(ensemble_scores_neg,ensemble_scores_pos))
        results_priori['work_reduction_metric'].append((total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains)
        results_priori['fnr_metric'].append(total_fn / total_amount_2018_positive)
        results_priori['fpr_metric'].append(total_fp / total_amount_2018_negative)
        results_priori['accuracy_metric'].append(accuracy_score(ensemble_labels_priori, ensemble_predictions_priori))
        results_priori['f1_metric'].append(f1_score(ensemble_labels_priori, ensemble_predictions_priori))
        results_priori['precision_metric'].append(precision_score(ensemble_labels_priori, ensemble_predictions_priori))
        results_priori['recall_metric'].append(recall_score(ensemble_labels_priori, ensemble_predictions_priori))
        missing_column.append(to_drop)
    df = pd.DataFrame(results_posteriori, index=missing_column)
    df.to_csv('dfs/' + 'extended' + '/dataset_impact_posteriori.csv')
    df = pd.DataFrame(results_priori, index=missing_column)
    df.to_csv('dfs/' + 'extended' + '/dataset_impact_priori.csv')
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,316
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/utils.py
|
def translatecode(code):
    """Map a source-pattern bitstring to letters: '1' -> 'Y', anything else -> 'N'."""
    return ''.join('Y' if char == '1' else 'N' for char in code)
def translateyear(year):
    """Map a dataset year string to its letter code (A/B/C); unknown years get a sentinel."""
    # Sentinel string kept byte-for-byte from the original (including the typo).
    return {'2017': 'A', '2018': 'B', '2019': 'C'}.get(year, 'somethingwentworng')
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,317
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/macroify.py
|
def macroify(dct):
    """Render every (key, value) pair of *dct* as a LaTeX \\newcommand line."""
    return "".join(macroify_single(key, value) + "\n" for key, value in dct.items())


def macroify_single(key, value):
    """Render one LaTeX macro definition, e.g. ('a', 1) -> '\\newcommand{\\a}{1}'."""
    return "\\newcommand{{\\{key}}}{{{value}}}".format(key=key, value=value)
def append_file(dct):
    """Append the LaTeX macro lines for *dct* to latexvariables.txt."""
    with open('latexvariables.txt', 'a') as out:
        out.write(macroify(dct))
def new_file(dct):
    """(Re)create latexvariables.txt containing only the macro lines for *dct*."""
    with open('latexvariables.txt', 'w+') as out:
        out.write(macroify(dct))
if __name__ == '__main__':
    # Smoke test: print the generated LaTeX macros for a sample dict.
    print(macroify({"a": 123, "b": 456, "c": "ABC"}))
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,318
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/evaluation/postanalysis.py
|
import pandas as pd
import numpy as np
import datetime
from sklearn.metrics import confusion_matrix
import macroify
def saveFpFnDf(x_test, y_test, y_pred, columnnames, domainname, path):
    ''' Save the misclassified domains: false positives and false negatives go
    to separate CSV files under 'dfs/<path>'.
    :param x_test: features of the data points
    :param y_test: real labels
    :param y_pred: predicted labels
    :param columnnames: names of the features
    :param domainname: list of domain names (used as the index)
    :param path: sub-path under 'dfs/' to save to
    '''
    frame = pd.DataFrame(x_test, columns=columnnames, index=domainname)
    wrong = y_test != y_pred
    false_positives = frame[np.logical_and(wrong, y_pred == 1)]
    false_negatives = frame[np.logical_and(wrong, y_pred == 0)]
    false_positives.to_csv('dfs/' + path + 'falsepositives.csv')
    false_negatives.to_csv('dfs/' + path + 'falsenegatives.csv')
def saveFpFnDfBis(df, labels, labels_pred, path):
    ''' Save the misclassified domains from a labelled DataFrame: false
    positives and false negatives go to separate CSV files under 'dfs/<path>'.
    :param df: DataFrame with one row per data point
    :param labels: real labels (pandas Series; .values is used)
    :param labels_pred: predicted labels
    :param path: sub-path under 'dfs/' to save to
    '''
    y_test = labels.values
    y_pred = labels_pred
    # Debug output kept from the original implementation.
    print(y_test)
    print(y_pred)
    print(y_test != y_pred)
    wrong = y_test != y_pred
    false_positives = df[np.logical_and(wrong, y_pred == 1)]
    false_negatives = df[np.logical_and(wrong, y_pred == 0)]
    false_positives.to_csv('dfs/' + path + 'falsepositives.csv')
    false_negatives.to_csv('dfs/' + path + 'falsenegatives.csv')
def saveimportance(importances, featurenames, path):
    ''' Save the feature importances, sorted by descending score, to
    'dfs/<path>importance.csv'.
    :param importances: the importance scores
    :param featurenames: the names of the features
    :param path: sub-path under 'dfs/' to save to
    '''
    ranking = pd.DataFrame({'featurename': featurenames, 'score': importances})
    ranking = ranking.sort_values('score', ascending=False)
    ranking.to_csv('dfs/' + path + 'importance.csv')
def featureDistribution(columnnames, estimators, distributions):
    """Collect, per feature name, every split threshold used by the given
    tree estimators into the `distributions` dict (mutated in place).
    `estimators` is a sequence of 1-element containers holding fitted trees."""
    for wrapped in estimators:
        tree = wrapped[0].tree_
        for feature_id, split_threshold in zip(tree.feature, tree.threshold):
            if feature_id >= 0:  # negative feature ids mark leaf nodes
                distributions[columnnames[feature_id]].append(split_threshold)
def understandDecisionTree(estimator):
    """
    Print the structure of a fitted sklearn decision tree: the total node
    count, then each node (indented by depth) as either a leaf or a test node
    with its split feature and threshold.
    :param estimator: a fitted sklearn tree estimator exposing a `tree_` attribute
    """
    n_nodes = estimator.tree_.node_count
    children_left = estimator.tree_.children_left
    children_right = estimator.tree_.children_right
    feature = estimator.tree_.feature
    threshold = estimator.tree_.threshold
    # node_depth[i] = depth of node i; is_leaves[i] = True when node i is a leaf.
    node_depth = np.zeros(shape=n_nodes, dtype=np.int64)
    is_leaves = np.zeros(shape=n_nodes, dtype=bool)
    stack = [(0, -1)]  # seed is the root node id and its parent depth
    # Iterative depth-first traversal filling node_depth / is_leaves.
    while len(stack) > 0:
        node_id, parent_depth = stack.pop()
        node_depth[node_id] = parent_depth + 1
        # If we have a test node (left and right children differ; equal means leaf)
        if (children_left[node_id] != children_right[node_id]):
            stack.append((children_left[node_id], parent_depth + 1))
            stack.append((children_right[node_id], parent_depth + 1))
        else:
            is_leaves[node_id] = True
    print("The binary tree structure has %s nodes and has "
          "the following tree structure:"
          % n_nodes)
    for i in range(n_nodes):
        if is_leaves[i]:
            print("%snode=%s leaf node." % (node_depth[i] * "\t", i))
        else:
            print("%snode=%s test node: go to node %s if X[:, %s] <= %s else to "
                  "node %s."
                  % (node_depth[i] * "\t",
                     i,
                     children_left[i],
                     feature[i],
                     threshold[i],
                     children_right[i],
                     ))
def print_performance_per_malware_family(y_test, y_pred, y_post, column_malware_family = 0, print_to_tex=False):
    '''
    Analyse the performance for each malware family
    :param y_test: real labels
    :param y_pred: predicted labels
    :param y_post: the accounting information for each data point, i.e. malware family
    :param column_malware_family: where the malware family column is located
    :param print_to_tex: additionally print LaTeX table rows for families with more than 50 samples
    :return:
    '''
    malware_families = set(y_post[:,column_malware_family])
    # Running count of all domains that produced a full 2x2 confusion matrix.
    s = 0
    if print_to_tex:
        print("Family & \# samples & Acc. & Prec. & Rec. & FNR & FPR \\\\")
    for family in malware_families:
        # Restrict labels/predictions to the rows of this malware family.
        y_test_family = y_test[y_post[:,column_malware_family] == family]
        y_pred_family = y_pred[y_post[:,column_malware_family] == family]
        try:
            # ravel() assumes a 2x2 matrix; a single-class family yields a 1x1
            # matrix, so the unpack raises ValueError (handled below).
            # NOTE(review): the rate computations can divide by zero when a
            # family has no positives or no negatives -- confirm acceptable.
            tn, fp, fn, tp = confusion_matrix(y_test_family, y_pred_family).ravel()
            print('{} got {} tp, {} fp, {} tn, {} fn, {:.2f} accuracy, {:.2f} fnr, {:.2f} fpr'.format(\
                family, tp, fp, tn, fn, (tp + tn) / (tp + tn + fp + fn), fn / (fn + tp), fp / (fp+tn)))
            # Only families with more than 50 samples get the summary line.
            if tp + fp + tn + fn > 50:
                if print_to_tex:
                    print('{} & {} & {:.1f}\\% & {:.1f}\\% & {:.1f}\\% & {:.1f}\\% & {:.1f}\\% \\\\'.format(
                        family, tp + fp + tn + fn, 100*(tp + tn) / (tp + tn + fp + fn), 100*tp/(tp+fp), 100*tp/(tp+fn), 100*fn / (fn + tp) , 100*fp / (fp+tn)))
                else:
                    print('{} got {} samples, {:.2f} accuracy, {:.2f} fnr, {:.2f} fpr, {:.2f} precision, {:.2f} recall'.format(\
                        family, tp + fp + tn + fn, (tp + tn) / (tp + tn + fp + fn), fn / (fn + tp), fp / (fp+tn), tp/(tp+fp), tp/(tp+fn)))
            s = s + tn + fp + fn + tp
        except ValueError:
            print('family {} got no result'.format(family))
    print('Total amount of domains ' + str(s))
def print_performance_per_malware_validity_timestamp(y_test, y_pred, y_post, column_timestamp=-1):
    '''
    Print the performance per malware validity timestamp
    :param y_test: real labels
    :param y_pred: predicted labels
    :param y_post: accounting information per data point; the validity
        timestamp is read from column `column_timestamp`
    :param column_timestamp: index of the timestamp column in y_post
    :return:
    '''
    # Monthly bin boundaries: end of Nov 2017, each month from Dec 2017 through
    # Dec 2018, Jan 2019, then a far-future catch-all bound (2049).
    timestamps = [datetime.datetime(2017, 11, 30, 0, 0, 0)] + \
                 [datetime.datetime(2017, month, 1, 0, 0, 0) for month in range(12, 12+1)] + \
                 [datetime.datetime(2018, month, 1, 0, 0, 0) for month in range(1, 12 + 1)] + \
                 [datetime.datetime(2019, 1, 1, 0, 0, 0)] + \
                 [datetime.datetime(2049, 1, 1, 0, 0, 0)]
    # Running count of all domains that produced a full 2x2 confusion matrix.
    s = 0
    plot_data = []
    for timestamp_idx in range(len(timestamps) - 1):
        # Select rows whose validity timestamp falls in [bin_start, bin_end).
        y_test_family = y_test[(timestamps[timestamp_idx] <= y_post[:, column_timestamp]) & (y_post[:, column_timestamp] < timestamps[timestamp_idx+1])]
        y_pred_family = y_pred[(timestamps[timestamp_idx] <= y_post[:, column_timestamp]) & (y_post[:, column_timestamp] < timestamps[timestamp_idx+1])]
        try:
            # ravel() assumes a 2x2 matrix; single-class bins raise ValueError.
            tn, fp, fn, tp = confusion_matrix(y_test_family, y_pred_family).ravel()
            print('{} got {} tp, {} fp, {} tn, {} fn, {:.2f} accuracy, {:.2f} fnr, {:.2f} fpr'.format(
                timestamps[timestamp_idx].month, tp, fp, tn, fn, (tp + tn) / (tp + tn + fp + fn), fn / (fn + tp), fp / (fp + tn)))
            plot_data.append(("{}-{}".format(timestamps[timestamp_idx].month, timestamps[timestamp_idx].year),
                              tp, fp, tn, fn, (tp + tn) / (tp + tn + fp + fn), fn / (fn + tp), fp / (fp + tn)))
            s = s + tn + fp + fn + tp
        except ValueError:
            print('got no result')
    # Local import so matplotlib is only required when this function is called.
    import matplotlib.pyplot as plt
    labels = "tp fp tn fn accuracy fnr fpr".split(" ")
    # Per-bin totals: all samples (tp+fp+tn+fn), predicted positive (tp+fp),
    # predicted negative (tn+fn).
    plt.plot([d[0] for d in plot_data], [sum(d[1:5]) for d in plot_data] )
    plt.plot([d[0] for d in plot_data], [sum(d[1:3]) for d in plot_data] )
    plt.plot([d[0] for d in plot_data], [sum(d[3:5]) for d in plot_data])
    plt.show()
    # Rate curves: accuracy, fnr, fpr (d[i+1] skips the bin-name at d[0]).
    for i in range(4,len(labels)):
        plt.plot([d[0] for d in plot_data], [d[i+1] for d in plot_data],
                 label=labels[i])
    plt.show()
def workReducedPostDomains(trainyear, code, scores):
    '''Return boolean index lists (negative / no-action / positive) splitting
    the scored domains by the stored work-reduced thresholds for this model.
    NOTE(review): row 3 of the thresholds CSV is used -- presumably the 2%
    setting; confirm against the threshold-export code.'''
    thresholds = pd.read_csv('dfs/' + trainyear + '/' + code + '_workreduced.csv', index_col=0).loc[:,
                 ['thresholds_fnr', 'thresholds_fpr']]
    lower = thresholds.iloc[3, 0]
    upper = thresholds.iloc[3, 1]
    positive_scores = scores[:, 1]
    negative_pred_ind = [score < lower for score in positive_scores]
    no_action_pred_ind = [lower <= score <= upper for score in positive_scores]
    positive_pred_ind = [score > upper for score in positive_scores]
    return negative_pred_ind, no_action_pred_ind, positive_pred_ind
def thresholdsToLatex(path='dfs/2017/1111_workreduced.csv'):
    """Export the last-row work-reduced summary values from the thresholds CSV
    as LaTeX macros (appended to latexvariables.txt via macroify)."""
    df = pd.read_csv(path, index_col=0)
    last_fnr = df.loc[:, 'fnr'].iloc[-1]
    last_fpr = df.loc[:, 'fpr'].iloc[-1]
    last_sum = df.loc[:, 'sum'].iloc[-1]
    results = {
        'WorkReducedLowerBound': last_fnr,
        'WorkReducedUpperBound': 100 - last_fpr,
        'WorkReducedHundredMinusUpperBound': last_fpr,
        'WorkReducedTwoPercent': last_sum,
        'WorkReducedHundredMinusTwoPercent': 100 - last_sum,
    }
    macroify.append_file(results)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,319
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/feature_generation/retrieve_sinkhole_data.py
|
import csv
import os
import time
import urllib.request
from glob import glob
import socket
import requests
from credentials import SINKDB_HTTP_API_KEY
def download_sinkholes_stamparm(formatted_snapshot_date):
    """Download the maltrail ("stamparm") sinkhole trail lists for one snapshot.

    Fetches every ``sinkhole_<source>.txt`` file from the maltrail repository
    into ``input_data/<snapshot_date>/stamparm_sinkhole/``. The source names
    below were obtained by scraping the file names starting with "sinkhole_"
    at https://github.com/stamparm/maltrail/tree/master/trails/static/malware
    """
    base_url = "https://raw.githubusercontent.com/stamparm/maltrail/master/trails/static/malware/"
    sources = [
        "abuse", "anubis", "arbor", "bitdefender", "blacklab", "botnethunter",
        "certgovau", "certpl", "checkpoint", "cirtdk", "collector", "conficker",
        "cryptolocker", "drweb", "dynadot", "dyre", "farsight", "fbizeus",
        "fitsec", "fnord", "fraunhofer", "gameoverzeus", "georgiatech",
        "gladtech", "honeybot", "hyas", "kaspersky", "kryptoslogic",
        "microsoft", "noip", "rsa", "secureworks", "shadowserver", "sidnlabs",
        "sinkdns", "sofacy", "sugarbucket", "supportintel", "switch", "tech",
        "tsway", "unknown", "virustracker", "wapacklabs", "xaayda", "yourtrap",
        "zinkhole",
    ]
    target_dir = "input_data/{}/stamparm_sinkhole".format(formatted_snapshot_date)
    # Fix: urlretrieve cannot create missing directories, so make sure the
    # snapshot folder exists before the first download.
    os.makedirs(target_dir, exist_ok=True)
    for source in sources:
        filename = "sinkhole_{}.txt".format(source)
        urllib.request.urlretrieve(base_url + filename, "{}/{}".format(target_dir, filename))
def parse_sinkholes_stamparm(formatted_snapshot_date):
    """Parse the downloaded maltrail sinkhole lists for one snapshot.

    Returns ``(ips, nameservers, ips_with_source, nameservers_with_source)``:
    plain sets of entries plus sets of (entry, source) tuples, where the
    source is the trailing part of the trail file's name. Entries that parse
    as IPv4 addresses go into the IP sets; everything else is treated as a
    nameserver/hostname.
    """
    ips = set()
    ips_tagged = set()
    nameservers = set()
    nameservers_tagged = set()
    pattern = os.path.join(os.path.dirname(__file__), "input_data/{}/stamparm_sinkhole/*.txt".format(formatted_snapshot_date))
    for path in glob(pattern):
        # e.g. ".../sinkhole_kaspersky.txt" -> "kaspersky"
        source = path[:-4].split("_")[-1]
        with open(path) as handle:
            for raw_line in handle:
                entry = raw_line.rstrip()
                if not entry or entry.startswith("#"):
                    continue
                try:
                    socket.inet_aton(entry)
                except socket.error:
                    # not parseable as an IPv4 address -> hostname
                    nameservers.add(entry)
                    nameservers_tagged.add((entry, source))
                else:
                    ips.add(entry)
                    ips_tagged.add((entry, source))
    return ips, nameservers, ips_tagged, nameservers_tagged
def parse_sinkholes_alowaisheq_ns():
    """Load the Alowaisheq et al. sinkhole nameserver list into a set.

    Blank lines are skipped; the file lives next to this module.
    """
    path = os.path.join(os.path.dirname(__file__), "alowaisheq_sinkholes_ns.txt")
    with open(path) as handle:
        return {line.rstrip() for line in handle if line.rstrip()}
def load_sinkdb_cache(record, folder):
    """Load cached SinkDB verdicts for one record type ("a" or "ns").

    The cache is a two-column CSV ``entry,True|False`` appended to by the
    check_* functions. Returns a dict mapping entry -> bool; an empty dict
    when no cache file exists yet.
    """
    cache_path = os.path.join(folder, "sinkdb_cache_{record}.csv".format(record=record))
    # EAFP instead of os.path.exists + open: one path construction, no
    # check-then-use race.
    try:
        with open(cache_path) as cache_file:
            reader = csv.reader(cache_file)
            return {entry: status == "True" for entry, status in reader}
    except FileNotFoundError:
        # No lookups cached yet for this record type.
        return {}
def check_a_against_sinkdb(ip_address, sinkdb_a_cache, cache_folder):
    """Check an IPv4 address against the abuse.ch SinkDB HTTP API.

    Cache hits are answered locally without touching the network; misses
    query the API and append the verdict to the on-disk cache. Any API or
    network failure is treated as "not a sinkhole" (best effort).
    """
    if ip_address in sinkdb_a_cache:
        return sinkdb_a_cache[ip_address]
    try:
        r = requests.post("https://sinkdb-api.abuse.ch/api/v1/",
                          data={"api_key": SINKDB_HTTP_API_KEY, "ipv4": ip_address})
        answer = r.json()
        if answer["query_status"] == "ok":
            # Fix: don't shadow `result` with the generator variable.
            result = any(entry["source"] == "sinkhole" for entry in answer["results"])
        else:
            # covers "no_results" and any unexpected status
            result = False
    except Exception:
        # Fix: narrowed from a bare `except:` that also swallowed
        # KeyboardInterrupt/SystemExit; network/JSON errors fail open.
        result = False
    with open(os.path.join(cache_folder, "sinkdb_cache_a.csv"), "a") as sc:
        sc.write("{},{}\n".format(ip_address, result))
    return result
def check_ns_against_sinkdb(nameserver, sinkdb_ns_cache, cache_folder):
    """Check a nameserver/domain against the abuse.ch SinkDB HTTP API.

    Cache hits are answered locally without touching the network; misses
    query the API and append the verdict to the on-disk cache. Any API or
    network failure (including NXDOMAIN-style lookups) is treated as
    "not a sinkhole" (best effort).
    """
    if nameserver in sinkdb_ns_cache:
        return sinkdb_ns_cache[nameserver]
    try:
        r = requests.post("https://sinkdb-api.abuse.ch/api/v1/",
                          data={"api_key": SINKDB_HTTP_API_KEY, "domain": nameserver})
        answer = r.json()
        if answer["query_status"] == "ok":
            # Fix: don't shadow `result` with the generator variable.
            result = any(entry["source"] == "sinkhole" for entry in answer["results"])
        else:
            # covers "no_results" and any unexpected status
            result = False
    except Exception:
        # Fix: narrowed from a bare `except:` that also swallowed
        # KeyboardInterrupt/SystemExit; network/JSON errors fail open.
        result = False
    with open(os.path.join(cache_folder, "sinkdb_cache_ns.csv"), "a") as sc:
        sc.write("{},{}\n".format(nameserver, result))
    return result
# Parsed once at import time: the maltrail ("stamparm") sinkhole lists from
# the 2019-11-29 snapshot. NOTE(review): this performs file I/O on import.
sinkholes_stamparm_ip, sinkholes_stamparm_ns, _, _ = parse_sinkholes_stamparm("20191129")
def check_against_stamparm_ip(ip_address):
    # True when the IP appears in the maltrail sinkhole IP lists.
    return ip_address in sinkholes_stamparm_ip
def check_against_stamparm_ns(ns):
    # True when the nameserver appears in the maltrail sinkhole hostname lists.
    return ns in sinkholes_stamparm_ns
# Sinkhole nameservers from Alowaisheq et al., also loaded at import time.
sinkholes_alowaisheq_ns = parse_sinkholes_alowaisheq_ns()
def check_against_alowaisheq_ns(ns):
    # True when the nameserver appears in the Alowaisheq et al. list.
    return ns in sinkholes_alowaisheq_ns
def load_whois_sinkhole_emails():
    # Read the curated sinkhole e-mail list (path relative to the CWD),
    # skipping blank lines and '#' comments.
    with open("sinkhole_emails.txt") as sem:
        return [mail_address.rstrip() for mail_address in sem if mail_address.rstrip() and not mail_address.startswith("#")]
def check_against_sinkhole_emails(mail_address):
    # NOTE: re-reads the e-mail list from disk on every call (no caching).
    whois_sinkhole_emails = load_whois_sinkhole_emails()
    return mail_address in whois_sinkhole_emails
def check_all_against_sinkdb(input_file, cache_folder, rrtype):
    """Run every entry of *input_file* (first CSV column) through SinkDB.

    rrtype selects the lookup: "a" for IPv4 addresses, "ns" for domains.
    Prints each entry that SinkDB flags as sinkholed.

    Raises:
        ValueError: if rrtype is neither "a" nor "ns".
    """
    if rrtype == "a":
        lookup = check_a_against_sinkdb
    elif rrtype == "ns":
        lookup = check_ns_against_sinkdb
    else:
        # Fix: an unknown rrtype previously left the result variable unbound
        # and crashed with a NameError on the first line of input.
        raise ValueError("rrtype must be 'a' or 'ns', got {!r}".format(rrtype))
    sinkdb_cache = load_sinkdb_cache(rrtype, cache_folder)
    # `infile` instead of the original `input`, which shadowed the builtin.
    with open(input_file) as infile:
        for line in infile:
            entry = line.split(",")[0]
            res = lookup(entry, sinkdb_cache, cache_folder)
            if res:
                print(entry, res)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,320
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/incremental_learning_evaluation.py
|
import dataprocessing.preprocessing as pre
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import train_test_split
from joblib import load
import utils
import argparse
import pandas as pd
import numpy as np
import itertools
from joblib import dump
from sklearn.metrics import confusion_matrix, f1_score, roc_auc_score, precision_score, recall_score, accuracy_score
from evaluation.metrics import workReducedPostLoadThr, workReducedPostDetermineThr, workReducedPostDetermineThrOneGo, \
workReducedPostDetermineThrOneGoOneYear, workReducedPost, workReducedPostDetermineThrOneGoBis
import dataprocessing.sampleselection as ss
import evaluation.postanalysis as postan
import macroify
import bob.measure
if __name__ == "__main__":
    # ---- CLI: only the sample-selection strategy is configurable ----
    parser = argparse.ArgumentParser(description='Train production classifier with some 2018 data. This code implements more techniques than described in the NDSS 2020 paper')
    parser.add_argument('--strategy', '-st',
                        type=str,
                        default='random',
                        help='How to select the additional samples that have to be added, should be either random')
    args = parser.parse_args()
    strategy = args.strategy
    # We tried more strategies to select additional data than described in the NDSS paper.
    strategies = {'random': ss.random, 'practical': ss.practical, 'practicalFraction':ss.practicalFraction}
    if strategy == 'random':
        fraction = 0.15
    else:
        fraction = 100
    results = {}
    # NOTE(review): `method` duplicates `strategies` above; only `method`
    # appears to be used below.
    method = {'random':ss.random, 'practical':ss.practical, 'practicalFraction':ss.practicalFraction}
    available, reputation, dns, whois, openintel, label = pre.loadAndCleanDataPerDataSet(False, '2018')
    total_amount_of_2018_domains = len(available.index)
    costs = [0.001, 0.005, 0.01, 0.02]
    # Per-cost accumulators for ensemble-level counts/metrics across all
    # model codes.
    workreduceddict = {}
    for c in costs:
        workreduceddict[c] = {}
        workreduceddict[c]['total_fp'] = 0
        workreduceddict[c]['total_fn'] = 0
        workreduceddict[c]['total_manual'] = 0
        workreduceddict[c]['total_pred'] = 0
        workreduceddict[c]['ensemble_predictions'] = []
        workreduceddict[c]['ensemble_labels'] = []
        workreduceddict[c]['ensemble_scores_pos'] = []
        workreduceddict[c]['ensemble_scores_neg'] = []
        workreduceddict[c]['ensemble_predictions_priori'] = []
        workreduceddict[c]['ensemble_labels_priori'] = []
    whoisclassified_domians_dfs = []
    whoisclassified_domains_labels = []
    whoisclassified_domains_prediction = []
    classDictionary = {'malicious': 1, 'benign': 0}
    labelzsss = label.map(classDictionary)
    total_amount_2018_positive = labelzsss.sum()
    total_amount_2018_negative = len(labelzsss.index) - labelzsss.sum()
    l = [False,True]
    dfs = []
    codesz = []
    metrics = { 'f1': [], 'precision': [], 'recall': [], 'acc_train': [], 'acc_test': [], 'eer': [], 'fnr_work_reduced': [],
                'fpr_work_reduced': [], 'work_reduced': [], 'work_reduced_negative': [],
                'work_reduced_positive': [], 'work_reduced_real':[]}
    # Select training data - features2 need to be added to the training set.
    features2, labels2, post_analysis_labels2 = pre.loadAndCleanDataMaxDom('1111', False, '2018')
    features2, features_test_domains, labels2, labels_test_domains = \
        method[strategy](features2, labels2, **{'fraction':fraction, 'code': '1111'})
    manual_added_to_trainingset = len(labels2)
    print('From 2018', manual_added_to_trainingset ,'samples are added to the training set')
    labels2 = pd.Series(labels2, index=features2.index)
    labels_test_domains = pd.Series(labels_test_domains, index=features_test_domains.index)
    amount_of_test_domains = len(labelzsss) - len(features2)
    total_amount_positive_test = total_amount_2018_positive - labels2.sum()
    total_amount_negative_test = total_amount_2018_negative - (len(labels2.index) - labels2.sum())
    ind_extra_train = features2.index
    # save extra_train_indices to drop them when models are used
    dump(ind_extra_train, 'models/' + 'extended' + '/additionaltrainindices.joblib')
    # One model per combination of the four data sources; the code is a
    # 4-character bitmask like '1011'.
    for x in itertools.product(l,repeat=4):
        code = ''.join(['1' if i else '0' for i in x])
        if code != '0000':
            # features1 is the 2017 data and is first part of the training set.
            features1, labels1, post_analysis_labels1 = pre.loadAndCleanDataMaxDom(code, False, '2017')
            # select training and testing indices from 'correct' (=abiding model code) featureset
            features3, labels3, post_analysis_labels3 = pre.loadAndCleanDataMaxDom(code, False, '2018')
            labels3 = pd.Series(labels3, index=features3.index)
            features_extra_train = features3.loc[ind_extra_train]
            labels_extra_train = labels3.loc[ind_extra_train]
            features_test = features3.drop(ind_extra_train)
            labels_test = labels3.drop(ind_extra_train)
            features_train = pd.concat([features1, features_extra_train])
            labels_train = np.concatenate([labels1, labels_extra_train])
            # 2018 labels are doubled so the threshold code can tell the
            # years apart (0/1 = 2017, 0/2 = 2018).
            labels_train_year = np.concatenate([labels1, labels_extra_train*2])
            nb_test_domains = len(labels_test)
            nb_test_domains_with_extra_train = len(labels3)
            print('Total training set size', len(labels_train))
            # Load hyperparameters and train classifier
            clf_tuned = load('models/2017/model' + code + '.joblib')
            if isinstance(clf_tuned, GradientBoostingClassifier):
                params = clf_tuned.get_params()
                clf = GradientBoostingClassifier(**params)
            else:
                # saved object is a search object (e.g. GridSearchCV)
                params = clf_tuned.best_params_
                clf = GradientBoostingClassifier(**params, random_state=42)
            clf.fit(features_train, labels_train)
            #save clf
            dump(clf, 'models/' + 'extended' + '/model' + code + '.joblib')
            # Evaluate
            predictions = clf.predict(features_test)
            scores = clf.predict_proba(features_test)
            acc = accuracy_score(labels_test, predictions)
            f1 = f1_score(labels_test, predictions)
            prec = precision_score(labels_test, predictions)
            reca = recall_score(labels_test, predictions)
            # TODO: choose threshold selection method
            # positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
            # work_reduced_positive, lower, upper = \
            #     workReducedPostLoadThr('2017', code, scores, labels_test)
            positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
            work_reduced_positive, thresholdsfnr, thresholdsfpr = \
                workReducedPostDetermineThrOneGoBis(features_train, labels_train, code, scores, labels_test,
                                                    labels_train_year, costs, plot=True, savemetrics=True, path='extended/' + code + '_')
            if code == '1111':
                # Only the all-sources model is exported for the paper.
                postan.thresholdsToLatex(path='dfs/extended/1111_workreduced.csv')
                postan.saveimportance(clf.feature_importances_, features_test.columns, 'extended/1111_')
            # positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
            # work_reduced_positive, lower, upper = \
            #     workReducedPostDetermineThrOneGoOneYear(features1, labels1, features_extra_train, labels_extra_train, code,
            #                                             scores, labels_test)
            print('Manual work for', len(no_action_pred), 'domains. This work is on top of the',
                  len(labels_extra_train), 'that had to be labeled manually to add them to the trainingset')
            codesz.append(code)
            metrics['acc_test'].append(acc)
            metrics['f1'].append(f1)
            metrics['precision'].append(prec)
            metrics['work_reduced_negative'].append(len(negative_pred) / nb_test_domains)
            metrics['work_reduced_positive'].append(len(positive_pred) / nb_test_domains)
            metrics['work_reduced'].append(len(negative_pred) / nb_test_domains + len(positive_pred) / nb_test_domains)
            metrics['work_reduced_real'].append((nb_test_domains - len(no_action_pred)) / nb_test_domains_with_extra_train)
            metrics['fnr_work_reduced'].append(fnr)
            metrics['fpr_work_reduced'].append(fpr)
            # Construct domains that should be classified by this model
            features, labelzz = pre.loadAndCleanDataExactPattern(x, available, reputation, dns, whois, openintel, label)
            iters = features.index.intersection(features_test.index)
            features_to_classify = features_test.loc[iters]
            labelzz = labelzz.loc[iters]
            amount_of_domains = len(features_to_classify.index)
            print(amount_of_domains, 'domains to classify for code', code)
            # NOTE(review): likely meant `len(labelzz.index) != 0` — as
            # written this takes the length of a boolean array, which is
            # truthy whenever any element exists. TODO confirm.
            if len(labelzz.index != 0):
                scores = clf.predict_proba(features_to_classify)
                predictions = clf.predict(features_to_classify)
                if code[2] == '1':
                    # models with WHOIS data: keep the raw rows for FP/FN dumps
                    df = whois.loc[features_to_classify.index]
                    whoisclassified_domians_dfs.append(df)
                    whoisclassified_domains_labels.append(labelzz.loc[df.index])
                    print(type(predictions))
                    whoisclassified_domains_prediction.append(predictions)
                # Accumulate per-cost work-reduced statistics for the ensemble.
                for i,c in enumerate(costs):
                    lower = thresholdsfnr[i]
                    upper = thresholdsfpr[i]
                    positive_pred, negative_pred, no_action_pred, fnr, fpr, work_reduced, work_reduced_negative, \
                    work_reduced_positive = workReducedPost(lower, upper, scores, labelzz)
                    workreduceddict[c]['total_fp'] += (len(positive_pred) - sum(positive_pred))
                    workreduceddict[c]['total_fn'] += sum(negative_pred)
                    workreduceddict[c]['total_manual'] += len(no_action_pred)
                    workreduceddict[c]['total_pred'] += (len(positive_pred) + len(negative_pred))
                    workreduceddict[c]['ensemble_predictions'] = workreduceddict[c]['ensemble_predictions'] + [1] * len(positive_pred) + [0] * len(negative_pred) + no_action_pred
                    workreduceddict[c]['ensemble_labels'] = workreduceddict[c]['ensemble_labels'] + positive_pred + negative_pred + no_action_pred
                    workreduceddict[c]['ensemble_predictions_priori'] = workreduceddict[c]['ensemble_predictions_priori'] + predictions.tolist()
                    workreduceddict[c]['ensemble_labels_priori'] = workreduceddict[c]['ensemble_labels_priori'] + labelzz.values.tolist()
                    workreduceddict[c]['ensemble_scores_neg'] = workreduceddict[c]['ensemble_scores_neg'] + scores[:, 1][labelzz == 0].tolist()
                    workreduceddict[c]['ensemble_scores_pos'] = workreduceddict[c]['ensemble_scores_pos'] + scores[:, 1][labelzz == 1].tolist()
                # dfs.append(df)
                print('Makes a prediction for', (len(positive_pred) + len(negative_pred)), 'domains')
                print('Would predict', np.sum(predictions), 'domains malicious')
                print('=========================================')
    # Print performance per model
    print('===============================================================================')
    for key, value in metrics.items():
        if value:
            print('========== %s ============' % (key))
            for i,v in enumerate(value):
                print('Model %s: %.3f' % (codesz[i], v))
                # codestr = utils.translatecode(code)
                # results[key + codestr] = v
    print('===============================================================================')
    # Report totals at the 2% cost operating point.
    total_fp = workreduceddict[0.02]['total_fp']
    total_fn = workreduceddict[0.02]['total_fn']
    total_manual = workreduceddict[0.02]['total_manual']
    total_pred = workreduceddict[0.02]['total_pred']
    # Test set + what has to be added to training set.
    print('Total work reduced real', (total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains)
    print('Total FNR', total_fn / total_amount_2018_positive)
    print('Total FPR', total_fp / total_amount_2018_negative)
    # Only test set.
    print('Total work reduced only test set', (amount_of_test_domains-total_manual)/amount_of_test_domains)
    print('Total FNR only test set', total_fn/total_amount_positive_test)
    print('Total FPR only test set', total_fp/total_amount_negative_test)
    ensemble_labels = workreduceddict[0.02]['ensemble_labels']
    ensemble_predictions = workreduceddict[0.02]['ensemble_predictions']
    ensemble_labels_priori = workreduceddict[0.02]['ensemble_labels_priori']
    ensemble_predictions_priori = workreduceddict[0.02]['ensemble_predictions_priori']
    ensemble_scores_pos = workreduceddict[0.02]['ensemble_scores_pos']
    ensemble_scores_neg = workreduceddict[0.02]['ensemble_scores_neg']
    # FP and FN to file
    df_data = pd.concat(whoisclassified_domians_dfs)
    df_labels = pd.concat(whoisclassified_domains_labels)
    labels_pred = np.concatenate(whoisclassified_domains_prediction)
    postan.saveFpFnDfBis(df_data, df_labels, labels_pred, 'extended/')
    np.savez('dfs/' + 'ensemble_extended_det_curve.npz', pos=ensemble_scores_pos, neg=ensemble_scores_neg)
    print('AccuracyPosteriori', accuracy_score(ensemble_labels, ensemble_predictions))
    print('F1Posteriori', f1_score(ensemble_labels, ensemble_predictions))
    print('PrecisionPosteriori', precision_score(ensemble_labels, ensemble_predictions))
    print('RecallPosteriori', recall_score(ensemble_labels, ensemble_predictions))
    # Sanity checks on the domain bookkeeping.
    print('Little check', total_amount_2018_positive + total_amount_2018_negative == total_amount_of_2018_domains)
    print('Little check', total_amount_positive_test+total_amount_negative_test == amount_of_test_domains)
    print('Little check', total_pred + total_manual + manual_added_to_trainingset == total_amount_of_2018_domains)
    print('Little check', amount_of_test_domains + manual_added_to_trainingset == total_amount_of_2018_domains)
    # LaTeX macros for the paper (percentages).
    results[strategy + 'workreduced'] = (total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains *100
    results[strategy + 'fnr'+ 'posteriori'] = total_fn / total_amount_2018_positive *100
    results[strategy + 'fpr'+ 'posteriori'] = total_fp/total_amount_2018_negative *100
    results[strategy + 'accuracy' + 'posteriori'] = accuracy_score(ensemble_labels, ensemble_predictions) *100
    results[strategy + 'fone' + 'posteriori'] = f1_score(ensemble_labels, ensemble_predictions) *100
    results[strategy + 'precision' + 'posteriori'] = precision_score(ensemble_labels, ensemble_predictions) *100
    results[strategy + 'recall' + 'posteriori'] = recall_score(ensemble_labels, ensemble_predictions) *100
    results[strategy + 'accuracy'] = accuracy_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    results[strategy + 'fone'] = f1_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    results[strategy + 'precision'] = precision_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    results[strategy + 'recall'] = recall_score(ensemble_labels_priori, ensemble_predictions_priori) * 100
    results[strategy + 'eer'] = bob.measure.eer(ensemble_scores_neg, ensemble_scores_pos) * 100
    fpr, fnr = bob.measure.farfrr(ensemble_scores_neg, ensemble_scores_pos, 0.5)
    results[strategy + 'fpr'] = fpr*100
    results[strategy + 'fnr'] = fnr*100
    print('Accuracy', accuracy_score(ensemble_labels_priori, ensemble_predictions_priori) * 100)
    print('F1', f1_score(ensemble_labels_priori, ensemble_predictions_priori) * 100)
    print('Precision', precision_score(ensemble_labels_priori, ensemble_predictions_priori) * 100)
    print('Recall', recall_score(ensemble_labels_priori, ensemble_predictions_priori) * 100)
    # Work reduced at the remaining cost operating points.
    total_manual = workreduceddict[0.01]['total_manual']
    results[strategy + 'WorkReducedOnePercent'] = (total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains *100
    total_manual = workreduceddict[0.005]['total_manual']
    results[strategy + 'WorkReducedPointFivePercent'] = (total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains *100
    total_manual = workreduceddict[0.001]['total_manual']
    results[strategy + 'WorkReducedPointOnePercent'] = (total_amount_of_2018_domains - total_manual - manual_added_to_trainingset) / total_amount_of_2018_domains *100
    macroify.append_file(results)
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,321
|
DistriNet/avalanche-ndss2020
|
refs/heads/master
|
/evaluation_code_and_models/dataprocessing/preprocessing.py
|
import pandas as pd
from sklearn.preprocessing import Binarizer, OneHotEncoder, LabelBinarizer, LabelEncoder
from sklearn.impute import SimpleImputer
import numpy as np
import os
def loadDataIntersectSamples(sourcepattern, malwareFamily, year):
    """Load features for one data-source combination, restricted to the
    intersection of all data sets.

    Every combination contains only domains of the most restrictive data set,
    i.e. '1111' (all sources available) — so e.g. '1011' has the same number
    of domains as '1111'.

    Args:
        sourcepattern: 4-character bitmask selecting, in order,
            reputation / dnsdb / whois / openintel.
        malwareFamily: when True, keep and one-hot encode the malware-family
            column and binarize the DGA flag.
        year: dataset year used to locate the CSV files.

    Returns:
        (features, labels, post_analysis_labels) — encoded feature matrix,
        label-encoded classes, and the columns reserved for post-analysis.
    """
    DATAPATH = 'datasets/' + year
    FILENAME1 = 'weka_multi_output_features_all_instances_whois.csv'
    FILENAME2 = 'weka_multi_output_features_all_instances_dnsdb.csv'
    whois = pd.read_csv(os.path.join(DATAPATH, FILENAME1))
    whois.index = whois['domain']
    # NOTE(review): the result of this drop is discarded — as written it does
    # not deduplicate anything. Likely meant an assignment as done for `dns`.
    whois.drop(whois.index.duplicated())
    # Column-position slicing below assumes the fixed CSV layout:
    # 0-27 reputation (9-16 openintel), 28..-2 whois, last column the class.
    label = pd.DataFrame(whois.iloc[:, -1])
    reputation = whois.iloc[:, 0:28]
    openintel = whois.iloc[:, 9:17]
    whois = whois.iloc[:, 28:-1]
    print()
    dns = pd.read_csv(os.path.join(DATAPATH, FILENAME2), parse_dates=['malware_validity_start', 'malware_validity_end'])
    dns.index = dns['domain']
    dns = dns.drop(dns.index.duplicated())
    dns = dns.iloc[:, 2:13]
    #### Open Intel clean up ####
    openintel = pd.concat([openintel, label], axis=1, join='inner')
    openintel = openintel[openintel['openintel_available'] == True]
    # redefine label, as openintel offers the least amount of labels
    label = pd.DataFrame(openintel.iloc[:, -1])
    openintel = openintel.drop(['openintel_available', 'class'], axis=1)
    # OpenIntel columns live in their own data set, so drop the copies that
    # ended up inside the reputation slice.
    more_columns_to_drop = ['openintel_available', 'openintel_first_seen_before_now',
                            'openintel_first_seen_before_validity', 'openintel_nb_days_seen_A',
                            'openintel_nb_days_seen_AAAA', 'openintel_nb_days_seen_MX', 'openintel_nb_days_seen_NS',
                            'openintel_nb_days_seen_SOA']
    reputation = reputation.drop(more_columns_to_drop, axis=1)
    ### Dates ###
    # Epoch seconds -> datetime for the validity window and registration date.
    reputation['malware_validity_start'] = pd.to_datetime(reputation['malware_validity_start'], unit='s')
    reputation['malware_validity_end'] = pd.to_datetime(reputation['malware_validity_end'], unit='s')
    whois['whois_registration_date'] = pd.to_datetime(whois['whois_registration_date'], unit='s')
    # binarize presence/flag columns (DGA flag only when families are kept)
    if malwareFamily == True:
        to_binarize = reputation.loc[:, ['malware_wordlist_based_dga','ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence',
                                         'topsites_quantcast_presence', 'topsites_umbrella_presence']]
        binarized = Binarizer().transform(to_binarize)
        reputation.loc[:,
        ['malware_wordlist_based_dga', 'ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence', 'topsites_quantcast_presence',
         'topsites_umbrella_presence']] = binarized
    else:
        to_binarize = reputation.loc[:, ['ct_has_certificate', 'topsites_alexa_presence',
                                         'topsites_majestic_presence',
                                         'topsites_quantcast_presence', 'topsites_umbrella_presence']]
        binarized = Binarizer().transform(to_binarize)
        reputation.loc[:,
        ['ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence',
         'topsites_quantcast_presence',
         'topsites_umbrella_presence']] = binarized
    # encode categorical feature (malware family) as one-hot columns
    if malwareFamily == True:
        enco = OneHotEncoder()
        categorical = enco.fit_transform(reputation.loc[:, ['malware_family']])
        df = pd.DataFrame(categorical.toarray(), columns=enco.get_feature_names(
            ['malware_family']), index=reputation.index)
        reputation = pd.concat([reputation, df], axis=1)
        reputation = reputation.drop(['malware_family'], axis=1)
    # impute search_wayback features: missing means "never seen" -> 0
    to_impute = reputation.loc[:, ['search_pages_found_wayback_machine', 'search_wayback_machine_first_seen_before_now',
                                   'search_wayback_machine_first_seen_before_validity']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    reputation.loc[:, ['search_pages_found_wayback_machine', 'search_wayback_machine_first_seen_before_now',
                       'search_wayback_machine_first_seen_before_validity']] = imputed
    #### whois clean up ####
    # map booleans to strings first so missing values can be imputed with the
    # extra category 'Not known'
    booleanDictionary = {True: 'TRUE', False: 'FALSE'}
    whois.loc[:, 'whois_privacy'] = whois.loc[:, 'whois_privacy'].map(booleanDictionary)
    whois.loc[:, 'whois_temporary_mail'] = whois.loc[:, 'whois_temporary_mail'].map(booleanDictionary)
    whois.loc[:, 'whois_has_been_renewed'] = whois.loc[:, 'whois_has_been_renewed'].map(booleanDictionary)
    whois.loc[:, 'whois_valid_phone'] = whois.loc[:, 'whois_valid_phone'].map(booleanDictionary)
    to_impute = whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value='Not known').fit_transform(to_impute)
    whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']] = imputed
    # one-hot encode the categorical (now three-valued) whois flags
    enc = OneHotEncoder()
    categorical = enc.fit_transform(
        whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']])
    df = pd.DataFrame(categorical.toarray(), columns=enc.get_feature_names(
        ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']), index=whois.index)
    whois = pd.concat([whois, df], axis=1)
    whois = whois.drop(['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone'], axis=1)
    # impute with mean: whois_registration_age, whois_registration_and_validity_start_date and whois_registration_period
    to_impute = whois.loc[:, ['whois_registration_age', 'whois_registration_and_family_start_date',
                              'whois_registration_and_validity_start_date', 'whois_registration_period']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='mean').fit_transform(to_impute)
    whois.loc[:,
    ['whois_registration_age', 'whois_registration_and_family_start_date', 'whois_registration_and_validity_start_date',
     'whois_registration_period']] = imputed
    #### dnsdb clean up ####
    # impute DNS record flags to False (missing means not observed)
    to_impute = dns.loc[:,
                ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                 'dnsdb_record_SOA', 'dnsdb_record_TXT']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=False).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                'dnsdb_record_SOA', 'dnsdb_record_TXT']] = imputed
    # binarize DNS record booleans
    to_binarize = dns.loc[:,
                  ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                   'dnsdb_record_SOA', 'dnsdb_record_TXT']]
    binarized = LabelBinarizer().fit_transform(to_binarize)
    dns.loc[:, ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                'dnsdb_record_SOA', 'dnsdb_record_TXT']] = binarized
    # impute dns nb_queries, active_period (missing -> 0)
    to_impute = dns.loc[:, ['dnsdb_active_period', 'dnsdb_nb_queries']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_active_period', 'dnsdb_nb_queries']] = imputed
    # impute dns timestamps (missing -> 0)
    to_impute = dns.loc[:, ['dnsdb_first_seen_before_now', 'dnsdb_first_seen_before_validity']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_first_seen_before_now', 'dnsdb_first_seen_before_validity']] = imputed
    #### Join data ####
    # Columns held out of the model input and returned for post-analysis.
    post_analysis_columns = ['malware_family', 'malware_wordlist_based_dga'] + \
                            ['domain', 'malware_validity_length',
                             'topsites_alexa_average_rank', 'topsites_majestic_average_rank',
                             'topsites_quantcast_average_rank', 'topsites_umbrella_average_rank',
                             'malware_validity_start', 'malware_validity_end', 'whois_registration_date', 'whois_registrar']
    # ['openintel_available', 'openintel_first_seen_before_now',
    #  'openintel_first_seen_before_validity', 'openintel_nb_days_seen_A',
    #  'openintel_nb_days_seen_AAAA', 'openintel_nb_days_seen_MX', 'openintel_nb_days_seen_NS',
    #  'openintel_nb_days_seen_SOA']
    # Keep only the sources selected by the bitmask, in fixed order.
    datasources = [source for p, source in zip(sourcepattern, [reputation, dns, whois, openintel]) if int(p)]
    columnnames = [cn for p, cn in zip(sourcepattern, [reputation.columns, dns.columns, whois.columns, openintel.columns]) if int(p)]
    post_analysis_columns = [x for x in post_analysis_columns if x in np.concatenate(columnnames)]
    print(len(datasources[0].index))
    print(len(label.index))
    # inner join on the domain index -> intersection of selected sources + label
    data = pd.concat(datasources + [label], axis=1, join='inner')
    features = data.drop(['class']+post_analysis_columns, axis=1)
    labels = data['class']
    post_analysis_labels = data[post_analysis_columns]
    # encode the labels
    le = LabelEncoder()
    labels = le.fit_transform(labels)
    # print(le.classes_)
    # print("Benign is ", le.transform(["benign"]))
    # print("** FINAL COLUMNS: **")
    # print(features.columns)
    # print(features.shape)
    return features, labels, post_analysis_labels
def loadAndCleanDataMaxDom(sourcepattern, malwareFamily, year, whoisdatacompl=True):
    """Load the per-source CSVs for `year`, clean/impute each source, and join the
    sources selected by `sourcepattern` into a single modelling table.

    Parameters:
        sourcepattern: four flags (reputation, dnsdb, whois, openintel); truthy
            entries select which cleaned sources are inner-joined on the domain index.
        malwareFamily: if True, also binarize the wordlist-DGA flag and one-hot
            encode the `malware_family` column.
        year: dataset sub-directory name under 'datasets/'.
        whoisdatacompl: if True, one-hot encode all four imputed whois booleans;
            otherwise only privacy/valid_phone (the other two are still dropped).

    Returns:
        (features, labels, post_analysis_labels): numeric feature DataFrame,
        integer-encoded class labels (via LabelEncoder), and the columns held
        out for post-hoc analysis.

    NOTE(review): the archived copy of this function had its indentation
    flattened; statement nesting below is the reconstruction — confirm against
    the original repository.
    NOTE(review): `Binarizer().transform(...)` is called without `fit`; recent
    scikit-learn raises NotFittedError here, and `get_feature_names` was removed
    in scikit-learn 1.2 (now `get_feature_names_out`) — this code pins an older
    sklearn.
    """
    DATAPATH = 'datasets/' + year
    FILENAME = 'weka_multi_output_features_all_instances_none.csv'
    FILENAME1 = 'weka_multi_output_features_all_instances_whois.csv'
    FILENAME2 = 'weka_multi_output_features_all_instances_dnsdb.csv'
    FILENAME3 = 'use_in_weka.csv'
    # Availability flags per domain (which external sources have data).
    weka = pd.read_csv(os.path.join(DATAPATH, FILENAME3))
    weka.index = weka['domain']
    available = weka.loc[:, ['dnsdb_available', 'whois_available', 'openintel_available']]
    # Base table: reputation features (first 28 columns) + class label.
    none = pd.read_csv(os.path.join(DATAPATH, FILENAME))
    none.index = none['domain']
    reputation = none.iloc[:, 0:28]
    label = pd.DataFrame(none['class'])
    whois = pd.read_csv(os.path.join(DATAPATH, FILENAME1))
    whois.index = whois['domain']
    whois = whois.iloc[:, 28:-1]
    # OpenIntel columns live inside the base table; keep only rows with data.
    openintel = none.iloc[:, 9:17]
    openintel = openintel[openintel['openintel_available'] == True]
    print()  # stray debug print (left as-is; removing it is a code change)
    dns = pd.read_csv(os.path.join(DATAPATH, FILENAME2), parse_dates=['malware_validity_start', 'malware_validity_end'])
    dns.index = dns['domain']
    dns = dns.iloc[:, 2:13]
    # Restrict dnsdb rows to domains flagged as available in dnsdb.
    available_dns = pd.concat([dns, available], axis=1, join='inner')
    dns = dns[available_dns['dnsdb_available'] == True]
    #### Open Intel clean up ####
    # redifine label, as openintel offers least amount of labels
    openintel = openintel.drop(['openintel_available'], axis=1)
    #### Reputation clean up ####
    # The OpenIntel columns were split out above, so drop them from reputation.
    more_columns_to_drop = ['openintel_available', 'openintel_first_seen_before_now',
                            'openintel_first_seen_before_validity', 'openintel_nb_days_seen_A',
                            'openintel_nb_days_seen_AAAA', 'openintel_nb_days_seen_MX', 'openintel_nb_days_seen_NS',
                            'openintel_nb_days_seen_SOA']
    reputation = reputation.drop(more_columns_to_drop, axis=1)
    ### Dates ###
    # Epoch-second columns -> datetimes (kept only for post-analysis output).
    reputation['malware_validity_start'] = pd.to_datetime(reputation['malware_validity_start'], unit='s')
    reputation['malware_validity_end'] = pd.to_datetime(reputation['malware_validity_end'], unit='s')
    whois['whois_registration_date'] = pd.to_datetime(whois['whois_registration_date'], unit='s')
    # binarize the 0/1-ish presence flags
    if malwareFamily == True:
        to_binarize = reputation.loc[:, ['malware_wordlist_based_dga', 'ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence',
                                         'topsites_quantcast_presence', 'topsites_umbrella_presence']]
        binarized = Binarizer().transform(to_binarize)
        reputation.loc[:,
                       ['malware_wordlist_based_dga', 'ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence', 'topsites_quantcast_presence',
                        'topsites_umbrella_presence']] = binarized
    else:
        to_binarize = reputation.loc[:, ['ct_has_certificate', 'topsites_alexa_presence',
                                         'topsites_majestic_presence',
                                         'topsites_quantcast_presence', 'topsites_umbrella_presence']]
        binarized = Binarizer().transform(to_binarize)
        reputation.loc[:,
                       ['ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence',
                        'topsites_quantcast_presence',
                        'topsites_umbrella_presence']] = binarized
    # encode categorical feature (one-hot the malware family name)
    if malwareFamily == True:
        enco = OneHotEncoder()
        categorical = enco.fit_transform(reputation.loc[:, ['malware_family']])
        df = pd.DataFrame(categorical.toarray(), columns=enco.get_feature_names(
            ['malware_family']), index=reputation.index)
        reputation = pd.concat([reputation, df], axis=1)
        reputation = reputation.drop(['malware_family'], axis=1)
    # impute search_wayback: absence of Wayback data is treated as zero hits
    to_impute = reputation.loc[:, ['search_pages_found_wayback_machine', 'search_wayback_machine_first_seen_before_now',
                                   'search_wayback_machine_first_seen_before_validity']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    reputation.loc[:, ['search_pages_found_wayback_machine', 'search_wayback_machine_first_seen_before_now',
                       'search_wayback_machine_first_seen_before_validity']] = imputed
    #### whois clean up ####
    # impute whois_privacy and whois_temporary_mail with Not known
    # (booleans are first mapped to strings so 'Not known' becomes a third category)
    booleanDictionary = {True: 'TRUE', False: 'FALSE'}
    whois.loc[:, 'whois_privacy'] = whois.loc[:, 'whois_privacy'].map(booleanDictionary)
    whois.loc[:, 'whois_temporary_mail'] = whois.loc[:, 'whois_temporary_mail'].map(booleanDictionary)
    whois.loc[:, 'whois_has_been_renewed'] = whois.loc[:, 'whois_has_been_renewed'].map(booleanDictionary)
    whois.loc[:, 'whois_valid_phone'] = whois.loc[:, 'whois_valid_phone'].map(booleanDictionary)
    to_impute = whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value='Not known').fit_transform(to_impute)
    whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']] = imputed
    # categroical features, those that are imputed
    whoisdatacomplete = whoisdatacompl
    if whoisdatacomplete:
        enc = OneHotEncoder()
        categorical = enc.fit_transform(
            whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']])
        df = pd.DataFrame(categorical.toarray(), columns=enc.get_feature_names(
            ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']),
            index=whois.index)
        whois = pd.concat([whois, df], axis=1)
        whois = whois.drop(['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone'],
                           axis=1)
    else:
        enc = OneHotEncoder()
        categorical = enc.fit_transform(
            whois.loc[:, ['whois_privacy', 'whois_valid_phone']])
        df = pd.DataFrame(categorical.toarray(), columns=enc.get_feature_names(
            ['whois_privacy', 'whois_valid_phone']), index=whois.index)
        whois = pd.concat([whois, df], axis=1)
        whois = whois.drop(['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone'], axis=1)
    # impute with mean whois_registration_age and whois_registration_and_validity_start_date and whois_registration_period
    to_impute = whois.loc[:, ['whois_registration_age', 'whois_registration_and_family_start_date',
                              'whois_registration_and_validity_start_date', 'whois_registration_period']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='mean').fit_transform(to_impute)
    whois.loc[:,
              ['whois_registration_age', 'whois_registration_and_family_start_date', 'whois_registration_and_validity_start_date',
               'whois_registration_period']] = imputed
    #### dsndb clean up ####
    # impute DNS records to False (no observation => record type absent)
    to_impute = dns.loc[:,
                        ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                         'dnsdb_record_SOA', 'dnsdb_record_TXT']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=False).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                'dnsdb_record_SOA', 'dnsdb_record_TXT']] = imputed
    # binarize DNS record booleans
    to_binarize = dns.loc[:,
                          ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                           'dnsdb_record_SOA', 'dnsdb_record_TXT']]
    binarized = LabelBinarizer().fit_transform(to_binarize)
    dns.loc[:, ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                'dnsdb_record_SOA', 'dnsdb_record_TXT']] = binarized
    # impute dns nb_queries, active_period
    to_impute = dns.loc[:, ['dnsdb_active_period', 'dnsdb_nb_queries']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_active_period', 'dnsdb_nb_queries']] = imputed
    # impute dns timestamps
    to_impute = dns.loc[:, ['dnsdb_first_seen_before_now', 'dnsdb_first_seen_before_validity']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_first_seen_before_now', 'dnsdb_first_seen_before_validity']] = imputed
    #### Join data ####
    # Columns excluded from training features but returned for post-hoc analysis.
    post_analysis_columns = ['malware_family', 'malware_wordlist_based_dga'] + \
                            ['domain', 'malware_validity_length',
                             'topsites_alexa_average_rank', 'topsites_majestic_average_rank',
                             'topsites_quantcast_average_rank', 'topsites_umbrella_average_rank',
                             'malware_validity_start', 'malware_validity_end', 'whois_registration_date', 'whois_registrar']
    # ['openintel_available', 'openintel_first_seen_before_now',
    # 'openintel_first_seen_before_validity', 'openintel_nb_days_seen_A',
    # 'openintel_nb_days_seen_AAAA', 'openintel_nb_days_seen_MX', 'openintel_nb_days_seen_NS',
    # 'openintel_nb_days_seen_SOA']
    # Keep only the sources selected by sourcepattern; inner join intersects domains.
    datasources = [source for p, source in zip(sourcepattern, [reputation, dns, whois, openintel]) if int(p)]
    columnnames = [cn for p, cn in
                   zip(sourcepattern, [reputation.columns, dns.columns, whois.columns, openintel.columns]) if int(p)]
    post_analysis_columns = [x for x in post_analysis_columns if x in np.concatenate(columnnames)]
    data = pd.concat(datasources + [label], axis=1, join='inner')
    features = data.drop(['class'] + post_analysis_columns, axis=1)
    labels = data['class']
    post_analysis_labels = data[post_analysis_columns]
    # encode the labels
    le = LabelEncoder()
    labels = le.fit_transform(labels)
    # print(le.classes_)
    # print("Benign is ", le.transform(["benign"]))
    # print("** FINAL COLUMNS: **")
    # print(features.columns)
    # print(features.shape)
    return features, labels, post_analysis_labels
def loadAndCleanDataPerDataSet(malwareFamily, year, whoisdatacompl=True):
    """Load and clean every data source for `year` and return them separately
    (unjoined), plus the availability flags and the class labels.

    Unlike loadAndCleanDataMaxDom this keeps each source as its own DataFrame so
    callers (e.g. loadAndCleanDataExactPattern) can join arbitrary availability
    patterns themselves.

    :param malwareFamily: whether to include malware family as a feature
    :param year: dataset sub-directory name under 'datasets/'
    :param whoisdatacompl: if True, one-hot encode all four imputed whois
        booleans; otherwise only privacy/valid_phone
    :return: (available, reputation, dns, whois, openintel, label)

    NOTE(review): indentation was reconstructed during review (the archived copy
    was flattened); in particular the drop of whois_registration_date/registrar
    was placed at function level — confirm against the original repo.
    """
    DATAPATH = 'datasets/' + year
    FILENAME = 'weka_multi_output_features_all_instances_none.csv'
    FILENAME1 = 'weka_multi_output_features_all_instances_whois.csv'
    FILENAME2 = 'weka_multi_output_features_all_instances_dnsdb.csv'
    FILENAME3 = 'use_in_weka.csv'
    # Per-domain availability flags.
    weka = pd.read_csv(os.path.join(DATAPATH, FILENAME3))
    weka.index = weka['domain']
    weka = weka.drop_duplicates()
    available = weka.loc[:, ['dnsdb_available', 'whois_available', 'openintel_available']]
    # Base table; rows with a null ct_has_certificate are discarded.
    none = pd.read_csv(os.path.join(DATAPATH, FILENAME))
    none.index = none['domain']
    none = none.drop_duplicates()
    none = none.loc[none['ct_has_certificate'].isnull() == False]
    label = none.iloc[:, -1]
    reputation = none.iloc[:, 0:28]
    whois = pd.read_csv(os.path.join(DATAPATH, FILENAME1))
    whois.index = whois['domain']
    whois = whois.drop_duplicates()
    # label = pd.DataFrame(whois.iloc[:, -1])
    openintel = none.iloc[:, 9:17]
    whois = whois.iloc[:, 28:-1]
    dns = pd.read_csv(os.path.join(DATAPATH, FILENAME2), parse_dates=['malware_validity_start', 'malware_validity_end'])
    dns.index = dns['domain']
    dns = dns.drop_duplicates()
    dns = dns.iloc[:, 2:13]
    # Keep dnsdb rows only for domains flagged available AND present in dns.
    ind_intersection = available[available['dnsdb_available'] == True].index.intersection(dns.index)
    dns = dns.loc[ind_intersection]
    #### Open Intel clean up ####
    openintel = openintel[openintel['openintel_available'] == True]
    # redifine label, as openintel offers least amount of labels
    # label = pd.DataFrame(openintel.iloc[:, -1])
    openintel = openintel.drop(['openintel_available'], axis=1)
    # Drop OpenIntel columns plus all post-analysis columns from reputation.
    more_columns_to_drop = ['openintel_available', 'openintel_first_seen_before_now',
                            'openintel_first_seen_before_validity', 'openintel_nb_days_seen_A',
                            'openintel_nb_days_seen_AAAA', 'openintel_nb_days_seen_MX', 'openintel_nb_days_seen_NS',
                            'openintel_nb_days_seen_SOA', 'malware_family', 'malware_wordlist_based_dga',
                            'topsites_alexa_average_rank', 'topsites_majestic_average_rank',
                            'topsites_quantcast_average_rank', 'topsites_umbrella_average_rank',
                            'malware_validity_start', 'malware_validity_end', 'domain', 'malware_validity_length'
                            ]
    reputation = reputation.drop(more_columns_to_drop, axis=1)
    ### Dates ###
    # reputation['malware_validity_start'] = pd.to_datetime(reputation['malware_validity_start'], unit='s')
    # reputation['malware_validity_end'] = pd.to_datetime(reputation['malware_validity_end'], unit='s')
    whois['whois_registration_date'] = pd.to_datetime(whois['whois_registration_date'], unit='s')
    # binarize the presence flags
    # NOTE(review): Binarizer().transform without fit and OneHotEncoder.get_feature_names
    # require an older scikit-learn (get_feature_names was removed in sklearn 1.2).
    if malwareFamily == True:
        to_binarize = reputation.loc[:, ['malware_wordlist_based_dga', 'ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence',
                                         'topsites_quantcast_presence', 'topsites_umbrella_presence']]
        binarized = Binarizer().transform(to_binarize)
        reputation.loc[:,
                       ['malware_wordlist_based_dga', 'ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence', 'topsites_quantcast_presence',
                        'topsites_umbrella_presence']] = binarized
    else:
        to_binarize = reputation.loc[:, ['ct_has_certificate', 'topsites_alexa_presence',
                                         'topsites_majestic_presence',
                                         'topsites_quantcast_presence', 'topsites_umbrella_presence']]
        binarized = Binarizer().transform(to_binarize)
        reputation.loc[:,
                       ['ct_has_certificate', 'topsites_alexa_presence', 'topsites_majestic_presence',
                        'topsites_quantcast_presence',
                        'topsites_umbrella_presence']] = binarized
    # encode categorical feature
    # NOTE(review): malware_family is dropped above in more_columns_to_drop, so
    # this branch would raise KeyError when malwareFamily=True — confirm intent.
    if malwareFamily == True:
        enco = OneHotEncoder()
        categorical = enco.fit_transform(reputation.loc[:, ['malware_family']])
        df = pd.DataFrame(categorical.toarray(), columns=enco.get_feature_names(
            ['malware_family']), index=reputation.index)
        reputation = pd.concat([reputation, df], axis=1)
        reputation = reputation.drop(['malware_family'], axis=1)
    # impute search_wayback: missing Wayback data is treated as zero hits
    to_impute = reputation.loc[:, ['search_pages_found_wayback_machine', 'search_wayback_machine_first_seen_before_now',
                                   'search_wayback_machine_first_seen_before_validity']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    reputation.loc[:, ['search_pages_found_wayback_machine', 'search_wayback_machine_first_seen_before_now',
                       'search_wayback_machine_first_seen_before_validity']] = imputed
    #### whois clean up ####
    # impute whois_privacy and whois_temporary_mail with Not known
    # (booleans become strings so 'Not known' can act as a third category)
    booleanDictionary = {True: 'TRUE', False: 'FALSE'}
    whois.loc[:, 'whois_privacy'] = whois.loc[:, 'whois_privacy'].map(booleanDictionary)
    whois.loc[:, 'whois_temporary_mail'] = whois.loc[:, 'whois_temporary_mail'].map(booleanDictionary)
    whois.loc[:, 'whois_has_been_renewed'] = whois.loc[:, 'whois_has_been_renewed'].map(booleanDictionary)
    whois.loc[:, 'whois_valid_phone'] = whois.loc[:, 'whois_valid_phone'].map(booleanDictionary)
    to_impute = whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value='Not known').fit_transform(to_impute)
    whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']] = imputed
    # categroical features, those that are imputed
    whoisdatacomplete = whoisdatacompl
    if whoisdatacomplete:
        enc = OneHotEncoder()
        categorical = enc.fit_transform(
            whois.loc[:, ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']])
        df = pd.DataFrame(categorical.toarray(), columns=enc.get_feature_names(
            ['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone']),
            index=whois.index)
        whois = pd.concat([whois, df], axis=1)
        whois = whois.drop(['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone'],
                           axis=1)
    else:
        enc = OneHotEncoder()
        categorical = enc.fit_transform(
            whois.loc[:, ['whois_privacy', 'whois_valid_phone']])
        df = pd.DataFrame(categorical.toarray(), columns=enc.get_feature_names(
            ['whois_privacy', 'whois_valid_phone']), index=whois.index)
        whois = pd.concat([whois, df], axis=1)
        whois = whois.drop(['whois_privacy', 'whois_temporary_mail', 'whois_has_been_renewed', 'whois_valid_phone'],
                           axis=1)
    # Registration date/registrar are post-analysis only in this pipeline.
    whois = whois.drop(['whois_registration_date', 'whois_registrar'], axis=1)
    # impute with mean whois_registration_age and whois_registration_and_validity_start_date and whois_registration_period
    to_impute = whois.loc[:, ['whois_registration_age', 'whois_registration_and_family_start_date',
                              'whois_registration_and_validity_start_date', 'whois_registration_period']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='mean').fit_transform(to_impute)
    whois.loc[:,
              ['whois_registration_age', 'whois_registration_and_family_start_date', 'whois_registration_and_validity_start_date',
               'whois_registration_period']] = imputed
    #### dsndb clean up ####
    # impute DNS records to False (no observation => record type absent)
    to_impute = dns.loc[:,
                        ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                         'dnsdb_record_SOA', 'dnsdb_record_TXT']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=False).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                'dnsdb_record_SOA', 'dnsdb_record_TXT']] = imputed
    # binarize DNS record booleans
    to_binarize = dns.loc[:,
                          ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                           'dnsdb_record_SOA', 'dnsdb_record_TXT']]
    binarized = LabelBinarizer().fit_transform(to_binarize)
    dns.loc[:, ['dnsdb_record_A', 'dnsdb_record_AAAA', 'dnsdb_record_CNAME', 'dnsdb_record_MX', 'dnsdb_record_NS',
                'dnsdb_record_SOA', 'dnsdb_record_TXT']] = binarized
    # impute dns nb_queries, active_period
    to_impute = dns.loc[:, ['dnsdb_active_period', 'dnsdb_nb_queries']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_active_period', 'dnsdb_nb_queries']] = imputed
    # impute dns timestamps
    to_impute = dns.loc[:, ['dnsdb_first_seen_before_now', 'dnsdb_first_seen_before_validity']]
    imputed = SimpleImputer(missing_values=np.nan, strategy='constant', fill_value=0).fit_transform(to_impute)
    dns.loc[:, ['dnsdb_first_seen_before_now', 'dnsdb_first_seen_before_validity']] = imputed
    return available, reputation, dns, whois, openintel, label
def loadAndCleanDataExactPattern(code, available, reputation, dns, whois, openintel, label):
    """Select domains whose availability flags match ``code`` exactly and join features.

    Parameters:
        code: four truthy/falsy flags (reputation, dnsdb, whois, openintel).
            code[0] gates the whole selection; code[1:] both select which extra
            sources are joined and fix the required values of the corresponding
            availability columns.
        available: DataFrame with boolean columns 'dnsdb_available',
            'whois_available', 'openintel_available' (domain index).
        reputation, dns, whois, openintel: per-source feature DataFrames (domain index).
        label: DataFrame with a 'class' column ('malicious'/'benign').

    Returns:
        (features, labels): inner-joined feature DataFrame and a 1/0 label
        Series (malicious=1, benign=0), both sorted by index.  Both are empty
        when code[0] is falsy.
    """
    if code[0]:
        # Keep only domains whose availability flags equal the requested pattern.
        df = available.loc[(available['dnsdb_available'] == code[1]) &
                           (available['whois_available'] == code[2]) &
                           (available['openintel_available'] == code[3])]
        # Join reputation plus every selected source; inner join intersects domains.
        datasets = [ds for ds, i in zip([dns, whois, openintel], code[1:]) if i]
        features = pd.concat([reputation] + datasets + [df], axis=1, join='inner')
        features = features.drop(['dnsdb_available', 'whois_available', 'openintel_available'], axis=1)
        labelzz = pd.concat([label, df], axis=1, join='inner')
        labelzz = labelzz.loc[:, 'class']
        # Encode string classes as integers.
        classDictionary = {'malicious': 1, 'benign': 0}
        labelzz = labelzz.map(classDictionary)
    else:
        features = pd.DataFrame()
        # Explicit dtype: bare pd.Series() relied on the deprecated implicit
        # default dtype for empty Series (DeprecationWarning in pandas 1.x).
        labelzz = pd.Series(dtype=object)
    return features.sort_index(), labelzz.sort_index()
def loadAndCleanDataExactPatternAlt(code, available, reputation, dns, whois, openintel, label):
    """Assemble features/labels for domains whose availability flags equal ``code``.

    Unlike loadAndCleanDataExactPattern, reputation is itself a selectable source
    (code[0] must match 'reputation_available') and a result is always produced.

    Returns (features, labels): inner-joined features and a 1/0 label Series
    (malicious=1, benign=0), both sorted by index.
    """
    flag_columns = ['reputation_available', 'dnsdb_available',
                    'whois_available', 'openintel_available']
    # Build the exact-match mask one flag at a time.
    mask = pd.Series(True, index=available.index)
    for column, wanted in zip(flag_columns, code):
        mask &= (available[column] == wanted)
    df = available.loc[mask]
    # Join every source whose flag is set; the inner join intersects domains.
    selected = [frame for frame, wanted in zip([reputation, dns, whois, openintel], code) if wanted]
    features = pd.concat(selected + [df], axis=1, join='inner')
    features = features.drop(flag_columns, axis=1)
    labels = pd.concat([label, df], axis=1, join='inner').loc[:, 'class']
    labels = labels.map({'malicious': 1, 'benign': 0})
    return features.sort_index(), labels.sort_index()
|
{"/feature_generation/generate_feature_values.py": ["/feature_generation/features.py"], "/feature_generation/features.py": ["/feature_generation/retrieve_sinkhole_data.py"]}
|
19,323
|
koles161rus/invertedPendulum
|
refs/heads/master
|
/inverted_pendulum (1)/inverted_pendulum/fuzzy/fuzzy_controller.py
|
from .fuzzyinference import FuzzyControl
from math import pi
def get_controller():
    """Build the fuzzy controller mapping (theta, dtheta) to a cart force.

    Both inputs use five membership labels (vn/mn/z/mp/vp) built from the same
    1/12-scaled shapes; the force output adds 'sn'/'sp' for seven levels.  The
    25 rules are added in the same order as the original hand-written list.
    """
    ctrl = FuzzyControl()

    def scaled_input_memberships(division):
        # Same base shapes for theta and dtheta, shrunk by `division`.
        return [
            ['trapezoid_left', -1.57 / division, -0.58 / division, 'vn'],
            ['triangle', -0.3927 / division, 2.0 * 0.327 / division, 'mn'],
            ['triangle', 0.0, 2.0 * 0.1745 / division, 'z'],
            ['triangle', 0.3927 / division, 2.0 * 0.327 / division, 'mp'],
            ['trapezoid_right', 0.58 / division, 1.57 / division, 'vp'],
        ]

    ctrl.add_input('theta', (-4 * pi, 4 * pi), scaled_input_memberships(12))
    ctrl.add_input('dtheta', (-200, 200), scaled_input_memberships(12))

    ctrl.add_output('force', (-1, 1), [
        ['trapezoid_left', -0.99, -0.75, 'vn'],
        ['triangle', -0.6, 0.4, 'sn'],
        ['triangle', -0.3, 0.4, 'mn'],
        ['triangle', 0.0, 0.4, 'z'],
        ['triangle', 0.3, 0.4, 'mp'],
        ['triangle', 0.6, 0.4, 'sp'],
        ['trapezoid_right', 0.75, 0.99, 'vp'],
    ])

    # Rule table: theta label -> {dtheta label: force label}, in original order.
    rule_table = [
        ('vn', {'vn': 'vp', 'mn': 'vp', 'z': 'vp', 'mp': 'vp', 'vp': 'vp'}),
        ('vp', {'vn': 'vn', 'mn': 'vn', 'z': 'vn', 'mp': 'vn', 'vp': 'vn'}),
        ('mn', {'vn': 'vp', 'mn': 'sp', 'z': 'mp', 'mp': 'mp', 'vp': 'z'}),
        ('z', {'vn': 'sp', 'mn': 'mp', 'z': 'z', 'mp': 'mn', 'vp': 'sn'}),
        ('mp', {'vn': 'z', 'mn': 'mn', 'z': 'mn', 'mp': 'sn', 'vp': 'vn'}),
    ]
    for theta_label, row in rule_table:
        for dtheta_label, force_label in row.items():
            ctrl.add_rule({'theta': theta_label, 'dtheta': dtheta_label},
                          {'force': force_label})
    return ctrl
|
{"/inverted_pendulum (1)/inverted_pendulum/qlearning/qlearning.py": ["/inverted_pendulum (1)/inverted_pendulum/fuzzy/fuzzy_controller.py"]}
|
19,324
|
koles161rus/invertedPendulum
|
refs/heads/master
|
/inverted_pendulum (1)/inverted_pendulum/simulation/simulation.py
|
from math import sin, cos, pi
import random
class Simulator:
    """Cart-pole (inverted pendulum on a cart) physics simulator.

    The state vector is ``[x, x_dot, theta, theta_dot]``: cart position and
    velocity, pole angle and angular velocity.
    """

    def simulate_step(self, state, F, dt):
        """Advance ``state`` by one integration step of length ``dt`` under force ``F``.

        Returns a new 4-element list; ``state`` itself is not modified.
        """
        x, x_dot, theta, theta_dot = state
        # Constants: pole mass, cart mass, pole length, inertia-like term,
        # gravity, and cart friction coefficient.
        m = 0.2
        M = 3
        L = 0.3
        K = 0.006
        g = 9.81
        b = 0.1
        # Pole projections (A horizontal, B vertical).  The original code also
        # pre-assigned A/B/C/D to state components here — dead stores that were
        # immediately overwritten; they have been removed.
        A = L * sin(theta)
        B = L * cos(theta)
        # D / C is the angular acceleration of the pole.
        C = (B**2 + K + B**2 * m + A**2 * m) * (M + m)
        D = F * B * m + B * m**2 * A * theta_dot**2 - b * x_dot * B * m - A * g * (M + m)
        theta_dot_dot = D / C
        x_dot_dot = ((F + m * A * theta_dot**2 - b * x_dot) - D / C * B * m) / (M + m)
        # Euler-style integration: velocities first, then positions with a
        # half-acceleration term.
        x_dot = state[1] + x_dot_dot * dt
        x = state[0] + x_dot * dt + x_dot_dot * dt * dt / 2
        theta_dot = state[3] + theta_dot_dot * dt
        theta = state[2] + theta_dot * dt + theta_dot_dot * dt * dt / 2
        return [x, x_dot, theta, theta_dot]

    def random_state(self, state):
        """Reset ``state`` in place to the fixed restart pose and return it.

        NOTE(review): despite the name (and the module-level ``import random``),
        the restart pose is deterministic: x=10, theta = pi - 60 degrees, zero
        velocities.  Confirm whether randomization was intended.
        """
        state[0] = 10
        state[1] = 0
        state[2] = pi - 60 * pi / 180
        state[3] = 0
        return state
|
{"/inverted_pendulum (1)/inverted_pendulum/qlearning/qlearning.py": ["/inverted_pendulum (1)/inverted_pendulum/fuzzy/fuzzy_controller.py"]}
|
19,325
|
koles161rus/invertedPendulum
|
refs/heads/master
|
/inverted_pendulum (1)/main.py
|
from inverted_pendulum.qlearning.qlearning import QLearning
from inverted_pendulum.fuzzy.fuzzy_controller import *
from numpy import pi, cos, sin
import random
import matplotlib.pyplot as plt
from inverted_pendulum.simulation.simulation import Simulator
import numpy as np
class InvertedPendulumBalancer:
    """Trains a (fuzzy-initialized) Q-learning controller to balance the
    simulated cart-pole, with live matplotlib visualization.

    NOTE(review): indentation was reconstructed during review (the archived
    copy was flattened); confirm nesting against the original repository.
    """

    def __init__(self):
        # Simulation step (seconds per iteration).
        self.dt = 0.01
        random.seed(200)  # deterministic perturbations/exploration
        self.max_force = 125          # scale applied to the controller's [-1, 1] action
        self.step_n = int(10 * 10**6)  # total training iterations
        self.last_n = 500 * 10**3      # window (steps) used for the stability check
        self.simulator = Simulator()
        self.controller = QLearning(max_theta=2*pi, max_dtheta=30, max_x=60000, max_dx=40, n_x=10, n_dx=10, n_theta=6, n_dtheta=20, n_action=5, is_fuzzy=True)

    def plot_survival_times(self):
        # Plot the time between consecutive reset events.
        # NOTE(review): `lines` and `last_n` are not defined in this scope (they
        # are locals of run()) — this method raises NameError as written.
        survival_times = []
        for i in range(len(lines)):
            if i >= 1:
                survival_times.append(10*(lines[i][0] - lines[i - 1][0]))
        plt.plot(survival_times)
        plt.plot([last_n for s in survival_times])
        plt.show()

    def plot_states():
        # NOTE(review): missing `self`, and `thetas`/`lines`/`xs` are undefined
        # here (locals of run()) — this method is broken as written.
        plt.plot(thetas)
        for l in lines:
            plt.axvline(l[0], color=l[1], alpha=0.5)
        plt.plot(xs)
        plt.show()

    def run(self):
        """Main training loop: simulate, act, learn, and periodically redraw."""
        # state = [x, x_dot, theta, theta_dot]; theta=pi is the upright pose here.
        state = [10, 0, pi, 0]
        t = 0
        plot_n = 7            # redraw every (plot_n - 1) iterations
        plot_resolution = 1   # record every plot_resolution-th state
        states = []
        last_stable = 0
        survival_times = []
        survival_time = 0
        lines = []            # (plot index, color) markers: 'b' stable reset, 'r' fall
        # NOTE(review): rc_context is a context manager; calling it without
        # `with` has no effect on the rcParams.
        plt.rc_context({'axes.edgecolor': 'orange', 'xtick.color': 'red', 'ytick.color': 'green', 'figure.facecolor': 'white', 'axes.linewidth': 2})
        plt.ion()
        mng = plt.get_current_fig_manager()
        mng.resize(*mng.window.maxsize())
        theta_ax = plt.subplot2grid((4, 3), (2, 0), colspan=3)
        x_ax = plt.subplot2grid((4, 3), (3, 0), colspan=3)
        cart_ax = plt.subplot2grid((2, 3), (0, 0), colspan=3)
        for i in range(self.step_n):
            # Small random disturbance on the pole angle each step.
            state[2] += (random.random() - 0.5) * 0.001
            survival_time += 1
            t = t + self.dt
            prev_state = state
            if i % plot_resolution == 0:
                survival_times.append(survival_time)
                states.append(state)
            if i % 1000 == 0:
                # Periodic stability check over the last last_n recorded states:
                # if the pole has been steady, mark it and restart the episode.
                if len(states) > self.last_n / plot_resolution:
                    xs = [s[0] for s in states]
                    thetas = [s[2] for s in states]
                    last_thetas = thetas[-int(self.last_n/plot_resolution):]
                    last_xs = xs[-int(self.last_n/plot_resolution):]
                    theta_std = np.std(last_thetas)
                    x_std = np.std(last_xs)
                    if theta_std < 0.1 and xs[-1] < 50 and (last_stable == 0 or i - last_stable > self.last_n):
                        lines.append((i / plot_resolution, 'b'))
                        last_stable = i
                        survival_time = 0
                        state = self.simulator.random_state(state)
            # Fall detection: pole left the upper half-plane -> restart.
            theta = state[2]
            if theta <= pi / 2 or theta >= 3 * pi / 2:
                lines.append((i / plot_resolution, 'r'))
                state = self.simulator.random_state(state)
                survival_time = 0
            # The Q-learner's state uses theta shifted by pi.
            q_state = [state[0], state[1], state[2] + pi, state[3]]
            action = self.controller.action(q_state)
            force = self.max_force * action[1]
            state = self.simulator.simulate_step(state, force, self.dt)
            next_q_state = [state[0], state[1], state[2] + pi, state[3]]
            # Reward: small bonus near upright, quadratic penalty on angle error,
            # extra penalty when the cart drifts far from the origin.
            reward = 5
            if abs(pi - state[2]) >= 0.1:
                reward = -30 * (abs(pi - state[2]) ** 2)
            if abs(state[0]) >= 15:
                reward -= abs(state[0]) ** 1.5
            self.controller.update(q_state, action[0], next_q_state, reward)
            if i > 0 and i % (plot_n - 1) == 0:
                # Redraw the x/theta traces and the cart-pole sketch.
                x_ax.plot([s[0] for s in states], color='g')
                theta_ax.plot([s[2] for s in states], color='r')
                # NOTE(review): Axes.lines is read-only in newer Matplotlib;
                # assigning [] here breaks on recent versions — confirm pin.
                cart_ax.lines = []
                cart_width = 10
                cart_height = 0.5
                factor = 2
                r = 1
                cart_ax.axis([-factor * cart_width, factor * cart_width, 0, factor * cart_width])
                L = 6 * cart_height
                L_discount = (L + L * sin(pi/2 - state[2]) ** 2)
                cart_ax.plot([state[0], state[0] - L_discount * cos(pi/2 - state[2])],
                             [1.5 * cart_height, 1.5 * cart_height - L_discount * sin(pi/2 - state[2])],
                             color='b',
                             solid_capstyle="round",
                             linewidth=2)
                plt.pause(0.000001)
            if i % 10 == 0:
                print(i)
|
{"/inverted_pendulum (1)/inverted_pendulum/qlearning/qlearning.py": ["/inverted_pendulum (1)/inverted_pendulum/fuzzy/fuzzy_controller.py"]}
|
19,326
|
koles161rus/invertedPendulum
|
refs/heads/master
|
/inverted_pendulum (1)/inverted_pendulum/qlearning/qlearning.py
|
import random
from math import pi
import numpy as np
from ..fuzzy.fuzzy_controller import get_controller
class QLearning:
    """Tabular Q-learning over a discretized cart-pole state space, optionally
    seeded from the fuzzy controller's rule base.

    NOTE(review): indentation was reconstructed during review (the archived
    copy was flattened); confirm nesting against the original repository.
    """

    def __init__(self, max_theta, max_dtheta, max_x, max_dx, n_theta, n_dtheta, n_x, n_dx, n_action, is_fuzzy=False):
        self.init_constants()
        # Discretization bounds per state dimension.
        self.max_theta = max_theta
        self.max_dtheta = max_dtheta
        self.max_x = max_x
        self.max_dx = max_dx
        self.n_action = n_action
        self.n_theta = n_theta
        # NOTE(review): attribute name typo ('n_dheta'); it is written but the
        # Q-table shape uses the n_dtheta argument directly, so it is unused.
        self.n_dheta = n_dtheta
        self.n_x = n_x
        self.n_dx = n_dx
        self.n_action = n_action  # NOTE(review): duplicate assignment (already set above)
        # Q-table axes: (x, dx, theta, dtheta, action).
        shape = (n_x, n_dx, n_theta, n_dtheta, n_action)
        self.Q = self.initial_Q * np.ones(shape, dtype=float)
        if is_fuzzy:
            self.init_fuzzy()

    def init_constants(self):
        # initial_Q: baseline Q value; initial_fuzzy_Q: bonus given to the
        # action the fuzzy controller would pick; visits: per (state, action)
        # visit counter (see update()).
        self.initial_Q = 0
        self.initial_fuzzy_Q = 2
        self.visits = {}

    def init_fuzzy(self):
        """Seed the Q-table: for every (theta, dtheta) cell, boost the action the
        fuzzy controller recommends, across all (x, dx) cells."""
        print("Generating fuzzy table")
        ctrl = get_controller()
        for i in range(self.Q.shape[2]):
            if i % 10 == 0:
                # NOTE(review): progress message reports shape[0] (n_x) as the
                # total, but the loop runs over shape[2] (n_theta).
                print("Generating row ", i, " of ", self.Q.shape[0], " rows for Q table")
            for j in range(self.Q.shape[3]):
                _, _, theta, dtheta = self.denormalize_state((0, 0, i, j))
                print("\t", theta, dtheta)
                original_action = ctrl.output({'theta': theta, 'dtheta': dtheta})['force']
                normalized_action = self.normalize_action(original_action)
                for a in range(self.Q.shape[0]):
                    for b in range(self.Q.shape[1]):
                        self.Q[(a, b, i, j, normalized_action)] = self.initial_fuzzy_Q

    def normalize_state(self, state):
        """Map a continuous [x, dx, theta, dtheta] state to Q-table indices.

        NOTE(review): theta is scaled without the +max offset the other three
        dimensions get — callers pass theta already shifted by pi; confirm.
        """
        first = int((state[2]) / (2.0 * self.max_theta) * self.Q.shape[2])
        second = int((state[3] + self.max_dtheta) / (2.0 * self.max_dtheta) * self.Q.shape[3])
        third = int((state[0] + self.max_x) / (2.0 * self.max_x) * self.Q.shape[0])
        fourth = int((state[1] + self.max_dx) / (2.0 * self.max_dx) * self.Q.shape[1])
        return (third, fourth, first, second)

    def denormalize_state(self, state):
        """Inverse of normalize_state: table indices -> representative continuous values."""
        first = (float(state[2]) / self.Q.shape[2] * 2.0 * self.max_theta)
        second = (float(state[3]) / self.Q.shape[3] * 2.0 * self.max_dtheta - self.max_dtheta)
        third = (float(state[0]) / self.Q.shape[0] * 2.0 * self.max_x - self.max_x)
        fourth = (float(state[1]) / self.Q.shape[1] * 2.0 * self.max_dx - self.max_dx)
        return (third, fourth, first, second)

    def denormalize_action(self, action):
        """Map a discrete action index to a force in [-1, 1] (middle index -> 0)."""
        half = (self.n_action - 1) / 2
        if action == half:
            return 0
        else:
            return 2 / (self.n_action - 1) * (action - half)

    def sgn(self, x):
        # Sign helper (returns 1 for x >= 0, else -1).
        if x >= 0:
            return 1
        else:
            return -1

    def normalize_action(self, action):
        """Map a continuous force in [-1, 1] to a discrete action index."""
        if abs(action) < 0.0001:
            return int((self.n_action - 1) / 2)
        else:
            return int((self.n_action - 1) / 2 + action / (2 / (self.n_action - 1)))

    def action(self, state, k=3):
        """Pick an action for `state`; returns (action index, continuous force).

        Currently pure greedy (argmax over Q).
        """
        state = self.normalize_state(state)
        actions = self.Q[state]
        normalization_factor = None
        minimal_action = min(actions)
        if minimal_action < 0:
            normalization_factor = -minimal_action
        else:
            normalization_factor = 0
        actions = [(i, actions[i]) for i in range(len(actions))]
        max_action = max(actions, key=lambda x: x[1])
        return max_action[0], self.denormalize_action(max_action[0])
        # NOTE(review): everything below this return is unreachable — it is a
        # disabled softmax-style exploration strategy (parameter k and
        # normalization_factor only matter here).
        probabilities = []
        total = 0
        for a in range(len(actions)):
            curr_probability = k ** (self.Q[tuple(list(state) + [a])] + normalization_factor)
            probabilities.append(total + curr_probability)
            total = total + curr_probability
        probabilities = [p / total for p in probabilities]
        chance = random.random()
        for i in range(len(probabilities)):
            if chance < probabilities[i]:
                return i, self.denormalize_action(i)

    def update(self, s, a, next_s, r, gamma=0.7, alpha=1):
        """Standard Q-learning update for transition (s, a) -> (next_s, r).

        NOTE(review): visits is keyed on the raw (continuous) state plus action,
        so float states rarely repeat; and alpha is unconditionally reset to 1
        below, making the alpha parameter (and the visit counts) ineffective.
        """
        if tuple(list(s) + [a]) not in self.visits:
            self.visits[tuple(list(s) + [a])] = 1
        else:
            self.visits[tuple(list(s) + [a])] += 1
        alpha = 1
        s = self.normalize_state(s)
        next_s = self.normalize_state(next_s)
        # max over next-state action values (the Q-learning target).
        max_action = max(list(self.Q[tuple(next_s)]))
        self.Q[tuple(list(s) + [a])] = self.Q[tuple(list(s) + [a])] + \
            alpha * (r + gamma * max_action - self.Q[tuple(list(s) + [a])])
if __name__ == "__main__":
    # Smoke test: build a QLearning table and dump (part of) it as text.
    # NOTE(review): this call passes 5 positional arguments but __init__
    # requires 9 (max_theta..n_action), so it raises TypeError as written —
    # it predates a signature change; confirm the intended arguments.
    q = QLearning(89*pi / 180, 100, 100, 100, 200)
    q_str = ""
    # NOTE(review): all three loops iterate over shape[0]; indexing with a
    # 3-tuple yields 2-D sub-arrays of the 5-D table.
    for i in range(q.Q.shape[0]):
        for j in range(q.Q.shape[0]):
            for k in range(q.Q.shape[0]):
                q_str += str(q.Q[(i, j, k)]) + " "
            q_str += "\n"
        q_str += "\n"
    print(q_str)
|
{"/inverted_pendulum (1)/inverted_pendulum/qlearning/qlearning.py": ["/inverted_pendulum (1)/inverted_pendulum/fuzzy/fuzzy_controller.py"]}
|
19,327
|
koles161rus/invertedPendulum
|
refs/heads/master
|
/inverted_pendulum (1)/pendulum.py
|
from math import *
from numpy import *
import pygame
import numpy as np
import control.matlab
from main import InvertedPendulumBalancer
# Simulation time step (s) and pendulum physical parameters.
dt = 0.01
g = 9.81   # gravity (m/s^2)
l = 1.0    # pendulum length
m = 1.0    # pendulum mass
# NOTE(review): `global` at module level is a no-op; these statements can be
# removed without effect.
global k1, k2, k3
# k1..k3 and q1..q4, r: controller tuning constants — presumably LQR-style
# gains/weights given the `control.matlab` import; confirm against their use.
k1 = 500000
k2 = 10000
k3 = 10000
global q1, q2, q3, q4, r
q1 = 0.001
q2 = 0.001
q3 = 1000000000
q4 = 20000000
r = 0.005
# Pygame window setup (side effects at import time).
clock = pygame.time.Clock()
pygame.init()
size = (width, height) = (1800, 1000)
screen = pygame.display.set_mode(size)
class Pendulum:
    """Pendulum-on-a-cart simulated with explicit Euler steps and rendered
    with pygame, including scrolling strip charts for phi and x."""

    def __init__(self, x0, y0, phi0):
        """x0, y0: pivot (cart) position in pixels; phi0: initial angle."""
        self.phi0 = phi0
        self.phi = phi0
        self.velocity = 0      # angular velocity
        self.x0 = x0           # cart x (pixels)
        self.y0 = y0           # pivot y (pixels)
        self.x0_vel = 0        # cart velocity
        # Bob position, drawn 250 px from the pivot.
        self.x = x0 + 250.0 * sin(phi0)
        self.y = y0 + 250.0 * cos(phi0)
        # (A dead `self.t = dt` assignment that was immediately overwritten
        # has been removed.)
        self.t = np.arange(0, 30, 0.01)
        # Scrolling chart state: current abscissa plus accumulated points.
        self.phi_chart_t = 20
        self.phi_chart = [(self.phi_chart_t, 820)]
        self.x_chart_t = 20
        self.x_chart = [(self.x_chart_t, 480)]

    def move(self, control):
        """Advance one time step dt; `control` is the cart acceleration."""
        # Recover the angle from the current bob/pivot geometry.
        self.phi = atan2(self.x - self.x0, self.y - self.y0)
        d_velocity = -g * sin(self.phi) * dt / l
        self.velocity += d_velocity
        d_phi = dt * self.velocity
        self.phi += d_phi
        self.x = self.x0 + 250.0 * sin(self.phi)
        self.y = self.y0 + 250.0 * cos(self.phi)
        # Integrate the cart: control -> velocity -> position.
        d_x0_vel = dt * control
        self.x0_vel += d_x0_vel
        dx0 = dt * self.x0_vel
        self.x0 += dx0

    def draw(self):
        """Render the pendulum and append one point to each strip chart."""
        pygame.draw.circle(screen, (0, 0, 0), [int(self.x0), int(self.y0)], 5)
        pygame.draw.line(screen, (0, 0, 0), [self.x0, self.y0], [self.x, self.y], 2)
        pygame.draw.circle(screen, (255, 0, 0), [int(self.x), int(self.y)], 10)
        pygame.draw.line(screen, (0, 0, 0), [0, self.y0], [1800, self.y0], 3)
        # --- phi strip chart ---
        self.phi_chart_t += 0.2
        if self.phi_chart_t > size[0]:
            # Wrapped past the right edge: restart the trace.
            self.phi_chart_t = 0
            self.phi_chart = [(self.phi_chart_t, 820)]
        # Signed deviation from the upright position (phi = +/- pi).
        angle = np.pi - self.phi if self.phi > 0 else -np.pi - self.phi
        self.phi_chart.append((self.phi_chart_t, 300 * angle + 820))
        pygame.draw.lines(screen, (255, 0, 0), False, self.phi_chart, 3)
        pygame.draw.line(screen, (0, 0, 0), [20, 820], [1780, 820], 2)
        pygame.draw.line(screen, (0, 0, 0), [20, 660], [20, 980], 2)
        pygame.draw.line(screen, (128, 128, 128), [20, 665], [1780, 665], 2)
        pygame.draw.line(screen, (128, 128, 128), [20, 975], [1780, 975], 2)
        pygame.draw.polygon(screen, (0, 0, 0), ((18, 660), (20, 650), (22, 660)), 2)
        pygame.draw.polygon(screen, (0, 0, 0), ((1770, 822), (1780, 820), (1770, 818)), 2)
        print(self.phi)
        # --- x strip chart ---
        self.x_chart_t += 0.2
        if self.x_chart_t > size[0]:
            self.x_chart_t = 0
            # FIX: the original assigned the reset list to self.x, clobbering
            # the bob's x-coordinate with a list (TypeError on the next
            # append below). Reset the chart, mirroring the phi branch.
            self.x_chart = [(self.x_chart_t, 480)]
        move = self.x
        self.x_chart.append((self.x_chart_t, -0.2 * move + 683))
        pygame.draw.lines(screen, (0, 255, 0), False, self.x_chart, 3)
        pygame.draw.line(screen, (0, 0, 0), [20, 480], [1780, 480], 2)
        pygame.draw.line(screen, (0, 0, 0), [20, 620], [20, 340], 2)
        pygame.draw.line(screen, (128, 128, 128), [20, 345], [1780, 345], 2)
        pygame.draw.line(screen, (128, 128, 128), [20, 615], [1780, 615], 2)
        pygame.draw.polygon(screen, (0, 0, 0), ((18, 340), (20, 330), (22, 340)), 2)
        pygame.draw.polygon(screen, (0, 0, 0), ((1770, 482), (1780, 480), (1770, 478)), 2)
        print(self.x)
class PID:
    """PID controller on the pendulum angle, with a weak cart-position term.

    The gains are re-read from the module-level globals k1/k2/k3 on every
    update so the on-screen tuning buttons take effect immediately.
    """

    def __init__(self, k1, k2, k3, pendulum):
        self.k1 = k1   # proportional gain
        self.k2 = k2   # derivative gain
        self.k3 = k3   # integral gain
        self.p = pendulum
        self.error = pi - self.p.phi
        self.derivative = 0
        self.integral = 0

    def update(self):
        """Recompute error, derivative and integral for the current state."""
        # Refresh gains from the interactively tuned module globals.
        self.k1 = k1
        self.k2 = k2
        self.k3 = k3
        tmp = self.error
        # Error: signed distance of the pendulum from upright, plus a weak
        # pull of the cart toward x = 600.
        # FIX: use self.p (the controller's own pendulum) rather than the
        # module global `p`, which only happened to alias the same object.
        self.error = copysign(1, self.p.phi) * (pi - abs(self.p.phi)) + (self.p.x0 - 600) / 10000
        diff = self.error - tmp
        self.derivative = diff / dt
        # NOTE(review): this accumulates the *previous* error (tmp), not the
        # freshly computed one — looks unintentional but preserved as-is.
        self.integral += tmp

    def output(self):
        """Control signal: k1*e + k2*de/dt + k3*integral(e)."""
        return self.k1 * self.error + self.k2 * self.derivative + self.k3 * self.integral
class LQR:
    """LQR state-feedback controller for the linearized pendulum/cart."""

    def __init__(self, q1, q2, q3, q4, r, pendulum):
        self.q1 = q1
        self.q2 = q2
        self.q3 = q3
        self.q4 = q4
        self.p = pendulum
        # Linearized dynamics x' = Ax + Bu around the upright equilibrium.
        self.A = matrix([[0, 1, 0, 0], [0, 0, -g, 0], [0, 0, 0, 1], [0, 0, 2 * g, 0]])
        self.B = matrix([[0], [1], [0], [-1]])
        self.Q = diag([q1, q2, q3, q4])   # state weights
        self.R = r                        # control-effort weight
        # Optimal gain matrix from the continuous-time Riccati solution.
        self.K = control.matlab.lqr(self.A, self.B, self.Q, self.R)[0]
        print(self.K)

    def update(self):
        """Pull the (possibly GUI-tuned) weights back in, recompute the gain."""
        self.q1, self.q2, self.q3, self.q4 = q1, q2, q3, q4
        self.Q = diag([q1, q2, q3, q4])
        self.K = control.matlab.lqr(self.A, self.B, self.Q, self.R)[0]

    def output(self):
        """Return the scalar control u = K·x for the measured state."""
        cart_pos = -(self.p.x0 - 600) / 10
        cart_vel = self.p.x0_vel / 10
        angle = copysign(1, self.p.phi) * (-pi + abs(self.p.phi))
        X = matrix([[cart_pos], [cart_vel], [angle], [self.p.velocity]])
        return (self.K * X).flat[0]
class DrawPID:
    """On-screen readout and tuning buttons for the PID gains k1/k2/k3."""

    def draw_text(self):
        """Render the current gain values (labels are in Russian)."""
        myfont = pygame.font.SysFont("monospace", 15)
        label1 = myfont.render("Пропорциональный коэффициент: %d" % k1, 1, (255, 0, 0))
        screen.blit(label1, (100, 400))
        label2 = myfont.render("Дифференциальный коэффициент: %d" % k2, 1, (255, 0, 0))
        screen.blit(label2, (100, 420))
        label3 = myfont.render("Интегральный коэффициент: %.1f" % k3, 1, (255, 0, 0))
        screen.blit(label3, (100, 440))

    # NOTE: no `self` — invoked as DrawPID.make_buttons(...), i.e. an
    # implicit static function in Python 3.
    def make_buttons(k1, k2, k3, pid, pend):
        """Draw increase/decrease buttons for each gain, apply one click if
        the left mouse button is down, and return the (possibly changed)
        gains as (k1, k2, k3)."""
        # One button pair per gain: left column increases, right decreases.
        pygame.draw.rect(screen, (0, 0, 255), [420, 400, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [520, 400, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [420, 420, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [520, 420, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [420, 440, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [520, 440, 90, 15])
        myfont = pygame.font.SysFont("monospace", 15)
        label1 = myfont.render("Повысить", 1, (255, 255, 255))
        label2 = myfont.render("Понизить", 1, (255, 255, 255))
        screen.blit(label1, (420, 400))
        screen.blit(label2, (520, 400))
        screen.blit(label1, (420, 420))
        screen.blit(label2, (520, 420))
        screen.blit(label1, (420, 440))
        screen.blit(label2, (520, 440))
        # Hit-test the cursor against each button rectangle.
        if pygame.mouse.get_pressed()[0]:
            (pos1, pos2) = pygame.mouse.get_pos()
            if 420 <= pos1 <= 510 and 400 <= pos2 <= 415:
                k1 += 10
            elif 520 <= pos1 <= 610 and 400 <= pos2 <= 415:
                k1 -= 10
            elif 420 <= pos1 <= 510 and 420 <= pos2 <= 435:
                k2 += 1
            elif 520 <= pos1 <= 610 and 420 <= pos2 <= 435:
                k2 -= 1
            elif 420 <= pos1 <= 510 and 440 <= pos2 <= 455:
                k3 += 0.1
            elif 520 <= pos1 <= 610 and 440 <= pos2 <= 455:
                k3 -= 0.1
        return k1, k2, k3
class DrawLQR:
    """On-screen readout and tuning buttons for the LQR weights q1..q4."""

    def draw_text(self):
        """Render the current weight values (labels are in Russian)."""
        myfont = pygame.font.SysFont("monospace", 15)
        label1 = myfont.render("Параметр положения тележки: %.5f" % q1, 1, (255, 0, 0))
        screen.blit(label1, (50, 400))
        label2 = myfont.render("Параметр скорости тележки: %.5f" % q2, 1, (255, 0, 0))
        screen.blit(label2, (50, 420))
        label3 = myfont.render("Параметр углового положения: %.1f" % q3, 1, (255, 0, 0))
        screen.blit(label3, (50, 440))
        label3 = myfont.render("Параметр угловой скорости: %.1f" % q4, 1, (255, 0, 0))
        screen.blit(label3, (50, 460))

    # NOTE: no `self` — invoked as DrawLQR.make_buttons(...), i.e. an
    # implicit static function in Python 3.
    def make_buttons(q1, q2, q3, q4, lqr, pend):
        """Draw increase/decrease buttons for each weight, apply one click
        if the left mouse button is down, and return (q1, q2, q3, q4)."""
        # One button pair per weight: left column increases, right decreases.
        pygame.draw.rect(screen, (0, 0, 255), [420, 400, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [520, 400, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [420, 420, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [520, 420, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [420, 440, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [520, 440, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [420, 460, 90, 15])
        pygame.draw.rect(screen, (0, 0, 255), [520, 460, 90, 15])
        myfont = pygame.font.SysFont("monospace", 15)
        label1 = myfont.render("Повысить", 1, (255, 255, 255))
        label2 = myfont.render("Понизить", 1, (255, 255, 255))
        screen.blit(label1, (420, 400))
        screen.blit(label2, (520, 400))
        screen.blit(label1, (420, 420))
        screen.blit(label2, (520, 420))
        screen.blit(label1, (420, 440))
        screen.blit(label2, (520, 440))
        screen.blit(label1, (420, 460))
        screen.blit(label2, (520, 460))
        # Hit-test the cursor against each button rectangle.
        if (pygame.mouse.get_pressed()[0]):
            (pos1, pos2) = pygame.mouse.get_pos()
            if 420 <= pos1 <= 510 and 400 <= pos2 <= 415:
                q1 += 0.001
            elif 520 <= pos1 <= 610 and 400 <= pos2 <= 415:
                q1 -= 0.001
                # Clamp: keep q1 strictly positive after a decrement.
                if q1 < 0.001: q1 += 0.001
            elif 420 <= pos1 <= 510 and 420 <= pos2 <= 435:
                q2 += 0.001
            elif 520 <= pos1 <= 610 and 420 <= pos2 <= 435:
                q2 -= 0.001
            elif 420 <= pos1 <= 510 and 440 <= pos2 <= 455:
                q3 += 1000
            elif 520 <= pos1 <= 610 and 440 <= pos2 <= 455:
                q3 -= 1000
            elif 420 <= pos1 <= 510 and 460 <= pos2 <= 475:
                q4 += 10
            elif 520 <= pos1 <= 610 and 460 <= pos2 <= 475:
                q4 -= 10
        return q1, q2, q3, q4
def draw_designation():
    """Draw axis captions and tick labels for the phi and x strip charts."""
    myfont = pygame.font.SysFont("monospace", 20)
    grey = (128, 128, 128)
    # Axis names.
    screen.blit(myfont.render("X", 1, grey), (5, 470))
    screen.blit(myfont.render("PHI", 1, grey), (0, 640))
    label_t = myfont.render("I", 1, grey)
    screen.blit(label_t, (1785, 820))
    screen.blit(label_t, (1785, 550))
    # Vertical-scale endpoints of the phi chart.
    screen.blit(myfont.render("PI/3", 1, grey), (25, 665))
    label_zero = myfont.render("0", 1, grey)
    screen.blit(label_zero, (20, 820))
    screen.blit(label_zero, (20, 480))
    screen.blit(myfont.render("-PI/3", 1, grey), (20, 955))
    # Vertical-scale endpoints of the x chart.
    screen.blit(myfont.render("550", 1, grey), (25, 345))
    screen.blit(myfont.render("-550", 1, grey), (20, 600))
    # Time-axis tick labels 100..1300, spaced 125 px apart, drawn on both
    # chart baselines (y = 820 for phi, y = 480 for x).
    for step, x_pos in enumerate(range(135, 1636, 125), start=1):
        tick = myfont.render(str(step * 100), 1, grey)
        screen.blit(tick, (x_pos, 820))
        screen.blit(tick, (x_pos, 480))
# --- main simulation loop -------------------------------------------------
# Key 1 steps the simulation under the PID controller, key 2 under the LQR
# controller, key 3 hands control to the Q-learning balancer.
p = Pendulum(900, 300, pi - 30*pi / 180)
pid = PID(k1, k2, k3, p)
lqr = LQR(q1, q2, q3, q4, r, p)
while True:
    screen.fill((255, 255, 255))
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            # FIX: exit the loop after shutting pygame down; previously the
            # loop kept running and crashed on the next draw call.
            pygame.quit()
            raise SystemExit
        if event.type == pygame.KEYUP:
            if event.key == pygame.K_1:
                pid.update()
                p.move(pid.output())
            if event.key == pygame.K_2:
                lqr.update()
                p.move(lqr.output())
            if event.key == pygame.K_3:
                balancer = InvertedPendulumBalancer()
                balancer.run()
    # Drop high-frequency event types this loop never handles.
    pygame.event.set_blocked(pygame.MOUSEMOTION)
    pygame.event.set_blocked(pygame.MOUSEBUTTONUP)
    pygame.event.set_blocked(pygame.MOUSEBUTTONDOWN)
    pygame.event.set_blocked(pygame.ACTIVEEVENT)
    p.draw()
    draw_designation()
    clock.tick(60)   # cap at 60 FPS
    pygame.display.flip()
|
{"/inverted_pendulum (1)/inverted_pendulum/qlearning/qlearning.py": ["/inverted_pendulum (1)/inverted_pendulum/fuzzy/fuzzy_controller.py"]}
|
19,331
|
qsq-dm/mff
|
refs/heads/master
|
/admin/urls.py
|
# -*- coding: utf-8 -*-
from flask import request
from flask import Blueprint, render_template, abort
from jinja2 import TemplateNotFound
from admin.views import index
from admin.views import get_city_list
from admin.views import login
from admin.views import logout
from admin.views import new_city
from admin.views import get_item_list
from admin.views import item_edit
from admin.views import get_item
from admin.views import get_cat
from admin.views import get_subcat
from admin.views import hospital_edit
from admin.views import get_school_list
from admin.views import get_hospital
from admin.views import get_hospital_list
from admin.views import get_cat_list
from admin.views import get_subcat_list
from admin.views import set_subcat_status
from admin.views import get_period_choice_list
from admin.views import edit_itemcat
from admin.views import edit_itemsubcat
from admin.views import refresh_qntoken
from admin.views import get_apply_list
from admin.views import get_apply_detail
from admin.views import apply_approve
from admin.views import apply_reject
from admin.views import set_item_status
from admin.views import recommend_item
from admin.views import get_user_list
from admin.views import get_user_detail
from admin.views import get_school_city_list
from admin.views import get_order_list
from admin.views import upload_image
from admin.views import verify_chsi
from admin.views import set_chsi_captcha
from admin.views import refresh_captcha
from admin.views import get_advice_list
from admin.views import get_advice_detail
from admin.views import admin_refund_order
from admin.views import get_activity_list
from admin.views import get_activity_items
from admin.views import set_activity_items
from admin.views import activity_edit
from admin.views import get_activity
from admin.views import top_recommend_item
from admin.views import recommend_subcat
from admin.views import top_recommend_subcat
from admin.views import get_item_recommend
from admin.views import item_recommend_edit
from admin.views import get_item_activity
from admin.views import item_activity_edit
from admin.views import get_subcat_recommend
from admin.views import subcat_recommend_edit
from admin.views import set_recommend_order
from admin.views import set_recommend_subcat_order
from admin.views import new_period_pay_choice
from admin.views import get_period_pay_log_list
from admin.views import del_item_activity
from admin.views import get_refund_detail
from admin.views import get_coupon_list
from admin.views import coupon_edit
from admin.views import get_coupon
from admin.views import trial_edit
from admin.views import get_trial_list
from admin.views import get_trial
from admin.views import trial_applyer_list
from admin.views import send_trial
from admin.views import set_trial_order
from admin.views import get_promoter_list
from admin.views import add_promoter
from admin.views import add_hospital_admin
from admin.views import get_hospital_user_list
from admin.views import to_supply
from admin.views import supply_apply
from admin.views import set_hospital_status
from admin.views import get_daily_coupon_list
from admin.views import daily_coupon_edit
from admin.views import get_daily_coupon
from admin.views import set_recommend_hospital_order
from admin.views import hospital_recommend_edit
from admin.views import get_hospital_recommend
from admin.views import get_tutorial_list
from admin.views import tutorial_edit
from admin.views import get_tutorial
from admin.views import set_tutorial_status
from admin.views import get_user_vcode
from admin.views import reset_user_vcode_sent
from admin.views import send_user_coupon
from admin.views import get_city
from admin.views import city_edit
from admin.views import recommend_hospital
from admin.views import daily_applyer_list
from admin.views import set_cats_order
from admin.views import set_city
from admin.views import set_question_status
from admin.views import get_question_list
from admin.views import new_question
from admin.views import get_user_question_list
from admin.views import get_room_list
from admin.views import get_room_detail
# Admin blueprint: central URL table mapping each admin route to its view.
admin_api = Blueprint('admin_api', __name__,
        template_folder='templates')
admin_api.add_url_rule('/', 'index', index)
admin_api.add_url_rule('/login/', 'login', login, methods=['POST', 'GET'])
admin_api.add_url_rule('/logout/', 'logout', logout, methods=['POST', 'GET'])
# --- cities ---
admin_api.add_url_rule('/get_city_list/', 'get_city_list', get_city_list)
# NOTE(review): '/new_city/' is handled by city_edit (same pattern as the
# other new_*/edit_* pairs); the imported new_city view is unused — confirm.
admin_api.add_url_rule('/new_city/', 'new_city', city_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/city_edit/<int:item_id>/', 'city_edit', city_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/daily_applyer_list/', 'daily_applyer_list', daily_applyer_list, methods=['POST', 'GET'])
# --- items and tutorials ---
admin_api.add_url_rule('/get_item_list/', 'get_item_list', get_item_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_item/', 'new_item', item_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/edit_item/<int:item_id>/', 'edit_item', item_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_tutorial/', 'new_tutorial', tutorial_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/tutorial_edit/<int:item_id>/', 'edit_tutorial', tutorial_edit, methods=['POST', 'GET'])
# FIX: removed two exact duplicate registrations of '/new_item/' and
# '/edit_item/<int:item_id>/' that repeated the pair above verbatim.
admin_api.add_url_rule('/get_item/', 'get_item', get_item, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_cat/', 'get_cat', get_cat, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_subcat/', 'get_subcat', get_subcat, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_activity/', 'new_activity', activity_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_itemcat/', 'new_itemcat', edit_itemcat, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_itemsubcat/', 'new_itemsubcat', edit_itemsubcat, methods=['POST', 'GET'])
admin_api.add_url_rule('/edit_itemcat/<int:cat_id>/', 'edit_itemcat', edit_itemcat, methods=['POST', 'GET'])
admin_api.add_url_rule('/edit_itemsubcat/<int:sub_cat_id>/', 'edit_itemsubcat', edit_itemsubcat, methods=['POST', 'GET'])
admin_api.add_url_rule('/edit_activity/<int:item_id>/', 'edit_activity', activity_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_school_list/', 'get_school_list', get_school_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_order_list/', 'get_order_list', get_order_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_city/', 'get_city', get_city, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_cat_list/', 'get_cat_list', get_cat_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_subcat_list/', 'get_subcat_list', get_subcat_list, methods=['POST', 'GET'])
# --- hospitals ---
admin_api.add_url_rule('/edit_hospital/<int:item_id>/', 'hospital_edit', hospital_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_hospital/', 'new_hospital', hospital_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_hospital/', 'get_hospital', get_hospital, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_hospital_list/', 'get_hospital_list', get_hospital_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_tutorial_list/', 'get_tutorial_list', get_tutorial_list)
admin_api.add_url_rule('/get_period_choice_list/', 'get_period_choice_list', get_period_choice_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/subcat/set_status/', 'set_status', set_subcat_status, methods=['POST', 'GET'])
admin_api.add_url_rule('/refresh_qntoken/', 'refresh_qntoken', refresh_qntoken, methods=['POST', 'GET'])
# --- applications ---
admin_api.add_url_rule('/get_apply_list/', 'get_apply_list', get_apply_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_apply_detail/', 'get_apply_detail', get_apply_detail, methods=['POST', 'GET'])
admin_api.add_url_rule('/apply_reject/', 'apply_reject', apply_reject, methods=['POST', 'GET'])
admin_api.add_url_rule('/apply_approve/', 'apply_approve', apply_approve, methods=['POST', 'GET'])
admin_api.add_url_rule('/set_item_status/', 'set_item_status', set_item_status, methods=['POST', 'GET'])
# --- recommendations ---
admin_api.add_url_rule('/recommend_item/', 'recommend_item', recommend_item, methods=['POST', 'GET'])
admin_api.add_url_rule('/recommend_hospital/', 'recommend_hospital', recommend_hospital, methods=['POST', 'GET'])
admin_api.add_url_rule('/recommend_subcat/', 'recommend_subcat', recommend_subcat, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_user_list', 'get_user_list', get_user_list)
admin_api.add_url_rule('/get_user_detail', 'get_user_detail', get_user_detail)
admin_api.add_url_rule('/get_school_city_list/', 'get_school_city_list', get_school_city_list)
admin_api.add_url_rule('/get_advice_list/', 'get_advice_list', get_advice_list)
admin_api.add_url_rule('/get_advice_detail/', 'get_advice_detail', get_advice_detail)
admin_api.add_url_rule('/upload_image/', 'upload_image', upload_image, methods=['POST', 'GET'])
admin_api.add_url_rule('/verify_chsi/', 'verify_chsi', verify_chsi, methods=['POST', 'GET'])
admin_api.add_url_rule('/set_chsi_captcha/', 'set_chsi_captcha', set_chsi_captcha, methods=['POST', 'GET'])
admin_api.add_url_rule('/refresh_chsi_captcha/', 'refresh_chsi_captcha', refresh_captcha, methods=['POST', 'GET'])
admin_api.add_url_rule('/refund_order/', 'refund_order', admin_refund_order, methods=['POST', 'GET'])
# --- activities ---
admin_api.add_url_rule('/get_activity_list/', 'get_activity_list', get_activity_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_activity_items/', 'get_activity_items', get_activity_items, methods=['POST', 'GET'])
admin_api.add_url_rule('/set_activity_items/', 'set_activity_items', set_activity_items, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_activity/', 'get_activity', get_activity, methods=['POST', 'GET'])
admin_api.add_url_rule('/top_recommend_item/', 'top_recommend_item', top_recommend_item, methods=['POST', 'GET'])
admin_api.add_url_rule('/top_recommend_subcat/', 'top_recommend_subcat', top_recommend_subcat, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_item_recommend/', 'get_item_recommend', get_item_recommend, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_hospital_recommend/', 'get_hospital_recommend', get_hospital_recommend, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_subcat_recommend/', 'get_subcat_recommend', get_subcat_recommend, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_item_activity/', 'get_item_activity', get_item_activity, methods=['POST', 'GET'])
admin_api.add_url_rule('/item_recommend_edit/<int:item_id>/', 'item_recommend_edit', item_recommend_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/item_activity_edit/<int:item_id>/', 'item_activity_edit', item_activity_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/subcat_recommend_edit/<int:item_id>/', 'subcat_recommend_edit', subcat_recommend_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/hospital_recommend_edit/<int:item_id>/', 'hospital_recommend_edit', hospital_recommend_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/set_recommend_order/', 'set_recommend_order', set_recommend_order, methods=['POST', 'GET'])
admin_api.add_url_rule('/set_recommend_subcat_order/', 'set_recommend_subcat_order', set_recommend_subcat_order, methods=['POST', 'GET'])
admin_api.add_url_rule('/set_recommend_hospital_order/', 'set_recommend_hospital_order', set_recommend_hospital_order, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_period_pay_choice/', 'new_period_pay_choice', new_period_pay_choice, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_period_pay_log_list/', 'get_period_pay_log_list', get_period_pay_log_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/del_item_activity/', 'del_item_activity', del_item_activity, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_refund_detail/', 'get_refund_detail', get_refund_detail)
# --- coupons and trials ---
admin_api.add_url_rule('/get_coupon_list/', 'get_coupon_list', get_coupon_list)
admin_api.add_url_rule('/coupon_edit/<int:item_id>/', 'coupon_edit', coupon_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_coupon/', 'new_coupon', coupon_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_coupon/', 'get_coupon', get_coupon)
admin_api.add_url_rule('/get_trial_list/', 'get_trial_list', get_trial_list)
admin_api.add_url_rule('/new_trial/', 'new_trial', trial_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/edit_trial/<int:item_id>/', 'trial_edit', trial_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_trial/', 'get_trial', get_trial)
admin_api.add_url_rule('/trial_applyer_list/', 'trial_applyer_list', trial_applyer_list)
admin_api.add_url_rule('/send_trial/', 'send_trial', send_trial, methods=['POST','GET'])
admin_api.add_url_rule('/set_trial_order/', 'set_trial_order', set_trial_order, methods=['POST','GET'])
# --- promoters and hospital admins ---
admin_api.add_url_rule('/get_promoter_list/', 'get_promoter_list', get_promoter_list)
admin_api.add_url_rule('/add_promoter/', 'add_promoter', add_promoter, methods=['POST','GET'])
admin_api.add_url_rule('/get_hospital_user_list/', 'get_hospital_user_list', get_hospital_user_list)
admin_api.add_url_rule('/add_hospital_admin/', 'add_hospital_admin', add_hospital_admin, methods=['POST','GET'])
admin_api.add_url_rule('/to_supply/', 'to_supply', to_supply, methods=['POST','GET'])
admin_api.add_url_rule('/supply_apply/', 'supply_apply', supply_apply, methods=['POST','GET'])
admin_api.add_url_rule('/set_hospital_status/', 'set_hospital_status', set_hospital_status, methods=['POST','GET'])
admin_api.add_url_rule('/get_daily_coupon_list/', 'get_daily_coupon_list', get_daily_coupon_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/daily_coupon_edit/<int:item_id>/', 'daily_coupon_edit', daily_coupon_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_daily_coupon/', 'new_daily_coupon', daily_coupon_edit, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_daily_coupon/', 'get_daily_coupon', get_daily_coupon, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_tutorial/', 'get_tutorial', get_tutorial)
admin_api.add_url_rule('/set_tutorial_status/', 'set_tutorial_status', set_tutorial_status, methods=['POST', 'GET'])
# --- users, questions, rooms ---
admin_api.add_url_rule('/get_user_vcode/', 'get_user_vcode', get_user_vcode, methods=['GET', 'POST'])
admin_api.add_url_rule('/reset_user_vcode/', 'reset_user_vcode', reset_user_vcode_sent, methods=['GET', 'POST'])
admin_api.add_url_rule('/send_user_coupon/', 'send_user_coupon', send_user_coupon, methods=['POST'])
admin_api.add_url_rule('/set_cats_order/', 'set_cats_order', set_cats_order, methods=['POST', 'GET'])
admin_api.add_url_rule('/change_city/', 'set_city', set_city, methods=['POST', 'GET'])
admin_api.add_url_rule('/set_question_status/', 'set_question_status', set_question_status, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_question_list/', 'get_question_list', get_question_list, methods=['POST', 'GET'])
admin_api.add_url_rule('/new_question/', 'new_question', new_question, methods=['POST', 'GET'])
admin_api.add_url_rule('/get_user_question_list/', 'get_user_question_list', get_user_question_list, methods=['POST','GET'])
admin_api.add_url_rule('/get_room_list/', 'get_room_list', get_room_list, methods=['POST','GET'])
admin_api.add_url_rule('/get_room_detail/', 'get_room_detail', get_room_detail, methods=['POST','GET'])
# Late imports for the remark views (kept in place to match the original
# module layout).
from admin.views import remark_order
from admin.views import remark_apply
from admin.views import remark_useradvice
admin_api.add_url_rule('/remark_order/', 'remark_order', remark_order, methods=['POST','GET'])
admin_api.add_url_rule('/remark_apply/', 'remark_apply', remark_apply, methods=['POST','GET'])
admin_api.add_url_rule('/remark_useradvice/', 'remark_useradvice', remark_useradvice, methods=['POST','GET'])
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,332
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/4db11f101899_.py
|
"""empty message
Revision ID: 4db11f101899
Revises: 4d043b343761
Create Date: 2015-12-09 15:21:29.065236
"""
# revision identifiers, used by Alembic.
revision = '4db11f101899'        # this migration's id
down_revision = '4d043b343761'   # migration this one applies on top of
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('trial',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=300), nullable=True),
sa.Column('image', sa.String(length=300), nullable=True),
sa.Column('cat', sa.Integer(), nullable=True),
sa.Column('item_id', sa.Integer(), nullable=True),
sa.Column('total', sa.Integer(), nullable=True),
sa.Column('sent', sa.Integer(), nullable=True),
sa.Column('apply_count', sa.Integer(), nullable=True),
sa.Column('rules', sa.Text(), nullable=True),
sa.Column('create_time', sa.DateTime(), nullable=True),
sa.Column('end_time', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('trial_apply',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('trial_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('phone', sa.String(length=30), nullable=True),
sa.Column('school', sa.String(length=100), nullable=True),
sa.Column('addr', sa.String(length=100), nullable=True),
sa.Column('content', sa.String(length=1000), nullable=True),
sa.Column('create_time', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['trial_id'], ['trial.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('trial_comment',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('trial_id', sa.Integer(), nullable=False),
sa.Column('photos', sa.String(length=1000), nullable=True),
sa.Column('content', sa.String(length=10000), nullable=True),
sa.Column('create_time', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['trial_id'], ['trial.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('trial_comment')
op.drop_table('trial_apply')
op.drop_table('trial')
### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,333
|
qsq-dm/mff
|
refs/heads/master
|
/thirdparty/wechat.py
|
# -*- coding: utf-8 -*-
import time
import urllib
import urllib2
import hashlib
from flask import request
import weixin.client
WechatAuthAPI = weixin.client.WeixinAPI
from wechat_sdk import WechatBasic
from settings import WECHAT_APP_ID
from settings import WECHAT_APP_SECRET
from settings import WECHAT_APP_TOKEN
from settings import SERVER_NAME
from celery.contrib.methods import task_method
from celery import current_app
class WechatInfo(WechatBasic):
def set_cache(self, cache):
self.cache = cache
@property
def access_token(self):
token = self.cache.get()
if not token:
self.refresh_wechat_token()
token = self.cache.get()
return token
@current_app.task(filter=task_method)
def refresh_wechat_token(self):
''' 比较特殊 tasks的参数第一个bound了self '''
data = self.grant_token()
access_token = data["access_token"]
expires_in = data["expires_in"]
self.cache.set(access_token, expires_in-60)
@property
def jsapi_ticket(self):
self._check_appid_appsecret()
if getattr(self, '__jsapi_ticket', None):
now = time.time()
if self.__jsapi_ticket_expires_at - now > 60:
return self.__jsapi_ticket
else:
self.grant_jsapi_ticket()
data = self.grant_jsapi_ticket()
return self.__jsapi_ticket
def grant_jsapi_ticket(self, override=True):
"""
获取 Jsapi Ticket
详情请参考 http://mp.weixin.qq.com/wiki/7/aaa137b55fb2e0456bf8dd9148dd613f.html#.E9.99.84.E5.BD.951-JS-SDK.E4.BD.BF.E7.94.A8.E6.9D.83.E9.99.90.E7.AD.BE.E5.90.8D.E7.AE.97.E6.B3.95
:param override: 是否在获取的同时覆盖已有 jsapi_ticket (默认为True)
:return: 返回的 JSON 数据包
:raise HTTPError: 微信api http 请求失败
"""
self._check_appid_appsecret()
# force to grant new access_token to avoid invalid credential issue
response_json = self._get(
url="https://api.weixin.qq.com/cgi-bin/ticket/getticket",
params={
"access_token": self.access_token,
"type": "jsapi",
}
)
if override:
self.__jsapi_ticket = response_json['ticket']
self.__jsapi_ticket_expires_at = int(time.time()) + response_json['expires_in']
return response_json
wechat = WechatInfo(token=WECHAT_APP_TOKEN, appid=WECHAT_APP_ID, appsecret=WECHAT_APP_SECRET)
menu_data = {
"button":[
{
"type": "view",
"name": u"分期整形",
"url": "http://{}/user/index".format(SERVER_NAME),
},
{
"type": "view",
"name": u"寝室大赛",
"url": "http://{}/static/user/Activities/home.html".format(SERVER_NAME),
},
{
"name": u"更多",
"sub_button":[
{
"type":"view",
"name":u"我",
"url": "http://{}/static/user/my-not-reg.html".format(SERVER_NAME),
},
{
"type":"view",
"name":u"下载APP",
"url": "http://{}/static/user/downLoad.html".format(SERVER_NAME),
},
{
"type":"view",
"name":u"帮助中心",
"url": "http://{}/user/help.html".format(SERVER_NAME),
},
{
"type": "click",
"name": u"联系客服",
"key": "contact_us",
},
{
"type":"view",
"name":u"医院入口",
"url": "http://{}/hospital/".format(SERVER_NAME),
},
]
}
]}
def create_menu():
''' 创建公众号菜单 '''
return wechat.create_menu(menu_data)
REDIRECT_URI = 'http://{}/user/'.format(SERVER_NAME)
AuthApi = WechatAuthAPI(appid=WECHAT_APP_ID,
app_secret=WECHAT_APP_SECRET,
redirect_uri=REDIRECT_URI)
def get_user_snsapi_base_url(redirecturi='http://{}/user/auth'.format(SERVER_NAME), state='STATE'):
'''返回snsapi_base静默登录url '''
link = ('''
https://open.weixin.qq.com/connect/oauth2/authorize?appid=%s&redirect_uri=%s&response_type=code&scope=snsapi_base&state=%s#wechat_redirect
'''%(WECHAT_APP_ID, urllib.quote_plus(redirecturi), state)).strip()
return link
def exchange_code_for_token(code):
''' 通过微信oauth静默登录认证回调的code参数 获取 access_token openid
返回值是一个字典 包含access_token, openid
'''
return AuthApi.exchange_code_for_access_token(code=code)
def get_user_info(access_token, openid):
''' 通过code获取的access_token及open_id获取oauth授权登录用户信息 '''
auth_api = WechatAuthAPI(access_token=access_token)
user = auth_api.user(openid=openid)
return user
def create_link_str(params):
result = ''
for i in sorted(params.keys()):
result += i+'='+params[i]+'&'
return result.rstrip('&')
from util.utils import random_str
def gen_noncestr():
return random_str(10)
def get_sign(path, timestamp, noncestr):
data = dict(
jsapi_ticket = wechat.jsapi_ticket,
noncestr= noncestr,
timestamp= timestamp,
url = path,
)
m = hashlib.sha1()
m.update(create_link_str(data))
print create_link_str(data)
return m.hexdigest()
def get_jssdk_context(link=None):
''' js sdk 参数 '''
try:
noncestr = gen_noncestr()
timestamp= str(time.time())
sign = get_sign(link or request.url, timestamp, noncestr)
context = {
'domain': SERVER_NAME,
'appid': WECHAT_APP_ID,
'noncestr': noncestr,
'timestamp': timestamp,
'sign': sign,
}
return context
except:
import traceback
traceback.print_exc()
print 'jssdk error'
return {}
def create_qrcode(scene_id):
''' 创建二维码ticket'''
data = {
"action_name": "QR_LIMIT_SCENE",
"action_info": {"scene": {"scene_id": scene_id}}
}
a = wechat.create_qrcode(data)
ticket = a['ticket']
print ticket, 'ticket'
return ticket, wechat.show_qrcode(ticket)
import json
import requests
def send_group_mnews(open_ids, media_id):
''' 群发消息 '''
link = 'https://api.weixin.qq.com/cgi-bin/message/mass/send?access_token={}'.format(wechat.access_token)
data = {
"touser": open_ids,
"mpnews":{
"media_id":"123dsdajkasd231jhksad"
},
"msgtype":"mpnews"
}
response= requests.post(link, data=json.dumps(data))
return response
def create_article():
link = 'https://api.weixin.qq.com/cgi-bin/media/uploadnews?access_token={}'.format(wechat.access_token)
article = {
"articles": [
{
"thumb_media_id":'vtZ1MJiazhv0FicHsbhOicw7fRKPbKDQtH85oERG82aia2Eicn5BlEoyYZff6KXTgN8X3gYWVeRVx1ZR7bMmhIU7JgQ',
"author":"xxx",
"title":"Happy Day",
"content_source_url":"www.qq.com",
"content":"content",
"digest":"digest",
"show_cover_pic":"1"
},
{
"thumb_media_id":'vtZ1MJiazhv0FicHsbhOicw7fRKPbKDQtH85oERG82aia2Eicn5BlEoyYZff6KXTgN8X3gYWVeRVx1ZR7bMmhIU7JgQ',
"author":"xxx",
"title":"Happy Day",
"content_source_url":"www.qq.com",
"content":"content",
"digest":"digest",
"show_cover_pic":"0"
}
]
}
response= requests.post(link, data=json.dumps(article))
print response.text
return response
def upload_image():
''' '''
link = 'https://api.weixin.qq.com/cgi-bin/media/uploadimg?access_token={}'.format(wechat.access_token)
files = {'media': open('/tmp/meifenfen/static/user/img/logo.png', 'rb')}
return requests.post(link, files=files)
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,334
|
qsq-dm/mff
|
refs/heads/master
|
/util/validators.py
|
# -*- coding: utf-8 -*-
import re
import json
import urllib2
from util.utils import union_dict
phoneprefix = [
'130','131','132','133','134','135','136','137','138','139',
'150','151','152','153','154','155','156','157','158','159',
'170','171','172','173','174','175','176','177','178','179',
'180','181','182','183','184','185','186','187','188','189'
]
phone_prefix_pattern = re.compile('^(%s)\d{8}$' % '|'.join(phoneprefix))
fake_phone = re.compile(r'1000000(\d){4}')
def valid_phone(phone):
return (bool(phone_prefix_pattern.search(phone)) or bool(fake_phone.search(phone))) and phone
def Optional(field):
field.optional = True
return field
class Field(object):
''' '''
def __init__(self, msg=None, **kw):
self.optional = False
self.msg = msg
def validate(self, data):
is_valid = False
result = '请实现此方法'
return is_valid, result
class TextField(Field):
def __init__(self, min_length=None, max_length=None, **kw):
super(TextField, self).__init__(**kw)
self.min_length = min_length;
self.max_length = max_length
def validate(self, data):
is_valid = isinstance(data, (str, unicode)) and \
(self.min_length<=len(data) if self.min_length is not None else True) and \
(self.max_length>=len(data) if self.max_length is not None else True)
result = data
return is_valid, result
class IntChoiceField(Field):
def __init__(self, choices=None, **kw):
super(IntChoiceField, self).__init__(**kw)
self.choices = choices or set()
def validate(self, data):
is_valid = str(str(data) or '').isdigit() and \
int(data) in self.choices
result = int(data) if is_valid else None
return is_valid, result
class BoolChoiceField(Field):
def __init__(self, choices=None, **kw):
super(BoolChoiceField, self).__init__(**kw)
self.choices = choices or set()
def validate(self, data):
is_valid = True
result = bool(data)
return is_valid, result
class BoolIntChoiceField(Field):
def __init__(self, **kw):
super(BoolIntChoiceField, self).__init__(**kw)
def validate(self, data):
is_valid = True
try:
data = bool(int(data))
except:
is_valid = False
return is_valid, data
class ChoiceField(Field):
def __init__(self, choices=None, **kw):
super(ChoiceField, self).__init__(**kw)
self.choices = choices or set()
def validate(self, data):
result = None
is_valid = False
try:
if data in self.choices:
is_valid = True
result = data
except Exception as e:
print str(e)
return is_valid, result
class IntChoicesField(Field):
def __init__(self, choices=None, all=False, **kw):
super(IntChoicesField, self).__init__(**kw)
self.choices = choices or set()
def validate(self, data):
result = None
is_valid = False
try:
result = map(int, data)
if not all:
is_valid = all(int(i) in self.choices for i in result) if self.choices else False
else:
is_valid = True
except Exception as e:
print str(e)
return is_valid, result
class MobileField(Field):
def validate(self, data):
is_valid = valid_phone(data)
result = data
return is_valid, result
class FloatField(Field):
def validate(self, data):
is_valid = False
result = None
try:
result = float(data)
is_valid = True
except Exception as e:
pass
return is_valid, result
class IntField(Field):
def validate(self, data):
is_valid = False
result = None
try:
result = int(data)
is_valid = True
except Exception as e:
pass
return is_valid, result
class JsonField(Field):
def validate(self, data):
is_valid = False
result = None
try:
result = json.loads(urllib2.unquote(data)) if data else []
is_valid = True
except Exception as e:
pass
return is_valid, result
class IdField(Field):
''' 数据库ID字段 '''
def validate(self, data):
is_valid = str(str(data) or '').isdigit()
result = int(data) if is_valid else None
return is_valid, result
class REGField(Field):
def __init__(self, pattern=None, **kw):
self.pattern = pattern
super(REGField, self).__init__(**kw)
def validate(self, value):
try:
valid = False
p = re.compile(self.pattern)
self.p = p
valid = bool(p.search(str(value)))
except:
import traceback
traceback.print_exc()
return valid, value
else:
return valid, value
NOT_EXIST = object()
class Inputs(object):
def __init__(self, schema):
self._schema = schema
def validate(self, raw_data):
self._raw_data = raw_data
result = {}
self._fields_err = {}
for name, validator in self._schema.items():
print name; print validator
val = self._raw_data.get(name, NOT_EXIST)
data = None
err = ''
print val is NOT_EXIST, val, validator.optional, 'optional'
if val is NOT_EXIST:
if not validator.optional:
err = '缺少字段{}'.format(name)
else:
is_valid, data = validator.validate(val)
if not is_valid: err = validator.msg or '{}字段格式错误'.format(name)
if err: self._fields_err[name] = err
result[name] = data
err_str = '\n'.join(self._fields_err.values())
return err_str, result
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,335
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/1c198b1a91cb_.py
|
"""empty message
Revision ID: 1c198b1a91cb
Revises: 15e92c9ccee8
Create Date: 2015-12-14 14:13:25.240489
"""
# revision identifiers, used by Alembic.
revision = '1c198b1a91cb'
down_revision = '15e92c9ccee8'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('trial', sa.Column('start_time', sa.DateTime(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('trial', 'start_time')
### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,336
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/3bed4aa05b5a_.py
|
"""empty message
Revision ID: 3bed4aa05b5a
Revises: 4593874013ba
Create Date: 2015-12-05 17:49:05.566697
"""
# revision identifiers, used by Alembic.
revision = '3bed4aa05b5a'
down_revision = '4593874013ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('wechat_location',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('open_id', sa.String(length=50), nullable=True),
sa.Column('lng', sa.String(length=50), nullable=True),
sa.Column('lat', sa.String(length=50), nullable=True),
sa.Column('create_time', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_wechat_location_open_id'), 'wechat_location', ['open_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_wechat_location_open_id'), table_name='wechat_location')
op.drop_table('wechat_location')
### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,337
|
qsq-dm/mff
|
refs/heads/master
|
/thirdparty/views.py
|
# -*- coding: utf-8 -*-
from flask import request
from flask import Blueprint, render_template, abort
from jinja2 import TemplateNotFound
from thirdparty.wechat import wechat
from settings import CONTACT
from ops.promote import PromoteService
server_verify = Blueprint('server_verify', __name__,
template_folder='templates')
meifenfen_open_ids = set([
"o56qvw-ThtwfthGGlZ-XbH-3fjRc",
"o56qvw-OyM2NRJ6jHQDxsOXdF0Pc",
"o56qvw3FG1tt39dw4i8L0SrpBFCQ",
"o56qvw2lAC5eXqIa87o35EYvtxJw",
"o56qvw0fwSQlUMl4ChyafE4ajKjM",
"o56qvw63JsGh6Lz2BU5cUEYlZNAw",
"o56qvw-cLgV-CLxVyKU3P-zJ0aJk",
"o56qvw-hxHtmnZ8bNGYSxFihTjRk",
"o56qvw-hFbnnIMzQA3ArpsYNRylE",
"o56qvwy77LP82ZWZ8q5Gy-ebCOeU",
"o56qvw0PnwLCK7Okesvhkc7d6UGA",
"o56qvwy2plGL6LeBY-gzFtn6_Yis",
"o56qvwzrgKsuO28J7PKymLChJrSY",
"o56qvwwzTB80JXYJYnqKsEO-vXqE",
"o56qvw4oaWtk600BtO1Tsa6BbAQY",
"o56qvw2buPRaEWJ1TdKLn-HxqyBo",
"o56qvw2u0B7NcHfKseEDy-oDK1bI",
"o56qvw3ppto7QGZq96W5zd4p58YQ",
"o56qvwxvcD7ddq1GoEr0XNyVAyYs",
"o56qvw9XQZ2-JATmeVcdMNveGJzk"
])
def check_signature():
try:
signature = request.args['signature']
timestamp = request.args['timestamp']
nonce = request.args['nonce']
echostr = request.args.get('echostr') or 'success'
if wechat.check_signature(signature=signature, timestamp=timestamp, nonce=nonce):
return echostr
except:
import traceback
traceback.print_exc()
return ''
index_link= '''http://www.meifenfen.com/user/index/'''
q_link= '''http://www.meifenfen.com/static/user/Activities/home.html'''
WELCOME_MSG = '''这位小主,欢迎关注美分分
美分分,为你加分!
“微整形,分期付,美丽加分0负担 ”
<a href='{}'>→点此查看详情</a>
上海大学生寝室设计大赛火热进行中,
2000元现金大奖,坐等你来抢
<a href='{}'>→点此报名领百元大礼包</a>
'''.format(index_link, q_link)
WELCOME_MSG = '''<a href='http://www.meifenfen.com/static/user/login.html'>美分分,为你加分!</a>
'''
'''
http://omgitsmgp.com/2012/10/13/notes-on-flask/
If a URL has a trailing slash, accessing it without one will automatically redirect;
if a URL doesn’t have a trailing slash, accessing it with one will generate a 404.
'''
@server_verify.route('', methods=['POST','GET'])
def wechat_verify():
try:
if request.method == 'GET':
echostr = check_signature()
if echostr: return echostr
return 'error'
elif request.method == 'POST':
if check_signature():
print request.form, 'form'
print(request.data), 'data'
wechat.parse_data(request.data)
message = wechat.get_message()
response = None
print message.type;
print message.__dict__
if message.type=='click' and message.key=='contact_us':
return wechat.response_text(u'客服电话:{}'.format(CONTACT))
if message.type=='subscribe':
#if message.key: #扫码关注
key = None
if message.key:
key = message.key.split('_')[-1]
log_id = PromoteService.log_qr_user(key, message.source)
if not log_id:
PromoteService.set_wechat_status(message.source, 1)
print log_id, key,'-------qrcode sub'
if key:
qrcode = PromoteService.get_qrcode(key)
if qrcode:
if log_id: PromoteService.incr_promote_follow_count(qrcode.promoter_id)
else:
print 'qrcode not exist'
PromoteService.set_user_sex.delay(message.source)
if message.source in meifenfen_open_ids:
return wechat.response_text(WELCOME_MSG)
return wechat.response_text('')
if message.type=='unsubscribe':
PromoteService.set_wechat_status(message.source, 0)
qrcode_user = PromoteService.get_qrcodeuser_by_open_id(message.source)
if qrcode_user:
qrcode = PromoteService.get_qrcode(qrcode_user.qrcode_id)
if qrcode:
PromoteService.incr_promote_unfollow_count(qrcode.promoter_id)
return wechat.response_text('')
if message.type == 'text':
if message.content == 'wechat':
response = wechat.response_text(u'哈哈')
else:
response = wechat.response_text(u'?')
elif message.type == 'image':
response = wechat.response_text(u'图片')
elif message.type == 'location':
print message.longitude, message.latitude #经纬度
open_id = message.source
lng = message.longitude
lat = message.latitude
is_first = not PromoteService.get_first_location(open_id)
PromoteService.log_wechat_location(open_id, lng, lat)
if is_first: PromoteService.set_first_location(open_id, lng, lat)
response = wechat.response_text(u'地理位置')
else:
response = wechat.response_text(u'未知')
return ''
else:
return ''
except Exception as e:
import traceback
traceback.print_exc()
return ''
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,338
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/174b0601e7f1_.py
|
"""empty message
Revision ID: 174b0601e7f1
Revises: c2bb73ecf64
Create Date: 2016-01-05 11:10:04.014741
"""
# revision identifiers, used by Alembic.
revision = '174b0601e7f1'
down_revision = 'c2bb73ecf64'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    # Widen credit_apply.stu_years from int to float (years of study may be
    # fractional). Raw SQL is used instead of op.alter_column.
    ### commands auto generated by Alembic - please adjust! ###
    op.execute('ALTER TABLE credit_apply modify stu_years float')
    ### end Alembic commands ###

def downgrade():
    # Intentionally a no-op: narrowing float back to int would lose data.
    ### commands auto generated by Alembic - please adjust! ###
    pass
    #op.add_column('credit_apply', sa.Column('stu_years', mysql.FLOAT(), nullable=True))
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,339
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/2c01f9e048f7_.py
|
"""empty message
Revision ID: 2c01f9e048f7
Revises: 34a907573645
Create Date: 2015-11-06 09:49:11.763111
"""
# revision identifiers, used by Alembic.
revision = '2c01f9e048f7'
down_revision = '34a907573645'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # FAQ schema: help categories and their entries
    # (help_entry.cat_id -> help_cat.id).
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('help_cat',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('help_entry',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=100), nullable=True),
        sa.Column('cat_id', sa.Integer(), nullable=True),
        sa.Column('content', sa.String(length=10000), nullable=True),
        sa.ForeignKeyConstraint(['cat_id'], ['help_cat.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###

def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Drop the child table first to satisfy the foreign key on cat_id.
    op.drop_table('help_entry')
    op.drop_table('help_cat')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,340
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/e1bfa676445_.py
|
"""empty message
Revision ID: e1bfa676445
Revises: 21e38b1473b2
Create Date: 2015-11-07 16:08:03.127777
"""
# revision identifiers, used by Alembic.
revision = 'e1bfa676445'
down_revision = '21e38b1473b2'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Add two free-text description columns to item.
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('item', sa.Column('doctor_desc', sa.Text(), nullable=True))
    op.add_column('item', sa.Column('surgery_desc', sa.Text(), nullable=True))
    ### end Alembic commands ###

def downgrade():
    # Drop the columns in reverse order of addition.
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('item', 'surgery_desc')
    op.drop_column('item', 'doctor_desc')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,341
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/1b538e70897_.py
|
"""empty message
Revision ID: 1b538e70897
Revises: 19af1cb7edf0
Create Date: 2016-03-04 15:38:47.633307
"""
# revision identifiers, used by Alembic.
revision = '1b538e70897'
down_revision = '19af1cb7edf0'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    # Red-pack draw quota: per-user counter (used/total) plus an append-only
    # log of quota grants (source encodes where the grant came from).
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('rd_draw_counter',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('used', sa.Integer(), nullable=True),
        sa.Column('total', sa.Integer(), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('rd_draw_counter_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('count', sa.Integer(), nullable=True),
        sa.Column('source', mysql.TINYINT(display_width=1), nullable=False),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###

def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('rd_draw_counter_log')
    op.drop_table('rd_draw_counter')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,342
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/32a72ba0ce03_.py
|
"""empty message
Revision ID: 32a72ba0ce03
Revises: e1bfa676445
Create Date: 2015-11-09 11:37:08.933073
"""
# revision identifiers, used by Alembic.
revision = '32a72ba0ce03'
down_revision = 'e1bfa676445'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Promotional activities and the many-to-many link to items; also flags
    # whether an item carries an extra fee.
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('activity',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=300), nullable=True),
        sa.Column('desc', sa.String(length=1000), nullable=True),
        sa.Column('start_time', sa.DateTime(), nullable=True),
        sa.Column('end_time', sa.DateTime(), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('activity_item',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('activity_id', sa.Integer(), nullable=True),
        sa.Column('item_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['activity_id'], ['activity.id'], ),
        sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column(u'item', sa.Column('has_fee', sa.Boolean(), nullable=True))
    ### end Alembic commands ###

def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'item', 'has_fee')
    # Drop the link table before the parent activity table (FK order).
    op.drop_table('activity_item')
    op.drop_table('activity')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,343
|
qsq-dm/mff
|
refs/heads/master
|
/ops/room_design.py
|
# -*- coding: utf-8 -*-
from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy import func
from util.sqlerr import SQL_DUPLICATE
from util.sqlerr import SQL_DUPLICATE_PHONE
from util.utils import convert_locaton
from util.utils import dt_obj
from models import db
from models import School
from models import RoomDesignDetail
from models import RoomDesignVotePrivilege
from models import RoomDesignVoteLog
from ops.utils import get_items
from ops.utils import get_page
from ops.utils import count_items
from ops.cache import RoomDesignVoteCounter
from thirdparty.qn import upload_img
from settings import celery
class RoomDesignService(object):
    """Data-access helpers for the dorm-room design contest:
    room entries, vote privileges, vote logs and per-school picture counts.
    """

    @staticmethod
    def create_room(user_id, room_name, applyer_name, apply_no, phone, addr, school_id, pics):
        """Create a contest entry and initialise its vote counter.

        Returns the new room id, or None when the insert fails for a
        non-duplicate reason. Raises AssertionError when the failure is a
        duplicate room name / phone / apply number (unique constraint).
        """
        try:
            room = RoomDesignDetail(
                user_id=user_id, room_name=room_name, applyer_name=applyer_name,
                apply_no=apply_no,
                phone=phone, addr=addr,
                school_id=school_id, pics=pics or None
            )
            db.session.add(room)
            db.session.commit()
            RoomDesignVoteCounter.init(room.id)
            return room.id
        except Exception as e:
            import traceback
            traceback.print_exc()
            db.session.rollback()
            if SQL_DUPLICATE.search(str(e)):
                # NOTE(review): assert is stripped under -O; callers appear to
                # rely on the AssertionError message, so it is kept as-is.
                assert 0, '寝室名或者手机号码或申请编号已存在'

    @staticmethod
    def get_room_dict_by_id(room_id):
        """Return the room as a dict, or None when it does not exist."""
        room = RoomDesignDetail.query.filter(RoomDesignDetail.id == room_id).first()
        if room:
            return room.as_dict()

    @staticmethod
    def update_room(where, **kw):
        """Bulk-update rooms matching *where*; returns the affected row count."""
        count = RoomDesignDetail.query.filter(where).update(kw)
        db.session.commit()
        return count

    @staticmethod
    def get_room(where):
        """Return the first room matching *where* (model instance or None)."""
        return RoomDesignDetail.query.filter(where).first()

    @staticmethod
    def incr_room_vote(room_id, count):
        """Increase a room's vote count by *count* with a server-side
        expression (no read-modify-write race); returns rows updated."""
        data = {
            'vote_count': RoomDesignDetail.vote_count + count
        }
        updated = RoomDesignDetail.query.filter(RoomDesignDetail.id == room_id).update(data)
        db.session.commit()
        return updated

    @staticmethod
    def get_paged_rooms(**kw):
        """Paginated room listing; paging kwargs are forwarded to get_page()."""
        return get_page(RoomDesignDetail, {}, **kw)

    @staticmethod
    def get_user_vote_privilede(user_id):
        """Summarise the user's three vote privileges.

        Sources 1 and 2 come from RoomDesignVotePrivilege rows; status -1
        means never granted. Source 3 (the daily free vote) is derived from
        the latest source-3 vote log: status 1 means already used today.

        Note: the method name typo ('privilede') is preserved because
        external callers use it.
        """
        privileges = RoomDesignVotePrivilege.query.filter(
            RoomDesignVotePrivilege.user_id == user_id).all()
        privileges_by_source = {p.source: p for p in privileges}
        query = and_(
            RoomDesignVoteLog.user_id == user_id,
            RoomDesignVoteLog.source == 3
        )
        vote = RoomDesignVoteLog.query.filter(query).order_by(
            RoomDesignVoteLog.id.desc()).first()
        data = []
        # Sources 1 and 2 only (range upper bound is exclusive).
        for source in range(1, 3):
            privilege = privileges_by_source.get(source)
            data.append({
                'id': source,
                'status': privilege.status if privilege else -1,
            })
        # The free vote resets daily: compare the date part of the timestamps.
        used_today = bool(vote and str(vote.create_time)[:10] == str(dt_obj.now())[:10])
        data.append({
            'id': 3,
            'status': 1 if used_today else 0
        })
        return data

    @staticmethod
    def add_user_vote_privilege(user_id, source):
        """Grant a vote privilege at most once per (user, source).

        source: 1 credit vote, 2 completed-order vote, 3 ordinary vote.
        Returns the new privilege id, or None if it already existed.
        """
        query = and_(
            RoomDesignVotePrivilege.user_id == user_id,
            RoomDesignVotePrivilege.source == source
        )
        if RoomDesignVotePrivilege.query.filter(query).first():
            return
        privilege = RoomDesignVotePrivilege(user_id=user_id, source=source)
        db.session.add(privilege)
        db.session.commit()
        return privilege.id

    @staticmethod
    def update_vote_privilege_status(user_id, source):
        """Mark an unused privilege as used (status 0 -> 1); returns rows updated."""
        query = and_(
            RoomDesignVotePrivilege.user_id == user_id,
            RoomDesignVotePrivilege.source == source,
            RoomDesignVotePrivilege.status == 0
        )
        count = RoomDesignVotePrivilege.query.filter(query).update({'status': 1})
        db.session.commit()
        return count

    @staticmethod
    def add_vote_log(room_id, user_id, source):
        """Record one vote; returns the log id."""
        log = RoomDesignVoteLog(room_id=room_id, user_id=user_id, source=source)
        db.session.add(log)
        db.session.commit()
        return log.id

    @staticmethod
    def count_rooms(where=None):
        """Count room entries, optionally filtered by *where*."""
        return count_items(RoomDesignDetail, where)

    @staticmethod
    def today_voted(user_id):
        """True when the user has already spent today's free vote (source 3)."""
        query = and_(
            RoomDesignVoteLog.user_id == user_id,
            RoomDesignVoteLog.source == 3
        )
        vote = RoomDesignVoteLog.query.filter(query).order_by(RoomDesignVoteLog.id.desc()).first()
        return vote and str(vote.create_time)[:10] == str(dt_obj.now())[:10]

    @staticmethod
    def count_school_pics(school_id):
        """Total uploaded picture count across a school's rooms.

        Fix: this method was defined twice in the original class body; the
        earlier definition (rooms-with-pics * 4) was dead code because this
        later one shadowed it, so the duplicate has been removed.
        """
        result = db.session.query(func.sum(RoomDesignDetail.pics_count)).filter(
            RoomDesignDetail.school_id == school_id).scalar()
        return int(result or 0)

    @staticmethod
    def set_school_pics_count(school_id):
        """Recompute and persist School.pics_count (participation metric);
        returns the number of School rows updated."""
        result = db.session.query(func.sum(RoomDesignDetail.pics_count)).filter(
            RoomDesignDetail.school_id == school_id).scalar()
        data = {
            'pics_count': int(result or 0)
        }
        count = School.query.filter(School.id == school_id).update(data)
        db.session.commit()
        return count
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,344
|
qsq-dm/mff
|
refs/heads/master
|
/util/sign.py
|
# -*- coding: utf-8 -*-
import os
import time
import hashlib
from flask import request
from settings import ADMIN_COOKIE_KEY
from settings import HOSPITAL_COOKIE_KEY
from settings import PROMOTE_COOKIE_KEY
from settings import SECRET_USER_COOKIE
from settings import DOMAIN
def sign_user(user_id):
    """Sign a user id for cookie storage as '<id>.<md5(id + secret)>'.

    Returns None for a falsy user_id.
    """
    if not user_id:
        return None
    uid = str(user_id)
    digest = hashlib.md5()
    digest.update(uid + SECRET_USER_COOKIE)
    return '%s.%s' % (uid, digest.hexdigest())
def extract_user_id(sign_user_str):
    """Return the user-id portion (text before the first '.') of a signed
    cookie value; '' for falsy input."""
    if not sign_user_str:
        return ''
    head, _, _ = sign_user_str.partition('.')
    return head
def get_cookie(name):
    """Fetch a request cookie by name, falling back to the environment
    variable with the upper-cased name when the cookie is absent/empty."""
    value = request.cookies.get(name)
    if value:
        return value
    return os.environ.get(name.upper())
def set_cookie(response, key, val, expire=86400*30):
    # Replace any existing domain-scoped cookie, then set the new value.
    # expire: lifetime in seconds (default 30 days).
    response.delete_cookie(key, domain=DOMAIN)
    response.set_cookie(key, str(val), expire, DOMAIN)
def del_cookie(response, key):
    # Remove the domain-scoped cookie, then also expire any host-only copy
    # (second call sets no domain on purpose).
    response.delete_cookie(key, domain=DOMAIN)
    response.set_cookie(key, expires=0)
# Separator between the three token fields: name, timestamp, digest.
TOKEN_DLIMITER = ','

def check_token(token_str, key=ADMIN_COOKIE_KEY):
    """Validate a token produced by gen_token().

    Returns (is_valid, name); (False, None) when the token is malformed.
    """
    token_args = (token_str or '').split(TOKEN_DLIMITER)
    if len(token_args)!=3: return False, None
    name, time_str, token = token_args
    md5 = hashlib.new("md5")
    # Digest input must mirror the one built in gen_token().
    data = '.'.join((unicode(name), unicode(time_str), key))
    md5.update(str(data))
    access_token = md5.hexdigest()
    # NOTE(review): '==' is not a constant-time comparison; consider
    # hmac.compare_digest to avoid timing side channels — confirm threat model.
    is_valid = token==access_token
    return is_valid, name
def check_hospital_token(token_str):
    """Validate a hospital-portal token; returns (is_valid, name)."""
    return check_token(token_str, key=HOSPITAL_COOKIE_KEY)

def check_promote_token(token_str):
    """Validate a promoter-portal token; returns (is_valid, name)."""
    return check_token(token_str, key=PROMOTE_COOKIE_KEY)
def gen_token(name, key=ADMIN_COOKIE_KEY):
    """Build a signed access token '<name>,<timestamp>,<md5(name.timestamp.key)>'.

    The result round-trips through check_token(), which recomputes the digest
    from the embedded name and timestamp.

    Fix: removed a dead `data = {}` assignment that was immediately
    overwritten, and a stale commented-out `encode` call.
    """
    name = unicode(name)
    current_time = unicode(int(time.time()))
    md5 = hashlib.new("md5")
    # Digest input must mirror the one rebuilt in check_token().
    data = name + '.' + current_time + '.' + key
    md5.update(str(data))
    access_token = md5.hexdigest()
    token = TOKEN_DLIMITER.join([name, current_time, access_token])
    return token
def gen_hospital_token(name):
return gen_token(name, HOSPITAL_COOKIE_KEY)
def gen_promote_token(name):
    '''Issue a promotion-portal access token for *name*.'''
    return gen_token(name, key=PROMOTE_COOKIE_KEY)
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,345
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/200344fc698d_.py
|
"""empty message
Revision ID: 200344fc698d
Revises: 3ae16db9c83e
Create Date: 2016-01-27 15:15:16.187355
"""
# revision identifiers, used by Alembic.
revision = '200344fc698d'
down_revision = '3ae16db9c83e'
from alembic import op
import sqlalchemy as sa
def upgrade():
    '''Add the nullable room_design_detail.phone column (Alembic autogenerated).'''
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('room_design_detail', sa.Column('phone', sa.String(length=30), nullable=True))
    ### end Alembic commands ###


def downgrade():
    '''Revert: drop room_design_detail.phone.'''
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('room_design_detail', 'phone')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,346
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/33a1596e092f_.py
|
"""empty message
Revision ID: 33a1596e092f
Revises: 2c06aa19dd1a
Create Date: 2015-11-26 14:02:10.367734
"""
# revision identifiers, used by Alembic.
revision = '33a1596e092f'
down_revision = '2c06aa19dd1a'
from alembic import op
import sqlalchemy as sa
def upgrade():
    '''Add the nullable activity_item.image column (Alembic autogenerated).'''
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('activity_item', sa.Column('image', sa.String(length=300), nullable=True))
    ### end Alembic commands ###


def downgrade():
    '''Revert: drop activity_item.image.'''
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('activity_item', 'image')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,347
|
qsq-dm/mff
|
refs/heads/master
|
/user/trial.py
|
# -*- coding: utf-8 -*-
from flask import request
from sqlalchemy import and_
from sqlalchemy import or_
from models import *
from models import Order
from ops.order import OrderService
from ops.trial import TrialService
from util.utils import dt_obj
from util.utils import date_to_datetime
from util.utils import delta_time_str
from util.utils import jsonify_response
from util.decorators import wechat_loggin_dec
from util.validators import Inputs
from util.validators import Optional
from util.validators import IdField
from util.validators import TextField
from util.validators import MobileField
from util.validators import IntChoiceField
from ops.bulks import fetch_user_refs
from ops.coupon import CouponService
from constants import ResponseCode
def set_trial_apply_status(apply, comment=None, order=None):
    '''Derive the display status of a trial application, mutating apply['status'].

    Status codes: 0 awaiting review; 1 won the trial, write a review;
    2 won the trial, place the order; 3 ordered, write a review;
    4 view the review; 5 did not qualify.

    :param apply: application dict with 'status', 'cat' and nested 'trial'
                  (with 'total' and 'sent').
    :param comment: truthy when the user already wrote a review.
    :param order: truthy when the user already placed the coupon order.
    '''
    # Won a coupon-type (cat==1) trial: next step is placing the order.
    if apply['status'] == 1 and apply['cat'] == 1:
        apply['status'] = 2
    if apply['status'] == 1 and comment:
        apply['status'] = 4
    if apply['status'] == 2 and order:
        # BUG fix: original used '==' (a no-op comparison) instead of '=',
        # so the status never advanced from 2 to 3 after ordering.
        apply['status'] = 3
    if apply['status'] == 3 and comment:
        apply['status'] = 4
    # All units handed out while still pending -> did not qualify.
    if apply['status'] == 0 and apply['trial']['total'] == apply['trial']['sent']:
        apply['status'] = 5
trial_list_validator = Inputs(
    {
        'cat': IntChoiceField(choices=[1, 2], msg='试用分类'),
        'offset': Optional(TextField(min_length=0, max_length=10000, msg='分页参数')),
    }
)


@wechat_loggin_dec(required=False, validator=trial_list_validator, app=True)
def trial_list():
    '''Paged trial listing: cat==1 -> running trials, cat==2 -> past trials.

    The page cursor is the last row's sort_order (current) or end_time (past).
    '''
    offset = request.valid_data.get('offset')
    cat = request.valid_data.get('cat')  # 1: current trials, 2: past trials
    if cat == 1:
        sort_dir, sort_col = 'ASC', 'sort_order'
        where = and_(
            Trial.end_time > dt_obj.now(),
            Trial.start_time < dt_obj.now()
        )
    else:
        sort_dir, sort_col = 'DESC', 'end_time'
        where = Trial.end_time <= dt_obj.now()
    fields = ['id', 'sort_order', 'cat_str', 'title', 'total', 'apply_count', 'end_time', 'image']
    has_more, infos = TrialService.get_paged_trials(
        _sort_dir=sort_dir, _sort=sort_col, where=where, offset=offset, fields=fields)
    # Humanize the remaining/elapsed time for each trial.
    for info in infos:
        end_dt = date_to_datetime(str(info['end_time']), format='%Y-%m-%d %H:%M:%S')
        info['end_time_str'] = delta_time_str(end_dt)
    offset = ''
    if infos:
        offset = str(infos[-1]['end_time'] if cat == 2 else infos[-1]['sort_order'])
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'infos': infos,
        'offset': offset,
        'has_more': has_more
    })
my_trial_list_validator = Inputs(
    {
        'offset' : Optional(TextField(min_length=0, max_length=10000, msg='分页参数')),
    }
)


@wechat_loggin_dec(validator=my_trial_list_validator)
def my_trial_list():
    '''Paged list of the current user's trial applications.

    Each row is annotated with its trial info, the item linked to its coupon,
    and a derived display status (see set_trial_apply_status).
    '''
    offset = request.valid_data.get('offset')
    where = TrialApply.user_id == request.user_id
    fields = ['id', 'cat', 'coupon_id', 'trial_id', 'status', 'create_time']
    has_more, infos = TrialService.get_paged_apply_user_list(where=where, fields=fields, offset=offset)
    trial_ids = [i['trial_id'] for i in infos]
    user_coupon_ids = [i['coupon_id'] for i in infos if i['coupon_id']]
    # Bulk-load the trials referenced by the applications.
    where = Trial.id.in_(trial_ids)
    fields = ['id', 'cat_str', 'title', 'total', 'apply_count', 'end_time', 'image', 'sent']
    _, trials = TrialService.get_paged_trials(where=where, fields=fields)
    # The user's own review per trial (trial_id -> comment id).
    where = and_(
        TrialComment.trial_id.in_(trial_ids),
        TrialComment.user_id == request.user_id
    )
    _, comments = TrialService.get_paged_trial_comments(where=where)
    trial_comment_map = {i['trial_id']: i['id'] for i in comments}
    trials_map = {i['id']: i for i in trials}
    # Orders placed with the trial coupons (coupon_id -> placed flag).
    where = Order.coupon_id.in_(user_coupon_ids)
    _, orders = OrderService.get_paged_orders(where=where)
    coupon_order_map = {i['coupon_id']: 1 for i in orders}
    where = UserCoupon.id.in_(user_coupon_ids)
    _, user_coupons = CouponService.get_paged_user_coupons(where=where)
    coupon_item_map = {i['id']: i['item_id'] for i in user_coupons}
    for info in infos:
        info['trial'] = trials_map[info['trial_id']]
    for info in infos:
        coupon_id = info['coupon_id']
        # BUG fix: original looked up the literal string 'coupon_id', so the
        # order flag was always None and status never advanced from 2 to 3.
        order = coupon_order_map.get(coupon_id)
        info['item_id'] = coupon_item_map.get(coupon_id)
        set_trial_apply_status(
            info, comment=trial_comment_map.get(info['trial']['id']), order=order)
    offset = str(infos[-1]['id']) if infos else ''
    result = {
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'infos': infos,
        'has_more': has_more,
        'offset': offset
    }
    return jsonify_response(result)
comment_trial_validator = Inputs(
    {
        'trial_id': IdField(msg='试用id'),
        'content': TextField(min_length=0, max_length=10000, msg='试用体会'),
        'photos': Optional(TextField(min_length=0, max_length=10000, msg='图片')),
    }
)


@wechat_loggin_dec(validator=comment_trial_validator)
def comment_trial():
    '''Post the current user's review for a trial they won (status 1 required).'''
    data = request.valid_data
    trial_id = data.get('trial_id')
    content = data.get('content')
    photos = data.get('photos')
    assert TrialService.get_trial(trial_id), '试用商品不存在'
    apply = TrialService.get_user_apply(request.user_id, trial_id)
    assert apply, '请先提交申请'
    assert apply['status'] == 1, '您未获得试用资格'
    TrialService.comment(trial_id, request.user_id, content, photos)
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '发表成功'
    })
apply_trial_validator = Inputs(
    {
        'trial_id': IdField(msg='试用id'),
        'sex': IntChoiceField(choices=[1, 2], msg='性别'),
        'name': TextField(min_length=0, max_length=10000, msg='申请人名字'),
        'phone': MobileField(msg='申请人手机号'),
        'content': TextField(min_length=0, max_length=10000, msg='申请理由'),
        'addr': TextField(min_length=0, max_length=10000, msg='宿舍地址'),
        'school': TextField(min_length=0, max_length=10000, msg='学校'),
    }
)


@wechat_loggin_dec(required=True, validator=apply_trial_validator)
def apply_trial():
    '''Submit a trial application for the current user and bump the apply counter.'''
    data = request.valid_data
    trial_id = data.get('trial_id')
    trial = TrialService.get_trial(trial_id)
    assert trial, '试用不存在'
    assert trial['end_time'] > dt_obj.now(), '试用已结束'
    TrialService.add_apply(
        request.user_id, data.get('name'), data.get('phone'), data.get('school'),
        trial_id, data.get('content'), data.get('sex'), data.get('addr'))
    TrialService.incr_trial_apply_count(trial_id)
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '申请成功'
    })
trial_comment_list_validator = Inputs(
    {
        'trial_id': IdField(msg='试用id'),
        'offset': Optional(TextField(min_length=0, max_length=10000, msg='分页参数'))
    }
)


@wechat_loggin_dec(required=False, validator=trial_comment_list_validator)
def trial_comment_list():
    '''Paged reviews for a trial, each tagged with author info and school.'''
    trial_id = request.valid_data.get('trial_id')
    offset = request.valid_data.get('offset')
    has_more, infos = TrialService.get_paged_trial_comments(
        where=TrialComment.trial_id == trial_id, offset=offset)
    commenter_ids = [item['user_id'] for item in infos]
    fetch_user_refs(infos, fields=['id', 'name', 'avatar'])
    # Each reviewer necessarily applied, so the school comes from their application.
    applies = TrialService.get_trial_applies_by_user_ids(trial_id, commenter_ids)
    school_by_user = {item['user_id']: item['school'] for item in applies}
    for item in infos:
        item['school'] = school_by_user[item['user']['id']]
    next_offset = infos[-1]['id'] if infos else ''
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'infos': infos,
        'offset': next_offset
    })
trial_applyers_validator = Inputs(
    {
        'trial_id' : IdField(msg='试用id'),
        'offset' : Optional(TextField(min_length=0, max_length=10000, msg='分页参数'))
    }
)


@wechat_loggin_dec(required=False, validator=trial_applyers_validator)
def trial_applyers():
    '''Paged applicants of a trial, winners (status desc) first.

    The page cursor is "<status>_<id>" of the last row; rows are ordered on
    (status desc, id desc), so the next page is "same status with a smaller
    id, or any lower status".
    '''
    trial_id = request.valid_data.get('trial_id')
    offset = request.valid_data.get('offset')
    parts = (offset or '').split('_')
    # Fix: removed a stray Python-2-only debug statement ('print offset, length').
    if offset and len(parts) == 2:
        status, info_id = int(parts[0]), int(parts[1])
        where = or_()
        where.append(
            and_(
                TrialApply.status == status,
                TrialApply.id < info_id
            )
        )
        # After the winners (status 1) come the pending applications (status 0).
        if status == 1:
            where.append(TrialApply.status == 0)
        where = and_(where, TrialApply.trial_id == trial_id)
    else:
        where = TrialApply.trial_id == trial_id
    fields = ['id', 'school', 'status', 'user_id', 'create_time']
    order_by = TrialApply.status.desc(), TrialApply.id.desc()
    has_more, infos = TrialService.get_paged_apply_user_list(
        order_by=order_by, where=where, fields=fields
    )
    fetch_user_refs(infos, fields=['id', 'name', 'avatar'])
    offset = ''
    if infos:
        offset = '{}_{}'.format(infos[-1]['status'], infos[-1]['id'])
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'infos': infos,
        'has_more': has_more,
        'offset': offset
    })
trial_detail_validator = Inputs(
    {
        'trial_id' : IdField(msg='试用id'),
    })


@wechat_loggin_dec(required=False, validator=trial_detail_validator)
def get_trial_detail():
    '''Trial detail page data: the trial, the viewer's application (if any) and
    a derived status (-1 not applied, 6 ended, else the application status).'''
    trial_id = request.valid_data.get('trial_id')
    trial = TrialService.get_trial(trial_id)
    # Robustness fix: fail with the same user-facing assertion used by sibling
    # handlers instead of a TypeError on trial['end_time'] for a bad trial_id.
    assert trial, '试用不存在'
    apply = TrialService.get_trial_apply(request.user_id, trial_id)
    status = -1  # not applied yet
    item_id = 0
    if apply:
        comment = TrialService.get_trial_comment(trial_id, request.user_id)
        order = OrderService.get_order_by_coupon_id(apply['coupon_id'])
        where = Trial.id == apply['trial_id']
        _, trials = TrialService.get_paged_trials(where=where, fields=None)
        apply['trial'] = trials[0]
        set_trial_apply_status(apply, comment, order)
        status = apply['status']
        coupon = CouponService.get_user_coupon_by_id(apply['coupon_id']) if apply['coupon_id'] else None
        if coupon:
            item_id = coupon['item_id']
    elif trial['end_time'] < dt_obj.now():
        status = 6  # trial has ended
    result = {
        'trial': trial,
        'item_id': item_id,
        'status': status,
        'apply': apply,
        'code': ResponseCode.SUCCESS,
        'msg': ''
    }
    return jsonify_response(result)
@wechat_loggin_dec()
def get_history_apply():
    '''Return the user's most recently submitted application (for form prefill).'''
    latest = TrialService.get_latest_apply(request.user_id)
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'data': latest if latest else {}
    })
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,348
|
qsq-dm/mff
|
refs/heads/master
|
/app.py
|
# -*- coding: utf-8 -*-
# Application entry point: builds the Flask app and wires up all blueprints.
import sys
reload(sys)
sys.setdefaultencoding('utf-8')  # Python 2: default to UTF-8 for implicit str<->unicode
from flask import Flask
from flask import request
from flask import render_template
from flask import send_from_directory
from flask.ext.sqlalchemy import SQLAlchemy
from models import db
from util.utils import jsonify_response
from thirdparty.views import server_verify
from thirdparty.wechat import wechat
from ops.cache import WechatTokenCache
wechat.set_cache(WechatTokenCache)  # install the WeChat access-token cache backend
from user.urls import user_api
from user.api_urls import user_api as user_app_api
from admin.urls import admin_api
from hospital.urls import hospital_api
from promote.urls import promote_api
from constants import ResponseCode
app = Flask(__name__)
close_session = lambda response: db.session.close() or response
app.after_request(close_session)  # close the SQLAlchemy session when each request ends
# WeChat server callback
app.register_blueprint(server_verify, url_prefix='/wx_callback')
# user-facing web endpoints
app.register_blueprint(user_api, url_prefix='/user')
# user-facing mobile-app API
app.register_blueprint(user_app_api, url_prefix='/api')
# admin console
app.register_blueprint(admin_api, url_prefix='/admin')
# hospital portal
app.register_blueprint(hospital_api, url_prefix='/hospital')
# promotion endpoints
app.register_blueprint(promote_api, url_prefix='/promote')
from user.views import login_link
from user.views import wechat_room_link
app.add_url_rule('/static/user/login.html', 'login_link', login_link, methods=['POST','GET'])
app.add_url_rule('/static/user/Activities/home.html', 'wechat_room_link', wechat_room_link, methods=['POST','GET'])
@app.errorhandler(500)
def internal_error(exception):
    '''Global 500 handler: log the traceback, then reply JSON for app clients
    or an HTML error page for browser clients.'''
    print '-'*80
    print(exception), 'internal_error'
    print '-'*80
    import traceback
    traceback.print_exc()
    # request.is_app is set by upstream middleware for mobile-app requests.
    if getattr(request, 'is_app', False):
        return jsonify_response({'msg':'服务器异常', 'code': ResponseCode.SERVER_ERROR})
    else:
        return render_template('server_error.html'), 500
@app.route('/',methods=['POST','GET'])
def pc_index():
    '''Serve the PC home page.'''
    # Dead-code fix: removed an unreachable render_template('meifenfen.html')
    # that followed this return in the original.
    return send_from_directory('static/pc/', 'home.html')
@app.route('/mobile/',methods=['POST','GET'])
def mobile_index():
    '''Serve the mobile home page (same static page as the PC index).'''
    # Dead-code fix: removed an unreachable render_template('meifenfen.html')
    # that followed this return in the original.
    return send_from_directory('static/pc/', 'home.html')
if __name__ == "__main__":
from settings import RUN_PORT
from werkzeug.serving import run_simple
print RUN_PORT
run_simple('0.0.0.0', RUN_PORT, app, use_reloader=True, use_debugger=True)
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,349
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/51aaf1e0ecdd_.py
|
"""empty message
Revision ID: 51aaf1e0ecdd
Revises: a123ae998bf
Create Date: 2015-11-16 10:22:20.483272
"""
# revision identifiers, used by Alembic.
revision = '51aaf1e0ecdd'
down_revision = 'a123ae998bf'
from alembic import op
import sqlalchemy as sa
def upgrade():
    '''Add the nullable hospital.desc and hospital.working_time columns
    (Alembic autogenerated).'''
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('hospital', sa.Column('desc', sa.String(length=10000), nullable=True))
    op.add_column('hospital', sa.Column('working_time', sa.String(length=100), nullable=True))
    ### end Alembic commands ###


def downgrade():
    '''Revert: drop hospital.working_time and hospital.desc.'''
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('hospital', 'working_time')
    op.drop_column('hospital', 'desc')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,350
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/2b097c44cbba_.py
|
"""empty message
Revision ID: 2b097c44cbba
Revises: 4806ede530e3
Create Date: 2015-11-24 10:03:17.516996
"""
# revision identifiers, used by Alembic.
revision = '2b097c44cbba'
down_revision = '4806ede530e3'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('order', sa.Column('user_finished', sa.Boolean(), nullable=True))
### end Alembic commands ###
def downgrade():
    """Revert 2b097c44cbba: drop ``order.user_finished``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('order', 'user_finished')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,351
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/125c0c0cb424_.py
|
"""empty message
Revision ID: 125c0c0cb424
Revises: c855246d7e8
Create Date: 2015-11-03 11:42:59.684984
"""
# revision identifiers, used by Alembic.
revision = '125c0c0cb424'
down_revision = 'c855246d7e8'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Revision 125c0c0cb424: add nullable datetime ``credit_apply.graduate_time``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('credit_apply', sa.Column('graduate_time', sa.DateTime(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert 125c0c0cb424: drop ``credit_apply.graduate_time``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('credit_apply', 'graduate_time')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,352
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/57be38c1806e_.py
|
"""empty message
Revision ID: 57be38c1806e
Revises: 6a2ac421f56
Create Date: 2016-02-24 16:41:00.918666
"""
# revision identifiers, used by Alembic.
revision = '57be38c1806e'
down_revision = '6a2ac421f56'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Revision 57be38c1806e: add body-choice columns to ``credit_apply``.

    Both are nullable 100-char strings; ``body_choice_ids`` presumably holds a
    delimited id list while ``body_choice_text`` holds free text — confirm
    against the model in /models.py.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('credit_apply', sa.Column('body_choice_ids', sa.String(length=100), nullable=True))
    op.add_column('credit_apply', sa.Column('body_choice_text', sa.String(length=100), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert 57be38c1806e: drop both body-choice columns (reverse add order)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('credit_apply', 'body_choice_text')
    op.drop_column('credit_apply', 'body_choice_ids')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,353
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/19af1cb7edf0_.py
|
"""empty message
Revision ID: 19af1cb7edf0
Revises: 5adc2c5e2c4f
Create Date: 2016-03-03 16:30:05.662503
"""
# revision identifiers, used by Alembic.
revision = '19af1cb7edf0'
down_revision = '5adc2c5e2c4f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Revision 19af1cb7edf0: create the ``rd_money_prize`` table.

    Tracks prize amounts with sent/total counters; presumably the red-pack
    cash-prize pool — confirm against /ops/redpack.py.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('rd_money_prize',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('amount', sa.Integer(), nullable=True),
    sa.Column('sent', sa.Integer(), nullable=True),
    sa.Column('total', sa.Integer(), nullable=True),
    sa.Column('create_time', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Revert 19af1cb7edf0: drop ``rd_money_prize``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('rd_money_prize')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,354
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/5329d119ee5f_.py
|
"""empty message
Revision ID: 5329d119ee5f
Revises: 125c0c0cb424
Create Date: 2015-11-04 10:52:05.593655
"""
# revision identifiers, used by Alembic.
revision = '5329d119ee5f'
down_revision = '125c0c0cb424'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Revision 5329d119ee5f: add ``item.is_recommend`` and ``item.status``.

    ``status`` is a MySQL TINYINT(1) and NOT NULL — on a populated table this
    relies on the dialect's implicit default; NOTE(review): no server_default
    is given, confirm the deploy target tolerates this.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('item', sa.Column('is_recommend', sa.Boolean(), nullable=True))
    op.add_column('item', sa.Column('status', mysql.TINYINT(display_width=1), nullable=False))
    ### end Alembic commands ###
def downgrade():
    """Revert 5329d119ee5f: drop ``item.status`` and ``item.is_recommend``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('item', 'status')
    op.drop_column('item', 'is_recommend')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,355
|
qsq-dm/mff
|
refs/heads/master
|
/ops/hospital.py
|
# -*- coding: utf-8 -*-
from collections import defaultdict
from sqlalchemy import and_
from util.sqlerr import SQL_DUPLICATE_NAME
from util.sqlerr import SQL_DUPLICATE_PHONE
from models import db
from models import Item
from ops.utils import get_items
from ops.utils import count_items
from ops.utils import get_page
from models import HospitalUser
class HospitalService(object):
    """Account and item-catalog helpers for hospital-side admin users."""

    @staticmethod
    def check_user(name, passwd):
        """Return a truthy value when *name* exists and *passwd* matches.

        NOTE(review): plaintext password comparison — presumably legacy;
        confirm before reuse. Returns None when the user is not found.
        """
        user = HospitalUser.query.filter(HospitalUser.name == name).first()
        if user is None:
            return user
        return user.passwd == passwd

    @staticmethod
    def create_user(name, passwd, hospital_id):
        """Insert a hospital admin and return its id.

        On a duplicate-name integrity error the transaction is rolled back
        and an AssertionError with a Chinese message is raised (the app's
        flow-control convention); other errors are printed and swallowed,
        yielding None.
        """
        try:
            user = HospitalUser(name=name, passwd=passwd, hospital_id=hospital_id)
            db.session.add(user)
            db.session.commit()
            return user.id
        except Exception as exc:
            db.session.rollback()
            if SQL_DUPLICATE_NAME.search(str(exc)):
                assert 0, '用户名已存在'
            else:
                import traceback
                traceback.print_exc()

    @staticmethod
    def change_passwd(name, new_passwd):
        """Set a new password for *name*; return the number of rows updated."""
        updated = HospitalUser.query.filter(HospitalUser.name == name).update({'passwd': new_passwd})
        db.session.commit()
        return updated

    @staticmethod
    def get_hospital_sub_cat_ids(hospital_id):
        """List every sub-category id across the hospital's items (repeats kept)."""
        rows = db.session.query(Item).filter(Item.hospital_id == hospital_id).all()
        return [sub_id
                for row in rows
                for sub_id in row.as_dict()['sub_cat_id_list']]

    @staticmethod
    def get_hospital_sub_cat_ids_and_count(hospital_id):
        """Map each sub-category id to the set of item ids carrying it."""
        rows = db.session.query(Item).filter(Item.hospital_id == hospital_id).all()
        mapping = defaultdict(set)
        for row in rows:
            data = row.as_dict()
            for sub_id in data['sub_cat_id_list']:
                mapping[sub_id].add(data['id'])
        return mapping

    @staticmethod
    def get_user_by_name(name):
        """Fetch the HospitalUser named *name*, or None."""
        return HospitalUser.query.filter(HospitalUser.name == name).first()

    @staticmethod
    def get_paged_hospital_admin_users(**kw):
        """Page hospital admin users through the generic ``get_page`` helper."""
        return get_page(HospitalUser, {}, **kw)

    @staticmethod
    def count_admin(where=None):
        """Count hospital admin users matching the optional *where* clause."""
        return count_items(HospitalUser, where=where)
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,356
|
qsq-dm/mff
|
refs/heads/master
|
/ops/utils.py
|
# -*- coding: utf-8 -*-
from functools import wraps
from sqlalchemy import func
from util.utils import keep_fields_from_list
from models import db
def get_page(model, condition=None, offset=None, \
        limit=10, fields=None, \
        start=None, end=None, where=None, \
        join=None, order_by=None, \
        extra=None, order_by_case=None, \
        _sort='id', _sort_dir='DESC', no_limit=False):
    """Generic SQLAlchemy pager used across the ops layer.

    Builds a query over *model*, applying (in order) joins, ordering,
    equality filters (*condition* as filter_by kwargs), free-form *where*
    clauses, a keyset *offset* on the ``_sort`` column, and slice-style
    *start*/*end* pagination.

    Returns ``(is_more, items)`` — or ``(is_more, items, extras)`` when
    *extra* (an outer-joined second entity) is given. ``items`` are
    ``as_dict()`` dicts, trimmed to *fields* when provided; ``is_more``
    says whether more than *limit* rows matched (it fetches limit+1).
    """
    if extra:
        # Outer-join a second entity; rows come back as (model, extra) pairs.
        query = db.session.query(model,extra).outerjoin(extra)
    else:
        query = db.session.query(model)
    if join is not None: query = query.join(join)
    if order_by is not None:
        # Explicit ordering wins over the _sort/_sort_dir shortcut.
        if isinstance(order_by, (tuple,list)):
            query = query.order_by(*order_by)
        else:
            query = query.order_by(order_by)
    elif _sort and _sort!='id' and getattr(model, _sort, None): # the first order_by column is the primary sort column
        if _sort_dir=='ASC': order_by = getattr(model, _sort).asc()
        if _sort_dir=='DESC': order_by = getattr(model, _sort).desc()
        # NOTE(review): both guards below test the same condition — the second
        # looks like it was meant as an unconditional secondary sort on id;
        # left untouched to preserve behavior, but worth confirming.
        if order_by_case is None: query = query.order_by(order_by)
        if order_by_case is None: query = query.order_by(model.id.desc())
    else:
        query = query.order_by(model.id.asc()) if _sort_dir=='ASC' else query.order_by(model.id.desc())
    if condition: query = query.filter_by(**condition)
    if where is not None: query = query.filter(where)
    # Keyset pagination: exclude rows at/after the last-seen _sort value.
    if offset: query = query.filter(getattr(model, _sort)< offset) if _sort_dir=='DESC' else query.filter(getattr(model, _sort) > offset)
    if start:query = query.offset(start)
    if end: query = query.limit(end-start)
    items = []
    if order_by_case is not None:
        # CASE-expression ordering is appended last so it takes lowest priority.
        query = query.order_by(order_by_case)
    # Fetch limit+1 rows so is_more can be decided without a COUNT query;
    # slice/no_limit modes fetch everything the query allows.
    data = query.limit(limit+1).all() if not (start or end or no_limit) else query.all()
    extras = None
    if extra is not None:
        # Split (model, extra) pairs; drop pairs whose outer-joined side is NULL.
        extras = [i[1] for i in data if i[1]]
        data = [i[0] for i in data]
    items[:] = tuple(row.as_dict() for row in (data if no_limit else data[:limit]))
    is_more = len(data)>limit
    if fields: keep_fields_from_list(items, fields)
    if extra:
        return is_more, items, extras
    return is_more, items
def get_items(model, ids=None, fields=None, all=False):
    """Fetch rows of *model* whose primary keys are in *ids*, as dicts.

    When the model has a ``status`` column and ``all`` is falsy, hidden rows
    are excluded via ``model.show_status()``. Returns [] for empty/None
    *ids*. *fields*, when given, prunes each dict (via the project's
    ``keep_fields_from_list`` helper — appears to edit in place; confirm).
    Parameter name ``all`` shadows the builtin but is kept for caller
    compatibility.
    """
    query = model.query
    if getattr(model, 'status', None) and not all:
        query = query.filter(model.show_status())
    if not ids:
        return []
    rows = query.filter(model.id.in_(ids)).all()
    result = [row.as_dict() for row in rows]
    if fields:
        keep_fields_from_list(result, fields)
    return result
def get_fields_column(model, fields):
    """Resolve each name in *fields* to the matching attribute on *model*.

    Returns the attributes as a tuple, in the same order as *fields*.
    Missing names raise AttributeError, same as plain ``getattr``.
    """
    columns = []
    for field_name in fields:
        columns.append(getattr(model, field_name))
    return tuple(columns)
def count_items(model, where=None, field='id'):
    """Count rows of *model* via ``COUNT(field)``, optionally filtered.

    *where* is a SQLAlchemy clause applied with ``filter``; the scalar
    count is returned.
    """
    counted_column = getattr(model, field)
    query = db.session.query(func.count(counted_column))
    if where is None:
        return query.scalar()
    return query.filter(where).scalar()
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,357
|
qsq-dm/mff
|
refs/heads/master
|
/sql_profile.py
|
# -*- coding: utf-8 -*-
import sys
reload(sys)  # Python 2 only: re-expose sys.setdefaultencoding, which site.py deletes at startup
sys.setdefaultencoding('utf-8')  # force utf-8 as the implicit str<->unicode codec; breaks on Python 3
import time
from sqlalchemy import event
from sqlalchemy.engine import Engine
@event.listens_for(Engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement,
                          parameters, context, executemany):
    """Engine-wide hook: stamp the start time on the execution context and log the SQL."""
    context._query_start_time = time.time()
    message = "查询开始:\n%s\n查询参数:\n%r" % (statement, parameters)
    print(message)
@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement,
                         parameters, context, executemany):
    """Engine-wide hook: print elapsed wall-clock time for the statement just run."""
    elapsed = time.time() - context._query_start_time
    print("查询耗时: %.02fms\n" % (elapsed*1000))
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,358
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/5853d4187f15_.py
|
"""empty message
Revision ID: 5853d4187f15
Revises: 3f2769d4ed16
Create Date: 2016-01-07 15:41:09.540410
"""
# revision identifiers, used by Alembic.
revision = '5853d4187f15'
down_revision = '3f2769d4ed16'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Revision 5853d4187f15: add nullable 500-char ``item_sub_cat.cat_ids``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('item_sub_cat', sa.Column('cat_ids', sa.String(length=500), nullable=True))
    ### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('item_sub_cat', 'cat_ids')
### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,359
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/4bbb37c90d8c_.py
|
"""empty message
Revision ID: 4bbb37c90d8c
Revises: 13a5889df13
Create Date: 2016-03-09 10:49:01.279517
"""
# revision identifiers, used by Alembic.
revision = '4bbb37c90d8c'
down_revision = '13a5889df13'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user_advice', sa.Column('remark', sa.String(length=300), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user_advice', 'remark')
### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,360
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/2ce138017f09_.py
|
"""empty message
Revision ID: 2ce138017f09
Revises: 38dd6746c99b
Create Date: 2015-12-10 19:14:00.636524
"""
# revision identifiers, used by Alembic.
revision = '2ce138017f09'
down_revision = '38dd6746c99b'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user_coupon', sa.Column('is_trial', sa.Boolean(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user_coupon', 'is_trial')
### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,361
|
qsq-dm/mff
|
refs/heads/master
|
/user/views.py
|
# -*- coding: utf-8 -*-
import os
import time
import json
from itertools import chain
from flask import request
from flask import redirect
from flask import render_template
from flask import send_from_directory
from sqlalchemy import case
from sqlalchemy import and_
from sqlalchemy import or_
from models import db
from models import ItemComment
from models import Item
from models import School
from models import UserCoupon
from models import Order
from models import PeriodPayLog
from models import ItemFav
from models import Repayment
from models import UserCoupon
from models import HelpCat
from models import ActivityItem
from models import CreditApply
from models import ItemSubCat
from models import Hospital
from models import DailyCoupon
from models import BeautyEntry
from util.utils import format_price
from util.utils import deadline_zh
from util.utils import jsonify_response
from util.utils import template_response
from util.utils import trans_list
from util.utils import calc_expire_remain
from util.utils import dt_obj
from util.utils import day_delta
from util.utils import get_current_period
from util.utils import get_next_period
from util.utils import get_due_time
from util.utils import is_delayed
from util.utils import date_to_datetime
from util.utils import get_timestamp
from util.utils import add_months
from util.utils import js_response
from util.utils import get_date_delta
from util.utils import cacl_punish_fee
from util.utils import get_time_str_from_dt
from util.utils import prefix_img_domain
from util.utils import get_delayed_info
from util.utils import get_next_working_day
from util.utils import get_img_key
from util.utils import set_coupon_use_time
from util.utils import format_dt
from util.utils import format_rate
from util.utils import str_to_int_list
from util.sign import get_cookie
from util.sign import set_cookie
from util.decorators import wechat_loggin_dec
from util.validators import Optional
from util.validators import REGField
from util.validators import Inputs
from util.validators import MobileField
from util.validators import TextField
from util.validators import IntChoiceField
from util.validators import IdField
from util.validators import JsonField
from util.validators import ChoiceField
from ops.common import pay_success_action
from ops.common import get_item_activity_price
from ops.bulks import fetch_user_refs
from ops.bulks import fetch_item_refs
from ops.bulks import fetch_item_subcat_refs
from ops.bulks import fetch_min_period_info
from ops.bulks import fetch_hospital_refs
from ops.bulks import fetch_order_refs
from ops.bulks import fetch_coupon_refs
from ops.item import ItemService
from ops.comment import CommentService
from ops.promote import PromoteService
from ops.user import UserService
from ops.order import OrderService
from ops.activity import ActivityService
from ops.log import LogService
from ops.data import DataService
from ops.redpack import RedpackService
from ops.credit import CreditService
from ops.coupon import CouponService
from ops.beauty_tutorial import TutorialService
from ops.bulks import fetch_servicecode_refrence
from constants import ResponseCode
from constants import ORDER_STATUS
from constants import ORDER_STATUS_LABEL
from constants import PAY_METHOD
from constants import REPAYMENT_STATUS
from constants import APPLY_STATUS
from constants import CREDIT_STATUS
from settings import CONTACT
from settings import DEFAULT_CREDIT
from settings import WX_PAY_NOTIFY_URL
from settings import WX_REPAYMENT_NOTIFY_URL
from thirdparty.wx_pay import Notify_pub
from thirdparty.wx_pay import WxPayConf_pub
from thirdparty.wx_pay import UnifiedOrder_pub
from thirdparty.wx_pay import JsApi_pub
from thirdparty.wx_pay import get_wx_pay_params
from thirdparty.wechat import wechat
from thirdparty.wechat import get_jssdk_context
from thirdparty.qn import gen_qn_token
from thirdparty.qn import upload_img
from thirdparty.sms import send_sms_new_order
from settings import SERVER_NAME
from settings import ITEM_ORDER_CHOICES
from settings import HOSPITAL_ORDER_CHOICES
from settings import CAT_ICONS
from settings import CAT_ICONS_ACTIVE
from ops.order import set_order_status
def set_coupon_cat_str(info, item_cats=None, item_subcats=None):
''' 优惠券品类信息 '''
if info['coupon_cat']==0:
info['cat_str'] = '全部适用'
elif info['coupon_cat']==1:
cat = filter(lambda i:i['id']==info['cat_id'], item_cats)[0]
info['cat_str'] = '仅限{}类项目'.format(cat['name'])
elif info['coupon_cat']==2:
print info['sub_cat_id'], [i['id'] for i in item_subcats]
subcat = filter(lambda i:i['id']==info['sub_cat_id'], item_subcats)[0]
info['cat_str'] = '仅限{}项目'.format(subcat['name'])
else:
info['cat_str'] = '指定项目'
@wechat_loggin_dec(required=False, validator=None, app=True)
def user_index():
''' 用户首页 '''
_, recommend_sub_cats = ItemService.get_paged_recommend_subcats(_sort='sort_order', _sort_dir='ASC')
fetch_item_subcat_refs(recommend_sub_cats)
current_activity = ActivityService.get_current_activity() or {}
where = ActivityItem.activity_id==current_activity.get('id')
fields = ('id', 'item_id', 'price')
_, activity_items = ItemService.get_paged_activity_items(fields=fields, where=where, _sort='sort_order', _sort_dir='ASC')
fields = ('id', 'item_id', 'image', 'desc')
where = None
_, recommend_items = ItemService.get_paged_recommend_items(fields=fields, where=where, _sort='sort_order', _sort_dir='ASC')
fields = ['id', 'title', 'price', 'orig_price', 'has_fee', 'support_choice_list']
fetch_item_refs(chain(activity_items, recommend_items), fields=fields)
item_dict_list = [i['item'] for i in chain(activity_items, recommend_items)]
item_list = []
for i in item_dict_list:
if i not in item_list:
item_list.append(i)
fetch_min_period_info(item_list)
banner = [
{'image':'http://7xnpdb.com1.z0.glb.clouddn.com/o_1a32t99l213e55j47fp1v96u80111348368_1467882916861451_480196332_n.jpg', 'link':'/user/login'},
{'image':'http://7xnpdb.com1.z0.glb.clouddn.com/o_1a32t99l213e55j47fp1v96u80111348368_1467882916861451_480196332_n.jpg', 'link':'/user/login'},
{'image':'http://7xnpdb.com1.z0.glb.clouddn.com/o_1a32t99l213e55j47fp1v96u80111348368_1467882916861451_480196332_n.jpg', 'link':'/user/login'}
]
context = {
'code' : ResponseCode.SUCCESS,
'msg' : '',
'activity_items' : activity_items,
'recommend_items' : recommend_items,
'activity' : current_activity,
'banner' : banner,
}
print dir(request)
print request.headers
return jsonify_response(context)
js_sdk_context = get_jssdk_context()
return render_template('user/user_index.html', recommend_sub_cats=recommend_sub_cats, nav={1:'active'}, **js_sdk_context)
item_filters_validator = Inputs(
{
'sub_cat_id' : Optional(IdField(msg='分类id')),
'hospital_id' : Optional(IdField(msg='医院id')),
'city_id' : Optional(IdField(msg='城市id')),
'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数')),
'sort_type' : Optional(IntChoiceField(choices=[1,2,3,4], msg='排序选项')),
}
)
@wechat_loggin_dec(required=False, validator=item_filters_validator, app=True)
def item_filters():
''' 筛选参数列表 '''
sub_cat_id = request.valid_data.get('sub_cat_id')
sort_type = request.valid_data.get('sort_type') or 1
order_choices = [
{'id':1, 'name':'综合排序'},
{'id':2, 'name':'销量优先'},
{'id':3, 'name':'低价优先'},
{'id':4, 'name':'高价优先'},
]
has_more, citys = DataService.get_paged_cities()
cat_id = None
subcat = None
if sub_cat_id:
subcat = ItemService.get_subcat_dict_by_id(sub_cat_id)
sort_type_obj = None
if sort_type:
for i in order_choices:
if i['id'] == sort_type:
sort_type_obj = i
all_cats = ItemService.get_item_cats()
all_sub_cats = ItemService.get_item_subcats()
_, all_recommend_subcats = ItemService.get_paged_recommend_subcats(no_limit=True)
id_order_map = {i['sub_cat_id']:i['sort_order'] for i in all_recommend_subcats}
recommend_subcat_ids = set(i['sub_cat_id'] for i in all_recommend_subcats)
recommend_subcats = filter(lambda i:i['id'] in recommend_subcat_ids, all_sub_cats)
recommend_subcats.sort(key=lambda i: id_order_map[i['id']])
item_cat = [
{
'id': 0,
'name':'推荐',
'sub_cats':recommend_subcats,
'icon' : CAT_ICONS[0],
'icon_active' : CAT_ICONS_ACTIVE[0]
}]
for cat in all_cats:
tmp = {'name': cat.name, 'id': cat.id}
tmp['sub_cats'] = [i for i in all_sub_cats if cat.id in i['cat_id_list']]
tmp['icon'] = CAT_ICONS.get(cat.id) or ''
tmp['icon_active'] = CAT_ICONS_ACTIVE.get(cat.id) or ''
item_cat.append(tmp)
sort_type_obj = sort_type_obj or order_choices[0]
subcat = subcat or item_cat[0]['sub_cats'][0]
city_id = get_current_city_id()
city = None
for the_city in citys:
if the_city['id']==city_id: city = the_city
for i in all_sub_cats:
if i['id'] in recommend_subcat_ids:
i['cat_id_list'].append(0)
city = city or citys[0]
result = {
'order_choices': order_choices,
'data': item_cat,
'all_sub_cats':all_sub_cats,
'citys': citys,
'sort_type_obj':sort_type_obj,
'city': city,
'subcat': subcat
}
#return json.dumps(result).decode('unicode-escape').encode('utf8')
return jsonify_response(result)
hospital_filters_validator = Inputs(
{
'sub_cat_id' : Optional(IdField(msg='分类id')),
'city_id' : Optional(IdField(msg='城市id')),
'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数')),
'sort_type' : Optional(IntChoiceField(choices=[1,2,3,4], msg='排序选项')),
}
)
@wechat_loggin_dec(required=False, validator=hospital_filters_validator, app=True)
def hospital_filters():
''' 筛选参数列表 '''
sub_cat_id = request.valid_data.get('sub_cat_id')
sort_type = request.valid_data.get('sort_type') or 1
order_choices = HOSPITAL_ORDER_CHOICES
has_more, citys = DataService.get_paged_cities()
cat_id = None
subcat = None
if sub_cat_id:
subcat = ItemService.get_subcat_dict_by_id(sub_cat_id)
sort_type_obj = None
if sort_type:
for i in order_choices:
if i['id'] == sort_type:
sort_type_obj = i
all_cats = ItemService.get_item_cats()
all_sub_cats = ItemService.get_item_subcats()
_, all_recommend_subcats = ItemService.get_paged_recommend_subcats(limit=1000)
id_order_map = {i['sub_cat_id']:i['sort_order'] for i in all_recommend_subcats}
recommend_subcat_ids = set(i['sub_cat_id'] for i in all_recommend_subcats)
recommend_subcats = filter(lambda i:i['id'] in recommend_subcat_ids, all_sub_cats)
recommend_subcats.sort(key=lambda i: id_order_map[i['id']])
item_cat = [
{
'id': 0,
'name':'推荐',
'sub_cats':recommend_subcats,
'icon' : CAT_ICONS[0],
'icon_active' : CAT_ICONS_ACTIVE[0]
}]
total_cat = {'id': 0, 'name':'全部', 'cat_id_list': [0]
}
all_sub_cats.insert(0, total_cat)
recommend_subcats.insert(0, total_cat)
for cat in all_cats:
tmp = {'name': cat.name, 'id': cat.id}
tmp['sub_cats'] = [i for i in all_sub_cats if cat.id in i['cat_id_list']]
tmp['icon'] = CAT_ICONS.get(cat.id) or ''
tmp['icon_active'] = CAT_ICONS_ACTIVE.get(cat.id) or ''
item_cat.append(tmp)
sort_type_obj = sort_type_obj or order_choices[0]
subcat = subcat or item_cat[0]['sub_cats'][0]
city_id = get_current_city_id()
city = None
for the_city in citys:
if the_city['id']==city_id: city = the_city
for i in all_sub_cats:
if i['id'] in recommend_subcat_ids:
i['cat_id_list'].append(0)
city = city or citys[0]
result = {
'order_choices': order_choices,
'data': item_cat,
'all_sub_cats':all_sub_cats,
'citys': citys,
'sort_type_obj':sort_type_obj,
'city': city,
'subcat': subcat
}
return jsonify_response(result)
item_list_validator = Inputs(
{
'sub_cat_id' : Optional(IdField(msg='分类id')),
'hospital_id' : Optional(IdField(msg='医院id')),
'city_id' : Optional(IdField(msg='城市id')),
'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数')),
'sort_type' : Optional(IntChoiceField(choices=[1,2,3,4], msg='排序选项')),
}
)
@wechat_loggin_dec(required=False, validator=item_list_validator, app=True)
def item_list():
''' 商品列表 '''
sub_cat_id = request.valid_data.get('sub_cat_id')
hospital_id = request.valid_data.get('hospital_id')
city_id = get_current_city_id()
offset = request.valid_data.get('offset') or ''
sort_type = request.valid_data.get('sort_type') or 1
_sort = 'id'
_sort_dir = 'ASC'
where = and_()
where.append(Item.status==1)
if city_id:
subquery= db.session.query(Hospital.id).filter(Hospital.city_id==city_id).subquery()
where.append(Item.hospital_id.in_(subquery))
if sub_cat_id:
or_query= or_(
Item.sub_cat_ids==sub_cat_id,
Item.sub_cat_ids.like('%,{}'.format(sub_cat_id)),
Item.sub_cat_ids.like('%,{},%'.format(sub_cat_id)),
Item.sub_cat_ids.like('{},%'.format(sub_cat_id))
)
where.append(or_query)
order_by_case = None
offset_id = 0
offset_field= ''
_sort = 'price'
if hospital_id:
_sort='id'
_sort_dir='DESC'
if offset:
offset_id, offset_field = offset.split('_')
offset_id = int(offset_id)
offset_where = Item.id<offset_id
if sort_type==2:
_sort='sold_count'; _sort_dir='DESC'
offset_field = int(offset_field or 10**10)
offset_where = or_(
Item.sold_count<offset_field,
and_(
Item.sold_count<=offset_field,
Item.id<offset_id
)
)
if sort_type==3:
order_by_case = case([(ActivityItem.price>0, ActivityItem.price)], else_=Item.price).asc()
_sort='price'; _sort_dir='ASC'
offset_field = float(offset_field or 0)
offset_where = or_(
Item.price>offset_field,
and_(
Item.price>=offset_field,
Item.id<offset_id
)
)
if sort_type==4:
order_by_case = case([(ActivityItem.price>0, ActivityItem.price)], else_=Item.price).desc()
_sort='price'; _sort_dir='DESC'
offset_field = float(offset_field or 10**10)
offset_where = or_(
Item.price<offset_field,
and_(
Item.price<=offset_field,
Item.id<offset_id
)
)
if offset: where.append(offset_where)
if hospital_id:
where.append(Item.hospital_id==hospital_id)
if offset: where.append(Item.id<offset)
offset = offset
fields = ['id', 'hospital_id', 'title', 'sold_count', 'price', 'orig_price', 'support_choice_list', 'image', 'has_fee']
has_more, items = ItemService.get_paged_items(where=where, order_by_case=order_by_case, fields=fields, _sort=_sort, _sort_dir=_sort_dir)
fetch_min_period_info(items)
fetch_hospital_refs(items, fields=['id','name'])
offset = ''
if items: offset = str(items[-1]['id']) + '_' + (str(items[-1][_sort]) if sort_type !=1 else '')
print offset, 'offset'
result = {
'code' : ResponseCode.SUCCESS,
'msg' : '',
'has_more' : has_more,
'infos' : items,
'offset' : offset
}
return jsonify_response(result)
hospital_list_validator = Inputs(
{
'sub_cat_id' : Optional(IdField(msg='分类id')),
'city_id' : Optional(IdField(msg='城市id')),
'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数')),
'sort_type' : Optional(IntChoiceField(choices=[1,2,3], msg='排序选项')), #1综合 2销量 3好评优先
}
)
@wechat_loggin_dec(required=False, validator=hospital_list_validator, app=True)
def hospital_list():
''' 医院列表 '''
sub_cat_id = request.valid_data.get('sub_cat_id')
city_id = get_current_city_id()
offset = request.valid_data.get('offset') or ''
sort_type = request.valid_data.get('sort_type') or 1
_sort = 'id'
_sort_dir = 'DESC'
where = and_()
where.append(Hospital.status==1)
if city_id:
where.append(Hospital.city_id==city_id)
if sub_cat_id:
query = or_(
Item.sub_cat_ids==sub_cat_id,
Item.sub_cat_ids.like('%,{}'.format(sub_cat_id)),
Item.sub_cat_ids.like('%,{},%'.format(sub_cat_id)),
Item.sub_cat_ids.like('{},%'.format(sub_cat_id))
)
hospital_id_sub = db.session.query(Item.hospital_id).filter(query).subquery()
where.append(Hospital.id.in_(hospital_id_sub))
offset_id = 0
offset_field= ''
_sort = 'sold_count'
if offset:
offset_id, offset_field = offset.split('_')
offset_id = int(offset_id)
offset_where = Hospital.id<offset_id
if sort_type==2:
_sort='sold_count'; _sort_dir='DESC'
offset_field = int(offset_field or 10**10)
offset_where = or_(
Hospital.sold_count<offset_field,
and_(
Hospital.sold_count<=offset_field,
Hospital.id<offset_id
)
)
if sort_type==3:
_sort='rate'; _sort_dir='DESC'
offset_field = float(offset_field or 0)
offset_where = or_(
Hospital.rate>offset_field,
and_(
Hospital.rate>=offset_field,
Hospital.id<offset_id
)
)
if offset: where.append(offset_where)
offset = offset
fields = ['id', 'image', 'name', 'tag_list', 'rate', 'sold_count', 'addr']
has_more, items = ItemService.get_paged_hospitals(where=where, fields=fields, _sort=_sort, _sort_dir=_sort_dir)
_, sub_cats = ItemService.get_paged_sub_cats(limit=1000)
_, cats = ItemService.get_paged_cats(limit=1000)
hospital_ids = [i['id'] for i in items]
hospital_item_count_map = ItemService.count_hospital_items(hospital_ids)
hospital_item_subcat_map= ItemService.get_hospital_item_cats(hospital_ids)
for i in items:
i['rate'] = str(format_rate(i['rate']))
i['item_count'] = hospital_item_count_map.get(i['id']) or 0
subcat_ids = hospital_item_subcat_map.get(i['id']) or []
i['cats'] = ItemService.get_sub_cat_id_name(subcat_ids, sub_cats, cats)
offset = ''
if items:
offset = str(items[-1]['id']) + '_' + (str(items[-1][_sort]) if sort_type !=1 else '')
result = {
'code' : ResponseCode.SUCCESS,
'msg' : '',
'has_more' : has_more,
'infos' : items,
'offset' : offset
}
return jsonify_response(result)
def cacl_need_pay(choice, price, credit, has_fee=True):
if has_fee:
total = format_price((choice.period_fee+1) * price)
else:
total = price
need_pay = 0
if total>credit:
credit_amount = format_price(credit*1.0/(1.0+choice.period_fee))
period_amount = format_price(credit_amount*1.0/choice.period_count)
period_money = format_price(credit*1.0/choice.period_count)
period_fee = format_price(period_money-period_amount)
credit_used = credit
need_pay = format_price(price - period_amount*choice.period_count)
else:
period_money = format_price(total/choice.period_count)
period_fee = format_price((choice.period_fee) * price*1.0/choice.period_count)
period_amount = format_price(period_money-period_fee)
credit_used = total
result = {
'id' : choice.id,
'need_pay' : need_pay,
'period_money' : period_money,
'period_total' : period_money,
'period_fee' : period_fee,
'fee' : choice.period_fee,
'total' : total,
'credit_used' : credit_used,
'credit' : credit,
'period_amount' : period_amount,
'period_count' : choice.period_count
}
return result
item_detail_validator = Inputs(
{
'item_id' : IdField(msg='商品id'),
}
)
@wechat_loggin_dec(required=False, validator=item_detail_validator, app=True)
def item_detail():
''' 商品详情 '''
result = {
'code' : ResponseCode.SUCCESS,
'msg' : ''
}
item_id = request.valid_data.get('item_id')
fields = [
'id', 'title', 'note', 'use_time', 'support_choice_list', 'has_fee', 'direct_buy', 'photo_list', 'hospital_id', 'price', 'orig_price']
item = ItemService.get_item_dict_by_id(item_id, fields=fields)
can_comment = PromoteService.get_fakeuser_by_userid(request.user_id)
has_fav = False
if request.user_id:
has_fav = bool(ItemService.has_fav(item_id, request.user_id))
assert item, '商品不存在'
get_item_activity_price(item)
credit_amount = DEFAULT_CREDIT #预计额度
verified = False #待审核
if request.user_id:
credit = CreditService.init_credit(request.user_id)
credit_amount = format_price(credit.total-credit.used)
verified = bool(credit.status)
apply = CreditService.get_apply_dict_by_userid(request.user_id)
need_pay = 0
if item['price']>credit_amount:
need_pay = format_price(item['price'] - credit_amount)
total_period_amount = credit_amount
else:
total_period_amount = item['price']
period_choices = CreditService.get_period_choices()
choices = []
now = dt_obj.now()
for period_choice in period_choices:
if period_choice.id not in item['support_choice_list']: continue
tmp = cacl_need_pay(period_choice, item['price'], credit_amount, item['has_fee'])
if apply and apply.get('graduate_time') and not apply['graduate_time']>add_months(now, period_choice.period_count+6):
tmp['disabled'] = True
else:
tmp['disabled'] = False
if not total_period_amount: continue
choices.append(tmp)
if True:#item['direct_buy']:
tmp = {
'id' : 0,
'period_amount': 0,
'period_fee' : 0,
'period_total' : item['price'],
'period_count' : 0
}
choices.insert(0, tmp)
choices.sort(key=lambda i:i['period_count'], reverse=False)
where = ItemComment.item_id==item_id
comment_count = CommentService.count_comments(where)
fields = ['id', 'user_id', 'is_anonymous', 'content', 'rate', 'create_time', 'photo_list', 'item_id']
has_more, comment_list = CommentService.get_paged_comments(where=where, limit=1, fields=fields)
fetch_user_refs(comment_list, fields=['id','name','avatar'])
fields = ['id', 'name', 'photo_list', 'working_time', 'phone', 'long_lat', 'desc', 'tag_list', 'addr']
hospital = ItemService.get_hospital_dict_by_id(item['hospital_id'], fields=fields)
result = {
'code' : ResponseCode.SUCCESS,
'msg' : '',
'can_comment': bool(can_comment),
'has_fav' : has_fav,
'pay_choices': choices,
'item' : item,
'hospital' : hospital,
'credit_amount': format_price(credit_amount),
'need_pay' : need_pay,
'verified' : verified,
'total_period_amount': total_period_amount,
'comments' : {
'total' : comment_count,
'infos' : comment_list,
}
}
return jsonify_response(result)
item_comment_list_validator = Inputs(
{
'item_id' : IdField(msg='商品id'),
'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数'))
}
)
@wechat_loggin_dec(required=False, validator=item_comment_list_validator, app=True)
def item_comment_list():
''' 评论列表 '''
item_id = request.valid_data.get('item_id')
item = ItemService.get_item_dict_by_id(item_id, fields=['id', 'image', 'title', 'hospital_id', 'price', 'orig_price'])
assert item, '商品不存在'
get_item_activity_price(item)
hospital = ItemService.get_hospital_dict_by_id(item['hospital_id'], fields=['id', 'name'])
offset = request.valid_data.get('offset')
where = ItemComment.item_id==item_id
fields = ['id', 'is_anonymous', 'user_id', 'item_id', 'is_re_comment', 'photo_list', 'content', 'rate', 'create_time']
has_more, comments = CommentService.get_paged_comments(where=where, offset=offset, fields=fields)
fetch_user_refs(comments, fields=['id','name','avatar'])
offset = str(comments[-1]['id']) if comments else ''
result = {
'code' : ResponseCode.SUCCESS,
'msg' : '',
'item' : item,
'hospital' : hospital,
'has_more' : has_more,
'infos' : comments,
'offset' : offset
}
return jsonify_response(result)
my_item_comment_list_validator = Inputs(
{
'item_id' : IdField(msg='商品id'),
'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数'))
}
)
@wechat_loggin_dec(required=False, validator=my_item_comment_list_validator, app=True)
def my_item_comment_list():
    ''' The current user's own comments on one item, paged — identical to
    item_comment_list but filtered to request.user_id. '''
    item_id = request.valid_data.get('item_id')
    item = ItemService.get_item_dict_by_id(item_id, fields=['id', 'image', 'title', 'hospital_id', 'price', 'orig_price'])
    assert item, '商品不存在'
    get_item_activity_price(item)
    hospital = ItemService.get_hospital_dict_by_id(item['hospital_id'], fields=['id', 'name'])
    offset = request.valid_data.get('offset')
    where = and_(
        ItemComment.item_id==item_id,
        ItemComment.user_id==request.user_id
    )
    fields = ['id', 'is_anonymous', 'user_id', 'is_re_comment', 'item_id', 'photo_list', 'content', 'rate', 'create_time']
    has_more, comments = CommentService.get_paged_comments(where=where, offset=offset, fields=fields)
    fetch_user_refs(comments, fields=['id','name','avatar'])
    # Next-page cursor: last comment id, or empty when the page is empty.
    offset = str(comments[-1]['id']) if comments else ''
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'item' : item,
        'hospital' : hospital,
        'has_more' : has_more,
        'infos' : comments,
        'offset' : offset
    }
    return jsonify_response(result)
# Input schema for user_fav_item: status 1 = add to wish list, 0 = remove.
user_fav_item_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id'),
        'status' : IntChoiceField(choices=[0, 1], msg='是否收藏'),
    }
)
@wechat_loggin_dec(required=True, validator=user_fav_item_validator, app=True)
def user_fav_item():
    ''' Toggle an item on the user's wish list (status 1 = add, 0 = remove). '''
    data = request.valid_data
    target_item = data.get('item_id')
    wanted = data.get('status')
    if wanted:
        ItemService.fav_item(request.user_id, target_item)
        feedback = '添加成功'
    else:
        ItemService.unfav_item(request.user_id, target_item)
        feedback = '已从心愿单中移除'
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': feedback,
    })
# Input schema for user_advice: feedback text plus an optional contact number.
user_advice_validator = Inputs(
    {
        'content' : TextField(min_length=1, max_length=10000, msg='反馈内容'),
        'contact' : Optional(TextField(min_length=1, max_length=100, msg='手机号'))
    }
)
@wechat_loggin_dec(required=False, validator=user_advice_validator, app=True)
def user_advice():
    ''' Record user feedback, optionally with a contact phone number. '''
    data = request.valid_data
    UserService.advice(request.user_id, data.get('content'), data.get('contact'))
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '感谢您的反馈',
    })
# Input schema for user_order_list: cat selects the tab
# (0=all, 1=awaiting payment, 2=awaiting service, 3=awaiting review).
user_order_list_validator = Inputs(
    {
        'cat' : IntChoiceField(choices=[0,1,2,3], msg='订单类型'),
        'offset' : Optional(TextField(min_length=0, max_length=100, msg='分页'))
    }
)
@wechat_loggin_dec(required=True, validator=user_order_list_validator, app=True)
def user_order_list():
    ''' Paged order list for the current user.

    cat filters the tab: 0=all, 1=awaiting payment (unpaid, or paid but
    credit not yet verified), 2=awaiting service, 3=awaiting review
    (finished orders without a comment).  Each order is annotated with
    installment amounts, its service code, its comment id (if any) and a
    display status label; the response `offset` is the last order id.

    Changes: removed a leftover debug print of order_comment_map and the
    unused choice_fee_map local.
    '''
    cat = request.valid_data.get('cat')
    offset = request.valid_data.get('offset')
    where = and_()
    where.append(Order.user_id==request.user_id)
    if cat==1:  # awaiting payment
        where.append(or_(
            Order.status.in_([ORDER_STATUS.NEW_ORDER, ORDER_STATUS.TO_PAY]),
            Order.credit_verified==False,
            )
        )
    elif cat==2:  # awaiting service
        where.append(and_(
            Order.status.in_([ORDER_STATUS.PAY_SUCCESS]),
            Order.credit_verified==True
        ))
    elif cat==3:  # awaiting review: finished orders without a comment yet
        subquery = db.session.query(ItemComment.order_id).filter(ItemComment.order_id>0).subquery()
        where.append(and_(
            Order.user_finished==True,
            Order.credit_verified==True,
            ~Order.id.in_(subquery)
            )
        )
    choices = CreditService.get_period_choices()
    choice_count_map = {i.id: i.period_count for i in choices}
    has_more, order_list = OrderService.get_paged_orders(where=where, offset=offset)
    fetch_item_refs(order_list, fields=['id', 'title', 'image'], keep_id=True)
    for i in order_list:
        # Per-period breakdown; orders without a credit choice are treated
        # as a single period with no fee.
        period_fee_amount = 0
        period_count = 1
        credit_choice_id = i['credit_choice_id']
        i['period_fee'] = 0
        period_money = i['credit_amount']/period_count
        if credit_choice_id:
            period_count = choice_count_map[credit_choice_id]
            period_money = i['credit_amount']/period_count
            period_fee_amount = i['total_fee']/period_count
        i['period_amount'] = format_price(period_money - period_fee_amount)
        i['period_fee'] = format_price(period_fee_amount)
        i['period_count'] = period_count
    fetch_servicecode_refrence(order_list, 'id', dest_key='service_code_dict', keep_id=True)
    order_ids = [order['id'] for order in order_list]
    comments = CommentService.get_comments_by_order_ids(order_ids, user_id=request.user_id)
    order_comment_map = {i['order_id']: i['id'] for i in comments}
    for order in order_list:
        order['comment'] = order_comment_map.get(order['id'])
        set_order_status(order, comment=order_comment_map.get(order['id']), servicecode=order['service_code_dict'])
    trans_list(order_list, 'status', 'status_label', ORDER_STATUS_LABEL, pop=False)
    offset = str(order_list[-1]['id']) if order_list else ''
    fetch_hospital_refs(order_list, fields=['id', 'name', 'phone'])
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'has_more' : has_more,
        'infos' : order_list,
        'offset' : offset
    }
    return jsonify_response(result)
def wx_pay_callback():
    ''' WeChat Pay notification endpoint for order payments.

    Verifies the XML signature, looks up the order by out_trade_no, checks
    the amount (enforced only in production), then dispatches to
    pay_success_action / pay_error_action.  Returns an XML ack WeChat
    understands; already-paid orders are acked immediately (idempotency).
    '''
    xml = request.data
    LogService.log_pay_callback(PAY_METHOD.WECHAT_WEB, xml)
    notify = Notify_pub()
    rs = notify.check_sign(xml)
    re = {}  # NOTE(review): local name shadows the stdlib `re` module
    if not rs:
        re['return_code'] = 'FAIL'
        re['return_msg'] = '签名失败'
        return notify.arrayToXml(re)
    data = notify.get_data()
    result_code = data['result_code']
    order_no = str(data['out_trade_no'])
    total_fee = data['total_fee']  # WeChat reports the amount in fen (cents)
    transaction_id = data['transaction_id']
    order_info = OrderService.get_order_by_orderno(order_no)
    if not order_info:
        re['return_code'] = 'FAIL'
        re['return_msg'] = '订单不存在:'+order_no
        return notify.arrayToXml(re)
    total_price = float(total_fee)/100  # fen -> yuan
    order_price = float(order_info.price)
    # Amount mismatch only rejects in production; dev/test tolerate it.
    if order_price != total_price and (os.environ.get('APP_ENV')=='production'):
        print order_price, total_price, '金额不匹配'
        re['return_code'] = 'FAIL'
        re['return_msg'] = '金额不匹配'
        return notify.arrayToXml(re)
    msg = ''
    # Idempotency: duplicate callbacks for a paid order are acked as-is.
    if (order_info.status==ORDER_STATUS.PAY_SUCCESS):
        re = {'return_code':'SUCCESS','return_msg':'ok'}
        return notify.arrayToXml(re)
    if result_code.upper() == 'FAIL':
        re['return_code'] = 'FAIL'
        pay_error_action(order_info)
    elif result_code.upper()=='SUCCESS':
        re['return_code'] = 'SUCCESS'
        pay_success_action(order_info, transaction_id=transaction_id, pay_method=PAY_METHOD.WECHAT_WEB)
    else:
        # Unknown result code: log it but still ack so WeChat stops retrying.
        print 'wxpay_notify:',result_code
        re['return_code'] = 'SUCCESS'
        msg = '未知返回码'
    re['return_msg'] = msg
    return notify.arrayToXml(re)
def wx_repayment_callback():
    ''' WeChat Pay notification endpoint for installment repayments.

    Mirrors wx_pay_callback but resolves a Repayment record instead of an
    Order and dispatches to repayment_success_action /
    repayment_error_action.  Already-paid repayments are acked immediately.
    '''
    xml = request.data
    LogService.log_pay_callback(PAY_METHOD.WECHAT_WEB, xml)
    notify = Notify_pub()
    rs = notify.check_sign(xml)
    re = {}  # NOTE(review): local name shadows the stdlib `re` module
    if not rs:
        re['return_code'] = 'FAIL'
        re['return_msg'] = '签名失败'
        return notify.arrayToXml(re)
    data = notify.get_data()
    result_code = data['result_code']
    order_no = str(data['out_trade_no'])
    total_fee = data['total_fee']  # amount in fen (cents)
    transaction_id = data['transaction_id']
    repayment = OrderService.get_repayment_by_orderno(order_no)
    if not repayment:
        re['return_code'] = 'FAIL'
        re['return_msg'] = '订单不存在:'+order_no
        return notify.arrayToXml(re)
    total_price = float(total_fee)/100  # fen -> yuan
    order_price = float(repayment.price)
    # Amount mismatch only rejects in production.
    if order_price != total_price and (os.environ.get('APP_ENV')=='production'):
        print order_price, total_price, '金额不匹配'
        re['return_code'] = 'FAIL'
        re['return_msg'] = '金额不匹配'
        return notify.arrayToXml(re)
    msg = ''
    # Idempotency: duplicate callbacks for a settled repayment are acked.
    if (repayment.status==REPAYMENT_STATUS.PAY_SUCCESS):
        re = {'return_code':'SUCCESS','return_msg':'ok'}
        return notify.arrayToXml(re)
    if result_code.upper() == 'FAIL':
        re['return_code'] = 'FAIL'
        repayment_error_action(repayment)
    elif result_code.upper()=='SUCCESS':
        re['return_code'] = 'SUCCESS'
        repayment_success_action(repayment, transaction_id=transaction_id, pay_method=PAY_METHOD.WECHAT_WEB)
    else:
        # Unknown result code: log it but ack so WeChat stops retrying.
        print 'wxpay_notify:',result_code
        re['return_code'] = 'SUCCESS'
        msg = '未知返回码'
    re['return_msg'] = msg
    return notify.arrayToXml(re)
def repayment_error_action(repayment):
    ''' Hook invoked when a repayment fails; intentionally a no-op for now. '''
    return None
def repayment_success_action(repayment, **kw):
    ''' Mark a repayment as paid and apply its side effects.

    The UPDATE is guarded (status must still be NEW/TO_PAY), so a duplicate
    payment callback is a no-op.  On the first success: write the repayment
    log, mark the covered period-pay logs as repaid, and release the repaid
    amount back onto the user's credit line.
    '''
    new_status = REPAYMENT_STATUS.PAY_SUCCESS
    kw['status'] = new_status
    where = and_(
        Repayment.id==repayment.id,
        Repayment.status.in_([REPAYMENT_STATUS.TO_PAY, REPAYMENT_STATUS.NEW])
    )
    count = OrderService.update_repayment(where, **kw)
    if count:
        print '还款成功'
        # repayment.data holds the JSON list of period-pay logs this covers.
        log_ids = [i['id'] for i in json.loads(repayment.data)]
        OrderService.gen_repayment_log(repayment)
        result = CreditService.update_pay_log(log_ids)
        if repayment.price:
            # Negative delta: paying back frees up credit.
            CreditService.modify_credit(repayment.user_id, -(repayment.price))
def pay_error_action(order):
    ''' Mark an order as pay-failed, unless it already succeeded
    (the WHERE clause protects a concurrent success callback). '''
    new_status = ORDER_STATUS.PAY_ERROR
    where = Order.status!=ORDER_STATUS.PAY_SUCCESS
    count = OrderService.update_order_status(order.id, new_status, where=where)
    if count:
        print 'pay error'
        pass  # TODO: notify backend and admins (original note: 通知后端以及管理员)
# Input schema for order_preview: coupon and installment choice are optional
# (period_choice_id absent/0 means direct purchase).
order_preview_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id'),
        'coupon_id' : Optional(IdField(msg='优惠券id')),
        'period_choice_id' : Optional(IdField(msg='分期类型id'))
    }
)
@wechat_loggin_dec(required=True, validator=order_preview_validator, app=True)
def order_preview():
    ''' Order confirmation preview.

    Computes everything the checkout page needs: the item with activity
    price applied, the applicable coupons (auto-picking the best one when
    none was chosen), the user's remaining credit, and the installment
    breakdown (period count / per-period amount / fee / cash still due).
    Does NOT write anything — confirm_order performs the actual purchase.
    '''
    item_id = request.valid_data.get('item_id')
    coupon_id = request.valid_data.get('coupon_id')
    period_choice_id = request.valid_data.get('period_choice_id') or 0
    msg = ''
    fields = [
        'id', 'has_fee', 'direct_buy',
        'image', 'title', 'price', 'sub_cat_id',
        'sub_cat_id_list', 'support_choice_list', 'hospital_id', 'orig_price'
    ]
    item = ItemService.get_item_dict_by_id(item_id, fields=fields)
    assert item, '商品不存在'
    get_item_activity_price(item)
    if period_choice_id: assert period_choice_id in item['support_choice_list'], '商品不支持该分期选项'
    #if period_choice_id==0: assert item['direct_buy'], '商品不支持直接购买'
    # Collect every category id the item belongs to, for coupon matching.
    sub_cat_id_list = item['sub_cat_id_list']
    sub_cats = ItemService.get_subcats_by_ids(sub_cat_id_list)
    cat_id_list = []
    for i in sub_cats:
        cat_id_list.extend(i['cat_id_list'])
    user_coupon = None
    if coupon_id:
        user_coupon = CouponService.get_user_coupon(
            coupon_id, request.user_id, item_id=item_id, cat_id_list=cat_id_list, sub_cat_id_list=sub_cat_id_list,
            item_price=item['price']
        )
        if not(user_coupon):
            msg = '此优惠券不存在, 请选择其他优惠券'
        else:
            user_coupon = user_coupon.as_dict()
    coupon_list = []
    # All usable coupons for this item: unused, unexpired, value within the
    # item price, and matching by scope (global / cat / sub-cat / this item).
    where = and_()
    where.append(UserCoupon.status==0)
    where.append(UserCoupon.user_id==request.user_id)
    where.append(UserCoupon.price<=item['price'])
    where.append(UserCoupon.end_time>dt_obj.now())
    or_query = or_(
        UserCoupon.coupon_cat==0,
        CouponService.cat_query(cat_id_list),
        CouponService.sub_cat_query(sub_cat_id_list),
        and_(
            UserCoupon.item_id==item_id,
            UserCoupon.coupon_cat==3
        )
    )
    where.append(or_query)
    # Minimum-spend coupons additionally require the item price to reach `need`.
    final_where = or_(
        and_(
            where,
            UserCoupon.need==0,
        ),
        and_(
            where,
            UserCoupon.need<=item['price']
        )
    )
    has_more, user_coupons = CouponService.get_paged_user_coupons(where=final_where, limit=100)
    user_coupons.sort(key=lambda i:i['price'], reverse=False)
    # No coupon chosen explicitly: auto-pick the highest-value applicable one.
    # NOTE(review): `coupon_id==None` should idiomatically be `is None`.
    if coupon_id==None and user_coupons and not user_coupon:
        for c in user_coupons[::-1]:
            if c['price']<=item['price']:
                user_coupon = c
    if user_coupon:
        print user_coupon['price'], item['price']
        assert user_coupon['price']<=item['price'], '优惠券金额不能超过订单总额'
    coupon_amount = 0
    if user_coupon:
        if user_coupon['is_trial']: # trial coupon: its value equals the item price
            user_coupon['price'] = item['price']
        coupon_amount = format_price(user_coupon['price'])
    fields = ['id', 'name']
    hospital = ItemService.get_hospital_dict_by_id(item['hospital_id'], fields=fields)
    credit = CreditService.get_user_credit(request.user_id)
    if not credit:
        # Lazily create the credit record on first use.
        CreditService.init_credit(request.user_id)
        credit = CreditService.get_user_credit(request.user_id)
    verified = bool(credit.status)
    # period_choice_id == 0 means direct purchase (no credit involved).
    credit_amount_remain= format_price(credit.total-credit.used)
    if period_choice_id==0:
        credit_amount_remain = 0
    if period_choice_id:
        period_choice = CreditService.get_period_choice(period_choice_id)
        assert period_choice, '分期选项不存在'
        period_count = period_choice.period_count
        result = cacl_need_pay(
            period_choice, item['price']-coupon_amount, credit_amount_remain, item['has_fee'])
        need_pay = result['need_pay']
        period_money = result['period_money']
        period_amount = result['period_amount']
        period_fee = result['period_fee']
        credit_used = result['credit_used']
    else:
        # Direct purchase: a single period, everything due in cash.
        period_count = 1
        period_fee = 0
        period_amount = 0
        period_money = 0
        credit_used = 0
        need_pay = item['price'] - coupon_amount
    _, item_cats = ItemService.get_paged_cats(limit=1000)
    _, item_subcats = ItemService.get_paged_sub_cats(limit=1000)
    for i in user_coupons:
        i['cat_str'] = '全部适用'
        i['remain_str'] = calc_expire_remain(i['end_time'])
        set_coupon_cat_str(i, item_cats, item_subcats)
    coupon_title = ''
    if user_coupon: coupon_title = user_coupon['title']
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : msg,
        'item' : item,
        'hospital' : hospital,
        'coupon_amout' : coupon_amount,
        'coupon_title' : coupon_title,
        'coupon_id' : user_coupon['id'] if user_coupon else 0,
        'credit_amount' : credit_used, # credit actually consumed
        'credit_amount_can_use': credit_amount_remain , # total credit available
        'total' : item['price'],
        'period_count' : period_count,
        'period_amount' : period_amount,
        'period_fee' : period_fee,
        'period_total' : period_money,
        'coupon_list' : user_coupons,
        'need_pay' : format_price(need_pay),
        'credit_status' : credit.status
    }
    return jsonify_response(result)
# Input schema for confirm_order: same shape as order_preview.
confirm_order_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id'),
        'period_choice_id' : Optional(IdField(msg='分期类型id')),
        'coupon_id' : Optional(IdField(msg='优惠券id'))
    }
)
@wechat_loggin_dec(required=True, validator=confirm_order_validator, app=True)
def confirm_order():
    ''' Create an order for an item, optionally consuming a coupon and a
    slice of the user's credit line (installments).

    Order of side effects: (1) conditionally mark the coupon used,
    (2) deduct the used credit, (3) insert the order, (4) if nothing is due
    in cash and the credit line is verified, run pay_success_action right
    away.  The conditional coupon UPDATE doubles as a double-spend guard.
    '''
    item_id = request.valid_data.get('item_id')
    user_coupon_id = request.valid_data.get('coupon_id')
    period_choice_id = request.valid_data.get('period_choice_id')
    item = ItemService.get_item_dict_by_id(item_id)
    assert item, '商品不存在'
    get_item_activity_price(item)
    # Category ids for coupon scope matching.
    sub_cat_id_list = item['sub_cat_id_list']
    sub_cats = ItemService.get_subcats_by_ids(sub_cat_id_list)
    cat_id_list = []
    for i in sub_cats:
        cat_id_list.extend(i['cat_id_list'])
    hospital_id = item['hospital_id']
    if period_choice_id: assert period_choice_id in item['support_choice_list'], '商品不支持该分期选项'
    user_coupon = None
    if user_coupon_id:
        user_coupon = CouponService.get_user_coupon(
            user_coupon_id, request.user_id, item_id=item_id, cat_id_list=cat_id_list, sub_cat_id_list=sub_cat_id_list,
            item_price=item['price']
        )
        assert user_coupon, '优惠券不存在'
        assert user_coupon.status==0, '优惠券已被使用'
        assert user_coupon.end_time>dt_obj.now(), '优惠券已过期'
    total = item['price']
    order_no = OrderService.create_no()
    coupon_amount = 0
    credit_amount = 0
    if user_coupon_id:
        if user_coupon.is_trial: # trial coupon: its value equals the item price
            user_coupon.price = item['price']
        coupon_amount = format_price(user_coupon.price)
    credit = CreditService.get_user_credit(request.user_id)
    credit_amount_remain = format_price(credit.total-credit.used)
    if period_choice_id:
        assert credit.status!=CREDIT_STATUS.DEFAULT, '请先申请额度'
        assert credit.status!=CREDIT_STATUS.REJECTED, '请重新申请额度'
    credit_verified = 1 if (credit.status==CREDIT_STATUS.VERIFIED) else 0
    # Direct purchases never need credit verification.
    if period_choice_id==0: credit_verified=1
    if period_choice_id:
        period_choice = CreditService.get_period_choice(period_choice_id)
        assert period_choice, '分期选项不存在'
        apply = CreditService.get_apply_dict_by_userid(request.user_id)
        now = dt_obj.now()
        if apply and apply.get('graduate_time'):
            # Installments must end at least 6 months before graduation.
            assert apply['graduate_time']>add_months(now, period_choice.period_count+6), '选择分期期数需小于现在到毕业前六个月的月数'
        period_count = period_choice.period_count
        result = cacl_need_pay(
            period_choice, item['price']-coupon_amount, credit_amount_remain, item['has_fee'])
        need_pay = result['need_pay']
        period_money = result['period_money']
        period_amount = result['period_amount']
        period_fee = result['period_fee']
        credit_used = result['credit_used']
    else:
        # Direct purchase: one period, all cash.
        period_count = 1
        period_fee = 0
        period_amount = 0
        period_money = 0
        credit_used = 0
        need_pay = item['price'] - coupon_amount
    if user_coupon_id:
        # Conditional UPDATE re-checks ownership, freshness and scope so a
        # concurrent request cannot spend the same coupon twice.
        query = and_(
            UserCoupon.user_id==request.user_id,
            UserCoupon.id==user_coupon_id,
            UserCoupon.status==0,
            or_(
                UserCoupon.coupon_cat==0,
                CouponService.cat_query(cat_id_list),
                CouponService.sub_cat_query(sub_cat_id_list),
                and_(
                    UserCoupon.coupon_cat==3,
                    UserCoupon.item_id==item_id
                )
            )
        )
        or_query = or_(
            and_(
                query,
                UserCoupon.need==0,
            ),
            and_(
                query,
                UserCoupon.need<=item['price']
            )
        )
        count = CouponService.update_user_coupon_status(or_query, 1)
        assert count, '优惠券已被使用'
    total_fee = format_price(period_fee*period_count)
    if credit_used:
        result = CreditService.modify_credit(request.user_id, credit_used)
        assert result in {1,2}, '额度不足'
    if need_pay:
        to_status = ORDER_STATUS.NEW_ORDER
    else:
        to_status = ORDER_STATUS.PAY_SUCCESS
    order_id = OrderService.add_order(
        request.user_id, item_id, hospital_id, need_pay,
        credit_used, total_fee, coupon_amount, total, period_choice_id, user_coupon_id, order_no,
        credit_verified,
        status=to_status)
    if not(need_pay) and credit_verified: # fully covered by verified credit: no cash leg
        order = OrderService.get_user_order(order_id, request.user_id)
        pay_success_action(order, need_pay=False)
    result = {
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'order_id': order_id}
    return jsonify_response(result)
# Input schema for order_pay: just the order id.
order_prepay_validator = Inputs(
    {
        'order_id' : IdField(msg='订单id'),
    }
)
@wechat_loggin_dec(required=True, validator=order_prepay_validator, app=False)
def order_pay():
    ''' Order payment page: builds WeChat JSAPI pay params and renders the
    pay template; already-paid and zero-cash orders short-circuit. '''
    order_id = request.valid_data.get('order_id')
    order_info = OrderService.get_user_order(order_id, request.user_id)
    assert order_info, '订单不存在'
    if order_info.status==ORDER_STATUS.PAY_SUCCESS:
        return redirect('/user/order_pay_success/?order_id='+str(order_id))
    if order_info.price==0:
        # No cash leg: show the no-cash success page directly.
        return render_template('user/pay_success_no_cash.html', order_id=order_id)
    assert order_info.status!=ORDER_STATUS.PAY_SUCCESS, '订单已支付成功'
    # NOTE(review): hard-coded fallback open_id looks like a dev/test value —
    # confirm it cannot be reached in production.
    open_id = request.open_id or 'o56qvw-ThtwfthGGlZ-XbH-3fjRc'
    wx_pay_params, err = get_wx_pay_params(
        open_id, order_info.price, order_info.order_no, WX_PAY_NOTIFY_URL, '美分分购买商品'
    )
    if err:
        return 'error'
    print wx_pay_params, 'wx_pay_params', type(wx_pay_params)
    return render_template('user/order_pay.html', order=order_info, wx_pay_params=wx_pay_params)
# Input schema for repayment_pay: just the repayment id.
repayment_pay_validator = Inputs(
    {
        'repayment_id' : IdField(msg='还款id'),
    }
)
@wechat_loggin_dec(required=True, validator=repayment_pay_validator, app=False)
def repayment_pay():
    ''' Repayment page: builds WeChat JSAPI params for an installment bill. '''
    repayment_id = request.valid_data.get('repayment_id')
    repayment = OrderService.get_user_repayment(repayment_id, request.user_id)
    assert repayment, '还款不存在'
    # NOTE(review): hard-coded fallback open_id looks like a dev/test value.
    open_id = request.open_id or 'o56qvw-ThtwfthGGlZ-XbH-3fjRc'
    wx_pay_params, err = get_wx_pay_params(
        open_id, repayment.price, repayment.order_no, WX_REPAYMENT_NOTIFY_URL, '美分分分期账单还款'
    )
    if err: return ''
    print wx_pay_params, 'wx_pay_params', type(wx_pay_params)
    return render_template('user/repayment_pay.html', repayment=repayment, wx_pay_params=wx_pay_params)
@wechat_loggin_dec(required=False, app=True)
def uploads():
    ''' Render the qiniu upload page (see https://github.com/qiniu/js-sdk). '''
    upload_token = gen_qn_token()
    return render_template('user/upload.html', token=upload_token)
# Input schema for order_detail: just the order id.
order_detail_validator = Inputs(
    {
        'order_id' : IdField(msg='订单id'),
    }
)
@wechat_loggin_dec(required=True, validator=order_detail_validator, app=True)
def order_detail():
    ''' Full order detail: item, hospital, installment breakdown, service
    code (when applicable), cancel confirmation text and display status. '''
    order_id = request.valid_data.get('order_id')
    order_info = OrderService.get_user_order(order_id, request.user_id)
    assert order_info, '订单不存在'
    fields = ['id', 'title', 'price', 'orig_price', 'image', 'hospital_id']
    item = ItemService.get_item_dict_by_id(order_info.item_id, fields=fields)
    assert item, '商品不存在'
    get_item_activity_price(item)
    _, period_choices = CreditService.get_paged_period_choices(limit=1000)
    period_amount = 0
    period_count = 0
    period_fee = 0
    # Derive per-period amounts from the order's installment choice, if any.
    for choice in period_choices:
        if order_info.credit_choice_id==choice['id']:
            period_count = choice['period_count']
            period_money = format_price((order_info.credit_amount)/period_count)
            period_fee = format_price(order_info.total_fee/period_count)
            period_amount = format_price(period_money - period_fee)
    fields = ['id', 'addr', 'long_lat', 'tag_list', 'phone', 'name']
    hospital = ItemService.get_hospital_dict_by_id(item['hospital_id'], fields=fields)
    service_code = ''
    service_status = 0
    service_code_dict = {}
    # A service code only exists once credit is verified and the order paid/finished.
    if order_info.credit_verified==1 and order_info.status in {ORDER_STATUS.PAY_SUCCESS, ORDER_STATUS.FINISH}:
        service = OrderService.get_servicecode(order_id)
        assert service, '服务码不存在'
        service_code_dict = service.as_dict()
        service_code = service.code
        service_status = service.status
    order_info = order_info.as_dict()
    cancel_msg = '确认取消订单吗'
    if order_info['price'] and order_info['status']==ORDER_STATUS.PAY_SUCCESS:
        # Refund message includes already-repaid installments, if any.
        repayment_amount= OrderService.get_order_repayment_logs_amount(order_id)
        repayment_amount= sum([format_price(i['price']) for i in repayment_amount.values()] or [0])
        refund_total = order_info['price']+repayment_amount
        if repayment_amount:
            cancel_msg = '取消订单将退还首付金额{}元和已还款金额{}元,是否取消订单?'.format(order_info['price'], repayment_amount)
        else:
            cancel_msg = '取消订单将退还首付金额{}元,是否取消订单?'.format(order_info['price'])
    comment = CommentService.get_comment(ItemComment.order_id==order_id)
    set_order_status(order_info, comment=comment, servicecode=service_code_dict)
    order_info.update({
        'period_fee' : format_price(period_fee),
        'period_count' : period_count,
        'period_amount' : format_price(period_amount)
    })
    # NOTE(review): 'status_labbel' is presumably a typo of 'status_label',
    # but the key is part of the API response — check clients before renaming.
    order_info['status_labbel'] = ORDER_STATUS_LABEL.get(order_info['status'])
    result = {
        'cancel_msg' : cancel_msg,
        'item' : item,
        'service_code' : service_code,
        'service_status': service_status,
        'hospital' : hospital,
        'order_info' : order_info
    }
    return jsonify_response(result)
# Input schema for comment_post: either order_id or item_id must be present
# (enforced in the view), photos is a comma-separated URL list.
comment_post_validator = Inputs(
    {
        'order_id' : Optional(IdField(msg='订单id')),
        'item_id' : Optional(IdField(msg='商品id')),
        'content' : TextField(min_length=1, max_length=10000, msg='评价内容'),
        'photos' : Optional(TextField(min_length=0, max_length=10000, msg='逗号分隔的图片列表')),
        'is_anonymous' : IntChoiceField(choices=[0,1], msg='是否匿名'),
        'rate' : IntChoiceField(choices=range(1,6), msg='评星'),
    }
)
@wechat_loggin_dec(required=True, validator=comment_post_validator, app=True)
def comment_post():
    ''' Post an item review, tied to an order when order_id is given.

    Promo "fake" users may comment without owning an order; everyone else
    must have a matching order.  A repeat review of the same item/order is
    stored with is_re_comment set, and the hospital rating is recomputed.
    '''
    order_id = request.valid_data.get('order_id')
    item_id = request.valid_data.get('item_id')
    content = request.valid_data.get('content')
    photos = request.valid_data.get('photos')
    is_anonymous = request.valid_data.get('is_anonymous')
    rate = request.valid_data.get('rate')
    order = OrderService.get_user_order(order_id, request.user_id)
    can_comment = PromoteService.get_fakeuser_by_userid(request.user_id)
    if not can_comment:
        assert order, '订单不存在'
    assert order_id or item_id, '请评论商品'
    if order:
        # The order's item wins over a client-supplied item_id.
        item_id = order.item_id
    item = ItemService.get_item_dict_by_id(item_id)
    assert item, '商品不存在'
    # Has this user already reviewed this item/order?
    query = and_()
    query.append(ItemComment.user_id==request.user_id)
    if order_id: query.append(ItemComment.order_id==order_id)
    if item_id: query.append(ItemComment.item_id==item_id)
    exists = bool(CommentService.get_comment(query))
    comment_id = CommentService.comment_item(
        item_id or order.item_id, request.user_id, content, photos, rate, is_anonymous,
        order_id,
        is_re_comment=exists
    )
    CommentService.rerate_hospital(item['hospital_id'])
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '评论成功',
        'comment_id' : comment_id,
        'item_id' : item_id or order.item_id
    }
    return jsonify_response(result)
# Input schema for my_period_bill: cat 1 = current period, 2 = next period.
my_period_bill_validator = Inputs(
    {
        'cat' : IntChoiceField(choices=range(1,3), msg='还款日期类型'),
    }
)
@wechat_loggin_dec(required=True, validator=my_period_bill_validator, app=True)
def my_period_bill():
    ''' Installment bill for the current (cat=1) or next (cat=2) period.

    cat=1 additionally pulls in overdue unpaid logs and overdue logs that
    were repaid within the current month; overdue unpaid entries get a
    dynamically computed late fee (cacl_punish_fee).
    '''
    cat = request.valid_data.get('cat')
    deadline = get_due_time(cat-1)
    start, end = get_current_period()
    where = or_()
    title, thedeadline = deadline_zh(deadline)
    where.append(PeriodPayLog.deadline==deadline)
    if cat==1: # current period also includes overdue logs
        where.append(
            and_(
                PeriodPayLog.deadline<deadline,
                PeriodPayLog.status==0,
            )
        )
        where.append( # overdue logs repaid within this month
            and_(
                PeriodPayLog.deadline<deadline,
                PeriodPayLog.status==1,
                # `day_delta` is defined at module scope (outside this view).
                PeriodPayLog.repayment_time>=start+day_delta,
                PeriodPayLog.repayment_time<=dt_obj.now(),
            )
        )
    where = and_(
        PeriodPayLog.status.in_([0,1]),
        where)
    logs = CreditService.get_period_pay_logs(request.user_id, where)
    total = 0
    repayed = 0
    logs = [i.as_dict() for i in logs]
    for log in logs:
        get_delayed_info(log)
        total += log['fee'] + log['amount'] + log['punish']
        log['create_time_str'] = get_time_str_from_dt(log['create_time'], '%Y.%m.%d')
        if log['status']==1:
            repayed += log['fee'] + log['amount'] + log['punish']
        else:
            if log['deadline']!=str(deadline):
                cacl_punish_fee(log) # overdue & unpaid: late fee computed on the fly
                total += log['punish']
    fetch_order_refs(logs)
    for log in logs:
        log['item_id'] = log['order']['item_id']
    fetch_item_refs(logs, fields=['id', 'title'])
    remain = total - repayed
    result = {
        'total' : format_price(total),
        'remain' : format_price(remain),
        'repayed' : format_price(repayed),
        'infos' : logs,
        'title' : title,
        'deadline' : thedeadline,
    }
    return jsonify_response(result)
import os
@wechat_loggin_dec(required=True, app=True)
def user_home():
    ''' "My account" summary: credit line, usable coupon count, amount due
    this period (including dynamic late fees), and days to the due date. '''
    user = UserService.get_user_by_id(request.user_id)
    user_credit = CreditService.init_credit(request.user_id)
    # Usable coupons: unused and not yet expired.
    where = and_(
        UserCoupon.user_id==request.user_id,
        UserCoupon.status==0,
        UserCoupon.end_time>dt_obj.now()
    )
    coupon_count = CouponService.count_coupon(where)
    verified = bool(user_credit.status)
    total = user_credit.total
    remain = user_credit.total - user_credit.used
    apply_status = user_credit.status # 0 not applied, 1 pending, 2 approved, 3 rejected
    period_to_pay = 0
    deadline = get_due_time(0)
    start, end = get_current_period()
    # Unpaid logs due on or before the current deadline (includes overdue).
    where = or_(
    )
    where.append(
        and_(
            PeriodPayLog.deadline<=deadline,
            PeriodPayLog.status==0,
        )
    )
    logs = CreditService.get_period_pay_logs(request.user_id, where)
    logs = [i.as_dict() for i in logs]
    has_delayed = False
    for log in logs:
        if not has_delayed and log['status']==0:
            # String comparison works because both are ISO-ordered timestamps.
            has_delayed = str(dt_obj.now())>log['deadline']
        if log['status']==1: continue
        period_to_pay += log['fee'] + log['amount']
        if not(log['repayment_time']) and str(dt_obj.now())>log['deadline']:
            cacl_punish_fee(log)
            period_to_pay += log['punish']
    remain_days = get_date_delta(str(dt_obj.now())[:19], str(deadline)[:19])
    # Name can be edited once; always editable outside production.
    can_edit_name = not UserService.get_edit_name_log(request.user_id)
    if os.environ.get('APP_ENV')!='production': can_edit_name = True
    result = {
        'has_delayed' : has_delayed,
        'can_edit_name' : can_edit_name,
        'total' : float(total),
        'remain' : float(remain),
        'coupon_count' : coupon_count,
        'apply_status' : apply_status,
        'user' : user.as_dict(),
        'period_to_pay' : format_price(period_to_pay), # due this period
        'remain_days' : remain_days,
    }
    return jsonify_response(result)
# Input schema for my_repayments: optional composite paging cursor.
my_repayments_validator = Inputs(
    {
        'offset' : Optional(TextField(min_length=0, max_length=100, msg='分页参数'))
    }
)
@wechat_loggin_dec(required=True, validator=my_repayments_validator, app=True)
def my_repayments():
    ''' Repayment history, newest first, cursor-paged by
    (repayment_time, id). '''
    offset = request.valid_data.get('offset')
    where = and_()
    where.append(and_(
        PeriodPayLog.user_id==request.user_id,
        PeriodPayLog.status==1
    ))
    if offset:
        # Composite cursor "<id>_<unix ts>": the id part breaks ties between
        # logs sharing the same repayment_time.
        log_id, pay_time = offset.split('_')
        pay_datetime = dt_obj.fromtimestamp(float(pay_time))
        where.append(or_(PeriodPayLog.repayment_time<pay_datetime, and_(PeriodPayLog.repayment_time==pay_datetime, PeriodPayLog.id<log_id)))
    has_more, logs = CreditService.get_paged_pay_logs(where=where, _sort='repayment_time')
    offset = ''
    if logs: offset = str(logs[-1]['id']) + '_' + str(get_timestamp(logs[-1]['repayment_time']))
    fetch_order_refs(logs)
    for log in logs:
        log['item_id'] = log['order']['item_id']
    fetch_item_refs(logs, fields=['id', 'title'])
    result = {
        'infos' : logs,
        'has_more' : has_more,
        'offset' : offset
    }
    return jsonify_response(result)
@wechat_loggin_dec(required=False, app=True)
def item_cats():
    ''' Category tree for the storefront: a synthetic "recommended" category
    (id 0) built from the recommended sub-cats, followed by every real
    category with its sub-cats and icons. '''
    all_cats = ItemService.get_item_cats()
    all_sub_cats = ItemService.get_item_subcats()
    _, all_recommend_subcats = ItemService.get_paged_recommend_subcats(no_limit=True)
    id_order_map = {i['sub_cat_id']:i['sort_order'] for i in all_recommend_subcats}
    recommend_subcat_ids = set(i['sub_cat_id'] for i in all_recommend_subcats)
    # Python 2: filter() returns a list, so in-place sort below is fine.
    recommend_subcats = filter(lambda i:i['id'] in recommend_subcat_ids, all_sub_cats)
    recommend_subcats.sort(key=lambda i: id_order_map[i['id']])
    data = [
        {
            'id': 0,
            'name':'推荐',
            'sub_cats':recommend_subcats,
            'icon' : CAT_ICONS[0],
            'icon_active' : CAT_ICONS_ACTIVE[0]
        }]
    for cat in all_cats:
        tmp = {'name': cat.name, 'id': cat.id}
        tmp['sub_cats'] = [i for i in all_sub_cats if cat.id in i['cat_id_list']]
        tmp['icon'] = CAT_ICONS.get(cat.id)
        tmp['icon_active'] = CAT_ICONS_ACTIVE.get(cat.id)
        data.append(tmp)
    # Tag recommended sub-cats with the synthetic category id 0.
    for i in all_sub_cats:
        if i['id'] in recommend_subcat_ids:
            i['cat_id_list'].append(0)
    result = {
        'data':data,
        'all_sub_cats':all_sub_cats
    }
    return jsonify_response(result)
    #return render_template('user/item_cats.html', nav={2:'active'}, data=data)
# Input schema for my_favs: optional paging cursor.
my_favs_validator = Inputs(
    {
        'offset' : Optional(TextField(min_length=0, max_length=100, msg='分页参数'))
    }
)
@wechat_loggin_dec(required=True, validator=my_favs_validator, app=True)
def my_favs():
    ''' The user's wish list, paged, with item/hospital refs attached and
    the current activity price applied where one exists.

    Change: removed a leftover debug print of the fav list.
    '''
    offset = request.valid_data.get('offset')
    where = ItemFav.user_id==request.user_id
    has_more, favs = ItemService.get_paged_fav_items(where=where, offset=offset)
    period_choices = CreditService.get_period_choices()
    fetch_item_refs(favs, fields=['id', 'image', 'title','support_choice_list','price','orig_price','hospital_id'])
    items = [i['item'] for i in favs]
    fetch_hospital_refs(items, fields=['id','name'])
    item_ids = [i['item']['id'] for i in favs]
    # Overlay activity pricing for the currently running activity, if any.
    activity = ActivityService.get_current_activity()
    activity_id = None
    if activity: activity_id = activity['id']
    price_map = ItemService.get_activity_prices(item_ids, activity_id)
    for i in favs:
        activity_price = price_map.get(i['item']['id'])
        if activity_price: i['item']['price'] = activity_price
    fetch_min_period_info(items)
    offset = ''
    if favs: offset = str(favs[-1]['id'])
    result = {
        'has_more' : has_more,
        'infos' : favs,
        'offset' : offset
    }
    return jsonify_response(result)
# Input schema for my_coupons: cat tab (1=unused, 2=used, 3=expired).
my_coupons_validator = Inputs(
    {
        'cat' : IntChoiceField(choices=[1,2,3], msg='优惠券类型'), # 1 unused, 2 used, 3 expired
        'offset' : Optional(TextField(min_length=0, max_length=100, msg='分页参数'))
    }
)
@wechat_loggin_dec(required=True, validator=my_coupons_validator, app=True)
def my_coupons():
    ''' The user's coupons by tab: 1=unused, 2=used, 3=expired.
    Each coupon gets a remaining-time string and a category description. '''
    offset = request.valid_data.get('offset')
    cat = request.valid_data.get('cat')
    where = None
    filters = [UserCoupon.user_id==request.user_id]
    if cat==1:
        filters.append(
            and_(
                UserCoupon.status==0,
                UserCoupon.end_time>dt_obj.now()
            )
        )
    elif cat==2:
        filters.append(
            and_(
                UserCoupon.status==1
            )
        )
    elif cat==3:
        filters.append(
            and_(
                UserCoupon.status==0,
                UserCoupon.end_time<=dt_obj.now()
            )
        )
    where = and_(*filters)
    has_more, user_coupons = CouponService.get_paged_user_coupons(where=where, offset=offset)
    fields = ['id','title']
    fetch_item_refs(user_coupons, fields=fields, keep_id=True)
    offset = ''
    if user_coupons: offset = str(user_coupons[-1]['id'])
    _, item_cats = ItemService.get_paged_cats()
    _, item_subcats = ItemService.get_paged_sub_cats()
    for coupon in user_coupons:
        coupon['remain_str'] = calc_expire_remain(coupon['end_time'], coupon['status'])
        set_coupon_cat_str(coupon, item_cats, item_subcats)
    result = {
        'has_more': has_more,
        'infos' : user_coupons,
        'offset' : offset,
    }
    return jsonify_response(result)
@wechat_loggin_dec(required=True, app=True)
def my_apply():
    ''' Return the current user's credit-line application, if any. '''
    return jsonify_response({
        'data': CreditService.get_apply_dict_by_userid(request.user_id)
    })
# Input schema for help: optional category filter.
help_validator = Inputs(
    {
        'cat_id' : Optional(IdField(msg='分类id')),
    }
)
@wechat_loggin_dec(validator=help_validator, app=True)
def help():
    ''' Help center (JSON): categories with their entries; without a cat_id
    each category is trimmed to its first four entries. '''
    # NOTE(review): shadows the builtin `help`; kept because the view name
    # is part of the routing interface.
    cat_id = request.valid_data.get('cat_id')
    where = None
    if cat_id: where = HelpCat.id==cat_id
    has_more, cats = DataService.get_paged_helpcats(where=where)
    has_more, entries = DataService.get_paged_helpentries()
    for cat in cats:
        cat['entry_list'] = [ i for i in entries if i['cat_id']==cat['id'] ]
        if not cat_id:
            cat['entry_list'] = cat['entry_list'][:4]
    result = {
        'data' : cats
    }
    return jsonify_response(result)
# Input schema for help_html: optional category filter.
help_html_validator = Inputs(
    {
        'cat_id' : Optional(IdField(msg='分类id')),
    }
)
@wechat_loggin_dec(required=False, validator=help_html_validator, app=True)
def help_html():
    ''' Help center page (HTML).

    With cat_id, renders a single category's entries; without, renders
    every category trimmed to its first four entries.

    Change: removed dead code — an unused `result` dict and an unreachable
    jsonify_response() that followed the return.
    '''
    cat_id = request.valid_data.get('cat_id')
    where = None
    if cat_id: where = HelpCat.id==cat_id
    has_more, cats = DataService.get_paged_helpcats(where=where)
    has_more, entries = DataService.get_paged_helpentries()
    for cat in cats:
        cat['entry_list'] = [ i for i in entries if i['cat_id']==cat['id'] ]
        if not cat_id:
            cat['entry_list'] = cat['entry_list'][:4]
    return render_template('user/help-center.html', cats=cats, cat_id=cat_id)
# Input schema for get_help_entry: just the entry id.
help_entry_validator = Inputs(
    {
        'entry_id' : IdField(msg='条目id')
    }
)
@wechat_loggin_dec(required=False, validator=help_entry_validator, app=True)
def get_help_entry():
    ''' Help entry detail page (HTML).

    Change: removed dead code — an unused `result` dict and an unreachable
    jsonify_response() that followed the return.
    '''
    entry_id = request.valid_data.get('entry_id')
    entry = DataService.get_helpentry_by_id(entry_id)
    contact = CONTACT
    return render_template('user/help-center-detail.html', entry=entry, contact=contact)
@wechat_loggin_dec(app=True)
def apply_credit_page():
    '''Render the first step of the credit-limit application.'''
    return render_template('user/apply_one.html')
project_doctor_description_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id')
    }
)
@wechat_loggin_dec(required=False, validator=project_doctor_description_validator, app=True)
def project_doctor_description():
    '''Doctor/hospital rich-text introduction page for an item.

    Fix: read item_id from request.valid_data (the validated value) instead
    of the raw request.args, consistent with every other handler in this
    module that declares a validator.
    '''
    item_id = request.valid_data.get('item_id')
    item = ItemService.get_item_dict_by_id(item_id)
    return render_template('user/doctor_hospital_desc.html', item=item)
def get_jssdk_js():
    '''Serve the WeChat JS-SDK bootstrap script, signed for the referring page.

    Fixes: removed four leftover Python-2 debug prints of the request and its
    headers, and the unused `browser` local.
    '''
    sign_user = get_cookie('sign_user') or ''
    if sign_user:
        sign_user = '&sign_user='+sign_user
    referer = request.headers.get('Referer') or ''
    if '127.0.0.1' in referer:
        # NOTE(review): hard-coded local-development token; confirm it is
        # safe to keep in the deployed code path.
        sign_user = '&sign_user=' + '2.2074eb5e01c2093a5f5f9586955d5414'
    context = get_jssdk_context(referer)
    text = render_template('js_sdk.js', token=sign_user, **context)
    return js_response(text)
def get_school_list():
    '''School picker list (currently limited to schools in Shanghai).'''
    _, schools = DataService.get_paged_schools(
        where=(School.city_name=='上海'),
        limit=3000,
        fields=['id', 'name'])
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'infos' : schools
    })
repayment_validator = Inputs(
    {
        'data' : JsonField(msg='请选择还款数据')
    }
)
@wechat_loggin_dec(validator=repayment_validator, app=True)
def repayment():
    '''Create a repayment record for the selected installment logs.

    Sums amount + fee + punish over every selected log and records one
    repayment with the serialized selection.

    Fix: the per-log validation subscripted the bound method
    (`log.get['amount']`), which raised TypeError on every request; it now
    calls `log.get('amount')` (and fee/punish likewise) as intended.
    '''
    data = request.valid_data.get('data')
    user_id = request.user_id
    pay_method = None
    price = 0
    assert data, '请选择还款'
    for log in data:
        # Each entry must carry digit-string amount/fee/punish fields.
        assert str(log.get('amount')).isdigit() and str(log.get('fee')).isdigit() and str(log.get('punish')).isdigit(), '数据格式错误'
        price += float(log['punish']) + float(log['fee']) + float(log['amount'])
    coupon_id = None
    order_no = OrderService.create_no()
    repayment_id = OrderService.repayment(user_id, pay_method, coupon_id, price, json.dumps(data), order_no)
    msg = ''
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : msg,
        'repayment_id' : repayment_id
    }
    return jsonify_response(result)
hospital_detail_validator = Inputs(
    {
        'hospital_id': IdField(msg='医院id')
    }
)
@wechat_loggin_dec(required=False, validator=hospital_detail_validator)
def hospital_detail():
    '''Hospital detail page: the hospital's info plus up to 5 of its items.

    Fix: removed the unreachable `return jsonify_response(result)` that
    followed the render_template return.
    '''
    hospital_id = request.valid_data.get('hospital_id')
    fields = ['id', 'name', 'photo_list', 'working_time', 'phone', 'long_lat', 'desc', 'tag_list', 'addr']
    hospital = ItemService.get_hospital_dict_by_id(hospital_id, fields=fields)
    where = Item.hospital_id==hospital_id
    fields = ['id', 'photo_list', 'title', 'price', 'orig_price', 'support_choice_list', 'image', 'has_fee']
    has_more, items = ItemService.get_paged_items(where=where, fields=fields, limit=5)
    fetch_min_period_info(items)
    result = {
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'hospital': hospital,
        'infos': items
    }
    return render_template('user/hospital_detail.html', **result)
@wechat_loggin_dec(required=None)
def get_city_list():
    '''List all available cities.'''
    has_more, cities = DataService.get_paged_city_list()
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'infos' : cities,
    })
upload_image_validator = Inputs(
    {
        'image_cat' : ChoiceField(choices=['avatar', 'comment', 'apply', 'room'], msg='图片类型')
    }
)
@wechat_loggin_dec(validator=upload_image_validator)
def upload_image():
    '''Upload one image file; the stored key is '<category>/<timestamp>.jpg'.

    An 'avatar' upload also updates the user's avatar field in place.
    Any failure is swallowed and reported as a generic JSON error (code 10000).
    '''
    try:
        file = request.files['file']
        img_cat = request.valid_data.get('image_cat')
        # code/msg are assigned but unused below.
        code = 0
        msg = '上传成功'
        content = file.read()
        # time.time() as the key makes collisions unlikely but not impossible
        # for concurrent uploads -- NOTE(review): confirm this is acceptable.
        key = img_cat+ '/' + str(time.time()) + '.jpg'
        upload_img(key, content)
        if img_cat=='avatar':
            UserService.update_user(request.user_id, avatar=key)
        return jsonify_response({
            'code' : ResponseCode.SUCCESS,
            'msg' : '',
            'image': key,
            'fullpath': prefix_img_domain(key)
        })
    except Exception as e:
        # Deliberate best-effort: log the traceback, answer with a JSON error.
        import traceback
        traceback.print_exc()
        return jsonify_response({'msg':'服务器异常','code': 10000})
apply_credit_post_validator = Inputs(
    {
        # 'name' : Optional(TextField(min_length=0, max_length=100, msg='姓名')),
        # 'id_no' : TextField(min_length=0, max_length=100, msg='身份证号'),
        # 'school' : TextField(min_length=0, max_length=100, msg='学校'),
        # 'enrollment_time' : TextField(min_length=0, max_length=100, msg='入学时间'),
        'graduate_time' : REGField(pattern='\d{4}-\d{1,2}', msg='请输入毕业时间格式如:2015-01'),
        # 'major' : TextField(min_length=0, max_length=100, msg='专业'),
        # 'stu_no' : TextField(min_length=0, max_length=100, msg='学号'),
        # 'stu_education' : TextField(min_length=0, max_length=100, msg='学历'),
        # 'addr' : TextField(min_length=0, max_length=100, msg='地址'),
        'parent_contact' : TextField(min_length=0, max_length=100, msg='父母联系方式'),
        'chsi_name' : TextField(min_length=0, max_length=100, msg='学信网账号'),
        'chsi_passwd' : TextField(min_length=0, max_length=100, msg='学信网密码'),
        'body_choice_ids' : Optional(TextField(min_length=0, max_length=100, msg='你满意的部位')),
        'body_choice_text' : Optional(TextField(min_length=0, max_length=100, msg='其他内容')),
    }
)
@wechat_loggin_dec(validator=apply_credit_post_validator)
def apply_credit_post():
    '''Submit (or resubmit) step one of a credit-limit application.

    Tries to insert a new application; if add_apply returns falsy (one already
    exists), the user's non-verified application is updated in place with the
    new data, a fresh create_time and status 1. Either way the user's credit
    status is flipped to VERIFYING.
    '''
    # request.valid_data['enrollment_time'] = '{}-01 00:00:00'.format(request.valid_data['enrollment_time'])
    # 'YYYY-MM' from the client becomes a full 'YYYY-MM-01 00:00:00' timestamp.
    request.valid_data['graduate_time'] = '{}-01 00:00:00'.format(request.valid_data['graduate_time'])
    # NOTE(review): extracted but unused here; they still reach add_apply via
    # **request.valid_data below.
    body_choice_ids = request.valid_data['body_choice_ids']
    body_choice_text = request.valid_data['body_choice_text']
    apply_id = CreditService.add_apply(request.user_id, **request.valid_data)
    if not apply_id:
        # Already applied: refresh the pending (not yet verified) application.
        where = and_(
            CreditApply.user_id==request.user_id,
            CreditApply.status!=APPLY_STATUS.VERIFIED
        )
        request.valid_data['create_time'] = dt_obj.now()
        request.valid_data['status'] = 1  # presumably "resubmitted/pending" -- confirm
        CreditService.update_apply(where, **request.valid_data)
    CreditService.update_user_credit_status(request.user_id, CREDIT_STATUS.VERIFYING)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : ''
    }
    return jsonify_response(result)
apply_credit_photo_validator = Inputs({
    'id_card_photo' : TextField(min_length=0, max_length=100, msg='身份证号码'),
    'stu_card_photo' : TextField(min_length=0, max_length=100, msg='学生证号'),
})
@wechat_loggin_dec(validator=apply_credit_photo_validator)
def apply_credit_photo():
    '''Attach ID-card and student-card photos to the user's credit application,
    advancing it to the second step and marking the user as under review.'''
    data = request.valid_data
    CreditService.update_apply(
        CreditApply.user_id==request.user_id,
        id_card_photo=data.get('id_card_photo'),
        stu_card_photo=data.get('stu_card_photo'),
        status=APPLY_STATUS.SECOND_STEP)
    CreditService.update_user_credit_status(request.user_id, CREDIT_STATUS.VERIFYING)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : ''
    })
edit_name_validator = Inputs(
    {
        'name' : TextField(min_length=0, max_length=100, msg='修改名字'),
    }
)
@wechat_loggin_dec(required=True, validator=edit_name_validator, app=True)
def edit_name():
    '''Change the current user's display name.

    Fix: removed a leftover Python-2 debug print of the new name.
    '''
    name = request.valid_data.get('name')
    count = UserService.update_name(request.user_id, name)
    if count:
        # Record the rename -- presumably used to limit/audit rename frequency.
        UserService.add_edit_name_log(request.user_id)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '修改成功'
    }
    return jsonify_response(result)
def get_current_city_id():
    '''Resolve the current city id.

    Precedence: explicit city_id request parameter, then the chosen or
    geolocated city cookie (mapped through the Baidu city code), then city 1.
    '''
    explicit = request.valid_data.get('city_id')
    if explicit:
        return explicit
    code = get_cookie('choose_city_code') or get_cookie('city_code')
    if code:
        city = DataService.get_city_by_baidu_city_code(code)
        if city:
            return city.id
    return 1
item_list_html_validator = Inputs(
    {
        'sub_cat_id' : Optional(IdField(msg='分类id')),
        'hospital_id' : Optional(IdField(msg='医院id')),
        'city_id' : Optional(IdField(msg='城市id')),
        'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数')),
        'sort_type' : Optional(IntChoiceField(choices=[1,2,3,4], msg='排序选项')),
    }
)
@wechat_loggin_dec(required=False, validator=item_list_html_validator)
def item_list_html():
    '''Item list page: category tree (with a synthetic "recommended" category
    id 0), sort options and city picker; returns JSON when ?json= is truthy.'''
    sub_cat_id = request.valid_data.get('sub_cat_id')
    sort_type = request.valid_data.get('sort_type') or 1
    order_choices = [
        {'id':1, 'name':'综合排序'},
        {'id':2, 'name':'销量优先'},
        {'id':3, 'name':'低价优先'},
        {'id':4, 'name':'高价优先'},
    ]
    has_more, citys = DataService.get_paged_cities()
    cat_id = None
    subcat = None
    if sub_cat_id:
        subcat = ItemService.get_subcat_dict_by_id(sub_cat_id)
    # Resolve the selected sort option object from its id.
    sort_type_obj = None
    if sort_type:
        for i in order_choices:
            if i['id'] == sort_type:
                sort_type_obj = i
    all_cats = ItemService.get_item_cats()
    all_sub_cats = ItemService.get_item_subcats()
    _, all_recommend_subcats = ItemService.get_paged_recommend_subcats(limit=1000)
    id_order_map = {i['sub_cat_id']:i['sort_order'] for i in all_recommend_subcats}
    recommend_subcat_ids = set(i['sub_cat_id'] for i in all_recommend_subcats)
    # Python 2: filter() returns a list here, so the in-place sort works.
    recommend_subcats = filter(lambda i:i['id'] in recommend_subcat_ids, all_sub_cats)
    recommend_subcats.sort(key=lambda i: id_order_map[i['id']])
    # Synthetic category 0 holds the recommended sub-categories.
    item_cat = [
        {
            'id': 0,
            'name':'推荐',
            'sub_cats':recommend_subcats,
            'icon' : CAT_ICONS[0],
            'icon_active' : CAT_ICONS_ACTIVE[0]
        }]
    for cat in all_cats:
        tmp = {'name': cat.name, 'id': cat.id}
        tmp['sub_cats'] = [i for i in all_sub_cats if cat.id in i['cat_id_list']]
        tmp['icon'] = CAT_ICONS.get(cat.id) or ''
        tmp['icon_active'] = CAT_ICONS_ACTIVE.get(cat.id) or ''
        item_cat.append(tmp)
    # Defaults: first sort option, first recommended sub-category.
    sort_type_obj = sort_type_obj or order_choices[0]
    subcat = subcat or item_cat[0]['sub_cats'][0]
    city_id = get_current_city_id()
    city = None
    for the_city in citys:
        if the_city['id']==city_id: city = the_city
    # Tag recommended sub-categories as also belonging to category 0.
    for i in all_sub_cats:
        if i['id'] in recommend_subcat_ids:
            i['cat_id_list'].append(0)
    city = city or citys[0]
    result = {
        'order_choices': order_choices,
        'data': item_cat,
        'all_sub_cats':all_sub_cats,
        'citys': citys,
        'sort_type_obj':sort_type_obj,
        'city': city,
        'subcat': subcat
    }
    if request.args.get('json'):
        return jsonify_response(result)
    return render_template('user/item_list.html', **result)
hospital_list_html_validator = Inputs(
    {
        'sub_cat_id' : Optional(IdField(msg='分类id')),
        'hospital_id' : Optional(IdField(msg='医院id')),
        'city_id' : Optional(IdField(msg='城市id')),
        'offset' : Optional(TextField(min_length=1, max_length=100, msg='分页参数')),
        'sort_type' : Optional(IntChoiceField(choices=[1,2,3,4], msg='排序选项')),
    }
)
@wechat_loggin_dec(required=False, validator=hospital_list_html_validator)
def hospital_list_html():
    '''Hospital list page: category tree with a synthetic "all" sub-category
    and a "recommended" category id 0; returns JSON when ?json= is truthy.
    Largely parallels item_list_html.'''
    sub_cat_id = request.valid_data.get('sub_cat_id')
    sort_type = request.valid_data.get('sort_type') or 1
    order_choices = HOSPITAL_ORDER_CHOICES
    has_more, citys = DataService.get_paged_cities()
    cat_id = None
    subcat = None
    if sub_cat_id:
        subcat = ItemService.get_subcat_dict_by_id(sub_cat_id)
    # Resolve the selected sort option object from its id.
    sort_type_obj = None
    if sort_type:
        for i in order_choices:
            if i['id'] == sort_type:
                sort_type_obj = i
    all_cats = ItemService.get_item_cats()
    all_sub_cats = ItemService.get_item_subcats()
    _, all_recommend_subcats = ItemService.get_paged_recommend_subcats(limit=1000)
    id_order_map = {i['sub_cat_id']:i['sort_order'] for i in all_recommend_subcats}
    recommend_subcat_ids = set(i['sub_cat_id'] for i in all_recommend_subcats)
    # Python 2: filter() returns a list here, so the in-place sort works.
    recommend_subcats = filter(lambda i:i['id'] in recommend_subcat_ids, all_sub_cats)
    recommend_subcats.sort(key=lambda i: id_order_map[i['id']])
    # Synthetic "all" sub-category, inserted first in every listing.
    total_cat = {'id': 0, 'name':'全部', 'cat_id_list': [0]
    }
    all_sub_cats.insert(0, total_cat)
    recommend_subcats.insert(0, total_cat)
    # Synthetic category 0 holds the recommended sub-categories.
    item_cat = [
        {
            'id': 0,
            'name':'推荐',
            'sub_cats':recommend_subcats,
            'icon' : CAT_ICONS[0],
            'icon_active' : CAT_ICONS_ACTIVE[0]
        }]
    for cat in all_cats:
        tmp = {'name': cat.name, 'id': cat.id}
        tmp['sub_cats'] = [i for i in all_sub_cats if cat.id in i['cat_id_list']]
        tmp['icon'] = CAT_ICONS.get(cat.id) or ''
        tmp['icon_active'] = CAT_ICONS_ACTIVE.get(cat.id) or ''
        item_cat.append(tmp)
    # Defaults: first sort option, first recommended sub-category.
    sort_type_obj = sort_type_obj or order_choices[0]
    subcat = subcat or item_cat[0]['sub_cats'][0]
    city_id = get_current_city_id()
    city = None
    for the_city in citys:
        if the_city['id']==city_id: city = the_city
    # Tag recommended sub-categories as also belonging to category 0.
    for i in all_sub_cats:
        if i['id'] in recommend_subcat_ids:
            i['cat_id_list'].append(0)
    city = city or citys[0]
    result = {
        'order_choices': order_choices,
        'data': item_cat,
        'all_sub_cats':all_sub_cats,
        'citys': citys,
        'sort_type_obj':sort_type_obj,
        'city': city,
        'subcat': subcat
    }
    if request.args.get('json'):
        return jsonify_response(result)
    return render_template('user/hospital_list.html', **result)
@wechat_loggin_dec(required=False)
def menu_credit_apply():
    '''Credit-application menu entry point.

    Not logged in -> login page (with return URL); no application yet ->
    the application form; otherwise -> the result page with the predicted
    review-completion date (next working day after submission).
    '''
    if not request.user_id:
        return redirect('/static/user/login.html?next=/user/menu_credit_apply/')
    application = CreditService.get_apply_dict_by_userid(request.user_id)
    if not application:
        return redirect('static/user/applyer-infor.html')
    predict_time = str(get_next_working_day(str(application['create_time'])))[:10]
    return render_template('user/apply_result.html', apply=application, predict_time=predict_time)
my_order_bill_validator = Inputs(
    {
        'order_id' : IdField(msg='订单id')
    }
)
@wechat_loggin_dec(required=True, validator=my_order_bill_validator, app=True)
def my_order_bill():
    '''Installment bill for one order: item/hospital info, total, amount
    already repaid, remaining balance, and the per-period pay logs.'''
    order_id = request.valid_data.get('order_id')
    order = OrderService.get_user_order(order_id, request.user_id)
    assert order, '订单不存在'
    # NOTE(review): starts from an empty or_() and appends to it -- relies on
    # the installed SQLAlchemy version supporting append on clause lists.
    where = or_(
    )
    where.append(PeriodPayLog.order_id==order_id)
    order = OrderService.get_order_by_id(order_id)
    item = ItemService.get_item_dict_by_id(order.item_id)
    hospital = ItemService.get_hospital_dict_by_id(item['hospital_id'], fields=['id','name'])
    logs = CreditService.get_period_pay_logs(request.user_id, where)
    total = 0
    repayed = 0
    logs = [i.as_dict() for i in logs]
    for log in logs:
        get_delayed_info(log)
        total += log['fee'] + log['amount'] + log['punish']
        if log['status']==1:
            repayed += log['fee'] + log['amount'] + log['punish']
        else:
            if log['delayed']:
                cacl_punish_fee(log) # overdue unpaid installment: recompute the late fee dynamically
                # NOTE(review): punish was already added to total above and is
                # added again after recomputation -- confirm this double count
                # is intended.
                total += log['punish']
    fetch_order_refs(logs)
    for log in logs:
        log['item_id'] = log['order']['item_id']
    fetch_item_refs(logs, fields=['id', 'title'])
    remain = total - repayed
    item['price'] = format_price(order.total)
    result = {
        'item' : item,
        'total' : format_price(total),
        'hospital' : hospital,
        'repayed' : format_price(repayed),
        'remain' : format_price(remain),
        'infos' : logs,
    }
    return jsonify_response(result)
hospital_item_list_validator = Inputs(
    {
        'hospital_id' : IdField(msg='医院id')
    }
)
@wechat_loggin_dec(required=False, validator=hospital_item_list_validator)
def hospital_item_list():
    '''HTML page listing the active items of one hospital.

    Fixes: removed the unreachable `return jsonify_response(result)` after the
    render_template return, and a leftover Python-2 debug print of the offset.
    '''
    hospital_id = request.valid_data.get('hospital_id')
    where = and_()
    where.append(Item.status==1)
    if hospital_id:
        where.append(Item.hospital_id==hospital_id)
    fields = ['id', 'hospital_id', 'title', 'price', 'orig_price', 'support_choice_list', 'image', 'has_fee']
    has_more, items = ItemService.get_paged_items(where=where, fields=fields)
    fetch_min_period_info(items)
    fetch_hospital_refs(items, fields=['id','name'])
    offset = ''
    # NOTE(review): offset deliberately keeps the trailing '_' of the
    # original '<id>_' format -- confirm the client expects that.
    if items: offset = str(items[-1]['id']) + '_' + ''
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'has_more' : has_more,
        'infos' : items,
        'offset' : offset
    }
    return render_template('user/hospital_item_list.html', **result)
order_pay_success_validator = Inputs({
    'order_id' : IdField(msg='订单id')
})
@wechat_loggin_dec(required=False, validator=order_pay_success_validator)
def order_pay_success():
    '''Landing page after a successful payment, with two recommended items.'''
    order_id = request.valid_data.get('order_id')
    _, recommended = ItemService.get_paged_items(limit=2)
    fetch_hospital_refs(recommended)
    fetch_min_period_info(recommended)
    return render_template('user/order_pay_success.html',
                           order_id=order_id,
                           infos=recommended)
repayment_pay_success_validator = Inputs({
    'repayment_id' : IdField(msg='还款id')
})
@wechat_loggin_dec(required=False, validator=repayment_pay_success_validator)
def repayment_pay_success():
    '''Landing page after a successful repayment.

    The validated repayment_id is currently not used by the template.
    '''
    return render_template('user/repayment_pay_success.html')
cancel_order_validator = Inputs(
    {
        'order_id' : IdField(msg='订单id')
    }
)
@wechat_loggin_dec(validator=cancel_order_validator)
def cancel_order():
    '''Cancel an already-paid order, rolling back credit and coupon usage.

    Only orders currently in PAY_SUCCESS can be cancelled; the status
    condition is passed into update_order_status so the flip is guarded,
    and the rollbacks run only if that update actually changed a row.
    '''
    order_id = request.valid_data.get('order_id')
    order = OrderService.get_user_order(order_id, request.user_id)
    assert order, '订单不存在'
    where = Order.status==ORDER_STATUS.PAY_SUCCESS
    count = OrderService.update_order_status(order_id, ORDER_STATUS.CANCELED, request.user_id, where)
    if count:
        if order.credit_amount:
            # Give back only the credit not yet covered by repayments.
            repayment_amount = OrderService.order_repayment_logs_amount(order_id)
            remain_to_repayment = order.credit_amount - repayment_amount
            CreditService.modify_credit(request.user_id, -remain_to_repayment)
            CreditService.cancel_pay_logs(order_id)
        if order.coupon_id:
            # Status 0 presumably marks the coupon unused again -- confirm.
            CouponService.update_user_coupon_status(UserCoupon.id==order.coupon_id, 0)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '取消成功'
    }
    return jsonify_response(result)
cancel_pay_validator = Inputs({
    'order_id' : IdField(msg='订单id')
})
@wechat_loggin_dec(validator=cancel_pay_validator)
def cancel_pay():
    '''Cancel an order before payment, releasing reserved credit and coupon.'''
    order_id = request.valid_data.get('order_id')
    order = OrderService.get_user_order(order_id, request.user_id)
    assert order, '订单不存在'
    # Only orders still awaiting payment may be cancelled through this path.
    unpaid = Order.status.in_([ORDER_STATUS.NEW_ORDER, ORDER_STATUS.TO_PAY])
    updated = OrderService.update_order_status(order_id, ORDER_STATUS.CANCEL_BEFORE_PAY, request.user_id, unpaid)
    if updated:
        if order.credit_amount:
            # Give back the credit that was reserved for this order.
            CreditService.modify_credit(request.user_id, -(order.credit_amount))
        if order.coupon_id:
            CouponService.update_user_coupon_status(UserCoupon.id==order.coupon_id, 0)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '取消成功'
    })
finish_order_validator = Inputs({
    'order_id' : IdField(msg='订单id')
})
@wechat_loggin_dec(validator=finish_order_validator)
def finish_order():
    '''Let the user confirm an order as finished (once), bumping the item's counter.'''
    order_id = request.valid_data.get('order_id')
    order = OrderService.get_user_order(order_id, request.user_id)
    assert order, '订单不存在'
    # Guard: paid/finished orders the user has not already confirmed.
    guard = and_(
        Order.id==order_id,
        Order.user_finished==False,
        Order.status.in_([ORDER_STATUS.PAY_SUCCESS, ORDER_STATUS.FINISH])
    )
    updated = OrderService.update_order(guard, user_finished=True)
    if updated:
        ItemService.incr_item_count(order.item_id)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '完成订单'
    })
hospital_location_validator = Inputs({
    'hospital_id' : IdField(msg='医院id')
})
@wechat_loggin_dec(required=False, validator=hospital_location_validator)
def hospital_location():
    '''Map/location page for one hospital.'''
    hospital = ItemService.get_hospital_dict_by_id(request.valid_data.get('hospital_id'))
    return render_template('user/hospital-location.html', hospital=hospital)
@wechat_loggin_dec(required=False)
def meifenfen_city():
    '''City-selection page, driven by the geolocation cookies.

    cat codes: 1 = could not geolocate, 2 = city not yet served, 3 = served.

    Fixes: the ?json= branch assigned `response` and was then unconditionally
    overwritten by the HTML response, so the JSON variant never took effect;
    it is now an if/else (the city model is serialized with as_dict() for the
    JSON variant, matching meifenfen_index). Also removed a leftover
    Python-2 debug print.
    '''
    city_code = get_cookie('city_code')
    city_name = get_cookie('city_name')
    city = None
    if city_code:
        city = DataService.get_city_by_baidu_city_code(city_code)
    _, citys = DataService.get_paged_city_list()
    cat = 1  # 1: cannot geolocate, 2: city not served, 3: city available
    if city_code and not city:
        cat = 2
    elif city:
        cat = 3
    context = {
        'city' : city,
        'citys' :citys,
        'city_name': city_name,
        'city_code': city_code,
        'cat' : cat
    }
    if request.args.get('json'):
        json_context = dict(context, city=city.as_dict() if city else None)
        response = jsonify_response(json_context)
    else:
        response = template_response(render_template('user/meifenfen_city.html', **context))
    if city:
        # Remember the resolved city for a year.
        set_cookie(response, 'city_id', str(city.id), 86400*365)
    return response
@wechat_loggin_dec(required=False, need_openid=True)
def meifenfen_index():
    '''Home page: banners, quick-entry sub-categories, current activity items
    and recommended items; returns JSON when ?json= is truthy.'''
    banners = [
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1aco140fa18uljmo17f23cvvu111456800706495.jpg',
        'link':'http://{}/static/user/Activities/home.html'.format(SERVER_NAME),
        },
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1a5cd7ihe2aokci1uqdpfk1ivq1banner_01.jpg',
        'link':'http://{}/static/user/banner1.html'.format(SERVER_NAME)},
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1a53eou161cs8mku16tm1h91arh1banner_02.jpg',
        'link':'http://{}/static/user/banner2.html'.format(SERVER_NAME)},
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1a5cd7ihe2aokci1uqdpfk1ivq1banner_03.jpg',
        'link':'http://{}/user/menu_credit_apply/'.format(SERVER_NAME)},
    ]
    city = None
    # Chosen city takes precedence over the geolocated one.
    city_name = get_cookie('choose_city_name') or get_cookie('city_name')
    city_code = get_cookie('choose_city_code') or get_cookie('city_code')
    if city_code:
        city = DataService.get_city_by_baidu_city_code(city_code)
    _, recommend_sub_cats = ItemService.get_paged_recommend_subcats(_sort='sort_order', _sort_dir='ASC')
    fetch_item_subcat_refs(recommend_sub_cats)
    current_activity = ActivityService.get_current_activity() or {}
    where = ActivityItem.activity_id==current_activity.get('id')
    fields = ('id', 'item_id', 'price', 'image')
    _, activity_items = ItemService.get_paged_activity_items(fields=fields, where=where, _sort='sort_order', _sort_dir='ASC')
    fields = ('id', 'item_id', 'image', 'desc')
    where = None
    _, recommend_items = ItemService.get_paged_recommend_items(fields=fields, where=where, _sort='sort_order', _sort_dir='ASC')
    # Attach stored image dimensions so the client can reserve layout space.
    img_keys = [get_img_key(i['image']) for i in recommend_items]
    img_sizes = DataService.get_imgs_size_by_keys(img_keys)
    img_key_size_map = {i['key']:{'width':i['width'],'height':i['height']} for i in img_sizes}
    print img_key_size_map  # NOTE(review): leftover debug print
    for rec in recommend_items:
        key = get_img_key(rec['image'])
        rec['width'] = img_key_size_map[key]['width']
        rec['height'] = img_key_size_map[key]['height']
    fields = ['id', 'hospital_id', 'title', 'price', 'orig_price', 'has_fee', 'support_choice_list']
    fetch_item_refs(chain(activity_items, recommend_items), fields=fields)
    # Hard-coded quick-entry tiles override the DB-driven list fetched above.
    recommend_sub_cats = [
        {'image': 'http://www.meifenfen.com/static/user/img/home-btn1.png', 'id':5},
        {'image': 'http://www.meifenfen.com/static/user/img/home-btn2.png', 'id':8},
        {'image': 'http://www.meifenfen.com/static/user/img/home-btn3.png', 'id':3},
    ]
    first_activity_item = None
    if activity_items:
        first_activity_item = activity_items[0]
        first_activity_item['hospital'] = ItemService.get_hospital_dict_by_id(first_activity_item['item']['hospital_id'])
    # Deduplicate the item dicts shared by activity and recommendation lists.
    item_dict_list = [i['item'] for i in chain(activity_items, recommend_items)]
    item_list = []
    for i in item_dict_list:
        if i not in item_list:
            item_list.append(i)
    # The activity price overrides the item's normal display price.
    for item in activity_items:
        item['item']['price'] = item['price']
    fetch_min_period_info(item_list)
    context = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'recommend_sub_cats' : recommend_sub_cats,
        'activity_items' : activity_items,
        'recommend_items' : recommend_items,
        'activity' : current_activity,
        'banners' : banners,
        'city_code' : city_code,
        'city_name' : city_name,
        'city' : city.as_dict() if city else None
    }
    js_sdk_context = get_jssdk_context()  # NOTE(review): computed but unused
    if request.args.get('json'):
        return jsonify_response(context)
    return render_template('user/meifenfen.html', **context)
@wechat_loggin_dec(required=False, need_openid=True)
def meifenfen_new_index():
    '''New home page: banners, quick entries, activity/recommended items,
    beauty tutorials and recommended hospitals; JSON when ?json= is truthy.
    Largely parallels meifenfen_index.'''
    banners = [
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1aco140fa18uljmo17f23cvvu111456800706495.jpg',
        'link':'http://{}/static/user/Activities/home.html'.format(SERVER_NAME),
        },
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/redpack_banner.jpg',
        'link': 'http://www.meifenfen.com/user/redpack_index/'
        },
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1a5cd7ihe2aokci1uqdpfk1ivq1banner_01.jpg',
        'link':'http://{}/static/user/banner1.html'.format(SERVER_NAME)},
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1a53eou161cs8mku16tm1h91arh1banner_02.jpg',
        'link':'http://{}/static/user/banner2.html'.format(SERVER_NAME)},
        {'image': 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1a5cd7ihe2aokci1uqdpfk1ivq1banner_03.jpg',
        'link':'http://{}/user/menu_credit_apply/'.format(SERVER_NAME)},
    ]
    city_id = get_cookie('city_id')
    city = DataService.get_city_dict_by_id(city_id)
    _, recommend_sub_cats = ItemService.get_paged_recommend_subcats(_sort='sort_order', _sort_dir='ASC')
    fetch_item_subcat_refs(recommend_sub_cats)
    current_activity = ActivityService.get_current_activity() or {}
    where = ActivityItem.activity_id==current_activity.get('id')
    fields = ('id', 'item_id', 'price', 'image')
    _, activity_items = ItemService.get_paged_activity_items(fields=fields, where=where, _sort='sort_order', _sort_dir='ASC')
    fields = ('id', 'item_id', 'image', 'desc')
    where = None
    _, recommend_items = ItemService.get_paged_recommend_items(fields=fields, where=where, _sort='sort_order', _sort_dir='ASC')
    # Attach stored image dimensions so the client can reserve layout space.
    img_keys = [get_img_key(i['image']) for i in recommend_items]
    img_sizes = DataService.get_imgs_size_by_keys(img_keys)
    img_key_size_map = {i['key']:{'width':i['width'],'height':i['height']} for i in img_sizes}
    print img_key_size_map  # NOTE(review): leftover debug print
    for rec in recommend_items:
        key = get_img_key(rec['image'])
        rec['width'] = img_key_size_map[key]['width']
        rec['height'] = img_key_size_map[key]['height']
    fields = ['id', 'hospital_id', 'title', 'price', 'orig_price', 'has_fee', 'support_choice_list']
    fetch_item_refs(chain(activity_items, recommend_items), fields=fields)
    # Hard-coded quick-entry tiles override the DB-driven list fetched above.
    recommend_sub_cats = [
        {'image': 'http://www.meifenfen.com/static/user/img/home-btn1.png', 'id':5},
        {'image': 'http://www.meifenfen.com/static/user/img/home-btn2.png', 'id':8},
        {'image': 'http://www.meifenfen.com/static/user/img/home-btn3.png', 'id':3},
    ]
    first_activity_item = None
    if activity_items:
        first_activity_item = activity_items[0]
        first_activity_item['hospital'] = ItemService.get_hospital_dict_by_id(first_activity_item['item']['hospital_id'])
    # Deduplicate the item dicts shared by activity and recommendation lists.
    item_dict_list = [i['item'] for i in chain(activity_items, recommend_items)]
    item_list = []
    for i in item_dict_list:
        if i not in item_list:
            item_list.append(i)
    # The activity price overrides the item's normal display price.
    for item in activity_items:
        item['item']['price'] = item['price']
    fetch_min_period_info(item_list)
    # Top-two published beauty tutorials and top-three recommended hospitals.
    where = BeautyEntry.status==1
    _, tutorials = TutorialService.get_paged_tutorial_entries(where=where)
    tutorials = tutorials[:2]
    tutorial_tags = ['原理', '手法', '案例', '大人说']
    _sort_dir = 'ASC'
    _sort = 'sort_order'
    _, recommend_hospitals = ItemService.get_paged_recommend_hospitals(_sort_dir=_sort_dir, _sort=_sort)
    fetch_hospital_refs(recommend_hospitals)
    recommend_hospitals = recommend_hospitals[:3]
    for tutorial in tutorials:
        tutorial['create_time'] = get_time_str_from_dt(tutorial['create_time'], '%-m.%-d')
    context = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'tutorials' : tutorials,
        'recommend_sub_cats' : recommend_sub_cats,
        'activity_items' : activity_items,
        'recommend_items' : recommend_items,
        'activity' : current_activity,
        'banners' : banners,
        'tutorial_tags' : tutorial_tags,
        'recommend_hospitals' : recommend_hospitals,
        'city' : city
    }
    js_sdk_context = get_jssdk_context()  # NOTE(review): computed but unused
    if request.args.get('json'):
        return jsonify_response(context)
    return render_template('user/meifenfen_new.html', **context)
@wechat_loggin_dec(required=False, need_openid=False)
def api_doc():
    '''Serve the static API documentation page.'''
    return send_from_directory('static', 'doc.html')
mei_tutorials_validator = Inputs(
    {
        'cat' : Optional(IntChoiceField(choices=[1,2,3], msg='攻略类型')), # 1 newest, 2 oldest, 3 hottest
        'offset' : Optional(TextField(min_length=0, max_length=1000, msg='分页参数'))
    }
)
@wechat_loggin_dec(required=False, validator=mei_tutorials_validator)
def mei_tutorials():
    '''Beauty-tutorials list with keyset pagination.

    cat 1/2 page on id; cat 3 pages on a '<view_count>_<id>' composite offset.

    Fix: the hottest-sort offset guard tested `len(offset.split('_'))`,
    which is always >= 1 (truthy), so a malformed offset without an
    underscore crashed on tuple unpacking; it now requires exactly two
    parts. The stray `if cat==2` was also made `elif` to match the chain
    (same behavior, since cat holds a single choice).
    '''
    cat = request.valid_data.get('cat')
    offset = request.valid_data.get('offset')
    offset_id = None
    _sort = 'id'
    _sort_dir = 'DESC'
    filters = [BeautyEntry.status==1]
    if cat==1:
        if offset: filters.append(BeautyEntry.id<offset)
    elif cat==2:
        _sort_dir = 'ASC'
        if offset: filters.append(BeautyEntry.id>offset)
    elif cat==3:
        _sort = 'view_count'
        _sort_dir = 'DESC'
        # Composite offset: same view_count with smaller id, or any smaller
        # view_count.
        if offset and len((offset or '').split('_'))==2:
            view_count, offset_id = offset.split('_')
            where = or_(
                and_(
                    BeautyEntry.view_count==view_count,
                    BeautyEntry.id<offset_id
                ),
                and_(
                    BeautyEntry.view_count<view_count,
                )
            )
            filters.append(where)
    where = and_(*filters)
    has_more, infos = TutorialService.get_paged_tutorial_entries(
        where=where,
        _sort=_sort, _sort_dir=_sort_dir)
    offset = ''
    if infos:
        if cat!=3:
            offset = str(infos[-1][_sort])
        else:
            offset = '{}_{}'.format(infos[-1]['view_count'], infos[-1]['id'])
    for info in infos:
        info['create_time'] = get_time_str_from_dt(info['create_time'], '%-m-%-d')
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'has_more' : has_more,
        'cat' : cat,
        'infos' : infos,
        'offset' : offset
    }
    return jsonify_response(result)
tutorial_detail_validator = Inputs({
    'tutorial_id': IdField(msg='攻略id')
})
@wechat_loggin_dec(required=False, validator=tutorial_detail_validator)
def tutorial_detail():
    '''Beauty-tutorial detail with its related items; bumps the view counter.'''
    tutorial_id = request.valid_data.get('tutorial_id')
    tutorial = TutorialService.get_tutorial(tutorial_id)
    assert tutorial, '美攻略不存在'
    related = ItemService.get_items_by_ids(tutorial['item_id_list'])
    fetch_min_period_info(related)
    fetch_hospital_refs(related, fields=['id','name'])
    TutorialService.incr_tutorial_view_count(tutorial_id)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'tutorial' : tutorial,
        'infos' : related,
    })
daily_coupons_validator = Inputs(
    {
        'offset' : Optional(TextField(min_length=0, max_length=1000, msg='分页参数'))
    }
)
@wechat_loggin_dec(required=False, validator=daily_coupons_validator)
def daily_coupons():
    '''Currently-running daily coupon campaigns, grouped by start day, with
    the caller's per-campaign claim state.'''
    offset = request.valid_data.get('offset')
    now = dt_obj.now()
    # Only campaigns whose window contains "now".
    where = and_(
        DailyCoupon.start_time<now,
        DailyCoupon.end_time>now
    )
    limit = 1000  # NOTE(review): shadowed by the literal 1000 passed below
    _sort = 'start_time'
    _sort_dir = 'DESC'
    has_more, coupons = TutorialService.get_paged_daily_coupons(
        limit=1000, where=where, offset=offset, _sort=_sort, _sort_dir=_sort_dir
    )
    from collections import defaultdict
    datas = defaultdict(list)
    fetch_coupon_refs(coupons)
    set_coupon_use_time(coupons)
    for coupon in coupons:
        coupon['create_time_str'] = format_dt(coupon['start_time'])
    # Group campaigns by their formatted start day.
    for coupon in coupons:
        datas[coupon['create_time_str']].append(coupon)
    daily_ids = [i['id'] for i in coupons]
    daily_received_map = TutorialService.get_user_daily_by_ids(request.user_id, daily_ids)
    for i in coupons:
        i['has_received'] = bool(daily_received_map.get(i['id']))
    offset = ''
    if coupons:
        offset = str(coupons[-1][_sort])
    infos_by_day = []
    # NOTE(review): plain dict iteration -- the day groups may come out in
    # arbitrary order; confirm the client sorts them, or use an ordered
    # grouping here.
    for k,v in datas.items():
        tmp = {
            'title': k,
            'infos': v,
            #'note': '每日10点,惊喜不断!'
        }
        if tmp['infos'][0]['title']:
            tmp['note'] = tmp['infos'][0]['title']
        else:
            tmp['note'] = ''
        infos_by_day.append(tmp)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'infos' : infos_by_day,
        'has_more' : has_more,
        'offset' : offset
    }
    return jsonify_response(result)
receive_coupon_validator = Inputs({
    'daily_id' : IdField(msg='请选择活动')
})
@wechat_loggin_dec(required=True, validator=receive_coupon_validator)
def receive_coupon():
    '''Claim a daily coupon: one per user, while the campaign's stock lasts.'''
    daily_id = request.valid_data.get('daily_id')
    assert not TutorialService.get_user_daily(request.user_id, daily_id), '您已领取过'
    daily_coupon = TutorialService.get_daily_coupon(daily_id)
    assert daily_coupon, '活动不存在'
    assert daily_coupon['total']>daily_coupon['sent'], '已领取完'
    # The conditional increment is the real stock gate; reserve one unit first.
    assert TutorialService.incr_daily_coupon_received(daily_id), '领取完了'
    if TutorialService.send_daily_coupon(request.user_id, daily_id):
        CouponService.send_user_coupon(request.user_id, daily_coupon['coupon_id'])
    # Re-read so the response reports the stock remaining after this claim.
    daily_coupon = TutorialService.get_daily_coupon(daily_id)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '领取成功',
        'count' : daily_coupon['remain']
    })
resend_user_coupon_validator = Inputs({
    'user_coupon_ids' : TextField(min_length=1, max_length=100, msg='逗号分隔的优惠券id字符串'),
    'phone' : MobileField(msg='用户手机号'),
})
@wechat_loggin_dec(required=True, validator=resend_user_coupon_validator, app=True)
def resend_user_coupon():
    '''Gift one or more of the current user's coupons to another user,
    identified by phone number (gifting to oneself is rejected).'''
    phone = request.valid_data.get('phone')
    coupon_ids = str_to_int_list(request.valid_data.get('user_coupon_ids'))
    recipient = UserService.get_user_by_phone(phone)
    assert recipient, '手机号对应用户不存在'
    assert recipient.id!=request.user_id, '不能转赠给自己'
    for coupon_id in coupon_ids:
        CouponService.resend_user_coupon(request.user_id, recipient.id, coupon_id)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '转赠成功'
    })
@wechat_loggin_dec(required=False)
def set_open_id():
    '''Set a fixed WeChat open_id cookie (30 days) and return an empty JSON body.'''
    # NOTE(review): the hard-coded open_id looks like a development/testing
    # backdoor -- confirm this endpoint is not exposed in production.
    result = {}
    response= jsonify_response(result, with_response=True)
    set_cookie(response, 'open_id', 'o56qvw-ThtwfthGGlZ-XbH-3fjRc', 86400*30)
    return response
@wechat_loggin_dec(required=False, need_openid=True)
def login_link():
    '''Serve the static login page.

    Fix: removed a leftover Python-2 debug print.
    '''
    return send_from_directory('static', 'user/login.html')
@wechat_loggin_dec(required=False, need_openid=True)
def wechat_room_link():
    '''Serve the static activities home page.

    Fix: removed a leftover Python-2 debug print.
    '''
    return send_from_directory('static', 'user/Activities/home.html')
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,362
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/10f3ed6c72ed_.py
|
"""empty message
Revision ID: 10f3ed6c72ed
Revises: 75f96105f81
Create Date: 2015-11-27 15:05:27.624606
"""
# revision identifiers, used by Alembic.
revision = '10f3ed6c72ed'
down_revision = '75f96105f81'
from alembic import op
import sqlalchemy as sa
def upgrade():
    ''' Add nullable `data` column (string payload, up to 10000 chars) to `repayment`. '''
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('repayment', sa.Column('data', sa.String(length=10000), nullable=True))
    ### end Alembic commands ###
def downgrade():
    ''' Revert: drop the `data` column from `repayment`. '''
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('repayment', 'data')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,363
|
qsq-dm/mff
|
refs/heads/master
|
/user/room_design.py
|
# -*- coding: utf-8 -*-
from flask import request
from flask import redirect
from flask import render_template
from flask import send_from_directory
from sqlalchemy import and_
from sqlalchemy import or_
from models import db
from models import School
from models import RoomDesignDetail
from util.utils import jsonify_response
from util.utils import random_str
from util.utils import str_to_int_list
from util.utils import comma_str_to_list
from util.decorators import wechat_loggin_dec
from util.validators import Optional
from util.validators import Inputs
from util.validators import MobileField
from util.validators import TextField
from util.validators import IdField
from util.validators import IntChoiceField
from util.sign import sign_user
from util.sign import set_cookie
from util.sign import del_cookie
from ops.bulks import fetch_user_refs
from ops.item import ItemService
from ops.data import DataService
from ops.user import UserService
from ops.redpack import RedpackService
from ops.promote import PromoteService
from ops.cache import RoomDesignVoteCounter
from ops.room_design import RoomDesignService
from constants import ResponseCode
from thirdparty.sms import send_sms
from thirdparty.sms import gen_vcode
from thirdparty.wechat import exchange_code_for_token
from settings import MAX_TODAY_PASSWD_ATTEMPT
from settings import MAX_TODAY_VCODE_ATTEMPT
from settings import CONTACT
from constants import VOTE_COUNT_SOURCE_MAP
def set_tip_msg(rank, is_myself=True):
    ''' Build the vote-progress tip line for a room at the given rank.

    rank      -- the room's current 1-based position on the vote leaderboard.
    is_myself -- True when shown to the room's owner; other viewers only see
                 the gap to the next-ranked room.
    Returns a display string (user-facing Chinese copy, kept byte-identical).

    Bugfix: the original used `if dif < 500: ... if dif > 500: ...` in every
    branch, so `dif == 500` fell through and returned None; the second test
    is now an `else`.
    '''
    # Votes held at rank 50 (the qualification cut-off) and at this rank.
    rank_50 = RoomDesignVoteCounter.get_vote_by_rank(50) or 0
    vote = RoomDesignVoteCounter.get_vote_by_rank(rank) or 0
    dif = rank_50 - vote
    if not is_myself:
        # Viewer of someone else's room: gap to the room one rank above.
        dif = (RoomDesignVoteCounter.get_vote_by_rank(rank-1) - vote) if rank >1 else 0
        if rank==1:
            return '第一名'
        else:
            return '距离上一名还差{}票'.format(dif)
    if rank>50:
        if dif < 500:
            return '您只差{}票就可以获得入围大礼包了哦,加油!'.format(dif)
        else:
            vote_firends = dif/50  # Python 2 integer division: votes -> friend count
            return '您距离入围大礼包只差{}-{}个好友来帮忙咯'.format(vote_firends, vote_firends*2)
    elif 21<rank<50:
        dif = RoomDesignVoteCounter.get_vote_by_rank(rank-1) - vote
        if dif < 500:
            return '距您上一名还差{}票'.format(dif)
        else:
            vote_firends = dif/50
            return '您距离入围大礼包只差{}-{}个好友来帮忙咯'.format(vote_firends, vote_firends*2)
    else:
        # NOTE(review): rank==50 and ranks 2..21 both land here — confirm intended.
        dif = RoomDesignVoteCounter.get_vote_by_rank(1) - vote
        if dif < 500:
            return '您距离2000元红包只差{}票了哦'.format(dif)
        else:
            vote_firends = dif/50
            return '您距离2000元红包只差{}-{}个好友来帮忙咯'.format(vote_firends, vote_firends*2)
room_detail_validator = Inputs(
    {
        'room_id' : IdField(msg='请输入寝室id'),
    }
)
@wechat_loggin_dec(required=False, validator=room_detail_validator, app=True)
def get_room_detail():
    ''' Room detail: vote count, rank, gap to the next rank, and the
    viewer's follow/attend/vote-privilege state. '''
    room_id = request.valid_data.get('room_id')
    user = RedpackService.get_qruser_by_openid(request.open_id)
    # "Followed" = wechat user record exists and has a nickname.
    has_followed= bool(user and user.nickname)
    privileges = None
    if request.user_id:
        privileges = RoomDesignService.get_user_vote_privilede(request.user_id)
    room = RoomDesignService.get_room_dict_by_id(room_id)
    assert room, '寝室不存在'
    is_myself = room['user_id'] == request.user_id
    # incr(id, 0) reads the counter without changing it.
    vote_count = RoomDesignVoteCounter.incr(room['id'], 0)
    rank = RoomDesignVoteCounter.rank(room['id'])
    # Vote gap to the room one rank above (0 when already first).
    pre_diff = RoomDesignVoteCounter.get_vote_by_rank(rank-1)-vote_count if rank>1 else 0
    room['rank']= rank
    where = RoomDesignDetail.user_id==request.user_id
    has_attend = bool(RoomDesignService.get_room(where))
    note = set_tip_msg(rank, is_myself) if rank else ''
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '寝室详情',
        'has_followed': has_followed,
        'room' : room,
        'note' : note,
        'vote_count': vote_count,
        'privileges': privileges,
        'pre_diff' : pre_diff,
        'has_attend': has_attend,
        'is_myself' : is_myself
    }
    return jsonify_response(result)
apply_room_validator = Inputs(
    {
        'school_id' : IdField(msg='请选择学校'),
        'phone' : MobileField(min_length=1, max_length=100, msg='请输入手机号'),
        'room_name' : TextField(min_length=1, max_length=100, msg='请给您的寝室取一个独一无二的名字'),
        'applyer_name' : TextField(min_length=1, max_length=100, msg='请输入参赛者的名字'),
        'addr' : TextField(min_length=1, max_length=100, msg='请输入地址'),
    }
)
@wechat_loggin_dec(required=True, validator=apply_room_validator, app=True)
def apply_room():
    ''' Enter the room-design contest. One application per user. '''
    phone = request.valid_data.get('phone')
    school_id = request.valid_data.get('school_id')
    room_name = request.valid_data.get('room_name')
    applyer_name= request.valid_data.get('applyer_name')
    addr = request.valid_data.get('addr')
    # Sequential application number issued by the counter cache.
    apply_no = RoomDesignVoteCounter.incr_apply_no()
    pics = None
    where = RoomDesignDetail.user_id==request.user_id
    my_room = RoomDesignService.get_room(where)
    has_attend = bool(my_room)
    assert not has_attend, '您已参与过了'
    room_id = RoomDesignService.create_room(request.user_id, room_name, applyer_name, apply_no, phone, addr, school_id, pics)
    # add_score(0) — presumably seeds the distinct-score set so the new room
    # is rankable; confirm in RoomDesignVoteCounter.
    RoomDesignVoteCounter.add_score(0)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'room_id' : room_id
    }
    return jsonify_response(result)
room_list_validator = Inputs(
    {
        'school_id': Optional(IdField(msg='请选择学校')),
        'cat' : IntChoiceField(choices=[1,2], msg='列表类型'), # 1 = newest entries, 2 = overall ranking
        'offset' : Optional(TextField(min_length=0, max_length=100, msg='请输入分页参数')),
    })
@wechat_loggin_dec(required=False, validator=room_list_validator, app=True)
def room_list():
    ''' Room listing. cat=1: newest entries, cursor = last id.
    cat=2: ranking, cursor = "votecount_id" (ties on vote_count broken by id). '''
    cat = request.valid_data.get('cat')
    offset = request.valid_data.get('offset')
    school_id = request.valid_data.get('school_id')
    where = None
    filters = []
    if cat==1:
        _sort = 'id'
        if offset: filters.append(RoomDesignDetail.id<offset)
    elif cat==2:
        _sort = 'vote_count'
        # Compound keyset cursor: same vote_count with smaller id, or any
        # strictly smaller vote_count.
        if offset and len((offset or '').split('_'))==2: # tedious pagination
            vote_count, offset_id = (offset or '').split('_')
            query = or_(
                and_(
                    RoomDesignDetail.vote_count==vote_count,
                    RoomDesignDetail.id<offset_id
                ),
                and_(
                    RoomDesignDetail.vote_count<vote_count
                )
            )
            filters.append(query)
    if school_id:
        filters.append(RoomDesignDetail.school_id==school_id)
    # Only rooms that uploaded at least one picture are listed.
    filters.append(RoomDesignDetail.pics_count>0)
    if filters: where = and_(*filters)
    has_more, rooms = RoomDesignService.get_paged_rooms(where=where, _sort=_sort)
    # Decorate each room with rank, tip text, and gap to the next rank.
    for room in rooms:
        room['rank'] = RoomDesignVoteCounter.rank(room['id'])
        rank = room['rank']
        # incr(id, 0) reads the counter without changing it.
        vote_count = RoomDesignVoteCounter.incr(room['id'], 0)
        pre_diff = RoomDesignVoteCounter.get_vote_by_rank(rank-1)-vote_count if rank>1 else 0
        room['note'] = set_tip_msg(rank, is_myself=False)
        room['pre_diff']= pre_diff
    # Build the next-page cursor from the last row.
    offset = ''
    if rooms:
        if cat==1:
            offset = str(rooms[-1]['id'])
        else:
            offset = '{}_{}'.format(str(rooms[-1]['vote_count']), str(rooms[-1]['id']))
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'infos' : rooms,
        'has_more' : has_more,
        'offset' : offset
    }
    return jsonify_response(result)
add_room_pics_validators = Inputs(
    {
        'room_id' : IdField(msg='请选择寝室'),
        'pics' : TextField(min_length=1, max_length=100, msg='逗号分隔的图片链接字符串'),
    })
@wechat_loggin_dec(required=True, validator=add_room_pics_validators)
def add_room_pics():
    ''' Attach pictures (comma-separated URL string) to a room entry and
    refresh the school's cached picture count. '''
    pics = request.valid_data.get('pics')
    room_id = request.valid_data.get('room_id')
    where = RoomDesignDetail.id==room_id
    room = RoomDesignService.get_room_dict_by_id(room_id)
    assert room, '寝室不存在'
    pic_list = comma_str_to_list(pics)
    assert len(pic_list), '请上传图片'
    #assert len(pic_list)==4, '必须上传4张图'
    # Count only non-empty entries (Python 2: filter returns a list).
    pics_count=len(filter(bool, pic_list))
    count = RoomDesignService.update_room(where, pics=pics, pics_count=pics_count)
    RoomDesignService.set_school_pics_count(room['school_id'])
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '添加成功'
    }
    return jsonify_response(result)
school_rooms_validator = Inputs(
    {
        'offset' : Optional(TextField(min_length=0, max_length=100, msg='分页参数')),
    })
@wechat_loggin_dec(required=False, validator=school_rooms_validator, app=True)
def school_rooms():
    ''' School showcase: Shanghai schools that have room pictures. '''
    offset = request.valid_data.get('offset')
    where = and_(
        School.city_name=='上海',
        School.pics_count>0
    )
    limit = 100
    fields = ['id', 'name']
    # NOTE(review): the incoming `offset` is read but never applied to the
    # query, so with limit=100 this effectively serves one page — confirm.
    has_more, schools = DataService.get_paged_schools(where=where, fields=fields, limit=limit)
    for i in schools:
        i['count'] = RoomDesignService.count_school_pics(i['id'])
    # Next-page cursor = last school id.
    offset = ''
    if schools:
        offset = str(schools[-1]['id'])
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'infos' : schools,
        'has_more' : has_more,
        'offset' : offset
    }
    return jsonify_response(result)
vote_room_validator = Inputs(
    {
        'room_id' : IdField(msg='请选择寝室'),
        'source' : IntChoiceField(choices=[1,2,3], msg='投票类型'), # 1 = credit application approved, 2 = completed an order, 3 = ordinary vote
    })
@wechat_loggin_dec(required=True, validator=vote_room_validator, app=True)
def vote_room():
    ''' Cast a vote for a room using one of three vote privileges. '''
    room_id = request.valid_data.get('room_id')
    source = request.valid_data.get('source')
    privileges = RoomDesignService.get_user_vote_privilede(request.user_id)
    # status: 1 = already used, -1 = not available; anything else is usable.
    privilege_map = {i['id']:i['status'] for i in privileges}
    # NOTE(review): raises KeyError if `source` is absent from privileges —
    # confirm get_user_vote_privilede always returns all three entries.
    assert privilege_map[source]!=1, '您已投过了'
    assert privilege_map[source]!=-1, '您没有投票机会,快去申请额度或下单吧'
    # Score held by the room before this vote (incr(id, 0) is a read).
    current_score = RoomDesignVoteCounter.incr(room_id, 0)
    count = 1
    if source!=3:
        # Non-ordinary privileges are single-use: consume first.
        count = RoomDesignService.update_vote_privilege_status(request.user_id, source)
    vote_count = VOTE_COUNT_SOURCE_MAP[source]
    if count:
        RoomDesignService.incr_room_vote(room_id, vote_count)
        RoomDesignVoteCounter.incr(room_id, vote_count)
        RoomDesignService.add_vote_log(room_id, request.user_id, source)
        # Keep the distinct-score set in sync: drop the old score when no room
        # holds it any more, then record the new score.
        if not RoomDesignVoteCounter.exists_score(current_score):
            if current_score>0: RoomDesignVoteCounter.remove_score(current_score)
        current_score = RoomDesignVoteCounter.incr(room_id, 0)
        RoomDesignVoteCounter.add_score(current_score)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '投票成功'
    }
    return jsonify_response(result)
@wechat_loggin_dec(required=False)
def room_index():
    ''' Activity home page: the viewer's own room, 2 newest entries,
    2 top-voted entries, and 4 school showcases. '''
    limit = 2
    first = []
    second = []
    third = []
    user = RedpackService.get_qruser_by_openid(request.open_id)
    # "Followed" = wechat user record exists and has a nickname.
    has_followed= bool(user and user.nickname)
    where = RoomDesignDetail.user_id==request.user_id
    my_room = RoomDesignService.get_room(where)
    has_attend = bool(my_room)
    if my_room: my_room = my_room.as_dict()
    # Newest entries that have pictures.
    _sort = 'id'
    where = RoomDesignDetail.pics_count>0
    has_more, first = RoomDesignService.get_paged_rooms(_sort=_sort, limit=limit, where=where)
    # Top-voted entries (same picture filter).
    _sort = 'vote_count'
    has_more, second = RoomDesignService.get_paged_rooms(_sort=_sort, limit=limit, where=where)
    # Shanghai schools that have room pictures.
    where = and_(
        School.city_name=='上海',
        School.pics_count>0
    )
    _, schools = DataService.get_paged_schools(where=where, limit=4, fields=['id', 'name'])
    for i in schools:
        i['count'] = RoomDesignService.count_school_pics(i['id'])
    # Decorate both room lists with rank, tip text, and gap to the next rank.
    for room in first+second:
        room['rank'] = RoomDesignVoteCounter.rank(room['id'])
        rank = room['rank']
        # incr(id, 0) reads the counter without changing it.
        vote_count = RoomDesignVoteCounter.incr(room['id'], 0)
        pre_diff = RoomDesignVoteCounter.get_vote_by_rank(rank-1)-vote_count if rank>1 else 0
        room['note'] = set_tip_msg(rank, is_myself=False)
        room['pre_diff']= pre_diff
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'has_followed': has_followed,
        'my_room' : my_room,
        'first' : first,
        'second' : second,
        'third' : schools,
        'has_attend': has_attend
    }
    return jsonify_response(result)
room_search_validator = Inputs(
    {
        'keyword' : TextField(min_length=1, max_length=100, msg='请输入关键字'),
    })
@wechat_loggin_dec(required=False, validator=room_search_validator, app=True)
def room_search():
    ''' Look up a room entry by phone number, room name, or application number. '''
    keyword = request.valid_data.get('keyword')
    # The keyword may match any of the three identifying columns.
    criteria = or_(
        RoomDesignDetail.phone==keyword,
        RoomDesignDetail.room_name==keyword,
        RoomDesignDetail.apply_no==keyword
    )
    matched = RoomDesignService.get_room(criteria)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'room_exist': bool(matched),
        'room_id' : matched.id if matched else None
    })
@wechat_loggin_dec(required=True)
def get_vote_priviledges():
    ''' The current user's vote-privilege details plus follow state. '''
    user = RedpackService.get_qruser_by_openid(request.open_id)
    # "Followed" = wechat user record exists and has a nickname.
    has_followed= bool(user and user.nickname)
    privileges = RoomDesignService.get_user_vote_privilede(request.user_id)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'has_followed': has_followed,
        'privileges': privileges,
    }
    return jsonify_response(result)
@wechat_loggin_dec(required=False, need_openid=True)
def room_about():
    ''' Serve the static activity "about" page. '''
    print 'room_about'  # Python 2 print statement; debug trace
    return send_from_directory('static', 'user/Activities/about.html')
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,364
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/4f4ce8bff86a_.py
|
"""empty message
Revision ID: 4f4ce8bff86a
Revises: 11da3b568bd2
Create Date: 2016-01-09 17:18:22.196431
"""
# revision identifiers, used by Alembic.
revision = '4f4ce8bff86a'
down_revision = '11da3b568bd2'
from alembic import op
import sqlalchemy as sa
def upgrade():
    ''' Add nullable `view_count` column to `beauty_entry`. '''
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('beauty_entry', sa.Column('view_count', sa.Integer(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    ''' Revert: drop the `view_count` column from `beauty_entry`. '''
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('beauty_entry', 'view_count')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,365
|
qsq-dm/mff
|
refs/heads/master
|
/ops/trial.py
|
# -*- coding: utf-8 -*-
from sqlalchemy import and_
from sqlalchemy import func
from util.sqlerr import SQL_DUPLICATE
from util.sqlerr import SQL_DUPLICATE_PHONE
from util.utils import dt_obj
from models import db
from models import Trial
from models import TrialApply
from models import TrialComment
from ops.utils import get_items
from ops.utils import get_page
from ops.utils import count_items
from thirdparty.wechat import wechat
from thirdparty.wechat import create_qrcode
from thirdparty.qn import upload_img
from settings import celery
class TrialService(object):
    ''' Data-access layer for product trials: trial items, user
    applications, and trial-experience comments. '''
    @staticmethod
    def get_trial(trial_id):
        ''' Trial by primary key, as a dict; None when not found. '''
        trial = Trial.query.filter(Trial.id==trial_id).first()
        if trial: return trial.as_dict()
    @staticmethod
    def get_user_apply(user_id, trial_id):
        ''' The user's application for a trial, as a dict; None when absent. '''
        query = and_(
            TrialApply.user_id==user_id,
            TrialApply.trial_id==trial_id)
        apply = TrialApply.query.filter(query).first()
        if apply: return apply.as_dict()
    @staticmethod
    def get_trial_applies_by_user_ids(trial_id, user_ids):
        ''' Applications for one trial filed by any of the given users. '''
        query = and_(
            TrialApply.trial_id==trial_id,
            TrialApply.user_id.in_(user_ids)
        )
        applies = TrialApply.query.filter(query).all()
        return [ i.as_dict() for i in applies]
    @staticmethod
    def create_trial(title, image, cat, total, start_time, end_time, rules, process, coupon_id=None):
        ''' Create a trial row and return its id. '''
        trial = Trial(
            process=process,
            title=title, image=image, cat=cat, total=total, start_time=start_time, end_time=end_time, rules=rules, coupon_id=coupon_id
        )
        db.session.add(trial)
        db.session.commit()
        return trial.id
    @staticmethod
    def update_trial(item_id, **kw):
        ''' Update trial columns; returns the number of rows affected. '''
        count = Trial.query.filter(Trial.id==item_id).update(kw)
        db.session.commit()
        return count
    @staticmethod
    def add_apply(user_id, name, phone, school, trial_id, content, sex, addr):
        ''' File a trial application; asserts with a user-facing message on
        duplicates or any other failure. Returns the new application id. '''
        try:
            trial = Trial.query.filter(Trial.id==trial_id).first()
            assert trial, '试用商品不存在'
            # The application inherits the trial's category.
            cat = trial.cat
            apply = TrialApply(
                addr=addr, cat=cat, user_id=user_id, name=name, phone=phone, school=school, trial_id=trial_id, content=content, sex=sex)
            db.session.add(apply)
            db.session.commit()
            return apply.id
        except Exception as e:
            import traceback
            traceback.print_exc()
            db.session.rollback()
            # Unique-constraint violation -> the user already applied.
            if SQL_DUPLICATE.search(str(e)):
                assert 0, '您已提交过申请'
            assert 0, '申请失败'
    @staticmethod
    def comment(trial_id, user_id, content, photos):
        ''' Post a trial-experience comment; returns the new comment id. '''
        comment = TrialComment(trial_id=trial_id, user_id=user_id, content=content, photos=photos)
        db.session.add(comment)
        db.session.commit()
        return comment.id
    @staticmethod
    def get_paged_trial_comments(**kw):
        ''' Paged trial comments. '''
        return get_page(TrialComment, {}, **kw)
    @staticmethod
    def get_paged_trials(**kw):
        ''' Paged trial list. '''
        return get_page(Trial, {}, **kw)
    @staticmethod
    def count_trial(where=None):
        ''' Count trials matching `where` (all trials when None). '''
        return count_items(Trial, where)
    @staticmethod
    def count_apply(where=None):
        ''' Count applications matching `where` (all when None). '''
        return count_items(TrialApply, where)
    @staticmethod
    def incr_trial_apply_count(trial_id):
        ''' Increment the trial's applicant counter by one. '''
        count = Trial.query.filter(Trial.id==trial_id).update({'apply_count':Trial.apply_count+1})
        db.session.commit()
        return count
    @staticmethod
    def get_paged_apply_user_list(**kw):
        ''' Paged list of applicants. '''
        return get_page(TrialApply, {}, **kw)
    @staticmethod
    def update_apply_status(where, to_status):
        ''' Set the application status for all rows matching `where`. '''
        count = TrialApply.query.filter(where).update({'status':to_status})
        db.session.commit()
        return count
    @staticmethod
    def update_apply(where, **kw):
        ''' Update application columns for rows matching `where`. '''
        count = TrialApply.query.filter(where).update(kw)
        db.session.commit()
        return count
    @staticmethod
    def incr_trial_sent_count(trial_id):
        ''' Increment the trial's granted (sent) counter by one. '''
        count = Trial.query.filter(Trial.id==trial_id).update({'sent':Trial.sent+1})
        db.session.commit()
        return count
    @staticmethod
    def get_apply(apply_id):
        ''' Application by primary key, as a dict; None when not found. '''
        apply = TrialApply.query.filter(TrialApply.id==apply_id).first()
        if apply: return apply.as_dict()
    @staticmethod
    def get_trial_apply(user_id, trial_id):
        ''' Duplicate of get_user_apply: the user's application for a trial. '''
        query = and_(
            TrialApply.user_id==user_id,
            TrialApply.trial_id==trial_id
        )
        apply = TrialApply.query.filter(query).first()
        if apply: return apply.as_dict()
    @staticmethod
    def get_trial_comment(trial_id, user_id):
        ''' The user's comment on a trial (model instance, not a dict), or None. '''
        query = and_(
            TrialComment.trial_id==trial_id,
            TrialComment.user_id==user_id
        )
        return TrialComment.query.filter(query).first()
    @staticmethod
    def count_user_apply(user_ids, status=None):
        ''' Map user_id -> number of applications, optionally restricted to
        one status. '''
        # NOTE(review): relies on legacy SQLAlchemy where and_() returns an
        # appendable BooleanClauseList — confirm against the pinned version.
        query = and_()
        query.append(TrialApply.user_id.in_(user_ids))
        if status:
            query.append(TrialApply.status==status)
        rows = db.session.query(TrialApply.user_id, func.count(TrialApply.id)).filter(query).group_by(TrialApply.user_id).all()
        print rows  # Python 2 print statement; debug trace
        return dict(rows)
    @staticmethod
    def check_exist_order(sort_order):
        ''' First still-running trial already using this sort_order, or None. '''
        query = and_(
            Trial.sort_order==sort_order,
            Trial.end_time>=dt_obj.now()
        )
        return db.session.query(Trial).filter(query).first()
    @staticmethod
    def get_latest_apply(user_id):
        ''' The user's most recent application, as a dict; None when absent. '''
        apply = TrialApply.query.filter(TrialApply.user_id==user_id).order_by(TrialApply.id.desc()).first()
        if apply: return apply.as_dict()
    @staticmethod
    def get_trial_apply_by_user_ids(user_ids):
        ''' All applications filed by any of the given users. '''
        applys = TrialApply.query.filter(TrialApply.user_id.in_(user_ids)).all()
        return [i.as_dict() for i in applys]
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,366
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/c2bb73ecf64_.py
|
"""empty message
Revision ID: c2bb73ecf64
Revises: 3c990682c3f0
Create Date: 2016-01-04 19:47:55.690716
"""
# revision identifiers, used by Alembic.
revision = 'c2bb73ecf64'
down_revision = '3c990682c3f0'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create table ``alipay_order_user`` (order_no -> buyer_email mapping)
    with a unique constraint on order_no and a non-unique index on buyer_email."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('alipay_order_user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('order_no', sa.String(length=100), nullable=True),
        sa.Column('buyer_email', sa.String(length=100), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('order_no')
    )
    op.create_index(op.f('ix_alipay_order_user_buyer_email'), 'alipay_order_user', ['buyer_email'], unique=False)
    ### end Alembic commands ###
def downgrade():
    """Revert this revision: drop the index, then the table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_alipay_order_user_buyer_email'), table_name='alipay_order_user')
    op.drop_table('alipay_order_user')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,367
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/53a9d06e37ce_.py
|
"""empty message
Revision ID: 53a9d06e37ce
Revises: 2eed88b994ed
Create Date: 2015-10-31 14:18:39.072291
"""
# revision identifiers, used by Alembic.
revision = '53a9d06e37ce'
down_revision = '2eed88b994ed'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Drop the ``item.item_no`` column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('item', 'item_no')
    ### end Alembic commands ###
def downgrade():
    """Re-add ``item.item_no`` as a nullable VARCHAR(50) (values are lost on upgrade)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('item', sa.Column('item_no', mysql.VARCHAR(length=50), nullable=True))
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,368
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/38dd6746c99b_.py
|
"""empty message
Revision ID: 38dd6746c99b
Revises: 42d4367e28b2
Create Date: 2015-12-10 17:50:20.145840
"""
# revision identifiers, used by Alembic.
revision = '38dd6746c99b'
down_revision = '42d4367e28b2'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable boolean column ``coupon.is_trial``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('coupon', sa.Column('is_trial', sa.Boolean(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Remove ``coupon.is_trial``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('coupon', 'is_trial')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,369
|
qsq-dm/mff
|
refs/heads/master
|
/admin/views.py
|
# -*- coding: utf-8 -*-
import json
import time
import math
import pickle
from base64 import b64decode
from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy import not_
from flask import request
from flask import redirect
from flask import Blueprint
from flask import render_template
from flask import make_response
from util.utils import date_to_datetime
from util.utils import jsonify_response
from util.utils import template_response
from util.utils import prefix_img_domain
from util.utils import abbreviated_pages
from util.utils import get_due_time
from util.utils import format_price
from util.utils import get_current_period
from util.utils import cacl_punish_fee
from util.utils import gen_item_no
from util.utils import trans_list
from util.utils import dt_obj
from util.sign import gen_token
from util.sign import del_cookie
from util.sign import set_cookie
from util.sign import get_cookie
from util.decorators import admin_json_dec
from util.validators import Optional
from util.validators import Inputs
from util.validators import MobileField
from util.validators import TextField
from util.validators import IdField
from util.validators import IntChoiceField
from util.validators import FloatField
from util.validators import IntChoicesField
from util.validators import BoolChoiceField
from util.validators import BoolIntChoiceField
from util.validators import IntField
from util.validators import REGField
from util.utils import set_coupon_use_time
from models import db
from models import Activity
from models import AdminUser
from models import Item
from models import School
from models import Order
from models import User
from models import Promoter
from models import Trial
from models import DailyUser
from models import ItemSubCat
from models import ActivityItem
from models import CreditApply
from models import RecommendItem
from models import Hospital
from models import RecommendSubcat
from models import PeriodPayLog
from models import TrialApply
from models import RecommendHospital
from models import RedpackUserQuestion
from models import RoomDesignDetail
from ops.cache import ChsiCache
from ops.cache import AdminInvalidUserPasswdCache
from ops.cache import RoomDesignVoteCounter
from ops.common import pay_success_action
from ops.beauty_tutorial import TutorialService
from ops.admin import AdminService
from ops.redpack import RedpackService
from ops.data import DataService
from ops.promote import PromoteService
from ops.item import ItemService
from ops.user import UserService
from ops.comment import CommentService
from ops.credit import CreditService
from ops.trial import TrialService
from ops.coupon import CouponService
from ops.bulks import fetch_item_cat_refs
from ops.bulks import fetch_user_refs
from ops.bulks import fetch_item_refs
from ops.bulks import fetch_order_refs
from ops.bulks import fetch_item_subcat_refs
from ops.bulks import fetch_hospital_refs
from ops.bulks import fetch_question_refs
from ops.bulks import fetch_qrcodeuser_refs
from ops.bulks import fetch_wechatinfo_refs
from ops.bulks import fetch_servicecode_refrence
from ops.bulks import fetch_coupon_refs
from ops.bulks import fetch_school_refs
from ops.order import OrderService
from ops.order import set_order_status
from ops.hospital import HospitalService
from ops.activity import ActivityService
from ops.room_design import RoomDesignService
from thirdparty.qn import gen_qn_token
from thirdparty.qn import upload_img
from thirdparty.sms import send_sms_apply_success
from thirdparty.sms import send_sms_apply_reject
from thirdparty.sms import send_sms_refund
from thirdparty.chsi import login_xuexin
from thirdparty.chsi import refresh_chsi_captcha
from thirdparty.chsi import get_chsi_info
from thirdparty.wx_pay import refund_order
from thirdparty.wx_pay import refund_repayment
from thirdparty.wx_app_pay import refund_order as refund_app_order
from thirdparty.wx_app_pay import refund_repayment as wxapp_refund_repayment
from constants import ResponseCode
from constants import APPLY_STATUS
from constants import ORDER_ADMIN_STATUS
from constants import ORDER_STATUS
from constants import ORDER_STATUS_LABEL
from constants import ADMIN_ORDER_STATUS_CHOICES
from constants import ORDER_ADMIN_STATUS_MAP
from constants import CREDIT_STATUS
from constants import PAY_METHOD
from constants import BODY_LABEL
def index():
    """Render the admin single-page-app entry template.

    Template resolution follows Flask blueprint rules:
    http://flask.pocoo.org/docs/0.10/blueprints/#templates
    """
    return render_template('admin/index.html')
# Validates the admin login payload: both fields are mandatory free text.
login_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='用户名'),
        'passwd' : TextField(min_length=1, max_length=100, msg='密码')
    }
)
@admin_json_dec(required=False, validator=login_validator)
def login():
    """Authenticate an admin and set the auth cookies (name/token/cat).

    Brute-force guard: the per-name failure counter is incremented BEFORE the
    password check and decremented again on success, so only failed attempts
    accumulate; 10 failures locks the account for the day (assert message).
    NOTE(review): `admin.passwd==passwd` compares the stored value directly —
    looks like plaintext or pre-hashed-by-client passwords; confirm and prefer
    a salted hash comparison.
    """
    name = request.valid_data.get('name')
    passwd = request.valid_data.get('passwd')
    admin = AdminService.get_admin(name)
    count = AdminInvalidUserPasswdCache.incr(name)
    assert count<10, '今日密码错误次数超限'
    if admin and admin.passwd==passwd:
        response = jsonify_response({'code':ResponseCode.SUCCESS}, with_response=True)
        token = gen_token(name)
        # 30-day cookies; `cat` carries the admin role used by admin_json_dec role checks.
        set_cookie(response, 'name', name, 86400*30)
        set_cookie(response, 'token', token, 86400*30)
        set_cookie(response, 'cat', str(admin.cat or 0), 86400*30)
        AdminInvalidUserPasswdCache.incr(name, -1)
        return response
    assert 0, '用户名或密码错误'
@admin_json_dec()
def logout():
    """Clear the admin auth cookies and redirect back to the /admin login page."""
    resp = make_response(redirect('/admin'))
    for cookie_name in ('name', 'token'):
        del_cookie(resp, cookie_name)
    return resp
@admin_json_dec(roles=[0, 1, 2, 5])
def get_city_list(required=True, validator=None):
    # NOTE(review): `required`/`validator` mirror admin_json_dec's kwargs and are
    # never used in the body — they look misplaced (probably meant for the
    # decorator call). Left untouched because the route registration may pass them.
    """Return the full city list (no pagination metadata)."""
    has_more, cities = DataService.get_paged_cities()
    result = {
        'infos':cities
    }
    return jsonify_response(result)
# City name is mandatory, 1-100 chars.
new_city_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='城市名'),
    }
)
@admin_json_dec(required=True, validator=new_city_validator)
def new_city():
    """Create a city record from the validated `name` field.

    Fix: removed a leftover Python-2 ``print name`` debug statement (leaked the
    payload to stdout and is a syntax error under Python 3) and the unused
    ``city_id`` local.
    """
    name = request.valid_data.get('name')
    DataService.create_city(name)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : ''}
    return jsonify_response(result)
# Admin item-list filters; every field is optional.
# Fix: the original dict literal listed 'sub_cat_id' twice — a duplicate key that
# Python silently collapses; the redundant entry is removed.
item_list_validator = Inputs(
    {
        'keyword' : Optional(TextField(min_length=1, max_length=100, msg='搜索关键字')),
        'sub_cat_id' : Optional(IdField(msg='子分类id')),
        'hospital_id' : Optional(IdField(msg='医院id')),
        'activity_id' : Optional(IdField(msg='活动id')),
        'page' : Optional(IdField(msg='页数')),
        'is_recommend' : Optional(IntChoiceField(choices=[0,1], msg='是否推荐'))
    }
)
@admin_json_dec(required=True, validator=item_list_validator)
def get_item_list():
    """Paginated admin item listing with optional keyword/sub-cat/hospital/
    activity/recommend filters; city-scoped admins only see their city's items.

    Fix: removed a leftover ``print sub_cats`` debug statement (Python-3
    incompatible and noisy in production logs).
    """
    keyword = request.valid_data.get('keyword')
    sub_cat_id = request.valid_data.get('sub_cat_id')
    activity_id = request.valid_data.get('activity_id')
    hospital_id = request.valid_data.get('hospital_id')
    page = request.valid_data.get('page') or 1
    is_recommend = request.valid_data.get('is_recommend') or None
    limit = 10
    start = (page-1)*limit
    filters = []
    order_by = None
    join = None
    if keyword: filters.append(Item.title.like('%{}%'.format(keyword)))
    if sub_cat_id:
        # Item.sub_cat_ids is a comma-separated id list; these four LIKE patterns
        # together match "id appears as a whole element" (only/last/middle/first).
        query = or_(
            Item.sub_cat_ids==sub_cat_id,
            Item.sub_cat_ids.like('%,{}'.format(sub_cat_id)),
            Item.sub_cat_ids.like('%,{},%'.format(sub_cat_id)),
            Item.sub_cat_ids.like('{},%'.format(sub_cat_id))
        )
        filters.append(query)
    if hospital_id: filters.append(Item.hospital_id==hospital_id)
    if activity_id:
        subquery = db.session.query(ActivityItem.item_id).filter(ActivityItem.activity_id==activity_id).subquery()
        filters.append(Item.id.in_(subquery))
    if is_recommend:
        # Restrict to recommended items and order by the recommendation sort key.
        subquery = db.session.query(RecommendItem.item_id).subquery()
        filters.append(Item.id.in_(subquery))
        order_by = RecommendItem.sort_order.asc()
        join = RecommendItem
    where = None
    if filters:
        where = and_(*filters)
    if request.admin.city_id:
        # City-scoped admin: restrict items to hospitals of that city.
        city_item_suq = db.session.query(Hospital.id).filter(Hospital.city_id==request.admin.city_id).subquery()
        item_query = Item.hospital_id.in_(city_item_suq)
        if where is not None:
            where = and_(
                where,
                item_query
            )
        else:
            where = item_query
    total = ItemService.count_items(where)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    has_more, item_list = ItemService.get_the_paged_items(
        limit=limit, start=start, where=where, join=join, order_by=order_by)
    item_ids = [ i['id'] for i in item_list]
    exists_recommend = ItemService.exists_recommend_item_ids(item_ids)
    for i in item_list:
        i['is_recommend'] = i['id'] in exists_recommend
        i['sort_order'] = exists_recommend.get(i['id'], 0)
    fetch_item_subcat_refs(item_list)
    sub_cats = [i['sub_cat'] for i in item_list]
    fetch_item_cat_refs(sub_cats)
    result = {
        'infos' : item_list,
        'page_info' : page_info
    }
    return jsonify_response(result)
# Full item payload validation for create/update.
item_edit_validator = Inputs(
    {
        'title' : TextField(min_length=1, max_length=100, msg='商品名'),
        'item_no' : Optional(TextField(min_length=0, max_length=100, msg='项目编号')),
        'image' : TextField(min_length=1, max_length=1000, msg='商品小图'),
        'photos' : Optional(TextField(min_length=0, max_length=1000, msg='图片列表')),
        'surgery_desc' : TextField(min_length=1, max_length=1000000, msg='项目介绍'),
        'doctor_desc' : TextField(min_length=1, max_length=1000000, msg='医生介绍'),
        'note' : TextField(min_length=1, max_length=10000, msg='特别提醒'),
        'use_time' : TextField(min_length=1, max_length=10000, msg='使用时间'),
        'has_fee' : BoolIntChoiceField(msg='是否免息'),
        'direct_buy' : BoolIntChoiceField(msg='是否直购'),
        #'sub_cat_id' : Optional(IdField(msg='子分类id')),
        'sub_cat_ids' : TextField(min_length=1, max_length=1000, msg='请选择分类'),
        'hospital_id' : IdField(msg='医院id'),
        'price' : FloatField(msg='价格'),
        'orig_price' : FloatField(msg='原价'),
        'support_choice_list': IntChoicesField(choices=[1,2,3,4,5,6], msg='支持哪些分期选项'),
    }
)
@admin_json_dec(required=True, validator=item_edit_validator, roles=[0,1,5])
def item_edit(item_id=None):
    """Create (item_id is None) or update an item; returns {'item_id': ...}.

    Installment options arrive as a list of ints and are persisted as a
    comma-joined string (support_choices).
    """
    title = request.valid_data.get('title')
    sub_cat_id = request.valid_data.get('sub_cat_id')
    sub_cat_ids = request.valid_data.get('sub_cat_ids')
    price = request.valid_data.get('price')
    orig_price = request.valid_data.get('orig_price')
    hospital_id = request.valid_data.get('hospital_id')
    item_no = request.valid_data.get('item_no')
    image = request.valid_data.get('image')
    has_fee = request.valid_data.get('has_fee')
    direct_buy = request.valid_data.get('direct_buy')
    photos = request.valid_data.get('photos') or ''
    doctor_desc = request.valid_data.get('doctor_desc')
    use_time = request.valid_data.get('use_time')
    note = request.valid_data.get('note')
    surgery_desc = request.valid_data.get('surgery_desc')
    support_choice_list = request.valid_data.get('support_choice_list')
    support_choices = ','.join(map(str, support_choice_list))
    # NOTE(review): sub_cat_id is unconditionally forced to 1 here, so the
    # request value read above is dead (the commented-out validator field above
    # matches) — real categorization flows through sub_cat_ids instead.
    sub_cat_id = 1
    if item_id:
        # Updates require an explicit item_no; creation below auto-generates one.
        assert item_no, '请输入商品编号'
        ItemService.update_item(item_id,
            title=title,
            sub_cat_id=sub_cat_id,
            sub_cat_ids=sub_cat_ids,
            price=price,
            orig_price=orig_price,
            support_choices=support_choices,
            hospital_id=hospital_id, item_no=item_no,
            photos=photos, surgery_desc=surgery_desc,
            doctor_desc=doctor_desc, image=image, direct_buy=direct_buy,
            has_fee=has_fee, use_time=use_time, note=note
        )
    else:
        item_no = item_no or gen_item_no()
        item_id = ItemService.create_item(
            title, hospital_id, sub_cat_id, sub_cat_ids, price, orig_price, item_no, support_choices, photos, surgery_desc, doctor_desc, image,
            has_fee, direct_buy, use_time, note)
    return jsonify_response({'item_id': item_id})
# Category name is the only editable field.
edit_itemcat_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='分类名'),
    }
)
@admin_json_dec(required=True, validator=edit_itemcat_validator)
def edit_itemcat(cat_id=None):
    """Create a new item category, or update an existing one when cat_id is given."""
    cat_name = request.valid_data.get('name')
    if cat_id:
        ItemService.update_cat(cat_id, **request.valid_data)
        msg = '修改成功'
    else:
        ItemService.create_cat(cat_name)
        msg = '添加成功'
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : msg
    })
# Sub-category payload; cat_ids is a comma-separated list of parent category ids.
edit_itemsubcat_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='小类名'),
        'icon' : TextField(min_length=1, max_length=1000, msg='图标'),
        'cat_ids' : TextField(min_length=1, max_length=1000, msg='逗号分隔的父分类id'),
        'cat_id' : Optional(IdField(msg='分类id')),
    }
)
@admin_json_dec(required=True, validator=edit_itemsubcat_validator)
def edit_itemsubcat(sub_cat_id=None):
    """Create or edit a sub-category.

    Update path forwards the whole validated payload as kwargs; create path
    passes fields positionally with an empty description.
    """
    name = request.valid_data.get('name')
    icon = request.valid_data.get('icon')
    cat_id = request.valid_data.get('cat_id')
    cat_ids = request.valid_data.get('cat_ids')
    if sub_cat_id:
        msg = '修改成功'
        ItemService.update_subcat(sub_cat_id=sub_cat_id, **request.valid_data)
    else:
        msg = '添加成功'
        desc = ''
        ItemService.create_sub_cat(cat_id, name, icon, desc, cat_ids)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : msg
    }
    return jsonify_response(result)
# Both fields are mandatory: number of installments and the fee rate.
new_period_pay_choice_validator = Inputs(
    {
        'period_count' : IntField(msg='分期数'),
        'period_fee' : FloatField(msg='分期费率')
    }
)
@admin_json_dec(validator=new_period_pay_choice_validator)
def new_period_pay_choice():
    """Create an installment-plan choice and return its new id."""
    payload = request.valid_data
    pay_id = CreditService.create_period_choice(
        period_count=payload.get('period_count'),
        period_fee=payload.get('period_fee'))
    return jsonify_response({'pay_id': pay_id})
@admin_json_dec(required=True)
def get_item():
    """Return one item's detail dict with its boolean flags normalized to 0/1."""
    item = ItemService.get_item_dict_by_id(request.args.get('item_id'))
    for flag in ('has_fee', 'direct_buy'):
        item[flag] = 1 if item[flag] else 0
    return jsonify_response({'data' : item})
@admin_json_dec(required=True)
def get_cat():
    """Return a single item-category dict looked up by the cat_id query arg."""
    cat_id = request.args.get('cat_id')
    cat = ItemService.get_cat_dict_by_id(cat_id)
    return jsonify_response({'data' : cat})
@admin_json_dec(required=True)
def get_subcat():
    """Return a single sub-category dict looked up by the sub_cat_id query arg."""
    sub_cat_id = request.args.get('sub_cat_id')
    subcat = ItemService.get_subcat_dict_by_id(sub_cat_id)
    return jsonify_response({'data' : subcat})
@admin_json_dec(required=True)
def get_school_list():
    """Paginated school listing (100/page), optionally filtered by exact
    city_name, sorted by city_name."""
    limit = 100
    page = int(request.args.get('page', 1))
    city_name = request.args.get('city_name')
    start = (page-1)*limit
    where = None
    if city_name: where = School.city_name==city_name
    total = DataService.count_schools(where=where)
    # ceil(total/limit) pages, rendered as an abbreviated pager around `page`.
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    has_more, item_list = DataService.get_paged_schools(limit=limit, start=start, where=where, _sort='city_name')
    result = {
        'infos' : item_list,
        'page_info' : page_info,
        'total' : total,
    }
    return jsonify_response(result)
@admin_json_dec(required=True)
def get_cat_list():
    """List all item categories ordered by sort_order ascending (no pagination)."""
    _has_more, cats = ItemService.get_paged_cats(limit=1000, _sort='sort_order', _sort_dir='ASC')
    return jsonify_response({
        'infos' : cats,
        'page_info' : None,
    })
# Optional filters for the sub-category listing.
cat_list_validator = Inputs(
    {
        'cat_id' : Optional(IdField(msg='分类id')),
        'is_recommend' : Optional(IntChoiceField(choices=[0,1], msg='是否推荐'))
    }
)
@admin_json_dec(required=True, validator=cat_list_validator)
def get_subcat_list():
    """List sub-categories (up to 100), optionally restricted to one parent
    category and/or to recommended ones; each row is annotated with its
    recommend flag/sort order and its resolved parent-category dicts."""
    cat_id = request.valid_data.get('cat_id')
    is_recommend = request.valid_data.get('is_recommend')
    filters = []
    order_by = None
    join = None
    where = None
    if is_recommend:
        # Restrict to recommended sub-cats, ordered by the recommendation sort key.
        subquery = db.session.query(RecommendSubcat.sub_cat_id).subquery()
        filters.append(ItemSubCat.id.in_(subquery))
        order_by = RecommendSubcat.sort_order.asc()
        join = RecommendSubcat
    if cat_id:
        # cat_ids is a comma-separated id list; the four patterns match the id
        # as a whole element (only/last/middle/first position).
        or_query= or_(
            ItemSubCat.cat_ids==cat_id,
            ItemSubCat.cat_ids.like('%,{}'.format(cat_id)),
            ItemSubCat.cat_ids.like('%,{},%'.format(cat_id)),
            ItemSubCat.cat_ids.like('{},%'.format(cat_id))
        )
        filters.append(or_query)
    if filters: where = and_(*filters)
    has_more, subcat_list = ItemService.get_paged_sub_cats(where=where, order_by=order_by, join=join, limit=100)
    fetch_item_cat_refs(subcat_list)
    all_cats = ItemService.get_item_cats()
    cat_id_obj = {i.id:i.as_dict() for i in all_cats}
    sub_cat_ids = [i['id'] for i in subcat_list]
    exists_recommend = ItemService.exists_recommend_subcat_map(sub_cat_ids)
    for i in subcat_list:
        i['is_recommend'] = i['id'] in exists_recommend
        i['sort_order'] = exists_recommend.get(i['id'], 0)
        i['cat_list'] = [cat_id_obj.get(k) for k in i['cat_id_list']]
    result = {
        'infos' : subcat_list,
        'page_info' : None,
    }
    return jsonify_response(result)
# Optional filters for the admin hospital listing.
hospital_list_validator = Inputs(
    {
        'keyword' : Optional(TextField(min_length=1, max_length=100, msg='搜索关键字')),
        'is_recommend' : Optional(IntChoiceField(choices=[0,1], msg='是否推荐')),
        'page' : Optional(IdField(msg='页数')),
    }
)
@admin_json_dec(required=True, validator=hospital_list_validator)
def get_hospital_list():
    """Paginated hospital listing with optional keyword/recommend filters;
    city-scoped admins only see hospitals in their city.

    Fix: `total`/`page_info` were previously computed BEFORE the admin's city
    restriction was added to `where`, so the pager counted hospitals from all
    cities while the rows were city-filtered. The count now runs on the final
    `where`, matching get_item_list's behavior.
    """
    is_recommend = request.valid_data.get('is_recommend')
    keyword = request.valid_data.get('keyword')
    page = request.valid_data.get('page') or 1
    limit = 10
    start = (page-1)*limit
    filters = []
    where = None
    join = None
    order_by = None
    if is_recommend:
        # Restrict to recommended hospitals, ordered by recommendation sort key.
        subquery = db.session.query(RecommendHospital.hospital_id).subquery()
        filters.append(Hospital.id.in_(subquery))
        order_by = RecommendHospital.sort_order.asc()
        join = RecommendHospital
    if keyword: filters.append(Hospital.name.like('%{}%'.format(keyword)))
    if filters: where = and_(*filters)
    if request.admin.city_id:
        # City-scoped admin: restrict to that city's hospitals.
        if where is not None:
            where = and_(
                where,
                Hospital.city_id==request.admin.city_id
            )
        else:
            where = Hospital.city_id==request.admin.city_id
    total = ItemService.count_hospitals(where)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    has_more, hospital_list = ItemService.get_paged_hospitals(
        join=join, order_by=order_by, where=where, start=start)
    item_ids = [ i['id'] for i in hospital_list]
    exists_recommend = ItemService.exists_recommend_hospital_ids(item_ids)
    for i in hospital_list:
        i['is_recommend'] = i['id'] in exists_recommend
        i['sort_order'] = exists_recommend.get(i['id'], 0)
    result = {
        'infos' : hospital_list,
        'page_info' : page_info,
    }
    return jsonify_response(result)
# status is a strict 0/1 toggle.
subcat_status_validator = Inputs(
    {
        'subcat_id' : IdField(msg='子分类id'),
        'status' : IntChoiceField(choices=[0,1], msg='状态')
    }
)
@admin_json_dec(required=True, validator=subcat_status_validator)
def set_subcat_status():
    """Toggle a sub-category on/off; turning it off also takes it offline."""
    payload = request.valid_data
    subcat_id = payload.get('subcat_id')
    new_status = payload.get('status')
    ItemService.set_subcat_status(subcat_id, new_status)
    if new_status==0:
        ItemService.offline_subcat(subcat_id)
    return jsonify_response({})
@admin_json_dec(required=True)
def get_period_choice_list():
    """List all installment choices sorted by period_count ascending."""
    _has_more, choices = CreditService.get_paged_period_choices(
        _sort='period_count', _sort_dir='ASC')
    return jsonify_response({
        'infos' : choices,
        'page_info' : None,
    })
@admin_json_dec(required=True)
def refresh_qntoken():
    """Issue a fresh Qiniu upload token and stash it in the qntoken cookie.

    NOTE(review): the guard is a dead `if 1:` — the commented-out condition
    shows it once refreshed only when the cookie was missing.
    """
    response = jsonify_response({}, with_response=True)
    if 1:#not get_cookie('qntoken'):
        qntoken = gen_qn_token()
        # was: "store token in cookie for one hour" — but the TTL here is 30 days;
        # NOTE(review): comment and value disagree, confirm intended lifetime.
        set_cookie(response, 'qntoken', qntoken, 86400*30)
    return response
@admin_json_dec()
def get_apply_list():
    """Paginated credit-application list, optionally filtered by status.

    apply_status query arg: 0/absent = all, 1 = verified, 2 = rejected,
    3 = first step done, 4 = second step done.
    """
    limit = 10
    page = int(request.args.get('page', 1))
    apply_status = int(request.args.get('apply_status') or 0)
    where = None
    if apply_status==1:
        where = CreditApply.status==APPLY_STATUS.VERIFIED
    elif apply_status==2:
        where = CreditApply.status==APPLY_STATUS.REJECTED
    elif apply_status==3:
        where = CreditApply.status==APPLY_STATUS.FIRST_STEP
    elif apply_status==4:
        where = CreditApply.status==APPLY_STATUS.SECOND_STEP
    start = (page-1)*limit
    total = CreditService.count_apply(where=where)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    has_more, item_list = CreditService.get_paged_apply_list(limit=limit, start=start, where=where, _sort='update_time')
    # Attach the applicant's user record to each row.
    fetch_user_refs(item_list)
    result = {
        'infos' : item_list,
        'page_info' : page_info,
        'total' : total,
    }
    return jsonify_response(result)
@admin_json_dec()
def get_apply_detail():
    """Credit-application detail: the application (with applicant and the
    human-readable body-part choices resolved) plus the user's credit record."""
    apply_id = request.args.get('apply_id')
    apply = CreditService.get_apply_dict_by_id(apply_id)
    credit = CreditService.get_user_credit(apply['user_id'])
    if credit: credit = credit.as_dict()
    fetch_user_refs((apply,))
    # body_choice_ids is a comma-separated id list; map each id to its label.
    body_choice_ids = apply['body_choice_ids']
    body_choice_text = apply['body_choice_text']
    body_choices = []
    for i in body_choice_ids.split(',') if body_choice_ids else []:
        body_choices.append(BODY_LABEL.get(int(i)) or '')
    apply['body_choices'] = ','.join(body_choices)
    apply['body_choice_text'] = body_choice_text
    result = {
        'apply' : apply,
        'credit' : credit
    }
    return jsonify_response(result)
# apply_id identifies the application; total is the approved credit limit.
apply_approve_validator = Inputs(
    {
        'apply_id' : IdField(msg='申请id'),
        'total' : IntField(msg='额度'),
    }
)
@admin_json_dec(validator=apply_approve_validator, roles=[0,1,5])
def apply_approve():
    """Approve a credit application and grant the given limit.

    Flow: mark the application verified, ensure a credit record exists, refuse
    a limit below the already-used amount, set the limit, flip the user's
    credit status to VERIFIED, then (only when the status row actually changed)
    mark the user's pending orders credit-verified — triggering
    pay_success_action for already-paid ones — notify by SMS and grant a
    room-design vote.
    """
    apply_id = request.valid_data.get('apply_id')
    total = request.valid_data.get('total')
    where = and_(
        CreditApply.id==apply_id,
    )
    data = {
        'status':APPLY_STATUS.VERIFIED
    }
    CreditService.update_apply(where, **data)
    apply = CreditService.get_apply_dict_by_id(apply_id)
    credit = CreditService.get_user_credit(apply['user_id'])
    if not credit:
        # First approval for this user: create the credit record lazily.
        CreditService.init_credit(apply['user_id'])
        credit = CreditService.get_user_credit(apply['user_id'])
    used = credit.used
    err_msg = '审批额度不能低于当前已使用额度{}'.format(used)
    assert total>=used, err_msg
    CreditService.set_user_credit_total(apply['user_id'], total)
    count = CreditService.update_user_credit_status(apply['user_id'], CREDIT_STATUS.VERIFIED)
    if count:
        where = and_(
            Order.user_id==apply['user_id'],
            Order.credit_verified!=1
        )
        orders = OrderService.get_orders(where=where)
        for order in orders:
            # Guarded per-order update so pay_success_action fires at most once.
            where = and_(
                Order.id==order.id,
                Order.credit_verified!=1,
            )
            count = OrderService.update_order(where, credit_verified=1)
            if count and order.status==ORDER_STATUS.PAY_SUCCESS:
                pay_success_action(order, send_verified=True)
        # NOTE(review): this bulk update repeats what the loop above already did
        # for the fetched orders — presumably a catch-all for rows created in
        # between; confirm it is intentional.
        where = and_(
            Order.credit_verified!=1,
            Order.user_id==apply['user_id']
        )
        OrderService.update_order(where, credit_verified=1)
        user = UserService.get_user_by_id(apply['user_id'])
        send_sms_apply_success.delay(user.phone, total)
        RoomDesignService.add_user_vote_privilege(apply['user_id'], 1)
        #PromoteService.add_rd_draw_count(apply['user_id'], 1)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : ''
    }
    return jsonify_response(result)
@admin_json_dec()
def get_hospital():
    """Hospital detail lookup.

    NOTE(review): the query arg is named 'item_id' although it carries a
    hospital id — the admin frontend must send it under that name; confirm
    before renaming.
    """
    hospital_id = request.args.get('item_id')
    hospital = ItemService.get_hospital_dict_by_id(hospital_id)
    result = {
        'data' : hospital
    }
    return jsonify_response(result)
apply_reject_validator = Inputs(
    {
        'reason' : TextField(min_length=1, max_length=1000, msg='被拒原因'),
        'apply_id' : IdField(msg='申请id'),
    }
)
@admin_json_dec(validator=apply_reject_validator, roles=[0,1,5])
def apply_reject():
    ''' Reject a credit application with a reason and notify the user by SMS. '''
    apply_id = request.valid_data.get('apply_id')
    reason = request.valid_data.get('reason')
    apply = CreditService.get_apply_dict_by_id(apply_id)
    assert apply, '申请不存在'
    where = and_(
        CreditApply.id==apply_id
    )
    data = {
        'status':APPLY_STATUS.REJECTED,
        'reason':reason,
    }
    CreditService.update_apply(where, **data)
    CreditService.update_user_credit_status(apply['user_id'], CREDIT_STATUS.REJECTED)
    user = UserService.get_user_by_id(apply['user_id'])
    where = and_(
        Order.user_id==apply['user_id'],
        Order.credit_verified==0
    )
    orders = OrderService.get_orders(where=where)
    # Mark every still-pending order as credit-rejected (credit_verified=2).
    for order in orders:
        where = and_(
            Order.id==order.id,
            Order.credit_verified==0,
        )
        count = OrderService.update_order(where, credit_verified=2)
    send_sms_apply_reject.delay(user.phone)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : ''
    }
    return jsonify_response(result)
hospital_edit_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='医院名'),
        'tags' : TextField(min_length=1, max_length=100, msg='标签'),
        'addr' : TextField(min_length=1, max_length=100, msg='地址'),
        'phone' : TextField(min_length=1, max_length=100, msg='电话'),
        'photos' : Optional(TextField(min_length=0, max_length=1000, msg='图片列表')),
        'image' : TextField(min_length=1, max_length=100, msg='医院头像'),
        'desc' : TextField(min_length=1, max_length=10000, msg='描述'),
        'working_time' : TextField(min_length=1, max_length=100, msg='工作时间'),
        'city_id' : IdField(msg='城市id'),
        'lng' : FloatField(msg='经度'),
        'lat' : FloatField(msg='纬度'),
    }
)
@admin_json_dec(required=True, validator=hospital_edit_validator)
def hospital_edit(item_id=None):
    ''' Create a hospital, or update it when item_id is given.

    lng/lat are popped from the validated payload and re-packed into the
    'long_lat' column format "lng,lat" expected by the model.
    Fix: removed a leftover debug `print name` statement.
    '''
    lng = request.valid_data.pop('lng')
    lat = request.valid_data.pop('lat')
    long_lat = '{},{}'.format(lng, lat)
    request.valid_data['long_lat'] = long_lat
    if item_id:
        ItemService.update_hospital(item_id, **request.valid_data)
    else:
        item_id = ItemService.create_hospital(**request.valid_data)
    return jsonify_response({'item_id': item_id})
recommend_item_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id'),
        'recommend' : BoolChoiceField(msg='是否推荐'),
    }
)
@admin_json_dec(required=True, validator=recommend_item_validator)
def recommend_item():
    ''' Add an item to, or remove it from, the recommended list.

    Fix: removed a leftover debug `print item_id, recommend` statement.
    '''
    item_id = request.valid_data.get('item_id')
    recommend = request.valid_data.get('recommend')
    if recommend:
        ItemService.add_recommend_item(item_id)
    else:
        ItemService.rm_recommend_item(item_id)
    msg = '推荐成功' if recommend else '取消推荐成功'
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
recommend_hospital_validator = Inputs(
    {
        'item_id' : IdField(msg='医院id'),
        'recommend' : BoolChoiceField(msg='是否推荐'),
    }
)
@admin_json_dec(required=True, validator=recommend_hospital_validator)
def recommend_hospital():
    ''' Remove a hospital from the recommended list.

    NOTE(review): the `recommend` flag is read but never used — this endpoint
    always removes, regardless of its value. Confirm whether the add path was
    deliberately retired or is missing.
    '''
    item_id = request.valid_data.get('item_id')
    recommend = request.valid_data.get('recommend')
    ItemService.rm_recommend_hospital(item_id)
    msg = '取消推荐成功'
    result = {
        'msg' : msg,
        'code' : ResponseCode.SUCCESS
    }
    return jsonify_response(result)
set_item_status_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id'),
        'status' : IntChoiceField(choices=[0,1], msg='商品状态'),
    }
)
@admin_json_dec(required=True, validator=set_item_status_validator)
def set_item_status():
    ''' Put an item online (status=1) or take it offline (status=0). '''
    item_id = request.valid_data.get('item_id')
    status = request.valid_data.get('status')
    print item_id, status  # debug output left in — consider removing
    data = {
        'status': status
    }
    ItemService.update_item(item_id, **data)
    msg = '上线成功' if status==1 else '下线成功'
    # Offline also runs the dedicated teardown hook (e.g. de-listing).
    if status==0:
        ItemService.offline_item(item_id)
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
set_question_status_validator = Inputs(
    {
        'item_id' : IdField(msg='问题id'),
        'status' : IntChoiceField(choices=[0,1], msg='问题状态'),
    }
)
@admin_json_dec(required=True, validator=set_question_status_validator)
def set_question_status():
    ''' Toggle a question (redpack) online/offline. '''
    item_id = request.valid_data.get('item_id')
    status = request.valid_data.get('status')
    print item_id, status  # debug output left in — consider removing
    # `data` is built but not passed on; the service takes the raw status.
    data = {
        'status': status
    }
    RedpackService.update_redpack_status(item_id, status)
    msg = '上线成功' if status==1 else '下线成功'
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
user_list_validator = Inputs(
    {
        'keyword' : Optional(TextField(min_length=1, max_length=100, msg='搜索关键字')),
        'page' : Optional(IdField(msg='页数')),
        'promoter_id' : Optional(IdField(msg='推广员id')),
        'same_user_id' : Optional(IdField(msg='相同用户注册id')),
    }
)
@admin_json_dec(required=True, validator=user_list_validator, roles=[0,2,5])
def get_user_list():
    ''' Paged user list, filterable by keyword (name/phone), promoter, or
    "same wechat open_id" as another user. '''
    keyword = request.valid_data.get('keyword')
    promoter_id = request.valid_data.get('promoter_id')
    page = request.valid_data.get('page') or 1
    same_user_id = request.valid_data.get('same_user_id')
    limit = 10
    start = (page-1)*limit
    where = None
    filters = []
    if keyword:
        filters.append(
            or_(
                User.name==keyword,
                User.phone==keyword
            )
        )
    if promoter_id:
        sub_q = PromoteService.get_promoter_user_id_suq(promoter_id)
        filters.append(User.id.in_(sub_q))
    if filters: where = and_(*filters)
    # same_user_id mode overrides any other filters: find all accounts that
    # registered under the same wechat open_id, excluding the user itself.
    if same_user_id:
        open_id = None
        qrcode_user = PromoteService.get_qrcode_user_by_user_id(same_user_id)
        if qrcode_user: open_id = qrcode_user.open_id
        suq = PromoteService.open_id_user_ids_suq(open_id)
        where = and_(
            User.id.in_(suq),
            User.id!=same_user_id
        )
    total = UserService.count_user(where)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    has_more, item_list = UserService.get_paged_user(limit=limit, start=start, where=where)
    fetch_wechatinfo_refs(item_list, id_='id', dest_key='wechat_info', keep_id=True)
    user_ids = [i['id'] for i in item_list]
    open_ids = PromoteService.get_open_ids_by_user_ids(user_ids)
    count_map = PromoteService.count_open_id_user_count(open_ids)
    user_id_open_id_map = PromoteService.get_user_id_open_id_map(open_ids)
    open_id_promoter_id_map= PromoteService.get_qrcodeusers_by_open_ids(open_ids)
    print user_id_open_id_map  # debug output left in — consider removing
    # Annotate each row with duplicate-account count and promoter lineage.
    for info in item_list:
        open_id = user_id_open_id_map.get(info['id'])
        if open_id:
            # -1: don't count the user itself among its duplicates.
            info['same_user_count'] = (count_map.get(open_id) or 1) -1
            info['open_id'] = open_id
            if open_id_promoter_id_map.get(open_id):
                info['promoter'] = open_id_promoter_id_map.get(open_id)['promoter']
            if open_id_promoter_id_map.get(open_id):
                info['parent'] = open_id_promoter_id_map.get(open_id)['parent']
        else:
            info['same_user_count'] = 0
    result = {
        'infos' : item_list,
        'page_info' : page_info,
        'total' : total,
    }
    return jsonify_response(result)
@admin_json_dec()
def get_user_detail():
    ''' User detail: profile, credit application, wechat info and first
    recorded location (best effort — external lookups may fail silently). '''
    item_id = request.args.get('item_id')
    user = UserService.get_user_by_id(item_id)
    assert user, '用户不存在'
    qruser = PromoteService.get_qrcode_user_by_user_id(item_id)
    open_id = None
    wechat_info = None
    location = None
    if qruser:
        open_id = qruser.open_id
        # Backfill wechat profile data on first view; failures are logged
        # and ignored (the detail page still renders).
        if not qruser.nickname:
            try:
                PromoteService.set_user_sex(open_id)
                qruser = PromoteService.get_qrcode_user_by_user_id(item_id)
            except Exception as e:
                import traceback
                traceback.print_exc()
    if qruser and qruser.nickname:
        wechat_info = {}
        wechat_info['nickname'] = qruser.nickname
        wechat_info['sex'] = qruser.sex
        wechat_info['city'] = qruser.city
        wechat_info['headimgurl'] = qruser.headimgurl
    if open_id:
        # Best-effort reverse-geocoding of the user's first reported location.
        try:
            from thirdparty.wechat import wechat
            location = PromoteService.get_first_location(open_id)
            from util.utils import translate_location
            if location:
                latlng = '{},{}'.format(location['lat'], location['lng'])
                result = translate_location(latlng)
                location = result.json()['result'][0]
        except Exception as e:
            import traceback
            traceback.print_exc()
    user = user.as_dict()
    apply = CreditService.get_apply_dict_by_userid(item_id)
    result = {
        'data' : user,
        'apply' : apply,
        'location' : location,
        'wechat_info': wechat_info
    }
    return jsonify_response(result)
@admin_json_dec()
def get_school_city_list():
    ''' List province/city names used for school selection. '''
    return jsonify_response({'infos': DataService.get_school_city_names()})
order_list_validator = Inputs(
    {
        'hospital_id' : Optional(IdField(msg='医院id')),
        'sub_cat_id' : Optional(IdField(msg='子分类id')),
        'keyword' : Optional(TextField(min_length=1, max_length=1000, msg='搜索订单号或用户手机号')),
        'order_status' : Optional(IntChoiceField(choices=ORDER_ADMIN_STATUS_MAP.keys(), msg='订单筛选状态')),
    }
)
@admin_json_dec(validator=order_list_validator)
def get_order_list():
    ''' Paged order list with admin-side status filters.

    `order_status` is an admin-facing status that maps onto combinations of
    the raw Order.status / credit_verified / refund columns below.
    '''
    page = int(request.args.get('page', 1))
    hospital_id = request.valid_data.get('hospital_id')
    sub_cat_id = request.valid_data.get('sub_cat_id')
    keyword = request.valid_data.get('keyword')
    order_status = request.valid_data.get('order_status')
    limit = 10
    start = (page-1)*limit
    where = None
    order_by = None
    conditions = []
    if hospital_id: conditions.append(Order.hospital_id==hospital_id)
    # An 11-char keyword is treated as a phone number, otherwise as order_no.
    if keyword and len(keyword)==11:
        sub_query = db.session.query(User.id).filter(User.phone==keyword).subquery()
        conditions.append(Order.user_id.in_(sub_query))
    elif keyword:
        conditions.append(Order.order_no==keyword)
    if order_status:
        if order_status==ORDER_ADMIN_STATUS.TO_PAY:
            conditions.append(Order.status.in_([ORDER_STATUS.NEW_ORDER, ORDER_STATUS.TO_PAY]))
        elif order_status==ORDER_ADMIN_STATUS.FINISH:
            conditions.append(Order.status==ORDER_STATUS.FINISH)
        elif order_status==ORDER_ADMIN_STATUS.TO_SERVE: # awaiting service
            conditions.append(and_(
                Order.status==ORDER_STATUS.PAY_SUCCESS,
                Order.credit_verified==1
            )
            )
        elif order_status==ORDER_ADMIN_STATUS.CREDIT_VERIFY: # credit pending review
            conditions.append(Order.credit_verified==0)
        elif order_status==ORDER_ADMIN_STATUS.CANCELD:
            conditions.append(Order.status.in_([ORDER_STATUS.CANCEL_BEFORE_PAY, ORDER_STATUS.CANCELED]))
        elif order_status==ORDER_ADMIN_STATUS.TO_REFUND:
            # Unrefunded first; cancelled orders that either paid a down
            # payment or have a successful repayment log need a refund.
            order_by = [Order.refund.asc(), Order.id.desc()]
            sub_filter = and_(
                PeriodPayLog.status==1
            )
            sub_q = db.session.query(PeriodPayLog.order_id).filter(sub_filter).subquery()
            conditions.append(and_(
                Order.status==ORDER_STATUS.CANCELED,
                or_(
                    Order.price>0,
                    Order.id.in_(sub_q)
                ),
                Order.refund==0, # two-step refunds may have one leg fail
            )
            )
        elif order_status==ORDER_ADMIN_STATUS.REFUNDED:
            conditions.append(
                and_(
                    Order.refund==1,
                    Order.status==ORDER_STATUS.CANCELED
                )
            )
        else:
            # Unknown admin status: force an empty result set.
            conditions.append(Order.status==None)
    if sub_cat_id:
        sub_query = db.session.query(Item.id).filter(Item.sub_cat_id==sub_cat_id).subquery()
        conditions.append(Order.item_id.in_(sub_query))
    if conditions:
        where = and_(*conditions)
    total = OrderService.count_order(where)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    has_more, item_list = OrderService.get_paged_orders(
        order_by=order_by, limit=limit, start=start, where=where)
    fetch_servicecode_refrence(item_list, 'id', dest_key='service_code_dict', keep_id=True)
    for order in item_list:
        comment = None
        set_order_status(order, comment, order['service_code_dict'])
    trans_list(item_list, 'status', 'status_label', ORDER_STATUS_LABEL, pop=False)
    order_status_choices = ADMIN_ORDER_STATUS_CHOICES
    fetch_user_refs(item_list)
    fetch_item_refs(item_list)
    result = {
        'infos' : item_list,
        'page_info' : page_info,
        'total' : total,
        'order_status_choices': order_status_choices
    }
    return jsonify_response(result)
upload_image_validator = Inputs(
    {
        'image' : TextField(min_length=1, max_length=10000000, msg='图片'),
        'prefix' : Optional(TextField(min_length=0, max_length=10000000, msg='前缀'))
    }
)
@admin_json_dec(validator=upload_image_validator)
def upload_image():
    ''' Upload a base64 data-URI image; returns the storage key and full URL.

    The broad except is a deliberate catch-all so the admin UI always gets a
    JSON error body instead of a 500 page.
    '''
    try:
        img_str = request.valid_data.pop('image')
        prefix = request.valid_data.pop('prefix') or ''
        if prefix: prefix = '{}'.format(prefix)
        code = 0
        msg = '上传成功'
        print 'uploading...', len(img_str)  # debug output left in
        # Strip the "data:image/...;base64," header before decoding.
        content = b64decode(img_str.split(',')[1])
        key = (prefix or 'subcaticon') + '/' + str(time.time()) + '.jpg'
        upload_img(key, content)
        return jsonify_response({'image': key, 'fullpath': prefix_img_domain(key)})
    except Exception as e:
        import traceback
        traceback.print_exc()
        return jsonify_response({'msg':'服务器异常'})
@admin_json_dec()
def verify_chsi():
    ''' Verify a user's CHSI (学信网) credentials by attempting a login.

    The scraping session is pickled into ChsiCache so a follow-up captcha
    submission (set_chsi_captcha) can resume the same login flow.
    '''
    user_id = request.args.get('user_id')
    apply = CreditService.get_apply_dict_by_userid(user_id)
    chsi_name = apply['chsi_name']
    chsi_passwd = apply['chsi_passwd']
    data, success, return_captcha, session = login_xuexin(chsi_name, chsi_passwd)
    ChsiCache.set(user_id, pickle.dumps(session))
    # When a captcha is required, `data` is the captcha image key.
    if return_captcha:
        data = prefix_img_domain(data)
    msg = '抓取成功' if success else '抓取失败'
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : msg,
        'success' : success,
        'return_captcha': return_captcha,
        'data' : data,
    }
    return jsonify_response(result)
set_chsi_captcha_validator = Inputs(
    {
        'captcha' : TextField(min_length=1, max_length=10000000, msg='验证码'),
        'apply_id' : IdField(msg='申请id')
    }
)
@admin_json_dec(validator=set_chsi_captcha_validator)
def set_chsi_captcha():
    ''' Submit the CHSI captcha and fetch the student record, resuming the
    pickled session stored by verify_chsi. '''
    captcha = request.valid_data.get('captcha')
    apply_id = request.valid_data.get('apply_id')
    apply = CreditService.get_apply_dict_by_id(apply_id)
    chsi_name = apply['chsi_name']
    chsi_passwd = apply['chsi_passwd']
    user_id = apply['user_id']
    # Resume the login session saved during verify_chsi.
    session_pickle = ChsiCache.get(user_id)
    session = pickle.loads(session_pickle)
    msg = ''
    data = get_chsi_info(chsi_name, chsi_passwd, captcha, session)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : msg,
        'success' : True,
        'data' : data,
    }
    return jsonify_response(result)
@admin_json_dec(validator=None)
def refresh_captcha():
    ''' Fetch a fresh CHSI captcha image for the apply's cached session. '''
    apply_id = request.args.get('apply_id')
    apply = CreditService.get_apply_dict_by_id(apply_id)
    session_pickle = ChsiCache.get(apply['user_id'])
    session = pickle.loads(session_pickle)
    print session  # debug output left in — consider removing
    print apply_id  # debug output left in — consider removing
    key = refresh_chsi_captcha(session)
    print key  # debug output left in — consider removing
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'data' : prefix_img_domain(key),
    }
    return jsonify_response(result)
@admin_json_dec()
def get_advice_list():
    ''' Paged list of user feedback entries, with user info attached. '''
    per_page = 10
    page = int(request.args.get('page', 1))
    offset = (page-1)*per_page
    total = UserService.count_advices()
    page_count = int(math.ceil(total/(per_page*1.0)))
    page_info = abbreviated_pages(page_count, page)
    has_more, advices = UserService.get_paged_user_advices(limit=per_page, start=offset)
    fetch_user_refs(advices)
    return jsonify_response({
        'infos' : advices,
        'page_info' : page_info,
        'total' : total,
    })
@admin_json_dec()
def get_advice_detail():
    ''' Detail of a single feedback entry, with user info attached. '''
    advice = UserService.get_advice_dict_by_id(request.args.get('advice_id'))
    fetch_user_refs((advice,))
    return jsonify_response({'data': advice})
refund_validator = Inputs(
    {
        'order_id' : IdField(msg='订单id')
    }
)
@admin_json_dec(validator=refund_validator, roles=[0,1,5])
def admin_refund_order():
    ''' Refund a cancelled order: the down payment plus any repayment logs.

    Wechat payments are refunded directly via the pay API. If any leg was
    paid through Alipay, the wechat legs are refunded here and the caller is
    handed a merchant-console link for the Alipay portion.

    Fixes vs. original:
      - `resullt` typo: the non-wechat-app repayment refund result was
        discarded, leaving `result` potentially unbound further down.
      - the computed failure `code` was never put in the response, which
        always claimed SUCCESS even when the refund failed.
      - removed a leftover debug `print result`.
    '''
    order_id = request.valid_data.get('order_id')
    order = OrderService.get_order_by_id(order_id)
    assert order and order.status==ORDER_STATUS.CANCELED, '订单不能退款'
    where = and_(
        Order.id==order_id,
        Order.status==ORDER_STATUS.CANCELED
    )
    count = OrderService.update_order(where, commit=True, refund=True)
    has_alipay = False
    if order.pay_method==PAY_METHOD.ALIPAY:
        has_alipay = True
    order_repayments= OrderService.get_order_repayment_logs_amount(order_id)
    repayment_amount= sum([format_price(i['price']) for i in order_repayments.values()] or [0])
    assert order.price or repayment_amount, '订单未曾支付过金额'
    sms_msg = '首付金额{}'.format(format_price(order.price))
    if repayment_amount:
        sms_msg = sms_msg + '和已还款金额{}'.format(repayment_amount)
    refund_data = {}
    link = ''
    for order_no in order_repayments:
        info = order_repayments[order_no]
        if info['pay_method'] == PAY_METHOD.ALIPAY:
            has_alipay = True
    # Wechat/alipay mixed orders are refunded in two steps; for a pure
    # wechat order the refund flag must have flipped just now.
    if not has_alipay: assert count, '订单不能退款'
    msg = ''
    code = ResponseCode.SUCCESS
    if not has_alipay:
        result = None
        for order_no in order_repayments:
            info = order_repayments[order_no]
            pay_method = info['pay_method']
            amount = info['price']
            total_fee = info['total']
            transaction_id = info['transaction_id']
            if pay_method==PAY_METHOD.WECHAT_APP:
                result = wxapp_refund_repayment(amount, total_fee, order_no, transaction_id)
            else:
                result = refund_repayment(amount, total_fee, order_no, transaction_id)
        if order.price:
            if order.pay_method==PAY_METHOD.WECHAT_APP:
                result = refund_app_order(order)
            else:
                result = refund_order(order)
        if result['result_code'] == 'SUCCESS':
            msg = '退款成功'
            code = ResponseCode.SUCCESS
            user = UserService.get_user_by_id(order.user_id)
            send_sms_refund.delay(user.phone, order.order_no, sms_msg, '14个工作日')
        else:
            code = ResponseCode.SERVER_ERROR
            msg = '退款失败'
    else: # mixed alipay / wechat refund
        for order_no in order_repayments:
            info = order_repayments[order_no]
            pay_method = info['pay_method']
            amount = info['price']
            total_fee = info['total']
            transaction_id = info['transaction_id']
            if pay_method==PAY_METHOD.ALIPAY:
                # Alipay legs are collected and refunded via the console link.
                refund_data[transaction_id] = amount
            elif pay_method==PAY_METHOD.WECHAT_APP:
                wxapp_refund_repayment(amount, total_fee, order_no, transaction_id)
            else:
                refund_repayment(amount, total_fee, order_no, transaction_id)
        if order.pay_method==PAY_METHOD.WECHAT_WEB:
            result = refund_order(order)
        elif order.pay_method==PAY_METHOD.WECHAT_APP:
            result = refund_app_order(order)
        else:
            if order.price:
                refund_data[order.transaction_id] = format_price(order.price)
        from thirdparty.alipay import alipay
        link = alipay.refund_order(refund_data, '美分分订单退款')
        msg = '跳转到支付宝商户后台退款'
    result = {
        'code' : code,
        'msg' : msg,
        'refund_data': refund_data,
        'link' : link,
        'has_alipay': has_alipay
    }
    return jsonify_response(result)
del_item_activity_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id')
    }
)
@admin_json_dec(validator=del_item_activity_validator)
def del_item_activity():
    ''' Detach an item from every activity it belongs to. '''
    ActivityService.del_item_activitys(request.valid_data.get('item_id'), None)
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '从活动移除商品成功'
    })
@admin_json_dec()
def get_activity_list():
    ''' Paged activity list; city-scoped admins only see their own city. '''
    limit = 10
    page = int(request.args.get('page', 1))
    start = (page-1)*limit
    total = ActivityService.count_activitys()
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    where = None
    # NOTE(review): `total` above is counted without the city filter, so the
    # page count may overshoot for city-scoped admins — confirm.
    if request.admin.city_id:
        where = Activity.city_id==request.admin.city_id
    has_more, item_list = ActivityService.get_paged_activitys(where=where, limit=limit, start=start)
    result = {
        'infos' : item_list,
        'page_info' : page_info,
        'total' : total,
    }
    return jsonify_response(result)
@admin_json_dec()
def get_activity_items():
    ''' All items, each flagged with whether it belongs to the activity
    (used to drive a multi-select in the admin UI). '''
    limit = 1000
    activity_id = int(request.args.get('activity_id', 1))
    where = ActivityItem.activity_id==activity_id
    has_more, infos = ActivityService.get_paged_activity_items(where=where, limit=limit)
    fields = ['id', 'title']
    has_more, items = ItemService.get_paged_items(limit=limit, fields=fields)
    selected = set(i['item_id'] for i in infos)
    for i in items:
        i['selected'] = i['id'] in selected
        i['label'] = i['title']
    result = {
        'infos' : items,
    }
    return jsonify_response(result)
set_activity_items_validator = Inputs(
    {
        'activity_id' : IdField(msg='活动id'),
        'ids' : IntChoicesField(all=True, msg='商品id列表')
    }
)
@admin_json_dec(validator=set_activity_items_validator)
def set_activity_items():
    ''' Replace the full set of items attached to an activity. '''
    item_ids = request.valid_data.get('ids')
    activity_id = request.valid_data.get('activity_id')
    print item_ids, activity_id  # debug output left in — consider removing
    ActivityService.set_activity_items(activity_id, item_ids)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '编辑成功'
    }
    return jsonify_response(result)
activity_edit_validator = Inputs(
    {
        'title' : TextField(min_length=1, max_length=100, msg='活动标题'),
        'desc' : TextField(min_length=1, max_length=10000, msg='活动描述'),
        'start_time' : TextField(min_length=1, max_length=100, msg='开始时间'),
        'end_time' : TextField(min_length=1, max_length=100, msg='结束时间'),
        'city_id' : IdField(msg='城市id'),
    }
)
@admin_json_dec(required=True, validator=activity_edit_validator)
def activity_edit(item_id=None):
    ''' Create an activity, or update it when item_id is given.

    Rejects any activity whose time window overlaps an existing one in the
    same city (the item itself is excluded from the check on update).
    '''
    title = request.valid_data.get('title')
    city_id = request.valid_data.get('city_id')
    desc = request.valid_data.get('desc')
    start_time = request.valid_data.get('start_time')
    end_time = request.valid_data.get('end_time')
    if not item_id:
        assert not ActivityService.exists_activity_time(start_time, end_time, city_id), '时间范围已存在'
        item_id = ActivityService.create_activity(title, desc, start_time, end_time, city_id)
        msg = '添加成功'
    else:
        assert not ActivityService.exists_activity_time(start_time, end_time, city_id, item_id), '时间范围已存在'
        ActivityService.update_activity(item_id, **request.valid_data)
        msg = '修改成功'
    result = {
        'code' : ResponseCode.SUCCESS,
        'item_id' : item_id,
        'msg' : msg
    }
    return jsonify_response(result)
def get_activity():
    ''' Detail of a single activity.

    NOTE(review): unlike its neighbours this view has no @admin_json_dec —
    confirm auth is applied where it is registered.
    '''
    activity = ActivityService.get_activity_dict_by_id(request.args.get('item_id'))
    return jsonify_response({'data': activity})
top_recommend_item_validator = Inputs(
    {
        'item_id' : IdField(msg='商品id'),
    }
)
@admin_json_dec(validator=top_recommend_item_validator)
def top_recommend_item():
    ''' Pin a recommended item to the top of the recommendation list. '''
    ItemService.top_recommend_item(request.valid_data.get('item_id'))
    return jsonify_response({
        'code' : ResponseCode.SUCCESS,
        'msg' : '置顶成功'
    })
top_recommend_subcat_validator = Inputs(
    {
        'sub_cat_id' : IdField(msg='子分类id'),
    }
)
@admin_json_dec(validator=top_recommend_subcat_validator)
def top_recommend_subcat():
    ''' Pin a recommended sub-category to the top of the list. '''
    sub_cat_id = request.valid_data.get('sub_cat_id')
    ItemService.top_recommend_subcat(sub_cat_id)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '置顶成功'
    }
    return jsonify_response(result)
recommend_subcat_validator = Inputs(
    {
        'item_id' : IdField(msg='子分类id'),
        'recommend' : BoolChoiceField(msg='是否推荐'),
    }
)
@admin_json_dec(required=True, validator=recommend_subcat_validator)
def recommend_subcat():
    ''' Add a sub-category to, or remove it from, the recommended list.

    Fix: the original built `result` but never returned it, so the client got
    no JSON body; also removed a leftover debug print.
    '''
    sub_cat_id = request.valid_data.get('item_id')
    recommend = request.valid_data.get('recommend')
    if recommend:
        ItemService.add_recommend_subcat(sub_cat_id)
    else:
        ItemService.rm_recommend_subcat(sub_cat_id)
    msg = '推荐成功' if recommend else '取消推荐成功'
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
@admin_json_dec()
def get_item_recommend():
    ''' Recommendation record for an item (a stub dict when none exists). '''
    item_id = int(request.args.get('item_id'))
    data = ItemService.get_item_recommend(item_id) or dict(item_id=item_id)
    if data: fetch_item_refs((data,))
    result = {
        'data' : data
    }
    return jsonify_response(result)
@admin_json_dec()
def get_hospital_recommend():
    ''' Recommendation record for a hospital (a stub dict when none exists). '''
    hospital_id = int(request.args.get('hospital_id'))
    data = ItemService.get_hospital_recommend(hospital_id) or dict(hospital_id=hospital_id)
    if data: fetch_hospital_refs((data,))
    result = {
        'data' : data
    }
    return jsonify_response(result)
@admin_json_dec()
def get_item_activity():
    ''' Activity record for an item (a stub dict when none exists). '''
    item_id = int(request.args.get('item_id'))
    data = ItemService.get_item_activity(item_id) or dict(item_id=item_id)
    if data: fetch_item_refs((data,))
    result = {
        'data' : data
    }
    return jsonify_response(result)
@admin_json_dec()
def get_subcat_recommend():
    ''' Recommendation record for a sub-category (a stub dict when none exists). '''
    sub_cat_id = int(request.args.get('sub_cat_id'))
    data = ItemService.get_subcat_recommend(sub_cat_id) or dict(sub_cat_id=sub_cat_id)
    if data: fetch_item_subcat_refs((data, ))
    result = {
        'data' : data
    }
    return jsonify_response(result)
item_activity_edit_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'activity_id' : IdField(msg='活动id'),
        'image' : TextField(min_length=1, max_length=1000, msg='图片'),
        'price' : FloatField(msg='活动价格'),
    }
)
@admin_json_dec(validator=item_activity_edit_validator)
def item_activity_edit(item_id=None):
    ''' Create or update the activity entry for an item (sort order, image,
    activity price). '''
    sort_order = request.valid_data.get('sort_order')
    activity_id = request.valid_data.get('activity_id')
    price = request.valid_data.get('price')
    image = request.valid_data.get('image')
    item_activity = ItemService.get_item_activity(item_id)
    if not item_activity:
        ItemService.add_activity_item(item_id, sort_order, activity_id, price, image)
    else:
        # Detach from the old activity before applying the new values.
        ActivityService.del_item_activitys(item_id, item_activity['activity_id'])
        ItemService.update_activity_item(item_id, **request.valid_data)
    msg = '编辑成功'
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
item_recommend_edit_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'image' : TextField(min_length=1, max_length=1000, msg='图片'),
        'desc' : TextField(min_length=1, max_length=1000, msg='描述')
    }
)
@admin_json_dec(validator=item_recommend_edit_validator)
def item_recommend_edit(item_id=None):
    ''' Create or update the recommendation entry for an item. '''
    image = request.valid_data.get('image')
    desc = request.valid_data.get('desc')
    sort_order = request.valid_data.get('sort_order')
    recommend = ItemService.get_item_recommend(item_id)
    # Kick off async image-size extraction for the uploaded image.
    DataService.set_img_size.delay(image)
    if not recommend:
        ItemService.add_recommend_item(item_id, sort_order, image, desc)
    else:
        ItemService.update_recommend_item(item_id, **request.valid_data)
    msg = '编辑成功'
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
subcat_recommend_edit_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'icon' : TextField(min_length=1, max_length=1000, msg='图片'),
    }
)
@admin_json_dec(validator=subcat_recommend_edit_validator)
def subcat_recommend_edit(item_id=None):
    ''' Create or update the recommendation entry for a sub-category. '''
    icon = request.valid_data.get('icon')
    sort_order = request.valid_data.get('sort_order')
    recommend = ItemService.get_subcat_recommend(item_id)
    if not recommend:
        ItemService.add_recommend_subcat(item_id, sort_order, icon)
    else:
        ItemService.update_recommend_subcat(item_id, **request.valid_data)
    msg = '编辑成功'
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
hospital_recommend_edit_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'color' : TextField(min_length=1, max_length=1000, msg='颜色'),
        'tag' : TextField(min_length=1, max_length=1000, msg='标签'),
    }
)
@admin_json_dec(validator=hospital_recommend_edit_validator)
def hospital_recommend_edit(item_id=None):
    ''' Create or update the recommendation entry for a hospital. '''
    tag = request.valid_data.get('tag')
    color = request.valid_data.get('color')
    sort_order = request.valid_data.get('sort_order')
    recommend = ItemService.get_hospital_recommend(item_id)
    if not recommend:
        ItemService.add_recommend_hospital(item_id, sort_order, color, tag)
    else:
        ItemService.update_recommend_hospital(item_id, **request.valid_data)
    msg = '编辑成功'
    result = {
        'msg' : msg
    }
    return jsonify_response(result)
set_recommend_order_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'item_id' : IdField(msg='商品id'),
    }
)
@admin_json_dec(validator=set_recommend_order_validator)
def set_recommend_order():
    ''' Set a recommended item's sort order; the value must be unused. '''
    item_id = request.valid_data.get('item_id')
    sort_order = request.valid_data.get('sort_order')
    exist = ItemService.check_exist_order(sort_order)
    assert not exist, '排序值已存在'
    ItemService.update_recommend_item(item_id, sort_order=sort_order)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '修改成功'
    }
    return jsonify_response(result)
set_trial_order_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'item_id' : IdField(msg='商品id'),
    }
)
@admin_json_dec(validator=set_trial_order_validator)
def set_trial_order():
    ''' Set a trial item's sort order; the value must be unused. '''
    item_id = request.valid_data.get('item_id')
    sort_order = request.valid_data.get('sort_order')
    exist = TrialService.check_exist_order(sort_order)
    assert not exist, '排序值已存在'
    TrialService.update_trial(item_id, sort_order=sort_order)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '修改成功'
    }
    return jsonify_response(result)
set_recommend_subcat_order_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'item_id' : IdField(msg='子分类id'),
    }
)
@admin_json_dec(validator=set_recommend_subcat_order_validator)
def set_recommend_subcat_order():
    ''' Set a recommended sub-category's sort order; the value must be unused. '''
    item_id = request.valid_data.get('item_id')
    sort_order = request.valid_data.get('sort_order')
    exist = ItemService.check_exist_subcat_order(sort_order)
    assert not exist, '排序值已存在'
    ItemService.update_recommend_subcat(item_id, sort_order=sort_order)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '修改成功'
    }
    return jsonify_response(result)
set_recommend_hospital_order_validator = Inputs(
    {
        'sort_order' : IntField(msg='排序'),
        'item_id' : IdField(msg='医院id'),
    }
)
@admin_json_dec(validator=set_recommend_hospital_order_validator)
def set_recommend_hospital_order():
    ''' Set a recommended hospital's sort order; the value must be unused. '''
    item_id = request.valid_data.get('item_id')
    sort_order = request.valid_data.get('sort_order')
    exist = ItemService.check_exist_hospital_order(sort_order)
    assert not exist, '排序值已存在'
    ItemService.update_recommend_hospital(item_id, sort_order=sort_order)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '修改成功'
    }
    return jsonify_response(result)
def get_period_pay_log_list():
    ''' Paged installment-bill list, optionally restricted to overdue bills
    or to a user found by phone keyword.

    NOTE(review): no @admin_json_dec decorator here, unlike sibling views —
    confirm auth is applied at registration time.
    '''
    limit = 10
    page = int(request.args.get('page') or 1)
    keyword = request.args.get('keyword', '')
    is_delayed = request.args.get('is_delayed')=='true'
    start = (page-1)*limit
    deadline = get_due_time(0)
    # Mutable SQLAlchemy clause list; filters are appended below.
    where = and_()
    if is_delayed:
        where.append(and_( # overdue and still unpaid
            PeriodPayLog.repayment_time==None,
            PeriodPayLog.deadline<deadline,
            PeriodPayLog.status==0
        ))
    if keyword:
        # Keyword is treated as a phone number; no match yields user_id=None,
        # which intentionally produces an empty result set.
        user = UserService.get_user_by_phone(keyword)
        user_id = None
        if user: user_id = user.id
        where.append(PeriodPayLog.user_id==user_id)
    has_more, infos = CreditService.get_paged_period_pay_logs(where=where, limit=limit, start=start)
    fetch_order_refs(infos)
    for log in infos:
        log['item_id'] = log['order']['item_id']
    fetch_item_refs(infos, fields=['id', 'title'])
    fetch_user_refs(infos)
    # Attach the late-payment penalty to each bill.
    for i in infos:
        cacl_punish_fee(i)
    total = CreditService.count_logs(where)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    result = {
        'msg' : '',
        'code' : ResponseCode.SUCCESS,
        'infos' : infos,
        'total' : total,
        'page_info' : page_info
    }
    return jsonify_response(result)
@admin_json_dec(required=True)
def get_refund_detail():
    ''' Preview of what a refund for a cancelled order would consist of:
    amounts grouped by pay channel, plus whether an Alipay leg exists
    (Alipay legs require the two-step console flow). '''
    order_id = request.args.get('order_id')
    order = OrderService.get_order_by_id(order_id)
    assert order and order.status==ORDER_STATUS.CANCELED, '订单不能退款'
    order_repayments= OrderService.get_order_repayment_logs_amount(order_id)
    repayment_amount= sum([format_price(i['price']) for i in order_repayments.values()] or [0])
    refund_data = {}
    wechat_web = {}
    wechat_app = {}
    link = ''
    has_alipay = False
    for order_no in order_repayments:
        info = order_repayments[order_no]
        if info['pay_method'] == PAY_METHOD.ALIPAY:
            has_alipay = True
    if order.pay_method==PAY_METHOD.ALIPAY: has_alipay=True
    # Only the mixed-alipay case needs the per-channel breakdown.
    if has_alipay:
        for order_no in order_repayments:
            info = order_repayments[order_no]
            pay_method = info['pay_method']
            amount = info['price']
            total_fee = info['total']
            transaction_id = info['transaction_id']
            if pay_method==PAY_METHOD.ALIPAY:
                refund_data[transaction_id] = amount
            elif pay_method==PAY_METHOD.WECHAT_APP:
                wechat_app[transaction_id] = amount
            elif pay_method==PAY_METHOD.WECHAT_WEB:
                wechat_web[transaction_id] = amount
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '',
        'has_alipay' : has_alipay,
        'order_repayments' : order_repayments,
        'wechat_app' : wechat_app,
        'wechat_web' : wechat_web,
        'repayment_amount' : repayment_amount,
        'price' : format_price(order.price)
    }
    return jsonify_response(result)
@admin_json_dec(required=True)
def get_coupon_list():
    ''' Paged coupon list with item / category / sub-category refs attached. '''
    limit = 10
    page = int(request.args.get('page') or 1)
    start = (page-1)*limit
    has_more, infos = CouponService.get_paged_coupons(start=start, limit=limit)
    total = CouponService.count(None)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    fetch_item_subcat_refs(infos)
    fetch_item_refs(infos, fields=['id', 'title'])
    fetch_item_cat_refs(infos)
    result = {
        'msg' : '',
        'code' : ResponseCode.SUCCESS,
        'infos' : infos,
        'total' : total,
        'page_info' : page_info
    }
    return jsonify_response(result)
coupon_edit_validator = Inputs(
    {
        'title' : TextField(min_length=1, max_length=100, msg='商品名'),
        'cat_id' : Optional(IdField(msg='分类id')),
        'is_trial' : IntChoiceField(choices=[0, 1], msg='是否试用'),
        'sub_cat_id' : Optional(IdField(msg='子分类id')),
        'item_id' : Optional(IdField(msg='商品id')),
        'coupon_cat' : IdField(msg='优惠券类型'),
        'price' : Optional(FloatField(msg='优惠金额')),
        'need' : Optional(IntField(msg='满多少使用')),
        'effective' : IntField(msg='有效期')
    }
)
@admin_json_dec(required=True, validator=coupon_edit_validator, roles=[0,1])
def coupon_edit(item_id=None):
    ''' Create a coupon, or update it when item_id is given.

    coupon_cat scopes the coupon: 1 = category, 2 = sub-category,
    3 = single item; each scope requires its matching id field.
    '''
    title = request.valid_data.get('title')
    cat_id = request.valid_data.get('cat_id')
    sub_cat_id = request.valid_data.get('sub_cat_id')
    mff_item_id = request.valid_data.get('item_id')
    price = request.valid_data.get('price') or 0
    need = request.valid_data.get('need') or 0
    effective = request.valid_data.get('effective')
    # Validity period arrives in days; stored in seconds.
    effective = effective * 86400
    request.valid_data['effective'] = effective
    coupon_cat = request.valid_data.get('coupon_cat')
    is_trial = request.valid_data.get('is_trial')
    request.valid_data['price'] = price
    if not is_trial:
        assert price, '请输入优惠金额'
    if coupon_cat==1: assert cat_id, '请选择商品分类'
    if coupon_cat==2: assert sub_cat_id, '请选择商品子分类'
    if coupon_cat==3:
        assert mff_item_id, '请输入商品id'
        item = ItemService.get_item_dict_by_id(mff_item_id)
        assert item, '商品不存在'
    if not item_id:
        coupon_id = CouponService.create_coupon(
            coupon_cat, cat_id, title, price, effective,
            mff_item_id, sub_cat_id, is_trial, need=need)
    else:
        CouponService.update_coupon(item_id, **request.valid_data)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : ''
    }
    return jsonify_response(result)
# Validation schema for city create / update.
city_edit_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='城市名'),
        'city_code' : TextField(min_length=1, max_length=100, msg='百度城市编码'),
        'amap_code' : TextField(min_length=1, max_length=100, msg='高德城市编码'),
    }
)


@admin_json_dec(required=True, validator=city_edit_validator, roles=[0,1])
def city_edit(item_id=None):
    """Create a city (item_id is None) or update an existing one."""
    form = request.valid_data
    if item_id:
        DataService.update_city(item_id, **form)
    else:
        DataService.create_city(form.get('name'), form.get('city_code'), form.get('amap_code'))
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': ''})
def get_city():
    """Fetch one city by its id (query arg ``item_id``)."""
    city = DataService.get_city_dict_by_id(request.args.get('item_id'))
    assert city, '城市不存在'
    return jsonify_response({'data': city})
def get_coupon():
    """Fetch one coupon by its id (query arg ``item_id``)."""
    coupon = CouponService.get_coupon(request.args.get('item_id'))
    return jsonify_response({'data': coupon})
# Validation schema for trial create / update.
trial_edit_validator = Inputs(
    {
        'title' : TextField(min_length=1, max_length=100, msg='商品名'),
        'image' : TextField(min_length=1, max_length=1000, msg='商品小图'),
        'rules' : TextField(min_length=1, max_length=1000000, msg='攻略'),
        'process' : TextField(min_length=1, max_length=1000000, msg='流程'),
        'start_time' : TextField(min_length=1, max_length=10000, msg='开始时间'),
        'end_time' : TextField(min_length=1, max_length=10000, msg='结束时间'),
        'cat' : IntChoiceField(choices=[0,1], msg='是否免息'),
        'total' : IntField(msg='总共'),
        'coupon_id' : Optional(IdField(msg='子分类id')),
    }
)


@admin_json_dec(required=True, validator=trial_edit_validator)
def trial_edit(item_id=None):
    """Create a trial campaign (item_id is None) or update an existing one.

    Fixes over the previous revision: removed a duplicated ``total``
    assignment and two leftover debug ``print`` statements.
    """
    title = request.valid_data.get('title')
    image = request.valid_data.get('image')
    rules = request.valid_data.get('rules')
    cat = request.valid_data.get('cat')
    need = request.valid_data.get('need') or 0
    total = request.valid_data.get('total')
    coupon_id = request.valid_data.get('coupon_id')
    end_time = request.valid_data.get('end_time')
    start_time = request.valid_data.get('start_time')
    process = request.valid_data.get('process')
    # Compare to minute precision ('YYYY-MM-DD HH:MM'), as before.
    assert start_time[:16] < end_time[:16], '开始时间必须前于结束时间'
    if cat == 1:
        # cat == 1 trials hand out a coupon; it must exist and target one item.
        assert coupon_id, '请选择优惠券'
        coupon = CouponService.get_coupon(coupon_id)
        assert coupon, '优惠券不存在'
        assert coupon['item_id'], '优惠券类型必须为指定商品'
    if not item_id:
        TrialService.create_trial(
            title, image, cat, total, start_time,
            end_time, rules, process, coupon_id=coupon_id,
            need=need
        )
    else:
        trial = TrialService.get_trial(item_id)
        assert trial, '申请不存在'
        # Never shrink the pool below what has already been handed out.
        assert total > trial['sent'], '不能低于已发放数'
        TrialService.update_trial(item_id, **request.valid_data)
    result = {
        'code': ResponseCode.SUCCESS,
        'msg': ''
    }
    return jsonify_response(result)
@admin_json_dec(required=True)
def get_trial_list():
    """Paginated trial campaigns, newest end_time first."""
    page_size = 10
    page = int(request.args.get('page') or 1)
    offset = (page - 1) * page_size
    has_more, infos = TrialService.get_paged_trials(
        start=offset, limit=page_size, _sort='end_time', _sort_dir='DESC')
    total = TrialService.count_trial(where=None)
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    return jsonify_response({
        'msg': '',
        'now': str(dt_obj.now()),   # server clock, lets the UI mark running/expired trials
        'code': ResponseCode.SUCCESS,
        'infos': infos,
        'total': total,
        'page_info': page_info
    })
@admin_json_dec(required=True)
def get_trial():
    """Fetch one trial campaign by id (query arg ``item_id``)."""
    trial = TrialService.get_trial(request.args.get('item_id'))
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'data': trial,
        'msg': ''
    })
@admin_json_dec()
def trial_applyer_list():
    ''' 申请用户列表 '''
    page_size = 10
    page = int(request.args.get('page') or 1)
    offset = (page - 1) * page_size
    trial_id = request.args.get('item_id')
    trial = TrialService.get_trial(trial_id)
    where = TrialApply.trial_id == trial_id
    has_more, applicants = TrialService.get_paged_apply_user_list(where=where, start=offset)
    # Per-user totals: how often each applicant applied, and how often they won.
    user_ids = [row['user_id'] for row in applicants]
    applied = TrialService.count_user_apply(user_ids)
    received = TrialService.count_user_apply(user_ids, 1)
    for row in applicants:
        row['apply_count'] = applied.get(row['user_id'], 0)
        row['apply_received_count'] = received.get(row['user_id'], 0)
    total = TrialService.count_apply(where)
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'item': trial,
        'infos': applicants,
        'total': total,
        'page_info': page_info,
        'has_more': has_more,
        'msg': ''
    })
@admin_json_dec()
def daily_applyer_list():
    ''' 每日领取优惠券用户列表 '''
    page_size = 10
    page = int(request.args.get('page') or 1)
    offset = (page - 1) * page_size
    daily_id = request.args.get('item_id')
    item = TutorialService.get_daily_coupon(daily_id)
    where = DailyUser.daily_id == daily_id
    has_more, rows = TutorialService.get_daily_user_ids(where=where, start=offset)
    total = TutorialService.count_daily_users(where)
    fetch_user_refs(rows)  # attach user objects for display
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'item': item,
        'infos': rows,
        'total': total,
        'page_info': page_info,
        'has_more': has_more,
        'msg': ''
    })
# Validation schema: which apply of which trial to grant.
send_trial_validator = Inputs(
    {
        'apply_id' : IdField(msg='申请id'),
        'item_id' : IdField(msg='试用id'),
    }
)


@admin_json_dec(validator=send_trial_validator, roles=[0,1,3])
def send_trial():
    ''' 赠送申请 '''
    # Grant a trial application. The conditional update (status 0 -> 1)
    # doubles as a guard against double-granting: only the request that
    # actually flips the row performs the side effects below.
    apply_id = request.valid_data.get('apply_id')
    item_id = request.valid_data.get('item_id')
    trial = TrialService.get_trial(item_id)
    assert trial, '试用不存在'
    apply = TrialService.get_apply(apply_id)
    assert apply, '申请不存在'
    # Only rows still in status 0 (pending) may be granted.
    where = and_(
        TrialApply.id==apply_id,
        TrialApply.status==0
    )
    to_status = 1
    count = TrialService.update_apply_status(where, to_status)
    if count:
        # We won the transition: bump the trial's sent counter and, for
        # coupon-backed trials (cat == 1), issue the coupon and record it
        # on the apply row.
        TrialService.incr_trial_sent_count(item_id)
        if trial['cat']==1:
            user_coupon_id = CouponService.send_user_coupon(apply['user_id'], trial['coupon_id'])
            TrialService.update_apply(TrialApply.id==apply_id, coupon_id=user_coupon_id)
        msg = '发放成功'
    else:
        # Nothing updated: re-read to distinguish "already granted" from
        # other failures.
        apply = TrialService.get_apply(apply_id)
        if apply['status'] in {1,2,3}:
            msg = '已发放给该用户'
        else:
            msg = '发放失败'
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : msg
    }
    return jsonify_response(result)
@admin_json_dec()
def get_promoter_list():
    ''' 推广管理员列表 '''
    # Paginated list of active promoters with aggregate follow / signup /
    # unfollow counts merged in from a single grouped query.
    limit = 10
    page = int(request.args.get('page') or 1)
    start = (page-1)*limit
    where = Promoter.status==1
    has_more, infos = PromoteService.get_paged_promoters(where=where, start=start, limit=limit)
    promoter_ids = [ i['id'] for i in infos ]
    # count rows look like (promoter_id, follow, reg, unfollow) tuples.
    count = PromoteService.count_promoter_admin_reg(promoter_ids)
    follow_count_map = {i[0]:int(i[1]) for i in count}
    reg_count_map = {i[0]:int(i[2]) for i in count}
    unfollow_count_map = {i[0]:int(i[3] or 0) for i in count}
    for info in infos:
        info['follow_count_total'] = follow_count_map.get(info['id'])
        info['reg_count_total'] = reg_count_map.get(info['id'])
        info['unfollow_count_total'] = unfollow_count_map.get(info['id'])
        # HACK: promoter id 1 gets its totals from the hard-coded phone
        # 18801794295 instead of the grouped query — presumably a legacy
        # account whose stats live on the Promoter row itself; verify.
        if info['id']==1:
            promoter = PromoteService.get_promoter_by_phone('18801794295')
            if promoter:
                info['follow_count_total'] = promoter.follow_count
                info['reg_count_total'] = promoter.reg_count
    total = PromoteService.count_promoters(where)
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    result = {
        'code' : ResponseCode.SUCCESS,
        'infos' : infos,
        'total' : total,
        'page_info' : page_info,
        'has_more' : has_more,
        'msg' : ''
    }
    return jsonify_response(result)
# Validation schema for creating a promoter account.
add_promoter_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='请输入用户名'),
        'phone' : MobileField(min_length=1, max_length=100, msg='请输入手机号'),
        'passwd' : TextField(min_length=1, max_length=100, msg='请输入密码'),
    }
)


@admin_json_dec(validator=add_promoter_validator)
def add_promoter():
    ''' 添加推广员 '''
    form = request.valid_data
    PromoteService.create_promoter(form.get('phone'), form.get('passwd'), form.get('name'))
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': ''})
@admin_json_dec()
def get_hospital_user_list():
    ''' 医院管理员列表 '''
    page_size = 10
    page = int(request.args.get('page') or 1)
    offset = (page - 1) * page_size
    where = None
    has_more, admins = HospitalService.get_paged_hospital_admin_users(where=where, start=offset)
    fetch_hospital_refs(admins)  # attach hospital objects for display
    total = HospitalService.count_admin(where)
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'infos': admins,
        'total': total,
        'page_info': page_info,
        'has_more': has_more,
        'msg': ''
    })
# Validation schema for creating a hospital admin account.
add_hospital_admin_validator = Inputs(
    {
        'name' : TextField(min_length=1, max_length=100, msg='请输入用户名'),
        'hospital_id' : IdField(msg='医院id'),
        'passwd' : TextField(min_length=1, max_length=100, msg='请输入密码'),
    }
)


@admin_json_dec(validator=add_hospital_admin_validator, roles=[0,1])
def add_hospital_admin():
    ''' 添加医院管理员 '''
    form = request.valid_data
    HospitalService.create_user(form.get('name'), form.get('passwd'), form.get('hospital_id'))
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': ''})
# Validation schema: which credit application to move on.
to_supply_validator = Inputs(
    {
        'apply_id' : IdField(msg='申请id'),
    }
)


@admin_json_dec(validator=to_supply_validator, roles=[0,1])
def to_supply():
    ''' 补充资料 '''
    # Advance the application to the supplementary-material step.
    apply_id = request.valid_data.get('apply_id')
    CreditService.update_apply(
        and_(CreditApply.id == apply_id),
        status=APPLY_STATUS.SECOND_STEP,
    )
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': ''})
# Validation schema for the student-identity supplement of a credit apply.
supply_apply_validator = Inputs(
    {
        'apply_id' : IdField(msg='申请id'),
        'id_no' : TextField(min_length=0, max_length=100, msg='身份证号'),
        'school' : TextField(min_length=0, max_length=100, msg='学校'),
        'enrollment_time' : TextField(min_length=0, max_length=100, msg='入学时间'),
        'graduate_time' : TextField(min_length=0, max_length=100, msg='毕业时间'),
        'name' : TextField(min_length=0, max_length=100, msg='真实姓名'),
        'major' : TextField(min_length=0, max_length=100, msg='专业'),
        'stu_no' : TextField(min_length=0, max_length=100, msg='学号'),
        'stu_years' : FloatField(msg='学制请输入浮点数如:4'),
        'stu_education' : TextField(min_length=0, max_length=100, msg='学历'),
    }
)


@admin_json_dec(validator=supply_apply_validator)
def supply_apply():
    """Attach supplementary student-identity data to a credit application.

    Dates arrive as 'YYYY-MM-DD' strings and are normalized to datetimes
    in place inside request.valid_data before the bulk update.
    Fix: the datetime format string no longer shadows the builtin ``format``.
    """
    apply_id = request.valid_data.pop('apply_id')
    try:
        request.valid_data['enrollment_time'] = '{} 00:00:00'.format(request.valid_data['enrollment_time'])
        request.valid_data['graduate_time'] = '{} 00:00:00'.format(request.valid_data['graduate_time'])
        dt_format = '%Y-%m-%d %H:%M:%S'
        request.valid_data['graduate_time'] = date_to_datetime(request.valid_data['graduate_time'], dt_format)
        request.valid_data['enrollment_time'] = date_to_datetime(request.valid_data['enrollment_time'], dt_format)
    except Exception:
        # Surface any parse failure as the same admin-facing assertion error.
        assert 0, '入学时间,毕业时间输入有误,请按格式2015-09-01输入'
    assert len(request.valid_data['id_no'])==18, '身份证号码长度有误'
    where = and_(
        CreditApply.id==apply_id,
    )
    request.valid_data['update_time'] = dt_obj.now()
    request.valid_data['has_supply'] = True
    count = CreditService.update_apply(where, **request.valid_data)
    result = {
        'code' : ResponseCode.SUCCESS,
        'msg' : '补充成功' if count else '申请不存在'
    }
    return jsonify_response(result)
# Validation schema: hospital id and the target status (0 = off, 1 = live).
set_hospital_status_validator = Inputs(
    {
        'item_id' : IdField(msg='医院id'),
        'status' : IntChoiceField(choices=[0,1], msg='医院状态'),
    }
)


@admin_json_dec(required=True, validator=set_hospital_status_validator)
def set_hospital_status():
    """Put a hospital on/off line, cascading to its items.

    Item statuses 1 and 2 are swapped in lockstep with the hospital:
    taking the hospital live flips its status-2 items to 1, taking it
    down flips status-1 items to 2.
    Fixes: removed a debug ``print`` and an unused ``data`` dict.
    """
    item_id = request.valid_data.get('item_id')
    status = request.valid_data.get('status')
    msg = '上线成功' if status == 1 else '下线成功'
    count = ItemService.set_hospital_status(item_id, status)
    assert count, '医院不存在'
    if status == 1:
        where = and_(
            Item.hospital_id == item_id,
            Item.status == 2
        )
        ItemService.set_hospital_item_status(where, 1)
    else:
        where = and_(
            Item.hospital_id == item_id,
            Item.status == 1
        )
        ItemService.set_hospital_item_status(where, 2)
    result = {
        'msg': msg,
        'code': ResponseCode.SUCCESS
    }
    return jsonify_response(result)
@admin_json_dec(required=True)
def get_daily_coupon_list():
    ''' 每日领取优惠券列表 '''
    page_size = 10
    page = int(request.args.get('page') or 1)
    offset = (page - 1) * page_size
    has_more, rows = TutorialService.get_paged_daily_coupons(
        _sort='start_time', start=offset, limit=page_size)
    total = TutorialService.count_daily_coupons(None)
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    # Attach coupon objects and their usable time windows for display.
    fetch_coupon_refs(rows)
    set_coupon_use_time(rows)
    return jsonify_response({
        'msg': '',
        'code': ResponseCode.SUCCESS,
        'infos': rows,
        'total': total,
        'page_info': page_info
    })
@admin_json_dec(required=True)
def get_tutorial_list():
    ''' 美攻略列表 '''
    page_size = 10
    page = int(request.args.get('page') or 1)
    offset = (page - 1) * page_size
    has_more, rows = TutorialService.get_paged_tutorial_entries(start=offset, limit=page_size)
    total = TutorialService.count_tutorials(None)
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    return jsonify_response({
        'msg': '',
        'code': ResponseCode.SUCCESS,
        'infos': rows,
        'total': total,
        'page_info': page_info
    })
@admin_json_dec(required=True)
def get_tutorial():
    """Fetch one tutorial entry by id (query arg ``item_id``)."""
    tutorial = TutorialService.get_tutorial(request.args.get('item_id'))
    assert tutorial, '攻略不存在'
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'data': tutorial
    })
# Validation schema for the daily coupon giveaway create / update form.
daily_coupon_edit_validator = Inputs(
    {
        'start_time' : TextField(min_length=1, max_length=100, msg='开始时间'),
        'end_time' : TextField(min_length=1, max_length=100, msg='结束时间'),
        'use_condition' : TextField(min_length=0, max_length=100, msg='使用条件'),
        'total' : IntField(msg='领取总数量'),
        'title' : Optional(TextField(min_length=0, max_length=100, msg='每日优惠标题')),
        'coupon_id' : IdField(msg='优惠券id')
    }
)


@admin_json_dec(required=True, validator=daily_coupon_edit_validator, roles=[0,1])
def daily_coupon_edit(item_id=None):
    """Create a daily coupon giveaway (item_id is None) or update one."""
    form = request.valid_data
    start_time = form.get('start_time')
    end_time = form.get('end_time')
    total = form.get('total')
    assert start_time < end_time, '开始时间不能晚于结束时间'
    if item_id:
        existing = TutorialService.get_daily_coupon(item_id)
        assert existing, '领取不存在'
        # Never shrink the pool below what has already been claimed.
        assert total >= existing['sent'], '总数不能低于已领取数'
        TutorialService.update_daily_coupon(item_id, **form)
    else:
        TutorialService.create_daily_coupon(
            form.get('title'), form.get('coupon_id'), start_time, end_time,
            total, '', form.get('use_condition'))
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': ''})
@admin_json_dec(required=True)
def get_daily_coupon():
    """Fetch one daily coupon giveaway by id (query arg ``item_id``)."""
    daily = TutorialService.get_daily_coupon(request.args.get('item_id'))
    assert daily, '领取不存在'
    return jsonify_response({
        'data': daily,
        'code': ResponseCode.SUCCESS,
        'msg': ''
    })
# Validation schema: tutorial id and target status (0 = off, 1 = live).
set_tutorial_status_validator = Inputs(
    {
        'item_id' : IdField(msg='攻略id'),
        'status' : IntChoiceField(choices=[0,1], msg='攻略状态'),
    }
)


@admin_json_dec(required=True, validator=set_tutorial_status_validator)
def set_tutorial_status():
    """Toggle a tutorial on/off line.

    Fix: removed a ``data`` dict that was built but never used.
    """
    item_id = request.valid_data.get('item_id')
    status = request.valid_data.get('status')
    TutorialService.set_tutorial_status(item_id, status)
    msg = '上线成功' if status == 1 else '下线成功'
    result = {
        'code': ResponseCode.SUCCESS,
        'msg': msg
    }
    return jsonify_response(result)
# Validation schema: which phone's counters to reset.
reset_user_vcode_validator = Inputs(
    {
        'phone': MobileField(msg='请输入手机号')
    }
)


@admin_json_dec(validator=reset_user_vcode_validator, roles=[0,4])
def reset_user_vcode_sent():
    ''' 重置验证码次数 '''
    # Clear every per-phone rate-limit counter so the user can retry today.
    from ops.cache import InvalidUserPasswdCache
    from ops.cache import InvalidUserResetVcodeCache
    from ops.cache import InvalidUserSignupVcodeCache
    from ops.cache import SmsCache
    phone = request.valid_data.get('phone')
    for cache in (InvalidUserPasswdCache, InvalidUserResetVcodeCache, InvalidUserSignupVcodeCache):
        cache.clear_today_counter(phone)
    SmsCache.clear_sent_count(phone)
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': '重置成功'})
# Validation schema: phone plus flow type (1 = signup, 2 = existing user).
get_user_vcode_validator = Inputs(
    {
        'phone': MobileField(msg='请输入手机号'),
        'cat' : IntChoiceField(choices=[1,2], msg='请选择类型')
    }
)


@admin_json_dec(validator=get_user_vcode_validator, roles=[0,4])
def get_user_vcode():
    ''' 获取用户验证码 '''
    from ops.cache import SmsCache
    phone = request.valid_data.get('phone')
    flow = request.valid_data.get('cat')
    user = UserService.get_user_by_phone(phone)
    # Flow 1 is signup (user must not exist yet); flow 2 needs an account.
    if flow == 1:
        assert not user, '用户已存在'
    else:
        assert user, '用户不存在'
    vcode = SmsCache.get_vcode(phone)
    assert vcode, '验证码不存在, 请获取验证码'
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'vcode': vcode,
        'count': SmsCache.get_sent_count(phone)
    })
# Validation schema for tutorial create / update; 'items' is a
# comma-separated list of item ids.
tutorial_edit_validator = Inputs(
    {
        'title' : TextField(min_length=1, max_length=100, msg='攻略标题'),
        'image' : TextField(min_length=1, max_length=100, msg='攻略首页推荐图'),
        'icon' : TextField(min_length=1, max_length=100, msg='攻略列表icon'),
        'photo' : TextField(min_length=1, max_length=100, msg='攻略详情大图'),
        'items' : REGField(pattern='^(\d+,?)+$', msg='请输入项目id,逗号隔开')
    }
)


@admin_json_dec(required=True, validator=tutorial_edit_validator, roles=[0,1])
def tutorial_edit(item_id=None):
    """Create a tutorial entry (item_id is None) or update an existing one."""
    form = request.valid_data
    items = form.get('items')
    # Every referenced item id must resolve to a real item.
    for ref_id in items.split(','):
        assert ItemService.get_item_dict_by_id(ref_id), 'ID为{}的项目不存在'.format(ref_id)
    if item_id:
        TutorialService.update_tutorial_entry(item_id, **form)
    else:
        item_id = TutorialService.create_tutorial_entry(
            form.get('title'), form.get('icon'), form.get('image'),
            form.get('photo'), items)
    return jsonify_response({'item_id': item_id})
# Validation schema: which coupon to hand to which phone's user.
send_user_coupon_validator = Inputs(
    {
        'phone' : MobileField(min_length=1, max_length=100, msg='请输入用户手机号码'),
        'coupon_id' : IdField(msg='请选择优惠券')
    }
)


@admin_json_dec(required=True, validator=send_user_coupon_validator, roles=[0,1,4])
def send_user_coupon():
    """Manually grant a coupon to the user owning the given phone number."""
    phone = request.valid_data.get('phone')
    coupon_id = request.valid_data.get('coupon_id')
    user = UserService.get_user_by_phone(phone)
    assert user, '用户不存在'
    assert CouponService.get_coupon(coupon_id), '优惠券不存在'
    CouponService.send_user_coupon(user.id, coupon_id)
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': '发放成功'})
@admin_json_dec(required=True, validator=None, roles=[0,1,4])
def set_cats_order():
    """Persist category display order; body is a JSON array of cat ids."""
    for position, cat_id in enumerate(json.loads(request.data)):
        ItemService.set_item_cat_order(cat_id, position)
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': '排序成功'})
# NOTE(review): defined but not wired up — the view below is decorated
# with validator=None and parses request.data itself. Kept for
# compatibility in case it is referenced elsewhere.
set_city_validator = Inputs(
    {
        'city_id' : Optional(IdField(msg='请选择城市'))
    }
)


@admin_json_dec(required=True, validator=None)
def set_city():
    """Pin the current admin's working city and mirror it into a cookie.

    Fix: removed a leftover debug ``print``.
    """
    city_id = json.loads(request.data).get('city_id')
    where = AdminUser.name == request.name
    AdminService.update(where, city_id=city_id)
    result = {
        'code': ResponseCode.SUCCESS,
        'msg': '设置成功'
    }
    response = jsonify_response(result)
    # ~300-day cookie so the choice survives across sessions.
    set_cookie(response, 'city_id', str(city_id or ''), 86400*300)
    return response
@admin_json_dec(required=True)
def get_question_list():
    """Paginated redpack questions (large page size: effectively all)."""
    page_size = 100
    page = int(request.args.get('page') or 1)
    offset = (page - 1) * page_size
    has_more, rows = RedpackService.get_paged_redpack_questions(limit=page_size, start=offset)
    total = RedpackService.count_redpack_question()
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    return jsonify_response({
        'infos': rows,
        'page_info': page_info,
        'total': total,
    })
# Validation schema for creating a redpack question.
new_question_validator = Inputs(
    {
        'content' : TextField(msg='问题内容')
    }
)


@admin_json_dec(required=True, validator=new_question_validator)
def new_question():
    """Create a redpack question and echo its id back."""
    question_id = RedpackService.create_question(request.valid_data.get('content'))
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'question_id': question_id,
        'msg': '创建成功',
    })
@admin_json_dec(required=True)
def get_user_question_list():
    """Paginated user redpack questions plus global money/redpack totals."""
    page_size = 10
    page = int(request.args.get('page') or 1)
    sort_field = request.args.get('_sort') or 'view_count'
    is_random = request.args.get('is_random')
    offset = (page - 1) * page_size
    where = None
    if is_random:
        where = RedpackUserQuestion.is_random == is_random
    has_more, rows = RedpackService.get_paged_user_question(
        where=where, limit=page_size, _sort=sort_field, start=offset)
    total = RedpackService.count_redpack_user_question(where)
    question_ids = [row['id'] for row in rows]
    total_money = RedpackService.total_money()
    total_redpack = RedpackService.count_redpack()
    page_info = abbreviated_pages(int(math.ceil(total / (page_size * 1.0))), page)
    # Only rows tied to a canned question get the question object attached.
    with_question = filter(lambda row: row['question_id'], rows)
    fetch_question_refs(with_question, dest_key='the_question')
    fetch_qrcodeuser_refs(rows)
    return jsonify_response({
        'infos': rows,
        'page_info': page_info,
        'total': total,
        'total_money': total_money,
        'total_redpack': total_redpack
    })
@admin_json_dec(required=True)
def get_room_list():
    ''' 寝室列表 '''
    # Paginated room-design entries. When sorting by vote_count the page of
    # ids comes from the RoomDesignVoteCounter ranking (a pre-sorted
    # leaderboard, presumably cache-backed — verify) and DB rows are then
    # re-ordered to match; any other sort goes straight to the DB.
    limit = 10
    page = int(request.args.get('page', 1))
    start = (page-1)*limit
    total = RoomDesignService.count_rooms()
    page_info = abbreviated_pages(int(math.ceil(total/(limit*1.0))), page)
    _sort = request.args.get('_sort') or 'vote_count'
    # Inclusive leaderboard slice for this page.
    room_ids = RoomDesignVoteCounter.get_paged_rank_room_ids(start, start+limit-1)
    where = RoomDesignDetail.id.in_(room_ids)
    # Maps room id -> its position within this page, used to restore
    # leaderboard order after the unordered DB fetch.
    sort_map = {i:index for index, i in enumerate(room_ids)}
    if _sort=='vote_count':
        has_more, item_list = RoomDesignService.get_paged_rooms(where=where)
        item_list.sort(key=lambda i:(sort_map[i['id']]))
    else:
        # DB-side sort: drop the id filter and paginate in the query.
        where = None
        has_more, item_list = RoomDesignService.get_paged_rooms(where=where, _sort=_sort, start=start, limit=limit)
    for item in item_list:
        # Global rank regardless of the page's sort order.
        item['rank'] = RoomDesignVoteCounter.rank(item['id'])
    fetch_user_refs(item_list)
    fetch_school_refs(item_list)
    result = {
        'infos' : item_list,
        'page_info' : page_info,
        'total' : total,
    }
    return jsonify_response(result)
@admin_json_dec()
def get_room_detail():
    ''' 寝室详情 '''
    room_id = request.args.get('room_id')
    room = RoomDesignService.get_room_dict_by_id(room_id)
    fetch_user_refs((room,))
    # Replace empty picture slots with a placeholder image; the index is
    # appended as a cache-busting query string so each slot stays unique.
    placeholder = 'http://7xnpdb.com2.z0.glb.qiniucdn.com/o_1a4u9raim1rpcckf59vq7q1gth1LG11IG]7F5G5%7D861P1IUW[T.jpg'
    room['pic_list'] = [
        pic if pic else placeholder + '?' + str(slot)
        for slot, pic in enumerate(room['pic_list'])
    ]
    room['rank'] = RoomDesignVoteCounter.rank(room_id)
    return jsonify_response({
        'code': ResponseCode.SUCCESS,
        'msg': '',
        'room': room
    })
# Validation schema: remark text plus the order it belongs to.
remark_order_validator = Inputs(
    {
        'remark' : TextField(msg='备注内容'),
        'order_id' : IdField(msg='订单id'),
    }
)


@admin_json_dec(validator=remark_order_validator)
def remark_order():
    """Attach an admin remark to an order."""
    form = request.valid_data
    OrderService.update_order(
        Order.id == form.get('order_id'),
        commit=True,
        remark=form.get('remark'),
    )
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': '备注成功'})
# Validation schema: remark text plus the feedback entry it belongs to.
remark_advice_validator = Inputs(
    {
        'remark' : TextField(msg='备注内容'),
        'advice_id' : IdField(msg='反馈id'),
    }
)


@admin_json_dec(validator=remark_advice_validator)
def remark_useradvice():
    """Attach an admin remark to a user feedback entry."""
    form = request.valid_data
    AdminService.remark_useradvice(form.get('advice_id'), remark=form.get('remark'))
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': '备注成功'})
# Validation schema: remark text + image plus the credit apply they belong to.
remark_apply_validator = Inputs(
    {
        'remark' : TextField(msg='备注内容'),
        'remark_img' : TextField(msg='备注图片'),
        'apply_id' : IdField(msg='申请id'),
    }
)


@admin_json_dec(validator=remark_apply_validator)
def remark_apply():
    """Attach an admin remark (text and image) to a credit application."""
    form = request.valid_data
    CreditService.update_apply(
        CreditApply.id == form.get('apply_id'),
        remark=form.get('remark'),
        remark_img=form.get('remark_img'),
    )
    return jsonify_response({'code': ResponseCode.SUCCESS, 'msg': '备注成功'})
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,370
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/42d4367e28b2_.py
|
"""empty message
Revision ID: 42d4367e28b2
Revises: 569e3d7f70ab
Create Date: 2015-12-10 10:58:36.611750
"""
# revision identifiers, used by Alembic.
revision = '42d4367e28b2'
down_revision = '569e3d7f70ab'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add a nullable ``title`` column to both coupon tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('coupon', sa.Column('title', sa.String(length=300), nullable=True))
    op.add_column('user_coupon', sa.Column('title', sa.String(length=300), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Drop the ``title`` columns added by :func:`upgrade`."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user_coupon', 'title')
    op.drop_column('coupon', 'title')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,371
|
qsq-dm/mff
|
refs/heads/master
|
/ops/data.py
|
# -*- coding: utf-8 -*-
import requests
from util.sqlerr import SQL_DUPLICATE_NAME
from models import db
from models import School
from models import City
from models import HelpCat
from models import HelpEntry
from models import ImageSize
from ops.utils import get_items
from ops.utils import get_page
from ops.utils import count_items
from util.utils import prefix_img_domain
from settings import celery
class DataService(object):
@staticmethod
def create_school(name, link, city_name):
try:
school = School(name=name, link=link, city_name=city_name)
db.session.add(school)
db.session.commit()
return school.id
except Exception as e:
db.session.rollback()
import traceback
traceback.print_exc()
@staticmethod
def get_paged_schools(**kw):
return get_page(School, {}, **kw)
@staticmethod
def get_schools():
return School.query.all()
@staticmethod
def get_paged_cities(**kw):
return get_page(City, {}, **kw)
@staticmethod
def create_city(name, city_code, amap_code):
city = City(name=name, amap_code=amap_code, city_code=city_code)
db.session.add(city)
db.session.commit()
return city.id
@staticmethod
def update_city(city_id, **kw):
count = City.query.filter(City.id==city_id).update(kw)
db.session.commit()
return count
@staticmethod
def get_city_by_baidu_city_code(city_code):
return City.query.filter(City.city_code==city_code).first()
@staticmethod
def count_schools(where=None):
return count_items(School, where)
@staticmethod
def get_school_city_names():
rows = db.session.query(School.city_name).distinct().all()
return [i.city_name for i in rows]
@staticmethod
def create_help_cat(id, name):
try:
cat = HelpCat(id=id, name=name)
db.session.add(cat)
db.session.commit()
return cat.id
except Exception as e:
db.session.rollback()
if SQL_DUPLICATE_NAME.search(str(e)):
print 'duplicate entry name'
@staticmethod
def create_help_entry(cat_id, title, content):
entry = HelpEntry(cat_id=cat_id, title=title, content=content)
db.session.add(entry)
db.session.commit()
@staticmethod
def get_paged_helpcats(**kw):
_sort_dir = 'ASC'
return get_page(HelpCat, {}, limit=1000, _sort_dir=_sort_dir, **kw)
@staticmethod
def get_paged_helpentries(**kw):
_sort_dir = 'ASC'
return get_page(HelpEntry, {}, limit=1000, _sort_dir=_sort_dir, **kw)
@staticmethod
def get_helpentry_by_id(entry_id):
entry = HelpEntry.query.filter(HelpEntry.id==entry_id).first()
if entry: return entry.as_dict()
@staticmethod
def get_paged_city_list(**kw):
return get_page(City, {}, **kw)
@staticmethod
def get_city_dict_by_id(city_id):
city = City.query.filter(City.id==city_id).first()
if city: return city.as_dict()
@staticmethod
@celery.task
def set_img_size(key):
''' 设置图片宽高 '''
full_url = prefix_img_domain(key)
print full_url
result = requests.get('{}?imageInfo'.format(full_url))
if result.status_code!=200:
assert 0, '图片不存在'
data = result.json()
width = data['width']
height = data['height']
try:
img = ImageSize(key=key, width=width, height=height)
db.session.add(img)
db.session.commit()
except Exception as e:
import traceback
traceback.print_exc()
db.session.rollback()
@staticmethod
def get_imgs_size_by_keys(keys):
if isinstance(keys, (tuple, list)):
query = ImageSize.key.in_(keys)
else:
query = ImageSize.key==keys
sizes = ImageSize.query.filter(query).all()
return [ i.as_dict() for i in sizes]
@staticmethod
def get_schools_dict_by_ids(school_ids):
''' '''
where = School.id.in_(school_ids)
results = School.query.filter(where).all()
return [i.as_dict() for i in results]
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,372
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/4c9fdc97c246_.py
|
"""empty message
Revision ID: 4c9fdc97c246
Revises: 51187e1e4dbc
Create Date: 2015-11-16 11:05:15.297821
"""
# revision identifiers, used by Alembic.
revision = '4c9fdc97c246'
down_revision = '51187e1e4dbc'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply migration 4c9fdc97c246: add nullable ``user.avatar`` column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('avatar', sa.String(length=1000), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert migration 4c9fdc97c246: drop ``user.avatar``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'avatar')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,373
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/c855246d7e8_.py
|
"""empty message
Revision ID: c855246d7e8
Revises: 1bef4d9bd99b
Create Date: 2015-11-03 10:38:03.675764
"""
# revision identifiers, used by Alembic.
revision = 'c855246d7e8'
down_revision = '1bef4d9bd99b'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply migration c855246d7e8: add nullable ``credit_apply.reason`` column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('credit_apply', sa.Column('reason', sa.String(length=500), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert migration c855246d7e8: drop ``credit_apply.reason``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('credit_apply', 'reason')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,374
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/2ab4005efb6c_.py
|
"""empty message
Revision ID: 2ab4005efb6c
Revises: 2a01c5929823
Create Date: 2016-01-27 16:01:27.623336
"""
# revision identifiers, used by Alembic.
revision = '2ab4005efb6c'
down_revision = '2a01c5929823'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply migration 2ab4005efb6c: add nullable ``room_design_detail.addr`` column."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('room_design_detail', sa.Column('addr', sa.String(length=30), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert migration 2ab4005efb6c: drop ``room_design_detail.addr``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('room_design_detail', 'addr')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,375
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/5784ac6510c3_.py
|
"""empty message
Revision ID: 5784ac6510c3
Revises: 345ee23bca8
Create Date: 2015-12-09 18:03:16.566805
"""
# revision identifiers, used by Alembic.
revision = '5784ac6510c3'
down_revision = '345ee23bca8'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply migration 5784ac6510c3: add ``trial_apply.cat`` and a
    ``trial_apply.coupon_id`` FK to ``coupon.id``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('trial_apply', sa.Column('cat', sa.Integer(), nullable=True))
    op.add_column('trial_apply', sa.Column('coupon_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'trial_apply', 'coupon', ['coupon_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Revert migration 5784ac6510c3: drop the coupon FK, then the added columns.

    NOTE(review): the FK was created with name=None (DB-generated name);
    dropping by ``None`` relies on MySQL naming conventions -- confirm the
    constraint name resolves on the target backend.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'trial_apply', type_='foreignkey')
    op.drop_column('trial_apply', 'coupon_id')
    op.drop_column('trial_apply', 'cat')
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,376
|
qsq-dm/mff
|
refs/heads/master
|
/ops/credit.py
|
# -*- coding: utf-8 -*-
from decimal import Decimal
from sqlalchemy import and_
from models import db
from models import CreditUseLog
from models import Order
from models import UserCredit
from models import PeriodPayLog
from models import PeriodPayChoice
from models import CreditApply
from util.sqlerr import SQL_DUPLICATE
from util.utils import get_due_time
from util.utils import dt_obj
from ops.utils import count_items
from ops.utils import get_page
from settings import DEFAULT_CREDIT
from constants import CREDIT_STATUS
class CreditService(object):
    """Credit-line management: user quotas, installment (period) pay logs and
    credit applications."""

    @staticmethod
    def create_default_credit(user_id, total=DEFAULT_CREDIT, status=0):
        """Create a credit row for the user.

        Returns True on insert, False when the row already exists
        (duplicate key); re-raises any other DB error.
        """
        try:
            credit = UserCredit(user_id=user_id, total=total, status=status)
            db.session.add(credit)
            db.session.commit()
            return True
        except Exception as e:
            db.session.rollback()
            if SQL_DUPLICATE.search(str(e)):
                return False
            raise  # preserve the original traceback (was: raise(e))

    @staticmethod
    def get_user_credit(user_id):
        """The user's UserCredit row, or None."""
        return UserCredit.query.filter(UserCredit.user_id == user_id).first()

    @staticmethod
    def set_user_credit_total(user_id, total):
        """Set the user's credit total after a review passes."""
        credit = CreditService.get_user_credit(user_id)
        if not credit:
            # NOTE(review): a missing row is created with status=1 while the
            # update branch sets CREDIT_STATUS.VERIFIED -- confirm both mean
            # the same state.
            CreditService.create_default_credit(user_id, total, status=1)
        else:
            UserCredit.query.filter(UserCredit.user_id == user_id).update(
                {'total': total, 'status': CREDIT_STATUS.VERIFIED})
        db.session.commit()

    @staticmethod
    def update_user_credit_status(user_id, status):
        """Set the credit status flag; returns the affected row count."""
        count = UserCredit.query.filter(UserCredit.user_id == user_id).update({'status': status})
        db.session.commit()
        return count

    @staticmethod
    def init_credit(user_id):
        """Fetch the user's credit row, creating a default one if missing."""
        credit = CreditService.get_user_credit(user_id)
        if not credit:
            CreditService.create_default_credit(user_id)
            credit = CreditService.get_user_credit(user_id)
        return credit

    @staticmethod
    def modify_credit(user_id, amount):
        """Atomically adjust the used credit by ``amount`` (positive consumes,
        negative releases).

        Returns:
            1 -- applied against a verified line (status == 2)
            2 -- applied against an unverified line (status == 1)
            0 -- insufficient quota / no matching row
        """
        amount = Decimal(str(amount))
        update_data = {'used': UserCredit.used + amount}
        # The guarded UPDATE only matches while the new `used` stays within
        # [0, total], so check-and-modify is a single atomic statement.
        # Verified lines (status 2 -> result 1) are tried before unverified
        # ones (status 1 -> result 2), matching the original precedence.
        for credit_status, result in ((2, 1), (1, 2)):
            query = and_(
                UserCredit.user_id == user_id,
                UserCredit.status == credit_status,
                UserCredit.used + amount <= UserCredit.total,
                UserCredit.used + amount >= 0,
            )
            count = UserCredit.query.filter(query).update(update_data)
            if count:
                log = CreditUseLog(user_id=user_id, amount=amount, status=result)
                db.session.add(log)
                db.session.commit()
                return result
        db.session.commit()
        return 0

    @staticmethod
    def get_period_choices():
        """All installment plan choices."""
        return PeriodPayChoice.query.all()

    @staticmethod
    def get_period_choice(choice_id):
        """One installment plan choice by id, or None."""
        return PeriodPayChoice.query.filter(PeriodPayChoice.id == choice_id).first()

    @staticmethod
    def gen_order_period_logs(order_id):
        """Create one PeriodPayLog per installment for a credit-paid order.

        Raises AssertionError when the order does not exist.
        """
        order = Order.query.filter(Order.id == order_id).first()
        assert order, '订单不存在'
        choice = PeriodPayChoice.query.filter(PeriodPayChoice.id == order.credit_choice_id).first()
        # Amount financed on credit = credit part of the order total.
        total_amount = order.credit_amount - order.total_fee
        period_amount = total_amount / choice.period_count
        period_fee = float(period_amount) * choice.period_fee
        for i in range(1, 1 + choice.period_count):
            log = PeriodPayLog(
                order_id=order_id,
                period_count=choice.period_count,
                user_id=order.user_id,
                period_pay_index=i,
                amount=period_amount,
                fee=period_fee,
                deadline=get_due_time(i),
            )
            db.session.add(log)
        db.session.commit()

    @staticmethod
    def get_period_pay_logs(user_id, where=None):
        """Installment logs for a user, optionally further filtered."""
        conds = [PeriodPayLog.user_id == user_id]
        if where is not None:
            conds.append(where)
        return PeriodPayLog.query.filter(and_(*conds)).all()

    @staticmethod
    def get_paged_pay_logs(**kw):
        """Paginated installment logs."""
        return get_page(PeriodPayLog, {}, **kw)

    @staticmethod
    def add_apply(user_id, **kw):
        """Create a credit application (step one of the flow).

        Returns the new id, or None when the user already applied
        (duplicate key); re-raises any other DB error.
        """
        try:
            kw['create_time'] = dt_obj.now()
            apply = CreditApply(user_id=user_id, **kw)
            db.session.add(apply)
            db.session.commit()
            return apply.id
        except Exception as e:
            db.session.rollback()
            if not SQL_DUPLICATE.search(str(e)):
                raise

    @staticmethod
    def update_apply(where, **kw):
        """Update applications matching ``where``; stamps update_time if absent."""
        kw.setdefault('update_time', dt_obj.now())
        count = CreditApply.query.filter(where).update(kw)
        db.session.commit()
        return count

    @staticmethod
    def create_period_choice(**kw):
        """Create an installment plan choice; returns its id, or None on error."""
        try:
            choice = PeriodPayChoice(**kw)
            db.session.add(choice)
            db.session.commit()
            return choice.id
        except Exception as e:
            db.session.rollback()

    @staticmethod
    def get_paged_apply_list(**kw):
        """Paginated credit applications."""
        return get_page(CreditApply, {}, **kw)

    @staticmethod
    def count_apply(where=None):
        """Count applications matching the optional filter."""
        return count_items(CreditApply, where=where)

    @staticmethod
    def get_apply_dict_by_id(apply_id):
        """One application as a dict, or None."""
        apply = CreditApply.query.filter(CreditApply.id == apply_id).first()
        if apply: return apply.as_dict()

    @staticmethod
    def get_apply_dict_by_userid(user_id):
        """The user's application as a dict, or None."""
        apply = CreditApply.query.filter(CreditApply.user_id == user_id).first()
        if apply: return apply.as_dict()

    @staticmethod
    def update_pay_log(log_ids):
        """Mark the given pending installments as repaid (all-or-nothing).

        The change is committed only when every id in ``log_ids`` matched a
        pending (status==0) row; otherwise it is rolled back and False is
        returned.

        BUGFIX: the original committed BEFORE checking the affected count, so
        the later rollback was a no-op and partial repayments were silently
        persisted even though False was returned.
        """
        query = and_(
            PeriodPayLog.id.in_(log_ids),
            PeriodPayLog.status == 0,
        )
        repayment_time = dt_obj.now()
        count = PeriodPayLog.query.filter(query).update(
            {'status': 1, 'repayment_time': repayment_time},
            synchronize_session=False,
        )
        if count == len(log_ids):
            db.session.commit()
            return True
        db.session.rollback()
        return False

    @staticmethod
    def cancel_pay_logs(order_id):
        """Cancel (status=2) every pending installment of an order."""
        query = and_(
            PeriodPayLog.order_id == order_id,
            PeriodPayLog.status == 0,
        )
        count = PeriodPayLog.query.filter(query).update({'status': 2})
        db.session.commit()
        return count

    @staticmethod
    def get_paged_period_choices(**kw):
        """Paginated installment plan choices."""
        return get_page(PeriodPayChoice, {}, **kw)

    @staticmethod
    def get_paged_period_pay_logs(**kw):
        """Paginated installment logs."""
        return get_page(PeriodPayLog, {}, **kw)

    @staticmethod
    def count_logs(where=None):
        """Count installment logs matching the optional filter."""
        return count_items(PeriodPayLog, where=where)
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,377
|
qsq-dm/mff
|
refs/heads/master
|
/migrations/versions/3ae16db9c83e_.py
|
"""empty message
Revision ID: 3ae16db9c83e
Revises: 3d0882a6044
Create Date: 2016-01-27 15:03:50.881072
"""
# revision identifiers, used by Alembic.
revision = '3ae16db9c83e'
down_revision = '3d0882a6044'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Apply migration 3ae16db9c83e: fold ``room_design_apply`` into
    ``room_design_detail``.

    Drops the old FKs first (@xianpeng), removes the ``room_design_apply``
    table, copies its ``applyer_name``/``room_name`` columns onto
    ``room_design_detail``, and re-points the vote-log FK at the detail table.
    """
    # foreign key constraints must be deleted in order @xianpeng
    op.drop_constraint(u'room_design_detail_ibfk_1', 'room_design_detail', type_='foreignkey')
    op.drop_constraint(u'room_design_vote_log_ibfk_1', 'room_design_vote_log', type_='foreignkey')
    op.drop_column('room_design_detail', 'room_id')
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('room_design_apply')
    op.add_column('room_design_detail', sa.Column('applyer_name', sa.String(length=30), nullable=True))
    op.add_column('room_design_detail', sa.Column('room_name', sa.String(length=30), nullable=True))
    op.create_unique_constraint(None, 'room_design_detail', ['room_name'])
    op.create_foreign_key(None, 'room_design_vote_log', 'room_design_detail', ['room_id'], ['id'])
    ### end Alembic commands ###
def downgrade():
    """Revert migration 3ae16db9c83e: recreate ``room_design_apply`` and
    restore the original FKs/columns on ``room_design_detail`` and
    ``room_design_vote_log``.

    NOTE(review): constraints created with name=None in upgrade() are dropped
    here by ``None`` as well -- relies on backend naming; confirm on MySQL.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'room_design_vote_log', type_='foreignkey')
    op.create_foreign_key(u'room_design_vote_log_ibfk_1', 'room_design_vote_log', 'room_design_apply', ['room_id'], ['id'])
    op.add_column('room_design_detail', sa.Column('room_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.create_foreign_key(u'room_design_detail_ibfk_1', 'room_design_detail', 'room_design_apply', ['room_id'], ['id'])
    op.drop_constraint(None, 'room_design_detail', type_='unique')
    op.drop_column('room_design_detail', 'room_name')
    op.drop_column('room_design_detail', 'applyer_name')
    op.create_table('room_design_apply',
        sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
        sa.Column('school_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
        sa.Column('user_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
        sa.Column('room_name', mysql.VARCHAR(length=30), nullable=True),
        sa.Column('applyer_name', mysql.VARCHAR(length=30), nullable=True),
        sa.Column('phone', mysql.VARCHAR(length=30), nullable=True),
        sa.Column('addr', mysql.VARCHAR(length=30), nullable=True),
        sa.Column('create_time', mysql.DATETIME(), nullable=True),
        sa.ForeignKeyConstraint(['school_id'], [u'school.id'], name=u'room_design_apply_ibfk_1'),
        sa.ForeignKeyConstraint(['user_id'], [u'user.id'], name=u'room_design_apply_ibfk_2'),
        sa.PrimaryKeyConstraint('id'),
        mysql_default_charset=u'utf8',
        mysql_engine=u'InnoDB'
    )
    ### end Alembic commands ###
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
19,378
|
qsq-dm/mff
|
refs/heads/master
|
/ops/common.py
|
# -*- coding: utf-8 -*-
from sqlalchemy import and_
from models import db
from models import Order
from ops.user import UserService
from ops.order import OrderService
from ops.credit import CreditService
from ops.activity import ActivityService
from ops.room_design import RoomDesignService
from ops.item import ItemService
from constants import ORDER_STATUS
from thirdparty.sms import send_sms_new_order
def pay_success_action(order, send_verified=False, need_pay=True, **kw):
    '''Handle a successful payment for *order*.

    Marks the order PAY_SUCCESS (only while it is still TO_PAY or NEW_ORDER),
    and — once the credit review has passed — creates a service code,
    generates credit installment logs when credit was used, and texts the
    buyer the hospital details together with the service code.

    send_verified: for orders that need an extra WeChat/Alipay cash payment
        while the credit amount is still under review, the cash part may be
        paid before the review finishes; at that point no SMS was sent (we
        returned early). After the review passes, the SMS must go out no
        matter how many rows the status update touched, so the caller passes
        send_verified=True.
    need_pay: pass False for orders that require no further payment; the SMS
        is then sent even when the status update changed zero rows.
    **kw: extra column values forwarded to OrderService.update_order;
        kw['status'] is always forced to PAY_SUCCESS.
    '''
    new_status = ORDER_STATUS.PAY_SUCCESS
    # Guard against double-processing: only orders still awaiting payment
    # (TO_PAY / NEW_ORDER) are transitioned to PAY_SUCCESS.
    where = and_(
        Order.id==order.id,
        Order.status.in_([ORDER_STATUS.TO_PAY, ORDER_STATUS.NEW_ORDER])
    )
    kw['status'] = new_status
    # count = number of rows the update actually changed (0 if the order
    # had already left the payable states).
    count = OrderService.update_order(where, **kw)
    # Credit portion not verified yet: postpone service code + SMS until the
    # review passes (this function runs again with send_verified=True then).
    if not order.credit_verified: return
    user = UserService.get_user_by_id(order.user_id)
    phone = user.phone
    hospital = ItemService.get_hospital_dict_by_id(order.hospital_id)
    hospital_name = hospital['name']
    hospital_addr = hospital['addr']
    hospital_phone = hospital['phone']
    item = ItemService.get_item_dict_by_id(order.item_id)
    item_name = item['title']
    # SMS body fragment: "name,address,phone" of the hospital.
    desc = '{},{},{}'.format(hospital_name, hospital_addr, hospital_phone)
    print 'desc', desc  # NOTE(review): debug leftover - consider removing
    if count or send_verified or not need_pay:
        service_code= OrderService.create_servicecode(order.id)
        if order.credit_amount:
            # Credit was used: create the per-period repayment log entries.
            CreditService.gen_order_period_logs(order.id)
        # Notify the user (async Celery task) that the purchase succeeded.
        send_sms_new_order.delay(phone, item_name, desc, service_code)
def get_item_activity_price(item):
    '''Overwrite item['price'] in place with the current activity's price.

    Does nothing when no activity is running, or when the item does not
    participate in the running activity.
    '''
    current = ActivityService.get_current_activity()
    if not current:
        return
    participation = ItemService.get_item_activity(item['id'], current['id'])
    if not participation:
        return
    item['price'] = participation['price']
|
{"/admin/urls.py": ["/admin/views.py"], "/ops/room_design.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/util/sign.py": ["/settings.py"], "/ops/hospital.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/utils.py": ["/util/utils.py", "/models.py"], "/ops/credit.py": ["/models.py", "/util/sqlerr.py", "/util/utils.py", "/ops/utils.py", "/settings.py", "/constants.py"], "/user/api_urls.py": ["/user/auth.py", "/user/trial.py"], "/migrations/versions/3621ae6c4339_.py": ["/models.py"], "/ops/comment.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/273db5f3044f_.py": ["/models.py"], "/ops/notification.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/ops/activity.py": ["/models.py", "/util/utils.py", "/ops/utils.py"], "/migrations/versions/18e20ed0da8d_.py": ["/models.py"], "/hospital/urls.py": ["/hospital/views.py"], "/ops/coupon.py": ["/util/utils.py", "/models.py", "/ops/utils.py"], "/ops/log.py": ["/models.py", "/util/utils.py"], "/migrations/versions/55f4c256c989_.py": ["/models.py"], "/ops/beauty_tutorial.py": ["/models.py", "/ops/utils.py", "/util/utils.py"], "/migrations/versions/42e923c1238_.py": ["/models.py"], "/user/urls.py": ["/user/views.py", "/user/auth.py", "/user/trial.py", "/user/room_design.py", "/user/redpack.py", "/user/draw_money.py"], "/ops/actions.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py"], "/migrations/versions/36d5b6be1479_.py": ["/models.py"], "/ops/redpack.py": ["/util/sqlerr.py", "/util/utils.py", "/models.py", "/ops/utils.py", "/ops/cache.py", "/settings.py"], "/ops/user.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/ops/item.py": ["/models.py", "/util/utils.py", "/util/sqlerr.py", "/ops/utils.py"], "/migrations/versions/18e507e87862_.py": ["/models.py"], "/user/draw_money.py": ["/models.py", "/util/utils.py", "/util/decorators.py", "/util/validators.py", "/util/sign.py", 
"/util/drawgift.py", "/ops/bulks.py", "/ops/item.py", "/ops/data.py", "/ops/user.py", "/ops/redpack.py", "/ops/promote.py", "/ops/cache.py", "/ops/room_design.py", "/constants.py", "/thirdparty/sms.py", "/thirdparty/wechat.py", "/settings.py"], "/thirdparty/alipay/config.py": ["/settings.py"], "/promote/urls.py": ["/promote/views.py"], "/ops/admin.py": ["/util/sqlerr.py", "/models.py", "/ops/utils.py"], "/udp_server.py": ["/settings.py"], "/migrations/versions/4eefa5b6eb51_.py": ["/models.py"], "/demo.py": ["/thirdparty/wechat.py"], "/user/common.py": ["/models.py", "/ops/order.py", "/ops/coupon.py", "/ops/credit.py", "/constants.py"], "/models.py": ["/util/utils.py", "/settings.py", "/constants.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.