index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
344
|
ewheeler/nomenklatura
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
setup(
name='nomenklatura',
version='0.1',
description="Make record linkages on the web.",
long_description='',
classifiers=[
],
keywords='data mapping identity linkage record',
author='Open Knowledge Foundation',
author_email='info@okfn.org',
url='http://okfn.org',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
],
tests_require=[],
entry_points=\
""" """,
)
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
345
|
ewheeler/nomenklatura
|
refs/heads/master
|
/nomenklatura/model/__init__.py
|
from nomenklatura.model.dataset import Dataset
from nomenklatura.model.entity import Entity
from nomenklatura.model.account import Account
from nomenklatura.model.upload import Upload
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
346
|
ewheeler/nomenklatura
|
refs/heads/master
|
/nomenklatura/core.py
|
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask
from flask import url_for as _url_for
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.oauth import OAuth
from flask.ext.assets import Environment
import certifi
from kombu import Exchange, Queue
from celery import Celery
from nomenklatura import default_settings
logging.basicConfig(level=logging.DEBUG)
app = Flask(__name__)
app.config.from_object(default_settings)
app.config.from_envvar('NOMENKLATURA_SETTINGS', silent=True)
app_name = app.config.get('APP_NAME')
file_handler = RotatingFileHandler('/var/log/nomenklatura/errors.log',
maxBytes=1024 * 1024 * 100,
backupCount=20)
file_handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
file_handler.setFormatter(formatter)
app.logger.addHandler(file_handler)
if app.debug is not True:
from raven.contrib.flask import Sentry
sentry = Sentry(app, dsn=app.config.get('SENTRY_DSN'))
db = SQLAlchemy(app)
assets = Environment(app)
celery = Celery('nomenklatura', broker=app.config['CELERY_BROKER_URL'])
queue_name = app_name + '_q'
app.config['CELERY_DEFAULT_QUEUE'] = queue_name
app.config['CELERY_QUEUES'] = (
Queue(queue_name, Exchange(queue_name), routing_key=queue_name),
)
celery = Celery(app_name, broker=app.config['CELERY_BROKER_URL'])
celery.config_from_object(app.config)
oauth = OAuth()
github = oauth.remote_app('github',
base_url='https://github.com/login/oauth/',
authorize_url='https://github.com/login/oauth/authorize',
request_token_url=None,
access_token_url='https://github.com/login/oauth/access_token',
consumer_key=app.config.get('GITHUB_CLIENT_ID'),
consumer_secret=app.config.get('GITHUB_CLIENT_SECRET'))
github._client.ca_certs = certifi.where()
def url_for(*a, **kw):
try:
kw['_external'] = True
return _url_for(*a, **kw)
except RuntimeError:
return None
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
347
|
ewheeler/nomenklatura
|
refs/heads/master
|
/contrib/heroku_settings.py
|
import os
def bool_env(val):
"""Replaces string based environment values with Python booleans"""
return True if os.environ.get(val, 'False').lower() == 'true' else False
#DEBUG = True
SECRET_KEY = os.environ.get('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL',
os.environ.get('SHARED_DATABASE_URL'))
APP_NAME = os.environ.get('APP_NAME', 'nomenklatura')
GITHUB_CLIENT_ID = os.environ.get('GITHUB_CLIENT_ID')
GITHUB_CLIENT_SECRET = os.environ.get('GITHUB_CLIENT_SECRET')
MEMCACHE_HOST = os.environ.get('MEMCACHIER_SERVERS')
S3_BUCKET = os.environ.get('S3_BUCKET', 'nomenklatura')
S3_ACCESS_KEY = os.environ.get('S3_ACCESS_KEY')
S3_SECRET_KEY = os.environ.get('S3_SECRET_KEY')
CELERY_BROKER = os.environ.get('CLOUDAMQP_URL')
SIGNUP_DISABLED = bool_env('SIGNUP_DISABLED')
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
348
|
ewheeler/nomenklatura
|
refs/heads/master
|
/nomenklatura/default_settings.py
|
DEBUG = False
APP_NAME = 'nomenklatura'
CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'
ALLOWED_EXTENSIONS = set(['csv', 'tsv', 'ods', 'xls', 'xlsx', 'txt'])
SIGNUP_DISABLED = False
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
349
|
ewheeler/nomenklatura
|
refs/heads/master
|
/nomenklatura/views/sessions.py
|
import logging
import requests
from flask import url_for, session, Blueprint, redirect
from flask import request
from apikit import jsonify
from werkzeug.exceptions import Forbidden
from nomenklatura import authz
from nomenklatura.core import app, db, github
from nomenklatura.model import Account, Dataset
section = Blueprint('sessions', __name__)
@section.route('/sessions')
def status():
return jsonify({
'logged_in': authz.logged_in(),
'api_key': request.account.api_key if authz.logged_in() else None,
'account': request.account,
'base_url': url_for('index', _external=True)
})
@section.route('/sessions/authz')
def get_authz():
permissions = {}
dataset_name = request.args.get('dataset')
if dataset_name is not None:
dataset = Dataset.find(dataset_name)
permissions[dataset_name] = {
'view': True,
'edit': authz.dataset_edit(dataset),
'manage': authz.dataset_manage(dataset)
}
return jsonify(permissions)
@section.route('/sessions/login')
def login():
callback = url_for('sessions.authorized', _external=True)
return github.authorize(callback=callback)
@section.route('/sessions/logout')
def logout():
logging.info(authz.require(authz.logged_in()))
session.clear()
return redirect('/')
@section.route('/sessions/callback')
@github.authorized_handler
def authorized(resp):
if 'access_token' not in resp:
return redirect(url_for('index', _external=True))
access_token = resp['access_token']
session['access_token'] = access_token, ''
res = requests.get('https://api.github.com/user?access_token=%s' % access_token,
verify=False)
data = res.json()
for k, v in data.items():
session[k] = v
account = Account.by_github_id(data.get('id'))
if account is None:
if app.config.get('SIGNUP_DISABLED'):
raise Forbidden("Sorry, account creation is disabled")
account = Account.create(data)
db.session.commit()
return redirect('/')
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
350
|
ewheeler/nomenklatura
|
refs/heads/master
|
/nomenklatura/assets.py
|
from flask.ext.assets import Bundle
from nomenklatura.core import assets
deps_assets = Bundle(
'vendor/jquery/dist/jquery.js',
'vendor/bootstrap/js/collapse.js',
'vendor/angular/angular.js',
'vendor/angular-route/angular-route.js',
'vendor/angular-bootstrap/ui-bootstrap-tpls.js',
'vendor/ngUpload/ng-upload.js',
filters='uglifyjs',
output='assets/deps.js'
)
app_assets = Bundle(
'js/app.js',
'js/services/session.js',
'js/directives/pagination.js',
'js/directives/keybinding.js',
'js/directives/authz.js',
'js/controllers/app.js',
'js/controllers/import.js',
'js/controllers/home.js',
'js/controllers/docs.js',
'js/controllers/review.js',
'js/controllers/datasets.js',
'js/controllers/entities.js',
'js/controllers/profile.js',
filters='uglifyjs',
output='assets/app.js'
)
css_assets = Bundle(
'vendor/bootstrap/less/bootstrap.less',
'vendor/font-awesome/less/font-awesome.less',
'style/style.less',
filters='less,cssrewrite',
output='assets/style.css'
)
assets.register('deps', deps_assets)
assets.register('app', app_assets)
assets.register('css', css_assets)
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
351
|
ewheeler/nomenklatura
|
refs/heads/master
|
/nomenklatura/__init__.py
|
# shut up useless SA warning:
import warnings
warnings.filterwarnings('ignore', 'Unicode type received non-unicode bind param value.')
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
352
|
ewheeler/nomenklatura
|
refs/heads/master
|
/nomenklatura/manage.py
|
from normality import normalize
from flask.ext.script import Manager
from flask.ext.assets import ManageAssets
from nomenklatura.core import db
from nomenklatura.model import Entity
from nomenklatura.views import app
from nomenklatura.assets import assets
manager = Manager(app)
manager.add_command('assets', ManageAssets(assets))
@manager.command
def createdb():
""" Make the database. """
db.engine.execute("CREATE EXTENSION IF NOT EXISTS hstore;")
db.engine.execute("CREATE EXTENSION IF NOT EXISTS fuzzystrmatch;")
db.create_all()
@manager.command
def flush(dataset):
ds = Dataset.by_name(dataset)
for alias in Alias.all_unmatched(ds):
db.session.delete(alias)
db.session.commit()
if __name__ == '__main__':
manager.run()
|
{"/nomenklatura/core.py": ["/nomenklatura/__init__.py"], "/nomenklatura/views/sessions.py": ["/nomenklatura/__init__.py", "/nomenklatura/core.py", "/nomenklatura/model/__init__.py"], "/nomenklatura/assets.py": ["/nomenklatura/core.py"], "/nomenklatura/manage.py": ["/nomenklatura/core.py", "/nomenklatura/model/__init__.py", "/nomenklatura/assets.py"]}
|
353
|
devikadayanand16/todo
|
refs/heads/main
|
/todolist/forms.py
|
from django import forms
class TodoListForm(forms.Form):
text = forms.CharField(max_length=50,
widget=forms.TextInput(
attrs={'class':'form-control','placeholder':'Enter todo e.g. Grocery Shopping', 'aria-label':'Todo', 'aria-describeby':'add-btn'}))
|
{"/todolist/views.py": ["/todolist/models.py", "/todolist/forms.py"]}
|
354
|
devikadayanand16/todo
|
refs/heads/main
|
/todolist/models.py
|
from django.db import models
class Todolist(models.Model):
text=models.CharField(max_length=50)
completed=models.BooleanField(default=False)
def __str__(self):
return self.text
|
{"/todolist/views.py": ["/todolist/models.py", "/todolist/forms.py"]}
|
355
|
devikadayanand16/todo
|
refs/heads/main
|
/todolist/views.py
|
from django.shortcuts import render, redirect
from .models import Todolist
from .forms import TodoListForm
from django.views.decorators.http import require_POST
def index(request):
todo_items=Todolist.objects.order_by('id')
form = TodoListForm()
context = {'todo_items' : todo_items, 'form' : form }
return render(request, 'todolist/index.html', context)
@require_POST
def addTodoItem(request):
form=TodoListForm(request.POST)
if form.is_valid():
new_todo = Todolist(text=request.POST['text'])
new_todo.save()
return redirect('index')
def completedTodo(request, todo_id):
todo= Todolist.objects.get(pk=todo_id)
todo.completed=True
todo.save()
return redirect('index')
def deleteCompleted(request):
Todolist.objects.filter(completed__exact=True).delete()
return redirect('index')
def deleteAll(request):
Todolist.objects.all().delete()
return redirect('index')
|
{"/todolist/views.py": ["/todolist/models.py", "/todolist/forms.py"]}
|
357
|
FUZIK/secret_punto
|
refs/heads/master
|
/main.py
|
import tg_manager_bot.bot as manager_bot
if __name__ == '__main__':
manager_bot.main()
|
{"/core/database_adapter.py": ["/config.py"]}
|
358
|
FUZIK/secret_punto
|
refs/heads/master
|
/config.py
|
DATABASE_HOST = "ec2-54-247-169-129.eu-west-1.compute.amazonaws.com"
DATABASE_PORT = 5432
DATABASE_NAME = "dbm6aqb8gc2vd3"
DATABASE_USER = "lwejloxflohbkt"
DATABASE_PASSWORD = "963dca4e85ea295a09653fad768c530c2035732fd07800146d04b9ebc28186ca"
# PuntoManagerBot
MANAGER_TG_BOT_TOKEN = "1161956935:AAEelrfE2ksdxAjdanj-Uq1kIkjnFqAX1us"
|
{"/core/database_adapter.py": ["/config.py"]}
|
359
|
FUZIK/secret_punto
|
refs/heads/master
|
/core/database_adapter.py
|
import config
from peewee import PostgresqlDatabase, Model, AutoField, IntegerField, TextField, ForeignKeyField, TimestampField
from playhouse.postgres_ext import ArrayField, BlobField
_connection = PostgresqlDatabase(config.DATABASE_NAME,
host=config.DATABASE_HOST,
port=config.DATABASE_PORT,
user=config.DATABASE_USER,
password=config.DATABASE_PASSWORD)
class _BaseModel(Model):
id = AutoField()
class Meta:
database = _connection
class _NamedModel(_BaseModel):
name = TextField()
class Category(_NamedModel):
pass
class Brand(_NamedModel):
pass
class UserRole(_NamedModel):
pass
class MediaResource(_BaseModel):
content = BlobField()
telegram_upload_id = TextField()
class User(_BaseModel):
user_role = ForeignKeyField(UserRole, column_name="user_role_id")
telegram_user_id = TextField()
telegram_username = TextField()
first_name = TextField()
class Item(_BaseModel):
title = TextField()
description = TextField()
category = ForeignKeyField(Category, column_name="category_id")
brand = ForeignKeyField(Brand, column_name="brand_id")
price = IntegerField()
in_stock = IntegerField()
media_resources = ArrayField(IntegerField, column_name='media_resource_ids')
# edited_by = IntegerField()
# created_at = TimestampField()
# updated_at = TimestampField()
# flags_ids is unused
|
{"/core/database_adapter.py": ["/config.py"]}
|
375
|
xergio/redtorrent
|
refs/heads/master
|
/tracker/views.py
|
# -*- coding: utf-8 -*-
import django
from django.shortcuts import render_to_response
from django.http import HttpResponse
from tracker.models import AnnounceForm, ScrapeForm, Store
import sys
import socket
import bencode
import struct
import time
import redis
"""
http://bittorrent.org/beps/bep_0003.html
http://wiki.theory.org/BitTorrentSpecification#Tracker_HTTP.2FHTTPS_Protocol
/announce?
info_hash=gK%91d%e0%ec%fc%c0G%c1%0a%9bD8%85%a9%99%88%27%da&
peer_id=-TR2330-fnovv1t92c12&
port=51413&
uploaded=0&
downloaded=0&
left=0&
numwant=80&
key=6083d376&
compact=1&
supportcrypto=1&
event=started
/scrape?
info_hash=gK%91d%e0%ec%fc%c0G%c1%0a%9bD8%85%a9%99%88%27%da
start complete torrent
[07/Apr/2012 08:55:26] "GET /announce?info_hash=7%cc%08%1fG%60%a6%ab%05%1d%b8%d6%fa%d6%cd%2b%a1gl%98&peer_id=-TR2500-46xugddkkm12&port=51413&uploaded=0&downloaded=0&left=0&numwant=80&key=5085515f&compact=1&supportcrypto=1&event=started HTTP/1.1" 200 25
ping complete torrent
[07/Apr/2012 08:56:27] "GET /announce?info_hash=7%cc%08%1fG%60%a6%ab%05%1d%b8%d6%fa%d6%cd%2b%a1gl%98&peer_id=-TR2500-46xugddkkm12&port=51413&uploaded=0&downloaded=0&left=0&numwant=80&key=5085515f&compact=1&supportcrypto=1 HTTP/1.1" 200 25
start nuevo cliente
[07/Apr/2012 08:56:01] "GET /announce?info_hash=7%cc%08%1fG%60%a6%ab%05%1d%b8%d6%fa%d6%cd%2b%a1gl%98&peer_id=M7-2-2--%c9d%e2%b2T%85%f8%93%ce%d9%ac%1d&port=15644&uploaded=0&downloaded=0&left=733261824&corrupt=0&key=6C7ED1C1&event=started&numwant=200&compact=1&no_peer_id=1&ipv6=fe80%3a%3a21c%3ab3ff%3afec5%3aa4a1 HTTP/1.1" 200 25
ping nuevo cliente
[07/Apr/2012 08:57:02] "GET /announce?info_hash=7%cc%08%1fG%60%a6%ab%05%1d%b8%d6%fa%d6%cd%2b%a1gl%98&peer_id=M7-2-2--%c9d%e2%b2T%85%f8%93%ce%d9%ac%1d&port=15644&uploaded=0&downloaded=0&left=733261824&corrupt=0&key=6C7ED1C1&numwant=200&compact=1&no_peer_id=1&ipv6=fe80%3a%3a21c%3ab3ff%3afec5%3aa4a1 HTTP/1.1" 200 25
"""
def announce(request):
qs = request.GET.copy()
qs.update({'ip': request.GET.get('ip') or request.META.get('REMOTE_ADDR')})
ann = AnnounceForm(qs.dict())
if not ann.is_valid():
raise Exception(ann.errors)
qs = ann.cleaned_data
r = Store(host='localhost')
r.set_info(qs['info_hash'], qs['peer_id'])
# save ALL the params!
r.save_peer(qs)
# save ALL the states!
if qs['event'] == 'completed':
r.add_seeder()
r.del_leecher()
elif qs['event'] == 'stopped':
r.del_seeder()
r.del_leecher()
r.delete_peer()
else:
if qs['left'] == 0:
r.add_seeder()
r.del_leecher()
else:
r.add_seeder()
r.add_leecher()
# get ALL the peers!
nmembers = r.len_seeders()
if nmembers < qs['numwant']:
peer_ids = r.all_seeders()
elif nmembers > 0:
peer_ids = r.get_seeders(qs['numwant'])
else:
peer_ids = set()
# clean ALL the peers!
peers_data = []
now = time.time()
for peer_id in peer_ids:
data = r.get_peer(peer_id)
if not data or int(data['seen']) < now-(60*2):
r.del_peer(peer_id)
else:
peers_data.append(data)
# send ALL the peers
if qs['compact']:
peers_l = ""
for peer in peers_data:
peers_l += struct.pack('>4sH', socket.inet_aton(peer['ip']), int(peer['port']))
elif qs['no_peer_id']:
peers_l = []
for peer in peers_data:
peers_l.append({'ip': peer['ip'], 'port': int(peer['port'])})
else:
peers_l = []
for peer in peers_data:
peers_l.append({'peer id': peer['peer_id'], 'ip': peer['ip'], 'port': peer['port']})
try:
return HttpResponse(
bencode.bencode({
'interval': 60,
'peers': peers_l
}),
content_type = 'text/plain'
)
except:
return response_fail(sys.exc_info()[1])
def scrape(request):
"""qs = request.GET.copy()
scp = ScrapeForm(qs.dict())
if not scp.is_valid():
raise Exception(scp.errors)
qs = scp.cleaned_data
r = redis.Redis(host='localhost')
seeders_key = 'redtracker:seeders:'+ qs['info_hash']
leechers_key = 'redtracker:leechers:'+ qs['info_hash']
return HttpResponse(
bencode.bencode({
'files': {
qs['info_hash']: {
'complete': r.sdiffstore('tmp', seeders_key, leechers_key),
'incomplete': r.scard(leechers_key),
'downloaded': 0 #TODO
}
}
}),
content_type = 'text/plain'
)"""
return render_to_response('tracker/scrape.html', {})
def response_fail(reason):
return HttpResponse(
bencode.bencode({'failure reason': reason or 'unknown'}),
content_type = 'text/plain'
)
|
{"/tracker/views.py": ["/tracker/models.py"]}
|
376
|
xergio/redtorrent
|
refs/heads/master
|
/tracker/models.py
|
# -*- coding: utf-8 -*-
from django import forms
import redis
import time
class AnnounceForm(forms.Form):
info_hash = forms.CharField(max_length=100)
peer_id = forms.CharField(max_length=100)
port = forms.IntegerField()
uploaded = forms.IntegerField()
downloaded = forms.IntegerField()
left = forms.IntegerField()
compact = forms.BooleanField(required=False, initial=False)
no_peer_id = forms.BooleanField(required=False, initial=False)
event = forms.CharField(max_length=9, required=False)
ip = forms.CharField(max_length=100, required=False)
numwant = forms.IntegerField(required=False, initial=50)
key = forms.CharField(max_length=20, required=False)
trackerid = forms.CharField(max_length=20, required=False)
supportcrypto = forms.BooleanField(required=False, initial=False)
requirecrypto = forms.BooleanField(required=False, initial=False)
def clean_event(self):
event = self.cleaned_data['event'].strip()
if event not in ['started', 'completed', 'stopped'] and len(event) > 0:
raise forms.ValidationError("event '%s' is invalid." % event)
return event
class ScrapeForm(forms.Form):
info_hash = forms.CharField(max_length=100)
class Store(redis.Redis):
def set_info(self, info_hash, peer_id):
self.info_hash = info_hash
self.peer_id = peer_id
self.peer_key = "redtorrent:peer:%s" % self.peer_id
self.seeders_key = "redtracker:seeders:%s" % self.info_hash
self.leechers_key = "redtracker:leechers:%s" % self.info_hash
def save_peer(self, data):
data.update({'seen': int(time.time())})
return self.hmset(self.peer_key, data)
def delete_peer(self):
return self.delete(self.peer_key)
def get_peer(self, peer_id):
return self.hgetall(u"redtorrent:peer:%s" % peer_id)
def del_peer(self, peer_id):
self.srem(self.seeders_key, peer_id)
self.srem(self.leechers_key, peer_id)
return self.delete(u"redtorrent:peer:%s" % peer_id)
def add_seeder(self):
return self.sadd(self.seeders_key, self.peer_id)
def del_seeder(self):
return self.srem(self.seeders_key, self.peer_id)
def add_leecher(self):
return self.sadd(self.leechers_key, self.peer_id)
def del_leecher(self):
return self.srem(self.leechers_key, self.peer_id)
def len_seeders(self):
return self.scard(self.seeders_key)
def len_leechers(self):
return self.scard(self.leechers_key)
def all_seeders(self):
return self.smembers(self.seeders_key)
def get_seeders(self, num=50):
peer_ids = set()
i = 0
while len(peer_ids) < num and i < 1000:
peer_ids.add(self.srandmember(self.seeders_key))
i += 1
return peer_ids
|
{"/tracker/views.py": ["/tracker/models.py"]}
|
377
|
xergio/redtorrent
|
refs/heads/master
|
/redtorrent/urls.py
|
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^$', 'tracker.views.announce', name='announce'),
url(r'^announce', 'tracker.views.announce', name='announce'),
url(r'^scrape$', 'tracker.views.scrape', name='scrape'),
)
|
{"/tracker/views.py": ["/tracker/models.py"]}
|
380
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/apps.py
|
from django.apps import AppConfig
class OhjelmaConfig(AppConfig):
name = 'ohjelma'
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
381
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/migrations/0003_song_release_year.py
|
# Generated by Django 3.0.2 on 2020-03-15 16:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ohjelma', '0002_song'),
]
operations = [
migrations.AddField(
model_name='song',
name='release_year',
field=models.IntegerField(default=2000),
),
]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
382
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/urls.py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name = 'home'),
path('songs/', views.SongList.as_view(), name = 'song_list'),
path('view/<int:pk>', views.SongView.as_view(), name = 'song_view'),
path('new', views.SongCreate.as_view(), name = 'song_new'),
path('view/<int:pk>', views.SongView.as_view(), name = 'song_view'),
path('edit/<int:pk>', views.SongUpdate.as_view(), name = 'song_edit'),
path('delete/<int:pk>', views.SongDelete.as_view(), name = 'song_delete'),
path('tracks/', views.TrackView, name = 'track_list'),
path('yearanalysis/', views.YearAnalysis, name = 'year_analysis'),
path('analysis/<int:pk>', views.Analysis.as_view(), name = 'track_detail'),
#url(r'^tracks/(?P<tracksyear>\w+)/$', views.TrackView, name = "TrackView")
path('tracks/<int:tracksyear>', views.TrackView, name = "TrackView")
]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
383
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/migrations/0002_song.py
|
# Generated by Django 3.0.2 on 2020-03-13 17:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ohjelma', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Song',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('song_name', models.CharField(max_length=200)),
('song_artist', models.CharField(max_length=200)),
],
),
]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
384
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/migrations/0005_auto_20200329_1313.py
|
# Generated by Django 3.0.2 on 2020-03-29 10:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ohjelma', '0004_track'),
]
operations = [
migrations.AlterField(
model_name='track',
name='track_duration',
field=models.CharField(max_length=5),
),
]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
385
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/migrations/0007_track_track_id.py
|
# Generated by Django 3.0.2 on 2020-04-11 18:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ohjelma', '0006_auto_20200329_1329'),
]
operations = [
migrations.AddField(
model_name='track',
name='track_id',
field=models.CharField(default=0, max_length=30),
preserve_default=False,
),
]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
386
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from ohjelma.models import Song
from ohjelma.models import Track
import json
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
def index(request):
return HttpResponse('Welcome.')
class SongList(ListView):
model = Song
class SongView(DetailView):
model = Song
class SongCreate(CreateView):
model = Song
fields = ['song_name', 'song_artist', 'release_year']
success_url = reverse_lazy('song_list')
class SongUpdate(UpdateView):
model = Song
fields = ['song_name', 'song_artist', 'release_year']
success_url = reverse_lazy('song_list')
class SongDelete(DeleteView):
model = Song
success_url = reverse_lazy('song_list')
#Formatting the duration time
#Takes milliseconds as parameter and returns a string mm:ss
def MsFormat(milliseconds):
dur_s = (milliseconds/1000)%60
dur_s = int(dur_s)
if dur_s < 10:
dur_s = "0{}".format(dur_s)
dur_m = (milliseconds/(1000*60))%60
dur_m = int(dur_m)
dur = "{}:{}".format(dur_m, dur_s)
return dur
def TrackView(request, tracksyear):
Track.objects.all().delete() #Clear old info
query = 'year:{}'.format(tracksyear)
#Spotify developer keys
cid = '8f91d5aff7b54e1e93daa49f123d9ee9'
secret = 'f23421ee54b144cabeab9e2dbe9104a7'
client_credentials_manager = SpotifyClientCredentials(client_id=cid, client_secret=secret)
sp = spotipy.Spotify(client_credentials_manager = client_credentials_manager)
#Lists for counting year averages
l_dance = []
l_en = []
l_aco = []
l_val = []
for i in range(0,100,50):
track_results = sp.search(q=query, type='track', limit=50,offset=i)
for i, t in enumerate(track_results['tracks']['items']):
id = t['id']
artist = t['artists'][0]['name']
song = t['name']
dur_ms = t['duration_ms']
pop = t['popularity']
dur = MsFormat(dur_ms)
trackinfo = sp.audio_features(id)
dance = trackinfo[0]['danceability']
en = trackinfo[0]['energy']
key = trackinfo[0]['key']
loud = trackinfo[0]['loudness']
spee = trackinfo[0]['speechiness']
aco = trackinfo[0]['acousticness']
inst = trackinfo[0]['instrumentalness']
live = trackinfo[0]['liveness']
val = trackinfo[0]['valence']
temp = trackinfo[0]['tempo']
l_dance.append(dance)
l_en.append(en)
l_aco.append(aco)
l_val.append(val)
Track.objects.create(track_id = id, track_artist = artist,
track_name = song, track_duration = dur, track_popularity = pop,
track_danceability = dance, track_energy = en, track_key = key,
track_loudness = loud, track_speechiness = spee,
track_acousticness = aco, track_instrumentalness = inst,
track_liveness = live, track_valence = val, track_tempo = temp)
avgdance = calculate_average(l_dance)*100
avgene = calculate_average(l_en)*100
avgaco = calculate_average(l_aco)*100
avgval = calculate_average(l_val)*100
alltracks = Track.objects.all()
context = {'alltracks': alltracks, 'year': tracksyear, 'avgdance': avgdance, 'avgene': avgene, 'avgaco': avgaco, 'avgval': avgval}
return render(request, 'tracks.html', context)
#View for each track detailed information
class Analysis(DetailView):
model = Track
#Takes a list (of numbers) as parameter, returns the average
def calculate_average(num):
sum_num = 0
for t in num:
sum_num = sum_num + t
avg = sum_num / len(num)
return avg
#View for analytics
def YearAnalysis(request):
#Spotify developer keys
cid = '8f91d5aff7b54e1e93daa49f123d9ee9'
secret = 'f23421ee54b144cabeab9e2dbe9104a7'
client_credentials_manager = SpotifyClientCredentials(client_id=cid, client_secret=secret)
sp = spotipy.Spotify(client_credentials_manager = client_credentials_manager)
#Lists for saving yearly averages
dance = []
en = []
aco = []
val = []
years = []
most_populars = []
most_danceable = ""
best_dance = 0
happiest = ""
best_val = 0
most_acoustic = ""
best_aco = 0
most_energetic = ""
best_en = 0
for year in range (1980, 2020):
bestpop = 0
mostpop = ""
l_dance = []
l_en = []
l_aco = []
l_val = []
for i in range(0,100,50):
query = 'year:{}'.format(year)
track_results = sp.search(q=query, type='track', limit=50, offset=i)
for i, t in enumerate(track_results['tracks']['items']):
#Popularity check
pop = t['popularity']
if pop > bestpop:
mostpop = "{} by {}. Popularity: {}.".format(t['name'], t['artists'][0]['name'], pop)
bestpop = pop
elif pop == bestpop:
mostpop = mostpop + " AND {} by {}. Popularity: {}.".format(t['name'], t['artists'][0]['name'], pop)
id = t['id']
trackinfo = sp.audio_features(id)
d = trackinfo[0]['danceability']
e = trackinfo[0]['energy']
a = trackinfo[0]['acousticness']
v = trackinfo[0]['valence']
l_dance.append(d)
l_en.append(e)
l_aco.append(a)
l_val.append(v)
if d > best_dance:
most_danceable = "{} by {}. ({}) Danceability: {}.".format(t['name'], t['artists'][0]['name'], year, d)
best_dance = d
elif d == best_dance:
most_danceable = most_danceable + " AND {} by {}. ({}) Danceability: {}.".format(t['name'], t['artists'][0]['name'], year, d)
if e > best_en:
most_energetic = "{} by {}. ({}) Energy: {}.".format(t['name'], t['artists'][0]['name'], year, e)
best_en = e
elif e == best_en:
most_energetic = most_energetic + " AND {} by {}. ({}) Energy: {}.".format(t['name'], t['artists'][0]['name'], year, e)
if a > best_aco:
most_acoustic = "{} by {}. ({}) Acousticness: {}.".format(t['name'], t['artists'][0]['name'], year, a)
best_aco = a
elif a == best_aco:
most_acoustic = most_acoustic + " AND {} by {}. ({}) Acousticness: {}.".format(t['name'], t['artists'][0]['name'], year, a)
if v > best_val:
happiest = "{} by {}. ({}) Valence: {}.".format(t['name'], t['artists'][0]['name'], year, v)
best_val = v
elif v == best_val:
happiest = happiest + " AND {} by {}. ({}) Valence: {}.".format(t['name'], t['artists'][0]['name'], year, v)
#Calculate year averages and add to lists
dance.append(calculate_average(l_dance))
en.append(calculate_average(l_en))
aco.append(calculate_average(l_aco))
val.append(calculate_average(l_val))
years.append(year)
most_populars.append(mostpop)
#Zip year and most popular song to a list of 2-valued tuples
yearly_populars = zip(years, most_populars)
context = {"years": years, "danceability": dance, "energy": en,
"acousticness": aco, "valence": val, "yearly_populars": yearly_populars,
"most_acoustic": most_acoustic, "most_energetic": most_energetic,
"most_danceable": most_danceable, "happiest": happiest}
return render(request, 'analysis.html', context)
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
387
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/models.py
|
from django.db import models
from django.urls import reverse
class Question(models.Model):
    """A poll question with its publication timestamp."""
    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('Date published')
class Choice(models.Model):
    """One answer option for a Question, with a running vote tally."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)
class Song(models.Model):
    """User-entered song (name, artist, release year)."""
    song_name = models.CharField(max_length=200)
    song_artist = models.CharField(max_length = 200)
    release_year = models.IntegerField(default=2000)
    def __str__(self):
        """Human-readable label (used in admin and templates)."""
        return self.song_name
    def get_absolute_url(self):
        """Canonical URL for this object: its edit page."""
        return reverse('song_edit', kwargs={'pk': self.pk})
class Track(models.Model):
    """Audio-feature snapshot of a single Spotify track.

    One row per track; numeric columns mirror the Spotify audio-features
    payload (danceability, energy, valence, ...).
    """
    track_id = models.CharField(max_length=30)
    track_name = models.CharField(max_length=500)
    track_artist = models.CharField(max_length=500)
    track_duration = models.CharField(max_length=10)
    track_popularity = models.IntegerField(default=100)
    # Fix: ``max_length`` is not a valid option for IntegerField/FloatField —
    # Django ignores it and flags it with system-check warnings (fields.W122),
    # so it has been dropped from every numeric field below.
    track_danceability = models.FloatField()
    track_energy = models.FloatField()
    track_key = models.IntegerField()
    track_loudness = models.FloatField()
    track_speechiness = models.FloatField()
    track_acousticness = models.FloatField()
    track_instrumentalness = models.FloatField()
    track_liveness = models.FloatField()
    track_valence = models.FloatField()
    track_tempo = models.FloatField()

    def __str__(self):
        """Human-readable label (used in admin and templates)."""
        return self.track_name
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
388
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/migrations/0006_auto_20200329_1329.py
|
# Generated by Django 3.0.2 on 2020-03-29 10:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: change Track.track_duration to CharField(max_length=10)."""

    dependencies = [
        ('ohjelma', '0005_auto_20200329_1313'),
    ]
    operations = [
        migrations.AlterField(
            model_name='track',
            name='track_duration',
            field=models.CharField(max_length=10),
        ),
    ]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
389
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/migrations/0009_auto_20200411_2211.py
|
# Generated by Django 3.0.2 on 2020-04-11 19:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the remaining audio-feature columns to Track.

    ``default=0`` with ``preserve_default=False`` backfills existing rows once
    without keeping the default on the model.
    """

    dependencies = [
        ('ohjelma', '0008_track_track_danceability'),
    ]
    operations = [
        migrations.AddField(
            model_name='track',
            name='track_acousticness',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_energy',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_instrumentalness',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_key',
            field=models.IntegerField(default=0, max_length=3),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_liveness',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_loudness',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_speechiness',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_tempo',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='track',
            name='track_valence',
            field=models.FloatField(default=0, max_length=10),
            preserve_default=False,
        ),
    ]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
390
|
katrii/ohsiha
|
refs/heads/master
|
/ohjelma/migrations/0004_track.py
|
# Generated by Django 3.0.2 on 2020-03-28 23:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create the initial Track model."""

    dependencies = [
        ('ohjelma', '0003_song_release_year'),
    ]
    operations = [
        migrations.CreateModel(
            name='Track',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('track_name', models.CharField(max_length=500)),
                ('track_artist', models.CharField(max_length=500)),
                ('track_duration', models.IntegerField(default=200000)),
                ('track_popularity', models.IntegerField(default=100)),
            ],
        ),
    ]
|
{"/ohjelma/views.py": ["/ohjelma/models.py"]}
|
394
|
Bthelisma/repTravelbuddy
|
refs/heads/master
|
/apps/travel_app/migrations/0003_trip.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-12-27 10:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: create the Trip model (creator FK + travellers m2m)."""

    dependencies = [
        ('travel_app', '0002_auto_20171227_0048'),
    ]
    operations = [
        migrations.CreateModel(
            name='Trip',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('destination', models.CharField(max_length=255)),
                ('description', models.CharField(max_length=255)),
                ('travelfrom', models.DateTimeField(auto_now_add=True)),
                ('travelto', models.DateTimeField(auto_now_add=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('my_trip', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='planner', to='travel_app.User')),
                ('travellers', models.ManyToManyField(related_name='joiner', to='travel_app.User')),
            ],
        ),
    ]
|
{"/apps/travel_app/views.py": ["/apps/travel_app/models.py"]}
|
395
|
Bthelisma/repTravelbuddy
|
refs/heads/master
|
/apps/travel_app/migrations/0004_auto_20171227_0320.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-12-27 11:20
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename Trip.my_trip to Trip.created_by."""

    dependencies = [
        ('travel_app', '0003_trip'),
    ]
    operations = [
        migrations.RenameField(
            model_name='trip',
            old_name='my_trip',
            new_name='created_by',
        ),
    ]
|
{"/apps/travel_app/views.py": ["/apps/travel_app/models.py"]}
|
396
|
Bthelisma/repTravelbuddy
|
refs/heads/master
|
/apps/travel_app/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import bcrypt
import datetime
from django.db import models
class UserManager(models.Manager):
    """Validation-centric manager for User.

    Both validators return a User instance on success or a list of error
    strings on failure; callers dispatch on the return type.
    """

    def register_validate(self, postData):
        """Validate registration data; create and return the User, or errors."""
        errors = []
        name = postData['name']
        username = postData['username']
        password = postData['password']
        cpassword = postData['cpassword']
        if not name or not username or not password or not cpassword:
            errors.append("All fields are required")
        # name: at least 3 characters
        if len(name) < 3:
            errors.append("name fields should be at least 3 characters")
        # username: non-empty
        if len(username) < 1:
            errors.append("Username cannot be blank")
        # password: minimum length, then confirmation match
        if len(password) < 8:
            errors.append("password must be at least 8 characters")
        elif password != cpassword:
            errors.append("password must be match")
        if not errors:
            if User.objects.filter(username=username):
                errors.append("username is not unique")
            else:
                # store only the bcrypt hash, never the raw password
                hashed = bcrypt.hashpw(password.encode(), bcrypt.gensalt(5))
                return self.create(
                    name=name,
                    username=username,
                    password=hashed
                )
        return errors

    def login_validate(self, postData):
        """Check credentials; return the matching User or a list of errors."""
        errors = []
        password = postData['password']
        username = postData['username']
        try:
            user = self.get(username=username)
            if bcrypt.checkpw(password.encode(), user.password.encode()):
                return user
        except Exception:
            # Fix: was a bare ``except:`` which also swallowed SystemExit and
            # KeyboardInterrupt. Lookup failures and malformed stored hashes
            # both fall through to the generic error below.
            pass
        errors.append('Invalid login info')
        return errors
class TripManager(models.Manager):
    """Validation-centric manager for Trip creation."""
    def trip_validate(self, postData, id):
        """Validate trip form data; create and return the Trip, or a list of errors."""
        errors=[]
        destination=postData['destination']
        description=postData['description']
        start_date=postData['start_date']
        end_date=postData['end_date']
        # NOTE(review): these compare date *strings* lexicographically; with
        # the '%m-%d-%Y' format that is wrong across year boundaries
        # (e.g. '01-01-2021' < '12-31-2020'). Parsing both sides with
        # datetime.strptime before comparing would fix it — confirm the exact
        # format the form posts before changing.
        if start_date < datetime.datetime.now().strftime('%m-%d-%Y'):
            errors.append('Start Date must be after today')
        elif start_date > end_date:
            # only reached when the first check passes (elif chain)
            errors.append('End Date must be after Start Date')
        # NOTE(review): 'destination' is checked twice; 'description' looks
        # like the intended second operand.
        if not destination or not destination or not start_date or not end_date:
            errors.append( "All fields are required")
        if len(destination)<1:
            errors.append('please enter a destination')
        if len(description)<1:
            errors.append('please enter a description')
        if not errors:
            user = User.objects.get(id=id)
            trip = self.create(
                destination = destination,
                description = description,
                start_date = start_date,
                end_date= end_date,
                created_by = user
            )
            # the creator automatically joins their own trip
            trip.travellers.add(user)
            return trip
        return errors
class User(models.Model):
    """Registered user; ``password`` holds a bcrypt hash (see UserManager)."""
    name = models.CharField(max_length=255)
    username = models.CharField(max_length=255)
    password = models.CharField(max_length=255)
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    objects = UserManager()
class Trip(models.Model):
    """A planned trip: one creator plus a many-to-many set of travellers."""
    destination = models.CharField(max_length=255)
    description = models.CharField(max_length=255)
    start_date = models.DateTimeField()
    end_date = models.DateTimeField()
    # no on_delete: Django 1.x style (defaults to CASCADE), per the migrations
    created_by = models.ForeignKey(User, related_name="planner")
    travellers = models.ManyToManyField(User, related_name="joiner")
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    objects=TripManager()
|
{"/apps/travel_app/views.py": ["/apps/travel_app/models.py"]}
|
397
|
Bthelisma/repTravelbuddy
|
refs/heads/master
|
/apps/travel_app/views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render, redirect
from .models import User
from .models import Trip
from django.contrib import messages
#==================================================#
# RENDER METHODS #
#==================================================#
def index(request):
    """Landing page: list every registered user."""
    all_users = User.objects.all()
    return render(request, "travel_app/index.html", {'users': all_users})
def addplan(request):
    """Render the blank new-trip form."""
    return render(request, "travel_app/new.html")
def dashboard(request):
    """Dashboard: the user's trips vs trips they could join.

    A missing 'user_id' session key (not logged in) raises KeyError and
    redirects to the landing page.
    """
    try:
        context = {
            'user': User.objects.get(id=request.session['user_id']),
            'my_trips':Trip.objects.filter(travellers=request.session['user_id']),
            'other_plans': Trip.objects.exclude(travellers=request.session['user_id']),
        }
        return render (request, "travel_app/dashboard.html", context)
    except KeyError:
        return redirect('/')
def show(request, id):
    """Trip detail page plus the trips the current user has not joined."""
    context={
        'trip':Trip.objects.get(id=id),
        'jointrips': Trip.objects.exclude(travellers =request.session['user_id'])
    }
    return render (request, "travel_app/show.html", context)
#==================================================#
# PROCESS METHODS #
#==================================================#
def register(request):
    """Process the sign-up form; on success start a session and go to the dashboard.

    The validator returns a list of error strings on failure or a User on
    success (we dispatch on the return type).
    """
    result = User.objects.register_validate(request.POST)
    # Fix: ``type(result) == list`` replaced with the idiomatic isinstance check.
    if isinstance(result, list):
        for error in result:
            messages.error(request, error)
        return redirect('/')
    request.session['user_id'] = result.id
    return redirect('/dashboard')
def login(request):
    """Process the sign-in form; on success start a session and go to the dashboard.

    The validator returns a list of error strings on failure or a User on
    success (we dispatch on the return type).
    """
    result = User.objects.login_validate(request.POST)
    # Fix: ``type(result) == list`` replaced with the idiomatic isinstance check.
    if isinstance(result, list):
        for error in result:
            messages.error(request, error)
        return redirect ("/")
    request.session['user_id'] = result.id
    return redirect("/dashboard")
def logout(request):
    """Clear the whole session and return to the landing page."""
    request.session.clear()
    return redirect('/')
def create(request):
    """Validate and create a new trip for the signed-in user.

    The validator returns a list of error strings on failure or a Trip on
    success (we dispatch on the return type).
    """
    result = Trip.objects.trip_validate(request.POST, request.session['user_id'])
    # Fix: ``type(result) == list`` replaced with the idiomatic isinstance check.
    if isinstance(result, list):
        for error in result:
            messages.error(request, error)
        return redirect ("/addplan")
    return redirect('/dashboard')
def join(request, id):
    """Add the signed-in user as a traveller on trip ``id``.

    Attaches via the reverse m2m accessor (``user.joiner``).
    """
    other_plans = Trip.objects.get(id=id)
    user=User.objects.get(id=request.session['user_id'])
    user.joiner.add(other_plans)
    return redirect('/dashboard')
|
{"/apps/travel_app/views.py": ["/apps/travel_app/models.py"]}
|
398
|
Bthelisma/repTravelbuddy
|
refs/heads/master
|
/apps/travel_app/migrations/0005_auto_20171227_1455.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-12-27 22:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('travel_app', '0004_auto_20171227_0320'),
]
operations = [
migrations.AlterField(
model_name='trip',
name='travelfrom',
field=models.DateTimeField(),
),
migrations.AlterField(
model_name='trip',
name='travelto',
field=models.DateTimeField(),
),
]
|
{"/apps/travel_app/views.py": ["/apps/travel_app/models.py"]}
|
399
|
EStepzz/LogData
|
refs/heads/master
|
/tools/GenPic.py
|
#coding = utf-8
#author:QINWANG
'''
使用pyecharts 创建不同的图形图像
目前有:xxxx等图像
'''
from pyecharts import Line
from tools.ConnectDB import DbSomething
# Module-level: fetch the (timestamps, counts) series once at import time.
# NOTE(review): the password is passed as the int 111111 — presumably it
# should be a string; confirm against the DbSomething/psycopg2 signature.
a = DbSomething('localhost','dns_query', 'postgres', 111111)
v1,v2 = a.search()
print (v1,v2)
class GenPic:
    """Chart builders backed by pyecharts (currently only a line chart)."""
    def lineChart(self):
        """Render the QPS line chart from module-level v1/v2 to the default output file."""
        line= Line("QPS图")
        line.add('', v1, v2)
        line.show_config()
        line.render()
if __name__=='__main__':
    # Render the QPS line chart when run as a script.
    pic = GenPic()
    pic.lineChart()
|
{"/tools/GenPic.py": ["/tools/ConnectDB.py"]}
|
400
|
EStepzz/LogData
|
refs/heads/master
|
/tools/ConnectDB.py
|
#coding=utf-8
import psycopg2
import datetime
import time
class DbSomething():
    """Thin PostgreSQL helper around psycopg2: connect, create tables, query QPS."""

    def __init__(self, ip, database, username, password, port=5432):
        self.ip = ip
        self.database = database
        self.username = username
        self.password = password
        self.port = port

    def connection(self):
        """Open a new connection and cursor.

        The caller is responsible for closing both. (Original note: this
        could instead be done once in __init__.)
        """
        conn = psycopg2.connect(host=self.ip,
                                database=self.database,
                                user=self.username,
                                password=self.password,
                                port=self.port)
        cur = conn.cursor()
        return cur, conn

    def creatTable(self, sql):
        """Execute a CREATE TABLE statement and commit.

        (Name kept as ``creatTable`` for backward compatibility.)
        """
        cur, conn = self.connection()
        try:
            cur.execute(sql)
            print ("table is created")
            conn.commit()
        finally:
            # Fix: close even when execute/commit raises, so connections
            # no longer leak on failure.
            cur.close()
            conn.close()

    def Insert(self, sql):
        """Insert rows into a table — not implemented yet."""
        pass

    def search(self):
        """Return per-second query counts as two parallel lists.

        Returns:
            (v1, v2): v1 is a list of 'YYYY-MM-DD HH:MM:SS' strings,
            v2 the matching list of counts.
        """
        v1 =[]
        v2=[]
        sql = 'select ctime ,count(*) from qps group by ctime order by ctime'
        cur, conn = self.connection()
        try:
            cur.execute(sql)
            data = cur.fetchall()
            for ctime, count in data:
                v1.append(datetime.datetime.strftime(ctime, '%Y-%m-%d %H:%M:%S'))
                v2.append(count)
        finally:
            # Fix: close even on query failure; also dropped the leftover
            # per-row debug print of the value types.
            cur.close()
            conn.close()
        return v1,v2
|
{"/tools/GenPic.py": ["/tools/ConnectDB.py"]}
|
402
|
SSRomanSS/flask_blog
|
refs/heads/master
|
/manage.py
|
from blog import app, db, manager
from blog.models import *
if __name__ == '__main__':
    # Delegate to Flask-Script's Manager (e.g. `python manage.py db migrate`).
    manager.run()
|
{"/manage.py": ["/blog/__init__.py"], "/run.py": ["/blog/__init__.py"], "/blog/routes.py": ["/blog/__init__.py"]}
|
403
|
SSRomanSS/flask_blog
|
refs/heads/master
|
/blog/__init__.py
|
from flask import Flask, request
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_babel import Babel, lazy_gettext as _l
from flask_admin import Admin
from flask_admin.contrib.sqla import ModelView
from config import Config
# Application factory-less setup: one module-level app plus its extensions.
app = Flask(__name__)
app.config.from_object(Config)
# Flask-Login: send anonymous users to the login view with a translatable,
# 'info'-styled flashed message.
login = LoginManager(app)
login.login_view = 'login'
login.login_message = _l('Please log in to access this page')
login.login_message_category = 'info'
bootstrap = Bootstrap(app)
moment = Moment(app)
babel = Babel(app)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# Flask-Script manager exposing the Flask-Migrate commands under `db`.
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@babel.localeselector
def get_locale():
    """Pick the best UI language from the request's Accept-Language header."""
    return request.accept_languages.best_match(app.config['LANGUAGES'])
# from blog import routes, models, errors
from blog.models import User, Post
# Admin panel: CRUD views for the core models.
# NOTE(review): these ModelViews are unauthenticated by default — confirm
# access control before deploying.
admin = Admin(app)
admin.add_view(ModelView(User, db.session))
admin.add_view(ModelView(Post, db.session))
|
{"/manage.py": ["/blog/__init__.py"], "/run.py": ["/blog/__init__.py"], "/blog/routes.py": ["/blog/__init__.py"]}
|
404
|
SSRomanSS/flask_blog
|
refs/heads/master
|
/migrations/versions/89b140c56c4d_fix_create_followers_relationship.py
|
"""fix create followers relationship
Revision ID: 89b140c56c4d
Revises: 7d84ff36825f
Create Date: 2021-03-30 14:57:47.528704
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '89b140c56c4d'
down_revision = '7d84ff36825f'
branch_labels = None
depends_on = None
def upgrade():
    """Create the followed_followers association table (self-referential m2m on users)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('followed_followers',
    sa.Column('followed_id', sa.Integer(), nullable=True),
    sa.Column('follower_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['followed_id'], ['users.id'], ),
    sa.ForeignKeyConstraint(['follower_id'], ['users.id'], )
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the followed_followers association table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('followed_followers')
    # ### end Alembic commands ###
|
{"/manage.py": ["/blog/__init__.py"], "/run.py": ["/blog/__init__.py"], "/blog/routes.py": ["/blog/__init__.py"]}
|
405
|
SSRomanSS/flask_blog
|
refs/heads/master
|
/run.py
|
from blog import app, db
from blog import routes, models, errors, set_logger
@app.shell_context_processor
def make_shell_context():
    """Expose db and the core models in `flask shell` without manual imports."""
    context = dict(db=db)
    context['User'] = models.User
    context['Post'] = models.Post
    return context
if __name__ == '__main__':
    # Development entry point; debug mode must be off in production.
    app.run(debug=True)
|
{"/manage.py": ["/blog/__init__.py"], "/run.py": ["/blog/__init__.py"], "/blog/routes.py": ["/blog/__init__.py"]}
|
406
|
SSRomanSS/flask_blog
|
refs/heads/master
|
/migrations/versions/5e12ea69ab10_add_two_new_column_to_user.py
|
"""Add two new column to User
Revision ID: 5e12ea69ab10
Revises: a89dbfef15cc
Create Date: 2021-03-29 20:46:23.445651
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5e12ea69ab10'
down_revision = 'a89dbfef15cc'
branch_labels = None
depends_on = None
def upgrade():
    """Add nullable about_me / last_seen profile columns to users."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('about_me', sa.String(length=160), nullable=True))
    op.add_column('users', sa.Column('last_seen', sa.DateTime(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Remove the about_me / last_seen profile columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'last_seen')
    op.drop_column('users', 'about_me')
    # ### end Alembic commands ###
|
{"/manage.py": ["/blog/__init__.py"], "/run.py": ["/blog/__init__.py"], "/blog/routes.py": ["/blog/__init__.py"]}
|
407
|
SSRomanSS/flask_blog
|
refs/heads/master
|
/blog/routes.py
|
from datetime import datetime
from flask import render_template, flash, redirect, url_for, request
from flask_login import current_user, login_user, logout_user, login_required
from flask_babel import _
from werkzeug.urls import url_parse
from blog import app, db
from blog.forms import LoginForm, RegisterForm, EditProfileForm, PostForm, EmptyForm
from blog.models import User, Post
@app.before_request
def before_request():
    """Stamp the signed-in user's last_seen (UTC) on every request."""
    if current_user.is_authenticated:
        current_user.last_seen = datetime.utcnow()
        db.session.commit()
@app.route('/', methods=['GET', 'POST'])
@login_required
def index():
    """Home feed: submit a new post and show a paginated feed of followed posts."""
    form = PostForm()
    if form.validate_on_submit():
        post = Post(body=form.post.data, author=current_user)
        db.session.add(post)
        db.session.commit()
        flash(_('Your post is live now!'), 'info')
        # post/redirect/get so a refresh doesn't resubmit the form
        return redirect(url_for('index'))
    page = request.args.get('page', 1, type=int)
    posts = current_user.get_followed_posts().paginate(page, app.config['POST_PER_PAGE'], False)
    next_url = url_for('index', page=posts.next_num) if posts.has_next else None
    prev_url = url_for('index', page=posts.prev_num) if posts.has_prev else None
    # Fix: removed leftover debug call app.logger.info('message')
    return render_template('index.html', posts=posts.items, form=form, next_url=next_url, prev_url=prev_url)
@app.route('/explore')
@login_required
def explore():
    """Global feed of all posts, newest first, paginated.

    Bug fix: the pagination links previously pointed at 'index', so paging
    on /explore jumped back to the home feed; they now point at 'explore'.
    """
    page = request.args.get('page', 1, type=int)
    posts = Post.query.order_by(Post.timestamp.desc()).paginate(page, app.config['POST_PER_PAGE'], False)
    next_url = url_for('explore', page=posts.next_num) if posts.has_next else None
    prev_url = url_for('explore', page=posts.prev_num) if posts.has_prev else None
    return render_template('index.html', posts=posts.items, next_url=next_url, prev_url=prev_url)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Sign-in view.

    Bug fix: the user lookup used ``first_or_404()``, which aborted with a
    404 for unknown usernames and made the 'Invalid username or password'
    branch unreachable; ``first()`` lets that branch run.
    """
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.username.data).first()
        if not user or not user.check_password(form.password.data):
            flash('Invalid username or password', 'error')
            return redirect(url_for('login'))
        login_user(user, remember=form.remember_me.data)
        flash(f'Login successful for {user.username} ({user.email})', 'success')
        # only follow same-host 'next' targets (guards against open redirects)
        next_page = request.args.get('next')
        if not next_page or url_parse(next_page).netloc != '':
            next_page = url_for('index')
        return redirect(next_page)
    return render_template('login.html', title='Sign In', form=form)
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Sign-up view.

    Bug fixes: the authenticated-user redirect was missing its ``return``
    (the response was built and discarded), and the form was re-instantiated
    just before rendering, wiping validation errors after a failed submit.
    """
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    form = RegisterForm()
    if form.validate_on_submit():
        user = User(username=form.username.data, email=form.email.data)
        user.set_password(form.password.data)
        db.session.add(user)
        db.session.commit()
        flash('Congratulations, you successfully registered!', 'success')
        return redirect(url_for('index'))
    return render_template('register.html', title='Registration', form=form)
@app.route('/logout')
def logout():
    """End the session and return to the home page."""
    logout_user()
    return redirect(url_for('index'))
@app.route('/user/<username>')
@login_required
def user(username):
    """Profile page: own followed-feed for self, otherwise that user's posts, paginated."""
    user = User.query.filter_by(username=username).first_or_404()
    page = request.args.get('page', 1, type=int)
    if user == current_user:
        posts = user.get_followed_posts().paginate(page, app.config['POST_PER_PAGE'], False)
    else:
        posts = user.posts.order_by(Post.timestamp.desc()).paginate(page, app.config['POST_PER_PAGE'], False)
    next_url = url_for('user', username=user.username, page=posts.next_num) if posts.has_next else None
    prev_url = url_for('user', username=user.username, page=posts.prev_num) if posts.has_prev else None
    # EmptyForm supplies the CSRF token for the follow/unfollow buttons
    form = EmptyForm()
    return render_template('user.html', user=user, form=form, posts=posts.items, next_url=next_url, prev_url=prev_url)
@app.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Edit the current user's profile, pre-populated from the user object."""
    form = EditProfileForm(formdata=request.form, obj=current_user)
    if form.validate_on_submit():
        # copy validated form fields straight onto the user model
        form.populate_obj(current_user)
        db.session.commit()
        flash('Profile successfully updated', 'success')
        return redirect(url_for('user', username=current_user.username))
    return render_template('edit_profile.html', title='Edit Profile', form=form)
@app.route('/follow/<username>', methods=['POST'])
@login_required
def follow(username):
    """Follow ``username``; POST-only, CSRF-protected via EmptyForm."""
    form = EmptyForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=username).first_or_404()
        # NOTE(review): first_or_404 aborts on a missing user, so this
        # `if not user` branch is unreachable as written.
        if not user:
            flash(f'User {username} is not found', 'info')
            return redirect(url_for('index'))
        elif user == current_user:
            flash('You cannot follow yourself', 'info')
            return redirect(url_for('user', username=username))
        else:
            current_user.follow(user)
            db.session.commit()
            flash(f'You are following {username}!', 'success')
            return redirect(url_for('user', username=username))
    else:
        # CSRF validation failed
        return redirect(url_for('index'))
@app.route('/unfollow/<username>', methods=['POST'])
@login_required
def unfollow(username):
    """Unfollow ``username``; POST-only, CSRF-protected via EmptyForm."""
    form = EmptyForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=username).first_or_404()
        # NOTE(review): first_or_404 aborts on a missing user, so this
        # `if not user` branch is unreachable as written.
        if not user:
            flash(f'User {username} is not found', 'info')
            return redirect(url_for('index'))
        elif user == current_user:
            flash('You cannot unfollow yourself', 'info')
            return redirect(url_for('user', username=username))
        else:
            current_user.unfollow(user)
            db.session.commit()
            flash(f'You are unfollowing {username}!', 'info')
            return redirect(url_for('user', username=username))
    else:
        # CSRF validation failed
        return redirect(url_for('index'))
|
{"/manage.py": ["/blog/__init__.py"], "/run.py": ["/blog/__init__.py"], "/blog/routes.py": ["/blog/__init__.py"]}
|
411
|
xiaohan2012/random_steiner_tree
|
refs/heads/master
|
/test_loop_erased_weighted.py
|
import pytest
import numpy as np
from graph_tool import Graph
from random_steiner_tree import random_steiner_tree
from random_steiner_tree.util import from_gt
from collections import Counter
EPSILON = 1e-10
def graph():
    """
    0 (root)
    / \
    1 2
    \ /
    3 (X)
    """
    # Diamond test graph: two parallel root->terminal paths (via 1 and via 2).
    g = Graph()
    g.add_vertex(4)
    g.add_edge(0, 1)
    g.add_edge(0, 2)
    g.add_edge(1, 3)
    g.add_edge(2, 3)
    return g
# case1: EPSILON weights make the 0-2-3 path negligible, so trees should
# essentially always route through node 1 (expected node-2/node-1 ratio 0).
case1 = {
    (0, 1): 1,
    (0, 2): EPSILON,
    (1, 3): 1,
    (2, 3): EPSILON
}
# case2: weights give the path through node 2 a 4/3 frequency ratio vs node 1.
case2 = {
    (0, 1): 1,
    (0, 2): 2,
    (1, 3): 1,
    (2, 3): 1
}
def build_gi_by_weights(edge2weight):
    """Build the sampling-backend graph from graph() with the given edge weights."""
    g = graph()
    weights = g.new_edge_property('float')
    for (u, v), w in edge2weight.items():
        weights[g.edge(u, v)] = w
    return from_gt(g, weights=weights)
@pytest.mark.parametrize("edge2weight,expected_fraction", [(case1, 0), (case2, 4/3)])
@pytest.mark.parametrize("sampling_method", ["loop_erased"])
def test_distribution(edge2weight, expected_fraction, sampling_method):
    """Empirical node-2/node-1 Steiner-node ratio must match the weighted expectation."""
    gi = build_gi_by_weights(edge2weight)
    root = 0
    X = [3]
    n = 100000
    steiner_node_freq = Counter()
    for i in range(n):
        edges = random_steiner_tree(gi, X, root, method=sampling_method, seed=None)
        # Steiner nodes = tree vertices that are neither root nor terminals
        steiner_nodes = {u for e in edges for u in e} - {root} - set(X)
        for u in steiner_nodes:
            steiner_node_freq[u] += 1
    np.testing.assert_almost_equal(steiner_node_freq[2] / steiner_node_freq[1],
                                   expected_fraction, decimal=2)
    # if the following assertion fails, you can buy a lottery
    # assert steiner_node_freq[2] == 0
    # assert steiner_node_freq[1] == n
    # np.testing.assert_almost_equal(steiner_node_freq[2] / steiner_node_freq[1], 0)
|
{"/test_loop_erased_weighted.py": ["/random_steiner_tree/__init__.py"], "/distribution.py": ["/random_steiner_tree/__init__.py"], "/test.py": ["/random_steiner_tree/__init__.py"]}
|
412
|
xiaohan2012/random_steiner_tree
|
refs/heads/master
|
/random_steiner_tree/__init__.py
|
import random
from .interface import loop_erased, cut_based
def random_steiner_tree(gi, X, root, method="loop_erased", seed=None, verbose=False):
    """Sample a random Steiner tree connecting terminals X and the root.

    Args:
        gi: backend graph object built by util.from_gt/from_nx.
        X: iterable of terminal vertex ids.
        root: root vertex id.
        method: "loop_erased" or "cut" ("closure" is accepted by the assert
            but not implemented).
        seed: RNG seed; a random int32 is drawn when None.
        verbose: forwarded to the C++ backend.

    Returns:
        The edge list produced by the backend sampler.

    Raises:
        NotImplementedError: if method == "closure".
    """
    assert method in {"loop_erased", "closure", "cut"}
    # C++ side is strict about types
    X = list(map(int, X))
    root = int(root)
    if seed is None:
        seed = random.randint(0, 2147483647)  # max int32
    if method == "loop_erased":
        return loop_erased(gi, X, root, seed, verbose)
    elif method == "cut":
        return cut_based(gi, X, root, seed, verbose)
    else:
        # Fix: was ``raise NotImplemented('yet')`` — NotImplemented is a
        # constant, not an exception class, so that line itself raised
        # ``TypeError: 'NotImplementedType' object is not callable``.
        raise NotImplementedError('closure method not implemented yet')
|
{"/test_loop_erased_weighted.py": ["/random_steiner_tree/__init__.py"], "/distribution.py": ["/random_steiner_tree/__init__.py"], "/test.py": ["/random_steiner_tree/__init__.py"]}
|
413
|
xiaohan2012/random_steiner_tree
|
refs/heads/master
|
/distribution.py
|
# coding: utf-8
import networkx as nx
import numpy as np
import random
import pandas as pd
from scipy.spatial.distance import cosine
from tqdm import tqdm
from collections import Counter
from random_steiner_tree import random_steiner_tree
from random_steiner_tree.util import from_nx
from joblib import Parallel, delayed
# random.seed(1)
# np.random.seed(1)
# nx.florentine_families_graph().number_of_nodes()
# nx.davis_southern_women_graph().number_of_nodes()
# g = nx.karate_club_graph()
# Build the test graph: Florentine families plus a few random extra edges.
g = nx.florentine_families_graph()
g = nx.convert_node_labels_to_integers(g)
# add some random edges
n_rand_edges = 5
for i in range(n_rand_edges):
    while True:
        # pick two distinct nodes not yet connected
        u, v = map(int, np.random.permutation(g.nodes())[:2])
        if not g.has_edge(u, v):
            g.add_edge(u, v)
            break
# u, v = random.choice(g.nodes()), random.choice(g.nodes())
print(g.number_of_nodes(), g.number_of_edges())
# NOTE(review): edges_iter() is networkx 1.x API; 2.x renamed it to edges().
for u, v in g.edges_iter():
    g[u][v]['weight'] = 1
def one_run(g, k, N):
    """Sample N random Steiner trees on k random terminals and return the
    cosine similarity between the empirical tree distribution and the
    theoretical (degree-product) one.
    """
    gi = from_nx(g)
    X = np.random.permutation(g.number_of_nodes())[:k]
    root = random.choice(g.nodes())
    # tree_sizes = [len(random_steiner_tree(gi, X, root))
    # for i in tqdm(range(N))]
    def sort_edges(edges):
        # canonical form so identical trees count into the same bucket
        return tuple(sorted(edges))
    tree_freq = Counter()
    # for i in tqdm(range(N)):
    for i in range(N):
        edges = sort_edges(random_steiner_tree(gi, X, root))
        tree_freq[edges] += 1
    def tree_proba(edges):
        # theoretical (unnormalized) probability of a tree: inverse product
        # of tail degrees. Fix: np.product is deprecated and removed in
        # NumPy 2.0; np.prod is the supported spelling.
        prod = np.prod([g.degree(u) for u, v in edges])
        return 1 / prod
    probas = np.array([tree_proba(t)
                       for t in tree_freq.keys()])
    probas /= probas.sum()
    actual_probas = np.array(list(tree_freq.values())) / N
    # cosine() is a distance, so 1 - distance = similarity
    return 1 - cosine(probas, actual_probas)
k = 5            # terminals per run
N = 10000000     # samples per run
# N = 10000
n_rounds = 800   # independent replications
# fan the replications out across all cores
sims = Parallel(n_jobs=-1)(delayed(one_run)(g, k, N)
                           for i in range(n_rounds))
print(pd.Series(sims).describe())
|
{"/test_loop_erased_weighted.py": ["/random_steiner_tree/__init__.py"], "/distribution.py": ["/random_steiner_tree/__init__.py"], "/test.py": ["/random_steiner_tree/__init__.py"]}
|
414
|
xiaohan2012/random_steiner_tree
|
refs/heads/master
|
/setup.py
|
# from distutils.core import setup, Extension
import os
from setuptools import setup, Extension
# Force g++ for both C and C++ compilation of the extension.
os.environ["CC"] = "g++"
os.environ["CXX"] = "g++"
# C++ extension wrapping the Boost.Graph sampling code.
# NOTE(review): include/library paths are hard-coded for Python 3.5 on a
# Debian/Ubuntu layout — confirm before building elsewhere.
core_module = Extension(
    'random_steiner_tree/interface',
    include_dirs=['/usr/include/python3.5/'],
    libraries=['boost_python-py35', 'boost_graph'],
    library_dirs=['/usr/lib/x86_64-linux-gnu/'],
    extra_compile_args=['-std=c++11', '-O2', '-Wall'],
    extra_link_args=['-Wl,--export-dynamic'],
    sources=['random_steiner_tree/interface.cpp']
)
# Package metadata; pytest-runner wires `python setup.py test` to pytest.
setup(name='rand_steiner_tree',
      version='0.1',
      description='Random Steiner tree sampling algorithm',
      url='http://github.com/xiaohan2012/random_steiner_tree',
      author='Han Xiao',
      author_email='xiaohan2012@gmail.com',
      license='MIT',
      packages=['random_steiner_tree'],
      ext_modules=[core_module],
      setup_requires=['pytest-runner'],
      tests_require=['pytest']
      )
|
{"/test_loop_erased_weighted.py": ["/random_steiner_tree/__init__.py"], "/distribution.py": ["/random_steiner_tree/__init__.py"], "/test.py": ["/random_steiner_tree/__init__.py"]}
|
415
|
xiaohan2012/random_steiner_tree
|
refs/heads/master
|
/test.py
|
import pytest
import random
import numpy as np
import networkx as nx
from graph_tool import Graph
from random_steiner_tree import random_steiner_tree
from random_steiner_tree.util import (from_nx, from_gt,
num_vertices,
isolate_vertex,
vertices,
edges,
reachable_vertices)
def check_feasiblity(tree, root, X):
    """Assert `tree` is a feasible Steiner tree: connected, spans X+root,
    and every leaf is a terminal (no superfluous branches)."""
    X = set(X) | {int(root)}
    # number of components
    ncc = nx.number_connected_components(tree)
    assert ncc == 1, 'number_connected_components: {} != 1'.format(ncc)
    nodes = set(tree.nodes())
    assert X.issubset(nodes), 'tree does not contain all X'
    # leaves are terminals
    # no extra edges
    for n in tree.nodes_iter():
        if tree.degree(n) == 1:
            assert n in X, 'one leaf does not belong to terminal'
def input_data_nx():
    """Karate-club graph via networkx: (nx graph, backend graph, #vertices)."""
    g = nx.karate_club_graph().to_directed()
    for u, v in g.edges_iter():
        g[u][v]['weight'] = 1
    return g, from_nx(g, 'weight'), g.number_of_nodes()
def input_data_gt():
    """Karate-club graph via graph-tool: (gt graph, backend graph, #vertices)."""
    g_nx = nx.karate_club_graph()
    g = Graph(directed=True)
    g.add_vertex(g_nx.number_of_nodes())
    for u, v in g_nx.edges():
        g.add_edge(u, v)
        g.add_edge(v, u)  # the other direction
    return g, from_gt(g, None), g.num_vertices()
@pytest.mark.parametrize("data_type", ["gt", "nx"])
@pytest.mark.parametrize("method", ["loop_erased", "cut"])
def test_feasiblility(data_type, method):
    """Sampled trees must be connected, span X+root, and have terminal leaves."""
    if data_type == 'nx':
        data = input_data_nx()
    elif data_type == 'gt':
        data = input_data_gt()
    g, gi, N = data
    for i in range(10):
        # try different numbers of terminals.
        # Fix: k was iterated but never used — the terminal slice was
        # hard-coded to [:10], so only one terminal count was ever tested.
        for k in range(2, N+1):
            X = np.random.permutation(N)[:k]
            if data_type == 'nx':
                nodes = g.nodes()
            elif data_type == 'gt':
                nodes = list(map(int, g.vertices()))
            root = random.choice(nodes)
            tree_edges = random_steiner_tree(gi, X, root, method=method, verbose=True)
            t = nx.Graph()
            t.add_edges_from(tree_edges)
            check_feasiblity(t, root, X)
@pytest.fixture
def line_g():
    """Bidirected path graph 0 - 1 - 2."""
    g = Graph(directed=True)
    g.add_edge(0, 1)
    g.add_edge(1, 0)
    g.add_edge(1, 2)
    g.add_edge(2, 1)
    return g
def test_edges(line_g):
    """edges() reports every directed edge of the backend graph."""
    gi = from_gt(line_g, None)
    assert set(edges(gi)) == {(0, 1), (1, 0), (1, 2), (2, 1)}
def test_isolate_vertex(line_g):
    """isolate_vertex removes all edges incident to the vertex, both directions."""
    gi = from_gt(line_g, None)
    isolate_vertex(gi, 0)
    assert set(edges(gi)) == {(2, 1), (1, 2)}
    isolate_vertex(gi, 1)
    assert set(edges(gi)) == set()
def test_isolate_vertex_num_vertices():
    """Isolating a vertex removes its edges but never shrinks the vertex set."""
    _, gi, _ = input_data_gt()
    prev_N = num_vertices(gi)
    isolate_vertex(gi, 0)
    nodes_with_edges = {u for e in edges(gi) for u in e}
    assert 0 not in nodes_with_edges
    assert prev_N == num_vertices(gi)
    isolate_vertex(gi, 1)
    assert prev_N == num_vertices(gi)
@pytest.fixture
def disconnected_line_graph():
    """0 -- 1 -- 2 3 -- 4
    """
    # Two components: a 3-node path and a 2-node path, bidirected.
    g = nx.Graph()
    g.add_nodes_from([0, 1, 2, 3, 4])
    g.add_edges_from([(0, 1), (1, 2), (3, 4)])
    g = g.to_directed()
    return from_nx(g)
def test_remove_vertex_node_index(disconnected_line_graph):
    """After isolating 0, node ids are preserved and 0 is its own component."""
    internal = disconnected_line_graph
    isolate_vertex(internal, 0)
    assert set(vertices(internal)) == {0, 1, 2, 3, 4}
    assert reachable_vertices(internal, 0) == [0]
    assert reachable_vertices(internal, 1) == [1, 2]
    assert reachable_vertices(internal, 3) == [3, 4]
@pytest.mark.parametrize("expected, pivot", [({0, 1, 2}, 1), ({3, 4}, 3)])
def test_reachable_vertices(disconnected_line_graph, expected, pivot):
    """The reachable set from a pivot equals its connected component.

    Cleanup: removed the leftover debug ``print`` of the vertex count.
    """
    gi = disconnected_line_graph
    nodes = reachable_vertices(gi, pivot)
    assert set(nodes) == expected
@pytest.mark.parametrize("method", ['cut', 'loop_erased'])
def test_steiner_tree_with_disconnected_component(disconnected_line_graph, method):
    """The tree spans root and terminals within their own component.

    Cleanup: the local result variable no longer shadows the module-level
    ``edges`` helper used by the other tests in this file.
    """
    gi = disconnected_line_graph
    tree_edges = random_steiner_tree(gi, X=[0, 2], root=1, method=method)
    assert set(tree_edges) == {(1, 0), (1, 2)}
|
{"/test_loop_erased_weighted.py": ["/random_steiner_tree/__init__.py"], "/distribution.py": ["/random_steiner_tree/__init__.py"], "/test.py": ["/random_steiner_tree/__init__.py"]}
|
440
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_expanded_add_capacity.py
|
from stroke_functions import *
# Initialize
# Scenario: expanded network (4 PSCs, simulated by queue_ext) with a larger
# CSC of 17 beds.  Sweeps the PSC-1 transfer rate over 0.15 / 0.35 / 0.55,
# runs repl_num replications each, plots the bed-occupancy distributions and
# pickles the results for stroke_overall_comparison.py.
T = 10000        # simulation horizon per replication
repl_num = 100   # independent replications per transfer rate
# NOTE(review): these two rates are set but never passed to queue_ext below,
# so queue_ext's defaults apply — confirm this is intended.
service_rate_h = 1./7
service_rate_i = 1./3
# Mean/STD accumulators, one pair per transfer-rate band (only 1, 3, 5 —
# i.e. 0.15 / 0.35 / 0.55 — are filled by the sweep below).
Mean1_psc_cap = []
STD1_psc_cap = []
Mean2_psc_cap = []
STD2_psc_cap = []
Mean3_psc_cap = []
STD3_psc_cap = []
Mean4_psc_cap = []
STD4_psc_cap = []
Mean5_psc_cap = []
STD5_psc_cap = []
Mean6_psc_cap = []
STD6_psc_cap = []
cc0 = 17 # number of CSC beds when transfer rate is 15%
cc1 = 17 # number of CSC beds when transfer rate is 35%
cc2 = 17 # number of CSC beds when transfer rate is 55%
for ph in np.arange(0.15, 0.66, 0.2):
    X_outer = []
    # NOTE(review): cc is not used after this call — confirm it is needed.
    cc = csc_bed(ph, cc0, cc1, cc2)
    for iteration in np.arange(repl_num):
        Dist = queue_ext(ph, c1 = cc0, c2 = cc1, c3 = cc2, T = T)
        X_outer.append(Dist/T)  # time-average occupancy distribution
    # Store the replication mean/std in the accumulator for this rate band.
    if 0.14 <= ph <= 0.16:
        Mean1_psc_cap.append(np.mean(X_outer, axis = 0))
        STD1_psc_cap.append(np.std(X_outer, axis = 0))
    elif 0.24 <= ph <= 0.26:
        Mean2_psc_cap.append(np.mean(X_outer, axis = 0))
        STD2_psc_cap.append(np.std(X_outer, axis = 0))
    elif 0.34 <= ph <= 0.36:
        Mean3_psc_cap.append(np.mean(X_outer, axis = 0))
        STD3_psc_cap.append(np.std(X_outer, axis = 0))
    elif 0.44 <= ph <= 0.46:
        Mean4_psc_cap.append(np.mean(X_outer, axis = 0))
        STD4_psc_cap.append(np.std(X_outer, axis = 0))
    elif 0.54 <= ph <= 0.56:
        Mean5_psc_cap.append(np.mean(X_outer, axis = 0))
        STD5_psc_cap.append(np.std(X_outer, axis = 0))
    elif 0.64 <= ph <= 0.66:
        Mean6_psc_cap.append(np.mean(X_outer, axis = 0))
        STD6_psc_cap.append(np.std(X_outer, axis = 0))
    else:
        print("ERROR")
# Occupancy distributions with 95% CIs for the three simulated rates.
fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.5)
ax1.bar(np.arange(cc0+1), Mean1_psc_cap[0], yerr = 1.96*STD1_psc_cap[0]/np.sqrt(repl_num))
ax2.bar(np.arange(cc1+1), Mean3_psc_cap[0], yerr = 1.96*STD3_psc_cap[0]/np.sqrt(repl_num))
ax3.bar(np.arange(cc2+1), Mean5_psc_cap[0], yerr = 1.96*STD5_psc_cap[0]/np.sqrt(repl_num))
ax1.title.set_text('(a)')
ax2.title.set_text('(b)')
ax3.title.set_text('(c)')
fig.text(0.5, 0.0, 'Bed occupancy', ha='center')
fig.text(0.0, 0.5, 'Occupancy probability', va='center', rotation='vertical')
plt.savefig("5_bed_distribution_add_psc_cap.pdf")
plt.savefig("5_bed_distribution_add_psc_cap.jpg")
# Persist the results for the cross-scenario comparison script.
save_list = [Mean1_psc_cap, Mean3_psc_cap, Mean5_psc_cap]
open_file = open("base_psc_cap_mean.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
save_list = [STD1_psc_cap, STD3_psc_cap, STD5_psc_cap]
open_file = open("base_psc_cap_std.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
441
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_functions.py
|
from stroke_source import *
g = r.Random(1234)  # module-wide RNG with a fixed seed for reproducible runs
def next_arrival(arrival_rate):
    """Draw an exponential inter-arrival time via inverse-transform sampling."""
    draw = g.uniform(0,1)
    arrival_time = -1./arrival_rate * m.log(draw)
    return arrival_time
def next_service(service_rate):
    """Draw an exponential service (length-of-stay) time for the given rate."""
    draw = g.uniform(0,1)
    service_time = -1./service_rate * m.log(draw)
    return service_time
def redirect(p):
    """Bernoulli draw: return 1 when the uniform sample falls at or below p, else 0."""
    draw = g.uniform(0,1)
    return 1 if p >= draw else 0
def countX(lst, x):
    """Return the number of occurrences of ``x`` in ``lst``.

    Idiom fix: delegates to the built-in ``list.count`` instead of the
    previous manual counting loop.
    """
    return lst.count(x)
def queue_base_only(ph, arrival_rate_p_h = 2.0*0.15, arrival_rate_p_i = 2.0*0.85,
                    arrival_rate_c_h = 3.0*0.15, arrival_rate_c_i = 3.0*0.85,
                    service_rate_h = 1./7, service_rate_i = 1./3,
                    c1 = 15, c2 = 15, c3 = 15,
                    psc1_tr_h = 0.95,
                    psc2_tr_h = 0.95, psc2_tr_i = 0.15,
                    psc3_tr_h = 0.95, psc3_tr_i = 0.15,
                    T = 1000):
    """Discrete-event simulation of the CSC bed pool (base network, 3 PSCs).

    Hemorrhagic (type 1) and ischemic (type 2) stroke patients arrive at
    three PSCs and directly at the CSC as Poisson streams.  A PSC patient
    is transferred to the CSC with the per-PSC/per-type probability; if all
    ``cc`` beds are busy the patient is appended to ``Q`` (entries in ``Q``
    are never removed within this function), otherwise a bed is occupied
    for an exponentially distributed length of stay.

    Parameters
    ----------
    ph : ischemic transfer probability at PSC 1; also selects the bed
        capacity (``c1``/``c2``/``c3`` for rates near 0.15/0.35/0.55).
    arrival_rate_p_*, arrival_rate_c_* : arrival rates (p = each PSC,
        c = CSC; h = hemorrhagic, i = ischemic).
    service_rate_h, service_rate_i : 1 / mean length of stay, by type.
    c1, c2, c3 : candidate CSC bed capacities.
    psc*_tr_* : transfer probabilities from each PSC, by stroke type.
    T : simulated time horizon.

    Returns
    -------
    (Dist, total_busy_serv1) : ``Dist[k]`` is the total time with exactly
    ``k`` beds occupied (divide by ``T`` for the occupancy distribution);
    ``total_busy_serv1`` is the time-integral of the number of busy beds.
    """
    # Initialize
    pi = ph
    patid = 0  # running patient id
    # Transfer probabilities per PSC and stroke type.
    red_prop_h1 = psc1_tr_h # ph
    red_prop_i1 = pi
    red_prop_h2 = psc2_tr_h # 0.15
    red_prop_i2 = psc2_tr_i # 0.15
    red_prop_h3 = psc3_tr_h # 0.15
    red_prop_i3 = psc3_tr_i # 0.15
    Q = []  # overflow queue: [patid, stype]; only appended to in this function
    X = []  # patients in service: [patid, stype, scheduled departure time]
    # Pick the bed capacity from the transfer-rate band.
    if 0.14 <= ph <= 0.16:
        cc = c1
    elif 0.24 <= ph <= 0.26:
        # NOTE(review): ``cc0`` is not defined in this module; this branch
        # relies on a global defined elsewhere — confirm (possible remnant).
        cc = cc0
    elif 0.34 <= ph <= 0.36:
        cc = c2
    elif 0.44 <= ph <= 0.46:
        cc = cc0  # NOTE(review): same undefined-global concern as above
    elif 0.54 <= ph <= 0.56:
        cc = c3
    elif 0.64 <= ph <= 0.66:
        cc = cc0  # NOTE(review): same undefined-global concern as above
    else:
        print("ERROR", ph)
    sent = 0        # count of patients transferred from the PSCs
    overflown = 0   # direct CSC arrivals that found all beds busy
    #####
    # Debugging
    #####
    CSC = []
    csc_entered = 0
    total_busy_serv1 = 0
    #####
    LenQ = []  # queue-length trace, one sample per event
    LenX = []  # occupancy trace, one sample per event
    Time = []  # event-time trace
    Dist = np.zeros(cc+1)  # Dist[k]: total time with exactly k beds busy
    # Schedule the first arrival of every stream; no departure pending yet.
    next_arrival_P1_h = next_arrival(arrival_rate_p_h)
    next_arrival_P1_i = next_arrival(arrival_rate_p_i)
    next_arrival_P2_h = next_arrival(arrival_rate_p_h)
    next_arrival_P2_i = next_arrival(arrival_rate_p_i)
    next_arrival_P3_h = next_arrival(arrival_rate_p_h)
    next_arrival_P3_i = next_arrival(arrival_rate_p_i)
    next_arrival_C_h = next_arrival(arrival_rate_c_h)
    next_arrival_C_i = next_arrival(arrival_rate_c_i)
    next_complete = m.inf
    Event = [next_arrival_P1_h, next_arrival_P1_i, next_arrival_P2_h, next_arrival_P2_i, next_arrival_P3_h, next_arrival_P3_i, next_arrival_C_h, next_arrival_C_i, next_complete]
    # Next event
    t = min(Event)
    while t < T:
        Time.append(t)
        LenQ.append(len(Q))
        LenX.append(len(X))
        # One-hot indicator of the occupancy level BEFORE this event.
        Update_vec = np.zeros(cc + 1)
        Update_vec[len(X)] = 1
        # Hemorrhagic arrival at PSC 1: transferred with prob. red_prop_h1.
        if t == next_arrival_P1_h:
            patid += 1
            if redirect(red_prop_h1) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype]) # type == 1: hem; type == 2: isch
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P1_h = t + next_arrival(arrival_rate_p_h)
        # Ischemic arrival at PSC 1.
        elif t == next_arrival_P1_i:
            patid += 1
            if redirect(red_prop_i1) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P1_i = t + next_arrival(arrival_rate_p_i)
        # PSC 2 arrivals (hemorrhagic / ischemic), same pattern.
        elif t == next_arrival_P2_h:
            patid += 1
            if redirect(red_prop_h2) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P2_h = t + next_arrival(arrival_rate_p_h)
        elif t == next_arrival_P2_i:
            patid += 1
            if redirect(red_prop_i2) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P2_i = t + next_arrival(arrival_rate_p_i)
        # PSC 3 arrivals (hemorrhagic / ischemic), same pattern.
        elif t == next_arrival_P3_h:
            patid += 1
            if redirect(red_prop_h3) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P3_h = t + next_arrival(arrival_rate_p_h)
        elif t == next_arrival_P3_i:
            patid += 1
            if redirect(red_prop_i3) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P3_i = t + next_arrival(arrival_rate_p_i)
        # Direct CSC arrivals are always admitted (no transfer draw);
        # blocked ones count toward ``overflown``.
        elif t == next_arrival_C_h:
            patid += 1
            csc_entered += 1
            stype = 1
            if len(X) >= cc:
                overflown += 1
                Q.append([patid, stype])
            else:
                LOS = next_service(service_rate_h)
                X.append([patid, stype, t + LOS])
                next_complete = min(sublist[2] for sublist in X)
            next_arrival_C_h = t + next_arrival(arrival_rate_c_h)
        elif t == next_arrival_C_i:
            patid += 1
            csc_entered += 1
            stype = 2
            if len(X) >= cc:
                overflown += 1
                Q.append([patid, stype])
            else:
                LOS = next_service(service_rate_i)
                X.append([patid, stype, t + LOS])
                next_complete = min(sublist[2] for sublist in X)
            next_arrival_C_i = t + next_arrival(arrival_rate_c_i)
        # Departure: drop the patient whose departure time equals the
        # earliest scheduled departure (last match wins on ties).
        elif t == next_complete:
            compl = min(sublist[2] for sublist in X)
            for i in np.arange(len(X)):
                if X[i][2] == compl:
                    ind = i
            X.pop(ind)
            if len(X) > 0 :
                next_complete = min(sublist[2] for sublist in X)
            else:
                next_complete = m.inf
        Event = [next_arrival_P1_h, next_arrival_P1_i, next_arrival_P2_h, next_arrival_P2_i, next_arrival_P3_h, next_arrival_P3_i, next_arrival_C_h, next_arrival_C_i, next_complete]
        tp = t
        t = min(Event)
        # Accumulate time-weighted statistics over [tp, t).
        # NOTE(review): Dist uses the pre-event occupancy (Update_vec) while
        # total_busy_serv1 uses the post-event len(X) — confirm intended.
        total_busy_serv1 = total_busy_serv1 + len(X)*(t-tp)
        Dist = Dist + Update_vec * (t - tp)
        if len(X) >= cc + 1:
            print("ERROR!")
            break
    return(Dist, total_busy_serv1)
def queue(ph, arrival_rate_p_h = 2.0*0.15, arrival_rate_p_i = 2.0*0.85,
          arrival_rate_c_h = 3.0*0.15, arrival_rate_c_i = 3.0*0.85,
          service_rate_h = 1./7, service_rate_i = 1./3,
          c1 = 15, c2 = 15, c3 = 15,
          psc1_tr_h = 0.95,
          psc2_tr_h = 0.95, psc2_tr_i = 0.15,
          psc3_tr_h = 0.95, psc3_tr_i = 0.15,
          T = 1000):
    """Simulate the CSC bed pool for the 3-PSC network; return ``Dist`` only.

    Mirrors ``queue_base_only`` (same event loop and parameters) but
    returns only ``Dist``, where ``Dist[k]`` is the total time with exactly
    ``k`` beds occupied (divide by ``T`` for the occupancy distribution).
    Type 1 = hemorrhagic, type 2 = ischemic; patients blocked at a full CSC
    are appended to ``Q`` and never removed within this function.
    """
    # Initialize
    pi = ph
    patid = 0  # running patient id
    # Transfer probabilities per PSC and stroke type.
    red_prop_h1 = psc1_tr_h # ph
    red_prop_i1 = pi
    red_prop_h2 = psc2_tr_h # 0.15
    red_prop_i2 = psc2_tr_i # 0.15
    red_prop_h3 = psc3_tr_h # 0.15
    red_prop_i3 = psc3_tr_i # 0.15
    Q = []  # overflow queue: [patid, stype]; only appended to here
    X = []  # patients in service: [patid, stype, scheduled departure time]
    # Pick the bed capacity from the transfer-rate band.
    if 0.14 <= ph <= 0.16:
        cc = c1
    elif 0.24 <= ph <= 0.26:
        # NOTE(review): ``cc0`` is not defined in this module; this branch
        # relies on a global defined elsewhere — confirm (possible remnant).
        cc = cc0
    elif 0.34 <= ph <= 0.36:
        cc = c2
    elif 0.44 <= ph <= 0.46:
        cc = cc0  # NOTE(review): same undefined-global concern as above
    elif 0.54 <= ph <= 0.56:
        cc = c3
    elif 0.64 <= ph <= 0.66:
        cc = cc0  # NOTE(review): same undefined-global concern as above
    else:
        print("ERROR", ph)
    sent = 0        # count of patients transferred from the PSCs
    overflown = 0   # direct CSC arrivals that found all beds busy
    #####
    # Debugging
    #####
    CSC = []
    csc_entered = 0
    total_busy_serv1 = 0
    #####
    LenQ = []  # queue-length trace, one sample per event
    LenX = []  # occupancy trace, one sample per event
    Time = []  # event-time trace
    Dist = np.zeros(cc+1)  # Dist[k]: total time with exactly k beds busy
    # Schedule the first arrival of every stream; no departure pending yet.
    next_arrival_P1_h = next_arrival(arrival_rate_p_h)
    next_arrival_P1_i = next_arrival(arrival_rate_p_i)
    next_arrival_P2_h = next_arrival(arrival_rate_p_h)
    next_arrival_P2_i = next_arrival(arrival_rate_p_i)
    next_arrival_P3_h = next_arrival(arrival_rate_p_h)
    next_arrival_P3_i = next_arrival(arrival_rate_p_i)
    next_arrival_C_h = next_arrival(arrival_rate_c_h)
    next_arrival_C_i = next_arrival(arrival_rate_c_i)
    next_complete = m.inf
    Event = [next_arrival_P1_h, next_arrival_P1_i, next_arrival_P2_h, next_arrival_P2_i, next_arrival_P3_h, next_arrival_P3_i, next_arrival_C_h, next_arrival_C_i, next_complete]
    # Next event
    t = min(Event)
    while t < T:
        Time.append(t)
        LenQ.append(len(Q))
        LenX.append(len(X))
        # One-hot indicator of the occupancy level BEFORE this event.
        Update_vec = np.zeros(cc + 1)
        Update_vec[len(X)] = 1
        # Hemorrhagic arrival at PSC 1: transferred with prob. red_prop_h1.
        if t == next_arrival_P1_h:
            patid += 1
            if redirect(red_prop_h1) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype]) # type == 1: hem; type == 2: isch
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P1_h = t + next_arrival(arrival_rate_p_h)
        # Ischemic arrival at PSC 1.
        elif t == next_arrival_P1_i:
            patid += 1
            if redirect(red_prop_i1) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P1_i = t + next_arrival(arrival_rate_p_i)
        # PSC 2 arrivals (hemorrhagic / ischemic), same pattern.
        elif t == next_arrival_P2_h:
            patid += 1
            if redirect(red_prop_h2) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P2_h = t + next_arrival(arrival_rate_p_h)
        elif t == next_arrival_P2_i:
            patid += 1
            if redirect(red_prop_i2) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P2_i = t + next_arrival(arrival_rate_p_i)
        # PSC 3 arrivals (hemorrhagic / ischemic), same pattern.
        elif t == next_arrival_P3_h:
            patid += 1
            if redirect(red_prop_h3) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P3_h = t + next_arrival(arrival_rate_p_h)
        elif t == next_arrival_P3_i:
            patid += 1
            if redirect(red_prop_i3) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P3_i = t + next_arrival(arrival_rate_p_i)
        # Direct CSC arrivals are always admitted (no transfer draw);
        # blocked ones count toward ``overflown``.
        elif t == next_arrival_C_h:
            patid += 1
            csc_entered += 1
            stype = 1
            if len(X) >= cc:
                overflown += 1
                Q.append([patid, stype])
            else:
                LOS = next_service(service_rate_h)
                X.append([patid, stype, t + LOS])
                next_complete = min(sublist[2] for sublist in X)
            next_arrival_C_h = t + next_arrival(arrival_rate_c_h)
        elif t == next_arrival_C_i:
            patid += 1
            csc_entered += 1
            stype = 2
            if len(X) >= cc:
                overflown += 1
                Q.append([patid, stype])
            else:
                LOS = next_service(service_rate_i)
                X.append([patid, stype, t + LOS])
                next_complete = min(sublist[2] for sublist in X)
            next_arrival_C_i = t + next_arrival(arrival_rate_c_i)
        # Departure: drop the patient whose departure time equals the
        # earliest scheduled departure (last match wins on ties).
        elif t == next_complete:
            compl = min(sublist[2] for sublist in X)
            for i in np.arange(len(X)):
                if X[i][2] == compl:
                    ind = i
            X.pop(ind)
            if len(X) > 0 :
                next_complete = min(sublist[2] for sublist in X)
            else:
                next_complete = m.inf
        Event = [next_arrival_P1_h, next_arrival_P1_i, next_arrival_P2_h, next_arrival_P2_i, next_arrival_P3_h, next_arrival_P3_i, next_arrival_C_h, next_arrival_C_i, next_complete]
        tp = t
        t = min(Event)
        # Accumulate time-weighted statistics over [tp, t).
        # NOTE(review): Dist uses the pre-event occupancy (Update_vec) while
        # total_busy_serv1 uses the post-event len(X) — confirm intended.
        total_busy_serv1 = total_busy_serv1 + len(X)*(t-tp)
        Dist = Dist + Update_vec * (t - tp)
        if len(X) >= cc + 1:
            print("ERROR!")
            break
    return(Dist)
def csc_bed(ph, cc0, cc1, cc2):
    """Map a PSC-1 transfer rate to the matching CSC bed capacity.

    Parameters
    ----------
    ph : transfer rate at PSC 1 (expected near 0.15-0.65).
    cc0 : bed count for rates near 0.15 (also used for the in-between
        rates 0.25 / 0.45 / 0.65, matching the original behavior).
    cc1 : bed count for rates near 0.35.
    cc2 : bed count for rates near 0.55.

    Bug fix: the original returned ``cc0`` from every branch, so the
    ``cc1``/``cc2`` arguments were silently ignored (the caller scripts
    document cc1/cc2 as "number of CSC beds when transfer rate is
    35%/55%").  It also fell through to an undefined local and printed
    "error" for an out-of-range ``ph``; that case now raises ValueError.
    """
    if 0.14 <= ph <= 0.16:
        return cc0
    elif 0.24 <= ph <= 0.26:
        return cc0
    elif 0.34 <= ph <= 0.36:
        return cc1
    elif 0.44 <= ph <= 0.46:
        return cc0
    elif 0.54 <= ph <= 0.56:
        return cc2
    elif 0.64 <= ph <= 0.66:
        return cc0
    else:
        raise ValueError("unsupported transfer rate: %r" % (ph,))
def queue_ext(ph, arrival_rate_p_h = 2.0*0.15, arrival_rate_p_i = 2.0*0.85,
              arrival_rate_c_h = 3.0*0.15, arrival_rate_c_i = 3.0*0.85,
              service_rate_h = 1./7, service_rate_i = 1./3,
              c1 = 15, c2 = 15, c3 = 15,
              psc1_tr_h = 0.95,
              psc2_tr_h = 0.95, psc2_tr_i = 0.15,
              psc3_tr_h = 0.95, psc3_tr_i = 0.15,
              psc4_tr_h = 0.95, psc4_tr_i = 0.15,
              T = 1000):
    """Simulate the CSC bed pool for the EXPANDED network (4 PSCs).

    Same event loop as ``queue`` with an additional fourth PSC stream
    (``psc4_tr_h``/``psc4_tr_i`` transfer probabilities).  Type 1 =
    hemorrhagic, type 2 = ischemic; blocked patients are appended to ``Q``
    and never removed within this function.  Returns ``Dist``, where
    ``Dist[k]`` is the total time with exactly ``k`` beds occupied.
    """
    # Initialize
    pi = ph
    patid = 0  # running patient id
    # Transfer probabilities per PSC and stroke type.
    red_prop_h1 = psc1_tr_h # ph
    red_prop_i1 = pi
    red_prop_h2 = psc2_tr_h
    red_prop_i2 = psc2_tr_i
    red_prop_h3 = psc3_tr_h
    red_prop_i3 = psc3_tr_i
    red_prop_h4 = psc4_tr_h
    red_prop_i4 = psc4_tr_i
    Q = []  # overflow queue: [patid, stype]; only appended to here
    X = []  # patients in service: [patid, stype, scheduled departure time]
    # Pick the bed capacity from the transfer-rate band.
    if 0.14 <= ph <= 0.16:
        cc = c1
    elif 0.24 <= ph <= 0.26:
        # NOTE(review): ``cc0`` is not defined in this module; this branch
        # relies on a global defined elsewhere — confirm (possible remnant).
        cc = cc0
    elif 0.34 <= ph <= 0.36:
        cc = c2
    elif 0.44 <= ph <= 0.46:
        cc = cc0  # NOTE(review): same undefined-global concern as above
    elif 0.54 <= ph <= 0.56:
        cc = c3
    elif 0.64 <= ph <= 0.66:
        cc = cc0  # NOTE(review): same undefined-global concern as above
    else:
        print("ERROR", ph)
    sent = 0        # count of patients transferred from the PSCs
    overflown = 0   # direct CSC arrivals that found all beds busy
    #####
    # Debugging
    #####
    CSC = []
    csc_entered = 0
    total_busy_serv1 = 0
    #####
    LenQ = []  # queue-length trace, one sample per event
    LenX = []  # occupancy trace, one sample per event
    Time = []  # event-time trace
    Dist = np.zeros(cc+1)  # Dist[k]: total time with exactly k beds busy
    # Schedule the first arrival of every stream; no departure pending yet.
    next_arrival_P1_h = next_arrival(arrival_rate_p_h)
    next_arrival_P1_i = next_arrival(arrival_rate_p_i)
    next_arrival_P2_h = next_arrival(arrival_rate_p_h)
    next_arrival_P2_i = next_arrival(arrival_rate_p_i)
    next_arrival_P3_h = next_arrival(arrival_rate_p_h)
    next_arrival_P3_i = next_arrival(arrival_rate_p_i)
    next_arrival_P4_h = next_arrival(arrival_rate_p_h)
    next_arrival_P4_i = next_arrival(arrival_rate_p_i)
    next_arrival_C_h = next_arrival(arrival_rate_c_h)
    next_arrival_C_i = next_arrival(arrival_rate_c_i)
    next_complete = m.inf
    Event = [
        next_arrival_P1_h, next_arrival_P1_i,
        next_arrival_P2_h, next_arrival_P2_i,
        next_arrival_P3_h, next_arrival_P3_i,
        next_arrival_P4_h, next_arrival_P4_i,
        next_arrival_C_h, next_arrival_C_i,
        next_complete
    ]
    # Next event
    t = min(Event)
    while t < T:
        Time.append(t)
        LenQ.append(len(Q))
        LenX.append(len(X))
        # One-hot indicator of the occupancy level BEFORE this event.
        Update_vec = np.zeros(cc + 1)
        Update_vec[len(X)] = 1
        # Hemorrhagic arrival at PSC 1: transferred with prob. red_prop_h1.
        if t == next_arrival_P1_h:
            patid += 1
            if redirect(red_prop_h1) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype]) # type == 1: hem; type == 2: isch
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P1_h = t + next_arrival(arrival_rate_p_h)
        # Ischemic arrival at PSC 1.
        elif t == next_arrival_P1_i:
            patid += 1
            if redirect(red_prop_i1) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P1_i = t + next_arrival(arrival_rate_p_i)
        # PSC 2 arrivals (hemorrhagic / ischemic), same pattern.
        elif t == next_arrival_P2_h:
            patid += 1
            if redirect(red_prop_h2) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P2_h = t + next_arrival(arrival_rate_p_h)
        elif t == next_arrival_P2_i:
            patid += 1
            if redirect(red_prop_i2) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P2_i = t + next_arrival(arrival_rate_p_i)
        # PSC 3 arrivals (hemorrhagic / ischemic), same pattern.
        elif t == next_arrival_P3_h:
            patid += 1
            if redirect(red_prop_h3) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P3_h = t + next_arrival(arrival_rate_p_h)
        elif t == next_arrival_P3_i:
            patid += 1
            if redirect(red_prop_i3) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P3_i = t + next_arrival(arrival_rate_p_i)
        # PSC 4 arrivals — the stream added in the expanded network.
        elif t == next_arrival_P4_h:
            patid += 1
            if redirect(red_prop_h4) == 1:
                sent += 1
                stype = 1
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_h)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P4_h = t + next_arrival(arrival_rate_p_h)
        elif t == next_arrival_P4_i:
            patid += 1
            if redirect(red_prop_i4) == 1:
                sent += 1
                stype = 2
                if len(X) >= cc:
                    Q.append([patid, stype])
                else:
                    LOS = next_service(service_rate_i)
                    X.append([patid, stype, t + LOS])
                    next_complete = min(sublist[2] for sublist in X)
            next_arrival_P4_i = t + next_arrival(arrival_rate_p_i)
        # Direct CSC arrivals are always admitted (no transfer draw);
        # blocked ones count toward ``overflown``.
        elif t == next_arrival_C_h:
            patid += 1
            csc_entered += 1
            stype = 1
            if len(X) >= cc:
                overflown += 1
                Q.append([patid, stype])
            else:
                LOS = next_service(service_rate_h)
                X.append([patid, stype, t + LOS])
                next_complete = min(sublist[2] for sublist in X)
            next_arrival_C_h = t + next_arrival(arrival_rate_c_h)
        elif t == next_arrival_C_i:
            patid += 1
            csc_entered += 1
            stype = 2
            if len(X) >= cc:
                overflown += 1
                Q.append([patid, stype])
            else:
                LOS = next_service(service_rate_i)
                X.append([patid, stype, t + LOS])
                next_complete = min(sublist[2] for sublist in X)
            next_arrival_C_i = t + next_arrival(arrival_rate_c_i)
        # Departure: drop the patient whose departure time equals the
        # earliest scheduled departure (last match wins on ties).
        elif t == next_complete:
            compl = min(sublist[2] for sublist in X)
            for i in np.arange(len(X)):
                if X[i][2] == compl:
                    ind = i
            X.pop(ind)
            if len(X) > 0 :
                next_complete = min(sublist[2] for sublist in X)
            else:
                next_complete = m.inf
        Event = [
            next_arrival_P1_h, next_arrival_P1_i,
            next_arrival_P2_h, next_arrival_P2_i,
            next_arrival_P3_h, next_arrival_P3_i,
            next_arrival_P4_h, next_arrival_P4_i,
            next_arrival_C_h, next_arrival_C_i,
            next_complete
        ]
        tp = t
        t = min(Event)
        # Accumulate time-weighted statistics over [tp, t).
        # NOTE(review): Dist uses the pre-event occupancy (Update_vec) while
        # total_busy_serv1 uses the post-event len(X) — confirm intended.
        total_busy_serv1 = total_busy_serv1 + len(X)*(t-tp)
        Dist = Dist + Update_vec * (t - tp)
        if len(X) >= cc + 1:
            print("ERROR!")
            break
    return(Dist)
def queue_customization(
    psc_hemorrhagic, psc_ischemic,
    csc_hemorrhagic, csc_ischemic,
    LOS_hemorrhagic, LOS_ischemic,
    psc1_transfer_rate_hemorrhagic,
    psc1_transfer_rate_ischemic,
    psc2_transfer_rate_hemorrhagic,
    psc2_transfer_rate_ischemic,
    psc3_transfer_rate_hemorrhagic,
    psc3_transfer_rate_ischemic,
    csc_bed_capacity, T, repl_num):
    """Run ``queue`` with user-supplied rates and plot/report the results.

    Arrival rates are given per source (PSC vs. CSC) and stroke type,
    lengths of stay in the same time unit as ``T``, and transfer rates per
    PSC and type.  Runs ``repl_num`` replications, saves the occupancy
    distribution and an overflow-probability bar to PDF/JPG files, and
    prints the overflow probability with a 95% confidence interval.
    Returns None.
    """
    Mean = []
    STD = []
    X_outer = []
    for iteration in np.arange(repl_num):
        # ``ph`` drives the ischemic transfer at PSC 1 inside queue();
        # the same capacity is used for all three of queue's c1/c2/c3 slots.
        Dist = queue(
            c1 = csc_bed_capacity, c2 = csc_bed_capacity, c3 = csc_bed_capacity,
            arrival_rate_p_h = psc_hemorrhagic, arrival_rate_p_i = psc_ischemic,
            arrival_rate_c_h = csc_hemorrhagic, arrival_rate_c_i = csc_ischemic,
            service_rate_h = 1./LOS_hemorrhagic, service_rate_i = 1./LOS_ischemic,
            psc1_tr_h = psc1_transfer_rate_hemorrhagic, ph = psc1_transfer_rate_ischemic,
            psc2_tr_h = psc2_transfer_rate_hemorrhagic, psc2_tr_i = psc2_transfer_rate_ischemic,
            psc3_tr_h = psc3_transfer_rate_hemorrhagic, psc3_tr_i = psc3_transfer_rate_ischemic,
            T = T)
        X_outer.append(Dist/T)  # time-average occupancy distribution
    Mean.append(np.mean(X_outer, axis = 0))
    STD.append(np.std(X_outer, axis = 0))
    # Occupancy distribution with 95% CI error bars.
    fig, (ax1) = plt.subplots(1, 1)
    fig.subplots_adjust(hspace=0.5)
    ax1.bar(np.arange(csc_bed_capacity+1), Mean[0], yerr = 1.96*STD[0]/np.sqrt(repl_num))
    #ax1.title.set_text('(a)')
    fig.text(0.5, 0.0, 'Bed occupancy', ha='center')
    fig.text(0.0, 0.5, 'Occupancy probability', va='center', rotation='vertical')
    plt.savefig("bed_distribution_cust.pdf")
    plt.savefig("bed_distribution_cust.jpg")
    # Overflow probability = probability all beds are busy (last entry).
    plt.figure()
    plt.bar([psc1_transfer_rate_ischemic],
            [
             Mean[0][len(Mean[0])-1]
            ],
            yerr = [
                    1.96*STD[0][len(STD[0])-1]/np.sqrt(repl_num)
                   ])
    plt.xlabel("Transfer rates at PSC 1")
    plt.ylabel("Overflow probability")
    plt.savefig("overflow_probability_cust.pdf")
    plt.savefig("overflow_probability_cust.jpg")
    # NOTE(review): values are scaled by 100 (percent) although the printed
    # message says "probability" — confirm the intended unit.
    mean_fin = Mean[0][len(Mean[0])-1]*100
    std_fin = 1.96*STD[0][len(STD[0])-1]/np.sqrt(repl_num)*100
    print("Overflow probability is {mean:.2f} +/- {CI:.2f}" \
          .format(mean = mean_fin, CI = std_fin))
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
442
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_overall_comparison.py
|
from stroke_functions import *
# Compare the overflow probabilities of the four simulated scenarios
# (base, expanded, expanded + reduced transfer, expanded + extra beds)
# as grouped bars with 95% confidence intervals.
repl_num = 100

def _load_pickle(path):
    # Load one pickled [rate-0.15, rate-0.35, rate-0.55] result list;
    # "with" guarantees the file handle is closed (the original leaked
    # handles via bare open()/close() pairs).
    with open(path, "rb") as fh:
        return pickle.load(fh)

# Base case
Mean1, Mean2, Mean3 = _load_pickle("base_mean.pkl")
STD1, STD2, STD3 = _load_pickle("base_std.pkl")
# Base case + added capacity
Mean1_cap, Mean2_cap, Mean3_cap = _load_pickle("base_cap_mean.pkl")
STD1_cap, STD2_cap, STD3_cap = _load_pickle("base_cap_std.pkl")
# Expanded case
Mean1_psc, Mean2_psc, Mean3_psc = _load_pickle("base_psc_mean.pkl")
STD1_psc, STD2_psc, STD3_psc = _load_pickle("base_psc_std.pkl")
# Expanded case + added capacity
Mean1_psc_cap, Mean2_psc_cap, Mean3_psc_cap = _load_pickle("base_psc_cap_mean.pkl")
STD1_psc_cap, STD2_psc_cap, STD3_psc_cap = _load_pickle("base_psc_cap_std.pkl")
# Expanded case + reduced transfer rates
Mean1_psc_red, Mean2_psc_red, Mean3_psc_red = _load_pickle("base_psc_red_mean.pkl")
STD1_psc_red, STD2_psc_red, STD3_psc_red = _load_pickle("base_psc_red_std.pkl")

labels = ["0.15", "0.35", "0.55"]

def _overflow(means):
    # Overflow probability = probability all beds are occupied,
    # i.e. the last entry of the occupancy distribution.
    return means[0][len(means[0])-1]

def _ci(stds):
    # Half-width of the 95% confidence interval over repl_num replications.
    return 1.96*stds[0][len(stds[0])-1]/np.sqrt(repl_num)

M1 = [_overflow(m) for m in (Mean1, Mean2, Mean3)]
M2 = [_overflow(m) for m in (Mean1_psc, Mean2_psc, Mean3_psc)]
M3 = [_overflow(m) for m in (Mean1_psc_red, Mean2_psc_red, Mean3_psc_red)]
M4 = [_overflow(m) for m in (Mean1_psc_cap, Mean2_psc_cap, Mean3_psc_cap)]
x = np.arange(len(labels))  # the label locations
width = 0.125  # the width of the bars
fig, ax = plt.subplots(figsize=(12,8), dpi= 100)
rects1 = ax.bar(x - 4.5*width/3, M1, width, yerr = [_ci(s) for s in (STD1, STD2, STD3)], label='Base case')
rects2 = ax.bar(x - 1.5*width/3, M2, width, yerr = [_ci(s) for s in (STD1_psc, STD2_psc, STD3_psc)], label='Expanded case')
rects3 = ax.bar(x + 1.5*width/3, M3, width, yerr = [_ci(s) for s in (STD1_psc_red, STD2_psc_red, STD3_psc_red)], label='Expanded case, reduced transfer')
rects4 = ax.bar(x + 4.5*width/3, M4, width, yerr = [_ci(s) for s in (STD1_psc_cap, STD2_psc_cap, STD3_psc_cap)], label='Expanded case, additional Neuro-ICU beds')
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Overflow probability')
# Bug fix: this is the x-axis label; the original called set_ylabel twice
# and overwrote the y-axis label instead of labeling the x axis.
ax.set_xlabel('Transfer rates at PSC 1')
ax.set_title('Overflow probability by case')
ax.set_xticks(x)
ax.set_xticklabels(labels)
ax.set_yticks([0.00, 0.10, 0.20, 0.30, 0.40, 0.50])
ax.legend()
plt.savefig("6_overflow_prob_by_case.pdf")
plt.savefig("6_overflow_prob_by_case.jpg")
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
443
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_expanded.py
|
from stroke_functions import *
# Initialize
# Scenario: expanded network (4 PSCs, simulated by queue_ext) with the
# baseline CSC capacity of 15 beds.  Sweeps the PSC-1 transfer rate over
# 0.15 / 0.35 / 0.55, plots occupancy and overflow probability, and pickles
# the results for stroke_overall_comparison.py.
T = 10000
# NOTE(review): only 10 replications here while the sibling scripts use
# 100 — confirm this difference is intended.
repl_num = 10
# NOTE(review): these two rates are set but never passed to queue_ext
# below, so queue_ext's defaults apply — confirm this is intended.
service_rate_h = 1./7
service_rate_i = 1./3
# Mean/STD accumulators, one pair per transfer-rate band (only 1, 3, 5 —
# i.e. 0.15 / 0.35 / 0.55 — are filled by the sweep below).
Mean1_psc = []
STD1_psc = []
Mean2_psc = []
STD2_psc = []
Mean3_psc = []
STD3_psc = []
Mean4_psc = []
STD4_psc = []
Mean5_psc = []
STD5_psc = []
Mean6_psc = []
STD6_psc = []
cc0 = 15 # number of CSC beds when transfer rate is 15%
cc1 = 15 # number of CSC beds when transfer rate is 35%
cc2 = 15 # number of CSC beds when transfer rate is 55%
for ph in np.arange(0.15, 0.66, 0.2):
    X_outer = []
    # NOTE(review): cc is not used after this call — confirm it is needed.
    cc = csc_bed(ph, cc0, cc1, cc2)
    for iteration in np.arange(repl_num):
        Dist = queue_ext(ph, c1 = cc0, c2 = cc1, c3 = cc2, T = T)
        X_outer.append(Dist/T)  # time-average occupancy distribution
    # Store the replication mean/std in the accumulator for this rate band.
    if 0.14 <= ph <= 0.16:
        Mean1_psc.append(np.mean(X_outer, axis = 0))
        STD1_psc.append(np.std(X_outer, axis = 0))
    elif 0.24 <= ph <= 0.26:
        Mean2_psc.append(np.mean(X_outer, axis = 0))
        STD2_psc.append(np.std(X_outer, axis = 0))
    elif 0.34 <= ph <= 0.36:
        Mean3_psc.append(np.mean(X_outer, axis = 0))
        STD3_psc.append(np.std(X_outer, axis = 0))
    elif 0.44 <= ph <= 0.46:
        Mean4_psc.append(np.mean(X_outer, axis = 0))
        STD4_psc.append(np.std(X_outer, axis = 0))
    elif 0.54 <= ph <= 0.56:
        Mean5_psc.append(np.mean(X_outer, axis = 0))
        STD5_psc.append(np.std(X_outer, axis = 0))
    elif 0.64 <= ph <= 0.66:
        Mean6_psc.append(np.mean(X_outer, axis = 0))
        STD6_psc.append(np.std(X_outer, axis = 0))
    else:
        print("ERROR")
# Occupancy distributions with 95% CIs for the three simulated rates.
fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.5)
ax1.bar(np.arange(cc0+1), Mean1_psc[0], yerr = 1.96*STD1_psc[0]/np.sqrt(repl_num))
ax2.bar(np.arange(cc1+1), Mean3_psc[0], yerr = 1.96*STD3_psc[0]/np.sqrt(repl_num))
ax3.bar(np.arange(cc2+1), Mean5_psc[0], yerr = 1.96*STD5_psc[0]/np.sqrt(repl_num))
ax1.title.set_text('(a)')
ax2.title.set_text('(b)')
ax3.title.set_text('(c)')
fig.text(0.5, 0.0, 'Bed occupancy', ha='center')
fig.text(0.0, 0.5, 'Occupancy probability', va='center', rotation='vertical')
plt.savefig("3_bed_distribution_add_psc.pdf")
plt.savefig("3_bed_distribution_add_psc.jpg")
# Overflow probability (last entry of each distribution) per transfer rate.
plt.figure()
plt.bar(["0.15", "0.35", "0.55"],
        [
         Mean1_psc[0][len(Mean1_psc[0])-1],
         Mean3_psc[0][len(Mean3_psc[0])-1],
         Mean5_psc[0][len(Mean5_psc[0])-1]
        ],
        yerr = [
                1.96*STD1_psc[0][len(STD1_psc[0])-1]/np.sqrt(repl_num),
                1.96*STD3_psc[0][len(STD3_psc[0])-1]/np.sqrt(repl_num),
                1.96*STD5_psc[0][len(STD5_psc[0])-1]/np.sqrt(repl_num)
               ])
plt.xlabel("Transfer rates at PSC 1")
plt.ylabel("Overflow probability")
plt.savefig("3_overflow_probability_add_psc.pdf")
plt.savefig("3_overflow_probability_add_psc.jpg")
# Persist the results for the cross-scenario comparison script.
save_list = [Mean1_psc, Mean3_psc, Mean5_psc]
open_file = open("base_psc_mean.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
save_list = [STD1_psc, STD3_psc, STD5_psc]
open_file = open("base_psc_std.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
444
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_main.py
|
from stroke_functions import *
import stroke_base
import stroke_base_add_capacity
import stroke_expanded
import stroke_expanded_reduced_rate
import stroke_expanded_add_capacity
import stroke_overall_comparison
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
445
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_source.py
|
import numpy as np
import random as r
import math as m
import matplotlib.pyplot as plt
import pickle
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
446
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_base_add_capacity.py
|
from stroke_functions import *
# Scenario: base network with EXTRA CSC bed capacity.  For three PSC-1
# transfer rates (arange(0.15, 0.66, 0.2) -> 0.15 / 0.35 / 0.55), simulate
# repl_num replications, plot the bed-occupancy distribution and the
# overflow probability, and pickle the summary statistics.
# Initialize
T = 10000        # length of each simulated run (time units)
repl_num = 100   # independent replications per transfer rate
service_rate_h = 1./7   # hemorrhagic service rate (presumably 1/LOS -- confirm)
service_rate_i = 1./3   # ischemic service rate (presumably 1/LOS -- confirm)
# Accumulators: mean/std of the occupancy distribution per transfer rate.
Mean1_cap = []
STD1_cap = []
Mean2_cap = []
STD2_cap = []
Mean3_cap = []
STD3_cap = []
Mean4_cap = []
STD4_cap = []
Mean5_cap = []
STD5_cap = []
Mean6_cap = []
STD6_cap = []
cc0 = 15 # number of CSC beds when transfer rate is 15%
cc1 = 16 # number of CSC beds when transfer rate is 35%
cc2 = 17 # number of CSC beds when transfer rate is 55%
for ph in np.arange(0.15, 0.66, 0.2):
    X_outer = []
    # NOTE(review): cc is computed but never used -- queue() receives
    # cc0/cc1/cc2 directly.
    cc = csc_bed(ph, cc0, cc1, cc2)
    for iteration in np.arange(repl_num):
        # Dist: time spent in each occupancy state; normalize to a distribution.
        Dist = queue(ph, c1 = cc0, c2 = cc1, c3 = cc2, T = T)
        X_outer.append(Dist/T)
    # Bucket the replication statistics by the transfer rate ph.
    if 0.14 <= ph <= 0.16:
        Mean1_cap.append(np.mean(X_outer, axis = 0))
        STD1_cap.append(np.std(X_outer, axis = 0))
    elif 0.24 <= ph <= 0.26:
        Mean2_cap.append(np.mean(X_outer, axis = 0))
        STD2_cap.append(np.std(X_outer, axis = 0))
    elif 0.34 <= ph <= 0.36:
        Mean3_cap.append(np.mean(X_outer, axis = 0))
        STD3_cap.append(np.std(X_outer, axis = 0))
    elif 0.44 <= ph <= 0.46:
        Mean4_cap.append(np.mean(X_outer, axis = 0))
        STD4_cap.append(np.std(X_outer, axis = 0))
    elif 0.54 <= ph <= 0.56:
        Mean5_cap.append(np.mean(X_outer, axis = 0))
        STD5_cap.append(np.std(X_outer, axis = 0))
    elif 0.64 <= ph <= 0.66:
        Mean6_cap.append(np.mean(X_outer, axis = 0))
        STD6_cap.append(np.std(X_outer, axis = 0))
    else:
        print("ERROR")
# Occupancy distributions with 95% confidence-interval error bars.
fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.5)
ax1.bar(np.arange(cc0+1), Mean1_cap[0], yerr = 1.96*STD1_cap[0]/np.sqrt(repl_num))
ax2.bar(np.arange(cc1+1), Mean3_cap[0], yerr = 1.96*STD3_cap[0]/np.sqrt(repl_num))
ax3.bar(np.arange(cc2+1), Mean5_cap[0], yerr = 1.96*STD5_cap[0]/np.sqrt(repl_num))
ax1.title.set_text('(a)')
ax2.title.set_text('(b)')
ax3.title.set_text('(c)')
fig.text(0.5, 0.0, 'Bed occupancy', ha='center')
fig.text(0.0, 0.5, 'Occupancy probability', va='center', rotation='vertical')
plt.savefig("2_bed_distribution_base_add_cap.pdf")
plt.savefig("2_bed_distribution_base_add_cap.jpg")
# Overflow probability: mass of the last (full-capacity) occupancy state.
plt.figure()
plt.bar(["0.15", "0.35", "0.55"],
        [
            Mean1_cap[0][len(Mean1_cap[0])-1],
            Mean3_cap[0][len(Mean3_cap[0])-1],
            Mean5_cap[0][len(Mean5_cap[0])-1]
        ],
        yerr = [
            1.96*STD1_cap[0][len(STD1_cap[0])-1]/np.sqrt(repl_num),
            1.96*STD3_cap[0][len(STD3_cap[0])-1]/np.sqrt(repl_num),
            1.96*STD5_cap[0][len(STD5_cap[0])-1]/np.sqrt(repl_num)
        ])
plt.xlabel("Transfer rates at PSC 1")
plt.ylabel("Overflow probability")
plt.savefig("2_overflow_probability_base_add_cap.pdf")
plt.savefig("2_overflow_probability_base_add_cap.jpg")
# Persist summary statistics for later comparison.
save_list = [Mean1_cap, Mean3_cap, Mean5_cap]
open_file = open("base_cap_mean.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
save_list = [STD1_cap, STD3_cap, STD5_cap]
open_file = open("base_cap_std.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
447
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_expanded_reduced_rate.py
|
from stroke_functions import *
# Scenario: expanded network (extra PSCs) with REDUCED ischemic transfer
# rates at PSCs 2-4 (0.025 each).  Same capacity at every transfer-rate
# level; plots the occupancy distribution and pickles the statistics.
# Initialize
T = 10000        # length of each simulated run (time units)
repl_num = 100   # independent replications per transfer rate
service_rate_h = 1./7   # hemorrhagic service rate (presumably 1/LOS -- confirm)
service_rate_i = 1./3   # ischemic service rate (presumably 1/LOS -- confirm)
# Accumulators: mean/std of the occupancy distribution per transfer rate.
Mean1_psc_red = []
STD1_psc_red = []
Mean2_psc_red = []
STD2_psc_red = []
Mean3_psc_red = []
STD3_psc_red = []
Mean4_psc_red = []
STD4_psc_red = []
Mean5_psc_red = []
STD5_psc_red = []
Mean6_psc_red = []
STD6_psc_red = []
cc0 = 15 # number of CSC beds when transfer rate is 15%
cc1 = 15 # number of CSC beds when transfer rate is 35%
cc2 = 15 # number of CSC beds when transfer rate is 55%
for ph in np.arange(0.15, 0.66, 0.2):
    X_outer = []
    # NOTE(review): cc is computed but never used -- queue_ext() receives
    # cc0/cc1/cc2 directly.
    cc = csc_bed(ph, cc0, cc1, cc2)
    for iteration in np.arange(repl_num):
        # Reduced ischemic transfer rates at the added PSCs (2-4).
        Dist = queue_ext(ph, c1 = cc0, c2 = cc1, c3 = cc2,
                         psc2_tr_i = 0.025,
                         psc3_tr_i = 0.025,
                         psc4_tr_i = 0.025,
                         T = T)
        X_outer.append(Dist/T)
    # Bucket the replication statistics by the transfer rate ph.
    if 0.14 <= ph <= 0.16:
        Mean1_psc_red.append(np.mean(X_outer, axis = 0))
        STD1_psc_red.append(np.std(X_outer, axis = 0))
    elif 0.24 <= ph <= 0.26:
        Mean2_psc_red.append(np.mean(X_outer, axis = 0))
        STD2_psc_red.append(np.std(X_outer, axis = 0))
    elif 0.34 <= ph <= 0.36:
        Mean3_psc_red.append(np.mean(X_outer, axis = 0))
        STD3_psc_red.append(np.std(X_outer, axis = 0))
    elif 0.44 <= ph <= 0.46:
        Mean4_psc_red.append(np.mean(X_outer, axis = 0))
        STD4_psc_red.append(np.std(X_outer, axis = 0))
    elif 0.54 <= ph <= 0.56:
        Mean5_psc_red.append(np.mean(X_outer, axis = 0))
        STD5_psc_red.append(np.std(X_outer, axis = 0))
    elif 0.64 <= ph <= 0.66:
        Mean6_psc_red.append(np.mean(X_outer, axis = 0))
        STD6_psc_red.append(np.std(X_outer, axis = 0))
    else:
        print("ERROR")
# Occupancy distributions with 95% confidence-interval error bars.
fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.5)
ax1.bar(np.arange(cc0+1), Mean1_psc_red[0], yerr = 1.96*STD1_psc_red[0]/np.sqrt(repl_num))
ax2.bar(np.arange(cc1+1), Mean3_psc_red[0], yerr = 1.96*STD3_psc_red[0]/np.sqrt(repl_num))
ax3.bar(np.arange(cc2+1), Mean5_psc_red[0], yerr = 1.96*STD5_psc_red[0]/np.sqrt(repl_num))
ax1.title.set_text('(a)')
ax2.title.set_text('(b)')
ax3.title.set_text('(c)')
fig.text(0.5, 0.0, 'Bed occupancy', ha='center')
fig.text(0.0, 0.5, 'Occupancy probability', va='center', rotation='vertical')
plt.savefig("4_bed_distribution_add_psc_red.pdf")
plt.savefig("4_bed_distribution_add_psc_red.jpg")
# Persist summary statistics for later comparison.
save_list = [Mean1_psc_red, Mean3_psc_red, Mean5_psc_red]
open_file = open("base_psc_red_mean.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
save_list = [STD1_psc_red, STD3_psc_red, STD5_psc_red]
open_file = open("base_psc_red_std.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
448
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_base.py
|
from stroke_functions import *
# Scenario: BASE network.  queue_base_only() additionally returns the
# cumulative number of busy beds, so this script also reports the mean
# bed occupancy per transfer rate.
# Initialize
T = 10000        # length of each simulated run (time units)
repl_num = 100   # independent replications per transfer rate
service_rate_h = 1./7   # hemorrhagic service rate (presumably 1/LOS -- confirm)
service_rate_i = 1./3   # ischemic service rate (presumably 1/LOS -- confirm)
# Accumulators: occupancy distribution (Mean*/STD*) and mean busy beds
# (MeanBed*/StdBed*) per transfer rate.
Mean1 = []
STD1 = []
Mean2 = []
STD2 = []
Mean3 = []
STD3 = []
Mean4 = []
STD4 = []
Mean5 = []
STD5 = []
Mean6 = []
STD6 = []
MeanBed1 = []
MeanBed2 = []
MeanBed3 = []
MeanBed4 = []
MeanBed5 = []
MeanBed6 = []
StdBed1 = []
StdBed2 = []
StdBed3 = []
StdBed4 = []
StdBed5 = []
StdBed6 = []
cc0 = 15 # number of CSC beds when transfer rate is 15%
cc1 = 15 # number of CSC beds when transfer rate is 35%
cc2 = 15 # number of CSC beds when transfer rate is 55%
for ph in np.arange(0.15, 0.66, 0.2):
    X_outer = []
    Mean_outer = []
    # NOTE(review): cc is computed but never used -- queue_base_only()
    # receives cc0/cc1/cc2 directly.
    cc = csc_bed(ph, cc0, cc1, cc2)
    for iteration in np.arange(repl_num):
        # Dist: time per occupancy state; busy_serv: cumulative busy beds.
        Dist, busy_serv = queue_base_only(ph, c1 = cc0, c2 = cc1, c3 = cc2, T = T)
        X_outer.append(Dist/T)
        Mean_outer.append(busy_serv/T)
    # Bucket the replication statistics by the transfer rate ph.
    if 0.14 <= ph <= 0.16:
        Mean1.append(np.mean(X_outer, axis = 0))
        STD1.append(np.std(X_outer, axis = 0))
        MeanBed1.append(np.mean(Mean_outer, axis = 0))
        StdBed1.append(np.std(Mean_outer, axis = 0))
    elif 0.24 <= ph <= 0.26:
        Mean2.append(np.mean(X_outer, axis = 0))
        STD2.append(np.std(X_outer, axis = 0))
        MeanBed2.append(np.mean(Mean_outer, axis = 0))
        StdBed2.append(np.std(Mean_outer, axis = 0))
    elif 0.34 <= ph <= 0.36:
        Mean3.append(np.mean(X_outer, axis = 0))
        STD3.append(np.std(X_outer, axis = 0))
        MeanBed3.append(np.mean(Mean_outer, axis = 0))
        StdBed3.append(np.std(Mean_outer, axis = 0))
    elif 0.44 <= ph <= 0.46:
        Mean4.append(np.mean(X_outer, axis = 0))
        STD4.append(np.std(X_outer, axis = 0))
        MeanBed4.append(np.mean(Mean_outer, axis = 0))
        StdBed4.append(np.std(Mean_outer, axis = 0))
    elif 0.54 <= ph <= 0.56:
        Mean5.append(np.mean(X_outer, axis = 0))
        STD5.append(np.std(X_outer, axis = 0))
        MeanBed5.append(np.mean(Mean_outer, axis = 0))
        StdBed5.append(np.std(Mean_outer, axis = 0))
    elif 0.64 <= ph <= 0.66:
        Mean6.append(np.mean(X_outer, axis = 0))
        STD6.append(np.std(X_outer, axis = 0))
        MeanBed6.append(np.mean(Mean_outer, axis = 0))
        StdBed6.append(np.std(Mean_outer, axis = 0))
    else:
        print("ERROR")
# Occupancy distributions with 95% confidence-interval error bars.
fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.5)
ax1.bar(np.arange(cc0+1), Mean1[0], yerr = 1.96*STD1[0]/np.sqrt(repl_num))
ax2.bar(np.arange(cc1+1), Mean3[0], yerr = 1.96*STD3[0]/np.sqrt(repl_num))
ax3.bar(np.arange(cc2+1), Mean5[0], yerr = 1.96*STD5[0]/np.sqrt(repl_num))
ax1.title.set_text('(a)')
ax2.title.set_text('(b)')
ax3.title.set_text('(c)')
fig.text(0.5, 0.0, 'Bed occupancy', ha='center')
fig.text(0.0, 0.5, 'Occupancy probability', va='center', rotation='vertical')
plt.savefig("1_bed_distribution_base.pdf")
plt.savefig("1_bed_distribution_base.jpg")
# Overflow probability: mass of the last (full-capacity) occupancy state.
plt.figure()
plt.bar(["0.15", "0.35", "0.55"],
        [
            Mean1[0][len(Mean1[0])-1],
            Mean3[0][len(Mean3[0])-1],
            Mean5[0][len(Mean5[0])-1]
        ],
        yerr = [
            1.96*STD1[0][len(STD1[0])-1]/np.sqrt(repl_num),
            1.96*STD3[0][len(STD3[0])-1]/np.sqrt(repl_num),
            1.96*STD5[0][len(STD5[0])-1]/np.sqrt(repl_num)
        ])
plt.xlabel("Transfer rates at PSC 1")
plt.ylabel("Overflow probability")
plt.savefig("1_overflow_probability_base.pdf")
plt.savefig("1_overflow_probability_base.jpg")
# Mean number of occupied beds per transfer rate.
plt.figure()
plt.bar(["0.15", "0.35", "0.55"],
        [
            MeanBed1[0],
            MeanBed3[0],
            MeanBed5[0]
        ],
        yerr = [
            1.96*StdBed1[0]/np.sqrt(repl_num),
            1.96*StdBed3[0]/np.sqrt(repl_num),
            1.96*StdBed5[0]/np.sqrt(repl_num)
        ]
        )
plt.xlabel("Transfer rates at PSC 1")
plt.ylabel("Mean number of beds occupied")
plt.savefig("1_mean_base.pdf")
plt.savefig("1_mean_base.jpg")
# Persist summary statistics for later comparison.
save_list = [Mean1, Mean3, Mean5]
open_file = open("base_mean.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
save_list = [STD1, STD3, STD5]
open_file = open("base_std.pkl", "wb")
pickle.dump(save_list, open_file)
open_file.close()
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
449
|
hjtree0825/stroke_network_ctmc_simulations
|
refs/heads/main
|
/stroke_customization.py
|
from stroke_functions import *
# User-editable scenario: set the parameters below, then the script calls
# queue_customization() once with them.
############################################################################
############################################################################
############################################################################
# Simply change the numbers in this section.
# LOS (in days)
LOS_hemorrhagic = 7
LOS_ischemic = 3
# Number of beds at CSC Neuro-ICU
csc_bed_capacity = 15
# Average daily number of stroke patients examined at PSC
psc_hemorrhagic = 0.3
psc_ischemic = 1.7
# Average daily number of stroke patients examined at CSC
csc_hemorrhagic = 0.45
csc_ischemic = 2.55
# Transfer rates
# (i) PSC 1
# hemorrhagic
psc1_transfer_rate_hemorrhagic = 0.95
# ischemic
psc1_transfer_rate_ischemic = 0.15
# (ii) PSC 2
# hemorrhagic
psc2_transfer_rate_hemorrhagic = 0.95
# ischemic
psc2_transfer_rate_ischemic = 0.15
# (iii) PSC 3
# hemorrhagic
psc3_transfer_rate_hemorrhagic = 0.95
# ischemic
psc3_transfer_rate_ischemic = 0.15
############################################################################
############################################################################
############################################################################
# Initialize (no need to change, in general)
T = 10000        # simulation horizon (time units)
repl_num = 100   # number of independent replications
# Run simulations
queue_customization(
    psc_hemorrhagic = psc_hemorrhagic, psc_ischemic = psc_ischemic,
    csc_hemorrhagic = csc_hemorrhagic, csc_ischemic = csc_ischemic,
    LOS_hemorrhagic = LOS_hemorrhagic, LOS_ischemic = LOS_ischemic,
    psc1_transfer_rate_hemorrhagic = psc1_transfer_rate_hemorrhagic,
    psc1_transfer_rate_ischemic = psc1_transfer_rate_ischemic,
    psc2_transfer_rate_hemorrhagic = psc2_transfer_rate_hemorrhagic,
    psc2_transfer_rate_ischemic = psc2_transfer_rate_ischemic,
    psc3_transfer_rate_hemorrhagic = psc3_transfer_rate_hemorrhagic,
    psc3_transfer_rate_ischemic = psc3_transfer_rate_ischemic,
    csc_bed_capacity = csc_bed_capacity,
    T = T, repl_num = repl_num
)
|
{"/stroke_expanded_add_capacity.py": ["/stroke_functions.py"], "/stroke_functions.py": ["/stroke_source.py"], "/stroke_overall_comparison.py": ["/stroke_functions.py"], "/stroke_expanded.py": ["/stroke_functions.py"], "/stroke_main.py": ["/stroke_functions.py", "/stroke_base.py", "/stroke_base_add_capacity.py", "/stroke_expanded.py", "/stroke_expanded_reduced_rate.py", "/stroke_expanded_add_capacity.py", "/stroke_overall_comparison.py"], "/stroke_base_add_capacity.py": ["/stroke_functions.py"], "/stroke_expanded_reduced_rate.py": ["/stroke_functions.py"], "/stroke_base.py": ["/stroke_functions.py"], "/stroke_customization.py": ["/stroke_functions.py"]}
|
452
|
jlstack/Online-Marketplace
|
refs/heads/master
|
/application/models.py
|
from application import db
class Product(db.Model):
    """A marketplace product listing."""
    id = db.Column('id', db.Integer, primary_key=True)
    name = db.Column('name', db.String(128), nullable=False)
    description = db.Column('description', db.TEXT, nullable=False)
    # Path of the primary (display) image; optional.
    image_path = db.Column('image_path', db.String(128), nullable=True)
    quantity = db.Column('quantity', db.Integer, default=1)
    price = db.Column('price', db.FLOAT, default=0.0)

    def __init__(self, name, description, image_path='', quantity=1, price=0.0):
        self.name = name
        self.description = description
        self.image_path = image_path
        self.quantity = quantity
        self.price = price

    def __repr__(self):
        # Debug representation: a dict-like string of the public fields.
        return str({'name':self.name, 'description':self.description, 'image_path': self.image_path, 'quantity': self.quantity, 'price': self.price})
class User(db.Model):
    """An account that can log in to the marketplace."""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(128), index=True, unique=True)
    # Presumably a password hash (the login route compares SHA-224 digests)
    # -- confirm; never store plaintext here.
    password = db.Column(db.String(256), nullable=False)

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __repr__(self):
        return '<User %r>' % (self.username)
class Image(db.Model):
    """An additional image attached to a product.

    Images are associated with a Product by the product's *name* string,
    not by a foreign key (see the pin_enlarge route).
    """
    id = db.Column('id', db.Integer, primary_key=True)
    # Name of the product this image belongs to.
    name = db.Column('name', db.String(128), nullable=False)
    image_path = db.Column('image_path', db.String(128), nullable=False)
    # Ordering index of the image within the product's gallery.
    display_number = db.Column('display_number', db.Integer, nullable=False)

    def __init__(self, name, image_path, display_number):
        self.name = name
        self.image_path = image_path
        self.display_number = display_number

    def __repr__(self):
        return str({'name': self.name, 'image_path': self.image_path, 'display_number': self.display_number})
|
{"/application/models.py": ["/application.py"]}
|
453
|
jlstack/Online-Marketplace
|
refs/heads/master
|
/application.py
|
from flask import Flask, Response, session, flash, request, redirect, render_template, g
import sys
import os
import base64
from flask_login import LoginManager, UserMixin, current_user, login_required, login_user, logout_user
import hashlib
from flask_openid import OpenID
# Startup errors are collected here and exposed via the /errors route.
errors = []
try:
    from application import db
    from application.models import Product, User, Image
    import yaml
    # Seed the database from db.yml at import time.
    with open("db.yml") as db_file:
        db_entries = yaml.safe_load(db_file)
    db.create_all()
    for user in db_entries["users"]:
        usr = User(user["username"], user["password_hash"])
        db.session.add(usr)
        db.session.commit()
    for project in db_entries["projects"]:
        proj = Product(project["name"], project["description"], project["default_image"], 1, 0)
        db.session.add(proj)
        db.session.commit()
        for i in range(0, len(project["images"])):
            img = Image(project['name'], project["images"][i], i)
            db.session.add(img)
            db.session.commit()
    db.session.close()
except Exception as err:
    # BUG FIX: Exception has no .message attribute on Python 3; str(err)
    # works on both Python 2 and 3.
    errors.append(str(err))
# EB looks for an 'application' callable by default.
application = Flask(__name__)
# config
# NOTE(review): SECRET_KEY = os.urandom(24) changes on every restart, which
# invalidates all existing sessions -- consider loading a fixed key from the
# environment for production.
application.config.update(
    DEBUG = True,
    SECRET_KEY = os.urandom(24)
)
@application.route("/login", methods=["GET", "POST"])
def login():
    """Render the login form (GET) or authenticate the posted credentials.

    On success sets session['logged_in'] and redirects home; on failure
    redirects back to /login.
    """
    if str(request.method) == 'GET':
        if not session.get('logged_in'):
            return render_template('login.html')
        else:
            # BUG FIX: the original called redirect("/") without returning it,
            # so a GET from a logged-in user fell through to request.form
            # below and raised.  Return the redirect instead.
            return redirect("/")
    username = request.form['username']
    password = request.form['password']
    # NOTE(review): unsalted SHA-224 is weak for password storage -- prefer
    # werkzeug.security.generate_password_hash / bcrypt.
    password = hashlib.sha224(password.encode('utf-8')).hexdigest()
    user = User.query.filter_by(username=username, password=password).first()
    if user is not None:
        session['logged_in'] = True
        return redirect("/")
    return redirect("/login")
@application.route("/logout")
def logout():
    """Mark the session as logged out and return to the home page."""
    session['logged_in'] = False
    return redirect('/')
@application.route('/')
def index():
    """Render the landing page."""
    return render_template('home.html')
@application.route('/gallery')
def gallery():
    """Render all products in ascending id (insertion) order."""
    products = Product.query.order_by(Product.id.asc())
    return render_template('products.html', products=products)
@application.route('/about')
def about():
    """Render the static about page."""
    return render_template('about.html')
@application.route('/contact')
def contact():
    """Render the static contact page."""
    return render_template('contact.html')
@application.errorhandler(404)
def page_not_found(e):
    """Serve the custom 404 page."""
    return render_template('404.html'), 404
@application.route('/dir')
def stuff():
    # Debug endpoint: dumps dir() of the Product.id column object.
    # NOTE(review): looks like a development leftover -- consider removing
    # before production.
    return str(dir(Product.id))
@application.route('/add', methods=['GET', 'POST'])
def add():
    """Render the add-product form (GET) or create a product with its
    uploaded images (POST).  Requires a logged-in session.

    The first accepted image becomes the Product's primary image; every
    accepted image is also stored as an Image row ordered by upload index.
    """
    if not session.get('logged_in'):
        return render_template('login.html')
    if str(request.method) == 'POST':
        try:
            vals = request.form.to_dict()
            files = request.files.getlist("image")
            for i in range(0, len(files)):
                file = files[i]
                ext = file.filename.rsplit('.', 1)[1].lower()
                if ext in ['png', 'jpg', 'jpeg']:
                    # BUG FIX: urlsafe_b64encode requires bytes on Python 3
                    # (the original passed the str filename directly).
                    encoded = base64.urlsafe_b64encode(file.filename.encode('utf-8')).decode('ascii')
                    filename = "/static/images/" + encoded + "." + ext
                    file.save("." + filename)
                    if i == 0:
                        product = Product(vals['name'], vals['description'], filename, 1, 0)
                        db.session.add(product)
                        db.session.commit()
                        db.session.close()
                    img = Image(vals['name'], filename, i)
                    db.session.add(img)
                    db.session.commit()
                    db.session.close()
        except Exception as err:
            db.session.rollback()
            # BUG FIX: Exception has no .message attribute on Python 3.
            return str(err)
    return render_template('add_product.html')
@application.route('/errors')
def get_errors():
    """Expose the startup errors collected during DB seeding (debug aid)."""
    return str(errors)
@application.route('/products')
def get_products():
    """Return the product names, newest first, as a plain string (debug aid)."""
    products = Product.query.order_by(Product.id.desc())
    stuff = [x.name for x in products]
    return str(stuff)
@application.route('/pin/<pin_id>')
def pin_enlarge(pin_id):
    """Render the detail view for one product with its gallery images,
    ordered by display_number."""
    p = Product.query.filter_by(id=pin_id).first()
    images = Image.query.filter_by(name=p.name).order_by(Image.display_number.asc())
    return render_template('pin_focus.html', p=p, images=images)
@application.route('/delete/<pin_id>')
def delete(pin_id):
    """Delete the product with the given id and return to the gallery.

    NOTE(review): unlike /add, this route performs no logged_in check --
    anyone who knows the URL can delete products.  Confirm and add a guard.
    """
    Product.query.filter_by(id = pin_id).delete()
    db.session.commit()
    db.session.close()
    return redirect("/gallery")
# run the app.
# Local development entry point; on Elastic Beanstalk the 'application'
# callable above is served by the platform instead.
if __name__ == "__main__":
    # Setting debug to True enables debug output. This line should be
    # removed before deploying a production app.
    application.debug = True
    application.run()
|
{"/application/models.py": ["/application.py"]}
|
454
|
jlstack/Online-Marketplace
|
refs/heads/master
|
/application/__init__.py
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
def get_config():
    """Build the SQLAlchemy configuration from the RDS_* environment
    variables, or return None when they are not set.

    Returns a dict with SQLALCHEMY_DATABASE_URI, SQLALCHEMY_POOL_RECYCLE
    and WTF_CSRF_ENABLED keys.
    """
    if 'RDS_HOSTNAME' not in os.environ:
        return None
    # Assemble the MySQL connection string from the environment.
    uri = 'mysql+pymysql://{user}:{password}@{host}:{port}/{name}'.format(
        name=os.environ['RDS_DB_NAME'],
        user=os.environ['RDS_USERNAME'],
        password=os.environ['RDS_PASSWORD'],
        host=os.environ['RDS_HOSTNAME'],
        port=os.environ['RDS_PORT'],
    )
    return {
        'SQLALCHEMY_DATABASE_URI': uri,
        # Recycle pooled connections hourly to avoid server-side timeouts.
        'SQLALCHEMY_POOL_RECYCLE': 3600,
        'WTF_CSRF_ENABLED': True,
    }
config = get_config()
application = Flask(__name__)
db = None
if config is not None:
    # BUG FIX: config is a plain dict; Config.from_object() reads
    # *attributes* of an object, so the dict's keys were silently ignored.
    # Config.update() (dict semantics) actually applies them.
    application.config.update(config)
    try:
        db = SQLAlchemy(application)
    except Exception as err:
        # BUG FIX: Exception has no .message attribute on Python 3.
        print(err)
|
{"/application/models.py": ["/application.py"]}
|
459
|
kaustavbhattacharjee/labeling
|
refs/heads/main
|
/main.py
|
# This is a sample Python script.
# Press ⌃R to execute it or replace it with your code.
# Press Double ⇧ to search everywhere for classes, files, tool windows, actions, and settings.
from utils import Tweet
def print_hi(name):
    """Print a greeting (PyCharm template helper kept as a smoke test)."""
    # Use a breakpoint in the code line below to debug your script.
    print(f'Hi, {name}')  # Press ⌘F8 to toggle the breakpoint.
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    print_hi('Start Labeling')
    # See PyCharm help at https://www.jetbrains.com/help/pycharm/
    #PATH = "Jun/test.csv"
    PATH = "Kebby/MarchNonExpertsManualLabel3.csv" #first save the .xlsx file as .csv
    # Load the tweets, label them interactively, and write the labels back
    # to the same file in place.
    tweet = Tweet()
    tweets = tweet.import_data(PATH, "csv")
    tweets_labeled = tweet.create_labels(tweets)
    tweet.save_labels(tweets_labeled, PATH, "csv", index=False)
|
{"/main.py": ["/utils.py"]}
|
460
|
kaustavbhattacharjee/labeling
|
refs/heads/main
|
/utils.py
|
import pandas as pd
import csv
import os
from pandas import ExcelWriter
class Tweet:
    """Load tweets from a file, label them interactively, and save labels."""

    def import_data(self, PATH, type):
        """Read tweets from PATH into a pandas DataFrame.

        :param PATH: path of the input file
        :param type: "xlsx" (reads Sheet1) or "csv"
        :raises ValueError: for any other type (the original code raised an
            accidental UnboundLocalError instead)
        :return: the loaded DataFrame
        """
        if type == "xlsx":
            xl = pd.ExcelFile(PATH)
            data = xl.parse("Sheet1")
        elif type == "csv":
            data = pd.read_csv(PATH)
        else:
            raise ValueError("unsupported file type: %r" % (type,))
        return data

    def label_key2char(self, key):
        """
        :param key: the input "0", "1" or "2" from keyboard
        :return: "fact", "opinion" or "misinformation"; "" for anything else
        """
        if key == "0":
            return "fact"
        elif key == "1":
            return "opinion"
        elif key == "2":
            return "misinformation"
        else:
            return ""

    def create_labels(self, df):
        """Interactively fill the ManualLabel column.

        For each row with a missing ManualLabel, print the tweet text and
        scores, read one keypress (0/1/2 = label, q = quit early, anything
        else = leave blank) and record the label.

        :param df: imported data in dataframe format
        :return: dataframe with added label in ManualLabel column
        """
        labels = df["ManualLabel"].tolist()
        for index, row in df.iterrows():
            if pd.isna(row["ManualLabel"]):
                print("===========")
                print("Tweet Text")
                print(row["Tweet Text"])
                print("===========")
                print("Row Number: "+ str(index))
                print("Subjective: " + str(row["SubjectivityScores"]))
                print("Sentiment: " + str(row["FlairSentimentScore"]) + " " + str(row["FlairSentiment"]))
                print("===========")
                print('Classify as fact(0), opinion(1), misinformation(2) OR Skip(s), Quit(q): ')
                print("Your Label:")
                getch = _Getch()
                label = getch()
                label_char = self.label_key2char(label)
                # Clear the terminal between tweets.
                os.system('cls' if os.name == 'nt' else 'clear')
                if label == "q":
                    break
                labels[index] = label_char
            else:
                continue
        # Replace the column with the collected labels.
        df.drop(columns=["ManualLabel"], inplace=True)
        df["ManualLabel"] = labels
        return df

    def save_labels(self, tweets_labeled, PATH, type, index):
        """Write the labeled DataFrame back to PATH as .xlsx or .csv.

        :param index: passed through as the pandas index= argument
        """
        df = tweets_labeled
        if type == "xlsx":
            writer = ExcelWriter(PATH)
            df.to_excel(writer, 'Sheet1', index=index)
            # BUG FIX: ExcelWriter.save() was removed in pandas 2.0;
            # close() saves and works on both old and new pandas.
            writer.close()
        if type == "csv":
            df.to_csv(PATH, index=index)
class _Getch:
    """Gets a single character from standard input.  Does not echo to the
    screen."""
    def __init__(self):
        # Try the Windows backend first; constructing it imports msvcrt,
        # which raises ImportError on other platforms, where we fall back
        # to the termios-based Unix backend.
        try:
            self.impl = _GetchWindows()
        except ImportError:
            self.impl = _GetchUnix()
    def __call__(self): return self.impl()
class _GetchUnix:
    """Unix backend: read one raw keypress via termios/tty."""
    def __init__(self):
        # Import at construction time so an unsupported platform fails early.
        import tty, sys
    def __call__(self):
        import sys, tty, termios
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            # Raw mode: deliver each keypress immediately, without echo.
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(1)
        finally:
            # Always restore the previous terminal attributes.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
class _GetchWindows:
    """Windows backend: read one keypress via msvcrt.getch (returns bytes)."""
    def __init__(self):
        # Importing msvcrt here makes construction fail (ImportError) on
        # non-Windows platforms -- _Getch relies on that.
        import msvcrt
    def __call__(self):
        import msvcrt
        return msvcrt.getch()
|
{"/main.py": ["/utils.py"]}
|
481
|
sciaso/greenpass-covid19-qrcode-decoder
|
refs/heads/master
|
/lib/greenpass.py
|
from pyzbar.pyzbar import decode
from PIL import Image
from base45 import b45decode
from zlib import decompress
from flynn import decoder as flynn_decoder
from lib.datamapper import DataMapper as data_mapper
class GreenPassDecoder(object):
    """Decode an EU Digital COVID Certificate from a QR-code image."""
    # Raw QR payload (bytes); set in __init__.
    stream_data = None

    def __init__(self, stream_data):
        # Take the first QR code found in the image and keep its raw data.
        self.stream_data = decode(Image.open(stream_data))[0].data

    def decode(self, schema):
        """Decode the certificate payload and render it via DataMapper.

        Pipeline: drop the 4-byte header (presumably the 'HC1:' prefix --
        confirm against the DCC spec), base45-decode, zlib-decompress, then
        CBOR-decode twice (outer envelope, then the inner payload).
        """
        qr_decoded = self.stream_data[4:]
        qrcode_data = decompress(b45decode(qr_decoded))
        # Unpack (tag, (header_1, header_2, payload, signature)) -- looks
        # like a COSE_Sign1 structure; confirm.
        (_, (header_1, header_2, cbor_payload, sign)) = flynn_decoder.loads(qrcode_data)
        data = flynn_decoder.loads(cbor_payload)
        dm = data_mapper(data, schema)
        return dm.convert_json()
|
{"/lib/greenpass.py": ["/lib/datamapper.py"], "/app.py": ["/lib/greenpass.py"]}
|
482
|
sciaso/greenpass-covid19-qrcode-decoder
|
refs/heads/master
|
/lib/datamapper.py
|
import json
from urllib.request import urlopen
class DataMapperError(Exception):
    """Raised when the QR payload or the schema is missing/invalid."""
    pass
class DataMapper:
    """Render decoded certificate data as simple HTML, using a JSON schema
    to resolve human-readable field labels."""
    # Class-level defaults; real values are assigned in __init__.
    qr_data = None
    schema = None
    json = ''        # accumulated HTML output (not the json module)
    new_json = {}    # NOTE(review): never used in the visible code

    def _save_json(self, data, schema, level=0):
        # Recursively walk the payload; *schema* is the 'properties' mapping
        # for the current level, *level* controls the space indentation.
        for key, value in data.items():
            try:
                description = schema[key].get('title') or schema[key].get('description') or key
                # Schema titles look like "Name - detail"; keep the part
                # before the first ' - '.
                description, _, _ = description.partition(' - ')
                if type(value) is dict:
                    # Nested object: emit a heading, then recurse via $ref.
                    self.json += '<p>' + (' ' * level) + '<strong>' + description + '</strong></p>'
                    _, _, sch_ref = schema[key]['$ref'].rpartition('/')
                    self._save_json(value, self.schema['$defs'][sch_ref]['properties'], level + 1)
                elif type(value) is list:
                    # Array: emit a heading, then recurse per element via
                    # the items' $ref.
                    self.json += '<p>' + (' ' * level) + '<strong>' + description + '</strong></p>'
                    _, _, sch_ref = schema[key]['items']['$ref'].rpartition('/')
                    for v in value:
                        self._save_json(v, self.schema['$defs'][sch_ref]['properties'], level + 1)
                else:
                    # Scalar: "<strong>label</strong>:value".
                    self.json += '<p>' + (' ' * level) + '<strong>' + description + '</strong>' + ':' + str(
                        value) + '</p>'
            except KeyError:
                # Field missing from the schema -- log and keep going.
                print('error keys')
                print(data)

    def __set_schema(self, schema_url):
        # Download and parse the JSON schema.
        sch = urlopen(schema_url)
        self.schema = json.load(sch)

    def __init__(self, qr_data, schema_url, params_string=False):
        # The certificate payload is read from qr_data[-260][1]; presumably
        # the CWT health-certificate claim -- confirm against the DCC spec.
        # Some decoders return these keys as strings, hence params_string.
        i = -260
        j = 1
        if params_string:
            i = str(i)
            j = str(j)
        self.json = ''
        self.qr_data = qr_data[i][j]
        self.__set_schema(schema_url)

    def convert_json(self):
        """Return the payload rendered as HTML.

        :raises DataMapperError: if the payload or the schema is missing
        """
        if self.qr_data is None:
            raise DataMapperError("QR_DATA_IS_WRONG")
        if self.schema is None:
            raise DataMapperError("SCHEMA_IS_WRONG")
        self._save_json(self.qr_data, self.schema['properties'])
        return self.json
|
{"/lib/greenpass.py": ["/lib/datamapper.py"], "/app.py": ["/lib/greenpass.py"]}
|
483
|
sciaso/greenpass-covid19-qrcode-decoder
|
refs/heads/master
|
/app.py
|
from flask import Flask, redirect, request, render_template
from os.path import splitext
from flask_sslify import SSLify
from flask_babel import Babel, gettext
import os
from lib.greenpass import GreenPassDecoder as greenpass_decoder
# Deployment settings come from the environment (all optional).
is_prod = os.environ.get('PRODUCTION', None)
ga_id = os.environ.get('GA_ID', None)
sharethis_script_src = os.environ.get('SHARETHIS_SCRIPT_SRC', None)
app_url = os.environ.get('APP_URL', None)
app = Flask(__name__)
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
# Reject uploads larger than 4 MiB.
app.config['MAX_CONTENT_LENGTH'] = 4096 * 1024
app.config['UPLOAD_EXTENSIONS'] = ['.jpg', '.png', '.jpeg']
app.config['GITHUB_PROJECT'] = 'https://github.com/debba/greenpass-covid19-qrcode-decoder'
# JSON schema used by the decoder to label certificate fields.
app.config[
    'DCC_SCHEMA'] = 'https://raw.githubusercontent.com/ehn-dcc-development/ehn-dcc-schema/release/1.3.0/DCC.combined-schema.json'
app.glb_schema = {}
app.converted_schema = ''
app.config['LANGUAGES'] = {
    'en': 'English',
    'it': 'Italiano'
}
babel = Babel(app)
@babel.localeselector
def get_locale():
    """Pick the UI language from the Accept-Language request header."""
    return request.accept_languages.best_match(app.config['LANGUAGES'].keys())
# Force HTTPS redirects only when running in production.
if is_prod:
    sslify = SSLify(app)
@app.context_processor
def inject_user():
    """Make deployment settings and the app name available to all templates."""
    return dict(github_project=app.config['GITHUB_PROJECT'], is_prod=is_prod, ga_id=ga_id,
                sharethis_script_src=sharethis_script_src, app_url=app_url,
                app_name=gettext('Green Pass COVID-19 QRCode Decoder'))
@app.route('/', methods=['GET'])
def home():
    """Render the upload form."""
    return render_template('home.html')
@app.route('/qrdata', methods=['GET', 'POST'])
def qrdata():
    """Decode an uploaded QR-code image and render the certificate data.

    GET requests are redirected home.  POST expects an 'image' file field;
    the extension is validated against UPLOAD_EXTENSIONS, then the image is
    decoded via GreenPassDecoder against the configured DCC schema.
    """
    if str(request.method) == 'POST':
        if request.files['image'].filename != '':
            app.converted_schema = ''
            image = request.files['image']
            filename = image.filename
            # BUG FIX: compare the extension case-insensitively so that
            # '.JPG'/'.PNG' uploads are not rejected (UPLOAD_EXTENSIONS
            # entries are lower-case).
            file_ext = splitext(filename)[1].lower()
            if filename != '':
                if file_ext not in app.config['UPLOAD_EXTENSIONS']:
                    return render_template('error.html', error='UPLOAD_EXTENSIONS_ERROR', file_ext=file_ext), 400
                try:
                    decoder = greenpass_decoder(image.stream)
                    return render_template('data.html', data=decoder.decode(app.config['DCC_SCHEMA']))
                except (ValueError, IndexError) as e:
                    # Bad base45/zlib data or no QR code found in the image.
                    print(e)
                    return render_template('error.html', error='UPLOAD_IMAGE_NOT_VALID'), 400
        return render_template('error.html', error='UPLOAD_IMAGE_WITH_NO_NAME'), 500
    else:
        return redirect('/')
|
{"/lib/greenpass.py": ["/lib/datamapper.py"], "/app.py": ["/lib/greenpass.py"]}
|
501
|
FazilovDev/GraduateWork
|
refs/heads/main
|
/main.py
|
from Algorithms.Winnowing import get_fingerprints, get_text_from_file
from tkinter import *
from tkinter import filedialog as fd
import locale
# Parameters passed to get_fingerprints (presumably gram size / hash
# modulus / window size for the Winnowing algorithm -- confirm against
# Algorithms/Winnowing.py).
k = 15
q = 259#259
w = 4
class PlagiarismDetect(Frame):
def __init__(self, parent):
Frame.__init__(self, parent, background="white")
self.parent = parent
self.width = self.winfo_screenwidth()
self.height = self.winfo_screenheight()
self.parent.title("DetectPlagiarismMoss")
self.pack(fill=BOTH, expand=True)
self.file1 = 'file1'
self.file2 = 'file2'
self.create_main_menu()
def choice_f1(self):
self.file1 = fd.askopenfilename(defaultextension='.cpp', filetypes=[('CPP', '.cpp'),('TXT', '.txt'), ('Py', '.py')])
self.text_info_menu['text'] = "Загрузите\n {}\n {}:".format(self.file1, self.file2)
def choice_f2(self):
self.file2 = fd.askopenfilename(defaultextension='.cpp', filetypes=[('CPP', '.cpp'),('TXT', '.txt'),('Py', '.py')])
self.text_info_menu['text'] = "Загрузите\n {}\n {}:".format(self.file1, self.file2)
def print_file1(self,text, points, side):
newCode = text[: points[0][0]]
if side == 0:
textfield = self.text1
else:
textfield = self.text2
textfield.insert('end', newCode)
plagCount = 0
for i in range(len(points)):
if points[i][1] > points[i][0]:
plagCount += points[i][1] - points[i][0]
newCode = newCode + text[points[i][0] : points[i][1]]
textfield.insert('end', text[points[i][0] : points[i][1]], 'warning')
if i < len(points) - 1:
newCode = newCode + text[points[i][1] : points[i+1][0]]
textfield.insert('end', text[points[i][1] : points[i+1][0]])
else:
newCode = newCode + text[points[i][1] :]
textfield.insert('end', text[points[i][1] :])
return plagCount / len(text)
def analyze(self):
self.text1.tag_config('warning', background="orange",)
self.text2.tag_config('warning', background="orange")
text1 = get_text_from_file(self.file1)
text2 = get_text_from_file(self.file2)
mergedPoints = get_fingerprints(self.file1, self.file2, k, q, w)
res = self.print_file1(text1, mergedPoints[0], 0)
res1 = self.print_file1(text2, mergedPoints[1], 1)
self.text_plagiarism['text'] = "Уникальность файла: {} : {}%\nУникальность файла: {} : {}%".format(self.file1.split('/')[-1::][0], int((1-res)*100), self.file2.split('/')[-1::][0], int((1-res1)*100))
def create_main_menu(self):
    """Build the static layout: header labels, file buttons, run button, panes."""
    header = Frame(self)
    header.pack(fill=X)
    header.config(bg="white")
    self.text_info_menu = Label(header, text="Загрузите \n{} \n{}:".format(self.file1, self.file2), font=("Arial Bold", 20))
    self.text_info_menu.config(bg="white")
    self.text_info_menu.pack()
    self.text_plagiarism = Label(header, text="Уникальность файла: {} : {}%\nУникальность файла: {} : {}%".format("", 0, "", 0), font=("Arial Bold", 20))
    self.text_plagiarism.config(bg="white")
    self.text_plagiarism.pack()
    # The second-file button is packed first so it ends up rightmost,
    # matching the original layout.
    pick_second = Button(header, text="Файл №2", command=self.choice_f2)
    pick_second.pack(side=RIGHT, expand=True)
    pick_first = Button(header, text="Файл №1", command=self.choice_f1)
    pick_first.pack(side=RIGHT, expand=True)
    controls = Frame(self)
    controls.pack(fill=X)
    controls.config(bg="white")
    run_button = Button(controls, text="Обработать", command=self.analyze)
    run_button.pack()
    panes = Frame(self)
    panes.pack(fill=X)
    panes.config(bg="white")
    self.text1 = Text(panes, width=100, height=100)
    self.text1.pack(side=LEFT)
    self.text2 = Text(panes, width=100, height=100)
    self.text2.pack(side=LEFT)
def main():
    """Entry point: set the Russian locale and launch the Tk main loop."""
    locale.setlocale(locale.LC_ALL, 'ru_RU.UTF8')
    root = Tk()
    # Open the window at the full screen resolution.
    root.geometry("{}x{}".format(root.winfo_screenwidth(), root.winfo_screenheight()))
    PlagiarismDetect(root)
    root.mainloop()
if __name__ == '__main__':
    # Launch the GUI only when executed as a script, not on import.
    main()
|
{"/main.py": ["/Algorithms/Winnowing.py"]}
|
502
|
FazilovDev/GraduateWork
|
refs/heads/main
|
/Algorithms/Winnowing.py
|
from Preprocessing.cleantext import *
class Gram:
    """A single k-gram: its text, hash value and character span in the source."""

    def __init__(self, text, hash_gram, start_pos, end_pos):
        self.text = text            # the raw k-character substring
        self.hash = hash_gram       # rolling hash of the substring
        self.start_pos = start_pos  # index of the first character
        self.end_pos = end_pos      # index one past the last character
def get_text_from_file(filename):
    """Read *filename* and return its entire contents lower-cased."""
    with open(filename, 'r') as handle:
        return handle.read().lower()
def get_text_processing(text):
    """Strip spaces and commas from *text*; every other character is kept."""
    return ''.join(ch for ch in text if ch not in {' ', ','})
def get_hash_from_gram(gram, q):
    """Polynomial hash of *gram* modulo *q*, using base len(gram).

    Characters are mapped to ord(c) - ord('a') + 1, so 'a' -> 1.
    """
    base = len(gram)
    value = 0
    for ch in gram:
        digit = ord(ch) - ord('a') + 1
        value = (value * base + digit) % q
    return value
def get_k_grams_from_text(text, k=25, q=31):
    """Build every overlapping k-gram of *text* with its hash and position."""
    return [
        Gram(text[i:i + k], get_hash_from_gram(text[i:i + k], q), i, i + k)
        for i in range(len(text) - k + 1)
    ]
def get_hashes_from_grams(grams):
    """Collect the hash of every gram, preserving order."""
    return [gram.hash for gram in grams]
def min_index(window):
    """Index of the first minimum element of *window* (assumed non-empty)."""
    best = 0
    for i, value in enumerate(window):
        # Strict '<' keeps the FIRST occurrence on ties.
        if value < window[best]:
            best = i
    return best
def winnow(hashes, w):
    """Winnowing fingerprint selection.

    Slide a window of size *w* over *hashes*; record the (first) minimum of
    each window, but only when the minimum's absolute position changes.

    NOTE(review): the loop runs range(n - w), so the final window is never
    examined, and a minimum sitting at absolute index 0 in the first window
    is never recorded -- both quirks are preserved from the original so
    fingerprints stay comparable.
    """
    fingerprints = []
    prev_min = 0
    for start in range(len(hashes) - w):
        window = hashes[start:start + w]
        # Absolute position (in *hashes*) of the first minimum of this window.
        current_min = start + min(range(len(window)), key=window.__getitem__)
        if current_min != prev_min:
            fingerprints.append(hashes[current_min])
            prev_min = current_min
    return fingerprints
def get_points(fp1, fp2, token, hashes, grams):
    """Map fingerprints shared by both files back to positions in one file.

    fp1/fp2 -- winnowing fingerprint lists of the two files
    token   -- token list for this file; each entry is indexable, with k[1]
               a position in the original source and k[2] a position in the
               processed text (produced by Preprocessing.cleantext -- TODO
               confirm the exact tuple layout against that module)
    hashes  -- all k-gram hashes of this file
    grams   -- the matching Gram objects (parallel to *hashes*)

    Returns [start, end] ranges sorted by start, with the first range dropped.
    """
    points = []
    for i in fp1:
        for j in fp2:
            if i == j:
                flag = 0
                startx = endx = None
                # NOTE(review): index() finds only the FIRST gram with this
                # hash, so repeated matches all map to the same location.
                match = hashes.index(i)
                newStart = grams[match].start_pos
                newEnd = grams[match].end_pos
                # Translate processed-text positions back to source positions.
                for k in token:
                    if k[2] == newStart:
                        startx = k[1]
                        flag = 1
                    if k[2] == newEnd:
                        endx = k[1]
                # Keep the range only when both endpoints were resolved.
                if flag == 1 and endx != None:
                    points.append([startx, endx])
    points.sort(key = lambda x: x[0])
    # NOTE(review): the first (smallest-start) range is deliberately dropped
    # here -- presumably a known false positive; verify against callers.
    points = points[1:]
    return points
def get_merged_points(points):
    """Merge overlapping [start, end] ranges.

    *points* must be sorted by start (as produced by get_points). A range
    whose start falls inside the previous merged range is coalesced into it.

    Robustness fix: an empty input returns [] instead of raising IndexError.
    """
    if not points:
        return []
    merged = [points[0]]
    for current in points[1:]:
        last = merged[-1]
        if last[0] <= current[0] <= last[1]:
            # Overlap: extend the last merged range if *current* reaches further.
            if current[1] > last[1]:
                merged[-1] = [last[0], current[1]]
        else:
            merged.append(current)
    return merged
def get_fingerprints(file1, file2, k, q, w):
    """Full winnowing pipeline for two files.

    Tokenises both files, hashes their k-grams (gram size *k*, modulus *q*),
    selects winnowing fingerprints with window size *w* and maps the shared
    fingerprints back to merged source ranges.

    Returns (merged_points_for_file1, merged_points_for_file2).
    """
    tokens_a, tokens_b = tokenize(file1), tokenize(file2)
    processed_a, processed_b = toText(tokens_a), toText(tokens_b)
    grams_a = get_k_grams_from_text(processed_a, k, q)
    grams_b = get_k_grams_from_text(processed_b, k, q)
    hashes_a = get_hashes_from_grams(grams_a)
    hashes_b = get_hashes_from_grams(grams_b)
    prints_a = winnow(hashes_a, w)
    prints_b = winnow(hashes_b, w)
    ranges_a = get_points(prints_a, prints_b, tokens_a, hashes_a, grams_a)
    ranges_b = get_points(prints_a, prints_b, tokens_b, hashes_b, grams_b)
    return (get_merged_points(ranges_a), get_merged_points(ranges_b))
|
{"/main.py": ["/Algorithms/Winnowing.py"]}
|
503
|
Nimunex/TFG
|
refs/heads/master
|
/Device.py
|
from bluepy import btle
from bluepy.btle import Peripheral, DefaultDelegate
import Services
from Services import EnvironmentService, BatterySensor, UserInterfaceService, MotionService, DeviceDelegate
## Thingy52 Definition
class Device(Peripheral):
    """Nordic Thingy:52 BLE peripheral wrapper.

    Instantiate with the device address to connect and get access to the
    Thingy:52 sensor services. The address can be found with e.g.
    "sudo hcitool lescan".
    """
    def __init__(self, addr):
        # The Thingy:52 advertises with a random (not public) address type.
        Peripheral.__init__(self, addr, addrType="random")
        # Thingy configuration service not implemented
        self.battery = BatterySensor(self)
        self.environment = EnvironmentService(self)
        self.ui = UserInterfaceService(self)
        self.motion = MotionService(self)
        #self.sound = SoundService(self)
|
{"/Device.py": ["/Services.py"], "/mainMotion.py": ["/Services.py", "/Device.py"]}
|
504
|
Nimunex/TFG
|
refs/heads/master
|
/call.py
|
#####################################################################
# BLE devices handler #
# A new subprocess is created for each preregistered device in: #
# ./devices.mac #
#####################################################################
import subprocess
import time
#~ mac_file = open('devices.mac', 'r')
#~ for mac_address in mac_file:
#~ subprocess.call(['gnome-terminal', '-e', 'python3 main.py ' + mac_address])
#~ time.sleep(10)
# Launch one terminal per hard-coded device MAC address; the 20 s delay
# gives the first connection time to establish before the second starts.
subprocess.call(['gnome-terminal', '-e', 'python3 main.py FD:88:50:58:E7:45' ])
time.sleep(20)
subprocess.call(['gnome-terminal', '-e', 'python3 mainMotion.py E4:F6:C5:F7:03:39' ])
|
{"/Device.py": ["/Services.py"], "/mainMotion.py": ["/Services.py", "/Device.py"]}
|
505
|
Nimunex/TFG
|
refs/heads/master
|
/Services.py
|
from bluepy import btle
from bluepy.btle import UUID,Peripheral, DefaultDelegate
import os.path
import struct
import sys
import binascii
from urllib.request import urlopen
import bitstring
import fxpmath
from bitstring import BitArray
from fxpmath import Fxp
#Useful functions
def write_uint16(data, value, index):
    """Write a little-endian 16-bit *value* into hex string *data* at 16-bit
    slot *index* and return the new string.

    *data* is a bytes object of hex characters (as produced by
    binascii.b2a_hex); the result is a str. The decode keeps the helper
    working on both Python 2 and 3.
    """
    text = data.decode('utf-8')
    low, high = value & 0xFF, value >> 8
    return '{}{:02x}{:02x}{}'.format(text[:index * 4], low, high, text[index * 4 + 4:])
def write_uint8(data, value, index):
    """Write an 8-bit *value* into hex string *data* at byte slot *index* and
    return the new string (hex-encoded bytes input, str output)."""
    text = data.decode('utf-8')
    return '{}{:02x}{}'.format(text[:index * 2], value, text[index * 2 + 2:])
def getTimeStamp():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'.

    Bug fix: this module never imported time/datetime, so the original raised
    NameError whenever it was called; the import is now local to the function.
    """
    import datetime
    return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
#API key for environment services
WRITE_API = "AZOKZQAG2ZC1P2Z2"
BASE_URL = "https://api.thingspeak.com/update?api_key={}".format(WRITE_API)
#API key for motion services
WRITE_API_2 = "L8IVUKY6GII5QP95"
BASE_URL_2 = "https://api.thingspeak.com/update?api_key={}".format(WRITE_API_2)
ThingSpeakPrevSec = 0
ThingSpeakInterval = 20 # 20 seconds
## Definition of all UUID used for Environment Service
CCCD_UUID = 0x2902
##Environment UUID
ENVIRONMENT_SERVICE_UUID = "ef680200-9b35-4933-9B10-52FFA9740042"
TEMPERATURE_CHAR_UUID = "ef680201-9b35-4933-9B10-52FFA9740042"
PRESSURE_CHAR_UUID = "ef680202-9b35-4933-9B10-52FFA9740042"
HUMIDITY_CHAR_UUID = "ef680203-9b35-4933-9B10-52FFA9740042"
GAS_CHAR_UUID = "ef680204-9b35-4933-9B10-52FFA9740042"
COLOR_CHAR_UUID = "ef680205-9b35-4933-9B10-52FFA9740042"
CONFIG_CHAR_UUID = "ef680206-9b35-4933-9B10-52FFA9740042"
##Battery UUID
BATTERY_SERVICE_UUID = 0x180F
BATTERY_LEVEL_UUID = 0x2A19
##UI UUID
USER_INTERFACE_SERVICE_UUID = "ef680300-9b35-4933-9B10-52FFA9740042"
LED_CHAR_UUID = "ef680301-9b35-4933-9B10-52FFA9740042"
BUTTON_CHAR_UUID = "ef680302-9b35-4933-9B10-52FFA9740042"
EXT_PIN_CHAR_UUID = "ef680303-9b35-4933-9B10-52FFA9740042"
##Motion UUID
MOTION_SERVICE_UUID = "ef680400-9b35-4933-9B10-52FFA9740042"
TAP_CHAR_UUID = "ef680402-9b35-4933-9B10-52FFA9740042"
ORIENTATION_CHAR_UUID = "ef680403-9b35-4933-9B10-52FFA9740042"
QUATERNION_CHAR_UUID = "ef680404-9b35-4933-9B10-52FFA9740042"
STEP_COUNTER_CHAR_UUID = "ef680405-9b35-4933-9B10-52FFA9740042"
RAW_DATA_CHAR_UUID = "ef680406-9b35-4933-9B10-52FFA9740042"
EULER_CHAR_UUID = "ef680407-9b35-4933-9B10-52FFA9740042"
ROTATION_MATRIX_CHAR_UUID = "ef680408-9b35-4933-9B10-52FFA9740042"
HEADING_CHAR_UUID = "ef680409-9b35-4933-9B10-52FFA9740042"
GRAVITY_VECTOR_CHAR_UUID = "ef68040A-9b35-4933-9B10-52FFA9740042"
M_CONFIG_CHAR_UUID = "ef680401-9b35-4933-9B10-52FFA9740042"
## Notification handles used in notification delegate
##Environment handles
temperature_handle = None
pressure_handle = None
humidity_handle = None
gas_handle = None
color_handle = None
##Battery handles
battery_handle = None
##UI handles
button_handle = None
##Motion handles
tap_handle = None
orient_handle = None
quaternion_handle = None
stepcount_handle = None
rawdata_handle = None
euler_handle = None
rotation_handle = None
heading_handle = None
gravity_handle = None
## Notifications /Indications Handler
class DeviceDelegate(DefaultDelegate):
    """Dispatch BLE notifications from a Thingy:52.

    handleNotification() matches the characteristic handle against the
    module-level *_handle globals (populated by the service classes' enable()
    methods), decodes the payload, prints it, and forwards selected values to
    ThingSpeak.

    Refactor: the repeated manual two's-complement decoding of the original
    is replaced by int.from_bytes (identical values); the duplicated
    urlopen/print/close sequences are factored into _post().
    Bug fix: the gravity branch previously uploaded the undefined names
    roll/pitch/yaw (NameError); it now uploads gx/gy/gz.
    """

    @staticmethod
    def _signed_le(data, start, length):
        """Two's-complement little-endian integer from data[start:start+length]."""
        return int.from_bytes(bytes(data[start:start + length]), "little", signed=True)

    @staticmethod
    def _post(url):
        """Send one prepared ThingSpeak update URL and log the response."""
        conn = urlopen(url)
        print("Response: {}".format(conn.read()))
        conn.close()

    def handleNotification(self, hnd, data):
        # --- Environment service -----------------------------------------
        if hnd == temperature_handle:
            payload = bytearray(data)
            temperature_int = payload[0]   # integer part, degrees C
            temperature_dec = payload[1]   # hundredths of a degree
            print("A notification was received -> Temperature:", temperature_int, ',', temperature_dec, "ºC")
            self._post(BASE_URL + "&field1={:.2f}".format(temperature_int + temperature_dec * 0.01))
        elif hnd == pressure_handle:
            # 32-bit little-endian integer part followed by a fractional byte.
            pressure_int = int.from_bytes(bytes(data[:4]), "little")
            pressure_dec = bytearray(data)[-1]
            print("A notification was received -> Pressure: ", pressure_int, ',', pressure_dec, " hPa")
            self._post(BASE_URL + "&field2={:.2f}".format(pressure_int + pressure_dec * 0.01))
        elif hnd == humidity_handle:
            # NOTE(review): big-endian decode kept from the original; the
            # humidity payload is normally a single byte, so byte order is moot.
            humidity_value = int.from_bytes(bytearray(data), byteorder='big', signed=False)
            print("A notification was received -> Humidity: ", humidity_value, " %")
            self._post(BASE_URL + "&field3={:.2f}".format(humidity_value))
        elif hnd == gas_handle:
            payload = bytearray(data)
            eco2 = int.from_bytes(bytes(payload[0:2]), "little")   # ppm
            tvoc = int.from_bytes(bytes(payload[2:4]), "little")   # ppb
            print("A notification was received -> Gas: ", eco2, " ppm", tvoc, "ppb")
            # NOTE(review): eCO2 goes to field3, the same field as humidity --
            # kept as in the original; confirm the intended channel layout.
            self._post(BASE_URL + "&field3={:.2f}".format(eco2))
        elif hnd == color_handle:
            payload = bytearray(data)
            red = int.from_bytes(bytes(payload[0:2]), "little")
            green = int.from_bytes(bytes(payload[2:4]), "little")
            blue = int.from_bytes(bytes(payload[4:6]), "little")
            clear = int.from_bytes(bytes(payload[6:8]), "little")
            print("A notification was received -> Color: ", red, green, blue, clear)
            # The clear channel is printed but not uploaded.
            self._post(BASE_URL + "&field5={:.2f}".format(red))
            self._post(BASE_URL + "&field6={:.2f}".format(green))
            self._post(BASE_URL + "&field7={:.2f}".format(blue))
        # --- Battery service ---------------------------------------------
        elif hnd == battery_handle:
            battery_value = bytearray(data)[0]
            print("A notification was received -> Battery:", battery_value, "%")
        # --- User-interface service --------------------------------------
        elif hnd == button_handle:
            button = bytearray(data)[0]
            print("A notification was received -> Button[1-> pressed]: ", button)
            self._post(BASE_URL + "&field8={:}".format(button))
        # --- Motion service ----------------------------------------------
        elif hnd == tap_handle:
            payload = bytearray(data)
            tap, count = payload[0], payload[1]
            directions = {0x01: "TAP_X_UP", 0x02: "TAP_X_DOWN",
                          0x03: "TAP_Y_UP", 0x04: "TAP_Y_DOWN",
                          0x05: "TAP_Z_UP", 0x06: "TAP_Z_DOWN"}
            if tap in directions:
                print("A notification was received -> {}, count: ".format(directions[tap]), count)
        elif hnd == orient_handle:
            orientation = bytearray(data)[0]
            names = {0x00: "Portrait", 0x01: "Landscape",
                     0x02: "Reverse Portrait", 0x03: "Reverse Landscape"}
            if orientation in names:
                # Trailing space inside the string preserved from the original.
                print("A notification was received -> Orientation: {} ".format(names[orientation]))
        elif hnd == quaternion_handle:
            # Four 32-bit 2Q30 fixed-point components: w, x, y, z.
            w = self._signed_le(data, 0, 4) / 1073741823.
            x = self._signed_le(data, 4, 4) / 1073741823.
            y = self._signed_le(data, 8, 4) / 1073741823.
            z = self._signed_le(data, 12, 4) / 1073741823.
            print("A notification was received -> Quaternion(w,x,y,z): {:.2f}, {:.2f}, {:.2f}, {:.2f}".format(w, x, y, z))
        elif hnd == stepcount_handle:
            steps = int.from_bytes(bytes(data[0:4]), "little")
            duration = int.from_bytes(bytes(data[4:8]), "little")
            print("A notification was received -> Stepcount(steps,time): ", steps, duration)
        elif hnd == rawdata_handle:
            # Nine 16-bit signed fixed-point values; the divisors (1023 / 31 /
            # 15) are kept from the original decoding.
            ax = self._signed_le(data, 0, 2) / 1023.
            ay = self._signed_le(data, 2, 2) / 1023.
            az = self._signed_le(data, 4, 2) / 1023.
            gx = self._signed_le(data, 6, 2) / 31.
            gy = self._signed_le(data, 8, 2) / 31.
            gz = self._signed_le(data, 10, 2) / 31.
            cx = self._signed_le(data, 12, 2) / 15.
            cy = self._signed_le(data, 14, 2) / 15.
            cz = self._signed_le(data, 16, 2) / 15.
            print("A notification was received -> Raw data: Accelerometer(G):{:.2f}, {:.2f}, {:.2f} Gyroscope(deg/s): {:.2f}, {:.2f}, {:.2f} Compass(uT): {:.2f}, {:.2f}, {:.2f}".format(ax, ay, az, gx, gy, gz, cx, cy, cz))
        elif hnd == euler_handle:
            # Three 32-bit 16Q16 fixed-point angles in degrees.
            roll = self._signed_le(data, 0, 4) / 65535.
            pitch = self._signed_le(data, 4, 4) / 65535.
            yaw = self._signed_le(data, 8, 4) / 65535.
            print("A notification was received -> Euler(roll,pitch,yaw)[degrees]: {:.2f}, {:.2f}, {:.2f}".format(roll, pitch, yaw))
            self._post(BASE_URL_2 + "&field1={:.2f}".format(roll))
            self._post(BASE_URL_2 + "&field2={:.2f}".format(pitch))
            self._post(BASE_URL_2 + "&field3={:.2f}".format(yaw))
        elif hnd == rotation_handle:
            teptep = binascii.b2a_hex(data)
            print('Notification: Rotation matrix: {}'.format(teptep))
        elif hnd == heading_handle:
            heading = self._signed_le(data, 0, 4) / 65535.
            print("A notification was received -> Heading(degrees): ", heading)
        elif hnd == gravity_handle:
            # Three IEEE-754 single-precision floats: x, y, z.
            [gx] = struct.unpack('f', data[0:4])
            [gy] = struct.unpack('f', data[4:8])
            [gz] = struct.unpack('f', data[8:12])
            print("A notification was received -> Gravity(x,y,z): {:.2f}, {:.2f}, {:.2f}".format(gx, gy, gz))
            # Bug fix: the original referenced the undefined names
            # roll/pitch/yaw here; upload the gravity components instead.
            self._post(BASE_URL_2 + "&field1={:.2f}".format(gx))
            self._post(BASE_URL_2 + "&field2={:.2f}".format(gy))
            self._post(BASE_URL_2 + "&field3={:.2f}".format(gz))
class EnvironmentService():
    """Environment service wrapper (temperature, pressure, humidity, gas, color).

    Call enable() after connecting to resolve the characteristics and publish
    the module-level notification handles consumed by DeviceDelegate.

    Refactor: the five near-identical CCCD writers share _write_cccd().
    Bug fix: configure() now chains the three colour-calibration byte writes;
    previously each write restarted from current_config, so only the last
    byte (index 11) survived.
    """
    serviceUUID = ENVIRONMENT_SERVICE_UUID
    temperature_char_uuid = TEMPERATURE_CHAR_UUID
    pressure_char_uuid = PRESSURE_CHAR_UUID
    humidity_char_uuid = HUMIDITY_CHAR_UUID
    gas_char_uuid = GAS_CHAR_UUID
    color_char_uuid = COLOR_CHAR_UUID
    config_char_uuid = CONFIG_CHAR_UUID

    def __init__(self, periph):
        # Characteristics and CCCDs are resolved lazily in enable().
        self.periph = periph
        self.environment_service = None
        self.temperature_char = None
        self.temperature_cccd = None
        self.pressure_char = None
        self.pressure_cccd = None
        self.humidity_char = None
        self.humidity_cccd = None
        self.gas_char = None
        self.gas_cccd = None
        self.color_char = None
        self.color_cccd = None
        self.config_char = None

    @staticmethod
    def _write_cccd(cccd, state):
        """Write a notification CCCD: enable when *state* is truthy, else disable."""
        if cccd is not None:
            cccd.write(b"\x01\x00" if state else b"\x00\x00", True)

    def enable(self):
        """Enables the class by finding the service and its characteristics."""
        global temperature_handle
        global pressure_handle
        global humidity_handle
        global gas_handle
        global color_handle
        if self.environment_service is None:
            self.environment_service = self.periph.getServiceByUUID(self.serviceUUID)
        if self.temperature_char is None:
            self.temperature_char = self.environment_service.getCharacteristics(self.temperature_char_uuid)[0]
            temperature_handle = self.temperature_char.getHandle()
            self.temperature_cccd = self.temperature_char.getDescriptors(forUUID=CCCD_UUID)[0]
        if self.pressure_char is None:
            self.pressure_char = self.environment_service.getCharacteristics(self.pressure_char_uuid)[0]
            pressure_handle = self.pressure_char.getHandle()
            self.pressure_cccd = self.pressure_char.getDescriptors(forUUID=CCCD_UUID)[0]
        if self.humidity_char is None:
            self.humidity_char = self.environment_service.getCharacteristics(self.humidity_char_uuid)[0]
            humidity_handle = self.humidity_char.getHandle()
            self.humidity_cccd = self.humidity_char.getDescriptors(forUUID=CCCD_UUID)[0]
        if self.gas_char is None:
            self.gas_char = self.environment_service.getCharacteristics(self.gas_char_uuid)[0]
            gas_handle = self.gas_char.getHandle()
            self.gas_cccd = self.gas_char.getDescriptors(forUUID=CCCD_UUID)[0]
        if self.color_char is None:
            self.color_char = self.environment_service.getCharacteristics(self.color_char_uuid)[0]
            color_handle = self.color_char.getHandle()
            self.color_cccd = self.color_char.getDescriptors(forUUID=CCCD_UUID)[0]
        if self.config_char is None:
            self.config_char = self.environment_service.getCharacteristics(self.config_char_uuid)[0]

    def set_temperature_notification(self, state):
        """Enable/Disable temperature notifications."""
        self._write_cccd(self.temperature_cccd, state)

    def set_pressure_notification(self, state):
        """Enable/Disable pressure notifications."""
        self._write_cccd(self.pressure_cccd, state)

    def set_humidity_notification(self, state):
        """Enable/Disable humidity notifications."""
        self._write_cccd(self.humidity_cccd, state)

    def set_gas_notification(self, state):
        """Enable/Disable gas notifications."""
        self._write_cccd(self.gas_cccd, state)

    def set_color_notification(self, state):
        """Enable/Disable color notifications."""
        self._write_cccd(self.color_cccd, state)

    def configure(self, temp_int=None, press_int=None, humid_int=None, gas_mode_int=None,
                  color_int=None, color_sens_calib=None):
        """Write sampling intervals / modes into the configuration characteristic.

        All arguments are optional; only supplied fields are rewritten.
        Interval values are 16-bit; the gas mode and calibration bytes are 8-bit.
        """
        if temp_int is not None and self.config_char is not None:
            current_config = binascii.b2a_hex(self.config_char.read())
            new_config = write_uint16(current_config, temp_int, 0)
            self.config_char.write(binascii.a2b_hex(new_config), True)
        if press_int is not None and self.config_char is not None:
            current_config = binascii.b2a_hex(self.config_char.read())
            new_config = write_uint16(current_config, press_int, 1)
            self.config_char.write(binascii.a2b_hex(new_config), True)
        if humid_int is not None and self.config_char is not None:
            current_config = binascii.b2a_hex(self.config_char.read())
            new_config = write_uint16(current_config, humid_int, 2)
            self.config_char.write(binascii.a2b_hex(new_config), True)
        if gas_mode_int is not None and self.config_char is not None:
            current_config = binascii.b2a_hex(self.config_char.read())
            new_config = write_uint8(current_config, gas_mode_int, 8)
            self.config_char.write(binascii.a2b_hex(new_config), True)
        if color_int is not None and self.config_char is not None:
            current_config = binascii.b2a_hex(self.config_char.read())
            new_config = write_uint16(current_config, color_int, 3)
            self.config_char.write(binascii.a2b_hex(new_config), True)
        if color_sens_calib is not None and self.config_char is not None:
            current_config = binascii.b2a_hex(self.config_char.read())
            # Bug fix: chain the three writes so all calibration bytes survive
            # (write_uint8 takes hex bytes and returns str, hence the encodes).
            new_config = write_uint8(current_config, color_sens_calib[0], 9)
            new_config = write_uint8(new_config.encode('utf-8'), color_sens_calib[1], 10)
            new_config = write_uint8(new_config.encode('utf-8'), color_sens_calib[2], 11)
            self.config_char.write(binascii.a2b_hex(new_config), True)

    def disable(self):
        """Turn off all environment notifications."""
        self.set_temperature_notification(False)
        self.set_pressure_notification(False)
        self.set_humidity_notification(False)
        self.set_gas_notification(False)
        self.set_color_notification(False)
class BatterySensor():
    """Standard BLE Battery Service wrapper.

    Call enable() after connecting to resolve the characteristic and publish
    the battery notification handle consumed by DeviceDelegate.
    """
    svcUUID = UUID(BATTERY_SERVICE_UUID)  # Ref https://www.bluetooth.com/specifications/gatt/services
    dataUUID = UUID(BATTERY_LEVEL_UUID)  # Ref https://www.bluetooth.com/specifications/gatt/characteristics

    def __init__(self, periph):
        # Service, characteristic and CCCD are resolved lazily in enable().
        self.periph = periph
        self.service = None
        self.data = None
        self.data_cccd = None

    def enable(self):
        """Enables the class by finding the service and its characteristics."""
        global battery_handle
        if self.service is None:
            self.service = self.periph.getServiceByUUID(self.svcUUID)
        if self.data is None:
            self.data = self.service.getCharacteristics(self.dataUUID)[0]
            battery_handle = self.data.getHandle()
            self.data_cccd = self.data.getDescriptors(forUUID=CCCD_UUID)[0]

    def b_read(self):
        """Read and return the battery level in percent."""
        return ord(self.data.read())

    def set_battery_notification(self, state):
        """Enable (True) or disable (False) battery level notifications."""
        if self.data_cccd is not None:
            payload = b"\x01\x00" if state == True else b"\x00\x00"
            self.data_cccd.write(payload, True)

    def disable(self):
        """Turn battery notifications off."""
        self.set_battery_notification(False)
class UserInterfaceService():
    """User-interface (LED + button) service wrapper.

    Call enable() after connecting to resolve the characteristics and publish
    the button notification handle consumed by DeviceDelegate.

    Bug fix: disable() previously called set_button_notification(False)
    without 'self.', which raised NameError at runtime.
    """
    serviceUUID = USER_INTERFACE_SERVICE_UUID
    led_char_uuid = LED_CHAR_UUID
    btn_char_uuid = BUTTON_CHAR_UUID
    # To be added: EXT PIN CHAR

    def __init__(self, periph):
        # Characteristics and CCCD are resolved lazily in enable().
        self.periph = periph
        self.ui_service = None
        self.led_char = None
        self.btn_char = None
        self.btn_char_cccd = None
        # To be added: EXT PIN CHAR

    def enable(self):
        """Enables the class by finding the service and its characteristics."""
        global button_handle
        if self.ui_service is None:
            self.ui_service = self.periph.getServiceByUUID(self.serviceUUID)
        if self.led_char is None:
            self.led_char = self.ui_service.getCharacteristics(self.led_char_uuid)[0]
        if self.btn_char is None:
            self.btn_char = self.ui_service.getCharacteristics(self.btn_char_uuid)[0]
            button_handle = self.btn_char.getHandle()
            self.btn_char_cccd = self.btn_char.getDescriptors(forUUID=CCCD_UUID)[0]

    def set_led_mode_off(self):
        """Turn the LED off."""
        self.led_char.write(b"\x00", True)

    def set_led_mode_constant(self, r, g, b):
        """Set the LED to a constant RGB colour (each component 0-255)."""
        teptep = "01{:02X}{:02X}{:02X}".format(r, g, b)
        self.led_char.write(binascii.a2b_hex(teptep), True)

    def set_led_mode_breathe(self, color, intensity, delay):
        """
        Set LED to breathe mode.
        color has to be within 0x01 and 0x07
        intensity [%] has to be within 1-100
        delay [ms] has to be within 1 ms - 10 s
        """
        teptep = "02{:02X}{:02X}{:02X}{:02X}".format(color, intensity,
                                                     delay & 0xFF, delay >> 8)
        self.led_char.write(binascii.a2b_hex(teptep), True)

    def set_led_mode_one_shot(self, color, intensity):
        """
        Set LED to one shot mode.
        color has to be within 0x01 and 0x07
        intensity [%] has to be within 1-100
        """
        teptep = "03{:02X}{:02X}".format(color, intensity)
        self.led_char.write(binascii.a2b_hex(teptep), True)

    def set_button_notification(self, state):
        """Enable (True) or disable (False) button press notifications."""
        if self.btn_char_cccd is not None:
            if state == True:
                self.btn_char_cccd.write(b"\x01\x00", True)
            else:
                self.btn_char_cccd.write(b"\x00\x00", True)

    def disable(self):
        """Turn button notifications off (bug fix: was missing 'self.')."""
        self.set_button_notification(False)
class MotionService():
    ## Motion service module. Instantiate and call enable() to get access to
    ## the Motion interface; enable() also publishes the module-level
    ## notification handles consumed by DeviceDelegate.
    serviceUUID = MOTION_SERVICE_UUID
    config_char_uuid = M_CONFIG_CHAR_UUID
    tap_char_uuid = TAP_CHAR_UUID
    orient_char_uuid = ORIENTATION_CHAR_UUID
    quaternion_char_uuid = QUATERNION_CHAR_UUID
    stepcnt_char_uuid = STEP_COUNTER_CHAR_UUID
    rawdata_char_uuid = RAW_DATA_CHAR_UUID
    euler_char_uuid = EULER_CHAR_UUID
    rotation_char_uuid = ROTATION_MATRIX_CHAR_UUID
    heading_char_uuid = HEADING_CHAR_UUID
    gravity_char_uuid = GRAVITY_VECTOR_CHAR_UUID
    def __init__(self, periph):
        # Characteristics and CCCDs are resolved lazily in enable().
        self.periph = periph
        self.motion_service = None
        self.config_char = None
        self.tap_char = None
        self.tap_char_cccd = None
        self.orient_char = None
        self.orient_cccd = None
        self.quaternion_char = None
        self.quaternion_cccd = None
        self.stepcnt_char = None
        self.stepcnt_cccd = None
        self.rawdata_char = None
        self.rawdata_cccd = None
        self.euler_char = None
        self.euler_cccd = None
        self.rotation_char = None
        self.rotation_cccd = None
        self.heading_char = None
        self.heading_cccd = None
        self.gravity_char = None
        self.gravity_cccd = None
def enable(self):
##Enables the class by finding the service and its characteristics.
global tap_handle
global orient_handle
global quaternion_handle
global stepcount_handle
global rawdata_handle
global euler_handle
global rotation_handle
global heading_handle
global gravity_handle
if self.motion_service is None:
self.motion_service = self.periph.getServiceByUUID(self.serviceUUID)
if self.config_char is None:
self.config_char = self.motion_service.getCharacteristics(self.config_char_uuid)[0]
if self.tap_char is None:
self.tap_char = self.motion_service.getCharacteristics(self.tap_char_uuid)[0]
tap_handle = self.tap_char.getHandle()
self.tap_char_cccd = self.tap_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.orient_char is None:
self.orient_char = self.motion_service.getCharacteristics(self.orient_char_uuid)[0]
orient_handle = self.orient_char.getHandle()
self.orient_cccd = self.orient_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.quaternion_char is None:
self.quaternion_char = self.motion_service.getCharacteristics(self.quaternion_char_uuid)[0]
quaternion_handle = self.quaternion_char.getHandle()
self.quaternion_cccd = self.quaternion_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.stepcnt_char is None:
self.stepcnt_char = self.motion_service.getCharacteristics(self.stepcnt_char_uuid)[0]
stepcount_handle = self.stepcnt_char.getHandle()
self.stepcnt_cccd = self.stepcnt_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.rawdata_char is None:
self.rawdata_char = self.motion_service.getCharacteristics(self.rawdata_char_uuid)[0]
rawdata_handle = self.rawdata_char.getHandle()
self.rawdata_cccd = self.rawdata_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.euler_char is None:
self.euler_char = self.motion_service.getCharacteristics(self.euler_char_uuid)[0]
euler_handle = self.euler_char.getHandle()
self.euler_cccd = self.euler_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.rotation_char is None:
self.rotation_char = self.motion_service.getCharacteristics(self.rotation_char_uuid)[0]
rotation_handle = self.rotation_char.getHandle()
self.rotation_cccd = self.rotation_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.heading_char is None:
self.heading_char = self.motion_service.getCharacteristics(self.heading_char_uuid)[0]
heading_handle = self.heading_char.getHandle()
self.heading_cccd = self.heading_char.getDescriptors(forUUID=CCCD_UUID)[0]
if self.gravity_char is None:
self.gravity_char = self.motion_service.getCharacteristics(self.gravity_char_uuid)[0]
gravity_handle = self.gravity_char.getHandle()
self.gravity_cccd = self.gravity_char.getDescriptors(forUUID=CCCD_UUID)[0]
def set_tap_notification(self, state):
if self.tap_char_cccd is not None:
if state == True:
self.tap_char_cccd.write(b"\x01\x00", True)
else:
self.tap_char_cccd.write(b"\x00\x00", True)
def set_orient_notification(self, state):
if self.orient_cccd is not None:
if state == True:
self.orient_cccd.write(b"\x01\x00", True)
else:
self.orient_cccd.write(b"\x00\x00", True)
def set_quaternion_notification(self, state):
if self.quaternion_cccd is not None:
if state == True:
self.quaternion_cccd.write(b"\x01\x00", True)
else:
self.quaternion_cccd.write(b"\x00\x00", True)
def set_stepcount_notification(self, state):
if self.stepcnt_cccd is not None:
if state == True:
self.stepcnt_cccd.write(b"\x01\x00", True)
else:
self.stepcnt_cccd.write(b"\x00\x00", True)
def set_rawdata_notification(self, state):
if self.rawdata_cccd is not None:
if state == True:
self.rawdata_cccd.write(b"\x01\x00", True)
else:
self.rawdata_cccd.write(b"\x00\x00", True)
def set_euler_notification(self, state):
if self.euler_cccd is not None:
if state == True:
self.euler_cccd.write(b"\x01\x00", True)
else:
self.euler_cccd.write(b"\x00\x00", True)
def set_rotation_notification(self, state):
if self.rotation_cccd is not None:
if state == True:
self.rotation_cccd.write(b"\x01\x00", True)
else:
self.rotation_cccd.write(b"\x00\x00", True)
def set_heading_notification(self, state):
if self.heading_cccd is not None:
if state == True:
self.heading_cccd.write(b"\x01\x00", True)
else:
self.heading_cccd.write(b"\x00\x00", True)
def set_gravity_notification(self, state):
if self.gravity_cccd is not None:
if state == True:
self.gravity_cccd.write(b"\x01\x00", True)
else:
self.gravity_cccd.write(b"\x00\x00", True)
def configure(self, step_int=None, temp_comp_int=None, magnet_comp_int=None,
motion_freq=None, wake_on_motion=None):
if step_int is not None and self.config_char is not None:
current_config = binascii.b2a_hex(self.config_char.read())
new_config = write_uint16(current_config, step_int, 0)
self.config_char.write(binascii.a2b_hex(new_config), True)
if temp_comp_int is not None and self.config_char is not None:
current_config = binascii.b2a_hex(self.config_char.read())
new_config = write_uint16(current_config, temp_comp_int, 1)
self.config_char.write(binascii.a2b_hex(new_config), True)
if magnet_comp_int is not None and self.config_char is not None:
current_config = binascii.b2a_hex(self.config_char.read())
new_config = write_uint16(current_config, magnet_comp_int, 2)
self.config_char.write(binascii.a2b_hex(new_config), True)
if motion_freq is not None and self.config_char is not None:
current_config = binascii.b2a_hex(self.config_char.read())
new_config = write_uint16(current_config, motion_freq, 3)
self.config_char.write(binascii.a2b_hex(new_config), True)
if wake_on_motion is not None and self.config_char is not None:
current_config = binascii.b2a_hex(self.config_char.read())
new_config = write_uint8(current_config, wake_on_motion, 8)
self.config_char.write(binascii.a2b_hex(new_config), True)
def disable(self):
set_tap_notification(False)
set_orient_notification(False)
set_quaternion_notification(False)
set_stepcount_notification(False)
set_rawdata_notification(False)
set_euler_notification(False)
set_rotation_notification(False)
set_heading_notification(False)
set_gravity_notification(False)
|
{"/Device.py": ["/Services.py"], "/mainMotion.py": ["/Services.py", "/Device.py"]}
|
506
|
Nimunex/TFG
|
refs/heads/master
|
/mainMotion.py
|
##Main
from bluepy import btle
from bluepy.btle import Peripheral, DefaultDelegate
import os.path
import struct
import binascii
import sys
import datetime
import time
from time import time,sleep
import Services
from Services import EnvironmentService, BatterySensor, UserInterfaceService, MotionService, DeviceDelegate
import Device
from Device import Device
from urllib.request import urlopen
##Mac 1: FD:88:50:58:E7:45
##Mac 2: E4:F6:C5:F7:03:39
## MAC address of the target device (passed as the first CLI argument)
global MAC
if __name__ == "__main__":
    # First CLI argument is the Bluetooth MAC address of the target device.
    MAC = str(sys.argv[1])
    print("Connecting to " + MAC)
    Device1 = Device(MAC)
    print("Connected...")
    print("Bonding...")
    # "medium" security level triggers pairing/bonding with the peripheral.
    Device1.setSecurityLevel("medium")
    print("Bonded...")
    print("Enabling Services...")
    Device1.battery.enable()
    #~ Device1.ui.enable()
    Device1.motion.enable()
    # The delegate receives every notification callback from the device.
    Device1.setDelegate(DeviceDelegate())
    print('Services Enabled...')
    print('Battery Level(1): ', Device1.battery.b_read(), '%')
    #~ Device1.ui.set_led_mode_breathe(0x02, 50, 1000)
    ##Battery sensor
    #~ Device1.battery.set_battery_notification(True)
    ##UI service
    #~ Device1.ui.set_button_notification(True)
    ##Motion Services
    # Configure motion processing frequency to 5 (presumably Hz - confirm
    # with the firmware docs); only Euler-angle notifications are enabled.
    Device1.motion.configure(motion_freq=5)
    #~ Device1.motion.set_tap_notification(True)
    #~ Device1.motion.set_orient_notification(True)
    #~ Device1.motion.set_quaternion_notification(True)
    #~ Device1.motion.set_stepcount_notification(True)
    #~ Device1.motion.set_rawdata_notification(True)
    Device1.motion.set_euler_notification(True)
    #~ Device1.motion.set_rotation_notification(True)
    #~ Device1.motion.set_heading_notification(True)
    #~ Device1.motion.set_gravity_notification(True)
    try:
        # Block up to 180 s per notification; the delegate handles the data.
        while True:
            if Device1.waitForNotifications(180.0) :
                # handleNotification() was called
                continue
            print("Waiting...")
    except KeyboardInterrupt:
        # Ctrl-C: best-effort teardown of every enabled notification.
        print("Disabling Notifications and Indications...")
        Device1.battery.disable()
        Device1.ui.disable()
        Device1.motion.disable()
        print("Notifications and Indications Disabled...")
        print("Device Session Finished...")
|
{"/Device.py": ["/Services.py"], "/mainMotion.py": ["/Services.py", "/Device.py"]}
|
512
|
Frozen/jinja2-precompiler
|
refs/heads/master
|
/jinja2precompiler.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import OptionParser
import logging
import os
import re
import sys
import jinja2
def option_parse():
    """Build the command-line parser and return (parser, options, args)."""
    cli = OptionParser()
    option_specs = (
        (("-a", "--all"), dict(action="store_true", dest="all_files", help="all files")),
        (("-b", "--base"), dict(dest="base", default="", help="base dir name", metavar="DIR")),
        (("-c", "--pyc"), dict(action="store_true", dest="pyc", help="byte compile")),
        (("-d", "--debug"), dict(action="store_true", dest="debug", help="debug")),
        (("-e", "--ext"), dict(dest="extensions", default="html,xhtml", help="list of extension [default: %default]", metavar="EXT[,...]")),
        (("-m", "--modulename"), dict(action="store_true", dest="modulename", help="return compiled module file name")),
        (("-q", "--quit"), dict(action="store_true", dest="quit", help="no message")),
        (("-v", "--verbose"), dict(action="store_true", dest="verbose", help="more messages")),
    )
    for flags, kwargs in option_specs:
        cli.add_option(*flags, **kwargs)
    parsed_options, positional = cli.parse_args()
    return cli, parsed_options, positional
def get_module_filename(filename, py_compile=False):
    """Map a template name to its compiled module file name.

    With py_compile=True the byte-compiled ".pyc" variant is returned.
    """
    module_name = jinja2.ModuleLoader.get_module_filename(filename)
    return module_name + "c" if py_compile else module_name
def make_filter_func(target, env, extensions=None, all_files=False):
    """Return a predicate deciding whether a template needs (re)compiling.

    target: directory holding compiled modules; env: jinja2 Environment;
    extensions: allowed template extensions (None = any);
    all_files: recompile regardless of timestamps.
    """
    def needs_compile(tpl):
        # Wrong extension: never compile (names without a dot yield "").
        if extensions is not None and os.path.splitext(tpl)[1][1:] not in extensions:
            return False
        # Forced mode: everything that passed the extension gate is compiled.
        if all_files:
            return True
        _content, source_path, _uptodate = env.loader.get_source(env, tpl)
        compiled_path = os.path.join(target, get_module_filename(tpl))
        if not os.path.isfile(compiled_path):
            byte_compiled = compiled_path + "c"
            if not os.path.isfile(byte_compiled):
                # Neither .py nor .pyc exists yet.
                return True
            compiled_path = byte_compiled
        # Recompile only when the source is newer than the compiled module.
        return os.path.getmtime(source_path) > os.path.getmtime(compiled_path)
    return needs_compile
def _jinja2_version_at_least(major, minor):
    """Best-effort check that the installed jinja2 is >= major.minor.

    The original compared ``jinja2.__version__[:3] >= "2.8"`` as strings,
    which misclassifies two-digit minors: "2.10"[:3] == "2.1" < "2.8".
    """
    try:
        parts = jinja2.__version__.split(".")
        return (int(parts[0]), int(parts[1])) >= (major, minor)
    except (ValueError, IndexError):
        # Unparseable (dev/rc) version string: assume a modern jinja2.
        return True
# jinja2 2.8 supports walking symlink directories natively.
# see: https://github.com/mitsuhiko/jinja2/issues/71
if _jinja2_version_at_least(2, 8):
    from jinja2 import FileSystemLoader
else:
    class FileSystemLoader(jinja2.FileSystemLoader):
        """Backport of ``followlinks`` support for jinja2 < 2.8."""
        def __init__(self, searchpath, encoding='utf-8', followlinks=False):
            super(FileSystemLoader, self).__init__(searchpath, encoding)
            self.followlinks = followlinks
        def list_templates(self):
            """List template names under every searchpath, following symlinks."""
            found = set()
            for searchpath in self.searchpath:
                walk_dir = os.walk(searchpath, followlinks=self.followlinks)
                for dirpath, dirnames, filenames in walk_dir:
                    for filename in filenames:
                        # Strip the searchpath prefix and normalize to '/'.
                        template = os.path.join(dirpath, filename) \
                            [len(searchpath):].strip(os.path.sep) \
                            .replace(os.path.sep, '/')
                        if template[:2] == './':
                            template = template[2:]
                        if template not in found:
                            found.add(template)
            return sorted(found)
def main():
    """CLI entry point: compile jinja2 templates under the given path."""
    def logger(msg):
        # Default log_function for Environment.compile_templates.
        sys.stderr.write("%s\n" % msg)
    parser, options, args = option_parse()
    if options.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    elif options.verbose:
        logging.getLogger().setLevel(logging.INFO)
    elif options.quit:
        logging.getLogger().setLevel(logging.CRITICAL)
        # Deliberate shadowing: rebinding the local name to None silences
        # compile_templates' per-template output below.
        logger = None
    logging.debug("parse_options: options %s" % options)
    logging.debug("parse_options: args %s" % args)
    for i in args:
        if not os.path.exists(i):
            logging.warning("No such directory: '%s'" % i)
            sys.exit(1)
    if options.modulename:
        # -m mode: only print the compiled module filename(s), then exit.
        basedir = re.compile(options.base)
        results = list()
        for i in args:
            results.append(os.path.join(options.base, get_module_filename(basedir.sub("", i).lstrip("/"), py_compile=options.pyc)))
        print(" ".join(results))
        sys.exit(0)
    if len(args) != 1:
        parser.print_help()
        sys.exit(1)
    logging.info("Compiling bundled templates...")
    arg = args[0]
    # Normalize to a trailing separator so dirname/isdir behave uniformly.
    if not arg.endswith(os.path.sep):
        arg = "".join((arg, os.path.sep))
    env = jinja2.Environment(loader=FileSystemLoader([os.path.dirname(arg)], followlinks=True))
    if os.path.isdir(arg):
        if options.extensions is not None:
            extensions = options.extensions.split(",")
        else:
            extensions = None
        filter_func = make_filter_func(arg, env, extensions, options.all_files)
        target = arg
        logging.info("Now compiling templates in %s." % arg)
    else:
        # Single-file mode: compile exactly the named template.
        basename = os.path.basename(arg)
        filter_func = lambda x: x == basename
        target = os.path.dirname(arg)
        logging.info("Now compiling a template: %s." % arg)
    env.compile_templates(target, extensions=None,
        filter_func=filter_func, zip=None, log_function=logger,
        ignore_errors=False, py_compile=options.pyc)
    logging.info("Finished compiling bundled templates...")
if __name__== "__main__":
    # Default to WARNING; -d/-v/-q inside main() adjust the level.
    logging.getLogger().setLevel(logging.WARNING)
    main()
|
{"/tests/test_bugs.py": ["/jinja2precompiler.py"]}
|
513
|
Frozen/jinja2-precompiler
|
refs/heads/master
|
/tests/test_bugs.py
|
# -*- coding: utf-8 -*-
import jinja2
import pytest
import jinja2precompiler
def test_IndexError():
    """Names without an extension must be filtered out, not raise IndexError."""
    environment = jinja2.Environment(loader=jinja2.FileSystemLoader(["."]))
    accepts = jinja2precompiler.make_filter_func("", environment, extensions=["html"], all_files=True)
    for candidate, expected in (("test.html", True), ("test.xml", False), ("html", False)):
        assert accepts(candidate) == expected
|
{"/tests/test_bugs.py": ["/jinja2precompiler.py"]}
|
515
|
furotsu/turret_game
|
refs/heads/master
|
/player.py
|
import pygame
import sys
import math
from random import randint, choice
from constants import *
class Player(pygame.sprite.Sprite):
    """The player's cannon: aiming, charge-up, shooting and HUD bars."""
    def __init__(self, pos_x, pos_y, screen):
        super(Player, self).__init__()
        self.screen = screen
        self.original_image = pygame.image.load(player_img).convert_alpha()  # we rotate the original image instead
        self.image = self.original_image  # of the current one, to keep its quality
        self.rect = self.image.get_rect().move((pos_x, pos_y))
        # Charge bar; its width is scaled to shot_power while charging.
        self.charger = pygame.Surface((0, CHARGER_HEIGHT))
        self.charger.fill(pygame.Color('sienna2'))
        self.shot_power = 0
        # Cooldown bar; its height is scaled to the remaining cooldown frames.
        self.cooldown = pygame.Surface((COOLDOWN_WIDTH, 0))
        self.cooldown.fill(YELLOW)
        self.shot_cooldown = 0
        self.current_angle = START_CANNON_ANGLE
        self.motion = STOP  # per-frame rotation direction: UP/DOWN/STOP
        self.missile = None
        self.already_shoot = False
        self.is_charging = False
        self.increase_shot_power = True  # charge oscillates between 0 and max
    def draw(self):
        """Blit the cannon sprite at its current position."""
        self.screen.blit(self.image, self.rect)
    def shoot(self):
        """Spawn a missile using the current angle and charged power."""
        self.already_shoot = True
        # NOTE(review): the +15 degree offset matches draw_trajectory; it
        # presumably compensates the sprite's base orientation - confirm.
        self.missile = Missile(self.current_angle + 15, MISSILE_POS_X, MISSILE_POS_Y, self.shot_power,
                               self.screen)
    def get_missile_rect(self):
        """Return the live missile's rect, or None if nothing was fired yet."""
        if self.already_shoot:
            return self.missile.rect
        else:
            return None
    def action(self, event):
        """Translate a pygame event into rotation/charge/fire state changes."""
        if event.type == pygame.QUIT:
            sys.exit()
        else:
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_UP:
                    self.motion = UP
                elif event.key == pygame.K_DOWN:
                    self.motion = DOWN
                elif event.key == pygame.K_SPACE and not self.shot_cooldown:
                    # Space pressed: stop rotating and start charging power.
                    self.motion = STOP
                    self.is_charging = True
            elif event.type == pygame.KEYUP:
                if event.key in [pygame.K_UP, pygame.K_DOWN]:
                    self.motion = STOP
                elif event.key == pygame.K_SPACE and not self.shot_cooldown:
                    # Space released: fire with the accumulated power.
                    self.is_charging = False
                    self.shoot()
                    self.shot_cooldown = COOLDOWN
                    self.shot_power = 0
    def draw_game_elements(self):
        """Redraw the scene: cannon, charge/cooldown bars, missile."""
        self.screen.fill(WHITE)
        self.draw()
        if self.is_charging:
            self.draw_charger()
            self.draw_trajectory()
        if self.shot_cooldown:
            self.draw_cooldown()
        if self.already_shoot:
            self.missile.draw()
    def update_game_elements(self):
        """Per-frame state update: rotation, then charging or missile flight."""
        self.update_player(self.motion)
        if self.is_charging:
            self.update_charger()
        elif self.already_shoot:
            self.update_missile()
    def update_missile(self):
        """Advance the missile by one physics step."""
        self.missile.update_velocity()
        self.missile.move()
    def update_player(self, angle):
        """Rotate the cannon by *angle* degrees (the per-frame motion delta)."""
        self.image = pygame.transform.rotate(self.original_image, self.current_angle + angle)
        x, y, = self.rect.center
        self.current_angle += angle
        self.rect = self.image.get_rect()  # make image rotate around its center
        self.rect.center = (x, y)  # and preventing it from moving around screen
        self.update_cooldown()
    def update_charger(self):
        """Advance the oscillating power charge while space is held."""
        self.check_power_limits()
        if self.increase_shot_power:
            self.shot_power += POWER_CHARGE
        else:
            self.shot_power -= POWER_CHARGE
        self.charger = pygame.transform.scale(self.charger, (self.shot_power, CHARGER_HEIGHT))
    def update_cooldown(self):
        """Tick down the shot cooldown and rescale its HUD bar."""
        if self.shot_cooldown != 0:
            self.shot_cooldown -= 1
        self.cooldown = pygame.transform.scale(self.cooldown, (COOLDOWN_WIDTH, self.shot_cooldown))
    def check_power_limits(self):
        # Reverse the charge direction at either end of the power range.
        if self.shot_power >= MAX_SHOT_POWER:
            self.increase_shot_power = False
        elif self.shot_power <= 0:
            self.increase_shot_power = True
    def draw_charger(self):
        """Blit the charge bar above the cannon."""
        self.screen.blit(self.charger, (PLAYER_POS_X, PLAYER_POS_Y - 80))
    def draw_cooldown(self):
        """Blit the cooldown bar next to the cannon."""
        self.screen.blit(self.cooldown, (PLAYER_POS_X + 80, PLAYER_POS_Y - 100))
    def draw_trajectory(self):
        """Preview the flight path as dots, sampled at time steps 2..19."""
        time = 2
        if self.shot_power != 0:
            velocity_x = self.shot_power * math.cos((self.current_angle + 15) * math.pi / 180)
            velocity_y = -self.shot_power * math.sin((self.current_angle + 15) * math.pi / 180)
            while time != 20:
                pos_x = int(MISSILE_POS_X + velocity_x * time)
                pos_y = int(MISSILE_POS_Y + velocity_y * time - (ACCELERATION * time ** 2) / 2)
                pygame.draw.circle(self.screen, RED, (pos_x, pos_y), 10)
                time += 1
class Missile(pygame.sprite.Sprite):
    """A single projectile following simple ballistic motion."""
    def __init__(self, angle, pos_x, pos_y, shot_power, screen):
        super(Missile, self).__init__()
        self.image = pygame.image.load(missile_img).convert_alpha()
        self.screen = screen
        # Decompose the launch power into velocity components (angle in deg).
        self.velocity_x = shot_power * math.cos(angle * math.pi / 180)
        self.velocity_y = -shot_power * math.sin(angle * math.pi / 180)
        self.rect = self.image.get_rect().move((pos_x, pos_y))
    def update_velocity(self):
        """Apply one tick of gravity to the vertical velocity."""
        self.velocity_y -= ACCELERATION
    def move(self):
        """Advance the missile by its current velocity."""
        self.rect.x += self.velocity_x
        self.rect.y += self.velocity_y
    def draw(self):
        """Blit the missile at its current position."""
        self.screen.blit(self.image, self.rect)
class Enemies(pygame.sprite.Sprite):
    """A descending alien with a randomly picked sprite and spawn column."""
    def __init__(self, screen, *groups):
        super(Enemies, self).__init__()
        sprite_path = choice([enemy1_img, enemy2_img, enemy3_img])
        self.image = pygame.image.load(sprite_path).convert_alpha()
        # Spawn just above the visible area at a random horizontal position.
        self.rect = self.image.get_rect().move((randint(500, 700), -20))
        self.screen = screen
        self.velocity_x = ENEMY_VELOCITY_X
        self.velocity_y = ENEMY_VELOCITY_Y
    def move(self):
        """Advance one frame by the constant velocity vector."""
        self.rect.x += self.velocity_x
        self.rect.y += self.velocity_y
    def draw(self):
        """Blit the alien sprite onto the screen."""
        self.screen.blit(self.image, self.rect)
    def check_destiny(self):
        """Return True once the bottom edge is reached (then park off-screen)."""
        if display_height <= self.rect.y <= display_height + 100:
            # Park far below the screen so the check never fires twice.
            self.rect.y = display_height + 1000
            return True
        return False
class AlienArmy:
    """Owns the active enemies: spawning, movement, missile hits, defeat."""
    def __init__(self, player, screen):
        self.enemies = []
        self.screen = screen
        self.time_before_new_enemy = 3  # frames until the first spawn
        self.player = player
        self.kill_count = 0
    def update_enemies(self):
        """Advance every enemy one frame, draw it, and resolve missile hits."""
        self.check_army_integrity()
        for enemy in self.enemies:
            enemy.move()
            enemy.draw()
        self.enemy_hit(self.player.get_missile_rect())  # kill enemies hit by the missile
    def defeat(self):
        """Return True if any enemy reached the bottom of the screen."""
        return any(enemy.check_destiny() for enemy in self.enemies)
    def enemy_hit(self, missile):
        """Remove every enemy hit by *missile* and count the kills.

        BUG FIX: the original popped from ``self.enemies`` while iterating it
        with a manually tracked index, which skipped the element after each
        hit and miscounted; filtering into a fresh list is safe.
        """
        if missile is None:
            return
        survivors = []
        for enemy in self.enemies:
            if missile.colliderect(enemy):
                self.kill_count += 1
            else:
                survivors.append(enemy)
        self.enemies = survivors
    def add_enemy(self):
        """Spawn a new enemy."""
        self.enemies.append(Enemies(self.screen))
    def check_army_integrity(self):
        """Count down the spawn timer; spawn and reset when it reaches zero."""
        if self.time_before_new_enemy == 0:
            self.add_enemy()
            self.time_before_new_enemy = TIME_BETWEEN_ENEMIES
        self.time_before_new_enemy -= 1
    def kill_enemy(self, pos):
        """Remove the enemy at index *pos* and count the kill."""
        self.enemies.pop(pos)
        self.kill_count += 1
    def renew_kill_count(self):
        """Reset the session kill counter (new game)."""
        self.kill_count = 0
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
516
|
furotsu/turret_game
|
refs/heads/master
|
/menu.py
|
import pygame
from constants import *
class MenuButton:
    """A clickable menu button: its image, size, rectangle and type tag."""
    def __init__(self, pos_x, pos_y, image, button_type):
        # button_type ("quit"/"start"/...) is what Controller dispatches on.
        self.button_type = button_type
        self.image = pygame.image.load(image).convert_alpha()
        self.size = self.image.get_rect().size
        self.rect_pos = self.image.get_rect().move((pos_x, pos_y))
    def draw(self, screen):
        """Blit the button image at its rectangle."""
        screen.blit(self.image, self.rect_pos)
class MainMenu:
    """Holds the menu buttons and draws them all onto the screen."""
    def __init__(self, screen, *buttons):
        self.buttons = buttons
        self.screen = screen
    def draw(self):
        """Blit every registered button at its stored rectangle."""
        for menu_button in self.buttons:
            self.screen.blit(menu_button.image, menu_button.rect_pos)
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
517
|
furotsu/turret_game
|
refs/heads/master
|
/terrain.py
|
import pygame
from constants import *
class Terrain:
    """Placeholder for future terrain rendering; currently has no behavior."""
    def __init__(self):
        # Intentionally empty - the class is a stub instantiated by Controller.
        pass
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
518
|
furotsu/turret_game
|
refs/heads/master
|
/main.py
|
import pygame
from controller import *
from menu import *
from constants import *
def main():
    """Program entry point: init pygame, run the menu loop, start the game."""
    pygame.init()
    screen = pygame.display.set_mode((display_width, display_height))
    pygame.display.set_caption("Cannon defend v0.08")
    clock = pygame.time.Clock()
    controller = Controller(screen, pygame.time.Clock())
    while True:
        controller.set_menu()
        while not controller.game_started:  # main menu loop
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    return 0
                else:
                    controller.menu_action(event)
            controller.draw_new_screen()
            pygame.display.flip()
            clock.tick(FPS)
        # The start button flipped game_started: run a session, then fall
        # back to the menu when the session ends.
        controller.start_game()
if __name__ == "__main__":
    # Launch the game only when executed as a script, not on import.
    main()
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
519
|
furotsu/turret_game
|
refs/heads/master
|
/death_screen.py
|
import pygame
from constants import *
class Death_screen:
    """Game-over overlay: a colored panel with the score plus buttons."""
    def __init__(self, screen, *buttons):
        self.main_block = pygame.Surface((display_width - 200, display_height - 100))
        self.main_block.fill(pygame.Color('sienna2'))
        self.screen = screen
        self.buttons = buttons
    def draw(self, score):
        """Render the panel, the score line and all attached buttons."""
        self.draw_main_block()
        score_font = pygame.font.Font('freesansbold.ttf', 16)
        score_label = score_font.render("Your score is: {}".format(score), True, BLACK)
        self.screen.blit(score_label, (80, 70))
        for overlay_button in self.buttons:
            overlay_button.draw(self.screen)
    def draw_main_block(self):
        """Blit the background panel at its fixed offset."""
        self.screen.blit(self.main_block, (100, 50))
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
520
|
furotsu/turret_game
|
refs/heads/master
|
/controller.py
|
import pygame
import sys
import menu
import player
import leaderboard
import death_screen
import terrain
from constants import *
class Controller:
    """
    Central game coordinator: main menu, leaderboard, death screen,
    player-name entry, pause handling and the per-frame game loop.
    """
    def __init__(self, screen, clock):
        self.screen = screen
        self.clock = clock
        self.game_started = False  # flipped by the menu's start button
        # Menu buttons; button_type strings drive trigger() dispatch.
        self.quit_button = menu.MenuButton(display_width / 2 - 150, display_height / 2, quit_button_img, "quit")
        self.start_button = menu.MenuButton(display_width / 2 - 150, display_height / 4, start_button_img, "start")
        self.leaderboard_button = menu.MenuButton(display_width / 2 - 450, display_height / 6, leaderboard_button_img,
                                                  "leaderboard")
        self.back_button = menu.MenuButton(display_width / 4, display_height - 100, back_button_img, "back")
        self.menu_table = menu.MainMenu(self.screen, self.quit_button, self.start_button, self.leaderboard_button)
        self.leaderboard_table = leaderboard.Leaderboard(leaderboard_storage, screen)
        self.create_start_leaderboard()
        self.death_screen_table = death_screen.Death_screen(screen, self.back_button)
        self.game_surface = terrain.Terrain()
        self.player = player.Player(PLAYER_POS_X, PLAYER_POS_Y, self.screen)
        self.army = player.AlienArmy(self.player, self.screen)
    def menu_action(self, event):
        """Dispatch a mouse click to the menu button it landed on, if any."""
        if event.type == pygame.MOUSEBUTTONDOWN:
            for button in self.menu_table.buttons:
                if button.rect_pos.collidepoint(event.pos):  # trigger pressed button
                    self.trigger(button)
        else:
            pass
    def back_button_action(self, event):
        """Handle a click on the shared back button."""
        if event.type == pygame.MOUSEBUTTONDOWN and self.back_button.rect_pos.collidepoint(event.pos):
            self.back_pressed()
    def trigger(self, button):
        """Route a pressed menu button to its handler by button_type."""
        if button.button_type == "quit":
            self.quit_pressed()
        elif button.button_type == "start":
            self.start_pressed()
        elif button.button_type == "leaderboard":
            self.leaderboard_pressed()
            self.show_leaderboard()
    def quit_pressed(self):
        sys.exit()
    def start_pressed(self):
        self.game_started = True  # make main game loop in main.py start
    def leaderboard_pressed(self):
        self.leaderboard_table.closed = False
    def back_pressed(self):
        """Close whichever screen is open: leaderboard first, then the game."""
        if not self.leaderboard_table.closed:
            self.leaderboard_table.closed = True
            self.leaderboard_table.renew_board()
        elif self.game_started:
            self.game_started = False
    def show_leaderboard(self):
        """Modal loop rendering the leaderboard until 'back' is clicked."""
        self.leaderboard_table.generate_text()
        self.leaderboard_table.render_text()
        while not self.leaderboard_table.closed:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    sys.exit()
                else:
                    self.back_button_action(event)
            self.screen.fill(WHITE)
            self.leaderboard_table.draw()
            self.draw_back_button()
            pygame.display.flip()
            self.clock.tick(FPS)
    def create_start_leaderboard(self):
        """Seed the leaderboard with the predefined computer scores."""
        for key, item in computer_scores.items():
            self.leaderboard_table.add_score(key, item)
    def draw_back_button(self):
        self.back_button.draw(self.screen)
    def draw_new_screen(self):
        """Clear the screen and redraw the main menu."""
        self.screen.fill(WHITE)
        self.set_menu()
    def set_menu(self):
        self.menu_table.draw()
    def start_game(self):
        """Ask for the player's name, then enter the game loop."""
        self.player_name = self.get_player_name()
        self.screen.fill(WHITE)
        self.game_loop()
    def get_player_name(self):
        """Modal name-entry loop; letters a-z type, backspace deletes.

        NOTE(review): entry is confirmed with the '0' key (pygame.K_0);
        K_RETURN looks like the intended key - confirm before changing.
        """
        player_name = ""
        flag = True
        while flag:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    sys.exit()
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_0:
                        return player_name
                    elif event.key == pygame.K_BACKSPACE:
                        player_name = player_name[:-1]  # delete last element of name if backspace pressed
                    elif 97 <= event.key <= 122:
                        # Key codes 97-122 are the ASCII letters a-z.
                        player_name += chr(event.key)
                    else:
                        pass
            self.display_player_name(player_name)
    def display_player_name(self, player_name):
        """Draw the name-entry box with the name typed so far."""
        font = pygame.font.Font('freesansbold.ttf', 16)
        left = (display_width / 2) - 250
        top = (display_height / 2) - 100
        self.screen.fill(WHITE)
        pygame.draw.rect(self.screen, YELLOW, (left, top, 320, 150))
        self.screen.blit(font.render(player_name, True, BLACK), (left + 80, top + 70))
        pygame.display.flip()
    def game_over(self):
        """Record the score, show the death screen, wait for 'back'."""
        self.leaderboard_table.add_score(self.player_name, self.army.kill_count)
        self.death_screen_table.draw(self.army.kill_count)
        self.army.renew_kill_count()
        # back_pressed() clears game_started, which ends this loop.
        while self.game_started:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    sys.exit()
                else:
                    self.back_button_action(event)
            pygame.display.flip()
    def check_for_pause(self, event):
        """Pause the game when ESC is pressed."""
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_ESCAPE:
                self.pause_game()
    def pause_game(self):
        """Freeze the game until ESC is pressed again.

        NOTE(review): the back button is drawn but its clicks are not
        handled here - only ESC resumes; confirm that is intended.
        """
        while True:
            self.draw_back_button()
            pygame.display.flip()
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    sys.exit()
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_ESCAPE:
                        return
    def game_loop(self):
        """Main per-frame loop: input, update, draw, defeat check."""
        self.player.draw()
        while self.game_started:
            for event in pygame.event.get():
                self.player.action(event)
                self.check_for_pause(event)
            self.player.update_game_elements()
            self.player.draw_game_elements()
            self.army.update_enemies()
            if self.army.defeat():
                self.game_over()
            pygame.display.flip()
            self.clock.tick(FPS)
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
521
|
furotsu/turret_game
|
refs/heads/master
|
/leaderboard.py
|
import shelve
import pygame
from constants import *
class Leaderboard:
    """Persistent high-score table backed by a ``shelve`` file.

    Fixes the original's duplicated ``sorted_leaderboard`` initialization
    and the double dict lookups in add_score.
    """
    def __init__(self, filename, screen):
        # NOTE(review): the shelf is never closed explicitly; entries are
        # still persisted because shelve writes through on item assignment.
        self.file = shelve.open(filename)
        self.closed = True  # True while the leaderboard screen is hidden
        self.screen = screen
        self.sorted_leaderboard = []  # (name, score) pairs, ascending by score
        self.text = []                # formatted lines, best score first
        self.rendered_text = []       # rendered surfaces for self.text
    def draw(self):
        """Blit the rendered score lines top to bottom, 20 px apart."""
        offset = 0
        for line in self.rendered_text:
            self.screen.blit(line, (display_width / 4, 20 + offset))
            offset += 20
    def generate_text(self):
        """Fill self.text with 'name |==| score' lines, highest score first."""
        self.sort_leaderboard()
        for player_name, score in reversed(self.sorted_leaderboard):
            self.text.append("{} |==| {}".format(player_name, score))
    def render_text(self):
        """Render each text line to a surface for draw()."""
        font = pygame.font.Font('freesansbold.ttf', 16)
        for line in self.text:
            self.rendered_text.append(font.render(line, True, BLACK, WHITE))
    def add_score(self, player_name, score):
        """Record *score* for *player_name*, keeping only their best result."""
        previous = self.file.get(player_name)
        if previous is None or score > previous:
            self.file[player_name] = score
    def renew_board(self):
        """Drop cached text so the board is regenerated on the next show."""
        self.text = []
        self.rendered_text = []
    def sort_leaderboard(self):
        """Rebuild sorted_leaderboard as (name, score) pairs, ascending."""
        self.sorted_leaderboard = sorted(self.file.items(), key=lambda item: item[1])
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
522
|
furotsu/turret_game
|
refs/heads/master
|
/constants.py
|
import os.path
# --- window ---
display_height = 600
display_width = 1000
# --- HUD bars ---
CHARGER_HEIGHT = 60
COOLDOWN_WIDTH = 50
# --- player / cannon ---
PLAYER_POS_X = 50
PLAYER_POS_Y = 430
START_CANNON_ANGLE = 25
MISSILE_POS_X = 70
MISSILE_POS_Y = 470
ACCELERATION = -2
MAX_SHOT_POWER = 50
POWER_CHARGE = 5
COOLDOWN = 40
# --- enemies ---
ENEMY_VELOCITY_X = 0
ENEMY_VELOCITY_Y = 4
TIME_BETWEEN_ENEMIES = 100
# --- colors (RGB) ---
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
YELLOW = (255, 255, 51)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
# --- rotation directions ---
STOP = 0
UP = 1
DOWN = -1
FPS = 30
# Asset paths inside the data folder.
start_button_img = os.path.join("data", "start_button.png")
quit_button_img = os.path.join("data", "quit_button.png")
leaderboard_button_img = os.path.join("data", "leaderboard_button.png")
back_button_img = os.path.join("data", "back_button.png")
player_img = os.path.join("data", "player_image.png")
missile_img = os.path.join("data", "missile_image.png")
enemy1_img = os.path.join("data", "enemy1.png")
enemy2_img = os.path.join("data", "enemy2.png")
enemy3_img = os.path.join("data", "enemy3.png")
leaderboard_storage = os.path.join("data", "leaderboard.db")
# Pre-seeded "computer" scores shown on a fresh leaderboard.
computer_scores = {
    "Vlad": 100000,
    "Misha": 5000,
    "Arthur": 2500,
    "Max": 2000,
    "Kirrilishche": 10,
}
|
{"/player.py": ["/constants.py"], "/menu.py": ["/constants.py"], "/terrain.py": ["/constants.py"], "/main.py": ["/controller.py", "/menu.py", "/constants.py"], "/death_screen.py": ["/constants.py"], "/controller.py": ["/menu.py", "/player.py", "/leaderboard.py", "/death_screen.py", "/terrain.py", "/constants.py"], "/leaderboard.py": ["/constants.py"]}
|
529
|
junprog/contrastive-baseline
|
refs/heads/main
|
/train.py
|
from utils.contrastive_trainer import CoTrainer
from utils.simsiam_trainer import SimSiamTrainer
import argparse
import os
import math
import torch
args = None
def parse_args():
    """Build and parse the command-line arguments for contrastive training.

    Returns:
        argparse.Namespace with all training options.
    """
    parser = argparse.ArgumentParser(description='Train ')
    # data / output locations
    parser.add_argument('--data-dir', default='/mnt/hdd02/process-ucf',
                        help='training data directory')
    parser.add_argument('--save-dir', default='D:/exp_results',
                        help='directory to save models.')
    parser.add_argument('--cifar10', action='store_true',
                        help='use cifar10 dataset')
    # model selection
    parser.add_argument('--SimSiam', action='store_true',
                        help='try Simple Siamese Net')
    parser.add_argument('--arch', type=str, default='vgg19',
                        help='the model architecture [vgg19, vgg19_bn, resnet18]')
    parser.add_argument('--pattern-feature', type=str, default='conv-512x1x1',
                        help='the feature to contrast [conv-512x1x1, fc-4096]')
    parser.add_argument('--projection', action='store_true',
                        help='use MLP projection')
    parser.add_argument('--prediction', action='store_true',
                        help='use MLP prediction')
    parser.add_argument('--mlp-bn', action='store_true',
                        help='use MLP Batch Normalization')
    # optimization
    parser.add_argument('--lr', type=float, default=1e-2,
                        help='the initial learning rate')
    parser.add_argument('--weight-decay', type=float, default=1e-4,
                        help='the weight decay')
    parser.add_argument('--momentum', type=float, default=0.9,
                        help='the momentum')
    # patch-pairing options
    parser.add_argument('--div-row', type=int, default=3,
                        help='one side`s number of pathes')
    parser.add_argument('--div-col', type=int, default=3,
                        help='one side`s number of pathes')
    # fixed: help text previously said 'the weight decay' (copy-paste error)
    parser.add_argument('--aug', action='store_true',
                        help='apply data augmentation to the patch pairs')
    parser.add_argument('--margin', type=float, default=1.0,
                        help='the margin of loss function')
    # checkpointing / schedule
    parser.add_argument('--resume', default='',
                        help='the path of resume training model')
    parser.add_argument('--max-model-num', type=int, default=30,
                        help='max models num to save ')
    # NOTE: kept as --check_point (underscore) for CLI backward compatibility
    parser.add_argument('--check_point', type=int, default=100,
                        help='milestone of save model checkpoint')
    parser.add_argument('--max-epoch', type=int, default=300,
                        help='max training epoch')
    parser.add_argument('--val-epoch', type=int, default=10,
                        help='the num of steps to log training information')
    parser.add_argument('--val-start', type=int, default=0,
                        help='the epoch start to val')
    # runtime environment
    parser.add_argument('--batch-size', type=int, default=8,
                        help='train batch size')
    parser.add_argument('--device', default='0', help='assign device')
    parser.add_argument('--num-workers', type=int, default=8,
                        help='the num of training process')
    parser.add_argument('--crop-size', type=int, default=224,
                        help='the crop size of the train image')
    parser.add_argument('--visual-num', type=int, default=4,
                        help='the number of visualize images')
    return parser.parse_args()
if __name__ == '__main__':
    args = parse_args()
    # let cuDNN pick the fastest conv algorithms for fixed input sizes
    torch.backends.cudnn.benchmark = True
    # restrict visible GPUs; strip any stray '-' from the --device value
    os.environ['CUDA_VISIBLE_DEVICES'] = args.device.strip('-')
    # pick the SimSiam trainer or the plain contrastive trainer
    trainer = SimSiamTrainer(args) if args.SimSiam else CoTrainer(args)
    trainer.setup()
    trainer.train()
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
530
|
junprog/contrastive-baseline
|
refs/heads/main
|
/datasets/cifar10.py
|
from typing import Callable, Optional
import random
from PIL import Image
import numpy as np
import torch
import torchvision
from torchvision import transforms
from torchvision.datasets import CIFAR10
# fix both RNGs so the random positive/negative pairing below is reproducible
np.random.seed(765)
random.seed(765)
class SupervisedPosNegCifar10(torch.utils.data.Dataset):
    """Label-aware pair dataset over CIFAR-10.

    The wrapped dataset is randomly split 80/20 into anchors and
    positive/negative candidates. Each item is (anchor, posneg, target,
    label): with probability 0.5 the partner is drawn from the same class
    (target 1), otherwise from a different class (target 0).
    """
    def __init__(self, dataset, phase):
        # split by some thresholds here 80% anchors, 20% for posnegs
        lengths = [int(len(dataset)*0.8), int(len(dataset)*0.2)]
        self.anchors, self.posnegs = torch.utils.data.random_split(dataset, lengths)
        # train: random resized crop + horizontal flip; otherwise resize only
        if phase == 'train':
            self.anchor_transform = transforms.Compose([transforms.Resize(64),
                                        transforms.RandomResizedCrop(scale=(0.16, 1), ratio=(0.75, 1.33), size=64),
                                        transforms.RandomHorizontalFlip(0.5),
                                        transforms.ToTensor(),
                                        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])
            self.posneg_transform = transforms.Compose([transforms.Resize(64),
                                        transforms.RandomResizedCrop(scale=(0.16, 1), ratio=(0.75, 1.33), size=64),
                                        transforms.RandomHorizontalFlip(0.5),
                                        transforms.ToTensor(),
                                        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])
        else:
            self.anchor_transform = transforms.Compose([transforms.Resize(64),
                                        transforms.ToTensor(),
                                        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])
            self.posneg_transform = transforms.Compose([transforms.Resize(64),
                                        transforms.ToTensor(),
                                        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])
    def __len__(self):
        # dataset length is the number of anchors (the 80% split)
        return len(self.anchors)
    def __getitem__(self, index):
        anchor, label = self.anchors[index]
        if self.anchor_transform is not None:
            anchor = self.anchor_transform(anchor)
        # now pair this up with an image from the same class in the second stream
        if random.random() > 0.5:
            # positive: indices of all samples sharing the anchor's label,
            # restricted (via np.in1d) to those inside the posneg Subset
            A = np.where(np.array(self.posnegs.dataset.targets) == label)[0]
            posneg_idx = np.random.choice(A[np.in1d(A, self.posnegs.indices)])
            # map the underlying-dataset index back to the Subset's position
            posneg, label = self.posnegs[np.where(self.posnegs.indices==posneg_idx)[0][0]]
            target = torch.tensor([1]).long()
        else:
            # negative: same lookup but over samples with a different label
            A = np.where(np.array(self.posnegs.dataset.targets) != label)[0]
            posneg_idx = np.random.choice(A[np.in1d(A, self.posnegs.indices)])
            # NOTE(review): this overwrites `label` with the partner's class;
            # the returned label is the posneg's, not the anchor's — confirm intended.
            posneg, label = self.posnegs[np.where(self.posnegs.indices==posneg_idx)[0][0]]
            target = torch.tensor([0]).long()
        if self.posneg_transform is not None:
            posneg = self.posneg_transform(posneg)
        return anchor, posneg, target, label
class PosNegCifar10(torch.utils.data.Dataset):
    """Unsupervised pair dataset over CIFAR-10.

    Each item is (anchor, posneg, target, label): with probability 0.5
    the partner is the anchor itself (target 1), otherwise a different
    random sample (target 0).
    """

    def __init__(self, dataset, phase):
        self.dataset = dataset
        # train: random resized crop + flip; otherwise a plain resize.
        # the anchor and partner pipelines are identical and stateless,
        # so one Compose instance serves both streams.
        if phase == 'train':
            pipeline = transforms.Compose([
                transforms.Resize(64),
                transforms.RandomResizedCrop(scale=(0.16, 1), ratio=(0.75, 1.33), size=64),
                transforms.RandomHorizontalFlip(0.5),
                transforms.ToTensor(),
                transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
            ])
        else:
            pipeline = transforms.Compose([
                transforms.Resize(64),
                transforms.ToTensor(),
                transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
            ])
        self.anchor_transform = pipeline
        self.posneg_transform = pipeline

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, index):
        anchor, label = self.dataset[index]
        # pair the anchor either with itself (positive) or another sample
        if random.random() > 0.5:
            posneg = anchor
            target = torch.tensor([1]).long()
        else:
            # keep drawing until the partner index differs from the anchor's
            other = index
            while other == index:
                other = random.randint(0, len(self.dataset) - 1)
            posneg, label = self.dataset[other]
            target = torch.tensor([0]).long()
        if self.anchor_transform is not None:
            anchor = self.anchor_transform(anchor)
        if self.posneg_transform is not None:
            posneg = self.posneg_transform(posneg)
        return anchor, posneg, target, label
### Simple Siamese code
imagenet_mean_std = [[0.485, 0.456, 0.406], [0.229, 0.224, 0.225]]


class SimSiamTransform():
    """Two-view augmentation used by SimSiam.

    Calling the transform on one image returns two independently
    augmented views (x1, x2).
    """

    def __init__(self, image_size, train, mean_std=imagenet_mean_std):
        self.train = train
        if self.train:
            # by default simsiam uses image size 224
            image_size = 224 if image_size is None else image_size
            # 50% chance of gaussian blur as in SimCLR; disabled for small
            # (cifar-sized, <=32px) inputs
            p_blur = 0.5 if image_size > 32 else 0
            self.transform = transforms.Compose([
                transforms.RandomResizedCrop(image_size, scale=(0.2, 1.0)),
                transforms.RandomHorizontalFlip(),
                transforms.RandomApply([transforms.ColorJitter(0.4,0.4,0.4,0.1)], p=0.8),
                transforms.RandomGrayscale(p=0.2),
                transforms.RandomApply([transforms.GaussianBlur(kernel_size=image_size//20*2+1, sigma=(0.1, 2.0))], p=p_blur),
                transforms.ToTensor(),
                transforms.Normalize(*mean_std)
            ])
        else:
            # eval: upscale by 8/7 (224 -> 256) then center-crop back
            self.transform = transforms.Compose([
                transforms.Resize(int(image_size*(8/7)), interpolation=Image.BICUBIC),
                transforms.CenterCrop(image_size),
                transforms.ToTensor(),
                transforms.Normalize(*mean_std)
            ])

    def __call__(self, x):
        # two independent draws of the same stochastic pipeline
        return self.transform(x), self.transform(x)
def get_simsiam_dataset(args, phase, download=True, debug_subset_size=None):
    """Build a CIFAR-10 dataset for SimSiam pre-training or linear evaluation.

    Args:
        args: namespace providing ``crop_size``.
        phase: 'train', 'val', 'linear_train' or 'linear_val'.
        download: forwarded to torchvision's CIFAR10 (downloads if missing).
        debug_subset_size: if given, truncate to this many samples.

    Returns:
        A torchvision CIFAR10 dataset (possibly wrapped in a Subset).

    Raises:
        ValueError: if ``phase`` is not one of the recognised phases.
    """
    if phase == 'train':
        train = True
        transform = SimSiamTransform(args.crop_size, train)
    elif phase == 'val':
        train = False
        transform = SimSiamTransform(args.crop_size, train)
    elif phase == 'linear_train':
        train = True
        transform = transforms.Compose([
            transforms.RandomResizedCrop(args.crop_size, scale=(0.08, 1.0), ratio=(3.0/4.0,4.0/3.0), interpolation=Image.BICUBIC),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize(*imagenet_mean_std)
        ])
    elif phase == 'linear_val':
        train = False
        transform = transforms.Compose([
            transforms.Resize(int(args.crop_size*(8/7)), interpolation=Image.BICUBIC), # 224 -> 256
            transforms.CenterCrop(args.crop_size),
            transforms.ToTensor(),
            transforms.Normalize(*imagenet_mean_std)
        ])
    else:
        # fail fast: previously an unknown phase fell through and crashed
        # below with a NameError on the undefined `train`/`transform`
        raise ValueError('unknown phase: {!r}'.format(phase))
    dataset = torchvision.datasets.CIFAR10(root="CIFAR10_Dataset", train=train, transform=transform, download=download)
    if debug_subset_size is not None:
        dataset = torch.utils.data.Subset(dataset, range(0, debug_subset_size)) # take only one batch
        # re-expose the underlying dataset's metadata on the Subset wrapper
        dataset.classes = dataset.dataset.classes
        dataset.targets = dataset.dataset.targets
    return dataset
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
531
|
junprog/contrastive-baseline
|
refs/heads/main
|
/models/l2_contrastive_loss.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class L2ContrastiveLoss(nn.Module):
    """Euclidean contrastive loss.

    Pulls together embeddings whose target == 1 and pushes apart those
    with target == 0 up to ``margin``.

    Args:
        output1 & output2 : [N, dim] embeddings.
        target : [N] (or [N, 1]) binary same/different labels.
    """

    def __init__(self, margin=1.0):
        super().__init__()
        self.margin = margin
        self.eps = 1e-9  # keeps sqrt() well-behaved at zero distance

    def forward(self, output1, output2, target, size_average=True):
        target = target.squeeze()
        sq_dist = (output2 - output1).pow(2).sum(1)  # squared L2 distances
        # similar pairs: penalise distance; dissimilar: hinge on the margin
        pos_term = target.float() * sq_dist
        hinge = F.relu(self.margin - (sq_dist + self.eps).sqrt())
        neg_term = (1 - target).float() * hinge.pow(2)
        losses = 0.5 * (pos_term + neg_term)
        return losses.mean() if size_average else losses.sum()
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
532
|
junprog/contrastive-baseline
|
refs/heads/main
|
/datasets/spatial.py
|
# in : original image
# out : cropped img1 (anchor)
# cropped img2 (compete)
# target (positive img1 - img2 : 1, negative img1 - img2 : 0)
import os
from glob import glob
import random
import numpy as np
from PIL import Image
from PIL import ImageFilter
import torch
import torch.utils.data as data
import torchvision.transforms.functional as F
from torchvision import transforms
random.seed(765)
def divide_patches(img, row, col):
    """Split *img* into a row x col grid of equal-size crops, row-major.

    Remainder pixels on the right/bottom edges are discarded.
    """
    tile_w = int(img.size[0] / col)
    tile_h = int(img.size[1] / row)
    tiles = []
    for r_idx, top in enumerate(range(0, img.size[1], tile_h)):
        if r_idx == row:
            break
        for c_idx, left in enumerate(range(0, img.size[0], tile_w)):
            if c_idx == col:
                break
            tiles.append(img.crop((left, top, left + tile_w, top + tile_h)))
    return tiles
def create_pos_pair(patches):
    """Return one randomly chosen patch twice with positive target [1]."""
    chosen = patches[random.randint(0, len(patches) - 1)]
    return chosen, chosen, np.array([1])
def create_neg_pair(patches):
    """Return two distinct randomly chosen patches with negative target [0]."""
    # sample over the FULL index range: the previous
    # `range(0, len(patches)-1)` silently excluded the last patch and
    # raised ValueError when only two patches were available.
    idx1, idx2 = random.sample(range(len(patches)), k=2)
    return patches[idx1], patches[idx2], np.array([0])
def random_crop(im_h, im_w, crop_h, crop_w):
    """Sample a random top-left corner for a crop_h x crop_w crop
    fully inside an im_h x im_w image; returns (top, left, crop_h, crop_w)."""
    top = random.randint(0, im_h - crop_h)
    left = random.randint(0, im_w - crop_w)
    return top, left, crop_h, crop_w
class GaussianBlur(object):
    """Gaussian blur augmentation in SimCLR https://arxiv.org/abs/2002.05709"""

    def __init__(self, sigma=[.1, 2.]):
        # NOTE: the mutable default is shared across instances but never mutated here
        self.sigma = sigma

    def __call__(self, x):
        # draw a random radius within [sigma_min, sigma_max] per call
        radius = random.uniform(*self.sigma)
        return x.filter(ImageFilter.GaussianBlur(radius=radius))
class PosNegSpatialDataset(data.Dataset):
    """Positive/negative patch pairs from unlabeled images.

    Each image is split into a d_row x d_col grid; with probability 0.5
    the pair is the same patch twice (target 1), otherwise two distinct
    patches (target 0).
    """
    # divide_num : 3 -> 3x3= 9 paches
    def __init__(self, data_path, crop_size, divide_num=(3,3), aug=True):
        self.data_path = data_path
        self.im_list = sorted(glob(os.path.join(self.data_path, '*.jpg')))
        self.c_size = crop_size
        self.d_row = divide_num[0]
        self.d_col = divide_num[1]
        # `aug` toggles SimCLR-style color/blur/flip augmentation;
        # note self.aug ends up holding a transform, not the bool
        if aug:
            self.aug = transforms.Compose([
                transforms.CenterCrop(self.c_size),
                transforms.RandomApply([transforms.ColorJitter(0.4, 0.4, 0.4, 0.1)], p=0.8),
                transforms.RandomGrayscale(p=0.2),
                transforms.RandomApply([GaussianBlur([.1, 2.])], p=0.5),
                transforms.RandomHorizontalFlip()
            ])
        else:
            self.aug = transforms.CenterCrop(self.c_size)
        # tensor conversion + ImageNet normalization, applied after `aug`
        self.trans = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])
    def __len__(self):
        return len(self.im_list)
    def __getitem__(self, index):
        img_path = self.im_list[index]
        img = Image.open(img_path).convert('RGB')
        patches = divide_patches(img, self.d_row, self.d_col)
        # 50/50 split between same-patch (positive) and distinct-patch (negative) pairs
        if random.random() > 0.5:
            img1, img2, target = create_pos_pair(patches)
        else:
            img1, img2, target = create_neg_pair(patches)
        img1 = self.aug(img1)
        img2 = self.aug(img2)
        target = torch.from_numpy(target).long()
        img1 = self.trans(img1)
        img2 = self.trans(img2)
        # NOTE(review): the trailing None would break torch's default
        # collate_fn — presumably a custom collate drops it; verify caller.
        return img1, img2, target, None
class SpatialDataset(data.Dataset):
    """Two augmented views of the same random image patch (SimSiam-style).

    Reads image paths from `<data_path>/<phase>.txt`, splits each image
    into a d_row x d_col grid and returns ((img1, img2), label) where
    both views come from the same patch location — only positive pairs
    (label np.array([1])) are ever produced.
    """
    # divide_num : 3 -> 3x3= 9 paches
    def __init__(self, phase, data_path, crop_size, divide_num=(3,3), aug=True):
        with open(os.path.join(data_path, '{}.txt'.format(phase)), 'r') as f:
            im_list = f.readlines()
        self.im_list = [im_name.replace('\n', '') for im_name in im_list]
        self.c_size = crop_size
        self.d_row = divide_num[0]
        self.d_col = divide_num[1]
        # NOTE(review): the `aug` parameter is accepted but never used here;
        # the pipeline below always augments — confirm whether intended.
        self.trans = transforms.Compose([
            transforms.RandomApply([transforms.ColorJitter(0.4, 0.4, 0.4, 0.1)], p=0.8),
            transforms.RandomGrayscale(p=0.2),
            transforms.RandomApply([GaussianBlur([.1, 2.])], p=0.5),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])
    def __len__(self):
        return len(self.im_list)
    def __getitem__(self, index):
        img_path = self.im_list[index]
        img = Image.open(img_path).convert('RGB')
        patches = divide_patches(img, self.d_row, self.d_col)
        # always a positive pair: the same patch, augmented twice below
        img1, img2, label = create_pos_pair(patches)
        assert img1.size == img2.size
        wd, ht = img1.size
        # identical random crop window for both views
        i, j, h, w = random_crop(ht, wd, self.c_size, self.c_size)
        img1 = F.crop(img1, i, j, h, w)
        img2 = F.crop(img2, i, j, h, w)
        # stochastic augmentations applied independently to each view
        img1 = self.trans(img1)
        img2 = self.trans(img2)
        imgs = (img1, img2)
        return imgs, label
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
533
|
junprog/contrastive-baseline
|
refs/heads/main
|
/exp.py
|
# Scratch / exploration script: pokes at torchvision models and the
# patch-splitting logic used by the dataset classes.
import torch
import torchvision
from PIL import Image
from matplotlib import pyplot as plt
import random
# instantiate an untrained VGG-19 and inspect its feature-map output size
model = torchvision.models.__dict__['vgg19']()
print(model)
img = torch.rand(1,3,256,256)
out = model.features(img)
print(out.size())
import torchvision.transforms as trans
# sanity-check RandomCrop of a 256x256 tensor down to 224x224
crop = trans.RandomCrop(224)
img = torch.rand(1,3,256,256)
out = crop(img)
print(out.size())
def divide_patches(img, row, col):
    """Split *img* into a row x col grid of equal-size crops, row-major.

    Remainder pixels on the right/bottom edges are discarded.
    """
    tile_w = int(img.size[0] / col)
    tile_h = int(img.size[1] / row)
    tiles = []
    for r_idx, top in enumerate(range(0, img.size[1], tile_h)):
        if r_idx == row:
            break
        for c_idx, left in enumerate(range(0, img.size[0], tile_w)):
            if c_idx == col:
                break
            tiles.append(img.crop((left, top, left + tile_w, top + tile_h)))
    return tiles
def display_images(
    images,
    row=3, col=3, width=10, height=4, max_images=15,
    label_wrap_length=50, label_font_size=8):
    """Show up to row*col images in a matplotlib grid.

    Args:
        images: sequence of PIL images (or anything imshow accepts).
        row, col: grid dimensions.
        width, height: figure size hints in inches.
        max_images: requested cap; effectively limited to row*col.
    """
    if not images:
        print("No images to display.")
        return
    # clamp to the grid capacity: plt.subplot(row, col, i) raises once
    # i exceeds row*col, which the old max_images=15 default allowed
    limit = min(max_images, row * col)
    if len(images) > limit:
        print(f"Showing {limit} images of {len(images)}:")
        images = images[0:limit]
    height = max(height, int(len(images)/col) * height)
    plt.figure(figsize=(width, height))
    for i, image in enumerate(images):
        plt.subplot(row, col, i + 1)
        plt.imshow(image)
    plt.show()
# visual sanity check: split a sample image into a 2x3 grid and display it
image = Image.open("/mnt/hdd02/shibuya_scramble/image_000294.jpg").convert("RGB")
p = divide_patches(image, 2, 3)
print(len(p))
display_images(p, row=2, col=3)
def create_pos_pair(patches):
    """Pick one patch at random and pair it with itself (label 1)."""
    pick = patches[random.randint(0, len(patches) - 1)]
    return pick, pick, 1
def create_neg_pair(patches):
    """Pick two distinct patches at random (label 0)."""
    # sample over the FULL index range: the previous
    # `range(0, len(patches)-1)` silently excluded the last patch and
    # raised ValueError when only two patches were available.
    i, j = random.sample(range(len(patches)), k=2)
    return patches[i], patches[j], 0
def get_img(img):
    """Split img into a 3x2 patch grid and return a random pos/neg pair."""
    patches = divide_patches(img, 3, 2)
    make_pair = create_pos_pair if random.random() > 0.5 else create_neg_pair
    return make_pair(patches)
# sanity check: a pair should compare equal exactly when its label is 1
res = []
for i in range(10):
    img1, img2, label = get_img(image)
    flag = False
    if img1 == img2:
        flag = True
    res.append([flag, label])
print(res)
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
534
|
junprog/contrastive-baseline
|
refs/heads/main
|
/models/create_linear_eval_model.py
|
import os
from collections import OrderedDict
import torch
import torch.nn as nn
import torchvision.models as models
class LinearEvalModel(nn.Module):
    """Frozen feature extractor + linear classifier for linear evaluation.

    Args:
        arch: backbone name ('vgg19', 'vgg19_bn' or 'resnet18').
        dim: feature dimension fed to the linear head.
        num_classes: number of output classes.

    Raises:
        ValueError: if ``arch`` is not one of the supported backbones.
    """

    def __init__(self, arch='vgg19', dim=512, num_classes=10):
        super().__init__()
        # chained elif (the second branch was a bare `if`) plus an explicit
        # else: an unknown arch previously left self.features undefined and
        # only failed later with an AttributeError
        if arch == 'vgg19':
            self.features = models.vgg19().features
        elif arch == 'vgg19_bn':
            self.features = models.vgg19_bn().features
        elif arch == 'resnet18':
            resnet18 = models.resnet18(pretrained=False)
            self.features = nn.Sequential(*list(resnet18.children())[:-1])
        else:
            raise ValueError('unsupported arch: {!r}'.format(arch))
        self.avg_pool = nn.AdaptiveAvgPool2d((1,1))
        self.fc = nn.Linear(dim, num_classes)

    def weight_init(self, weight_path, device, arch):
        """Load pretrained encoder weights and freeze the feature extractor.

        Expects `<weight_path>/best_model.pth` holding a state dict whose
        encoder keys are prefixed 'encoder.' (resnet) or 'encoder.0.' (vgg).
        """
        state_dict = torch.load(os.path.join(weight_path, 'best_model.pth'), device)
        new_state_dict = OrderedDict()
        if 'resnet' in arch:
            prefix = 'encoder.'
        elif 'vgg' in arch:
            prefix = 'encoder.0.'
        else:
            raise ValueError('unsupported arch: {!r}'.format(arch))
        for k, v in state_dict.items():
            if 'encoder' in k:
                new_state_dict[k.replace(prefix, '')] = v
        self.features.load_state_dict(new_state_dict)
        # freeze the backbone; only self.fc stays trainable
        for m in self.features.parameters():
            m.requires_grad = False

    def forward(self, x):
        x = self.features(x)
        x = self.avg_pool(x)
        # flatten only the spatial dims: the old x.squeeze() also dropped
        # the batch dimension when batch size was 1
        x = torch.flatten(x, 1)
        out = self.fc(x)
        return out
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
535
|
junprog/contrastive-baseline
|
refs/heads/main
|
/models/cosine_contrastive_loss.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
def D(p, z, version='simplified'):  # negative cosine similarity
    """Negative cosine similarity between p and a stop-gradient copy of z."""
    if version == 'original':
        p_norm = F.normalize(p, dim=1)                # l2-normalize
        z_norm = F.normalize(z.detach(), dim=1)       # stop gradient, l2-normalize
        return -(p_norm * z_norm).sum(dim=1).mean()
    if version == 'simplified':
        return - F.cosine_similarity(p, z.detach(), dim=-1).mean()
    raise Exception


class CosineContrastiveLoss(nn.Module):
    """SimSiam symmetric loss: D(p1, z2)/2 + D(p2, z1)/2.

    Falls back to plain D(z1, z2) when no predictor outputs are given.
    """

    def __init__(self):
        super().__init__()

    def forward(self, z1, z2, p1, p2):
        # collapse trailing singleton dims to [N, dim] if necessary
        z1 = z1 if z1.dim() == 2 else z1.squeeze()
        z2 = z2 if z2.dim() == 2 else z2.squeeze()
        if p1 is None and p2 is None:
            return D(z1, z2)
        return D(p1, z2) / 2 + D(p2, z1) / 2
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
536
|
junprog/contrastive-baseline
|
refs/heads/main
|
/utils/helper.py
|
import os
import numpy as np
import torch
def worker_init_fn(worker_id):
    """Give each DataLoader worker a distinct, reproducible numpy seed."""
    base_seed = np.random.get_state()[1][0]
    np.random.seed(base_seed + worker_id)
class Save_Handle(object):
    """Keep at most `max_num` checkpoint paths, deleting the oldest
    file from disk once the limit is exceeded."""

    def __init__(self, max_num):
        self.save_list = []
        self.max_num = max_num

    def append(self, save_path):
        """Record a new checkpoint path, evicting (and unlinking) the oldest."""
        if len(self.save_list) >= self.max_num:
            oldest = self.save_list.pop(0)
            # best-effort cleanup: the file may have been removed externally
            if os.path.exists(oldest):
                os.remove(oldest)
        self.save_list.append(save_path)
class AverageMeter(object):
    """Tracks the latest value plus a running sum, count and mean."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record `val` observed `n` times and refresh the running mean."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count

    def get_avg(self):
        return self.avg

    def get_count(self):
        return self.count
## cannot use in training
@torch.no_grad()
def accuracy(meter, output1, output2, target):
    """Update `meter` with the top-1 accuracy of both logit tensors
    against `target`, then return the meter."""
    flat_target = target.squeeze().long().data
    batch = float(target.size()[0])
    for logits in (output1, output2):
        preds = torch.max(logits, 1)[1].data
        meter.update(float((preds == flat_target).sum()) / batch)
    return meter
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
537
|
junprog/contrastive-baseline
|
refs/heads/main
|
/utils/visualizer.py
|
import os
import numpy as np
from PIL import Image
import torch
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
### Receives a torch tensor batch and renders sample N after undoing normalization
mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])


def invnorm(img, N):
    """Undo ImageNet normalization for sample N of a batch tensor;
    returns an HWC numpy image."""
    sample = img[N, :, :, :].to('cpu').detach().numpy().copy()
    sample = sample.transpose(1, 2, 0)  # CHW -> HWC
    return sample * std + mean
class ImageDisplayer:
    """Saves a grid of the first N denormalized image pairs of a batch as a PNG."""
    def __init__(self, args, save_fir):
        # N is number of batch to display
        self.args = args
        self.save_dir = save_fir
        self.N = args.visual_num
    @torch.no_grad()
    def __call__(self, epoch, prefix, img1, img2, target):
        """Denormalize the first N pairs of (img1, img2) and write them to
        `<save_dir>/images/imgs_<prefix>_<epoch>.png`."""
        imgs1 = []
        imgs2 = []
        targets = []
        for n in range(self.N):
            imgs1.append(invnorm(img1,n))
            imgs2.append(invnorm(img2,n))
            if target is not None:
                targets.append(target[n].item())
            else:
                # no labels supplied (e.g. SimSiam two-view pairs): disable titles
                targets = None
        self.display_images(epoch, prefix, imgs1, imgs2, targets)
    def display_images(self, epoch, prefix, images1: [Image], images2: [Image], targets,
        columns=2, width=8, height=8, label_wrap_length=50, label_font_size=8):
        # each pair occupies one grid row: img1 on the left, img2 on the right
        if not (images1 and images2):
            print("No images to display.")
            return
        height = max(height, int(len(images1)/columns) * height)
        plt.figure(figsize=(width, height))
        i = 1
        if targets is not None:
            # titled variant: the pair's target value becomes the subplot title
            for (im1, im2, tar) in zip(images1, images2, targets):
                im1 = Image.fromarray(np.uint8(im1*255))
                im2 = Image.fromarray(np.uint8(im2*255))
                plt.subplot(self.N, 2, i)
                plt.title(tar, fontsize=20)
                plt.imshow(im1)
                i += 1
                plt.subplot(self.N, 2, i)
                plt.title(tar, fontsize=20)
                plt.imshow(im2)
                i += 1
        else:
            for (im1, im2) in zip(images1, images2):
                im1 = Image.fromarray(np.uint8(im1*255))
                im2 = Image.fromarray(np.uint8(im2*255))
                plt.subplot(self.N, 2, i)
                plt.imshow(im1)
                i += 1
                plt.subplot(self.N, 2, i)
                plt.imshow(im2)
                i += 1
        plt.tight_layout()
        output_img_name = 'imgs_{}_{}.png'.format(prefix, epoch)
        plt.savefig(os.path.join(self.save_dir, 'images', output_img_name))
        plt.close()
class EmbeddingDisplayer:
    """Scatter-plots 2-D embeddings colored by their CIFAR-10 class and saves a PNG."""
    def __init__(self, args, save_fir):
        self.args = args
        self.save_dir = save_fir
        self.cifar10_classes = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
        # one fixed matplotlib tab-color per class
        self.colors = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728',
                       '#9467bd', '#8c564b', '#e377c2', '#7f7f7f',
                       '#bcbd22', '#17becf']
    @torch.no_grad()
    def __call__(self, embeddings, targets, epoch, prefix, xlim=None, ylim=None):
        """Plot the first two embedding dims to
        `<save_dir>/images/emb_<prefix>_<epoch>.png`.

        NOTE(review): only columns 0 and 1 are plotted — assumes the
        embeddings are (or are projected to) 2-D; confirm against caller.
        """
        embeddings = embeddings.to('cpu').detach().numpy().copy()
        targets = targets.to('cpu').detach().numpy().copy()
        plt.figure(figsize=(10,10))
        for i in range(10):
            inds = np.where(targets==i)[0]
            plt.scatter(embeddings[inds,0], embeddings[inds,1], alpha=0.5, color=self.colors[i])
        if xlim:
            plt.xlim(xlim[0], xlim[1])
        if ylim:
            plt.ylim(ylim[0], ylim[1])
        plt.legend(self.cifar10_classes)
        output_img_name = 'emb_{}_{}.png'.format(prefix, epoch)
        plt.savefig(os.path.join(self.save_dir, 'images', output_img_name))
        plt.close()
class LossGraphPloter:
    """Accumulates per-epoch losses and re-renders the loss curve as an SVG."""

    def __init__(self, save_fir):
        self.save_dir = save_fir
        self.epochs = []
        self.losses = []

    def __call__(self, epoch, loss, prefix):
        """Append (epoch, loss) and save `<save_dir>/images/<prefix>_loss.svg`."""
        self.epochs.append(epoch)
        self.losses.append(loss)
        plt.plot(self.epochs, self.losses)
        plt.title('Loss')
        out_name = '{}_loss.svg'.format(prefix)
        plt.savefig(os.path.join(self.save_dir, 'images', out_name))
        plt.close()
class AccLossGraphPloter:
    """Accumulates train/val accuracy and loss per epoch and renders
    both curves side by side as an SVG."""

    def __init__(self, save_fir):
        self.save_dir = save_fir
        self.tr_accs = []
        self.vl_accs = []
        self.tr_losses = []
        self.vl_losses = []
        self.epochs = []

    def __call__(self, epoch, tr_acc, vl_acc, tr_loss, vl_loss, prefix):
        """Append this epoch's metrics and save `<save_dir>/images/<prefix>_eval.svg`."""
        for series, value in ((self.tr_accs, tr_acc), (self.vl_accs, vl_acc),
                              (self.tr_losses, tr_loss), (self.vl_losses, vl_loss),
                              (self.epochs, epoch)):
            series.append(value)
        fig, (ax_left, ax_right) = plt.subplots(ncols=2, figsize=(10, 4))
        # left panel: top-1 accuracy curves
        ax_left.plot(self.epochs, self.tr_accs, label='train')
        ax_left.plot(self.epochs, self.vl_accs, label='val')
        ax_left.set_title('Top-1 Accuracy')
        ax_left.set_xlabel('epoch')
        ax_left.set_ylabel('acc [%]')
        ax_left.legend(loc="lower right")
        # right panel: loss curves
        ax_right.plot(self.epochs, self.tr_losses, label='train')
        ax_right.plot(self.epochs, self.vl_losses, label='val')
        ax_right.set_title('Loss')
        ax_right.set_xlabel('epoch')
        ax_right.set_ylabel('loss')
        ax_right.legend(loc="upper right")
        plt.savefig(os.path.join(self.save_dir, 'images', '{}_eval.svg'.format(prefix)))
        plt.close()
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
538
|
junprog/contrastive-baseline
|
refs/heads/main
|
/train_val_split.py
|
import os
from glob import glob
import numpy as np
import argparse
def parse_args():
    """Parse command-line options for the train/val split script."""
    parser = argparse.ArgumentParser(description='Test ')
    parser.add_argument('--data-dir', default='/mnt/hdd02/shibuya_scramble',
                        help='original data directory')
    return parser.parse_args()
if __name__ == '__main__':
    args = parse_args()
    ## Random Train-Val split
    # 80% of the jpgs go to train.txt, the remaining 20% to val.txt
    im_list = sorted(glob(os.path.join(args.data_dir, '*.jpg')))
    im_list = [im_name for im_name in im_list]
    # sample the train set without replacement; val is the complement
    tr_im_list = list(np.random.choice(im_list, size=int(len(im_list)*0.8), replace=False))
    vl_im_list = list(set(im_list) - set(tr_im_list))
    # write one absolute image path per line into <data_dir>/{train,val}.txt
    for phase in ['train', 'val']:
        with open(os.path.join(args.data_dir, './{}.txt'.format(phase)), mode='w') as f:
            if phase == 'train':
                f.write('\n'.join(tr_im_list))
            elif phase == 'val':
                f.write('\n'.join(vl_im_list))
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
539
|
junprog/contrastive-baseline
|
refs/heads/main
|
/utils/simsiam_trainer.py
|
import os
import sys
import time
import logging
import numpy as np
import torch
from torch import optim
from torch.optim import lr_scheduler
from torch.utils.data import DataLoader
import torchvision.models as models
import torchvision.datasets as datasets
from models.simple_siamese_net import SiameseNetwork
from models.cosine_contrastive_loss import CosineContrastiveLoss
from utils.trainer import Trainer
from utils.helper import Save_Handle, AverageMeter, worker_init_fn
from utils.visualizer import ImageDisplayer, LossGraphPloter
from datasets.spatial import SpatialDataset
from datasets.cifar10 import PosNegCifar10, get_simsiam_dataset
class SimSiamTrainer(Trainer):
    """Trainer for SimSiam-style self-supervised learning.

    Feeds two augmented views of each image through a siamese network and
    minimizes a cosine-similarity contrastive loss between the outputs.
    Checkpoints are written periodically; the best validation loss keeps
    `best_model.pth` up to date.
    """

    def setup(self):
        """Initialize the datasets, model, loss, optimizer and LR schedule."""
        args = self.args
        self.vis = ImageDisplayer(args, self.save_dir)
        self.tr_graph = LossGraphPloter(self.save_dir)
        self.vl_graph = LossGraphPloter(self.save_dir)

        if torch.cuda.is_available():
            self.device = torch.device("cuda")
            self.device_count = torch.cuda.device_count()
            logging.info('using {} gpus'.format(self.device_count))
        else:
            raise Exception("gpu is not available")

        # Datasets: CIFAR-10 two-view pairs, or the project's spatial dataset.
        if args.cifar10:
            self.datasets = {x: get_simsiam_dataset(args, x) for x in ['train', 'val']}
        else:
            self.datasets = {x: SpatialDataset(x,
                                               args.data_dir,
                                               args.crop_size,
                                               (args.div_row, args.div_col),
                                               args.aug) for x in ['train', 'val']}
        self.dataloaders = {x: DataLoader(self.datasets[x],
                                          batch_size=args.batch_size,
                                          shuffle=(x == 'train'),
                                          num_workers=args.num_workers * self.device_count,
                                          pin_memory=(x == 'train'),
                                          worker_init_fn=worker_init_fn) for x in ['train', 'val']}

        # Model, loss, optimizer and schedule.
        self.model = SiameseNetwork(args)
        self.model.to(self.device)
        self.criterion = CosineContrastiveLoss()
        self.criterion.to(self.device)
        self.optimizer = optim.SGD(self.model.parameters(), lr=args.lr,
                                   momentum=args.momentum, weight_decay=args.weight_decay)
        # T_max is expressed in *epochs*, so the scheduler must be stepped once
        # per epoch (see train_epoch), not once per batch.
        self.scheduler = lr_scheduler.CosineAnnealingLR(self.optimizer, T_max=args.max_epoch)

        self.start_epoch = 0
        self.best_loss = np.inf
        if args.resume:
            suf = args.resume.rsplit('.', 1)[-1]
            if suf == 'tar':
                checkpoint = torch.load(args.resume, self.device)
                self.model.load_state_dict(checkpoint['model_state_dict'])
                self.optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
                self.start_epoch = checkpoint['epoch'] + 1
            elif suf == 'pth':
                self.model.load_state_dict(torch.load(args.resume, self.device))
        self.save_list = Save_Handle(max_num=args.max_model_num)

    def train(self):
        """Run the full training loop with periodic validation."""
        args = self.args
        for epoch in range(self.start_epoch, args.max_epoch):
            logging.info('-' * 5 + 'Epoch {}/{}'.format(epoch, args.max_epoch - 1) + '-' * 5)
            self.epoch = epoch
            self.train_epoch(epoch)
            if epoch % args.val_epoch == 0 and epoch >= args.val_start:
                self.val_epoch(epoch)

    def train_epoch(self, epoch):
        """One training pass over the data; writes a checkpoint every `check_point` epochs."""
        epoch_loss = AverageMeter()
        epoch_start = time.time()
        self.model.train()  # Set model to training mode

        for step, ((input1, input2), label) in enumerate(self.dataloaders['train']):
            input1 = input1.to(self.device)
            input2 = input2.to(self.device)
            with torch.set_grad_enabled(True):
                (z1, z2), (p1, p2) = self.model(input1, input2)
                loss = self.criterion(z1, z2, p1, p2)
                epoch_loss.update(loss.item(), input1.size(0))
                self.optimizer.zero_grad()
                loss.backward()
                self.optimizer.step()
            # Visualize the first batch of every epoch.
            if step == 0:
                self.vis(epoch, 'train', input1, input2, label)

        # BUG FIX: previously stepped after every batch, which exhausted the
        # epoch-based cosine annealing schedule within the first epoch.
        self.scheduler.step()

        logging.info('Epoch {} Train, Loss: {:.5f}, lr: {:.5f}, Cost {:.1f} sec'
                     .format(self.epoch, epoch_loss.get_avg(), self.optimizer.param_groups[0]['lr'], time.time() - epoch_start))
        self.tr_graph(self.epoch, epoch_loss.get_avg(), 'tr')

        if epoch % self.args.check_point == 0:
            model_state_dic = self.model.state_dict()
            save_path = os.path.join(self.save_dir, '{}_ckpt.tar'.format(self.epoch))
            torch.save({
                'epoch': self.epoch,
                'optimizer_state_dict': self.optimizer.state_dict(),
                'model_state_dict': model_state_dic
            }, save_path)
            self.save_list.append(save_path)  # control the number of saved models

    def val_epoch(self, epoch):
        """One validation pass; saves `best_model.pth` when the loss improves."""
        epoch_start = time.time()
        self.model.eval()  # Set model to evaluate mode
        epoch_loss = AverageMeter()

        for step, ((input1, input2), label) in enumerate(self.dataloaders['val']):
            input1 = input1.to(self.device)
            input2 = input2.to(self.device)
            with torch.set_grad_enabled(False):
                (z1, z2), (p1, p2) = self.model(input1, input2)
                loss = self.criterion(z1, z2, p1, p2)
                epoch_loss.update(loss.item(), input1.size(0))
            if step == 0:
                self.vis(epoch, 'val', input1, input2, label)

        logging.info('Epoch {} Val, Loss: {:.5f}, Cost {:.1f} sec'
                     .format(self.epoch, epoch_loss.get_avg(), time.time() - epoch_start))
        self.vl_graph(self.epoch, epoch_loss.get_avg(), 'vl')

        model_state_dic = self.model.state_dict()
        if self.best_loss > epoch_loss.get_avg():
            self.best_loss = epoch_loss.get_avg()
            logging.info("save min loss {:.2f} model epoch {}".format(self.best_loss, self.epoch))
            torch.save(model_state_dic, os.path.join(self.save_dir, 'best_model.pth'))
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
540
|
junprog/contrastive-baseline
|
refs/heads/main
|
/models/simple_siamese_net_tmp.py
|
import torch
import torch.nn as nn
class projection_MLP(nn.Module):
    """Three-layer MLP projection head: in_dim -> hidden -> hidden -> out_dim.

    Non-2D inputs (e.g. pooled conv features of shape (B, C, 1, 1)) are
    squeezed to (B, C) before the linear layers.
    """

    def __init__(self, in_dim=512, hidden_dim=512, out_dim=512):  # bottleneck structure
        super().__init__()
        self.layers = nn.Sequential(
            nn.Linear(in_dim, hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, out_dim),
        )

    def forward(self, x):
        flat = x if x.dim() == 2 else x.squeeze()
        return self.layers(flat)
class prediction_MLP(nn.Module):
    """Two-layer MLP prediction head with a narrow hidden layer.

    Inputs that are not 2D are squeezed down to (B, C) first.
    """

    def __init__(self, in_dim=512, hidden_dim=256, out_dim=512):  # bottleneck structure
        super().__init__()
        self.layer1 = nn.Sequential(
            nn.Linear(in_dim, hidden_dim),
            nn.ReLU(inplace=True),
        )
        self.layer2 = nn.Linear(hidden_dim, out_dim)

    def forward(self, x):
        flat = x if x.dim() == 2 else x.squeeze()
        return self.layer2(self.layer1(flat))
class SiameseNetwork(nn.Module):
    """Siamese encoder with optional projection/prediction heads (SimSiam-style).

    Args:
        model: zero-arg callable returning a torchvision-like backbone
            (must expose `.features` for 'conv-512x1x1', `.classifier`
            for 'fc-4096').
        pattern_feature: which feature to encode, 'conv-512x1x1' or 'fc-4096'.
        projection: attach a projection MLP applied to encoder features.
        prediction: attach a prediction MLP (SimSiam predictor).

    Returns from forward: ((z1, z2), (p1, p2)); p1/p2 are None when
    prediction is disabled.
    """

    def __init__(self, model, pattern_feature='conv-512x1x1', projection=False, prediction=False):
        super(SiameseNetwork, self).__init__()
        self.projection = projection
        self.prediction = prediction
        if pattern_feature == 'conv-512x1x1':
            # Conv backbone followed by global average pooling -> (B, 512, 1, 1).
            features = model().features
            pool = nn.AdaptiveAvgPool2d((1, 1))
            self.encoder = nn.Sequential(features, pool)
            if projection:
                self.projector = projection_MLP(in_dim=512, hidden_dim=512, out_dim=512)
            if prediction:
                self.predictor = prediction_MLP(in_dim=512, out_dim=512)
        elif pattern_feature == 'fc-4096':
            # BUG FIX: original read `self.encoder.classifier` before
            # `self.encoder` was ever assigned (AttributeError). Use the
            # freshly built model's first classifier layer instead.
            backbone = model()
            self.encoder = nn.Sequential(backbone.classifier[0])
            if projection:
                self.projector = projection_MLP(in_dim=4096, hidden_dim=4096, out_dim=4096)
            if prediction:
                self.predictor = prediction_MLP(in_dim=4096, out_dim=4096)

    def forward(self, input1, input2):
        """Encode both views; optionally project and predict."""
        z1, z2 = self.encoder(input1), self.encoder(input2)
        if self.projection:
            # BUG FIX: original called `self.projection` (a bool!) on the raw
            # inputs; the projector must be applied to the encoder outputs.
            z1, z2 = self.projector(z1), self.projector(z2)
        if self.prediction:
            p1, p2 = self.predictor(z1), self.predictor(z2)
        else:
            p1, p2 = None, None
        return (z1, z2), (p1, p2)
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
541
|
junprog/contrastive-baseline
|
refs/heads/main
|
/utils/contrastive_trainer.py
|
import os
import sys
import time
import logging
import numpy as np
import torch
from torch import optim
from torch.optim import lr_scheduler
from torch.utils.data import DataLoader
import torchvision.models as models
import torchvision.datasets as datasets
from models.siamese_net import SiameseNetwork
from models.l2_contrastive_loss import L2ContrastiveLoss
from utils.trainer import Trainer
from utils.helper import Save_Handle, AverageMeter, worker_init_fn
from utils.visualizer import ImageDisplayer, EmbeddingDisplayer
from datasets.spatial import SpatialDataset
from datasets.cifar10 import PosNegCifar10
class CoTrainer(Trainer):
    """Trainer for siamese contrastive learning with an L2 contrastive loss.

    Each batch yields a pair of images plus a same/different target, coming
    either from CIFAR-10 (PosNegCifar10) or from the project's SpatialDataset.
    """

    def setup(self):
        """initialize the datasets, model, loss and optimizer"""
        args = self.args
        self.vis = ImageDisplayer(args, self.save_dir)
        self.emb = EmbeddingDisplayer(args, self.save_dir)
        if torch.cuda.is_available():
            self.device = torch.device("cuda")
            self.device_count = torch.cuda.device_count()
            logging.info('using {} gpus'.format(self.device_count))
        else:
            raise Exception("gpu is not available")
        if args.cifar10:
            # Download and create datasets
            or_train = datasets.CIFAR10(root="CIFAR10_Dataset", train=True, transform=None, download=True)
            or_val = datasets.CIFAR10(root="CIFAR10_Dataset", train=False, transform=None, download=True)
            # splits CIFAR10 into two streams (positive/negative pairs)
            self.datasets = {x: PosNegCifar10((or_train if x == 'train' else or_val),
                                              phase=x) for x in ['train', 'val']}
        else:
            # NOTE(review): this call signature (joined path, div_num) differs from
            # the one used in simsiam_trainer.py (phase, data_dir, (div_row, div_col))
            # — confirm which SpatialDataset API is current.
            self.datasets = {x: SpatialDataset(os.path.join(args.data_dir, x),
                                               args.crop_size,
                                               args.div_num,
                                               args.aug) for x in ['train', 'val']}
        self.dataloaders = {x: DataLoader(self.datasets[x],
                                          batch_size=args.batch_size,
                                          shuffle=(True if x == 'train' else False),
                                          num_workers=args.num_workers*self.device_count,
                                          pin_memory=(True if x == 'train' else False),
                                          worker_init_fn=worker_init_fn) for x in ['train', 'val']}
        # Define model, loss, optim
        # NOTE(review): the SiameseNetwork imported here (models/siamese_net.py)
        # takes (model, pretrained, simple_model); `pattern_feature` matches the
        # simple_siamese_net_tmp API — confirm the intended class.
        self.model = SiameseNetwork(models.__dict__[args.arch], pattern_feature = args.pattern_feature)
        self.model.to(self.device)
        self.criterion = L2ContrastiveLoss(args.margin)
        self.criterion.to(self.device)
        self.optimizer = optim.SGD(self.model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)
        self.scheduler = lr_scheduler.MultiStepLR(self.optimizer, milestones=[80, 120, 160, 200, 250], gamma=0.1)
        self.start_epoch = 0
        self.best_loss = np.inf
        if args.resume:
            # Resume from either a full checkpoint (.tar) or bare weights (.pth).
            suf = args.resume.rsplit('.', 1)[-1]
            if suf == 'tar':
                checkpoint = torch.load(args.resume, self.device)
                self.model.load_state_dict(checkpoint['model_state_dict'])
                self.optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
                self.start_epoch = checkpoint['epoch'] + 1
            elif suf == 'pth':
                self.model.load_state_dict(torch.load(args.resume, self.device))
        self.save_list = Save_Handle(max_num=args.max_model_num)

    def train(self):
        """training process"""
        args = self.args
        for epoch in range(self.start_epoch, args.max_epoch):
            logging.info('-'*5 + 'Epoch {}/{}'.format(epoch, args.max_epoch - 1) + '-'*5)
            self.epoch = epoch
            self.train_epoch(epoch)
            if epoch % args.val_epoch == 0 and epoch >= args.val_start:
                self.val_epoch(epoch)

    def train_epoch(self, epoch):
        """One training pass; checkpoints every epoch."""
        epoch_loss = AverageMeter()
        epoch_start = time.time()
        self.model.train()  # Set model to training mode
        for step, (input1, input2, target, label) in enumerate(self.dataloaders['train']):
            input1 = input1.to(self.device)
            input2 = input2.to(self.device)
            target = target.to(self.device)
            with torch.set_grad_enabled(True):
                output1, output2 = self.model(input1, input2)
                loss = self.criterion(output1, output2, target)
                epoch_loss.update(loss.item(), input1.size(0))
                self.optimizer.zero_grad()
                loss.backward()
                self.optimizer.step()
                # NOTE(review): stepped once per *batch*, but the MultiStepLR
                # milestones above look like epoch counts — confirm intent.
                self.scheduler.step()
            # visualize first batch of the epoch (images + embedding plot)
            if step == 0:
                self.vis(epoch, 'train', input1, input2, target)
                self.emb(output1, label, epoch, 'train')
        logging.info('Epoch {} Train, Loss: {:.5f}, Cost {:.1f} sec'
                     .format(self.epoch, epoch_loss.get_avg(), time.time()-epoch_start))
        model_state_dic = self.model.state_dict()
        save_path = os.path.join(self.save_dir, '{}_ckpt.tar'.format(self.epoch))
        torch.save({
            'epoch': self.epoch,
            'optimizer_state_dict': self.optimizer.state_dict(),
            'model_state_dict': model_state_dic
        }, save_path)
        self.save_list.append(save_path)  # control the number of saved models

    def val_epoch(self, epoch):
        """One validation pass; keeps best_model.pth at the lowest loss."""
        epoch_start = time.time()
        self.model.eval()  # Set model to evaluate mode
        epoch_loss = AverageMeter()
        for step, (input1, input2, target, label) in enumerate(self.dataloaders['val']):
            input1 = input1.to(self.device)
            input2 = input2.to(self.device)
            target = target.to(self.device)
            with torch.set_grad_enabled(False):
                output1, output2 = self.model(input1, input2)
                loss = self.criterion(output1, output2, target)
                epoch_loss.update(loss.item(), input1.size(0))
            # visualize
            if step == 0:
                self.vis(epoch, 'val', input1, input2, target)
                self.emb(output1, label, epoch, 'val')
        logging.info('Epoch {} Val, Loss: {:.5f}, Cost {:.1f} sec'
                     .format(self.epoch, epoch_loss.get_avg(), time.time()-epoch_start))
        model_state_dic = self.model.state_dict()
        if self.best_loss > epoch_loss.get_avg():
            self.best_loss = epoch_loss.get_avg()
            logging.info("save min loss {:.2f} model epoch {}".format(self.best_loss, self.epoch))
            torch.save(model_state_dic, os.path.join(self.save_dir, 'best_model.pth'))
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
542
|
junprog/contrastive-baseline
|
refs/heads/main
|
/linear_eval.py
|
import os
import argparse
import logging
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.optim import lr_scheduler
from torch.utils.data import DataLoader
import torchvision.models as models
from datasets.cifar10 import get_simsiam_dataset
from models.create_linear_eval_model import LinearEvalModel
from utils.visualizer import AccLossGraphPloter
from utils.logger import setlogger
args = None
def parse_args():
    """Parse the command-line options for linear evaluation."""
    parser = argparse.ArgumentParser(description='Test ')
    options = (
        ('--save-dir', dict(default='/mnt/hdd02/contrastive-learn/0113-193048', help='model directory')),
        ('--device', dict(default='0', help='assign device')),
        ('--arch', dict(default='vgg19', help='model architecture')),
        ('--max-epoch', dict(default=100, type=int, help='train epoch')),
        ('--crop-size', dict(default=224, type=int, help='input size')),
        ('--batch-size', dict(default=512, type=int, help='input size')),
        ('--lr', dict(default=1e-1, type=float, help='learning rate')),
        ('--momentum', dict(default=0.9, type=float, help='momentum')),
    )
    for flag, kwargs in options:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
if __name__ == '__main__':
    # Linear evaluation: train a linear classifier on top of a frozen
    # pretrained backbone and report train/val accuracy per epoch.
    args = parse_args()
    os.environ['CUDA_VISIBLE_DEVICES'] = args.device.strip()  # set vis gpu
    plotter = AccLossGraphPloter(args.save_dir)
    setlogger(os.path.join(args.save_dir, 'eval.log'))  # set logger
    datasets = {x: get_simsiam_dataset(args, x) for x in ['linear_train', 'linear_val']}
    dataloaders = {x: DataLoader(datasets[x],
                                 batch_size=(args.batch_size),
                                 shuffle=(True if x == 'linear_train' else False),
                                 num_workers=8,
                                 pin_memory=(True if x == 'linear_train' else False)) for x in ['linear_train', 'linear_val']}
    device = torch.device('cuda')
    model = LinearEvalModel(arch=args.arch)
    model.weight_init(args.save_dir, device, args.arch)  # initialize & freeze backbone weights
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum)
    scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[40, 60, 80], gamma=0.1)
    ## Training & test loop
    model.to(device)
    for epoch in range(args.max_epoch):
        model.train()
        losses, acc, step, total = 0., 0., 0., 0.
        for data, target in dataloaders['linear_train']:
            data, target = data.to(device), target.to(device)
            logits = model(data)
            optimizer.zero_grad()
            loss = criterion(logits, target)
            loss.backward()
            losses += loss.item()
            optimizer.step()
            # NOTE(review): scheduler is stepped per *batch* while the
            # milestones [40, 60, 80] look like epoch counts — confirm intent.
            scheduler.step()
            # argmax over softmax = argmax over logits; kept as written.
            pred = F.softmax(logits, dim=-1).max(-1)[1]
            acc += pred.eq(target).sum().item()
            step += 1
            total += target.size(0)
        tr_loss = losses / step
        tr_acc = acc / total * 100.
        logging.info('[Train Epoch: {0:2d}], loss: {1:.3f}, acc: {2:.3f}'.format(epoch, tr_loss, tr_acc))
        model.eval()
        losses, acc, step, total = 0., 0., 0., 0.
        with torch.no_grad():
            for data, target in dataloaders['linear_val']:
                data, target = data.to(device), target.to(device)
                logits = model(data)
                loss = criterion(logits, target)
                losses += loss.item()
                pred = F.softmax(logits, dim=-1).max(-1)[1]
                acc += pred.eq(target).sum().item()
                step += 1
                total += target.size(0)
        vl_loss = losses / step
        vl_acc = acc / total * 100.
        logging.info('[Test Epoch: {0:2d}], loss: {1:.3f} acc: {2:.2f}'.format(epoch, vl_loss, vl_acc))
        plotter(epoch, tr_acc, vl_acc, tr_loss, vl_loss, args.arch)
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
543
|
junprog/contrastive-baseline
|
refs/heads/main
|
/models/siamese_net.py
|
import torch
import torch.nn as nn
class SiameseNetwork(nn.Module):
    """Two-branch siamese network with shared weights.

    With `simple_model=True` a small hand-built CNN (3 conv blocks + 3-layer
    classifier, 2-dim output) is used; otherwise `model` is called to build
    the encoder, optionally pretrained with its classifier truncated and a
    fresh 2-unit output layer appended.
    """

    def __init__(self, model, pretrained=False, simple_model=False):
        super(SiameseNetwork, self).__init__()
        self.simple_model = simple_model
        if simple_model:
            conv_stack = []
            for c_in, c_out in ((3, 32), (32, 64), (64, 64)):
                conv_stack += [nn.Conv2d(c_in, c_out, 5), nn.PReLU(), nn.MaxPool2d(2, stride=2)]
            self.features = nn.Sequential(*conv_stack)
            self.classifier = nn.Sequential(
                nn.Linear(64 * 4 * 4, 256), nn.PReLU(),
                nn.Linear(256, 256), nn.PReLU(),
                nn.Linear(256, 2),
            )
        elif pretrained:
            # Keep the first six classifier layers, then map 4096 -> 2.
            self.encoder = model(pretrained=True)
            self.encoder.classifier = nn.Sequential(*[self.encoder.classifier[i] for i in range(6)])
            self.encoder.classifier.add_module('out', nn.Linear(4096, 2))
        else:
            self.encoder = model(num_classes=2)

    def forward_once(self, x):
        """Run a single branch (both branches share these weights)."""
        if not self.simple_model:
            return self.encoder(x)
        feats = self.features(x)
        return self.classifier(feats.view(feats.size(0), -1))

    def forward(self, input1, input2):
        return self.forward_once(input1), self.forward_once(input2)
|
{"/train.py": ["/utils/contrastive_trainer.py", "/utils/simsiam_trainer.py"], "/utils/simsiam_trainer.py": ["/models/cosine_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/utils/contrastive_trainer.py": ["/models/siamese_net.py", "/models/l2_contrastive_loss.py", "/utils/helper.py", "/utils/visualizer.py", "/datasets/spatial.py", "/datasets/cifar10.py"], "/linear_eval.py": ["/datasets/cifar10.py", "/models/create_linear_eval_model.py", "/utils/visualizer.py"]}
|
544
|
EricHughesABC/T2EPGviewer
|
refs/heads/master
|
/t2fit.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 3 11:30:41 2018
@author: ERIC
"""
import numpy as np
import lmfit
from epg import cpmg_epg_b1 as cpmg_epg_b1_c
from scipy import integrate
mxyz90 = np.fromfile( 'epg/mxyz90.txt', sep=' ' )
mxyz180 = np.fromfile('epg/mxyz180.txt', sep=' ')
mxyz90 = mxyz90.reshape(5,512)
mxyz180 = mxyz180.reshape(5,512)
offset=130
step=10
epg_slice_xxx =mxyz90[0][offset:-offset+step:step] # mm
epg_p90 = mxyz90[-1][offset:-offset+step:step] # degrees
epg_p180 = mxyz180[-1][offset:-offset+step:step] # degrees
epg_dx=epg_slice_xxx[1]-epg_slice_xxx[0]
def fit_cpmg_epg_muscle_philips_hargreaves_c( params, xxx, dx, p90_array, p180_array, yyy_exp=None):
    """Two-compartment (fat + muscle) CPMG echo-train model using the EPG algorithm.

    For each point of the slice profile (paired effective 90/180 flip angles),
    the fat and muscle echo trains are simulated with the compiled
    `cpmg_epg_b1_c` routine, combined with amplitudes Afat/Amuscle, and the
    result is integrated across the slice with Simpson's rule.

    Args:
        params: lmfit Parameters with T1/T2 for both pools, echo, amplitudes, B1scale.
        xxx: echo-time axis; only its length (number of echoes) is used here.
        dx: slice-profile sample spacing for the Simpson integration.
        p90_array, p180_array: effective flip angles across the slice profile.
        yyy_exp: optional measured echo train; when given, the residual
            (model - data) is returned instead of the model curve.

    Returns:
        ndarray of length len(xxx): residual if yyy_exp is an ndarray,
        otherwise the integrated model signal.
    """
    parvals = params.valuesdict()
    T1fat = parvals[ 'T1fat' ] # fixed
    T1muscle = parvals[ 'T1muscle' ] # fixed
    echo = parvals[ 'echo' ] # fixed
    T2fat = parvals[ 'T2fat' ] # fixed/optimized
    T2muscle = parvals['T2muscle'] # optimized
    Afat = parvals[ 'Afat'] # optimized
    Amuscle = parvals['Amuscle'] # optimized
    B1scale = parvals['B1scale']
    Nechos = len(xxx)
    Ngauss = len(p90_array)
    signal = np.zeros([Ngauss,Nechos])
    # cpmg_epg_b1_c fills these buffers in place for each slice position.
    fat_signal = np.zeros(Nechos)
    muscle_signal = np.zeros(Nechos)
    for i,(p90,p180) in enumerate(zip(p90_array,p180_array)):
        cpmg_epg_b1_c( fat_signal, p90, p180, T1fat, T2fat, echo, B1scale )
        cpmg_epg_b1_c( muscle_signal, p90, p180, T1muscle, T2muscle, echo, B1scale )
        signal[i] = Afat*fat_signal+Amuscle*muscle_signal
    # Integrate the per-position echo trains across the slice profile.
    # NOTE(review): integrate.simps is deprecated/removed in newer SciPy —
    # migrate to integrate.simpson when upgrading.
    int_signal = integrate.simps(signal, dx=dx,axis=0)
    if isinstance(yyy_exp, np.ndarray):
        return( int_signal-yyy_exp)
    else:
        return(int_signal)
def calculate_T2values_on_slice_muscleEPG(lmparams, yyy_exp):
    """Fit the EPG fat/muscle model to one pixel's echo train.

    Normalizes the data to its maximum, runs an lmfit minimization of
    `fit_cpmg_epg_muscle_philips_hargreaves_c`, and returns
    (fitted curve, normalized data, lmfit result, echo-time axis).
    """
    params = lmparams['epgt2fitparams']
    echo_time = params['echo'].value
    num_echoes = yyy_exp.size

    # Report the current parameter set before fitting.
    print("parvals")
    for name, val in params.valuesdict().items():
        print(name, val)
    print("EPG echo time =", echo_time)

    # Echo-time axis: echo_time, 2*echo_time, ..., num_echoes*echo_time.
    xxx = np.linspace(echo_time, echo_time * num_echoes, num_echoes)
    dx = xxx[1] - xxx[0]

    # Normalize; guard against an all-zero pixel.
    scale = yyy_exp.max()
    if scale == 0:
        scale = 1.0
    yyy_exp_norm = yyy_exp / scale

    minimizer = lmfit.Minimizer(
        fit_cpmg_epg_muscle_philips_hargreaves_c,
        lmparams['epgt2fitparams'],
        fcn_args=(xxx, dx, epg_p90, epg_p180, yyy_exp_norm),
    )
    results = minimizer.minimize()

    # Model curve = residual + data; zeros when the fit did not converge.
    fit_plot = np.zeros(num_echoes)
    if results.success:
        fit_plot = results.residual + yyy_exp_norm
    return (fit_plot, yyy_exp_norm, results, xxx)
def calculate_T2values_on_slice_muscleAzz(lmparams, yyy_exp):
    """Tri-exponential (Azzabou-style) T2 fit for one pixel's echo train.

    The model is a fixed bi-exponential fat term plus a mono-exponential
    muscle term; the first two echoes are excluded from the fit.

    Args:
        lmparams: dict holding lmfit Parameters under 'azzt2fitparams'.
        yyy_exp: 1-D array of echo amplitudes for one pixel.

    Returns:
        (fitted curve over echoes 2..N, normalized data, lmfit ModelResult,
         echo-time axis).
    """
    params = lmparams['azzt2fitparams']
    echo_time = params['echo'].value
    num_echoes = yyy_exp.size

    model = lmfit.models.ExpressionModel('Afat * (c_l*exp(-x/t2_fl)+c_s*exp(-x/t2_fs)) + Amuscle * (exp(-x/T2muscle))')

    parvals = params.valuesdict()
    print("parvals")
    for k, v in parvals.items():
        print(k, v)
    print("azzabou echo time", echo_time)

    # Echo-time axis: echo_time, 2*echo_time, ..., num_echoes*echo_time.
    xxx = np.linspace(echo_time, echo_time * num_echoes, num_echoes)

    # Normalize; guard against an all-zero pixel.
    yyy_exp_max = yyy_exp.max()
    if yyy_exp_max == 0.0:
        yyy_exp_max = 1.0
    yyy_exp_norm = yyy_exp / yyy_exp_max

    print("fitting data")
    # Echoes 0 and 1 are dropped from the fit.
    # NOTE(review): unlike the EPG path, results.success is not checked here.
    results = model.fit(yyy_exp_norm[2:], x=xxx[2:], params=lmparams['azzt2fitparams'])

    # Model curve = residual + data. (A dead pre-allocation of fit_plot and a
    # large block of commented-out bookkeeping code were removed.)
    fit_plot = results.residual + yyy_exp_norm[2:]
    return (fit_plot, yyy_exp_norm, results, xxx)
|
{"/visionplot_widgets.py": ["/t2fit.py", "/ImageData.py", "/epgT2paramsDialog.py", "/azzT2paramsDialog.py"], "/simple_pandas_plot.py": ["/visionplot_widgets.py", "/mriplotwidget.py", "/ImageData.py"]}
|
545
|
EricHughesABC/T2EPGviewer
|
refs/heads/master
|
/visionplot_widgets.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 28 13:11:07 2018
@author: neh69
"""
import sys
import numpy as np
#import matplotlib
import pandas as pd
#import mplcursors
from uncertainties import ufloat
import t2fit
import lmfit as lm
from matplotlib import pyplot as plt
#import seaborn as sns
from matplotlib.backends.qt_compat import QtCore, QtWidgets, is_pyqt5
import seaborn as sns
if is_pyqt5():
print("pyqt5")
from matplotlib.backends.backend_qt5agg import (
FigureCanvas, NavigationToolbar2QT as NavigationToolbar)
else:
print("pyqt4")
from matplotlib.backends.backend_qt4agg import (
FigureCanvas, NavigationToolbar2QT as NavigationToolbar)
from matplotlib.figure import Figure
from ImageData import T2imageData
import epgT2paramsDialog
import azzT2paramsDialog
#mxyz90 = np.fromfile( 'epg\mxyz90.txt', sep=' ' )
#mxyz180 = np.fromfile('epg\mxyz180.txt', sep=' ')
#
#mxyz90 = mxyz90.reshape(5,512)
#mxyz180 = mxyz180.reshape(5,512)
#
#offset=130
#step=10
#epg_slice_xxx =mxyz90[0][offset:-offset+step:step] # mm
#epg_p90 = mxyz90[-1][offset:-offset+step:step] # degrees
#epg_p180 = mxyz180[-1][offset:-offset+step:step] # degrees
#epg_dx=epg_slice_xxx[1]-epg_slice_xxx[0]
class PlotWidget(QtWidgets.QWidget):
    """Qt widget wrapping a single-axes Matplotlib canvas.

    Exposes the axes as `self.ax`; an optional navigation toolbar is added
    below the canvas.
    """

    def __init__(self, parent=None, showToolbar=True):
        super(PlotWidget, self).__init__(parent)
        figure = Figure(figsize=(3, 5))
        figure.set_tight_layout(True)
        self.plot_canvas = FigureCanvas(figure)
        self.ax = figure.add_subplot(111)

        self.layout = QtWidgets.QVBoxLayout(self)
        self.layout.addWidget(self.plot_canvas)
        if showToolbar:
            self.toolbar = NavigationToolbar(self.plot_canvas, self)
            self.layout.addWidget(self.toolbar)

    def return_ax(self):
        """Return the widget's Matplotlib axes."""
        return self.ax
class HistogramPlotWidget(PlotWidget):
    """Plot widget drawing per-ROI density histograms of a fit parameter
    for the currently displayed slice (T2 or Dixon dataframe)."""

    def __init__(self, parent=None, showToolbar=False, mri_plot=None, data_df=None, image_size=256):
        # mri_plot is accepted but not used in this class.
        self.data_df = data_df
        self.image_size = image_size
        super(HistogramPlotWidget,self).__init__(parent=parent, showToolbar=showToolbar)
        self.buttonUpdate = QtWidgets.QPushButton('Update')
        self.buttonUpdate.clicked.connect(self.update)
        self.layout.addWidget(self.buttonUpdate)

    def update(self):
        # NOTE(review): shadows QWidget.update(); currently only prints the
        # x-limits and discards them.
        print((self.ax.get_xlim()))
        xmin,xmax = self.ax.get_xlim()

    def update_plot(self, slice_info,data_dframes, plot_param):
        """Redraw the histogram for `plot_param` on the displayed slice.

        slice_info: (displayed slice, T2 slice list, Dixon slice list);
        data_dframes: (T2 dataframe, Dixon dataframe), either may be None.
        Returns True when something was plotted, False otherwise.
        """
        self.ax.cla()
        self.plot_canvas.draw()
        print("Entered HistogramPlotWidget.update_image, plot_param =", plot_param)
        data_df=None
        slice_displayed = slice_info[0]
        T2_slices = slice_info[1]
        dixon_slices = slice_info[2]
        print("data_dframes[0]", type(data_dframes[0]), data_dframes[0].columns)
        print("data_dframes[1]", type(data_dframes[1]), data_dframes[1].columns)
        if isinstance(data_dframes[0],pd.core.frame.DataFrame):
            if plot_param in data_dframes[0].columns:
                print("plot_param {} found in dataframe is T2".format(plot_param))
                data_df = data_dframes[0]
                data_df=data_df[data_df["slice"]==slice_displayed]
            elif isinstance(data_dframes[1],pd.core.frame.DataFrame):
                print("plot_param {} found in dataframe is Dixon".format(plot_param))
                print("data_dframes[1].columns",data_dframes[1].columns)
                if plot_param in data_dframes[1].columns:
                    print("plot_param in data_dframes[1]:", plot_param)
                    data_df = data_dframes[1]
                    # Map the displayed T2 slice index onto the Dixon index.
                    if slice_displayed in T2_slices:
                        slice_displayed = dixon_slices[T2_slices.index(slice_displayed)]
                    data_df=data_df[data_df["slice"]==slice_displayed]
                else:
                    print( "HIST", plot_param, " not found")
                    return False
            else:
                print("HIST", isinstance(data_dframes[1],pd.core.frame.DataFrame))
                return False
        # NOTE(review): if data_dframes[0] is not a DataFrame, data_df is still
        # None here and the next line raises AttributeError; also the check
        # below reads .shape before the None test, so it cannot guard this.
        print("HIST data_df.shape[0]",data_df.shape[0])
        if data_df.shape[0] == 0 or type(data_df) == type(None):
            print("HIST return because df shape[0] = 0 or type of data_df = type None")
            return False
        if isinstance(data_df, pd.core.frame.DataFrame):
            print("Plotting HIST Plot" )
            data_df = data_df.sort_values(by=['roi'])
            # One KDE curve per ROI for the displayed slice.
            for roi in data_df.roi.unique():
                print(roi)
                query_str = '(slice == {}) and (roi == "{}")'.format(slice_displayed, roi)
                sns.distplot(data_df.query(query_str)[plot_param], hist=False, label=roi, ax=self.ax)
            self.ax.legend()
            # Axis label depends on which parameter is being shown.
            if plot_param == "T2m":
                self.ax.set_xlabel("$T_2$ [ms]")
            elif plot_param == "Am100":
                self.ax.set_xlabel("$A_m$ [%]")
            elif plot_param == "Af100":
                self.ax.set_xlabel("$A_f$ [%]")
            elif plot_param == "B1":
                self.ax.set_xlabel("$B_1$")
            elif plot_param == "fatPC":
                self.ax.set_xlabel("ff [%]")
            self.ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            self.plot_canvas.draw()
        return True
class BarPlotWidget(PlotWidget):
    """Plot widget drawing a per-ROI bar chart (mean +/- sd) of a fit
    parameter for the currently displayed slice."""

    def __init__(self, parent=None, showToolbar=True, data_df=None, image_size=256):
        self.data_df = data_df
        self.image_size = image_size
        super(BarPlotWidget,self).__init__(parent=parent, showToolbar=showToolbar)

    def update(self):
        # NOTE(review): shadows QWidget.update(); currently only prints the
        # x-limits and discards them.
        print((self.ax.get_xlim()))
        xmin,xmax = self.ax.get_xlim()

    def update_plot(self, slice_info,data_dframes, plot_param):
        """Redraw the bar plot for `plot_param` on the displayed slice.

        Same dataframe-selection logic as HistogramPlotWidget.update_plot.
        Returns True when something was plotted, False otherwise.
        """
        self.ax.cla()
        self.plot_canvas.draw()
        print("Entered BarPlotWidget.update_image, plot_param =", plot_param)
        slice_displayed = slice_info[0]
        T2_slices = slice_info[1]
        dixon_slices = slice_info[2]
        data_df=None
        print("data_dframes[0]", type(data_dframes[0]), data_dframes[0].columns)
        print("data_dframes[1]", type(data_dframes[1]), data_dframes[1].columns)
        if isinstance(data_dframes[0],pd.core.frame.DataFrame):
            if plot_param in data_dframes[0].columns:
                print("plot_param {} found in dataframe is T2".format(plot_param))
                data_df = data_dframes[0]
                data_df=data_df[data_df["slice"]==slice_displayed]
            elif isinstance(data_dframes[1],pd.core.frame.DataFrame):
                print("plot_param {} found in dataframe is Dixon".format(plot_param))
                print("data_dframes[1].columns",data_dframes[1].columns)
                if plot_param in data_dframes[1].columns:
                    print("plot_param in data_dframes[1]:", plot_param)
                    data_df = data_dframes[1]
                    # Map the displayed T2 slice index onto the Dixon index.
                    if slice_displayed in T2_slices:
                        slice_displayed = dixon_slices[T2_slices.index(slice_displayed)]
                    data_df=data_df[data_df["slice"]==slice_displayed]
                else:
                    print( plot_param, " not found")
                    return(False)
            else:
                print(isinstance(data_dframes[1],pd.core.frame.DataFrame))
                return(False)
        # NOTE(review): if data_dframes[0] is not a DataFrame, data_df is still
        # None here and the next line raises; the None test below comes after
        # the .shape access and cannot guard it.
        print("HIST data_df.shape[0]", data_df.shape[0])
        if data_df.shape[0] == 0 or type(data_df) == type(None):
            print("return because df shape[0] = 0 or type of data_df = type None")
            return False
        data_df = data_df.sort_values(by=['roi'])
        if isinstance(data_df, pd.core.frame.DataFrame):
            print("Plotting BAR Plot" )
            # NOTE(review): catplot is figure-level; passing ax= may open an
            # extra figure in some seaborn versions — confirm behavior.
            sns.catplot( kind='bar',
                         x='slice',
                         y=plot_param,
                         data=data_df,
                         hue='roi',
                         ci="sd",
                         ax=self.return_ax()
                         );
            self.ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            # Axis label depends on which parameter is being shown.
            if plot_param == "T2m":
                self.ax.set_ylabel("$T_2$ [ms]")
            elif plot_param == "Am100":
                self.ax.set_ylabel("$A_m$ [%]")
            elif plot_param == "Af100":
                self.ax.set_ylabel("$A_f$ [%]")
            elif plot_param == "B1":
                self.ax.set_ylabel("$B_1$")
            elif plot_param == "fatPC":
                self.ax.set_ylabel("ff [%]")
            self.ax.set_xlabel("slices")
            self.plot_canvas.draw()
        return True
class T2PlotWidget(PlotWidget):
    """Plot widget showing a pixel's echo-train decay and its T2 fit.

    The fit method (EPG vs. Azzabou/normal) is chosen by the attached radio
    buttons; fitted parameter values are shown in the legend.
    """

    def __init__( self, lmparams, parent=None, showToolbar=True):
        super(T2PlotWidget, self).__init__(parent, showToolbar=showToolbar)
        self.plot_T2_startup()
        self.lmparams = lmparams
        self.T2epgnorm_btns = radiobuttons_EPGWidget(self.lmparams, self)
        self.layout.addWidget(self.T2epgnorm_btns)

    def plot_T2_startup(self):
        """Draw a synthetic bi-exponential decay as placeholder content."""
        ttt = np.linspace(0,170, 17)
        yyy = 80*np.exp(-ttt/35.0)+20*np.exp(-ttt/120.0)
        yyy1 = yyy+np.random.randn(len(yyy))  # add unit-variance noise
        self.ax.semilogy(ttt, yyy1, 'o')
        self.ax.semilogy(ttt, yyy, '-')
        self.ax.set_xlabel('Time [ms]')
        self.ax.set_ylabel('Signal')
        self.ax.set_ylim(1,110)

    def update_plot(self, xcoord, ycoord, t2data):
        """Fit and redraw the decay curve for the selected pixel.

        xcoord/ycoord are accepted but unused here; t2data is the pixel's
        echo-train amplitudes.
        """
        print("update_T2PlotImag called")
        self.ax.cla() # clear the plot area
        if self.T2epgnorm_btns.epg_rbtn.isChecked():
            print("Run EPG Fit")
            print('echo value', self.lmparams['epgt2fitparams']['echo'])
            fit_curve, fit_data, lmresults, xxx = t2fit.calculate_T2values_on_slice_muscleEPG(self.lmparams, t2data)
        else:
            print("Run Normal T2 Fit")
            fit_curve, fit_data, lmresults, xxx = t2fit.calculate_T2values_on_slice_muscleAzz(self.lmparams,t2data)
        print(dir(lmresults))
        print(lmresults.success)
        if not lmresults.success:
            return
        #
        # Create uncertainty floats of varied params
        #
        # NOTE(review): lmfit stderr can be None when uncertainties could not
        # be estimated; ufloat(v, None) would raise here — confirm handling.
        ufs = {}
        for vname in lmresults.var_names:
            v = lmresults.params[vname].value
            e = lmresults.params[vname].stderr
            ufs[vname] = ufloat( v,e)
        # Express amplitudes as percentages of the fat+muscle total.
        if ('Amuscle' in ufs.keys()) and ('Afat' in ufs.keys()):
            ufs['Amuscle'] = 100.0*ufs['Amuscle']/(ufs['Amuscle']+ufs['Afat'])
            ufs['Afat'] = 100.0-ufs['Amuscle']
        # Build the legend string from whichever parameters were fitted.
        t2m_str = ""
        t2f_str = ""
        Am_str = ""
        Af_str = ""
        B1_str = ""
        for name, value in ufs.items():
            print(name)
            if name == 'T2muscle':
                t2m_str = "$T_{{2m}}$ = ${:5.2fL}$ ms\n".format(value)
            elif name == 'T2fat':
                t2f_str = "$T_{{2f}}$ = ${:5.2fL}$ ms\n".format(value)
            elif name == 'Amuscle':
                Am_str = "$A_m$ = ${:5.2fL}$\n".format(value)
            elif name == 'Afat':
                Af_str = "$A_f$ = ${:5.2fL}$\n".format(value)
            elif name == 'B1scale':
                B1_str = "$B_1$ scale = ${:5.2fL}$\n".format(value)
        results_legend = "{}{}{}{}{}".format(t2m_str, t2f_str, Am_str, Af_str, B1_str)
        # The Azzabou fit drops the first two echoes, hence xxx[2:] below.
        if self.T2epgnorm_btns.epg_rbtn.isChecked():
            self.ax.semilogy(xxx, 100*fit_data, 'o')
            self.ax.semilogy(xxx, 100*fit_curve, '-', label=results_legend)
        else:
            self.ax.semilogy(xxx[2:], 100*fit_curve, '-', label=results_legend)
            self.ax.semilogy(xxx, 100*fit_data, 'o')
        self.ax.legend( fontsize=8)
        self.ax.set_xlabel('Time [ms]')
        self.ax.set_ylabel('Signal')
        self.ax.set_ylim(0.5,150)
        self.plot_canvas.draw()
class radiobuttons_EPGWidget(QtWidgets.QWidget):
    """Radio-button pair choosing between EPG and conventional ("normal") T2
    fitting, plus a button that opens the parameter dialog for whichever
    model is currently selected.

    Parameters
    ----------
    lmparams : dict
        Shared lmfit parameter store, forwarded to both parameter dialogs.
    parent : QWidget, optional
    """

    def __init__(self, lmparams, parent=None):
        # Initialise the QWidget base first so children can parent to self.
        super(radiobuttons_EPGWidget, self).__init__(parent)
        self.lmparams = lmparams
        # Build both parameter dialogs up front; they are shown on demand.
        self.epgDialog = QtWidgets.QDialog()
        self.epgT2params_widget = epgT2paramsDialog.EpgT2paramsDialog(self.lmparams)
        self.epgT2params_widget.setupEpgT2paramsDialog(self.epgDialog)
        self.azzDialog = QtWidgets.QDialog()
        self.azzT2params_widget = azzT2paramsDialog.AzzT2paramsDialog(self.lmparams)
        self.azzT2params_widget.setupAzzT2paramsDialog(self.azzDialog)
        hlayout = QtWidgets.QHBoxLayout(self)
        # BUG FIX: the group must be kept on self (a bare local QButtonGroup
        # is garbage-collected when __init__ returns, silently un-grouping
        # the buttons), and exclusive() is only a *getter* -- the setter is
        # setExclusive(True).
        self.group_rbtns = QtWidgets.QButtonGroup(self)
        self.group_rbtns.setExclusive(True)
        self.epg_rbtn = QtWidgets.QRadioButton("EPG T2")
        self.norm_rbtn = QtWidgets.QRadioButton("normal T2")
        self.norm_rbtn.setChecked(True)
        self.T2params_btn = QtWidgets.QPushButton("T2 Parameters")
        # Tag each button with an identifier for the fit model it selects.
        self.epg_rbtn.fittingParam = "epg"
        self.norm_rbtn.fittingParam = 'norm'
        self.epg_rbtn.toggled.connect(lambda: self.btnstate(self.epg_rbtn))
        self.norm_rbtn.toggled.connect(lambda: self.btnstate(self.norm_rbtn))
        self.T2params_btn.clicked.connect(self.T2params_btn_clicked)
        self.group_rbtns.addButton(self.epg_rbtn)
        self.group_rbtns.addButton(self.norm_rbtn)
        hlayout.addWidget(self.norm_rbtn)
        hlayout.addWidget(self.epg_rbtn)
        hlayout.addStretch(1)
        hlayout.addWidget(self.T2params_btn)

    def T2params_btn_clicked(self):
        """Open the parameter dialog matching the selected fit model."""
        print("T2params_btn_clicked")
        if self.epg_rbtn.isChecked():
            rt = self.epgDialog.show()
        else:
            rt = self.azzDialog.show()
        print("rt =", rt)

    def btnstate(self, b):
        """Debug hook fired whenever a radio button toggles."""
        if b.isChecked():
            print(b.text())
            print(b.fittingParam)
        # self.mri_window.on_fittingParams_rbtn_toggled( str(b.fittingParam))
class radiobuttons_fitWidget(QtWidgets.QWidget):
    """Vertical column of radio buttons selecting which fitted parameter map
    (T2, Am, Af, B1 or Dixon fat fraction) the MRI window should display.

    Parameters
    ----------
    parent : QWidget, optional
    mri_window : MRIPlotWidget
        Window notified (via on_fittingParams_rbtn_toggled) when the
        selection changes.
    """

    def __init__(self, parent=None, mri_window=None):
        super(radiobuttons_fitWidget, self).__init__(parent)
        self.mri_window = mri_window
        vbox1_radiobuttons = QtWidgets.QVBoxLayout(self)
        # BUG FIX: keep the group on self (a local QButtonGroup is
        # garbage-collected after __init__, un-grouping the buttons) and use
        # setExclusive(True) -- exclusive() is only a getter.
        self.group_fittingParams_rbtns = QtWidgets.QButtonGroup(self)
        self.group_fittingParams_rbtns.setExclusive(True)
        # Public button attributes are kept so existing callers still work.
        self.T2_rbtn = QtWidgets.QRadioButton("T2")
        self.Am_rbtn = QtWidgets.QRadioButton("Am")
        self.Af_rbtn = QtWidgets.QRadioButton("Af")
        self.B1_rbtn = QtWidgets.QRadioButton("B1")
        self.Dixon_rbtn = QtWidgets.QRadioButton("Dixon Fat [%]")
        self.T2_rbtn.setChecked(True)
        # Wire each button to its fittingParam key and add it to the layout.
        button_params = [(self.T2_rbtn, "T2m"),
                         (self.Am_rbtn, "Am100"),
                         (self.Af_rbtn, "Af100"),
                         (self.B1_rbtn, "B1"),
                         (self.Dixon_rbtn, "fatPC")]
        for rbtn, param in button_params:
            rbtn.fittingParam = param
            # Default-argument binding avoids the late-binding closure bug.
            rbtn.toggled.connect(lambda checked=False, b=rbtn: self.btnstate(b))
            self.group_fittingParams_rbtns.addButton(rbtn)
            vbox1_radiobuttons.addWidget(rbtn)
        vbox1_radiobuttons.addStretch(1)

    def btnstate(self, b):
        """Forward the newly selected parameter key to the MRI window."""
        if b.isChecked():
            print(b.text())
            print(b.fittingParam)
            self.mri_window.on_fittingParams_rbtn_toggled(str(b.fittingParam))
class ApplicationWindow(QtWidgets.QMainWindow):
    """Main window: MRI slice view with fit-parameter radio buttons on the
    left, and T2-fit / image / histogram plots on the right, separated by a
    horizontal splitter.  Much of __init__ fills the plots with synthetic
    demo data (chessboard, random scatter) so the layout can be exercised
    without real scan data.
    """

    def __init__(self, params):
        # NOTE(review): attributes are assigned before the QMainWindow base
        # __init__ is invoked (further down); this works in PyQt5 but is
        # fragile -- consider calling super().__init__() first.
        self.params = params  # lmfit parameter store shared with T2PlotWidget
        imageData = T2imageData()
        print("imageData.fittingParam:", imageData.fittingParam)
        # Synthetic scatter: npts random pixel indices with Gaussian values,
        # used below to seed the demo image and histogram.
        npts = 256 * 100
        iii = np.random.permutation(np.arange(255 * 255))[:npts]
        ddd = np.random.randn(npts) * 100 + 500
        data_df = pd.DataFrame({'iii': iii, 'ddd': ddd})
        super(ApplicationWindow, self).__init__()
        leftwindow = QtWidgets.QWidget()
        rightwindow = QtWidgets.QWidget()
        splitHwidget = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
        # hlayout = QtWidgets.QHBoxLayout(self._main)
        hlayout = QtWidgets.QHBoxLayout(leftwindow)
        vlayout = QtWidgets.QVBoxLayout(rightwindow)
        mriplot_window = MRIPlotWidget(imageData=imageData)
        rbtns_window = radiobuttons_fitWidget(mri_window=mriplot_window)
        t2plot_window = T2PlotWidget(self.params, showToolbar=False)
        h1_window = PlotWidget(showToolbar=False)
        h2_window = HistogramPlotWidget(showToolbar=True)
        # Let the MRI widget push updates to the other plot widgets.
        mriplot_window.register_PlotWidgets(t2plot_window, h1_window, h2_window)
        hlayout.addWidget(rbtns_window)
        hlayout.addWidget(mriplot_window)
        vlayout.addWidget(t2plot_window)
        vlayout.addWidget(h1_window)
        vlayout.addWidget(h2_window)

        def func3(x, y):
            # Demo surface for the layered-image example below.
            return (1 - x / 2 + x**5 + y**3) * np.exp(-(x**2 + y**2))

        # make these smaller to increase the resolution
        dx, dy = 0.05, 0.05
        x = np.arange(-3.0, 3.0, dx)
        y = np.arange(-3.0, 3.0, dy)
        X, Y = np.meshgrid(x, y)
        # when layering multiple images, the images need to have the same
        # extent. This does not mean they need to have the same shape, but
        # they both need to render to the same coordinate system determined by
        # xmin, xmax, ymin, ymax. Note if you use different interpolations
        # for the images their apparent extent could be different due to
        # interpolation edge effects
        extent = np.min(x), np.max(x), np.min(y), np.max(y)
        Z1 = np.add.outer(range(8), range(8)) % 2  # chessboard
        mriplot_window.return_ax().imshow(Z1, cmap=plt.cm.gray,
                                          interpolation='nearest', extent=extent)
        Z2 = func3(X, Y)
        mriplot_window.return_ax().imshow(Z2, cmap=plt.cm.viridis, alpha=.9,
                                          interpolation='bilinear', extent=extent)
        splitHwidget.addWidget(leftwindow)
        splitHwidget.addWidget(rightwindow)
        print(data_df.head())
        # Scatter the random demo values into a 255x255 image.
        plot_image = np.zeros(255 * 255)
        plot_image[data_df['iii']] = data_df['ddd']
        h1_window.return_ax().imshow(plot_image.reshape((255, 255)))
        h1_window.return_ax().set_xlabel('x')
        h1_window.return_ax().set_ylabel('y')
        h2_window.return_ax().hist(ddd, bins=100)
        h2_window.return_ax().set_xlabel('x')
        h2_window.return_ax().set_ylabel('y')
        self.setCentralWidget(splitHwidget)

    def zoom(self):
        # NOTE(review): self.histtoolbar is never assigned in this class --
        # this looks like dead/legacy code copied from another widget.
        self.histtoolbar.zoom()

    def ax_changed(self, ax):
        # NOTE(review): despite the name, this returns True when the limits
        # are UNchanged (old == new).  It also reads self.lim_dict, which is
        # never assigned in this class -- presumably legacy code; verify
        # before reusing.
        old_xlim, old_ylim = self.lim_dict[ax]
        print("old xlim", old_xlim, "ylim", old_ylim)
        print("new xlim", ax.get_xlim(), "ylim", ax.get_ylim())
        return np.all(old_xlim == ax.get_xlim()) and np.all(old_ylim == ax.get_ylim())

    def onrelease(self, event):
        # Mouse-release handler reporting which axes changed their limits.
        # NOTE(review): relies on self.histtoolbar / self.static_canvas,
        # neither of which is set in this class -- legacy code; verify.
        print("Active Toolbar button:", self.histtoolbar._active)
        print("plot release")
        print(event)
        self.static_canvas.flush_events()
        changed_axes = [ax for ax in self.static_canvas.figure.axes if self.ax_changed(ax)]
        not_changed_axes = [ax for ax in self.static_canvas.figure.axes if not self.ax_changed(ax)]
        print("changed_axes", changed_axes)
        print("not_changed_axes", not_changed_axes)
        for ax in changed_axes:
            print("Changed xlim", ax.get_xlim(), "ylim", ax.get_ylim())
if __name__ == "__main__":
    # Default EPG T2 fit parameters: two-compartment (muscle + fat) model
    # with fixed T1s and echo spacing; muscle T2 and both amplitudes vary.
    epgt2fitparams = lm.Parameters()
    epgt2fitparams.add('T2fat', value=180.0, min=0, max=5000, vary=False)
    epgt2fitparams.add('T2muscle', value=35, min=0, max=100, vary=True)
    epgt2fitparams.add('Afat', value=0.2, min=0, max=10, vary=True)
    epgt2fitparams.add('Amuscle', value=0.8, min=0, max=10, vary=True)
    epgt2fitparams.add('T1fat', value=365.0, vary=False)
    epgt2fitparams.add('T1muscle', value=1400, vary=False)
    epgt2fitparams.add('echo', value=10.0, vary=False)
    # B1scale is read by the EPG fit and its parameter dialog; without it
    # the dialog legend/fit would miss the B1 term (see epgT2paramsDialog's
    # own __main__ block, which adds it too).
    epgt2fitparams.add('B1scale', value=1.0, min=0, max=2, vary=True)
    # BUG FIX: downstream code (T2PlotWidget.update_plot and
    # EpgT2paramsDialog.__init__) indexes lmparams['epgt2fitparams'], so the
    # Parameters object must be wrapped in a dict -- passing it bare raised
    # KeyError as soon as the parameter dialog was constructed.
    lmparams = {'epgt2fitparams': epgt2fitparams}
    qapp = QtWidgets.QApplication(sys.argv)
    app = ApplicationWindow(lmparams)
    app.show()
    qapp.exec_()
|
{"/visionplot_widgets.py": ["/t2fit.py", "/ImageData.py", "/epgT2paramsDialog.py", "/azzT2paramsDialog.py"], "/simple_pandas_plot.py": ["/visionplot_widgets.py", "/mriplotwidget.py", "/ImageData.py"]}
|
546
|
EricHughesABC/T2EPGviewer
|
refs/heads/master
|
/epgT2paramsDialog.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'epg_fit_parameters_dialog.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
import lmfit as lm
from PyQt5 import QtCore, QtGui, QtWidgets
class EpgT2paramsDialog(object):
    """Dialog (generated from epg_fit_parameters_dialog.ui, then hand-edited)
    for editing the EPG T2 fit parameters: value / min / max / optimize flag
    for muscle and fat T2, amplitudes and B1 scale, plus fixed T1s and echo
    spacing.  On OK, the edited values are written back into
    lmparams['epgt2fitparams'] as an lmfit Parameters object.
    """

    def __init__(self, lmparams):
        """
        Parameters
        ----------
        lmparams : dict
            Shared parameter store; must contain the key 'epgt2fitparams'
            holding an lmfit.Parameters instance.
        """
        self.lmparams = lmparams
        self.params = self.lmparams['epgt2fitparams']

    def setupEpgT2paramsDialog(self, Dialog):
        """Build the widget tree on *Dialog* and wire up the button box."""
        self.Dialog = Dialog
        Dialog.setObjectName("Dialog")
        Dialog.resize(386, 284)
        self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
        self.buttonBox.setGeometry(QtCore.QRect(60, 250, 321, 23))
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.widget = QtWidgets.QWidget(Dialog)
        self.widget.setGeometry(QtCore.QRect(20, 10, 361, 231))
        self.widget.setObjectName("widget")
        # Grid layout: rows are parameters, columns are
        # label / value / min / max / optimize-checkbox.
        self.gridLayout = QtWidgets.QGridLayout(self.widget)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setObjectName("gridLayout")
        self.fatT1value = QtWidgets.QLineEdit(self.widget)
        self.fatT1value.setValidator(QtGui.QDoubleValidator())
        self.fatT1value.setObjectName("fatT1value")
        self.gridLayout.addWidget(self.fatT1value, 7, 1, 1, 1)
        self.muscleFractionMax = QtWidgets.QLineEdit(self.widget)
        self.muscleFractionMax.setValidator(QtGui.QDoubleValidator())
        self.muscleFractionMax.setObjectName("muscleFractionMax")
        self.gridLayout.addWidget(self.muscleFractionMax, 3, 3, 1, 1)
        self.optimizeMuscleFraction = QtWidgets.QCheckBox(self.widget)
        self.optimizeMuscleFraction.setText("")
        self.optimizeMuscleFraction.setChecked(True)
        self.optimizeMuscleFraction.setObjectName("optimizeMuscleFraction")
        self.gridLayout.addWidget(self.optimizeMuscleFraction, 3, 4, 1, 1)
        self.fatFractionMin = QtWidgets.QLineEdit(self.widget)
        self.fatFractionMin.setValidator(QtGui.QDoubleValidator())
        self.fatFractionMin.setObjectName("fatFractionMin")
        self.gridLayout.addWidget(self.fatFractionMin, 4, 2, 1, 1)
        self.fatFractionMax = QtWidgets.QLineEdit(self.widget)
        self.fatFractionMax.setValidator(QtGui.QDoubleValidator())
        self.fatFractionMax.setObjectName("fatFractionMax")
        self.gridLayout.addWidget(self.fatFractionMax, 4, 3, 1, 1)
        self.b1scaleMax = QtWidgets.QLineEdit(self.widget)
        self.b1scaleMax.setValidator(QtGui.QDoubleValidator())
        self.b1scaleMax.setObjectName("b1scaleMax")
        self.gridLayout.addWidget(self.b1scaleMax, 5, 3, 1, 1)
        self.muscleFractionMin = QtWidgets.QLineEdit(self.widget)
        self.muscleFractionMin.setValidator(QtGui.QDoubleValidator())
        self.muscleFractionMin.setObjectName("muscleFractionMin")
        self.gridLayout.addWidget(self.muscleFractionMin, 3, 2, 1, 1)
        self.b1scaleValue = QtWidgets.QLineEdit(self.widget)
        self.b1scaleValue.setValidator(QtGui.QDoubleValidator())
        self.b1scaleValue.setObjectName("b1scaleValue")
        self.gridLayout.addWidget(self.b1scaleValue, 5, 1, 1, 1)
        self.b1scaleMin = QtWidgets.QLineEdit(self.widget)
        self.b1scaleMin.setValidator(QtGui.QDoubleValidator())
        self.b1scaleMin.setObjectName("b1scaleMin")
        self.gridLayout.addWidget(self.b1scaleMin, 5, 2, 1, 1)
        self.fatFractionLabel = QtWidgets.QLabel(self.widget)
        self.fatFractionLabel.setObjectName("fatFractionLabel")
        self.gridLayout.addWidget(self.fatFractionLabel, 4, 0, 1, 1)
        self.fatFractionValue = QtWidgets.QLineEdit(self.widget)
        self.fatFractionValue.setValidator(QtGui.QDoubleValidator())
        self.fatFractionValue.setObjectName("fatFractionValue")
        self.gridLayout.addWidget(self.fatFractionValue, 4, 1, 1, 1)
        self.muscleT1label = QtWidgets.QLabel(self.widget)
        self.muscleT1label.setObjectName("muscleT1label")
        self.gridLayout.addWidget(self.muscleT1label, 6, 0, 1, 1)
        self.fatT2min = QtWidgets.QLineEdit(self.widget)
        self.fatT2min.setValidator(QtGui.QDoubleValidator())
        self.fatT2min.setObjectName("fatT2min")
        self.gridLayout.addWidget(self.fatT2min, 2, 2, 1, 1)
        self.maxHeadingLabel = QtWidgets.QLabel(self.widget)
        self.maxHeadingLabel.setObjectName("maxHeadingLabel")
        self.gridLayout.addWidget(self.maxHeadingLabel, 0, 3, 1, 1)
        self.minHeadingLabel = QtWidgets.QLabel(self.widget)
        self.minHeadingLabel.setObjectName("minHeadingLabel")
        self.gridLayout.addWidget(self.minHeadingLabel, 0, 2, 1, 1)
        self.valueHeadingLabel = QtWidgets.QLabel(self.widget)
        self.valueHeadingLabel.setObjectName("valueHeadingLabel")
        self.gridLayout.addWidget(self.valueHeadingLabel, 0, 1, 1, 1)
        self.fatT2value = QtWidgets.QLineEdit(self.widget)
        self.fatT2value.setValidator(QtGui.QDoubleValidator())
        self.fatT2value.setObjectName("fatT2value")
        self.gridLayout.addWidget(self.fatT2value, 2, 1, 1, 1)
        self.optimizeFatT2 = QtWidgets.QCheckBox(self.widget)
        self.optimizeFatT2.setText("")
        self.optimizeFatT2.setChecked(False)
        self.optimizeFatT2.setObjectName("optimizeFatT2")
        self.gridLayout.addWidget(self.optimizeFatT2, 2, 4, 1, 1)
        self.muscleT2value = QtWidgets.QLineEdit(self.widget)
        self.muscleT2value.setInputMethodHints(QtCore.Qt.ImhDigitsOnly | QtCore.Qt.ImhFormattedNumbersOnly)
        # CONSISTENCY FIX: every other numeric field has a QDoubleValidator;
        # add one here too so non-numeric input is rejected at the widget.
        self.muscleT2value.setValidator(QtGui.QDoubleValidator())
        self.muscleT2value.setProperty("muscleValue", 0.0)
        self.muscleT2value.setProperty("number", 35.0)
        self.muscleT2value.setObjectName("muscleT2value")
        self.gridLayout.addWidget(self.muscleT2value, 1, 1, 1, 1)
        self.fatT2label = QtWidgets.QLabel(self.widget)
        self.fatT2label.setObjectName("fatT2label")
        self.gridLayout.addWidget(self.fatT2label, 2, 0, 1, 1)
        self.fatT2max = QtWidgets.QLineEdit(self.widget)
        self.fatT2max.setValidator(QtGui.QDoubleValidator())
        self.fatT2max.setObjectName("fatT2max")
        self.gridLayout.addWidget(self.fatT2max, 2, 3, 1, 1)
        self.muscleT2max = QtWidgets.QLineEdit(self.widget)
        self.muscleT2max.setValidator(QtGui.QDoubleValidator())
        self.muscleT2max.setObjectName("muscleT2max")
        self.gridLayout.addWidget(self.muscleT2max, 1, 3, 1, 1)
        self.opimizedHeadingLabel = QtWidgets.QLabel(self.widget)
        self.opimizedHeadingLabel.setObjectName("opimizedHeadingLabel")
        self.gridLayout.addWidget(self.opimizedHeadingLabel, 0, 4, 1, 1)
        self.muscleT2label = QtWidgets.QLabel(self.widget)
        self.muscleT2label.setObjectName("muscleT2label")
        self.gridLayout.addWidget(self.muscleT2label, 1, 0, 1, 1)
        self.muscleT2min = QtWidgets.QLineEdit(self.widget)
        self.muscleT2min.setInputMethodHints(QtCore.Qt.ImhFormattedNumbersOnly)
        # CONSISTENCY FIX: validator added (see muscleT2value above).
        self.muscleT2min.setValidator(QtGui.QDoubleValidator())
        self.muscleT2min.setObjectName("muscleT2min")
        self.gridLayout.addWidget(self.muscleT2min, 1, 2, 1, 1)
        self.optimizeMuscleT2 = QtWidgets.QCheckBox(self.widget)
        self.optimizeMuscleT2.setText("")
        self.optimizeMuscleT2.setChecked(True)
        self.optimizeMuscleT2.setObjectName("optimizeMuscleT2")
        self.gridLayout.addWidget(self.optimizeMuscleT2, 1, 4, 1, 1)
        self.optimizeB1scale = QtWidgets.QCheckBox(self.widget)
        self.optimizeB1scale.setText("")
        self.optimizeB1scale.setChecked(True)
        self.optimizeB1scale.setObjectName("optimizeB1scale")
        self.gridLayout.addWidget(self.optimizeB1scale, 5, 4, 1, 1)
        self.optimizeFatFraction = QtWidgets.QCheckBox(self.widget)
        self.optimizeFatFraction.setText("")
        self.optimizeFatFraction.setChecked(True)
        self.optimizeFatFraction.setObjectName("optimizeFatFraction")
        self.gridLayout.addWidget(self.optimizeFatFraction, 4, 4, 1, 1)
        self.b1scaleLabel = QtWidgets.QLabel(self.widget)
        self.b1scaleLabel.setObjectName("b1scaleLabel")
        self.gridLayout.addWidget(self.b1scaleLabel, 5, 0, 1, 1)
        self.muscleT1value = QtWidgets.QLineEdit(self.widget)
        # CONSISTENCY FIX: validator added (was the only value field without
        # one).
        self.muscleT1value.setValidator(QtGui.QDoubleValidator())
        self.muscleT1value.setObjectName("muscleT1value")
        self.gridLayout.addWidget(self.muscleT1value, 6, 1, 1, 1)
        self.T2echoValue = QtWidgets.QLineEdit(self.widget)
        self.T2echoValue.setValidator(QtGui.QDoubleValidator())
        self.T2echoValue.setObjectName("T2echoValue")
        self.gridLayout.addWidget(self.T2echoValue, 8, 1, 1, 1)
        self.muscleFractionValue = QtWidgets.QLineEdit(self.widget)
        self.muscleFractionValue.setValidator(QtGui.QDoubleValidator())
        self.muscleFractionValue.setObjectName("muscleFractionValue")
        self.gridLayout.addWidget(self.muscleFractionValue, 3, 1, 1, 1)
        self.muscleFractionLabel = QtWidgets.QLabel(self.widget)
        self.muscleFractionLabel.setObjectName("muscleFractionLabel")
        self.gridLayout.addWidget(self.muscleFractionLabel, 3, 0, 1, 1)
        self.label = QtWidgets.QLabel(self.widget)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 8, 0, 1, 1)
        self.fatT1label = QtWidgets.QLabel(self.widget)
        self.fatT1label.setObjectName("fatT1label")
        self.gridLayout.addWidget(self.fatT1label, 7, 0, 1, 1)
        self.retranslateUi(Dialog)
        # OK goes through dialog_ok_clicked so the parameters are validated
        # and written back before the dialog closes.
        self.buttonBox.accepted.connect(self.dialog_ok_clicked)
        self.buttonBox.rejected.connect(Dialog.reject)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Set all display strings and default field values."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "EPG"))
        self.fatT1value.setText(_translate("Dialog", "1450"))
        self.muscleFractionMax.setText(_translate("Dialog", "10"))
        self.fatFractionMin.setText(_translate("Dialog", "0"))
        self.fatFractionMax.setText(_translate("Dialog", "10"))
        self.b1scaleMax.setText(_translate("Dialog", "2"))
        self.muscleFractionMin.setText(_translate("Dialog", "0"))
        self.b1scaleValue.setText(_translate("Dialog", "1"))
        self.b1scaleMin.setText(_translate("Dialog", "0"))
        self.fatFractionLabel.setText(_translate("Dialog", "Fat Fraction"))
        self.fatFractionValue.setText(_translate("Dialog", ".3"))
        self.muscleT1label.setText(_translate("Dialog", "<html><head/><body><p>Muscle T<span style=\" vertical-align:sub;\">1</span> (ms)</p></body></html>"))
        self.fatT2min.setText(_translate("Dialog", "0"))
        self.maxHeadingLabel.setText(_translate("Dialog", "maximum"))
        self.minHeadingLabel.setText(_translate("Dialog", "minimum"))
        self.valueHeadingLabel.setText(_translate("Dialog", "value"))
        self.fatT2value.setText(_translate("Dialog", "200"))
        self.muscleT2value.setText(_translate("Dialog", "35"))
        self.fatT2label.setText(_translate("Dialog", "<html><head/><body><p>Fat T<span style=\" vertical-align:sub;\">2</span> (ms)</p></body></html>"))
        self.fatT2max.setText(_translate("Dialog", "2000"))
        self.muscleT2max.setText(_translate("Dialog", "150"))
        self.opimizedHeadingLabel.setText(_translate("Dialog", "optimized"))
        self.muscleT2label.setText(_translate("Dialog", "<html><head/><body><p>Muscle T<span style=\" vertical-align:sub;\">2</span> (ms)</p></body></html>"))
        self.muscleT2min.setText(_translate("Dialog", "0"))
        self.b1scaleLabel.setText(_translate("Dialog", "B<sub>1</sub> scale"))
        self.muscleT1value.setText(_translate("Dialog", "500"))
        self.T2echoValue.setText(_translate("Dialog", "10"))
        self.muscleFractionValue.setText(_translate("Dialog", "0.7"))
        self.muscleFractionLabel.setText(_translate("Dialog", "Muscle Fraction"))
        self.label.setText(_translate("Dialog", "<html><head/><body><p>T<span style=\" vertical-align:sub;\">2</span> Echo (ms)</p></body></html>"))
        self.fatT1label.setText(_translate("Dialog", "<html><head/><body><p>Fat T<span style=\" vertical-align:sub;\">1</span> (ms)</p></body></html>"))

    def dialog_ok_clicked(self):
        """Validate the fields and accept the dialog only if they parse."""
        print("dialog_ok_clicked")
        self.Dialog.setResult(1)
        worked = self.get_fitparameters()
        if worked:
            self.params.pretty_print()
            self.Dialog.accept()

    def get_fitparameters(self):
        """Read all fields into self.params and the shared lmparams store.

        Returns
        -------
        bool
            True on success; False if any field fails to parse or if no
            parameter at all is marked for optimization.
        """
        print("self.optimizeFatFraction.isChecked()", self.optimizeFatFraction.isChecked())
        worked = True
        try:
            self.params.add(name='T2muscle', value=float(self.muscleT2value.text()),
                            min=float(self.muscleT2min.text()),
                            max=float(self.muscleT2max.text()),
                            vary=self.optimizeMuscleT2.isChecked())
            self.params.add(name='T2fat', value=float(self.fatT2value.text()),
                            min=float(self.fatT2min.text()),
                            max=float(self.fatT2max.text()),
                            vary=self.optimizeFatT2.isChecked())
            self.params.add(name='Amuscle', value=float(self.muscleFractionValue.text()),
                            min=float(self.muscleFractionMin.text()),
                            max=float(self.muscleFractionMax.text()),
                            vary=self.optimizeMuscleFraction.isChecked())
            self.params.add(name='Afat', value=float(self.fatFractionValue.text()),
                            min=float(self.fatFractionMin.text()),
                            max=float(self.fatFractionMax.text()),
                            vary=self.optimizeFatFraction.isChecked())
            self.params.add(name='B1scale', value=float(self.b1scaleValue.text()),
                            min=float(self.b1scaleMin.text()),
                            max=float(self.b1scaleMax.text()),
                            vary=self.optimizeB1scale.isChecked())
            self.params.add(name='T1muscle', value=float(self.muscleT1value.text()),
                            vary=False)
            self.params.add(name='T1fat', value=float(self.fatT1value.text()),
                            vary=False)
            self.params.add(name='echo', value=float(self.T2echoValue.text()),
                            vary=False)
            # Reject the input if NO checkbox is ticked: a fit with zero
            # free parameters is meaningless.
            buttonsChecked = [not self.optimizeFatFraction.isChecked(),
                              not self.optimizeMuscleFraction.isChecked(),
                              not self.optimizeMuscleT2.isChecked(),
                              not self.optimizeFatT2.isChecked(),
                              not self.optimizeB1scale.isChecked()]
            print(buttonsChecked)
            if all(buttonsChecked):
                worked = False
            self.lmparams['epgt2fitparams'] = self.params
        except Exception as exc:
            # BUG FIX: was a bare ``except: worked = False`` which silently
            # swallowed every error; now the failure reason is reported.
            print("get_fitparameters failed:", exc)
            worked = False
        return worked
if __name__ == "__main__":
    # Stand-alone demo: open the EPG parameter dialog with a default
    # two-compartment parameter set.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Dialog = QtWidgets.QDialog()
    Dialog.setModal(False)
    # Build the shared parameter store the dialog reads and writes.
    lmparams = {}
    epgt2fitparams = lm.Parameters()
    epgt2fitparams.add('T2fat', value=180.0, min=0, max=5000, vary=False)
    epgt2fitparams.add('T2muscle', value=35, min=0, max=100, vary=True)
    epgt2fitparams.add('Afat', value=0.01, min=0, max=10, vary=True)
    epgt2fitparams.add('Amuscle', value=0.1, min=0, max=10, vary=True)
    epgt2fitparams.add('T1fat', value=365.0, vary=False)
    epgt2fitparams.add('T1muscle', value=1400, vary=False)
    epgt2fitparams.add('echo', value=10.0, vary=False)
    epgt2fitparams.add('B1scale', value=1.0, min=0, max=2, vary=True)
    lmparams['epgt2fitparams'] = epgt2fitparams
    ui = EpgT2paramsDialog(lmparams)
    ui.setupEpgT2paramsDialog(Dialog)
    rt = Dialog.open()
    print("Dialog.result() =", Dialog.result())
    # print( "get_fitparameters(ui).items()", ui.get_fitparameters().items())
    sys.exit(app.exec_())
|
{"/visionplot_widgets.py": ["/t2fit.py", "/ImageData.py", "/epgT2paramsDialog.py", "/azzT2paramsDialog.py"], "/simple_pandas_plot.py": ["/visionplot_widgets.py", "/mriplotwidget.py", "/ImageData.py"]}
|
547
|
EricHughesABC/T2EPGviewer
|
refs/heads/master
|
/mriplotwidget.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 17 14:34:43 2019
@author: neh69
"""
import numpy as np
import matplotlib
from matplotlib import pyplot as plt
#import seaborn as sns
from matplotlib.backends.qt_compat import QtCore, QtWidgets, is_pyqt5
#import seaborn as sns
if is_pyqt5():
print("pyqt5")
from matplotlib.backends.backend_qt5agg import (
FigureCanvas, NavigationToolbar2QT as NavigationToolbar)
else:
print("pyqt4")
from matplotlib.backends.backend_qt4agg import (
FigureCanvas, NavigationToolbar2QT as NavigationToolbar)
#from matplotlib.figure import Figure
import mplcursors
#from ImageData import T2imageData
# Display strings per fitted parameter:
#   key -> [axis/colourbar label, hover-annotation format string taking
#           (roi name, value)].
parameterNames = {'T2m': ['T$_{2m}$ [ms]', '{}, T$_{{2m}}$ = {:.1f} [ms]'],
                  'Am100': ['A$_{m}$ [%]', '{}, A$_{{m}}$ = {:.1f} [%]'],
                  'Af100': ['A$_{f}$ [%]', '{}, A$_{{f}}$ = {:.1f} [%]'],
                  'B1': ['B$_{1}$ [-]', '{}, B$_{{1}}$ = {:.1f} [-]'],
                  'fatPC': ['fat [%]', '{}, fat = {:.1f} [%]']
                  }
class MRIPlotWidget(QtWidgets.QWidget):
    """MRI slice viewer: shows one echo of one slice with fitted-parameter
    ROIs overlaid, plus sliders for slice/echo selection, a radio button to
    hide the background image, and an mplcursors hover annotation reporting
    the fitted value under the cursor.  Pushes updates to registered
    T2-fit / histogram / bar plot widgets.
    """
# class PlotWidget(QtWidgets.QWidget):

    def __init__(self, parent=None, showToolbar=True, imageData=None):
        super().__init__(parent)
        self.fig, self.ax = plt.subplots()
        # fig =Figure(figsize=(3, 5))
        self.fig.set_tight_layout(True)
        self.plot_canvas = FigureCanvas(self.fig)
        # self.ax = self.fig.add_subplot(111)
        # mplcursors.cursor(fig,hover=True)
        self.layout = QtWidgets.QVBoxLayout(self)
        # def __init__( self, parent=None, showToolbar=True, imageData=None):
        self.axesList = []
        # imageData holds the scan volumes, ROI masks and summary DataFrames.
        self.imageData = imageData
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        # Radio button toggling the anatomical background on/off.
        self.toggleImage = QtWidgets.QRadioButton("Hide background Image")
        self.toggleImage.toggled.connect(lambda: self.toggleImageChanged(self.toggleImage))
        self.toggleImage.isChecked()
        self.layout.addWidget(self.toggleImage)
        self.toggleImage.setSizePolicy(sizePolicy)
        # Slice selection slider (0..4).
        self.sliceLabel = QtWidgets.QLabel("slices")
        self.layout.addWidget(self.sliceLabel)
        self.sliceLabel.setSizePolicy(sizePolicy)
        self.slicesSlider = QtWidgets.QSlider(QtCore.Qt.Horizontal)
        self.slicesSlider.setMinimum(0)
        self.slicesSlider.setMaximum(4)
        self.slicesSlider.setValue(0)
        self.slicesSlider.setTickPosition(QtWidgets.QSlider.TicksBelow)
        self.slicesSlider.setTickInterval(1)
        self.slicesSlider.valueChanged.connect(self.valuechangedSlider)
        self.slicesSlider.setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed))
        self.layout.addWidget(self.slicesSlider)
        # Echo selection slider (0..16).
        self.echoesLabel = QtWidgets.QLabel("echoes")
        self.echoesLabel.setSizePolicy(sizePolicy)
        self.layout.addWidget(self.echoesLabel)
        self.echoesSlider = QtWidgets.QSlider(QtCore.Qt.Horizontal)
        self.echoesSlider.setMinimum(0)
        self.echoesSlider.setMaximum(16)
        self.echoesSlider.setValue(0)
        self.echoesSlider.setTickPosition(QtWidgets.QSlider.TicksBelow)
        self.echoesSlider.setTickInterval(1)
        self.echoesSlider.valueChanged.connect(self.valuechangedSlider)
        self.echoesSlider.setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed))
        self.layout.addWidget(self.echoesSlider)
        self.layout.addWidget(self.plot_canvas)
        if showToolbar:
            self.toolbar = NavigationToolbar(self.plot_canvas, self)
            self.layout.addWidget(self.toolbar)
        self.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                           QtWidgets.QSizePolicy.Expanding)
        self.updateGeometry()
        # Clicking a pixel triggers a per-pixel T2 fit (see onclick).
        self.plot_canvas.mpl_connect('button_press_event', self.onclick)
        # self.plot_canvas.mpl_connect("motion_notify_event", self.onhover)
        # Placeholder splash image shown until real data is loaded.
        self.ax.imshow(matplotlib.image.imread('vision.png')[:, :, 0])
        # self.canvas.figure.axes
        # self.mpl_cursor = mplcursors.cursor(self.plot_canvas.figure.axes,hover=True)
        self.ax.grid(False)

    def valuechangedSlider(self):
        """Redraw the slice/echo selected by the sliders and refresh the
        registered histogram and bar plots."""
        slice_ = self.slicesSlider.value()
        echo = self.echoesSlider.value()
        self.imageData.currentSlice = slice_
        self.imageData.currentEcho = echo
        print("slicesSlider Value =", slice_, "echoesSlider Value =", echo)
        if isinstance(self.imageData.ImageDataT2, np.ndarray):
            print("updating image slice")
            if self.toggleImage.isChecked():
                # Background hidden: blank the slice image in place.
                self.imageData.mriSliceIMG *= 0.0
            else:
                # NOTE(review): attribute is spelled 'mriSiceIMG' here (and
                # below) but 'mriSliceIMG' above and in toggleImageChanged --
                # two distinct attributes.  When the background is hidden,
                # zeroing mriSliceIMG has no effect on the mriSiceIMG that
                # is actually drawn.  Looks like a typo bug; confirm.
                self.imageData.mriSiceIMG = self.imageData.ImageDataT2[:, :, slice_, echo].copy()
            self.imageData.overlayRoisOnImage(slice_ + 1, self.imageData.fittingParam)
            self.update_plot(self.imageData.mriSiceIMG, self.imageData.maskedROIs.reshape(self.imageData.mriSiceIMG.shape))
            self.histPlotWidget.update_plot([slice_ + 1, self.imageData.T2slices, self.imageData.dixonSlices],
                                            [self.imageData.t2_data_summary_df, self.imageData.dixon_data_summary_df],
                                            self.imageData.fittingParam)
            self.barPlotWidget.update_plot([slice_ + 1, self.imageData.T2slices, self.imageData.dixonSlices],
                                           [self.imageData.t2_data_summary_df, self.imageData.dixon_data_summary_df],
                                           self.imageData.fittingParam)
        else:
            print("No images to update")

    def on_fittingParams_rbtn_toggled(self, fittingParam):
        """Switch the displayed parameter map and redraw."""
        # rb = self.fittingParams_rbtn.sender()
        print(fittingParam)
        self.imageData.fittingParam = fittingParam
        self.valuechangedSlider()

    def register_PlotWidgets(self, T2PlotWidget, histPlotWidget,
                             barPlotWidget, radioButtonsWidget):
        """Store references to the companion widgets this viewer updates."""
        self.T2PlotWidget = T2PlotWidget
        self.histPlotWidget = histPlotWidget
        self.barPlotWidget = barPlotWidget
        self.radioButtonsWidget = radioButtonsWidget

    # def onhover(self,event):
    #
    #     if event.inaxes:
    #
    #         xcoord = int(round(event.xdata))
    #         ycoord = int(round(event.ydata))
    #
    #         print('on hover, ', xcoord, ycoord)

    def onclick(self, event):
        """On a pixel click, extract its echo train and trigger a T2 fit."""
        # NOTE(review): event.xdata/ydata are None when the click lands
        # outside the axes -- round() would raise TypeError here; consider
        # guarding with ``if event.inaxes``.
        xcoord = int(round(event.xdata))
        ycoord = int(round(event.ydata))
        print("MRI Plot window On Click")
        print('ycoord =', ycoord)
        print(type(self.imageData.ImageDataT2))
        if type(self.imageData.ImageDataT2) != type(None):
            image_shape = self.imageData.ImageDataT2.shape
            print(image_shape[0], image_shape[0] - ycoord, ycoord)
            # All echoes for the clicked pixel on the current slice.
            t2data = self.imageData.ImageDataT2[ycoord, xcoord, int(self.slicesSlider.value()), :]
            self.T2PlotWidget.update_plot(xcoord, ycoord, t2data)

    def update_plot(self, img, maskedROIs):
        """Draw the slice image with the ROI parameter map overlaid and
        attach a hover annotation showing roi name and fitted value.

        Parameters
        ----------
        img : 2-D ndarray
            Background (anatomical) slice image.
        maskedROIs : ndarray
            Fitted-parameter values inside the ROIs, zero elsewhere.
        """
        self.ax.cla()
        self.ax.imshow(img, cmap=plt.cm.gray,
                       interpolation='nearest')
        print("maskedROIs.shape", maskedROIs.shape)
        print("img.shape", img.shape)
        print("maskedROIs.max()", maskedROIs.max())
        if maskedROIs.max() > 0:
            self.ax.imshow(maskedROIs.reshape(img.shape),
                           cmap=plt.cm.jet, alpha=.5,
                           interpolation='bilinear')
        # Hover cursor: a fresh one is created on every redraw.
        mpl_cursor = mplcursors.cursor(self.plot_canvas.figure.axes, hover=True)

        @mpl_cursor.connect("add")
        def _(sel):
            # Parse the default "x=..\ny=..\n[z=..]" annotation text to
            # recover the hovered pixel coordinates.
            ann = sel.annotation
            ttt = ann.get_text()
            xc, yc, zl = [s.split('=') for s in ttt.splitlines()]
            x = round(float(xc[1]))
            y = round(float(yc[1]))
            print("x", x, "y", y)
            nrows, ncols = img.shape
            cslice = self.imageData.currentSlice
            fitParam = self.imageData.fittingParam
            print("cslice", cslice, "nrows", nrows, "ncols")
            print("fitParam", fitParam)
            ### figure out which data set to use
            slice_df = None
            if fitParam in self.imageData.t2_data_summary_df.columns:
                print(fitParam, "T2 dataFrame chosen")
                data_df = self.imageData.t2_data_summary_df
                slice_df = data_df[data_df.slice == cslice + 1]
            elif fitParam in self.imageData.dixon_data_summary_df.columns:
                print(fitParam, "Dixon dataFrame chosen")
                data_df = self.imageData.dixon_data_summary_df
                # Dixon slices are indexed differently: translate the T2
                # slice number to its Dixon counterpart when possible.
                if cslice + 1 in self.imageData.T2slices:
                    dixonSliceIndex = self.imageData.dixonSlices[self.imageData.T2slices.index(cslice + 1)]
                    slice_df = data_df[data_df.slice == dixonSliceIndex]
                else:
                    slice_df = data_df[data_df.slice == cslice]
            ### return current slice
            # slice_df = data_df[data_df.slice==cslice+1]
            roiList = []
            valueList = []
            if not isinstance(slice_df, type(None)):
                print("type(slice_df)", type(slice_df))
                print("slice_df.shape", slice_df.shape)
                # Flattened pixel index = row * ncols + col.
                roiList = slice_df[slice_df['pixel_index'] == y * ncols + x]['roi'].values
                valueList = slice_df[slice_df['pixel_index'] == y * ncols + x][fitParam].values
            print("roiList", roiList)
            print("valueList", valueList)
            fitParamLabel = parameterNames[fitParam][1]
            if len(roiList) > 0:
                roi = roiList[0]
                value = valueList[0]
                ann.set_text(fitParamLabel.format(roi, value))
            else:
                # Pixel not inside any ROI: plain coordinates only.
                ann.set_text("x = {:d}\ny = {:d}".format(x, y))
        self.ax.grid(False)
        self.plot_canvas.draw()

    def toggleImageChanged(self, b1):
        """Redraw with the background image blanked or restored."""
        print("Entered toggleImageChanged")
        if not isinstance(self.imageData.mriSliceIMG, type(None)):
            if self.toggleImage.isChecked():
                print("Clear background image")
                self.update_plot(np.zeros((self.imageData.mriSliceIMG.shape)),
                                 self.imageData.maskedROIs.reshape((self.imageData.mriSliceIMG.shape)))
            else:
                self.valuechangedSlider()
|
{"/visionplot_widgets.py": ["/t2fit.py", "/ImageData.py", "/epgT2paramsDialog.py", "/azzT2paramsDialog.py"], "/simple_pandas_plot.py": ["/visionplot_widgets.py", "/mriplotwidget.py", "/ImageData.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.