index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
17,446
|
seizans/sandbox-django
|
HEAD
|
/sandbox/core/factories.py
|
# coding=utf8
import string
import factory
from factory.fuzzy import FuzzyText
from core.models import Company, Staff
class CompanyFactory(factory.DjangoModelFactory):
    """Factory producing Company model instances for tests."""
    # Old-style factory_boy target declaration (pre-2.4 API);
    # newer releases use `class Meta: model = Company` instead.
    FACTORY_FOR = Company
    id = factory.Sequence(lambda n: n)
    # Numeric fuzzy name: literal '1499' prefix followed by 9 random digits.
    name = FuzzyText(prefix='1499', length=9, chars=string.digits)
    # name = factory.LazyAttribute(lambda o: '会社名{}'.format(o.id))
class StaffFactory(factory.DjangoModelFactory):
    """Factory producing Staff model instances for tests."""
    # Old-style factory_boy target declaration (pre-2.4 API).
    FACTORY_FOR = Staff
    id = factory.Sequence(lambda n: n)
    # Japanese: "staff name {id}".
    name = factory.LazyAttribute(lambda o: 'スタッフ名{}'.format(o.id))
    # The company this staff member belongs to.
    belong = factory.SubFactory(CompanyFactory)
    # Denormalized copy of the owning company's name.
    company_name = factory.LazyAttribute(lambda o: o.belong.name)
|
{"/sandbox/core/search_indexes.py": ["/sandbox/core/models.py"], "/sandbox/settings/store_stg.py": ["/sandbox/settings/_store_base.py", "/sandbox/settings/_stg.py"], "/sandbox/settings/store_dev.py": ["/sandbox/settings/_store_base.py", "/sandbox/settings/_dev.py"], "/sandbox/settings/_store_base.py": ["/sandbox/settings/_base.py"], "/sandbox/settings/_back_base.py": ["/sandbox/settings/_base.py"], "/sandbox/settings/back_stg.py": ["/sandbox/settings/_back_base.py", "/sandbox/settings/_stg.py"], "/sandbox/settings/back_dev.py": ["/sandbox/settings/_back_base.py", "/sandbox/settings/_dev.py"]}
|
17,447
|
seizans/sandbox-django
|
HEAD
|
/sandbox/settings/back_stg.py
|
# coding=utf8
# Settings for the admin ("back") application, staging environment.
from ._back_base import *  # NOQA
from ._stg import *  # NOQA
|
{"/sandbox/core/search_indexes.py": ["/sandbox/core/models.py"], "/sandbox/settings/store_stg.py": ["/sandbox/settings/_store_base.py", "/sandbox/settings/_stg.py"], "/sandbox/settings/store_dev.py": ["/sandbox/settings/_store_base.py", "/sandbox/settings/_dev.py"], "/sandbox/settings/_store_base.py": ["/sandbox/settings/_base.py"], "/sandbox/settings/_back_base.py": ["/sandbox/settings/_base.py"], "/sandbox/settings/back_stg.py": ["/sandbox/settings/_back_base.py", "/sandbox/settings/_stg.py"], "/sandbox/settings/back_dev.py": ["/sandbox/settings/_back_base.py", "/sandbox/settings/_dev.py"]}
|
17,448
|
seizans/sandbox-django
|
HEAD
|
/sandbox/settings/back_dev.py
|
# coding=utf8
# Settings for the admin ("back") application, development environment.
from ._back_base import *  # NOQA
from ._dev import *  # NOQA
# Append the dev-only additions defined in ._dev.
INSTALLED_APPS += INSTALLED_APPS_PLUS
|
{"/sandbox/core/search_indexes.py": ["/sandbox/core/models.py"], "/sandbox/settings/store_stg.py": ["/sandbox/settings/_store_base.py", "/sandbox/settings/_stg.py"], "/sandbox/settings/store_dev.py": ["/sandbox/settings/_store_base.py", "/sandbox/settings/_dev.py"], "/sandbox/settings/_store_base.py": ["/sandbox/settings/_base.py"], "/sandbox/settings/_back_base.py": ["/sandbox/settings/_base.py"], "/sandbox/settings/back_stg.py": ["/sandbox/settings/_back_base.py", "/sandbox/settings/_stg.py"], "/sandbox/settings/back_dev.py": ["/sandbox/settings/_back_base.py", "/sandbox/settings/_dev.py"]}
|
17,451
|
igniteflow/polymorph
|
refs/heads/master
|
/polymorph/tools.py
|
import yaml
class RowTools(object):
    """
    transform a Python object to csv friendly rows
    Example:
    {
        'foo': 'bar',
        'cars': ['one', 'two'],
        'fruit': [
            {'apple': 'green'},
            {'banana': 'yellow'},
        ]
    }
    Becomes rows:
    [
        ('foo', 'bar'),
        ('cars.0', 'one'),
        ('cars.1', 'two'),
        ('fruit.0.apple', 'green'),
        ('fruit.1.banana', 'yellow'),
    ]
    """
    # NOTE(review): traversal state lives on the instance and is never
    # reset, so a single instance cannot safely run to_rows() twice --
    # confirm before reusing instances.
    rows = None  # accumulated (identifier, value) pairs
    keys = None  # stack of keys/indices leading to the current node

    def _str(self, data):
        # Leaf value: emit one row whose identifier joins the key stack with '.'.
        if self.rows is None:
            self.rows = []
        identifier = '.'.join([str(i) for i in self.keys])
        self.rows.append((identifier, data))
        # Pop the leaf's own key now that its row is recorded.
        self.keys.pop()

    def _list(self, data):
        items = []
        for i, item in enumerate(data):
            if i > 0 and self.keys[-1] == (i - 1):
                # remove the index from the previous iteration
                self.keys.pop()
            items.append(self.recurse(item, key=i))
        return items

    def _dict(self, data):
        # assumes keys can only be strings
        for k, v in data.items():
            self.recurse(v, key=k)

    def recurse(self, data, key=None):
        # Depth-first walk; `key` is the edge taken to reach `data`.
        if self.keys is None:
            self.keys = []
        if key is not None:
            self.keys.append(key)
        if isinstance(data, list):
            _data = self._list(data)
            self.keys.pop()
            return _data
        elif isinstance(data, dict):
            return self._dict(data)
        elif isinstance(data, (str, unicode)):  # Python 2 only: `unicode` is undefined on Python 3
            self._str(data)

    def to_rows(self, data):
        """Flatten *data* and return the accumulated (identifier, value) rows."""
        self.recurse(data)
        return self.rows

    def rows_to_data(self, rows):
        """Inverse of to_rows -- not implemented yet."""
        # TODO
        pass
class YamlToCsv(object):
    """File-level YAML helpers used by the row-conversion tooling."""

    def load_from_file(self, path):
        """Parse the YAML file at *path* and return the resulting object.

        Uses safe_load: plain yaml.load without an explicit Loader can
        construct arbitrary Python objects from untrusted input.
        """
        with open(path) as f:
            return yaml.safe_load(f)

    def write_to_file(self, path, data):
        """Serialize *data* as YAML and write it to *path* (overwriting)."""
        with open(path, 'w+') as f:
            f.write(yaml.dump(data))

    def to_rows(self, data):
        """
        csv will have two columns:
        (1) identifier
        (2) value
        """
        pass
|
{"/polymorph/tests/test_tools.py": ["/polymorph/tools.py"]}
|
17,452
|
igniteflow/polymorph
|
refs/heads/master
|
/setup.py
|
from setuptools import setup

# Package metadata for the polymorph data-transformation library.
setup(name='polymorph',
      version='0.1',
      # Fixed typo: "tranform" -> "transform".
      description='Python tooling to transform data',
      url='https://github.com/igniteflow/polymorph',
      author='Phil Tysoe',
      author_email='philtysoe@gmail.com',
      license='MIT',
      packages=['polymorph'],
      zip_safe=False
      )
|
{"/polymorph/tests/test_tools.py": ["/polymorph/tools.py"]}
|
17,453
|
igniteflow/polymorph
|
refs/heads/master
|
/polymorph/tests/test_tools.py
|
import os
from polymorph.tools import YamlToCsv, RowTools
TEST_DATA_DIR = './polymorph/tests/test_data/'


def get_test_file_path(filename):
    """Build the path of *filename* inside the shared test-data directory."""
    return '%s%s' % (TEST_DATA_DIR, filename)
def test_load_from_file():
    """Loading the simple fixture yields the expected dict."""
    converter = YamlToCsv()
    fixture = get_test_file_path('simple_example.yaml')
    assert converter.load_from_file(fixture) == {'foo': 'bar'}
def test_write_to_file():
    """Round-trip a small dict through write_to_file and check the raw YAML."""
    converter = YamlToCsv()
    out_path = get_test_file_path('output.yaml')
    converter.write_to_file(out_path, {'foo': 'bar'})
    with open(out_path) as handle:
        assert handle.read() == '{foo: bar}\n'
    # should probably mock open instead of actually creating a file
    os.remove(out_path)
# Shared fixtures: a nested object and the flat rows it should map to.
DATA = {
    'foo': 'bar',
    'cars': ['one', 'two'],
    'fruit': [
        {'apple': 'green'},
        {'banana': 'yellow'},
    ]
}
ROWS = [
    ('foo', 'bar'),
    ('cars.0', 'one'),
    ('cars.1', 'two'),
    ('fruit.0.apple', 'green'),
    ('fruit.1.banana', 'yellow'),
]
def test_to_rows():
    """RowTools.to_rows flattens DATA into exactly the expected rows."""
    tools = RowTools()
    assert sorted(tools.to_rows(DATA)) == sorted(ROWS)
def test_rows_to_data():
    """rows_to_data should rebuild the original nested object from ROWS."""
    row_tools = RowTools()
    # Bug fix: sorted(DATA) iterates a dict and yields only its *keys*,
    # so the old assertion could never compare the rebuilt structure to
    # DATA.  Compare the structures directly instead.
    assert row_tools.rows_to_data(ROWS) == DATA
|
{"/polymorph/tests/test_tools.py": ["/polymorph/tools.py"]}
|
17,472
|
valeriobasile/storkl
|
refs/heads/master
|
/app/__init__.py
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext import restful
from sqlalchemy import create_engine
import os
from sqlalchemy.ext.declarative import declarative_base

# Application and REST API objects shared by the whole package.
app = Flask(__name__)
api = restful.Api(app)

# create database
# The SQLite database file lives next to this package.
basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'storkl.db')
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
db = SQLAlchemy(app)

# let's try dataset
# NOTE(review): this is a second, separate SQLite file ('storkl.ds') used
# for the `dataset` experiment -- not the SQLAlchemy database above.
import dataset
DATASET_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'storkl.ds')
ds = dataset.connect(DATASET_DATABASE_URI)

# Imported late so that `app` and `db` already exist when views/models load.
from app import views, models

if __name__ == '__main__':
    app.run(debug=True)
|
{"/db_create_test_data.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py", "/app/utils.py"]}
|
17,473
|
valeriobasile/storkl
|
refs/heads/master
|
/db_create_test_data.py
|
from app import db, models
from datetime import datetime

# Seed script: wipes Users/Projects/Tasks and inserts a small fixture set.

# create the database
db.create_all()

# empty the db
for user in models.User.query.all():
    db.session.delete(user)
for project in models.Project.query.all():
    db.session.delete(project)
for task in models.Task.query.all():
    db.session.delete(task)
db.session.commit()

# Two users; mary trusts john.
u1 = models.User(username='john',
                 email='john@email.com')
db.session.add(u1)
u2 = models.User(username='mary',
                 email='mary@email.com',
                 trusted=[u1])
db.session.add(u2)

# One project owned by mary.
p1 = models.Project(id=1,
                    title='Hyperlamp',
                    owner_id='mary',
                    description='A lamp shaped like an hypercube.',
                    created=datetime.utcnow())
db.session.add(p1)

# Three tasks with dependencies: t2 needs t1; t3 needs t1 and t2.
t1 = models.Task(id=1,
                 project_id=1,
                 name='Buy wooden sticks',
                 description='go to Gamma and buy a few meters of thin cut wood.',
                 users=[u1])
db.session.add(t1)
t2 = models.Task(id=2,
                 project_id=1,
                 name='Buy paper',
                 description='go to the store and buy a few square meters of multi-color paper.',
                 users=[u1, u2],
                 dependencies=[t1])
db.session.add(t2)
t3 = models.Task(id=3,
                 project_id=1,
                 name='Build structure',
                 description='put together wood and paper.',
                 users=[u1],
                 dependencies=[t1, t2])
db.session.add(t3)
db.session.commit()
|
{"/db_create_test_data.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py", "/app/utils.py"]}
|
17,474
|
valeriobasile/storkl
|
refs/heads/master
|
/app/utils.py
|
def flatten(list_of_lists):
    """Concatenate the sub-lists of *list_of_lists* into one flat list."""
    flat = []
    for sublist in list_of_lists:
        flat.extend(sublist)
    return flat
def unique(l):
    """Return the distinct elements of *l* (order is not guaranteed)."""
    distinct = set(l)
    return list(distinct)
|
{"/db_create_test_data.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py", "/app/utils.py"]}
|
17,475
|
valeriobasile/storkl
|
refs/heads/master
|
/test_requests.py
|
import requests

# Manual smoke test for the /u/ user endpoints.  Requires the Flask dev
# server to be running on 127.0.0.1:5000.  Python 2 (print statement) script.

# Fetch the user (may not exist yet).
r = requests.get('http://127.0.0.1:5000/u/valerio')
res = r.json()
print res
# Create the user.
r = requests.post('http://127.0.0.1:5000/u/new', data={'username' : 'valerio', 'email' : 'valerio@storkl.net'})
res = r.json()
print res
r = requests.get('http://127.0.0.1:5000/u/valerio')
res = r.json()
print res
# Delete the user again.
r = requests.delete('http://127.0.0.1:5000/u/valerio')
res = r.json()
print res
# Fetch once more after deletion.
r = requests.get('http://127.0.0.1:5000/u/valerio')
res = r.json()
print res
|
{"/db_create_test_data.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py", "/app/utils.py"]}
|
17,476
|
valeriobasile/storkl
|
refs/heads/master
|
/app/views.py
|
from app import db, app, models, api
from utils import *
from flask import make_response, jsonify
from flask.ext import restful
from flask.ext.restful import abort, reqparse
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import UnmappedInstanceError
### User ###
class User(restful.Resource):
    """REST resource for a single user, addressed by username."""

    def __init__(self):
        # Body parameters accepted by POST.
        self.parser = reqparse.RequestParser()
        self.parser.add_argument('username')
        self.parser.add_argument('email')

    def get(self, username):
        """Return the serialized user, or 404 if unknown."""
        user = models.User.query.get(username)
        if user:
            return jsonify(user.serialize())
        else:
            abort(404, message="User {} doesn't exist".format(username))

    def post(self, username):
        """Create a new user from the request body; 400 if it already exists."""
        args = self.parser.parse_args()
        try:
            new_user = models.User(username=args['username'],
                                   email=args['email'])
            db.session.add(new_user)
            db.session.commit()
            return 201
        except IntegrityError:
            abort(400, message="User {} already exists".format(args['username']))

    def delete(self, username):
        """Delete the user; 400 if there is no such user."""
        # NOTE(review): the parsed args are unused here -- confirm whether
        # parse_args() is kept intentionally for request validation.
        args = self.parser.parse_args()
        try:
            user = models.User.query.get(username)
            db.session.delete(user)
            db.session.commit()
            return 201
        except UnmappedInstanceError:
            # Bug fix: the message previously interpolated the literal
            # string 'username' instead of the actual username value.
            abort(400, message="User {} does not exist".format(username))

api.add_resource(User, '/u/<string:username>')
### User - owns - Project ###
class Ownership(restful.Resource):
    """Projects owned by the given user."""
    def get(self, username):
        owned = models.Project.query.filter_by(owner_id=username).all()
        payload = [project.serialize() for project in owned]
        return jsonify({ 'projects' : payload })
api.add_resource(Ownership, '/u/<string:username>/owned')
### User - is in task comprised by - Project ###
class UserInvolvement(restful.Resource):
    """Projects the user participates in through task assignments."""
    def get(self, username):
        person = models.User.query.get(username)
        payload = [proj.serialize() for proj in person.involved()]
        return jsonify({ 'projects' : payload })
api.add_resource(UserInvolvement, '/u/<string:username>/involved')
### User - Task ###
class Assignment(restful.Resource):
    """Tasks assigned to the given user."""
    def get(self, username):
        person = models.User.query.get(username)
        payload = [task.serialize() for task in person.tasks]
        return jsonify({ 'tasks' : payload })
api.add_resource(Assignment, '/u/<string:username>/tasks')
### User - User ###
class Trust(restful.Resource):
    """Users that the given user trusts."""
    def get(self, username):
        person = models.User.query.get(username)
        payload = [other.serialize() for other in person.trusted]
        return jsonify({ 'users' : payload })
api.add_resource(Trust, '/u/<string:username>/trusted')
### User - User ###
# every user involved in projects in which User is involved (minus himself)
class Association(restful.Resource):
    def get(self, username):
        """List everyone sharing a project with *username*, excluding them."""
        user = models.User.query.get(username)
        associates = unique(flatten(p.involved() for p in user.involved()))
        # NOTE(review): list.remove assumes `user` appears in the associates
        # list (same identity via the session's identity map); it raises
        # ValueError otherwise -- confirm that case cannot occur.
        associates.remove(user)
        return jsonify({ 'users' : [u.serialize() for u in associates] })
api.add_resource(Association, '/u/<string:username>/associated')
### Project ###
class Project(restful.Resource):
    """A single project, addressed by numeric id."""
    def get(self, project_id):
        found = models.Project.query.get(project_id)
        if found is None:
            abort(404, message="Project {} doesn't exist".format(project_id))
        return jsonify(found.serialize())
api.add_resource(Project, '/p/<int:project_id>')
### Project - is in task comprised by - Project ###
class ProjectInvolvement(restful.Resource):
    """Users involved in a given project via its tasks."""
    def get(self, project_id):
        record = models.Project.query.get(project_id)
        members = [person.serialize() for person in record.involved()]
        return jsonify({ 'users' : members })
api.add_resource(ProjectInvolvement, '/p/<int:project_id>/involved')
### Project - Task ###
class ProjectTasks(restful.Resource):
    """Tasks belonging to a given project."""
    def get(self, project_id):
        record = models.Project.query.get(project_id)
        task_list = [item.serialize() for item in record.tasks]
        return jsonify({ 'tasks' : task_list })
api.add_resource(ProjectTasks, '/p/<int:project_id>/tasks')
### Task ###
class Task(restful.Resource):
    """A single task, addressed by numeric id."""
    def get(self, task_id):
        found = models.Task.query.get(task_id)
        if found is None:
            abort(404, message="Task {} doesn't exist".format(task_id))
        return jsonify(found.serialize())
api.add_resource(Task, '/t/<int:task_id>')
class Dependency(restful.Resource):
    def get(self, task_id):
        """Return both directions of the task's dependency relations."""
        task = models.Task.query.get(task_id)
        if not task:
            abort(404, message="Task {} doesn't exist".format(task_id))
        # 'dependencies': tasks this one waits on;
        # 'dependents': tasks waiting on this one (see models.Task).
        return jsonify({'dependency':
                        {'dependencies' :
                         [t.serialize() for t in task.dependencies],
                         'dependents' :
                         [t.serialize() for t in task.dependents] }
                        })
api.add_resource(Dependency, '/t/<int:task_id>/dep')
# error handling
@app.errorhandler(404)
def not_found(error):
    """Render 404s as a JSON body instead of the default HTML page."""
    body = jsonify( { 'error': 'Not found' } )
    return make_response(body, 404)
|
{"/db_create_test_data.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py", "/app/utils.py"]}
|
17,477
|
valeriobasile/storkl
|
refs/heads/master
|
/app/models.py
|
from app import db
from app.utils import *
# User <-> Task assignment (many-to-many).
assignment = db.Table('assignment',
    db.Column('user', db.String(64), db.ForeignKey('user.username')),
    db.Column('task', db.Integer, db.ForeignKey('task.id'))
)
# Self-referential User trust edges: trustee trusts trusted.
trust = db.Table('trust',
    db.Column('trustee', db.String(64), db.ForeignKey('user.username'), primary_key=True),
    db.Column('trusted', db.String(64), db.ForeignKey('user.username'), primary_key=True)
)
# Self-referential Task dependency edges; direction is defined by the
# Task.dependencies/Task.dependents relationships below.
dependency = db.Table('dependency',
    db.Column('master', db.Integer, db.ForeignKey('task.id')),
    db.Column('slave', db.Integer, db.ForeignKey('task.id'))
)
class User(db.Model):
    """Account keyed by username; owns projects and is assigned to tasks."""
    username = db.Column(db.String(64), index = True, primary_key = True)
    email = db.Column(db.String(120), index = True, unique = True)
    projects = db.relationship('Project', backref = 'owner', lazy = 'dynamic')
    tasks = db.relationship('Task',
                            secondary=assignment,
                            backref=db.backref('user',
                                               lazy='dynamic'))
    # Directed trust: `trusted` lists the users this user trusts; the
    # 'trustees' backref exposes the reverse direction.
    trusted = db.relationship('User',
                              secondary=trust,
                              backref=db.backref('trustees'),
                              lazy='dynamic',
                              primaryjoin=username==trust.c.trustee,
                              secondaryjoin=username==trust.c.trusted)

    def involved(self):
        """Distinct projects containing a task assigned to this user."""
        return list(set([task.project for task in self.tasks]))

    def serialize(self):
        """JSON-friendly dict of the public user fields."""
        serialized = {'username' : self.username,
                      'email' : self.email}
        return serialized
class Project(db.Model):
    """A project owned by a user and composed of tasks."""
    id = db.Column(db.Integer, primary_key = True)
    title = db.Column(db.String(64))
    owner_id = db.Column(db.String(64), db.ForeignKey('user.username'))
    description = db.Column(db.Text())
    created = db.Column(db.DateTime())
    tasks = db.relationship('Task', backref = 'project', lazy = 'dynamic')

    def involved(self):
        """Return the distinct users assigned to any task of this project."""
        # An unreachable hand-inlined duplicate of unique(flatten(...))
        # that followed this return has been removed (dead code).
        return unique(flatten([task.users for task in self.tasks]))

    def serialize(self):
        """JSON-friendly summary of the project, embedding the owner."""
        user = User.query.get(self.owner_id)
        serialized = {'title' : self.title,
                      'owner' : user.serialize(),
                      'description' : self.description,
                      'created' : self.created}
        return serialized
class Task(db.Model):
    """Unit of work within a project; may depend on other tasks."""
    id = db.Column(db.Integer, primary_key = True)
    project_id = db.Column(db.Integer, db.ForeignKey('project.id'))
    name = db.Column(db.String(64))
    description = db.Column(db.Text())
    users = db.relationship('User', secondary=assignment, backref=db.backref('task', lazy='dynamic'))
    # dependencies: prerequisite tasks (rows where this task is 'slave').
    # dependents: tasks blocked on this one (rows where this task is 'master').
    # NOTE(review): the backrefs 'dependent'/'dependency' define two more
    # mirrored attributes on Task -- confirm both pairs are really needed.
    dependencies = db.relationship('Task', secondary=dependency, primaryjoin=dependency.c.slave==id, secondaryjoin=dependency.c.master==id, backref='dependent')
    dependents = db.relationship('Task', secondary=dependency, primaryjoin=dependency.c.master==id, secondaryjoin=dependency.c.slave==id, backref='dependency')

    def serialize(self):
        """JSON-friendly dict including the owning project's summary."""
        project = Project.query.get(self.project_id)
        serialized = {'name' : self.name,
                      'project' : project.serialize(),
                      'description' : self.description}
        return serialized
|
{"/db_create_test_data.py": ["/app/__init__.py"], "/app/views.py": ["/app/__init__.py"], "/app/models.py": ["/app/__init__.py", "/app/utils.py"]}
|
17,479
|
aditya-kandada/democrat
|
refs/heads/master
|
/polls/urls.py
|
from django.conf.urls import patterns
from django.conf.urls import url

# URL routes for the polls app.
# NOTE(review): patterns() with string view names is the old Django (<1.8)
# style, removed in Django 1.10 -- confirm the targeted Django version.
urlpatterns = patterns('polls.views',
    # Examples:
    url(r'^$', 'index', name='index'),
)
|
{"/polls/views.py": ["/polls/models.py"], "/polls/admin.py": ["/polls/models.py"]}
|
17,480
|
aditya-kandada/democrat
|
refs/heads/master
|
/polls/models.py
|
from django.db import models
class Candidate(models.Model):
    """An election candidate with simple up/down vote tallies."""
    name = models.CharField(max_length=100)
    first_name = models.CharField(max_length=100, null=True)
    last_name = models.CharField(max_length=100, null=True)
    description = models.CharField(max_length=250)
    # Bug fix: max_length is not a valid IntegerField option (Django ignores
    # it and flags it in system checks), so it has been dropped.
    upvote = models.IntegerField(null=True, blank=True)
    downvote = models.IntegerField(null=True, blank=True)
|
{"/polls/views.py": ["/polls/models.py"], "/polls/admin.py": ["/polls/models.py"]}
|
17,481
|
aditya-kandada/democrat
|
refs/heads/master
|
/polls/views.py
|
from django.shortcuts import render
from polls.models import Candidate
def index(request):
    """Render all candidates, ordered alphabetically by name."""
    ordered = Candidate.objects.all().order_by('name')
    context = {'candidates': ordered}
    return render(request, 'index.html', context)
|
{"/polls/views.py": ["/polls/models.py"], "/polls/admin.py": ["/polls/models.py"]}
|
17,482
|
aditya-kandada/democrat
|
refs/heads/master
|
/polls/admin.py
|
from django.contrib import admin
# Register your models here.
from polls.models import Candidate


class CandidateAdmin(admin.ModelAdmin):
    """Admin changelist showing vote tallies next to each candidate."""
    list_display = ['name', 'description', 'upvote', 'downvote']


admin.site.register(Candidate, CandidateAdmin)
|
{"/polls/views.py": ["/polls/models.py"], "/polls/admin.py": ["/polls/models.py"]}
|
17,537
|
MfonUdoh/Mazer
|
refs/heads/master
|
/game.py
|
class Game(object):
    """State and movement rules for a sliding maze game.

    The player slides from (x1, y1) to (x2, y2) until stopped by a wall or
    the board edge, marking every crossed cell; a level is done when no
    empty (unmarked, non-wall) cells remain.
    """

    def __init__(self):
        #Board width
        self.size = 10
        self.minMoves = 0          # par move count for the current level
        self.level = 0             # index of the current level
        self.marksLocations = []   # cells the player has crossed
        self.wallsLocations = []   # impassable cells
        self.empties = self.size ** 2 - len(self.wallsLocations) - len(self.marksLocations)
        self.maxLevels = 0
        # (x1, y1) is the previous position, (x2, y2) the current one.
        self.x1 = 0
        self.y1 = 0
        self.x2 = 0
        self.y2 = 0
        self.turns = 0

    def set_level(self, level):
        """Assigns the location of all the walls and starting player position for the selected level"""
        # Bug fix: this previously assigned `self.minmoves` (lowercase), so
        # the par count read elsewhere as `minMoves` was never updated.
        self.minMoves = level.minMoves[self.level]
        self.wallsLocations = level.wallsLocations[self.level]
        self.marksLocations = []
        self.turns = 0
        self.maxLevels = len(level.minMoves) - 1
        self.x1 = level.playerPosition[self.level][0]
        self.x2 = level.playerPosition[self.level][0]
        self.y1 = level.playerPosition[self.level][1]
        self.y2 = level.playerPosition[self.level][1]
        self.empties = self.size ** 2 - len(self.wallsLocations) - len(self.marksLocations)

    def make_marks(self):
        """Creates markers at every position that the player crosses"""
        if self.y1 == self.y2 and self.x2 > self.x1:
            # Moved right: mark every cell from x1 through x2.
            for displace in range(self.x2 - self.x1 + 1):
                if [self.x1 + displace, self.y1] not in self.marksLocations:
                    self.marksLocations.append([self.x1 + displace, self.y1])
        elif self.y1 == self.y2 and self.x1 > self.x2:
            # Moved left.
            for displace in range(self.x1 - self.x2 + 1):
                if [self.x2 + displace, self.y1] not in self.marksLocations:
                    self.marksLocations.append([self.x2 + displace, self.y1])
        elif self.y2 > self.y1:
            # Moved down.
            for displace in range(self.y2 - self.y1 + 1):
                if [self.x1, self.y1 + displace] not in self.marksLocations:
                    self.marksLocations.append([self.x1, self.y1 + displace])
        else:
            # Moved up (or stayed in place).
            for displace in range(self.y1 - self.y2 + 1):
                if [self.x1, self.y2 + displace] not in self.marksLocations:
                    self.marksLocations.append([self.x1, self.y2 + displace])
        self.empties = self.size ** 2 - len(self.wallsLocations) - len(self.marksLocations)

    def move(self, direction):
        """Takes a direction ('w'/'a'/'s'/'d') and slides the player that way."""
        # The current position becomes the previous position.
        self.x1 = self.x2
        self.y1 = self.y2
        skip = self.distance_to_edge(direction)
        moves = {
            'a' : [-skip, 0],
            'd' : [skip, 0],
            'w' : [0, -skip],
            's' : [0, skip]
        }
        # Only move when the direction is legal and the target stays on the board.
        if \
            direction in moves \
            and self.x1 + moves[direction][0] in range(self.size) \
            and self.y1 + moves[direction][1] in range(self.size):
            self.x2 = self.x1 + moves[direction][0]
            self.y2 = self.y1 + moves[direction][1]
        if self.x2 != self.x1 or self.y2 != self.y1:
            self.turns += 1

    def distance_to_edge(self, direction):
        """Calculates how far away the nearest wall or edge is and returns how far the self must travel to get there"""
        calc = []
        # Distance to the board edge in the requested direction.  Bug fix:
        # this was previously computed inside the wall loop, so a level
        # without walls always returned 0 (the player could never move).
        if direction == 'd':
            edge = (self.size - 1) - self.x1
        elif direction == 'a':
            edge = self.x1
        elif direction == 's':
            edge = (self.size - 1) - self.y2
        elif direction == 'w':
            edge = self.y2
        else:
            edge = 0
        for wall in self.wallsLocations:
            # Collect the distance to each wall lying ahead on the same row/column.
            if direction == 'd':
                if wall[1] == self.y2 and (wall[0] - self.x1) > 0:
                    calc.append(wall[0] - self.x1)
            elif direction == 'a':
                if wall[1] == self.y2 and (self.x1 - wall[0]) > 0:
                    calc.append(self.x1 - wall[0])
            elif direction == 's':
                if wall[0] == self.x1 and (wall[1] - self.y2) > 0:
                    calc.append(wall[1] - self.y2)
            elif direction == 'w':
                if wall[0] == self.x1 and (self.y2 - wall[1]) > 0:
                    calc.append(self.y2 - wall[1])
        if calc == []:
            # No wall ahead: slide all the way to the edge.
            calc = edge
        else:
            # Stop one cell short of the nearest wall.
            calc = min(calc)-1
        return calc
|
{"/main.py": ["/game.py"]}
|
17,538
|
MfonUdoh/Mazer
|
refs/heads/master
|
/main.py
|
import levels, pygame, game
from pygame.locals import *

# NOTE(review): the module name `game` is immediately shadowed by the Game
# instance below -- confirm nothing else needs the module object afterwards.
game = game.Game()
running = True
end = False
pygame.init()
screen_width = 600
screen_height = 600
multiple = 50  # pixels per board cell
screen = pygame.display.set_mode((screen_width, screen_height))
pygame.display.set_caption("Mazer")
radius = int(0.5 * multiple)        # player circle radius
wallwidth = int(1 * multiple)       # wall square side
markradius = int(0.05 * multiple)   # crossed-cell marker radius
scores = []

# Outer loop: one iteration per level.
while running:
    playing = True
    refresh = True
    game.set_level(levels)
    x = multiple * (game.x1 + 1) + radius
    y = multiple * (game.y1 + 1) + radius
    font = pygame.font.SysFont(None, 20)
    pygame.time.delay(100)
    while playing:
        keys = pygame.key.get_pressed()
        if game.empties != 0:
            # Level in progress: translate held keys into slide moves.
            if keys[pygame.K_LEFT] or keys[pygame.K_a]:
                # Can make it only refresh if the move returns true
                game.move('a')
                refresh = True
            elif keys[pygame.K_RIGHT] or keys[pygame.K_d]:
                game.move('d')
                refresh = True
            elif keys[pygame.K_UP] or keys[pygame.K_w]:
                game.move('w')
                refresh = True
            elif keys[pygame.K_DOWN] or keys[pygame.K_s]:
                game.move('s')
                refresh = True
        else:
            # Level complete: score 100 minus moves over par, floored at 0.
            if game.turns > 100 + game.minMoves:
                game.turns = 100 + game.minMoves
            scores.append(100-(game.turns-game.minMoves))
            game.level += 1
            if game.level >= game.maxLevels:
                end = True
            break
        if refresh:
            # Redraw the board only after the state changed.
            game.make_marks()
            x = multiple * (game.x1 + 1) + radius
            y = multiple * (game.y1 + 1) + radius
            screen.fill((0, 0, 0))
            textSurface = font.render("LEVEL: {} TURNS: {} EMPTY SPACES: {}".format(game.level, game.turns, game.empties), True, [255, 255, 255], [0, 0, 0])
            screen.blit(textSurface, (int(0.2 * multiple), int(0.3 * multiple)))
            for mark in game.marksLocations:
                pygame.draw.circle(screen, (255, 255, 255), (int(multiple * (1.5 + mark[0])), int(multiple * (1.5 + mark[1]))), markradius)
            pygame.draw.circle(screen, (255, 255, 0), (x, y), radius, )
            for wall in game.wallsLocations:
                pygame.draw.rect(screen, (255, 255, 255), (int(multiple * (1 + wall[0])), int(multiple * (1 + wall[1])), wallwidth, wallwidth))
            pygame.display.update()
            refresh = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                playing = False
                running = False
    if end:
        # All levels finished: show the final score screen until quit.
        totalscore = sum(scores)
        screen.fill((0, 0, 0))
        textSurface = font.render("Congratulations, you have completed the game!", True, [255, 255, 255], [0, 0, 0])
        screen.blit(textSurface, (int(0.2 * multiple), int(0.3 * multiple)))
        scoreSurface = font.render("Final Score: {}/500".format(totalscore), True, [255, 255, 255], [0, 0, 0])
        screen.blit(scoreSurface, (int(5 * multiple), int(5 * multiple)))
        pygame.display.update()
        while end:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    running = False
                    playing = False
                    end = False
# Drain any remaining quit events before shutting pygame down.
for event in pygame.event.get():
    if event.type == pygame.QUIT:
        running = False
pygame.quit()
|
{"/main.py": ["/game.py"]}
|
17,548
|
Qyon/AllegroObserver
|
refs/heads/master
|
/allegro/api.py
|
# coding=utf-8
__author__ = 'Qyon'
from suds.client import Client
from suds import WebFault
import time
import logging
logger = logging.getLogger(__name__)
class InvalidSessionException(Exception):
    """Raised when the Allegro WebAPI reports an expired session."""
    pass
class ApiHelper(object):
    """
    Thin wrapper around the Allegro WebAPI SOAP service: handles login,
    session renewal and paged fetching of auctions for one category.
    """
    def __init__(self, settings):
        logger.debug('Inicjalizacja')
        self.settings = settings
        self.client = self.getApiClient()
        self.session = self.getSession()
        self.get_auctions_retry_count = 0

    def getApiClient(self):
        """
        Build the SOAP client from the public Allegro WSDL.
        """
        logger.debug('getApiClient')
        client = Client('http://webapi.allegro.pl/uploader.php?wsdl')
        return client

    def getSysStatus(self):
        """
        Fetch one of the versioned components (category tree and sale-form
        fields) and the version key for the configured country.
        """
        data_dict = {
            'sysvar': 3,
            'country-id': self.settings.ALLEGRO_COUNTRY,
            'webapi-key': self.settings.ALLEGRO_KEY
        }
        return self.client.service.doQuerySysStatus(**data_dict)

    def getSession(self):
        """
        Log the configured user in and return the Allegro session object.
        """
        sys_info = self.getSysStatus()
        data_dict = {
            'user-login': self.settings.ALLEGRO_LOGIN,
            'user-password': self.settings.ALLEGRO_PASSWORD,
            'country-code': self.settings.ALLEGRO_COUNTRY,
            'webapi-key': self.settings.ALLEGRO_KEY,
            'local-version': sys_info['ver-key'] or self.settings.ALLEGRO_LOCALVERSION
        }
        logger.debug('getSession')
        return self.client.service.doLogin(**data_dict)

    def _get_auctions(self, doShowCatParams, offset):
        # Fetch one page of auctions; translates the "session expired" SOAP
        # fault ('Sesja wygas...' is Polish) into InvalidSessionException.
        doShowCatParams['cat-items-offset'] = offset
        logger.info("Pobieram aukcje. Offset %d" % (doShowCatParams['cat-items-offset'], ))
        try:
            result = self.client.service.doShowCat(**doShowCatParams)
        except WebFault as e:
            if 'Sesja wygas' in e.message:
                raise InvalidSessionException
            logger.exception('API ERROR?')
            raise e
        return result

    def getAuctions(self):
        """
        Fetch the configured category's auctions page by page and return a
        dict mapping auction id -> auction record.  On session expiry the
        session is renewed and {} is returned so the caller retries later.
        """
        logger.info('getAuctions')
        doShowCatParams = {
            'session-handle': getattr(self.session, 'session-handle-part'),
            'cat-id': self.settings.CATEGORY_ID,
            'cat-items-limit': 100,
            'cat-items-offset': 0,
        }
        all_auctions = []
        result = {}
        first = True
        offset = 0
        while first or len(all_auctions) < getattr(result, 'cat-items-count', 0):
            first = False
            try:
                result = self._get_auctions(doShowCatParams, offset)
            except InvalidSessionException as e:
                if self.get_auctions_retry_count < 10:
                    logger.warning('Wygasła sesja. Próbuję odnowić')
                    self.session = self.getSession()
                    logger.debug('Sleep na 10 sekund, na wszelki wypadek...')
                    time.sleep(10)
                    return {}
                else:
                    raise e
            offset += 1
            # NOTE(review): the retry counter is reset here but never
            # incremented, so the `< 10` guard above can never trip -- confirm.
            self.get_auctions_retry_count = 0
            items = getattr(result, 'cat-items-array')
            if not items or len(items) <= 0:
                print result
                logger.debug('Brak aukcji?')
                break
            all_auctions += items
        logger.info("Pobrano %d aukcji" % (len(all_auctions), ))
        return dict([(getattr(i, 's-it-id'), i) for i in all_auctions])
|
{"/run.py": ["/observer.py"]}
|
17,549
|
Qyon/AllegroObserver
|
refs/heads/master
|
/allegro/__init__.py
|
__author__ = 'Qyon'
from api import ApiHelper
|
{"/run.py": ["/observer.py"]}
|
17,550
|
Qyon/AllegroObserver
|
refs/heads/master
|
/settings.sample.py
|
__author__ = 'Qyon'

# Sample configuration: copy to settings.py and fill in real credentials.
ALLEGRO_LOGIN = 'login'        # Allegro account login
ALLEGRO_PASSWORD = 'pass'      # Allegro account password
ALLEGRO_KEY = 'key'            # WebAPI access key
# Bug fix: allegro/api.py reads settings.ALLEGRO_COUNTRY (doQuerySysStatus /
# doLogin), but the sample never defined it -- a fresh copy crashed with
# AttributeError.  1 is assumed to mean allegro.pl; verify against the WebAPI.
ALLEGRO_COUNTRY = 1
ALLEGRO_LOCALVERSION = 3       # fallback local component version key
CATEGORY_ID = 28273            # auction category to observe
EMAIL_TO = 'samplemail@gmail.com'    # notification recipient
EMAIL_FROM = 'samplemail@gmail.com'  # envelope sender
#in minutes
CHECK_INTERVAL = 30
|
{"/run.py": ["/observer.py"]}
|
17,551
|
Qyon/AllegroObserver
|
refs/heads/master
|
/run.py
|
__author__ = 'Qyon'
import settings
from observer import Observer
import logging

# create console handler and set level to debug
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.DEBUG)
# create file handler and set level to debug
fileHandler = logging.FileHandler('allegro_observer.log')
fileHandler.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
consoleHandler.setFormatter(formatter)
fileHandler.setFormatter(formatter)

# Application loggers: verbose (DEBUG) to both console and file.
for lname in ('allegro', 'observer', ):
    logger = logging.getLogger(lname)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(consoleHandler)
    logger.addHandler(fileHandler)

# Third-party SOAP client logger: errors only.
for lname in ( 'suds.client', ):
    logger = logging.getLogger(lname)
    logger.setLevel(logging.ERROR)
    logger.addHandler(consoleHandler)
    logger.addHandler(fileHandler)

def main():
    """Build the Observer from settings and start the watch loop."""
    observer = Observer(settings)
    observer.watch()

if __name__ == "__main__":
    main()
|
{"/run.py": ["/observer.py"]}
|
17,552
|
Qyon/AllegroObserver
|
refs/heads/master
|
/observer.py
|
# coding=utf-8
__author__ = 'Qyon'
import allegro
import time
# Import smtplib for the actual sending function
import smtplib
# Import the email modules we'll need
from email.mime.text import MIMEText
import logging
logger = logging.getLogger(__name__)
print __name__
class Observer(object):
    """Polls Allegro for new auctions and emails an HTML digest of the delta."""
    def __init__(self, settings):
        self.settings = settings
        self.apiHelper = allegro.ApiHelper(self.settings)
        self.auctions = {}
        # Prime self.auctions / self.old_auctions before the first diff.
        self.getAuctions()
        self.sleep_time_default = 60 * self.settings.CHECK_INTERVAL
        # Shorter interval used after a failed fetch.
        self.sleep_time_short = int(60 * self.settings.CHECK_INTERVAL / 10)
        self.sleep_time = self.sleep_time_default

    def getAuctions(self):
        """Refresh the auction snapshot; return True when the fetch succeeded."""
        self.old_auctions = self.auctions
        auctions = self.apiHelper.getAuctions()
        if auctions and len(auctions):
            self.auctions = auctions
            return True
        else:
            return False

    def getDelta(self):
        """Return auctions present now but absent from the previous snapshot."""
        if not self.old_auctions:
            logger.debug('nie ma starych aukcji')
            return []
        #else:
        #    logger.debug('TEST: usuwamy 1 element z listy starych')
        #    del self.old_auctions[self.old_auctions.keys()[0]]
        delta = [self.auctions[i] for i in set(self.auctions.keys()) - set(self.old_auctions.keys())]
        return delta

    def watch(self):
        """Main loop: diff, notify, sleep, refresh -- forever."""
        while True:
            delta = self.getDelta()
            if delta:
                self.handleDelta(delta)
            logger.info("Sleep for %d" % (self.sleep_time, ))
            time.sleep(self.sleep_time)
            if self.getAuctions():
                self.sleep_time = self.sleep_time_default
            else:
                self.sleep_time = self.sleep_time_short

    def handleDelta(self, delta):
        """Send an HTML email listing every auction in *delta*."""
        content = 'Nowe aukcje na allegro:<br><ul>'
        for i in delta:
            price = getattr(i, 's-it-price', None)
            if price is None or price <= 0.0:
                # Fall back to the buy-now price when there is no bid price.
                price = getattr(i, 's-it-buy-now-price', 0.0)
            str_data = (
                getattr(i, 's-it-id'),
                getattr(i, 's-it-thumb-url'),
                getattr(i, 's-it-name'),
                price
            )
            content += '<li><a href="http://allegro.pl/show_item.php?item=%s"><img src="%s">%s (%2.2f PLN)</a></li>' % str_data
        content += '</ul>'
        msg = MIMEText(content, 'html', 'utf-8')
        msg['Subject'] = 'Na allegro jest %d nowych aukcji' % (len(delta),)
        # NOTE(review): no 'From' header is set on the message; only the
        # envelope sender below uses EMAIL_FROM -- confirm this is intended.
        msg['To'] = self.settings.EMAIL_TO
        try:
            s = smtplib.SMTP('localhost')
            s.sendmail(self.settings.EMAIL_FROM, [self.settings.EMAIL_TO], msg.as_string())
            logger.info('Wysyłam maila')
            s.quit()
        except:
            logger.exception('Błąd w czasie wysyłania maila')
|
{"/run.py": ["/observer.py"]}
|
17,562
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/models.py
|
from __future__ import absolute_import, unicode_literals
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.db.models import signals
from django.utils.translation import ugettext_lazy as _
from django_celery_beat.models import PeriodicTask, PeriodicTasks
from . import schedules
class TaskLog(models.Model):
    """Audit row written each time the demo task runs (see tasks.logging_task)."""
    task_name = models.CharField(max_length=255)  # name of the task that executed
    created = models.DateTimeField(auto_now_add=True, editable=False)  # set once on insert
    modified = models.DateTimeField(auto_now=True)  # refreshed on every save
class CustomPeriodicTask(PeriodicTask):
    """PeriodicTask subclass adding ONCE/DAILY/WEEKLY/MONTHLY repeat semantics
    and the bookkeeping fields consumed by the custom scheduler
    (schedulers.CustomModelEntry)."""

    PERIOD_CHOICES = (
        ('ONCE', _('Once')),
        ('DAILY', _('Daily')),
        ('WEEKLY', _('Weekly')),
        ('MONTHLY', _('Monthly')),
    )
    MONTHLY_CHOICES = (
        ('DAY', _('Day')),
        ('FIRSTWEEK', _('First Week')),
        ('SECONDWEEK', _('Second Week')),
        ('THIRDWEEK', _('Third Week')),
        ('FOURTHWEEK', _('Fourth Week')),
        ('LASTWEEK', _('Last Week')),
        ('LASTDAY', _('Last Day')),
    )
    # Datetime after which the scheduler disables the task.
    end_time = models.DateTimeField(
        _('End Datetime'), blank=True, null=True,
        help_text=_(
            'Datetime when the scheduled task should end')
    )
    # Repeat interval (in weeks for WEEKLY, months for MONTHLY).
    every = models.PositiveSmallIntegerField(
        _('every'), null=False, default=1,
        help_text=_('For Weekly and Monthly Repeat')
    )
    scheduler_type = models.CharField(
        _('scheduler_type'), max_length=24, choices=PERIOD_CHOICES,
        null=True, blank=True
    )
    # Which day-of-month strategy applies when scheduler_type == 'MONTHLY'.
    monthly_type = models.CharField(
        _('monthly_type'), max_length=24, choices=MONTHLY_CHOICES,
        null=True, blank=True
    )
    max_run_count = models.PositiveIntegerField(
        null=True, blank=True,
        help_text=_('To end scheduled task after few occurrence')
    )
    # Timestamp of the last actual execution recorded by the custom scheduler.
    last_executed_at = models.DateTimeField(null=True, blank=True)
    # Per-day execution timestamps; for MONTHLY keyed first by "%m-%Y"
    # (written by CustomModelEntry.__next__).
    last_executed_days = JSONField(null=True, blank=True)

    @property
    def schedule(self):
        """Return the celery schedule of whichever relation is set.

        Checks interval, crontab, solar, clocked in that order; falls through
        and implicitly returns None when none of them is set.
        """
        if self.interval:
            return self.interval.schedule
        if self.crontab:
            # Wrap the stored crontab in the local my_crontab subclass so its
            # customised is_due() is used.
            crontab = schedules.my_crontab(
                minute=self.crontab.minute,
                hour=self.crontab.hour,
                day_of_week=self.crontab.day_of_week,
                day_of_month=self.crontab.day_of_month,
                month_of_year=self.crontab.month_of_year,
            )
            return crontab
        if self.solar:
            return self.solar.schedule
        if self.clocked:
            return self.clocked.schedule
# Notify django-celery-beat's change tracker when rows of the subclassed task
# model are saved or deleted — presumably so the beat scheduler picks up the
# change (PeriodicTasks.changed is beat's bookkeeping hook).
signals.pre_delete.connect(PeriodicTasks.changed, sender=CustomPeriodicTask)
signals.pre_save.connect(PeriodicTasks.changed, sender=CustomPeriodicTask)
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,563
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/hello_django/urls.py
|
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.auth.models import User
from django.urls import path, include
from rest_framework import serializers, viewsets
from rest_framework.routers import DefaultRouter
# from upload.views import image_upload
class AccountSerializer(serializers.ModelSerializer):
    """Serializer exposing basic auth.User fields over the API.

    NOTE(review): 'password' is a plain readable field here, so GET responses
    return the stored password hash — confirm this is intended; normally it
    would be marked write_only.
    """
    class Meta:
        model = User
        fields = ('first_name', 'username', 'email', 'password')
class SnippetViewSet(viewsets.ModelViewSet):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions for User objects.
    """
    queryset = User.objects.all()
    serializer_class = AccountSerializer
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'users', SnippetViewSet)

urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/', include(router.urls)),
    # path('', image_upload, name='upload'),
]

# Serve uploaded media through Django only while DEBUG is on; in production
# the front-end web server should serve MEDIA_URL instead.
# (FIX: `bool(settings.DEBUG)` was a redundant cast — DEBUG is already truthy/falsy.)
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,564
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/tasks.py
|
from celery import shared_task
from .models import TaskLog
@shared_task
def logging_task():
    """Celery task that records one TaskLog row per invocation."""
    print('Logging task invoked...........')
    log_entry = TaskLog.objects.create(task_name='test')
    return None if log_entry is None else None
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,565
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/clockedschedule.py
|
"""Clocked schedule Implementation."""
from __future__ import absolute_import, unicode_literals
from celery import schedules
from celery.utils.time import maybe_make_aware
from collections import namedtuple
schedstate = namedtuple('schedstate', ('is_due', 'next'))
class clocked(schedules.BaseSchedule):
    """clocked schedule.

    Fires exactly once at ``clocked_time``; it depends on PeriodicTask one_off
    (the backing model row is disabled after firing).
    """

    def __init__(self, clocked_time, enabled=True,
                 model=None, nowfun=None, app=None):
        """Initialize clocked."""
        self.clocked_time = maybe_make_aware(clocked_time)
        self.enabled = enabled
        self.model = model
        super(clocked, self).__init__(nowfun=nowfun, app=app)

    def remaining_estimate(self, last_run_at):
        # Time left until the single scheduled firing (last_run_at is unused).
        return self.clocked_time - self.now()

    def is_due(self, last_run_at):
        # actually last run at is useless
        print('is_due', last_run_at)
        last_run_at = maybe_make_aware(last_run_at)
        print('aware is_due', last_run_at)
        rem_delta = self.remaining_estimate(last_run_at)
        remaining_s = max(rem_delta.total_seconds(), 0)
        print('remaining_s: ', remaining_s)
        print('schedstate: ', schedstate)
        if not self.enabled:
            return schedstate(is_due=False, next=None)
        if remaining_s == 0:
            if self.model:
                # One-shot semantics: disable the backing row so the task
                # never fires again.
                self.model.enabled = False
                self.model.save()
            print('Executing function')
            return schedstate(is_due=True, next=None)
        return schedstate(is_due=False, next=remaining_s)

    def __repr__(self):
        return '<clocked: {} {}>'.format(self.clocked_time, self.enabled)

    def __eq__(self, other):
        if isinstance(other, clocked):
            return self.clocked_time == other.clocked_time and \
                self.enabled == other.enabled
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __reduce__(self):
        # FIX: previously returned (self.clocked_time, self.nowfun), which on
        # unpickling passed nowfun as the `enabled` argument and silently
        # dropped the enabled/model state. Match __init__'s positional order.
        return self.__class__, (self.clocked_time, self.enabled,
                                self.model, self.nowfun)
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,566
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/hello_django/celery_app.py
|
import os
from celery import Celery
from django.conf import settings
# Set default Django settings so `celery -A hello_django` works standalone.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hello_django.settings')
# os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dcs.settings')

# Module-level Celery application, configured from the Django settings module.
app = Celery('hello_django')
app.config_from_object('django.conf:settings')

# Discover tasks.py modules across all installed Django apps.
# app.autodiscover_tasks()
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

# Optional configuration, see the application user guide.
app.conf.update(
    result_expires=3600,  # drop task results after one hour
)

#if __name__ == '__main__':
#    app.start()
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,567
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/admin.py
|
from django.contrib import admin
from django_celery_beat.admin import PeriodicTaskAdmin
from django_celery_beat.models import SolarSchedule
from .models import TaskLog, CustomPeriodicTask
class CustomPeriodicTaskAdmin(PeriodicTaskAdmin):
    """Admin for CustomPeriodicTask, laying out the extra scheduling and
    run-tracking fields alongside the stock PeriodicTask ones."""

    fieldsets = (
        (None, {
            'fields': ('name', 'description', ('regtask', 'task'), 'enabled',),
            'classes': ('extrapretty', 'wide'),
        }),
        ('Schedule', {
            'fields': (
                ('scheduler_type', 'monthly_type'), ('start_time', 'end_time'),
                ('every', 'max_run_count'), 'one_off', 'crontab', 'interval', 'clocked'),
            'classes': ('extrapretty', 'wide'),
        }),
        ('Schedule Run Details', {
            'fields': ('total_run_count', 'last_run_at', 'last_executed_at',
                       'last_executed_days'),
            'classes': ('extrapretty', 'wide'),
        }),
        ('Arguments', {
            'fields': ('args', 'kwargs'),
            'classes': ('extrapretty', 'wide', 'collapse', 'in'),
        }),
        ('Execution Options', {
            'fields': ('expires', 'queue', 'exchange', 'routing_key',
                       'priority'),
            'classes': ('extrapretty', 'wide', 'collapse', 'in'),
        }),
    )
    readonly_fields = ('total_run_count', 'last_run_at')

    def get_queryset(self, request):
        """Prefetch the schedule relations shown in the changelist."""
        # NOTE(review): super(PeriodicTaskAdmin, self) deliberately skips
        # PeriodicTaskAdmin's own get_queryset and calls its parent's —
        # confirm that bypass is intended.
        qs = super(PeriodicTaskAdmin, self).get_queryset(request)
        return qs.select_related('interval', 'crontab', 'solar', 'clocked')
# Register the local models; hide the solar schedule admin that
# django-celery-beat registers by default.
admin.site.register(TaskLog)
admin.site.register(CustomPeriodicTask, CustomPeriodicTaskAdmin)
# admin.site.unregister(PeriodicTask)
admin.site.unregister(SolarSchedule)
# admin.site.unregister(IntervalSchedule)
# admin.site.unregister(CrontabSchedule)
# admin.site.register(IntervalSchedule)
# admin.site.register(CrontabSchedule)
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,568
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/migrations/0002_customperiodictask.py
|
# Generated by Django 2.2.1 on 2019-05-24 09:12
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: creates CustomPeriodicTask as a multi-table-inheritance
    # child of django_celery_beat.PeriodicTask.
    # NOTE(review): `every` is an IntegerField here but the current model
    # declares PositiveSmallIntegerField — a follow-up migration may be missing.

    dependencies = [
        ('django_celery_beat', '0011_auto_20190508_0153'),
        ('celerydemo', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomPeriodicTask',
            fields=[
                ('periodictask_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='django_celery_beat.PeriodicTask')),
                ('end_time', models.DateTimeField(blank=True, null=True, verbose_name='end_time')),
                ('every', models.IntegerField(default=1, verbose_name='every')),
                ('scheduler_type', models.CharField(blank=True, choices=[('ONCE', 'Once'), ('DAILY', 'Daily'), ('WEEKLY', 'Weekly'), ('MONTHLY', 'Monthly')], max_length=24, null=True, verbose_name='scheduler_type')),
                ('monthly_type', models.CharField(blank=True, choices=[('DAY', 'Day'), ('FIRSTWEEK', 'First Week'), ('SECONDWEEK', 'Second Week'), ('THIRDWEEK', 'Third Week'), ('FOURTHWEEK', 'Fourth Week'), ('LASTWEEK', 'Last Week'), ('LASTDAY', 'Last Day')], max_length=24, null=True, verbose_name='monthly_type')),
                ('max_run_count', models.PositiveIntegerField(blank=True, null=True)),
                ('last_executed_at', models.DateTimeField(blank=True, null=True)),
                ('last_executed_days', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
            ],
            bases=('django_celery_beat.periodictask',),
        ),
    ]
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,569
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/schedules.py
|
from celery import schedules
class my_crontab(schedules.crontab):
    """crontab subclass whose is_due mirrors the stock logic, with debug prints."""

    def is_due(self, last_run_at):
        """Return schedstate(is_due, seconds_until_next_check) for last_run_at."""
        print('cron is_due: ', last_run_at)
        remaining = max(self.remaining_estimate(last_run_at).total_seconds(), 0)
        print('rem', remaining)
        is_due_now = remaining == 0
        if is_due_now:
            # Due now: recompute the wait until the *following* occurrence.
            remaining = max(self.remaining_estimate(self.now()).total_seconds(), 0)
        print('due, rem', is_due_now, remaining)
        return schedules.schedstate(is_due_now, remaining)
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,570
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/apps.py
|
from django.apps import AppConfig
class CelerydemoConfig(AppConfig):
    """Django app configuration for the celerydemo app."""
    name = 'celerydemo'
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,571
|
rudra012/dj_celery_docker
|
refs/heads/master
|
/app/celerydemo/schedulers.py
|
from __future__ import absolute_import, unicode_literals
import datetime
import math
from celery import schedules
from celery.utils.time import maybe_make_aware
from dateutil.relativedelta import relativedelta
from django.conf import settings
from django_celery_beat.schedulers import ModelEntry, DatabaseScheduler
from django_celery_beat.utils import make_aware
from .models import (
CustomPeriodicTask)
try:
from celery.utils.time import is_naive
except ImportError: # pragma: no cover
pass
# strftime formats used for the last_executed_days JSON bookkeeping.
MONTH_FORMAT = "%m-%Y"
DATETIME_FORMAT = "%d-%m-%YT%H:%M:%SZ"


def months_difference(date1, date2):
    """Return the signed number of whole calendar months from date2 to date1."""
    year_gap = date1.year - date2.year
    month_gap = date1.month - date2.month
    return month_gap + 12 * year_gap
class CustomModelEntry(ModelEntry):
    """Beat scheduler entry that layers ONCE/DAILY/WEEKLY/MONTHLY gating on top
    of django-celery-beat's ModelEntry before deferring to the raw schedule."""

    # Fallback polling delay (seconds) returned whenever a gate says "not now".
    max_interval = 60

    def is_due(self):
        """Apply start/end/one-off/run-count and weekly/monthly gates, then
        defer to the underlying schedule's is_due()."""
        # return super(CustomModelEntry, self).is_due()
        # Here write checks to be execute before calling scheduler
        print('\n\n\nself.app.now: ', self.app.now())
        print('******', self.schedule, self.model._meta.model_name, '******', )
        print(self.model.name, self.model.task, self.model.enabled)
        if not self.model.enabled:
            # max interval second delay for re-enable.
            return schedules.schedstate(False, self.max_interval)
        # START DATE: only run after the `start_time`, if one exists.
        if self.model.start_time is not None:
            now = self._default_now()
            if getattr(settings, 'DJANGO_CELERY_BEAT_TZ_AWARE', True):
                now = maybe_make_aware(self._default_now())
            if now < self.model.start_time:
                # The datetime is before the start date - don't run.
                # send a delay to retry on start_time
                delay = math.ceil(
                    (self.model.start_time - now).total_seconds()
                )
                print('Call function after {} seconds'.format(delay))
                return schedules.schedstate(False, delay)

        # ONE OFF TASK: Disable one off tasks after they've ran once
        def disable_task():
            # Persist enabled=False and tell beat the entry changed.
            self.model.enabled = False
            # self.model.total_run_count = 0  # Reset
            self.model.no_changes = False  # Mark the model entry as changed
            self.model.save()
            # self.model.save(update_fields=["enabled", ])
            print('Disable the periodic task', self.model)
            return schedules.schedstate(False, None)  # Don't recheck

        print('self.model.__class__.__name__: ', self.model.__class__.__name__)
        if self.model.__class__.__name__ == 'CustomPeriodicTask':
            print('self.model.max_run_count, self.model.total_run_count')
            print(self.model.max_run_count, self.model.total_run_count)
            if self.model.one_off and self.model.enabled and self.model.total_run_count > 0:
                return disable_task()
            # if task executed max_run_count times then disable task
            if self.model.max_run_count and self.model.max_run_count <= self.model.total_run_count:
                return disable_task()
            if self.model.end_time is not None:
                now = self._default_now()
                if getattr(settings, 'DJANGO_CELERY_BEAT_TZ_AWARE', True):
                    now = maybe_make_aware(self._default_now())
                if now >= self.model.end_time:
                    # disable task if end date is passed
                    return disable_task()
            print('self.model.scheduler_type: ', self.model.scheduler_type)
            print('last_run_at', self.last_run_at, self.model.last_run_at)
            last_executed_at = self.model.last_executed_at
            print('last_executed_at', last_executed_at)
            today = self.app.now()
            if self.model.scheduler_type == 'MONTHLY':
                # Get this month's last date
                month_last_date = datetime.datetime(
                    today.year, today.month, 1) + relativedelta(
                    months=1, days=-1)
                month_first_date = today.replace(day=1)
                # NOTE(review): isocalendar()[1] is the ISO week-of-YEAR;
                # comparing week numbers across a year boundary (Dec/Jan)
                # may misbehave — confirm.
                today_week_no = today.isocalendar()[1]
                print('today_week_no:', today_week_no)
                if last_executed_at and last_executed_at.date() == today.date():
                    # If task executed today then skip execution for today
                    print('Executed today')
                    return schedules.schedstate(False, self.max_interval)
                if self.model.monthly_type == 'LASTDAY':
                    # Check if today is not month's last day then return False
                    if month_last_date.date() != today.date():
                        print('Not today so execute after {} seconds'.format(
                            self.max_interval))
                        return schedules.schedstate(False, self.max_interval)
                    # return schedules.schedstate(False, self.max_interval)
                elif self.model.monthly_type in ['FIRSTWEEK', 'SECONDWEEK',
                                                 'THIRDWEEK', 'FOURTHWEEK']:
                    first_week_no = month_first_date.isocalendar()[1]
                    print('first_week_no:', first_week_no)
                    week_diff = 0
                    if self.model.monthly_type == 'SECONDWEEK':
                        week_diff = 1
                    elif self.model.monthly_type == 'THIRDWEEK':
                        week_diff = 2
                    elif self.model.monthly_type == 'FOURTHWEEK':
                        week_diff = 3
                    if today_week_no - first_week_no == week_diff:
                        print('Week number pass')
                        last_executed_days = self.model.last_executed_days
                        print('last_executed_days: ', last_executed_days)
                        # Check whether task executed before or not
                        if last_executed_days:
                            # If task executed then get month of execution
                            last_executed_month_str = list(last_executed_days)[
                                0]
                            print('last_executed_month_str: ',
                                  last_executed_month_str)
                            # Validate for month string format
                            if len(last_executed_month_str.split('-')) == 2:
                                # Month of task execution
                                last_executed_month = datetime.datetime.strptime(
                                    last_executed_month_str, MONTH_FORMAT)
                                print('last_executed_month: ',
                                      last_executed_month)
                                # Check whether task last executed task date is
                                # this month or specified interval
                                if months_difference(
                                        today, last_executed_month) not in [
                                        0, self.model.every]:
                                    return schedules.schedstate(
                                        False, self.max_interval)
                elif self.model.monthly_type == 'LASTWEEK':
                    last_week_no = month_last_date.isocalendar()[1]
                    print('last_week_no:', last_week_no)
                    if today_week_no == last_week_no:
                        print('Last Week pass')
                        last_executed_days = self.model.last_executed_days
                        print('last_executed_days: ', last_executed_days)
                        # Check whether task executed before or not
                        if last_executed_days:
                            # If task executed then get month of execution
                            last_executed_month_str = list(last_executed_days)[
                                0]
                            print('last_executed_month_str: ',
                                  last_executed_month_str)
                            # Validate for month string format
                            if len(last_executed_month_str.split('-')) == 2:
                                # Month of task execution
                                last_executed_month = datetime.datetime.strptime(
                                    last_executed_month_str, MONTH_FORMAT)
                                print('last_executed_month: ',
                                      last_executed_month)
                                # Check whether task last executed task date is
                                # this month or specified interval
                                if months_difference(
                                        today, last_executed_month) not in [
                                        0, self.model.every]:
                                    return schedules.schedstate(
                                        False, self.max_interval)
                elif self.model.monthly_type == 'DAY' and self.model.crontab:
                    # NOTE(review): isdigit() returns a bool, so month_day is
                    # True/False and int(month_day) is 1/0 — this looks like a
                    # bug (the intended value is probably day_of_month itself).
                    month_day = self.model.crontab.day_of_month.isdigit()
                    print('month_day: ', month_day)
                    if self.model.last_executed_at and int(month_day) == int(
                            today.day):
                        current_month = today.month
                        last_executed_month = self.model.last_executed_at.month
                        if current_month - last_executed_month != self.model.every:
                            return schedules.schedstate(
                                False, self.max_interval)
            elif self.model.scheduler_type == 'WEEKLY':
                day_number = today.strftime("%w")
                day_last_executed_at = self.model.last_executed_days.get(
                    day_number) if self.model.last_executed_days else None
                print('day_last_executed_at: ', day_last_executed_at)
                if day_last_executed_at:
                    day_last_executed_at = datetime.datetime.strptime(
                        day_last_executed_at, DATETIME_FORMAT)
                    print('day_last_executed_at: ', day_last_executed_at)
                    if today.isocalendar()[1] - \
                            day_last_executed_at.isocalendar()[
                            1] != self.model.every:
                        print("Already executed on last week on the same day")
                        return schedules.schedstate(False, self.max_interval)
                elif last_executed_at:
                    if today.isocalendar()[1] - last_executed_at.isocalendar()[
                            1] != self.model.every:
                        print("Already executed on last week on some day")
                        return schedules.schedstate(False, self.max_interval)
        # All gates passed (or not a CustomPeriodicTask): ask the raw schedule.
        print('Calling scheduler function: ', self.schedule, '####')
        return self.schedule.is_due(make_aware(self.last_run_at))

    def __next__(self):
        """Record execution bookkeeping (last_executed_at/_days) after the
        base class advances the entry."""
        cls_obj = super(CustomModelEntry, self).__next__()
        # Changes on execution of task
        last_executed_days = self.model.last_executed_days or {}
        if self.model.scheduler_type == 'WEEKLY':
            today = self.app.now()
            # Keyed by weekday number "%w" -> execution timestamp.
            last_executed_days[today.strftime("%w")] = today.strftime(
                DATETIME_FORMAT)
        elif self.model.scheduler_type == 'MONTHLY':
            today = self.app.now()
            print('last_executed_days: ', last_executed_days)
            if last_executed_days and list(last_executed_days)[
                    0] == today.strftime(MONTH_FORMAT):
                print('Same month')
                month_dict = last_executed_days[today.strftime(MONTH_FORMAT)]
                month_dict[today.strftime("%w")] = today.strftime(
                    DATETIME_FORMAT)
                last_executed_days[today.strftime(MONTH_FORMAT)] = month_dict
            else:
                print('Different month')
                # New month: reset the mapping to just this month's entry.
                last_executed_days = {today.strftime(MONTH_FORMAT): {
                    today.strftime("%w"): today.strftime(DATETIME_FORMAT)}}
            print('last_executed_days: ', last_executed_days)
        self.model.last_executed_days = last_executed_days
        self.model.last_executed_at = self.app.now()
        self.model.save()
        # self.model.save(update_fields=["last_run_at", "total_run_count"])
        return cls_obj
class CustomDatabaseScheduler(DatabaseScheduler):
    """DatabaseScheduler wired to the custom entry class and task model."""
    Entry = CustomModelEntry
    Model = CustomPeriodicTask
|
{"/app/celerydemo/tasks.py": ["/app/celerydemo/models.py"], "/app/celerydemo/admin.py": ["/app/celerydemo/models.py"], "/app/celerydemo/schedulers.py": ["/app/celerydemo/models.py"]}
|
17,574
|
DevangML/Phoenix-The-Virtual-Assistant
|
refs/heads/main
|
/Phoenix/config/config.py
|
# SECURITY NOTE(review): real-looking API credentials are hard-coded and
# committed to source control. They should be rotated and loaded from the
# environment (or an untracked config file) instead of living in the repo.
wolframalpha_id = "4LXRE2-TEHE99AKKJ"
weather_api_key = "f73e77fa6efab5c5ec319e3732ce8eea"
|
{"/main.py": ["/gui.py", "/ui_splash_screen.py"]}
|
17,575
|
DevangML/Phoenix-The-Virtual-Assistant
|
refs/heads/main
|
/ui_splash_screen.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'splash_screen.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_SplashScreen(object):
    """pyuic5-generated UI for the splash screen.

    Do not edit by hand (see the file header) — regenerate from
    splash_screen.ui instead.
    """

    def setupUi(self, SplashScreen):
        """Build the widget tree and styles on the given main window."""
        SplashScreen.setObjectName("SplashScreen")
        SplashScreen.resize(750, 443)
        self.centralwidget = QtWidgets.QWidget(SplashScreen)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        self.dropShadowFrame = QtWidgets.QFrame(self.centralwidget)
        self.dropShadowFrame.setStyleSheet("background: rgba(191, 64, 64, 0);")
        self.dropShadowFrame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.dropShadowFrame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.dropShadowFrame.setObjectName("dropShadowFrame")
        self.label = QtWidgets.QLabel(self.dropShadowFrame)
        self.label.setGeometry(QtCore.QRect(0, 0, 731, 411))
        self.label.setStyleSheet("border-radius:30px;")
        self.label.setText("")
        # Background image comes from the compiled Qt resource module.
        self.label.setPixmap(QtGui.QPixmap(":/resources/icons/frame10.jpg"))
        self.label.setScaledContents(True)
        self.label.setObjectName("label")
        self.frame_2 = QtWidgets.QFrame(self.dropShadowFrame)
        self.frame_2.setGeometry(QtCore.QRect(40, 40, 651, 361))
        self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_2.setObjectName("frame_2")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.frame_2)
        self.verticalLayout.setObjectName("verticalLayout")
        self.label_title = QtWidgets.QLabel(self.frame_2)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(1)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_title.sizePolicy().hasHeightForWidth())
        self.label_title.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(62)
        self.label_title.setFont(font)
        self.label_title.setStyleSheet("background: rgba(191, 64, 64, 0);")
        self.label_title.setAlignment(QtCore.Qt.AlignCenter)
        self.label_title.setObjectName("label_title")
        self.verticalLayout.addWidget(self.label_title)
        self.label_description = QtWidgets.QLabel(self.frame_2)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(16)
        self.label_description.setFont(font)
        self.label_description.setStyleSheet("color: rgb(98, 114, 164);\n"
                                             "background: rgba(191, 64, 64, 0);")
        self.label_description.setAlignment(QtCore.Qt.AlignCenter)
        self.label_description.setObjectName("label_description")
        self.verticalLayout.addWidget(self.label_description)
        self.progressBar = QtWidgets.QProgressBar(self.frame_2)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(21)
        self.progressBar.setFont(font)
        self.progressBar.setStyleSheet("QProgressBar {\n"
                                       "    \n"
                                       "    background-color: rgb(192, 192, 192);\n"
                                       "    \n"
                                       "    color: rgb(40, 40, 40);\n"
                                       "    border-style: none;\n"
                                       "    border-radius: 15px;\n"
                                       "    text-align: center;\n"
                                       "}\n"
                                       "QProgressBar::chunk{\n"
                                       "    border-radius: 15px;\n"
                                       "    background-color: qlineargradient(spread:pad, x1:0, y1:0.523, x2:1, y2:0.534, stop:0 rgba(221, 255, 0, 201), stop:1 rgba(255, 255, 255, 255));\n"
                                       "}")
        self.progressBar.setProperty("value", 24)
        self.progressBar.setObjectName("progressBar")
        self.verticalLayout.addWidget(self.progressBar)
        self.label_loading = QtWidgets.QLabel(self.frame_2)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(12)
        self.label_loading.setFont(font)
        self.label_loading.setStyleSheet("color: rgb(255, 254, 129);\n"
                                         "background: rgba(191, 64, 64, 0);")
        self.label_loading.setAlignment(QtCore.Qt.AlignCenter)
        self.label_loading.setObjectName("label_loading")
        self.verticalLayout.addWidget(self.label_loading)
        self.label_credits = QtWidgets.QLabel(self.frame_2)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(10)
        self.label_credits.setFont(font)
        self.label_credits.setStyleSheet("color: rgb(98, 114, 164);\n"
                                         "background: rgba(191, 64, 64, 0);")
        self.label_credits.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_credits.setObjectName("label_credits")
        self.verticalLayout.addWidget(self.label_credits)
        self.gridLayout.addWidget(self.dropShadowFrame, 0, 0, 1, 1)
        SplashScreen.setCentralWidget(self.centralwidget)

        self.retranslateUi(SplashScreen)
        QtCore.QMetaObject.connectSlotsByName(SplashScreen)

    def retranslateUi(self, SplashScreen):
        """Set all user-visible strings (translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        SplashScreen.setWindowTitle(_translate("SplashScreen", "MainWindow"))
        self.label_title.setText(_translate("SplashScreen", "<html><head/><body><p><span style=\" font-size:72pt; color:#fffe81;\">Phoenix</span></p></body></html>"))
        self.label_description.setText(_translate("SplashScreen", "<html><head/><body><p><span style=\" color:#fffe81;\">The Virtual Assistant</span></p></body></html>"))
        self.label_loading.setText(_translate("SplashScreen", "loading..."))
        self.label_credits.setText(_translate("SplashScreen", "<html><head/><body><p><span style=\" font-size:12pt; font-weight:600; color:#fffe81;\">Created By</span><span style=\" font-size:12pt; color:#fffe81;\">: Group L1</span></p></body></html>"))
# Compiled Qt resource module (pyrcc5); registers the ":/resources/..." paths
# used by setupUi.
import resources_rc


# Manual preview entry point: show the splash screen standalone.
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    SplashScreen = QtWidgets.QMainWindow()
    ui = Ui_SplashScreen()
    ui.setupUi(SplashScreen)
    SplashScreen.show()
    sys.exit(app.exec_())
|
{"/main.py": ["/gui.py", "/ui_splash_screen.py"]}
|
17,576
|
DevangML/Phoenix-The-Virtual-Assistant
|
refs/heads/main
|
/main.py
|
import re
import os
import random
import pprint
import datetime
import requests
import pyjokes
import time
import pyautogui
import pywhatkit
import wolframalpha
from PIL import Image
from Phoenix import PhoenixAssistant
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from Phoenix.config import config
from gui import Ui_Form
from ui_splash_screen import Ui_SplashScreen
import sys
from PyQt5.QtCore import (QRectF)
from PyQt5.QtGui import (QColor, QCursor, QPainterPath, QRegion)
from PyQt5.QtWidgets import *
from PyQt5 import QtWidgets, QtCore
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QMovie
counter = 0  # NOTE(review): appears unused in this module — confirm before removing
obj = PhoenixAssistant()  # assistant instance, created at import time (side effect)

# ================================ MEMORY ===========================================================================================================
# Wake phrases and their canned responses.
GREETINGS = ["hello phoenix", "phoenix", "wake up phoenix", "you there phoenix", "time to work phoenix", "hey phoenix",
             "ok phoenix", "are you there", "how are you phoenix", "how are you"]
GREETINGS_RES = ["always there for you sir", "i am ready sir",
                 "your wish my command", "how can i help you sir?", "i am online and ready sir"]
# =======================================================================================================================================================
# =======================================================================================================================================================
def speak(text):
    """Speak *text* aloud via the assistant's text-to-speech engine."""
    obj.tts(text)
app_id = config.wolframalpha_id  # WolframAlpha application id


def computational_intelligence(question):
    """Ask WolframAlpha *question* and return the first result's text.

    Returns None (after speaking an apology) when the query fails or yields
    no results.
    """
    try:
        client = wolframalpha.Client(app_id)
        answer = client.query(question)
        answer = next(answer.results).text
        print(answer)
        return answer
    except Exception:
        # FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        speak("Sorry sir I couldn't fetch your question's answer. Please try again ")
        return None
def wish():
    """Greet the user according to the local time of day, then report the time."""
    hour = int(datetime.datetime.now().hour)
    # FIX: the morning branch used `hour >= 0 and hour <= 12` while afternoon
    # started strictly after 12, so noon (12:xx) was greeted as "Good Morning";
    # the `hour >= 0` test was also always true. Morning now ends at 11:59.
    if hour < 12:
        speak("Good Morning")
    elif hour < 18:
        speak("Good afternoon")
    else:
        speak("Good evening")
    c_time = obj.tell_time()
    speak(f"Currently it is {c_time}")
    speak("I am Phoenix. Online and ready. Please tell me how may I help you")
class MainThread(QThread):
    """Background thread running the assistant's listen/dispatch loop so the
    Qt GUI stays responsive."""

    def __init__(self):
        super(MainThread, self).__init__()

    def run(self):
        # QThread entry point.
        self.TaskExecution()

    def TaskExecution(self):
        """Greet the user, then loop forever dispatching spoken commands."""
        wish()
        while True:
            command = obj.mic_input()
            if re.search('date', command):
                date = obj.tell_me_date()
                print(date)
                speak(date)
            elif "time" in command:
                time_c = obj.tell_time()
                print(time_c)
                speak(f"Sir the time is {time_c}")
            elif re.search('launch', command):
                # Hard-coded Windows application paths.
                dict_app = {
                    'chrome': 'C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe',
                    'notepad': 'C:\\WINDOWS\\system32\\notepad.exe',
                    'pycharm': 'C:\\Program Files (x86)\\JetBrains\\PyCharm Community Edition 2021.1.3\\bin\\pycharm64.exe',
                    'code': 'C:\\Users\\User\\AppData\Local\\Programs\\Microsoft VS Code\\Code.exe'
                }
                app = command.split(' ', 1)[1]
                path = dict_app.get(app)
                if path is None:
                    speak('Application path not found')
                    print('Application path not found')
                else:
                    # NOTE(review): spoken string lacks a space before
                    # "for you sir" — runtime string, left unchanged here.
                    speak('Launching: ' + app + 'for you sir!')
                    obj.launch_any_app(path_of_app=path)
            elif command in GREETINGS:
                speak(random.choice(GREETINGS_RES))
            elif re.search('open', command):
                domain = command.split(' ')[-1]
                open_result = obj.website_opener(domain)
                speak(f'Alright sir !! Opening {domain}')
                print(open_result)
            elif re.search('weather', command):
                city = command.split(' ')[-1]
                weather_res = obj.weather(city=city)
                print(weather_res)
                speak(weather_res)
            elif re.search('tell me about', command):
                topic = command.split(' ')[-1]
                if topic:
                    wiki_res = obj.tell_me(topic)
                    print(wiki_res)
                    speak(wiki_res)
                else:
                    speak(
                        "Sorry sir. I couldn't load your query from my database. Please try again")
            elif "buzzing" in command or "news" in command or "headlines" in command:
                news_res = obj.news()
                speak('Source: The Times Of India')
                speak('Todays Headlines are..')
                for index, articles in enumerate(news_res):
                    pprint.pprint(articles['title'])
                    speak(articles['title'])
                    # Stop one article before the end of the list.
                    if index == len(news_res)-2:
                        break
                speak('These were the top headlines, Have a nice day Sir!!..')
            elif "play music" in command or "hit some music" in command:
                music_dir = "Music"
                songs = os.listdir(music_dir)
                for song in songs:
                    os.startfile(os.path.join(music_dir, song))
            elif 'youtube' in command:
                video = command.split(' ')[1]
                speak(f"Okay sir, playing {video} on youtube")
                pywhatkit.playonyt(video)
            # NOTE(review): this `if` (not `elif`) starts a NEW chain, so it is
            # evaluated even when an earlier branch already handled the
            # command, and every branch below binds to it — confirm intended.
            if "joke" in command:
                joke = pyjokes.get_joke()
                print(joke)
                speak(joke)
            elif "where is" in command:
                place = command.split('where is ', 1)[1]
                current_loc, target_loc, distance = obj.location(place)
                city = target_loc.get('city', '')
                state = target_loc.get('state', '')
                country = target_loc.get('country', '')
                time.sleep(1)
                try:
                    if city:
                        res = f"{place} is in {state} state and country {country}. It is {distance} km away from your current location"
                        print(res)
                        speak(res)
                    else:
                        res = f"{state} is a state in {country}. It is {distance} km away from your current location"
                        print(res)
                        speak(res)
                except:
                    res = "Sorry sir, I couldn't get the co-ordinates of the location you requested. Please try again"
                    speak(res)
            elif "ip address" in command:
                ip = requests.get('https://api.ipify.org').text
                print(ip)
                speak(f"Your ip address is {ip}")
            elif "switch the window" in command or "switch window" in command:
                speak("Okay sir, Switching the window")
                pyautogui.keyDown("alt")
                pyautogui.press("tab")
                time.sleep(1)
                pyautogui.keyUp("alt")
            elif "where i am" in command or "current location" in command or "where am i" in command:
                try:
                    city, state, country = obj.my_location()
                    print(city, state, country)
                    speak(
                        f"You are currently in {city} city which is in {state} state and country {country}")
                except Exception as e:
                    speak(
                        "Sorry sir, I coundn't fetch your current location. Please try again")
            elif "take screenshot" in command or "take a screenshot" in command or "capture the screen" in command:
                speak("By what name do you want to save the screenshot?")
                name = obj.mic_input()
                speak("Alright sir, taking the screenshot")
                img = pyautogui.screenshot()
                name = f"ss\\{name}.png"
                img.save(name)
                speak("The screenshot has been succesfully captured")
            elif "show me the screenshot" in command:
                try:
                    # NOTE(review): `name` is only bound by the screenshot
                    # branch above; this raises NameError (not IOError) if no
                    # screenshot was taken this session — confirm.
                    img = Image.open('' + name)
                    img.show(img)
                    speak("Here it is sir")
                    time.sleep(2)
                except IOError:
                    speak("Sorry sir, I am unable to display the screenshot")
            elif "hide all files" in command or "hide this folder" in command:
                os.system("attrib +h /s /d")
                speak("Sir, all the files in this folder are now hidden")
            elif "visible" in command or "make files visible" in command:
                os.system("attrib -h /s /d")
                speak("Sir, all the files in this folder are now visible to everyone. I hope you are taking this decision in your own peace")
            elif "calculate" in command:
                question = command
                answer = computational_intelligence(question)
                speak(answer)
            elif 'search google for' in command:
                obj.search_anything_google(command)
            elif "what is" in command:
                question = command
                answer = computational_intelligence(question)
                speak(answer)
            elif "goodbye" in command or "offline" in command or "bye" in command:
                speak("Alright sir, going offline. It was nice working with you")
                sys.exit()
            elif ("wake up" in command) or ("get up" in command):
                speak("boss, I am not sleeping, I am in online, what can I do for u")
            elif ('shutdown the system' in command) or ('down the system' in command):
                speak("Boss shutting down the system in 10 seconds")
                time.sleep(10)
                os.system("shutdown /s /t 5")
            elif 'restart the system' in command:
                speak("Boss restarting the system in 10 seconds")
                time.sleep(10)
                os.system("shutdown /r /t 5")
            elif 'remember that' in command:
                speak("what should i remember sir")
                rememberMessage = obj.mic_input()
                speak("you said me to remember" + rememberMessage)
                remember = open('data.txt', 'w')
                remember.write(rememberMessage)
                remember.close()
            elif 'do you remember anything' in command:
                remember = open('data.txt', 'r')
                speak("you said me to remember that" + remember.read())
            elif 'it\'s my birthday today' in command:
                print(" Wow! Wish you a very Happy Birthday")
                speak(" Wow! Wish you a very Happy Birthday")
            elif "who made you" in command or "who created you" in command or "who discovered you" in command:
                speak("I was built by Group L1")
                print("I was built by Group L1")
            elif 'who are you' in command or 'what can you do' in command:
                speak(
                    'I am Phoenix version 1 point O your personal assistant. I am programmed to perform tasks like' 'opening youtube,google chrome,gmail and stackoverflow ,predict time,take a photo, etc. I like to help humans in their endeavours and I would like to be remembered as humanity\'s greatest ally')
# Single worker thread shared by the GUI; started from Main.startTask.
startExecution = MainThread()
class Main(QtWidgets.QWidget, Ui_Form):
    """Main assistant window: frameless, translucent, draggable, with four
    animated GIF panels and a push button that starts the worker thread."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # BUG FIX: initialise the dragging flag so mouseMoveEvent cannot hit
        # an AttributeError when a move event arrives before any left-click
        # press (m_flag used to be created only inside mousePressEvent).
        self.m_flag = False
        self.btn_minimize_5.clicked.connect(self.hideWindow)
        self.btn_close_5.clicked.connect(self.close)
        self.setWindowFlags(Qt.FramelessWindowHint)
        self.setAttribute(Qt.WA_TranslucentBackground)
        # One QMovie per decorative label.
        self.movie = QMovie("icons/powersource.gif")
        self.label_3.setMovie(self.movie)
        self.movie2 = QMovie("icons/lines1.gif")
        self.label_4.setMovie(self.movie2)
        self.movie3 = QMovie("icons/in.gif")
        self.label_2.setMovie(self.movie3)
        self.movie4 = QMovie("icons/globe.gif")
        self.label.setMovie(self.movie4)
        self.startAnimation()
        self.pushButton.clicked.connect(self.startTask)

    def startAnimation(self):
        """Start all four GIF animations."""
        self.movie.start()
        self.movie2.start()
        self.movie3.start()
        self.movie4.start()

    def stopAnimation(self):
        """Stop all four GIF animations."""
        self.movie.stop()
        self.movie2.stop()
        self.movie3.stop()
        self.movie4.stop()

    def startTask(self):
        """Button handler: kick off the assistant worker thread."""
        # NOTE(review): this timer has no slot connected; kept for behavioural
        # parity — presumably intended to drive a clock/status update.
        timer = QTimer(self)
        timer.start(1000)
        startExecution.start()

    def __del__(self):
        # Restore stdout in case it was redirected while the window lived.
        sys.stdout = sys.__stdout__

    def mousePressEvent(self, event):
        """Begin window dragging on a left-button press."""
        if event.button() == Qt.LeftButton:
            self.m_flag = True
            self.m_Position = event.globalPos() - self.pos()  # mouse offset within the window
            event.accept()
            self.setCursor(QCursor(Qt.OpenHandCursor))  # change mouse icon while dragging

    def mouseMoveEvent(self, QMouseEvent):
        """Move the frameless window while the left button is held."""
        if Qt.LeftButton and self.m_flag:
            self.move(QMouseEvent.globalPos() - self.m_Position)  # reposition window
            QMouseEvent.accept()

    def mouseReleaseEvent(self, QMouseEvent):
        """End dragging and restore the cursor."""
        self.m_flag = False
        self.setCursor(QCursor(Qt.ArrowCursor))

    def resizeEvent(self, event):
        """Clip the widget to a rounded-rectangle mask on every resize."""
        path = QPainterPath()
        path.addRoundedRect(QRectF(self.rect()), 20, 20)
        reg = QRegion(path.toFillPolygon().toPolygon())
        self.setMask(reg)

    def hideWindow(self):
        """Minimize the window (title-bar 'minimize' button handler)."""
        self.showMinimized()
class SplashScreen(QtWidgets.QMainWindow):
    """Frameless splash window with a timed progress bar; opens Main at 100%."""

    def __init__(self):
        QtWidgets.QMainWindow.__init__(self)
        self.ui = Ui_SplashScreen()
        self.ui.setupUi(self)
        ## UI ==> INTERFACE CODES
        ########################################################################
        ## REMOVE TITLE BAR
        self.setWindowFlag(QtCore.Qt.FramelessWindowHint)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground)
        ## DROP SHADOW EFFECT
        self.shadow = QGraphicsDropShadowEffect(self)
        self.shadow.setBlurRadius(20)
        self.shadow.setXOffset(0)
        self.shadow.setYOffset(0)
        self.shadow.setColor(QColor(0, 0, 0, 60))
        self.ui.dropShadowFrame.setGraphicsEffect(self.shadow)
        ## QTIMER ==> START
        # Fires progress() every 35 ms until the counter passes 100.
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.progress)
        # TIMER IN MILLISECONDS
        self.timer.start(35)
        # CHANGE DESCRIPTION
        # Initial Text
        self.ui.label_description.setText("<strong>WELCOME</strong> TO MY APPLICATION")
        # Change Texts
        QtCore.QTimer.singleShot(1500, lambda: self.ui.label_description.setText("<strong>LOADING</strong> DATABASE"))
        QtCore.QTimer.singleShot(3000, lambda: self.ui.label_description.setText("<strong>LOADING</strong> USER INTERFACE"))
        ## SHOW ==> MAIN WINDOW
        ########################################################################
        self.show()
        ## ==> END ##

    ## ==> APP FUNCTIONS
    ########################################################################
    def progress(self):
        """Advance the progress bar; past 100, stop the timer, show Main, close."""
        # NOTE(review): `counter` is a module-level global defined outside this
        # view; assumed to start at 0 — confirm.
        global counter
        # SET VALUE TO PROGRESS BAR
        self.ui.progressBar.setValue(counter)
        # CLOSE SPLASH SCREE AND OPEN APP
        if counter > 100:
            # STOP TIMER
            self.timer.stop()
            # SHOW MAIN WINDOW
            self.main = Main()
            self.main.show()
            # CLOSE SPLASH SCREEN
            self.close()
        # INCREASE COUNTER
        counter += 1
if __name__ == "__main__":
    # Show the splash screen; SplashScreen.progress() opens the Main window
    # itself once its counter passes 100, so nothing else is needed here.
    # BUG FIX: removed the duplicated app/window construction that used to
    # follow — it sat after sys.exit(app.exec_()) and could never execute.
    app = QApplication(sys.argv)
    window = SplashScreen()
    sys.exit(app.exec_())
|
{"/main.py": ["/gui.py", "/ui_splash_screen.py"]}
|
17,577
|
DevangML/Phoenix-The-Virtual-Assistant
|
refs/heads/main
|
/Phoenix/features/google_search.py
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import re, pyttsx3
def speak(text):
    """Speak *text* aloud via the SAPI5 TTS engine, blocking until done."""
    engine = pyttsx3.init('sapi5')
    voices = engine.getProperty('voices')
    # BUG FIX: the writable property is named 'voice' (singular); setting
    # 'voices' was silently ignored, so the voice selection never applied.
    engine.setProperty('voice', voices[0].id)
    # BUG FIX: the rate must be set before say()/runAndWait(); the original
    # set it after runAndWait(), when playback had already finished.
    engine.setProperty('rate', 180)
    engine.say(text)
    engine.runAndWait()
def google_search(command):
    """Open Chrome, go to Google, and submit the query that follows the
    first 'for' in *command* (e.g. 'search google for cats' -> 'cats').

    Announces the query via speak() when the command matches the
    'search google for (...)' pattern.
    """
    reg_ex = re.search('search google for (.*)', command)
    # Everything after the first 'for' is the query text.
    search_for = command.split("for", 1)[1]
    if reg_ex:
        subgoogle = reg_ex.group(1)
        # BUG FIX: removed the dead `url = 'https://www.google.com/' + 'r/' +
        # subgoogle` construction — that variable was never passed to the
        # driver, which always navigates to the Google home page below.
        speak("Okay sir!")
        speak(f"Searching for {subgoogle}")
    driver = webdriver.Chrome(
        executable_path='driver\\chromedriver.exe')
    driver.get('https://www.google.com')
    search = driver.find_element_by_name('q')
    search.send_keys(str(search_for))
    search.send_keys(Keys.RETURN)
|
{"/main.py": ["/gui.py", "/ui_splash_screen.py"]}
|
17,578
|
DevangML/Phoenix-The-Virtual-Assistant
|
refs/heads/main
|
/gui.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """pyuic5-generated layout for the main assistant window.

    Do not hand-edit the widget construction below — it is regenerated from
    gui.ui. Widgets of interest to the application: btn_minimize_5,
    btn_close_5 (window buttons), label/label_2/label_3/label_4 (GIF hosts),
    pushButton (start button).
    """

    def setupUi(self, Form):
        """Build the full widget tree and stylesheets onto *Form*."""
        Form.setObjectName("Form")
        Form.resize(840, 611)
        self.gridLayout = QtWidgets.QGridLayout(Form)
        self.gridLayout.setObjectName("gridLayout")
        # Root frame: black background, rounded corners.
        self.frame = QtWidgets.QFrame(Form)
        self.frame.setStyleSheet("background-color: rgb(0, 0, 0);\n"
                                 "border-radius:30px;")
        self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame.setObjectName("frame")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.frame)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        # Top strip holding the minimize/close buttons.
        self.frame_5 = QtWidgets.QFrame(self.frame)
        self.frame_5.setMaximumSize(QtCore.QSize(800, 38))
        self.frame_5.setStyleSheet("background: rgba(191, 64, 64, 0);")
        self.frame_5.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_5.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_5.setObjectName("frame_5")
        self.gridLayout_3 = QtWidgets.QGridLayout(self.frame_5)
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.frame_btns = QtWidgets.QFrame(self.frame_5)
        self.frame_btns.setMaximumSize(QtCore.QSize(56, 22))
        self.frame_btns.setStyleSheet("background: rgba(255, 255, 255, 0);")
        self.frame_btns.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_btns.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_btns.setObjectName("frame_btns")
        self.horizontalLayout_7 = QtWidgets.QHBoxLayout(self.frame_btns)
        self.horizontalLayout_7.setContentsMargins(9, 3, 5, 9)
        self.horizontalLayout_7.setSpacing(7)
        self.horizontalLayout_7.setObjectName("horizontalLayout_7")
        # Round amber "minimize" button.
        self.btn_minimize_5 = QtWidgets.QPushButton(self.frame_btns)
        self.btn_minimize_5.setMinimumSize(QtCore.QSize(16, 16))
        self.btn_minimize_5.setMaximumSize(QtCore.QSize(17, 17))
        self.btn_minimize_5.setStyleSheet("QPushButton {\n"
                                          "    border: none;\n"
                                          "    border-radius: 8px;        \n"
                                          "    background-color: rgb(255, 170, 0);\n"
                                          "}\n"
                                          "QPushButton:hover {    \n"
                                          "    background-color: rgba(255, 170, 0, 150);\n"
                                          "}")
        self.btn_minimize_5.setText("")
        self.btn_minimize_5.setObjectName("btn_minimize_5")
        self.horizontalLayout_7.addWidget(self.btn_minimize_5)
        # Round red "close" button.
        self.btn_close_5 = QtWidgets.QPushButton(self.frame_btns)
        self.btn_close_5.setMinimumSize(QtCore.QSize(16, 16))
        self.btn_close_5.setMaximumSize(QtCore.QSize(17, 17))
        self.btn_close_5.setStyleSheet("QPushButton {\n"
                                       "    border: none;\n"
                                       "    border-radius: 8px;        \n"
                                       "    background-color: rgb(255, 0, 0);\n"
                                       "}\n"
                                       "QPushButton:hover {    \n"
                                       "    background-color: rgba(255, 0, 0, 150);\n"
                                       "}")
        self.btn_close_5.setText("")
        self.btn_close_5.setObjectName("btn_close_5")
        self.horizontalLayout_7.addWidget(self.btn_close_5)
        self.gridLayout_3.addWidget(self.frame_btns, 0, 0, 1, 1)
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem, 0, 1, 1, 1)
        self.verticalLayout_2.addWidget(self.frame_5)
        # Upper content frame hosting label_2 (the "in.gif" animation).
        self.frame_3 = QtWidgets.QFrame(self.frame)
        self.frame_3.setMinimumSize(QtCore.QSize(97, 145))
        self.frame_3.setMaximumSize(QtCore.QSize(798, 226))
        self.frame_3.setStyleSheet("background: rgba(191, 64, 64, 0);")
        self.frame_3.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_3.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_3.setObjectName("frame_3")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.frame_3)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.label_2 = QtWidgets.QLabel(self.frame_3)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
        self.label_2.setSizePolicy(sizePolicy)
        self.label_2.setMinimumSize(QtCore.QSize(405, 138))
        self.label_2.setMaximumSize(QtCore.QSize(393, 137))
        self.label_2.setStyleSheet("")
        self.label_2.setText("")
        self.label_2.setPixmap(QtGui.QPixmap(":/resources/icons/in.gif"))
        self.label_2.setObjectName("label_2")
        self.gridLayout_2.addWidget(self.label_2, 0, 2, 1, 1)
        spacerItem1 = QtWidgets.QSpacerItem(82, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem1, 0, 0, 1, 1)
        spacerItem2 = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem2, 0, 3, 1, 1)
        self.verticalLayout_2.addWidget(self.frame_3)
        # Middle row: power-source GIF, phoenix logo, globe GIF.
        self.frame_2 = QtWidgets.QFrame(self.frame)
        self.frame_2.setMinimumSize(QtCore.QSize(758, 209))
        self.frame_2.setMaximumSize(QtCore.QSize(800, 203))
        self.frame_2.setStyleSheet("background: rgba(191, 64, 64, 0);")
        self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_2.setObjectName("frame_2")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.frame_2)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setSpacing(28)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.label_3 = QtWidgets.QLabel(self.frame_2)
        self.label_3.setMinimumSize(QtCore.QSize(0, 0))
        self.label_3.setMaximumSize(QtCore.QSize(274, 168))
        self.label_3.setStyleSheet("background-color: rgba(255, 255, 255, 0);")
        self.label_3.setText("")
        self.label_3.setPixmap(QtGui.QPixmap(":/resources/icons/powersource.gif"))
        self.label_3.setScaledContents(True)
        self.label_3.setObjectName("label_3")
        self.horizontalLayout.addWidget(self.label_3)
        self.label_5 = QtWidgets.QLabel(self.frame_2)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_5.sizePolicy().hasHeightForWidth())
        self.label_5.setSizePolicy(sizePolicy)
        self.label_5.setMinimumSize(QtCore.QSize(238, 41))
        self.label_5.setMaximumSize(QtCore.QSize(219, 183))
        self.label_5.setSizeIncrement(QtCore.QSize(0, 0))
        self.label_5.setStyleSheet("background: rgba(255, 255, 255, 0);")
        self.label_5.setText("")
        self.label_5.setPixmap(QtGui.QPixmap(":/resources/icons/phoenix.png"))
        self.label_5.setScaledContents(True)
        self.label_5.setObjectName("label_5")
        self.horizontalLayout.addWidget(self.label_5)
        self.label = QtWidgets.QLabel(self.frame_2)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
        self.label.setSizePolicy(sizePolicy)
        self.label.setMinimumSize(QtCore.QSize(61, 48))
        self.label.setMaximumSize(QtCore.QSize(225, 221))
        self.label.setToolTipDuration(-3)
        self.label.setFrameShape(QtWidgets.QFrame.Box)
        self.label.setText("")
        self.label.setPixmap(QtGui.QPixmap(":/resources/icons/globe.gif"))
        self.label.setScaledContents(True)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        self.verticalLayout_2.addWidget(self.frame_2)
        spacerItem3 = QtWidgets.QSpacerItem(20, 80, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.verticalLayout_2.addItem(spacerItem3)
        # Bottom row: round start button plus the "lines" animation.
        self.frame_4 = QtWidgets.QFrame(self.frame)
        self.frame_4.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_4.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_4.setObjectName("frame_4")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.frame_4)
        self.horizontalLayout_2.setContentsMargins(5, -1, 4, 9)
        self.horizontalLayout_2.setSpacing(1)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        spacerItem4 = QtWidgets.QSpacerItem(194, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem4)
        self.pushButton = QtWidgets.QPushButton(self.frame_4)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushButton.sizePolicy().hasHeightForWidth())
        self.pushButton.setSizePolicy(sizePolicy)
        self.pushButton.setMinimumSize(QtCore.QSize(62, 61))
        self.pushButton.setMaximumSize(QtCore.QSize(48, 55))
        self.pushButton.setStyleSheet("QPushButton {\n"
                                      "    color: #333;\n"
                                      "    border: 2px solid #555;\n"
                                      "    border-radius: 30px;\n"
                                      "    border-style: outset;\n"
                                      "    background: qradialgradient(\n"
                                      "        cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
                                      "        radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
                                      "        );\n"
                                      "    padding: 5px;\n"
                                      "    }\n"
                                      "\n"
                                      "QPushButton:hover {\n"
                                      "    background: qradialgradient(\n"
                                      "        cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
                                      "        radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
                                      "        );\n"
                                      "    }\n"
                                      "\n"
                                      "QPushButton:pressed {\n"
                                      "    border-style: inset;\n"
                                      "    background: qradialgradient(\n"
                                      "        cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
                                      "        radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
                                      "        );\n"
                                      "    }")
        self.pushButton.setText("")
        self.pushButton.setObjectName("pushButton")
        self.horizontalLayout_2.addWidget(self.pushButton)
        self.label_4 = QtWidgets.QLabel(self.frame_4)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_4.sizePolicy().hasHeightForWidth())
        self.label_4.setSizePolicy(sizePolicy)
        self.label_4.setMinimumSize(QtCore.QSize(100, 33))
        self.label_4.setMaximumSize(QtCore.QSize(300, 70))
        self.label_4.setStyleSheet("background-color: rgba(255, 255, 255, 0);")
        self.label_4.setText("")
        self.label_4.setPixmap(QtGui.QPixmap(":/resources/icons/lines1.gif"))
        self.label_4.setScaledContents(True)
        self.label_4.setObjectName("label_4")
        self.horizontalLayout_2.addWidget(self.label_4)
        spacerItem5 = QtWidgets.QSpacerItem(182, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem5)
        self.verticalLayout_2.addWidget(self.frame_4)
        self.gridLayout.addWidget(self.frame, 0, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Apply translatable text (window title and button tooltips)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        self.btn_minimize_5.setToolTip(_translate("Form", "Minimize"))
        self.btn_close_5.setToolTip(_translate("Form", "Close"))
import resources_rc
# Standalone preview: show the generated form without the application logic.
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
|
{"/main.py": ["/gui.py", "/ui_splash_screen.py"]}
|
17,591
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/Controls.py
|
import cv2
from Anlysis.VisulizeLinage import PlotLinageTree
from Anlysis.PrintMotherDoughter import printMotherDoghuther
from UserInterface.UpdateFrame import updateFrame
#TODO Make this the control class With method update
class Controls:
    """Interactive cv2 viewer for a tracked video: trackbars select the frame
    and the channel blend; single-key commands toggle overlay layers."""

    def __init__(self,video):
        self.video = video
        self.currentFrame = 1
        # Blend between the channels, 0-100, driven by the "Channel" trackbar.
        self.currentBlend = 0
        # Overlay toggles flipped by the key commands in startControls().
        self.showMaskImg = False
        self.showCellID = True
        self.showLinagesTree = True
        self.showOptImg = True
        self.showWHI5ActivImg = False
        cv2.namedWindow('CellTracker')
        numFrames = video.getNumFrmes()
        cv2.createTrackbar("Frame",'CellTracker',self.currentFrame,numFrames-1,self.changeFrame)
        cv2.createTrackbar("Channel",'CellTracker',0,100,self.changeChanell)

    def startControls(self):
        """Blocking event loop: print the key menu, read keys, apply toggles,
        and redraw after each command. 'q' exits the loop."""
        self.updateFrame()
        #List With comand chars and coresponding function
        listOfComandsChars = ["q", "s", "o", "i", "w", "l","p"]
        listOfComandsFunctions = ["quit", "Show Segmentation", "show Opt Chan", "show cell ID", "show WHI5 Activ Threshold", "Print Lineage","Plot Data"]
        while(True):
            #global showMaskImg,showCellID,showLinagesTree,showOptImg,showWHI5ActivImg
            print("Options:")
            for i in range(0,len(listOfComandsChars)):
                print(listOfComandsChars[i] + " = " + listOfComandsFunctions[i])
            key = cv2.waitKey(0)
            #input = str(input())
            print("Your input: " + chr(key))
            if(key == ord('q')):
                break
            if(key == ord('s')):
                self.showMaskImg = not self.showMaskImg
                print("showMaskImage is now " + str(self.showMaskImg))
                #updateFrame(video)
            if(key == ord("o")):
                self.showOptImg = not self.showOptImg
                print("showOptImage is now " + str(self.showOptImg))
                #updateFrame(video)
            if(key == ord("i")):
                self.showCellID = not self.showCellID
                print("showCellID is now " + str(self.showCellID))
                #updateFrame(video)
            if(key == ord("w")):
                self.showWHI5ActivImg = not self.showWHI5ActivImg
                print("showWHI5ActivFrame is now " + str(self.showWHI5ActivImg))
                #updateFrame(video)
            if(key == ord("l")):
                trackedCells = self.video.getTrackedCells()
                printMotherDoghuther(trackedCells)
                PlotLinageTree(trackedCells)
            if(key == ord("p")):
                trackedCells = self.video.getTrackedCells()
                # NOTE(review): plotFunction is neither defined nor imported in
                # this module — pressing 'p' raises NameError. Confirm which
                # plotting helper was intended.
                plotFunction(trackedCells)
            self.updateFrame()

    def updateFrame(self):
        """Redraw the current frame with the current toggle/blend settings."""
        param = [self.currentFrame,self.currentBlend,self.showMaskImg,self.showCellID,self.showLinagesTree,self.showOptImg,self.showWHI5ActivImg]
        updateFrame(self.video,param)

    def changeFrame(self,frameNum):
        """"Frame" trackbar callback: jump to *frameNum* and redraw."""
        self.currentFrame = frameNum
        self.updateFrame()

    #Change Between Florecent And Video Channel
    def changeChanell(self,division):
        """"Channel" trackbar callback: set the channel blend and redraw."""
        self.currentBlend = division
        self.updateFrame()
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,592
|
Klas96/YeastTrack
|
refs/heads/master
|
/test.py
|
import unittest
class TestStringMethods(unittest.TestCase):
    """Placeholder suite — every case is a stub awaiting a real implementation."""

    def test_segmentation(self):
        """TODO: exercise the segmentation pipeline."""
        pass

    def test_something(self):
        """TODO: decide what this should cover."""
        pass

    def test_somethingElse(self):
        """TODO: decide what this should cover."""
        pass
# Run the suite when executed directly: python test.py
if __name__ == '__main__':
    unittest.main()
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,593
|
Klas96/YeastTrack
|
refs/heads/master
|
/Tracking/centroidTracker.py
|
from scipy.spatial import distance as dist
from collections import OrderedDict
import numpy as np
from Segmentation.cellInstance import cellInstance
from Tracking.TrackedCell import TrackedCell
#vaiabels
#objects
#disappeared
#maxDisappeared
class CentroidTracker():
    """Greedy nearest-centroid tracker that assigns persistent integer IDs.

    Each frame, the centroids of currently tracked cells are matched against
    the centroids of newly detected cell instances; unmatched tracked cells
    accumulate a "disappeared" count and are dropped after maxDisappeared
    consecutive missing frames.
    """

    def __init__(self, maxDisappeared=50):
        # ID handed out by the next register() call.
        self.nextObjectID = 0
        # objectID -> TrackedCell, in registration order.
        self.cellObjects = OrderedDict()
        # objectID -> consecutive frames the object went unmatched.
        self.disappeared = OrderedDict()
        self.frameNumber = 0
        # Unmatched-frame budget before an object is deregistered.
        self.maxDisappeared = maxDisappeared

    def register(self, cellInstans, frameNum=-1):
        """Begin tracking *cellInstans* under the next free object ID."""
        self.cellObjects[self.nextObjectID] = TrackedCell(cellInstans, self.nextObjectID, frameNum)
        self.disappeared[self.nextObjectID] = 0
        self.nextObjectID += 1

    def deregister(self, objectID):
        """Stop tracking *objectID* entirely."""
        del self.cellObjects[objectID]
        del self.disappeared[objectID]

    def updateCellInst(self, cellInstances):
        """Match this frame's detections to tracked cells.

        Returns the current list of TrackedCell objects.
        """
        # No detections: age every tracked object and drop stale ones.
        # (frameNumber is deliberately not incremented on this early-return
        # path, matching the original behaviour.)
        if len(cellInstances) == 0:
            for objectID in list(self.disappeared.keys()):
                self.disappeared[objectID] += 1
                if self.disappeared[objectID] > self.maxDisappeared:
                    self.deregister(objectID)
            return (list(self.cellObjects.values()))

        # BUG FIX: inputCentroids used to be built only inside the
        # "no tracked objects yet" branch, so the matching branch below
        # raised NameError the first time it ran with existing objects.
        # Build the array once, up front, for both branches.
        inputCentroids = np.zeros((len(cellInstances), 2), dtype="int")
        for i in range(0, len(cellInstances)):
            inputCentroids[i] = cellInstances[i].getPosition()

        if len(self.cellObjects) == 0:
            # Nothing tracked yet: every detection becomes a new object.
            for i in range(0, len(cellInstances)):
                self.register(cellInstances[i], self.frameNumber)
        else:
            cellObjectIDs = list(self.cellObjects.keys())
            cellObjectList = list(self.cellObjects.values())
            cellObjectsCentroids = [cellObj.getCentroid() for cellObj in cellObjectList]

            # Pairwise distances: rows = tracked objects, cols = detections.
            cellD = dist.cdist(np.array(cellObjectsCentroids), inputCentroids)

            # Greedy matching: rows ordered by their closest detection, then
            # each row claims its nearest still-unclaimed column.
            cellRows = cellD.min(axis=1).argsort()
            cellCols = cellD.argmin(axis=1)[cellRows]

            usedRows = set()
            usedCols = set()
            for (row, col) in zip(cellRows, cellCols):
                if row in usedRows or col in usedCols:
                    continue
                # Matched: update the tracked cell and reset its counter.
                objectID = cellObjectIDs[row]
                self.cellObjects[objectID].update(cellInstances[col])
                self.disappeared[objectID] = 0
                usedRows.add(row)
                usedCols.add(col)

            unusedRows = set(range(0, cellD.shape[0])).difference(usedRows)
            unusedCols = set(range(0, cellD.shape[1])).difference(usedCols)

            if cellD.shape[0] > cellD.shape[1]:
                # More tracked objects than detections: age the unmatched ones
                # and drop any past their disappearance budget.
                for row in unusedRows:
                    objectID = cellObjectIDs[row]
                    self.disappeared[objectID] += 1
                    if self.disappeared[objectID] > self.maxDisappeared:
                        self.deregister(objectID)
            else:
                # More detections than tracked objects: register the extras.
                for col in unusedCols:
                    self.register(cellInstances[col], self.frameNumber)

        self.frameNumber = self.frameNumber + 1
        return (list(self.cellObjects.values()))
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,594
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/cellInstance.py
|
import cv2
class cellInstance:
    """One segmented cell in a single frame: its contour plus WHI5 activity."""

    def __init__(self, contour, whi5Activ=-1):
        # -1 marks "activity not measured".
        self.whi5Activ = whi5Activ
        self.contour = contour

    def getPosition(self):
        """Return the (x, y) centroid of the contour from its image moments."""
        moments = cv2.moments(self.contour)
        cx = int(moments['m10'] / moments['m00'])
        cy = int(moments['m01'] / moments['m00'])
        return (cx, cy)

    def getSize(self):
        """Return the contour area (zeroth image moment)."""
        moments = cv2.moments(self.contour)
        size = moments['m00']
        return (size)

    def getWHI5Activity(self):
        return (self.whi5Activ)

    def getContour(self):
        return (self.contour)

    def setWhi5Activity(self, whi5Activ):
        # BUG FIX: the original signature was missing `self`, so calling
        # inst.setWhi5Activity(x) bound x to the wrong parameter and the
        # body's `self` raised NameError.
        self.whi5Activ = whi5Activ
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,595
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/LoadData/LoadChannels.py
|
import cv2
from UserInterface.videoClass import Video
def loadChannels(filePathOpt="VideoData/Experiment13h_050619/vidP4C1Z4.avi",
                 filePathFlo="VideoData/Experiment13h_050619/vidP4C2Z2.avi"):
    """Open the optical and fluorescence channel videos and wrap them in a Video.

    The defaults are the paths the original hard-coded last (its earlier path
    assignments were dead code, each overwritten before use). Calling
    loadChannels() with no arguments behaves exactly as before; alternative
    datasets can now be passed in instead of edited into the source.

    Pre: filePathOpt - path to the optical-channel video file
         filePathFlo - path to the fluorescence-channel video file
    Ret: project Video object wrapping both cv2.VideoCapture streams.
    """
    # Get a video capture for each channel.
    vidOpt = cv2.VideoCapture(filePathOpt)
    vidFlo = cv2.VideoCapture(filePathFlo)
    video = Video(vidOpt, vidFlo)
    return(video)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,596
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/ConvexHull.py
|
import cv2
import numpy as np
# Pre: binary (single-channel 0/255) image
# Ret: binary image of the same size where each detected region is replaced
#      by its filled convex hull
def convexHull(img):
    """Fill the convex hull of every contour found in a binary image."""
    # Find contours of the thresholded regions.
    contours, hierarchy = cv2.findContours(img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # One convex-hull polygon per contour (same cv2.convexHull call as before,
    # just iterating contours directly instead of indexing by range(len(...))).
    hulls = [cv2.convexHull(cnt, False) for cnt in contours]
    # Draw the filled hulls onto a fresh black canvas of the same size.
    out = np.zeros((img.shape[0], img.shape[1]), np.uint8)
    for hull in hulls:
        out = cv2.fillPoly(out, pts=[hull], color=(255))
    return(out)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,597
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/UpdateFrame.py
|
import cv2
from UserInterface.IncreasIntesity import increasIntens
# Update Frame: composes the display image with all requested visual overlays.
def updateFrame(video, param):
    """Render the current frame into the 'CellTracker' OpenCV window.

    Pre: video - project Video object providing Frame objects via getFrame()
         param - list [currentFrame, currentBlend, showMaskImg, showCellID,
                 showLinagesTree, showOptImg, showWHI5ActivImg]
    Side effect: draws via cv2.imshow; returns an empty tuple as before.
    (Removed: unused locals szX/szY and dead commented-out code.
    NOTE(review): showLinagesTree is unpacked but never used — confirm the
    lineage-tree overlay is simply not implemented yet.)
    """
    [currentFrame, currentBlend, showMaskImg, showCellID,
     showLinagesTree, showOptImg, showWHI5ActivImg] = param
    frame = video.getFrame(currentFrame)
    optImg = frame.getUserOptImage()
    floImg = frame.getUserFloImage()
    classImg = frame.getClassificationImage()
    # Start from the intensity-boosted fluorescence image, then add overlays.
    finalImg = increasIntens(floImg, currentBlend)
    if showOptImg:
        finalImg = cv2.add(finalImg, optImg)
    if showMaskImg:
        finalImg = cv2.add(finalImg, classImg)
    if showCellID:
        finalImg = cv2.add(frame.getIDImage(), finalImg)
    if showWHI5ActivImg:
        finalImg = cv2.add(finalImg, frame.getWHI5ActivImage())
    cv2.imshow('CellTracker', finalImg)
    return()
def changeFrame(frameNum):
    # Trackbar-style callback: remember the selected frame and redraw.
    global currentFrame
    currentFrame = frameNum
    # NOTE(review): updateFrame requires (video, param) arguments, so this
    # zero-argument call raises TypeError if executed — confirm whether this
    # callback is actually wired up anywhere or is dead code.
    updateFrame()
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,598
|
Klas96/YeastTrack
|
refs/heads/master
|
/Tracking/getEdgeToEdgeDist.py
|
def getSigmaEdegeToEdge(doughter, mother):
    """Map the mother-daughter edge-to-edge distance onto (0, 1) with a
    falling logistic curve: close to 1 when the contours nearly touch,
    close to 0 when they are far apart (midpoint at 140 px)."""
    slope = 1.3
    midpoint = 140
    separation = getEdgeToEdgeDist(doughter, mother)
    return 1 - 1 / (1 + slope ** (midpoint - separation))
def getEdgeToEdgeDist(doughter, mother):
    """Return the minimum point-to-point distance between the two cells'
    contours, measured at the frame in which the daughter was first detected.

    Pre: doughter, mother - TrackedCell objects exposing
         getDetectionFrameNum() and getContour(pos=...)
    Ret: smallest Euclidean distance over all contour point pairs
         (float('inf') if either contour is empty).
    """
    doughterDiscovFrame = doughter.getDetectionFrameNum()
    # Contours at the daughter's discovery moment.
    dContour = doughter.getContour(pos=doughterDiscovFrame)
    mContour = mother.getContour(pos=doughterDiscovFrame)
    # Brute-force minimum distance over all point pairs.
    minDist = float('inf')
    for pnt1 in dContour:
        pnt1 = pnt1[0]  # OpenCV contours wrap each point as [[x, y]]
        for pnt2 in mContour:
            pnt2 = pnt2[0]
            distPnts = ((pnt1[0] - pnt2[0]) ** 2 + (pnt1[1] - pnt2[1]) ** 2) ** 0.5
            # BUG FIX: the original tested `distPnts < float('inf')`, which is
            # true for every finite distance, so minDist was overwritten each
            # iteration and the LAST pairwise distance was returned instead of
            # the minimum.
            if distPnts < minDist:
                minDist = distPnts
    return(minDist)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,599
|
Klas96/YeastTrack
|
refs/heads/master
|
/Tracking/findLineage.py
|
from scipy.spatial import distance as dist
import numpy as np
from matplotlib import pyplot as plt
from Tracking.getEdgeToEdgeDist import getSigmaEdegeToEdge
def findLineage(trackedCells):
    """For each tracked cell, find the most plausible mother among all tracked
    cells and record it on the daughter via setMotherCell.

    A candidate only replaces the current best when its relatability score is
    strictly higher; scores of -1 (ruled-out pairings) never beat the 0.0
    starting value, so impossible mothers are never assigned.
    """
    for candidateDaughter in trackedCells:
        bestScore = 0.0
        for candidateMother in trackedCells:
            score = getRelatabelityFactor(candidateDaughter, candidateMother)
            if score > bestScore:
                bestScore = score
                candidateDaughter.setMotherCell(candidateMother.getCellID(), score)
    #anlyseRelatabelityFactor()
# Pre: two TrackedCell objects
# Ret: number reflecting how likely they are mother and daughter
#      (-1 when the pairing is impossible)
def getRelatabelityFactor(doughter, mother):
    """Score how plausible it is that `mother` is the parent of `doughter`."""
    ruledOut = -1
    daughterBorn = doughter.getDetectionFrameNum()
    motherBorn = mother.getDetectionFrameNum()
    # A cell must have existed at least this many frames before it can bud.
    buddFrameNum = 10
    if motherBorn + buddFrameNum > daughterBorn:
        return(ruledOut)
    if doughter.getCellID() == mother.getCellID():
        return(ruledOut)
    distFactor = getSigmaEdegeToEdge(doughter, mother)
    # getWHI5Factor is still invoked (it has observable side effects), but its
    # contribution is currently disabled by the override to 1 below.
    whi5Factor = getWHI5Factor(doughter, mother)
    whi5Factor = 1
    distWeight = 1.5
    whi5Weight = 1
    return((distFactor ** distWeight) * (whi5Factor ** whi5Weight))
def getDistFacorSigma(doughter, mother):
    """Logistic closeness score from centroid-to-centroid distance, taken at
    the daughter's discovery frame: approaches 1 for touching cells and 0 for
    distant ones, with the midpoint (score 0.5) at 140 px."""
    discoveryFrame = doughter.getDetectionFrameNum()
    dx, dy = doughter.getCentroid(discoveryFrame)
    mx, my = mother.getCentroid(discoveryFrame)
    separation = ((dx - mx) * (dx - mx) + (dy - my) * (dy - my)) ** 0.5
    # Falling logistic curve in the separation.
    slopeFactor = 1.3
    midPoint = 140
    return(1 - 1 / (1 + slopeFactor ** (midPoint - separation)))
# Ret: fraction of the analysed frames in which the mother's WHI5 intensity
#      is over the threshold, floored at 0.1 so no candidate is fully excluded.
# Pre1: daughter TrackedCell
# Pre2: mother TrackedCell
def getWHI5Factor(doughter,mother):
    # Frames of the mother's trace to inspect, and the intensity counted as
    # "WHI5 active".
    analysisSpan = 50
    intensThreshold = 0.18
    binaryFactor = 0  # NOTE(review): assigned but never used.
    # Extract the mother's WHI5 intensity trace.
    whi5Mother = mother.getWhi5Trace()
    doughterDetectFrame = doughter.getDetectionFrameNum()
    motherDetectFrame = mother.getDetectionFrameNum()
    # Take 50 elements after the daughter has been detected; if 50 are not
    # available, take elements to the end (original intent per this comment).
    # NOTE(review): motherDetectFrame - doughterDetectFrame is NEGATIVE
    # whenever the mother was detected first (the normal case), so this start
    # index counts from the END of the trace — confirm that is intended.
    startMotherWhi5arr = motherDetectFrame-doughterDetectFrame
    # NOTE(review): this condition looks inverted relative to the stated
    # intent — the [start:-1] branch (which also drops the final sample) runs
    # exactly when MORE than analysisSpan elements are available.
    if len(whi5Mother) < (startMotherWhi5arr+analysisSpan):
        whi5Mother = whi5Mother[startMotherWhi5arr:startMotherWhi5arr+analysisSpan]
    else:
        whi5Mother = whi5Mother[startMotherWhi5arr:-1]
    # Count frames above the threshold.
    whi5Factor = 0
    for whi5 in whi5Mother:
        if(whi5 > intensThreshold):
            whi5Factor = whi5Factor + 1
    # Don't completely exclude candidates with zero WHI5 activity.
    baseConsidFactor = 0.1  # NOTE(review): duplicated by the literal 0.1 below.
    # NOTE(review): raises ZeroDivisionError if the slice above is empty.
    whi5Factor = max(whi5Factor/len(whi5Mother),0.1)
    print(whi5Factor)
    return(whi5Factor)
def findWHI5BothPeak(doughter,mother):
    # Score a mother-daughter pairing by the product of the peak WHI5
    # intensities of both cells' traces around the daughter's detection.
    analysisSpan = 50
    # Extract both WHI5 traces.
    whi5Doughter = doughter.getWhi5Trace()
    whi5Mother = mother.getWhi5Trace()
    doughterDetectFrame = doughter.getDetectionFrameNum()
    motherDetectFrame = mother.getDetectionFrameNum()
    # Take 50 elements after the daughter has been detected; if 50 are not
    # available, take elements to the end (stated intent).
    # NOTE(review): as in getWHI5Factor, these conditions appear inverted —
    # the [:-1] branches run when the trace is LONGER than the span, and each
    # drops the final sample. Confirm against intended behaviour.
    if len(whi5Doughter) < analysisSpan:
        whi5Doughter = whi5Doughter[:analysisSpan]
    else:
        whi5Doughter = whi5Doughter[:-1]
    startMotherWhi5arr = motherDetectFrame-doughterDetectFrame
    if len(whi5Mother) < (startMotherWhi5arr+analysisSpan):
        whi5Mother = whi5Mother[startMotherWhi5arr:startMotherWhi5arr+analysisSpan]
    else:
        whi5Mother = whi5Mother[startMotherWhi5arr:-1]
    # NOTE(review): the two means are computed but never used; empty slices
    # above would make these divisions raise ZeroDivisionError.
    meanIntensDoughter = sum(whi5Doughter)/len(whi5Doughter)
    meanIntensMother = sum(whi5Mother)/len(whi5Mother)
    maxIntensDoughter = max(whi5Doughter)
    maxIntensMother = max(whi5Mother)
    bothPeakFactor = maxIntensDoughter*maxIntensMother
    return(bothPeakFactor)
def findWHI5Correlation(doughter, mother):
    """Cross-correlate the mother's and daughter's WHI5 traces over (up to)
    analysisSpan frames after the daughter's detection.

    Pre: doughter, mother - TrackedCell objects exposing getWhi5Trace() and
         getDetectionFrameNum().
    Ret: numpy array from np.correlate (default 'valid' mode; length 1 when
         both trimmed traces are the same length).
    """
    analysisSpan = 50
    # Extract both traces.
    whi5Doughter = doughter.getWhi5Trace()
    whi5Mother = mother.getWhi5Trace()
    doughterDetectFrame = doughter.getDetectionFrameNum()
    motherDetectFrame = mother.getDetectionFrameNum()
    # Take 50 elements after the daughter has been detected; if 50 are not
    # available, take elements to the end (same trimming as the sibling
    # WHI5 helpers, preserved as-is).
    if len(whi5Doughter) < analysisSpan:
        whi5Doughter = whi5Doughter[:analysisSpan]
    else:
        whi5Doughter = whi5Doughter[:-1]
    # BUG FIX: this assignment was mangled to 'startMotherWhi5ardistFactorr'
    # (the text 'distFactor' pasted into the name), so the
    # 'startMotherWhi5arr' used below raised NameError at runtime.
    startMotherWhi5arr = motherDetectFrame - doughterDetectFrame
    if len(whi5Mother) < (startMotherWhi5arr + analysisSpan):
        whi5Mother = whi5Mother[startMotherWhi5arr:startMotherWhi5arr + analysisSpan]
    else:
        whi5Mother = whi5Mother[startMotherWhi5arr:-1]
    # Cut both traces to the shorter length so np.correlate gets equal sizes.
    if len(whi5Mother) < len(whi5Doughter):
        whi5Doughter = whi5Doughter[:len(whi5Mother)]
    if len(whi5Doughter) < len(whi5Mother):
        whi5Mother = whi5Mother[:len(whi5Doughter)]
    whi5correlation = np.correlate(whi5Mother, whi5Doughter)
    return(whi5correlation)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,600
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/Preprocessing.py
|
from skimage.restoration import denoise_nl_means, estimate_sigma
from skimage import exposure
import cv2
import numpy as np
# Preprocessing of a raw optical image before segmentation.
# Pre: raw image
# Ret: preprocessed image
def preprocess(img):
    """Denoise the raw image with OpenCV's non-local-means filter.

    Rescaling and histogram equalisation were previously tried here and are
    currently disabled; only the denoising step remains active.
    """
    denoised = cv2.fastNlMeansDenoising(img)
    return(denoised)
def preprocessFloImg(img):
    """Denoise a raw fluorescence-channel image (non-local means only)."""
    return(cv2.fastNlMeansDenoising(img))
# Rescale to the preferred analysis size.
# Pre1: image as a numpy array
# Pre2: percent - target size as a percentage of the original (1000 = 10x)
# Ret: the resized image
def rescale_frame(image, percent=1000):
    """Resize `image` to percent/100 of its original width and height,
    using area interpolation."""
    # Keep the original arithmetic order so int() truncation matches exactly.
    dims = (int(image.shape[1] * percent/100), int(image.shape[0] * percent/100))
    return cv2.resize(image, dims, interpolation=cv2.INTER_AREA)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,601
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/watershed.py
|
import numpy as np
import cv2
from matplotlib import pyplot as plt
from Segmentation.cellInstance import cellInstance
from Segmentation.getWHI5Activity import getWHI5Activity
from Segmentation.FilterDetection import filterDetections
from Segmentation.OstuBinarizartion import getMaskFrame
from Segmentation.getThreshold import getTherholdImage
# Watershed segmentation of one frame.
# Pre: frame - Frame object as defined in main
# Ret1: list of cellInstance objects (after filterDetections)
def watershed(frame):
    openingThres = 25  # NOTE(review): unused — the cv2.threshold call is commented out below.
    optFrame = frame.getScaledOptChan()  # NOTE(review): never used afterwards.
    floFrame = frame.getScaledFloChan()
    gray = cv2.cvtColor(floFrame,cv2.COLOR_BGR2GRAY)  # NOTE(review): never used afterwards.
    # Thresholding, delegated to the project helper.
    #ret, thresh = cv2.threshold(gray,openingThres,255,cv2.THRESH_BINARY)
    thresh = getTherholdImage(frame)
    # Noise removal by morphological opening.
    kernel = np.ones((3,3),np.uint8)
    opening = cv2.morphologyEx(thresh,cv2.MORPH_OPEN,kernel, iterations = 2)
    opening = cv2.cvtColor(opening, cv2.COLOR_BGR2GRAY)
    # Sure-background area: dilate the opened mask.
    sureBG = cv2.dilate(opening,kernel,iterations=3)
    # Sure-foreground area: distance transform, then a high relative threshold.
    #opening = np.uint8(opening)
    #opening = cv2.convertTo(opening, CV_8UC1);
    distTransform = cv2.distanceTransform(opening,cv2.DIST_L2,5)
    ret, sureFG = cv2.threshold(distTransform,0.65*distTransform.max(),255,0)
    # Unknown region: background minus foreground.
    sureFG = np.uint8(sureFG)
    unknown = cv2.subtract(sureBG,sureFG)
    # Marker labelling; shift labels so the background is 1 rather than 0.
    ret, markers = cv2.connectedComponents(sureFG)
    markers = markers+1
    # NOTE(review): sureFG/unknown are recomputed here — exact duplicates of
    # the two statements above with identical inputs.
    sureFG = np.uint8(sureFG)
    unknown = cv2.subtract(sureBG,sureFG)
    # Mark the unknown region with 0 so watershed resolves its ownership.
    markers[unknown==255] = 0
    markers = cv2.watershed(floFrame,markers)
    #markers = cv2.watershed(distTransform,markers)
    # Paint watershed boundaries (label -1) red onto the fluorescence frame.
    floFrame[markers == -1] = [0,0,255]
    markersShow = np.array(markers, dtype=np.uint8)
    markersShow = cv2.cvtColor(markersShow, cv2.COLOR_GRAY2BGR)
    markersShow[markers == -1] = [255,255,255]
    markersShow = cv2.add(markersShow,floFrame)
    # NOTE(review): debug visualisation — cv2.waitKey(0) blocks until a key
    # press on EVERY call; confirm this is meant to stay in the pipeline.
    cv2.imshow("markers",markersShow)
    cv2.waitKey(0)
    # Extract per-cell contours from the visualised markers and filter them.
    cellInstanses = conectedCompontents(markersShow,floFrame)
    cellInstanses = filterDetections(cellInstanses)
    #print(cellInstanses)
    return(cellInstanses)
def conectedCompontents(frame,floFrame):
    """Extract contours from a mask image and wrap each as a cellInstance.

    frame    -- BGR mask image; converted to grayscale before contour search.
    floFrame -- fluorescence channel used to score WHI5 activity per contour.
    Returns a list of cellInstance objects, one per detected contour.
    """
    grayMask = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    detectedContours, _hierarchy = cv2.findContours(
        grayMask, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
    return [cellInstance(contour, getWHI5Activity(contour, floFrame))
            for contour in detectedContours]
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,602
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/getInstantSegmentImage.py
|
import cv2
import numpy as np
from UserInterface.getMaskImage import getMaskImage
#color all the blobs with individual colors
#Text size for all cells
def getCellInstImage(listOfCellInstances,sizeX,sizeY):
    """Render each cell instance's convex hull as a filled colored blob.

    listOfCellInstances -- objects exposing getContour().
    sizeX, sizeY        -- output image dimensions (rows, cols).
    Returns a BGR uint8 image of shape (sizeX, sizeY, 3).

    Bug fix: the original referenced an undefined name ``trackedCell`` when
    picking the fill color (NameError on any non-empty input -- copied from
    getClassImage, which iterates tracked cells). Plain cell instances carry
    no ID here, so colors now cycle by list position instead.
    """
    colorSet = [(0,7,100),(32,107,203),(237, 120, 255),(255, 170,0),(100,2,100)]
    drawing = np.zeros((sizeX,sizeY, 3), np.uint8)
    for idx, cellInst in enumerate(listOfCellInstances):
        cnt = cellInst.getContour()
        convexHull = cv2.convexHull(cnt, False)
        col = colorSet[idx % len(colorSet)]
        drawing = cv2.fillPoly(drawing, pts =[convexHull], color=col)
    return(drawing)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,603
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/RandomForestSegmentaion.py
|
from Segmentation.ParmeterizeImagegs import imagesToPrameter
import pickle
import cv2
from matplotlib import pyplot as plt
from Segmentation.ConectedComponents import conectedCompontents
from Segmentation.FilterDetection import filterDetections
#Pre: Frame
#Ret: CellInstances in that frame
def rfSegmentetion(Frame):
    """Segment cells in *Frame* with a pre-trained random-forest model.

    Pre: Frame exposes getOptImage()/getFloImage().
    Ret: filtered list of cellInstance objects found in the frame.

    NOTE(review): unpickling executes arbitrary code -- only load model
    files from a trusted source.
    """
    optImg = Frame.getOptImage()
    floImg = Frame.getFloImage()
    # Build the per-pixel feature matrix from the two channels.
    parm = imagesToPrameter(optImg,floImg)
    # Load the trained random-forest model.
    # Fix: use a context manager so the file handle is always closed
    # (the original leaked it via pickle.load(open(...))).
    with open("Segmentation/YeastCellRFModel", 'rb') as modelFile:
        rfModel = pickle.load(modelFile)
    # Predict a per-pixel segmentation and restore the image shape.
    result = rfModel.predict(parm)
    result = result.reshape((optImg.shape))
    # Extract per-cell contours and drop implausible detections.
    cellInstances = conectedCompontents(result,floImg)
    cellInstances = filterDetections(cellInstances)
    return(cellInstances)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,604
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/getMaskImage.py
|
import cv2
import numpy as np
#Pre: VideoFrame
#Ret: White on black maskFrame
def getMaskImage(frame):
    """Binarize *frame* with Otsu's method and return the convex-hull mask.

    Pre: frame is a BGR video frame.
    Ret: image with the convex hull of each white blob filled in.
    """
    binarized = otsuThreshold(frame)
    return(convexHull(binarized))
def otsuThreshold(frame):
    """Return the Otsu-thresholded binary image of a BGR *frame*."""
    grayscale = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Otsu picks the threshold automatically; the explicit 0 is ignored.
    _retval, binary = cv2.threshold(
        grayscale, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    return(binary)
#Pre: takes An image Black and white
#Ret: Returns Image with conexHull filled of all wite separated images
def convexHull(frame):
    """Fill the convex hull of every white blob in a binary image.

    frame -- single-channel black-and-white image.
    Returns a BGR uint8 image of the same width/height where each blob's
    convex hull is filled white (contour/hull outlines are drawn first but
    end up overwritten by the white fill).
    """
    contours, hierarchy = cv2.findContours(frame, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # One convex hull per detected contour.
    hulls = [cv2.convexHull(contour, False) for contour in contours]
    # Start from an all-black canvas of matching size.
    canvas = np.zeros((frame.shape[0], frame.shape[1], 3), np.uint8)
    outlineColor = (0, 255, 0)  # green - contour outline
    hullColor = (255, 0, 0)     # blue - convex hull outline
    for idx in range(len(contours)):
        cv2.drawContours(canvas, contours, idx, outlineColor, 1, 8, hierarchy)
        cv2.drawContours(canvas, hulls, idx, hullColor, 1, 8)
    # Fill each hull solid white on top of the outlines.
    for hull in hulls:
        canvas = cv2.fillPoly(canvas, pts =[hull], color=(255,255,255))
    return(canvas)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,605
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/PrintMotherDoughter.py
|
def printMotherDoghuther(trackedCells):
for trackedCell in trackedCells:
doughterID = trackedCell.getCellID()
motgherID = trackedCell.getMotherCell()
relatabelityFactor = trackedCell.getRelatabelityFactor()
print("M: " + str(motgherID) + " --> " + "D: " + str(doughterID))
print("RelFactor: " + str(relatabelityFactor))
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,606
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/FitExponential.py
|
import numpy as np
import scipy.optimize as op
from matplotlib import pyplot as plt
def func(x,const,rate):
    """Exponential model ``const * e**(rate * x)`` (vectorized over x)."""
    scaled = np.exp(rate * x)
    return const * scaled
def fitExponential(array):
    """Fit ``const * exp(rate * x)`` to *array*, with x = sample index.

    array -- 1-D sequence of y values sampled at x = 0, 1, 2, ...
    Returns the optimal (const, rate) parameters from curve_fit; also plots
    data and fit on the current matplotlib axes (figure is not shown here).

    Fixes: seed the amplitude with the first *y* value -- the original used
    fx[0], which is always 0 and a degenerate starting guess for curve_fit.
    A leftover debug print of the array length was also removed.
    """
    fx = np.array(range(len(array)))
    fy = np.array(array)
    popt, pcov = op.curve_fit(func,fx,fy,p0=(fy[0], 0.1),maxfev = 6000)
    plt.plot(fx, fy, 'x', label='data')
    plt.plot(fx, func(fx, *popt), label='curve-fit')
    plt.legend(loc='upper left')
    return(popt)
#Put in epo in size plot
def plotDataWithExpo(array):
    """Plot *array* with two hard-coded exponential fits overlaid and show it.

    NOTE(review): each fitted curve is evaluated at indices 0..len-1 but
    plotted against the segment's own x range -- presumably per-segment
    relative time; confirm against the fitting code.
    """
    # First fitted segment: frames 0..30.
    firstX = range(0, 31)
    firstY = [func(i, 5.3338403 * 1000, 2.1211569 / 100)
              for i in range(len(firstX))]
    plt.plot(firstX, firstY, color='C1', label="exponential fit")
    # Second fitted segment: frames 36..94.
    secondX = range(36, 95)
    secondY = [func(i, 8.493409 * 1000, 5.3318 / 1000)
               for i in range(len(secondX))]
    plt.plot(secondX, secondY, color='C2', label="exponential fit")
    # Raw data on top of both fits.
    plt.plot(range(len(array)), array, 'x', color='C0', label="data")
    plt.ylabel('Growth Curves with exponential fit')
    plt.xlabel('Time')
    plt.title("Size")
    plt.xticks([])
    plt.yticks([])
    plt.legend()
    plt.show()
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,607
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/getDevisionFrameNum.py
|
#Returnsfirst Whi5 activation Index
def getDevisionFrameNum(doughter):
    """Return the frame number of the first Whi5 activation.

    doughter -- tracked cell exposing getWhi5Trace() (iterable of activity
    values) and getDetectionFrameNum() (frame the cell was first detected).

    Returns the 1-based position of the first trace value above the
    activation threshold, offset by the detection frame number. If the
    threshold is never crossed, returns len(trace) with no offset
    (matching the original fall-through behavior).

    Bug fix: the original never stopped at the first activation -- it kept
    iterating and added the detection offset once per above-threshold
    sample, inflating the result whenever more than one sample exceeded
    the threshold.
    """
    thresh = 0.30  # Whi5 activity level counted as "active"
    cellWhi5Trace = doughter.getWhi5Trace()
    index = 0
    for whi5 in cellWhi5Trace:
        index = index + 1
        if whi5 > thresh:
            return(index + doughter.getDetectionFrameNum())
    return(index)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,608
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/getClassImage.py
|
import cv2
import numpy as np
from UserInterface.getMaskImage import getMaskImage
from UserInterface.rescaleImageToUser import rescaleImageToUser
#color all the blobs with individual colors
#Text size for all cells
def getClassImage(listOfObjects,sizeX,sizeY):
    """Draw each tracked cell's convex hull filled with an ID-keyed color.

    listOfObjects -- tracked cells exposing getContour()/getCellID().
    sizeX, sizeY  -- canvas dimensions (rows, cols).
    Returns the rescaled (user-facing) BGR class image.
    """
    palette = [(0,7,100),(32,107,203),(237, 120, 255),(255, 170,0),(100,2,100)]
    canvas = np.zeros((sizeX,sizeY, 3), np.uint8)
    for cell in listOfObjects:
        hull = cv2.convexHull(cell.getContour(), False)
        # Color cycles with the cell ID so the same cell keeps its color.
        fillColor = palette[cell.getCellID() % len(palette)]
        canvas = cv2.fillPoly(canvas, pts =[hull], color=fillColor)
    return(rescaleImageToUser(canvas))
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,609
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/getWHI5Activity.py
|
import cv2
import numpy as np
#Pre1: Keypoint All cells
#Pre2: Mask Frame With cells
#Pre3: Florecent Chanell
#Ret: Array with numberes corresponding to WHI5 Activity
def getWHI5ActivityNorm(countour, floChan):
    """Area-normalized Whi5 intensity inside the contour's convex hull.

    countour -- cell contour (OpenCV point array).
    floChan  -- fluorescence image; assumed single-channel to match the
                (h, w, 1) mask in the subtractions -- TODO confirm.
    Returns sum(intensity inside hull) / contour area / 255.
    """
    convexHull = cv2.convexHull(countour, False)
    # White-filled hull on a black single-channel mask.
    mask = np.zeros((floChan.shape[0], floChan.shape[1], 1), np.uint8)
    mask = cv2.fillPoly(mask, pts =[convexHull], color=(255))
    # Intersection of the fluorescence channel with the hull mask.
    mask_out = cv2.subtract(mask,floChan)
    mask_out = cv2.subtract(mask,mask_out)
    channelSums = cv2.sumElems(mask_out)
    moments = cv2.moments(countour)
    area = moments['m00']
    # Fix: sumElems returns per-channel sums in a 4-tuple; for a
    # single-channel image the sum lives in element 0. The original read
    # element 1, which is always 0, so the function always returned 0.0.
    whi5Activ = channelSums[0]/area/255
    return(whi5Activ)
def getWHI5Activity(countour, floChan):
    """Peak normalized Whi5 intensity inside the contour's convex hull.

    countour -- cell contour (OpenCV point array).
    floChan  -- fluorescence image; assumed single-channel to match the
                (h, w, 1) mask in the subtractions -- TODO confirm.
    Returns max(intensity inside hull) / 255, a value in [0, 1].
    """
    convexHull = cv2.convexHull(countour, False)
    # White-filled hull on a black single-channel mask.
    # Fix: the original built this mask twice in a row (identical zeros +
    # fillPoly pair); the redundant second construction has been removed
    # (no behavioral change).
    drawing = np.zeros((floChan.shape[0], floChan.shape[1], 1), np.uint8)
    drawing = cv2.fillPoly(drawing, pts =[convexHull], color=(255))
    # Intersection of the fluorescence channel with the hull mask.
    mask_out=cv2.subtract(drawing,floChan)
    mask_out=cv2.subtract(drawing,mask_out)
    whi5Activ = mask_out[...].max()/255
    return(whi5Activ)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,610
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/ConectedComponents.py
|
import cv2
from Segmentation.getWHI5Activity import getWHI5Activity
from Segmentation.cellInstance import cellInstance
def conectedCompontents(maskImg, floImg):
    """Extract one cellInstance per connected component of a binary mask.

    Pre1: maskImg -- binary segmentation mask
    Pre2: floImg  -- fluorescence channel used to score WHI5 activity
    Ret:  list of cellInstance objects, one per detected contour.
    """
    contours, _hierarchy = cv2.findContours(
        maskImg, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
    return [cellInstance(cnt, getWHI5Activity(cnt, floImg)) for cnt in contours]
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,611
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/AddBudsToMother.py
|
from Anlysis.getDevisionFrameNum import getDevisionFrameNum
#Pre1: Mother Tracked CellTrackel
#Pre2: list Doughters
def addBudsToMother(mother, doughters):
    """Fold each daughter's pre-division sizes into the mother's size trace.

    Pre1: mother    -- tracked mother cell
    Pre2: doughters -- list of tracked daughter cells
    Ret:  the mother's size trace (from frame 0) with every daughter's
          sizes, up to its division frame, added element-wise.
    """
    combined = mother.getSizesTraceFromBegining()
    for daughter in doughters:
        division = getDevisionFrameNum(daughter)
        budTrace = daughter.getSizesTraceFromBegining()[:division]
        # Only the overlapping prefix of the two traces can be summed.
        overlap = min(len(combined), len(budTrace))
        for i in range(overlap):
            combined[i] += budTrace[i]
    return combined
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,612
|
Klas96/YeastTrack
|
refs/heads/master
|
/main.py
|
#Yeast Track Main
# Entry point: load a multi-channel microscopy video, run cell tracking
# over it, then hand control to the interactive UI.
import cv2
#from UserInterface.videoClass import Video
from UserInterface.LoadData.LoadData import getVideo
from UserInterface.LoadData.LoadtifFile import imortTiftoVideoNew
from UserInterface.Controls import Controls
from UserInterface.LoadData.ImportThreeZoomLevel import loadThreeZoomLevel
from UserInterface.LoadData.LoadChannels import loadChannels
# Load the video via the channel-based loader (the three-zoom-level
# loader above is an alternative kept for reference).
#video = loadThreeZoomLevel()
video = loadChannels()
# Run segmentation + tracking over all frames, then start the UI loop.
video.runTracking()
cntrl = Controls(video)
cntrl.startControls()
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,613
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/rescaleImageToUser.py
|
import cv2
#Scale image For Visual Apropriate Size
#Pre: image
#Ret: Scaled image
def rescaleImageToUser(img):
    """Resize an image to fit the user display area, keeping aspect ratio.

    Pre: img -- source image
    Ret: the image resized by the display proportion (cv2.INTER_AREA).
    """
    ratio = getScaleProprtion(img.shape)
    newDims = (int(img.shape[1] / ratio), int(img.shape[0] / ratio))
    return cv2.resize(img, newDims, interpolation=cv2.INTER_AREA)
#Pre1: Centroid
#Pre2: frame
#Ret: REscaled cetroid
def rescalePosToUser(centroid, imgShape):
    """Map a centroid from image coordinates to user-display coordinates.

    Pre1: centroid -- (x, y) position in the original image
    Pre2: imgShape -- shape of the original image
    Ret:  (x, y) scaled down by the display proportion, as ints.
    """
    ratio = getScaleProprtion(imgShape)
    x, y = centroid
    return (int(x / ratio), int(y / ratio))
def rescaleCounur(contour, imgShape):
    """Rescale every point of an OpenCV contour to user-display coordinates.

    Pre1: contour  -- OpenCV contour (each entry wraps one point)
    Pre2: imgShape -- shape of the original image
    Ret:  list of scaled (x, y) tuples.
    """
    return [rescalePosToUser(point[0], imgShape) for point in contour]
#Gives Scale proportion
#Pre: image shape
#Ret: Proportion to scale to get good visual
def getScaleProprtion(imgSape):
    """Return the factor by which an image must be shrunk to fit the display.

    Pre: imgSape -- image shape (rows, cols, ...)
    Ret: the larger of width/1200 and height/800, so the scaled image
         fits inside a 1200x800 window.
    """
    targetWidth, targetHeight = 1200, 800
    return max(imgSape[1] / targetWidth, imgSape[0] / targetHeight)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,614
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/getIDImage.py
|
import cv2
import numpy as np
from UserInterface.rescaleImageToUser import rescaleImageToUser
from UserInterface.rescaleImageToUser import rescalePosToUser
from UserInterface.rescaleImageToUser import rescaleCounur
from Tracking.GetPositionFromContour import getPositionFromContour
#Pre1: list of objects
#Pre2: frame
def getIDImage(listOfObjects, frame):
    """Render an overlay image marking each tracked cell's ID and centroid.

    Pre1: listOfObjects -- tracked cells exposing getCellID()/getCentroid()
    Pre2: frame -- frame object exposing xSz, ySz and getOptImage()
    Ret:  3-channel overlay image, rescaled to the user display size, with
          a green ID label and a filled circle at each cell's centroid.
    """
    numChannels = 3
    overlay = np.zeros((frame.xSz, frame.ySz, numChannels), np.uint8)
    overlay = rescaleImageToUser(overlay)
    for trackedCell in listOfObjects:
        label = "ID " + str(trackedCell.getCellID())
        # Centroids are stored in full-resolution coordinates; map them
        # into the rescaled overlay.
        (centerX, centerY) = rescalePosToUser(
            trackedCell.getCentroid(), frame.getOptImage().shape)
        cv2.putText(overlay, label, (centerX - 10, centerY - 25),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
        cv2.circle(overlay, (centerX, centerY), 10, (0, 255, 0), -1)
    return(overlay)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,615
|
Klas96/YeastTrack
|
refs/heads/master
|
/Tracking/TrackedCell.py
|
import numpy as np
from Segmentation.cellInstance import cellInstance
class TrackedCell():
    """One cell tracked across video frames.

    Stores the chronological trace of cellInstance objects for this cell,
    along with the cell ID, the frame index where it was first detected,
    and an optional link to its mother cell.
    """

    def __init__(self, cellInst=-1, cellID=-1, detectionFrameNum=-1):
        # Chronological trace: one cellInstance per frame since detection.
        self.cellTrace = []
        self.cellTrace.append(cellInst)
        self.cellID = cellID
        self.detectionFrameNum = detectionFrameNum
        # Mother-cell ID (None until assigned) and lineage-match confidence.
        self.motherID = None
        self.relatabelityFactor = 0

    def update(self, cellInst=-1):
        """Record this frame's instance in the trace.

        If the cell was not detected this frame (sentinel -1), the last
        instance is carried forward.
        BUG FIX: the instance was previously appended twice per call,
        doubling the trace length relative to the number of frames.
        """
        if(cellInst == -1):
            cellInst = self.cellTrace[-1]
        self.cellTrace.append(cellInst)

    def setMotherCell(self, motherID, relatabelityFactor=-1):
        """Link this cell to its mother and record the match confidence."""
        self.motherID = motherID
        self.relatabelityFactor = relatabelityFactor

    def getMotherCell(self):
        return(self.motherID)

    def getRelatabelityFactor(self):
        return(self.relatabelityFactor)

    def getDetectionFrameNum(self):
        return(self.detectionFrameNum)

    def getContour(self, pos=-1):
        """Return the contour at frame number *pos* (default: latest).

        Frames before detection fall back to the first recorded instance;
        frames past the end of the trace fall back to the last one.
        """
        if(pos > 0 and pos > self.detectionFrameNum):
            pos = pos - self.detectionFrameNum
        # pos before the cell was detected: give the first instance.
        if(pos < self.detectionFrameNum and pos > 0):
            return(self.cellTrace[0].getContour())
        # pos after the cell disappeared: give the latest instance.
        if(pos >= len(self.cellTrace)):
            return(self.cellTrace[-1].getContour())
        return(self.cellTrace[pos].getContour())

    def getCellID(self):
        return(self.cellID)

    def getCentroid(self, pos=-1):
        """Return the centroid at frame number *pos* (default: latest),
        with the same out-of-range fallbacks as getContour()."""
        if(pos > 0 and pos > self.detectionFrameNum):
            pos = pos - self.detectionFrameNum
        if(pos < self.detectionFrameNum and pos > 0):
            return(self.cellTrace[0].getPosition())
        if(pos >= len(self.cellTrace)):
            return(self.cellTrace[-1].getPosition())
        return(self.cellTrace[pos].getPosition())

    def getSizesTrace(self):
        """Return the per-frame size trace since detection."""
        sizeTrace = []
        for cellInst in self.cellTrace:
            sizeTrace.append(cellInst.getSize())
        return(sizeTrace)

    def getSizesTraceFromBegining(self):
        """Return the size trace padded with zeros for the frames before
        this cell was detected."""
        sizeTrace = []
        for i in range(self.detectionFrameNum):
            sizeTrace.append(0)
        for cellInst in self.cellTrace:
            sizeTrace.append(cellInst.getSize())
        return(sizeTrace)

    def getWhi5Trace(self):
        """Return the per-frame WHI5 activity trace."""
        whi5Trace = []
        for cellInst in self.cellTrace:
            whi5Trace.append(cellInst.getWHI5Activity())
        return(whi5Trace)

    def getPosTrace(self):
        """Return parallel lists of per-frame x and y positions."""
        xPosTrace = []
        yPosTrace = []
        for cellInst in self.cellTrace:
            (xPos, yPos) = cellInst.getPosition()
            xPosTrace.append(xPos)
            yPosTrace.append(yPos)
        return(xPosTrace, yPosTrace)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,616
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/plotSize.py
|
from matplotlib import pyplot as plt
#from Anlysis.plotSize import plotTrackCellSizeBudToMother
from Anlysis.FitExponential import fitExponential
from Anlysis.FitExponential import plotDataWithExpo
from Anlysis.AddBudsToMother import addBudsToMother
from Anlysis.PlotTrackedCellsSize import plotTrackedCellsSize
#Pre1: List of number with cells to be ploted
#Pre2:
def plotTrackCellSizeBudToMother(cellToPlot, trackedCells):
    """Plot growth curves for selected mother cells, with each daughter's
    pre-division sizes folded back into its mother's trace.

    Pre1: cellToPlot   -- list of cell IDs to plot
    Pre2: trackedCells -- list of all tracked cells
    Side effect: shows a matplotlib figure.
    """
    for mother in trackedCells:
        cellID = mother.getCellID()
        if cellID not in cellToPlot:
            continue
        # Collect this mother's daughters and merge their sizes in.
        daughters = findDoughetCells(mother, trackedCells)
        combinedTrace = addBudsToMother(mother, daughters)
        plotTrackedCellsSize(daughters)
        plt.plot(range(len(combinedTrace)), combinedTrace,
                 label="ID " + str(cellID))
    plt.ylabel('Growth Curves')
    plt.xlabel('Time')
    plt.title("Size")
    plt.xticks([])
    plt.yticks([])
    plt.legend()
    plt.show()
def addBudToMother(mother, trackedCells, idOfBuds):
    """Return the mother's size trace with the listed buds' sizes added.

    Pre1: mother       -- tracked mother cell
    Pre2: trackedCells -- list of all tracked cells
    Pre3: idOfBuds     -- IDs of the bud cells to fold into the mother
    Ret:  the mother's size trace (list) with each selected bud's
          pre-division sizes added in.
    """
    motherCellTrace = mother.getSizesTrace()
    for trCell in trackedCells:
        cellID = trCell.getCellID()
        if cellID in idOfBuds:
            # BUG FIX: previously the mother *object* was passed (and the
            # accumulated trace discarded each iteration); addBudtoMother
            # expects and indexes into the trace list.
            motherCellTrace = addBudtoMother(motherCellTrace, trCell)
    return(motherCellTrace)
def findDoughetCells(mother, trackedCells):
    """Return every tracked cell whose recorded mother is *mother*.

    Pre1: mother       -- tracked mother cell
    Pre2: trackedCells -- list of all tracked cells
    Ret:  list of daughter cells (possibly empty).
    """
    motherID = mother.getCellID()
    return [cell for cell in trackedCells if cell.getMotherCell() == motherID]
def addBudtoMotherOOOLD(motherTrace, doughter):
    """Old bud-to-mother merge kept for reference; prefer addBudtoMother().

    NOTE(review): the computed division frame is immediately overridden by
    the hard-coded 157 (apparently a fixed total frame count) -- confirm
    before reuse.
    """
    deviInst = getDevisionInst(doughter)
    deviInst = 157
    doughterSizeTrace = doughter.getSizesTrace()
    # Align the daughter's trace to the mother's using the shared 157-frame
    # reference point.
    doughterDetectFrame = 157 - len(doughterSizeTrace)
    startIt = doughterDetectFrame - (157 - len(motherTrace))
    for i, budSize in enumerate(doughterSizeTrace):
        motherTrace[startIt + i] = motherTrace[startIt + i] + budSize
    return(motherTrace)
def addBudtoMother(motherTrace, doughter):
    """Add the daughter's pre-division sizes into motherTrace, walking
    backwards from the division frame.

    NOTE(review): at offset 0 the index -0 reads the FIRST element of the
    daughter trace while writing at the division frame -- this alignment
    looks suspicious; confirm the intended indexing.
    """
    deviInst = getDevisionInst(doughter)
    budTrace = doughter.getSizesTrace()[:deviInst]
    for offset in range(len(budTrace)):
        motherTrace[deviInst - offset] += budTrace[-offset]
    return(motherTrace)
#Returnsfirst Whi5 activation Index
def getDevisionInst(doughter):
    """Return the frame index of the daughter's first WHI5 activation.

    Activation is the first WHI5 value above 0.30; the index is offset by
    the daughter's detection frame. If the threshold is never crossed, the
    full trace length is returned.
    Side effect: on activation, draws a dashed vertical line at that frame
    on the current matplotlib axes.
    """
    threshold = 0.30
    index = 0
    for activity in doughter.getWhi5Trace():
        index += 1
        if activity > threshold:
            index += doughter.getDetectionFrameNum()
            plt.axvline(x=index, color='C1', linestyle='--')
            break
    return index
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,617
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/LoadData/getCropCoordinates.py
|
# Click positions collected by the mouse callback (module-level state).
posList = []


def onMouse(event, x, y, flags, param):
    """OpenCV mouse callback: record each left-button click position.

    NOTE(review): this module uses cv2 but never imports it -- add
    "import cv2" at the top of the file.
    """
    global posList
    if event == cv2.EVENT_LBUTTONDOWN:
        posList.append((x, y))
def getCropCoordinates(mats):
    """Display an image and hook up the click-collecting mouse callback.

    Shows mats[-2] in a window named "SelectCropPos" and registers onMouse so
    clicks accumulate in the module-level posList, then prints the list.

    NOTE(review): the original comment said "last image" but mats[-2] (second
    to last) is displayed — confirm which frame is intended.
    """
    windowName = "SelectCropPos"
    cv2.imshow(windowName, mats[-2])
    cv2.setMouseCallback(windowName, onMouse)
    print(posList)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,618
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/OstuBinarizartion.py
|
import cv2
import numpy as np
from Segmentation.cellInstance import cellInstance
from Segmentation.getWHI5Activity import getWHI5Activity
from Segmentation.FilterDetection import filterDetections
from Segmentation.getThreshold import getTherholdImage
from Segmentation.Rescaling import rescaleImage
from Segmentation.ConvexHull import convexHull
#OstuBinarization
#Pre: Frame As defined in main
#Ret: CellInstances in
def OtsuBinarization(frame):
    """Segment cells with Otsu thresholding and return filtered cell instances."""
    brightfield = frame.getOptImage()
    fluorescence = frame.getFloImage()
    # Build the mask at 10x scale, then shrink it back to the original size.
    brightfield = rescaleImage(brightfield, 10)
    mask = getMaskFrame(brightfield)
    mask = rescaleImage(mask, 0.1)
    # Fluorescence is sampled at native resolution.
    detections = conectedCompontents(mask, fluorescence)
    detections = filterDetections(detections)
    return detections
#Pre: VideoFrame
#Ret: White on black maskFrame
def getMaskFrame(img):
    """Binarize the image with Otsu's method and fill each blob's convex hull."""
    binary = otsuThreshold(img)
    return convexHull(binary)
"""
#Pre: takes An binary image
#Ret: Returns Image with conexHull filled of all wite separated images
def convexHull(img):
# Finding contours for the thresholded image
contours, hierarchy = cv2.findContours(img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
#im2, contours, hierarchy = cv2.findContours(frame, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# create hull array for convex hull points
hull = []
# calculate points for each contour
for i in range(len(contours)):
# creating convex hull object for each contour
hull.append(cv2.convexHull(contours[i], False))
#Create an empty black image
img = np.zeros((img.shape[0], img.shape[1]), np.uint8)
for i in range(len(contours)):
img = cv2.fillPoly(img, pts =[hull[i]], color=(255))
return(img)
"""
def conectedCompontents(img, floFrame):
    """Turn each contour in a binary mask into a cellInstance.

    WHI5 activity for every contour is measured against the fluorescence
    frame before the instance is created.
    """
    contours, _hierarchy = cv2.findContours(
        img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
    return [cellInstance(cnt, getWHI5Activity(cnt, floFrame))
            for cnt in contours]
def otsuThreshold(img):
    """Binarize the image using Otsu's automatically selected threshold."""
    #apply thresholding; the 0 threshold is ignored when THRESH_OTSU is set
    _retval, binary = cv2.threshold(
        img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    return binary
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,619
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/IncreasIntesity.py
|
import numpy as np
import cv2
#Pre: frame
#ret: Frame with higer intesity
def incFloIntens(img, intens):
    """Brighten a fluorescence image by an integer gain with saturation.

    The slider value ``intens`` is divided by 10 to get an integer gain.
    The image is multiplied by that gain, clipped at 255, and returned as
    uint8. A gain of 0 yields an all-black image, as before.

    Replaces the old loop of `gain` saturating cv2.add() calls: repeated
    saturating addition of a non-negative uint8 image equals a clipped
    multiply, so one vectorized pass gives the same result. Unlike the old
    version it no longer assumes a 3-channel input.
    """
    gain = int(intens / 10)
    boosted = np.clip(img.astype(np.uint32) * gain, 0, 255)
    return boosted.astype(np.uint8)
#Merge
def increasIntens(img, currentBlend):
    """Thin wrapper: boost fluorescence intensity using the blend slider value."""
    return incFloIntens(img, currentBlend)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,620
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/FilterDetection.py
|
import cv2
import numpy as np
#Pre: detections
#Ret: Filtered Detections
def filterDetections(cellInstances):
    """Drop detections whose size is implausible for a cell.

    Keeps only instances whose getSize() is strictly between minSize and
    maxSize (both bounds exclusive, matching the original comparisons).
    Returns a new list; the input is not modified.
    """
    maxSize = 210
    minSize = 15
    # Comprehension with a chained comparison replaces the append loop.
    return [cell for cell in cellInstances
            if minSize < cell.getSize() < maxSize]
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,621
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/ThersholdingSegmentation.py
|
from Segmentation.Preprocessing import preprocess
from Segmentation.Preprocessing import preprocessFloImg
import cv2
from matplotlib import pyplot as plt
import numpy as np
from Segmentation.cellInstance import cellInstance
from Segmentation.getWHI5Activity import getWHI5Activity
from Segmentation.FilterDetection import filterDetections
from Segmentation.ConvexHull import convexHull
from Segmentation.ConectedComponents import conectedCompontents
#Pre: Frame
#Ret: CellInstances
def segementThreshold(frame):
    """Segment cells by intersecting an edge mask with a fluorescence mask."""
    brightfield = frame.getOptImage()
    fluorescence = frame.getFloImage()
    # Per-channel preprocessing.
    brightfield = preprocess(brightfield)
    fluorescence = preprocessFloImg(fluorescence)
    # Edge threshold, then fill each blob via its convex hull.
    edgeMask = thesholdEdges(brightfield)
    edgeMask = convexHull(edgeMask)
    # Independent threshold on the fluorescence channel.
    floMask = thesholdFlorecense(fluorescence)
    # Keep only pixels that are bright in BOTH masks.
    combined = cv2.bitwise_and(edgeMask, floMask)
    detections = conectedCompontents(combined, fluorescence)
    detections = filterDetections(detections)
    return detections
#Pre: image of cells with clear edges
#Ret: Binary image Edges White not edge black
def thesholdEdges(img):
    """Fixed binary threshold that keeps bright edge pixels (>= 85) white."""
    lower = 85
    upper = 255
    _retval, mask = cv2.threshold(img, lower, upper, cv2.THRESH_BINARY)
    return mask
def thresholdGray(img):
    """Threshold the gray image, erode, then blank out the largest blob.

    NOTE: opens a debug window and blocks on a key press (cv2.waitKey(0)).
    """
    lower = 65
    upper = 255
    _retval, mask = cv2.threshold(img, lower, upper, cv2.THRESH_BINARY)
    kernel = np.ones((2, 2), np.uint8)
    mask = cv2.erode(mask, kernel, iterations=1)
    cv2.imshow("img", mask)
    cv2.waitKey(0)
    # Find the largest contour and fill it with black to remove it.
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    biggest = max(contours, key=cv2.contourArea)
    cv2.drawContours(mask, [biggest], -1, 0, cv2.FILLED)
    return mask
def thesholdFlorecense(img):
    """Fixed binary threshold for the fluorescence channel (>= 20 kept white)."""
    lower = 20
    upper = 255
    _retval, mask = cv2.threshold(img, lower, upper, cv2.THRESH_BINARY)
    return mask
#Pre: Binary Image
#Ret: Binary img with convex hull
def cellInstasConvexHull(img, floImg):
    """Build a cellInstance from the convex hull of every contour in img.

    WHI5 activity is measured on the hull against the fluorescence image.
    """
    contours, _hierarchy = cv2.findContours(
        img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    instances = []
    for contour in contours:
        hull = cv2.convexHull(contour, False)
        instances.append(cellInstance(hull, getWHI5Activity(hull, floImg)))
    return instances
def conectedCompontents(binImg,floImg):
    """Build a cellInstance (contour + WHI5 activity) for each blob in binImg."""
    # NOTE(review): this local definition shadows the `conectedCompontents`
    # imported from Segmentation.ConectedComponents at the top of this file;
    # calls inside this module resolve here. Confirm the duplication is intended.
    conectedCompontents, hirearchy = cv2.findContours(binImg, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
    cellInstanses = []
    for cnt in conectedCompontents:
        # Measure fluorescence activity inside this contour.
        whi5Activ = getWHI5Activity(cnt,floImg)
        cellInstans = cellInstance(cnt,whi5Activ)
        cellInstanses.append(cellInstans)
    return(cellInstanses)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,622
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/LoadData/ImportThreeZoomLevel.py
|
from UserInterface.videoClass import Video
import cv2
def loadThreeZoomLevel():
    """Open the three optical zoom-level captures plus the fluorescence capture.

    Paths are hard-coded to the tileScan2 dataset; returns a Video built from
    the four cv2.VideoCapture objects in zoom-0, zoom-1, zoom-2, fluo order.
    """
    paths = [
        "VideoData/tileScan2/tileScan2OptZ0.avi",
        "VideoData/tileScan2/tileScan2OptZ1.avi",
        "VideoData/tileScan2/tileScan2OptZ2.avi",
        "VideoData/tileScan2/tileScan2Flo.avi",
    ]
    captures = [cv2.VideoCapture(p) for p in paths]
    return Video(*captures)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,623
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/LaplacianGausian.py
|
import cv2
from Segmentation.cellInstance import cellInstance
import numpy as np
from Segmentation.getWHI5Activity import getWHI5Activity
from Segmentation.FilterDetection import filterDetections
#from frameClass import rescale_frame
#LAP MEthoth for segmentation of yeast cells.
def laplacianGausian(frame):
    """Experimental Laplacian-of-Gaussian visualization for yeast cells.

    Blurs the optical channel, displays it (blocking on a key press), applies
    a Laplacian and displays that too. Purely exploratory: it always returns
    an empty detection list, so callers get no cell instances from this path.
    """
    optFrame = frame.getOptChan()
    floFrame = frame.getFloChan()  # NOTE(review): fetched but never used below
    kernelSize = 3;  # Laplacian aperture
    scale = 1;  # unused
    delta = 0;  # unused
    ddepth = cv2.CV_16S;  # signed 16-bit output keeps negative Laplacian values
    gaussian = cv2.GaussianBlur(optFrame, (3, 3), 0)
    gaussianShow = rescale_frame(gaussian,1000)
    cv2.imshow("gaussian", gaussianShow)
    cv2.waitKey(0)
    #cv2.imwrite("gaussian", gaussianShow)
    # Convert to gray before the Laplacian (blur was done on the color image).
    gaussian = cv2.cvtColor(gaussian, cv2.COLOR_BGR2GRAY)
    laplacian = cv2.Laplacian(gaussian, ddepth, ksize=kernelSize)
    laplacian = cv2.convertScaleAbs(laplacian)
    laplacianShow = rescale_frame(laplacian,1000)
    cv2.imshow("Laplacian", laplacianShow)
    cv2.waitKey(0)
    #cv2.imwrite("Laplacian", laplacianShow)
    return([])
def rescale_frame(frame, percent=75):
    """Resize `frame` to `percent` percent of its original width and height."""
    new_w = int(frame.shape[1] * percent / 100)
    new_h = int(frame.shape[0] * percent / 100)
    return cv2.resize(frame, (new_w, new_h), interpolation=cv2.INTER_AREA)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,624
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/LoadData/LoadtifFile.py
|
from UserInterface.videoClass import Video
import cv2
def imortTiftoVideo(filePath):
    """Load a multi-page TIFF (8 pages per time point) into a Video.

    For each group of 8 pages, only page index 3 (optical) and page index 5
    (fluorescence) are kept; each frame is [[optical], [fluorescence]].
    """
    numChan = 2        # unused, kept for documentation of the layout
    numZoomLevles = 4  # unused, kept for documentation of the layout
    _retval, pages = cv2.imreadmulti(filePath)
    #TODO Generalize the fixed 8-pages-per-frame layout
    allFrames = []
    for start in range(0, len(pages), 8):
        optChan = [pages[start + 3]]
        floChan = [pages[start + 5]]
        allFrames.append([optChan, floChan])
    video = Video(allFrames)
    # Drop the raw page list; the kept pages stay referenced via allFrames.
    del pages
    return video
def imortTiftoVideoNew(filePath):
    """Load a multi-page TIFF with two pages (optical, fluorescence) per frame."""
    numChan = 2        # unused
    numZoomLevles = 4  # unused
    _retval, pages = cv2.imreadmulti(filePath)
    #TODO Generalize
    allFrames = [[pages[i], pages[i + 1]]
                 for i in range(0, len(pages) - 1, 2)]
    video = Video(allFrames)
    # Drop the raw page list; kept pages stay referenced via allFrames.
    del pages
    return video
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,625
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/LoadData/ConvertLiftoTif.py
|
import os
def convertLifToTif(inPath, OutPath):
    """Convert a .lif microscopy file to .tif with the Bio-Formats bfconvert tool.

    Extracts series 3 from *inPath*, cropped to the 512x512 region at the
    origin, and writes the result to *OutPath*.  Shells out to external
    commands; requires ./bftools/bfconvert to be present on disk.

    inPath: path of the input .lif file.
    OutPath: path of the output .tif file.
    """
    # Clear leftovers from any previous conversion run.
    os.system("rm ./VideoData/WorkingData/*")
    series = 3
    # Crop rectangle passed to bfconvert's -crop flag as x,y,w,h
    # (NOTE(review): variable names suggest corner coordinates -- confirm
    # against the bfconvert documentation).
    ulx, uly = (0, 0)
    drx, dry = (512, 512)
    # -nolookup: convert without applying lookup tables.
    # NOTE(review): the original also built -channel and -z flags but never
    # included them in the executed command; they are omitted here.
    cmd = (
        "./bftools/bfconvert -nolookup"
        + " -series " + str(series)
        + " -crop " + str(ulx) + "," + str(uly) + "," + str(drx) + "," + str(dry)
        + " " + inPath
        + " " + OutPath
    )
    os.system(cmd)
def convertLifToTifNew(inPath, OutPath):
    """Convert one zoom level of a .lif file to .tif via bfconvert.

    Like convertLifToTif, but restricts the conversion to z-plane 3 and a
    small 10x10 crop at the origin.  Shells out to external commands;
    requires ./bftools/bfconvert on disk.

    inPath: path of the input .lif file.
    OutPath: path of the output .tif file.
    """
    # Clear leftovers from any previous conversion run.
    os.system("rm ./VideoData/WorkingData/*")
    series = 3
    zoomLevel = 3
    # Crop rectangle passed to bfconvert's -crop flag as x,y,w,h
    # (NOTE(review): variable names suggest corner coordinates -- confirm
    # against the bfconvert documentation).
    ulx, uly = (0, 0)
    drx, dry = (10, 10)
    # -nolookup: convert without applying lookup tables.
    # NOTE(review): the original built a -channel flag but never included
    # it in the executed command; it is omitted here.
    cmd = (
        "./bftools/bfconvert -nolookup"
        + " -series " + str(series)
        + " -crop " + str(ulx) + "," + str(uly) + "," + str(drx) + "," + str(dry)
        + " -z " + str(zoomLevel)
        + " " + inPath
        + " " + OutPath
    )
    os.system(cmd)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,626
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/LoadData/LoadData.py
|
import cv2
import numpy as np
from matplotlib import pyplot as plt
import os
from UserInterface.videoClass import Video
#from Tkinter import Tk
#from tkinter.filedialog import askopenfilename
#Displays the OME-XML metadata for a file on the console:
#showinf -omexml /path/to/file
#showinf -nopix /path/to/file
#os.popen('cat /etc/services').read()
def getVideo():
    """Top-level loading pipeline: select a file, convert it, and import it.

    Bug fix: the original referenced undefined names ``lifFilePath`` and
    ``tifFilePath`` (NameError at runtime) and never used the selected
    ``filePath``; the paths are now derived from the user's selection.
    NOTE(review): ``convertLifToTif`` and ``imortTiftoVideo`` are not
    imported in this module -- confirm where they come from before use.
    """
    filePath = getFilePath()
    series = choseSeries()
    cropUppLeft, cropDownRight = cropStage(filePath)
    # If the selected file is a .lif, convert it to a working .tif first.
    tifFilePath = "./VideoData/WorkingData/working.tif"
    convertLifToTif(filePath, tifFilePath)
    video = imortTiftoVideo(tifFilePath)
    return(video)
def getFilePath():
    """Return the path of the input video file.

    Interactive file selection (Tkinter dialog) is currently disabled;
    a fixed experiment path is returned instead.
    """
    return (
        "/home/klas/Documents/Chalmers/ExamensArbete/YeastTrack/"
        "VideoData/Experiment13h_050619/Experiment13h_050619.lif"
    )
# Asks which series should be loaded; currently fixed at series 3.
def choseSeries():
    """Return the series number to load (hard-coded to 3 for now)."""
    print("What series would you like to load")
    return 3
# Lets the user crop the video; currently returns a fixed rectangle.
def cropStage(filePath):
    """Return a crop rectangle as (upper-left, lower-right) corner tuples."""
    return (100, 100), (200, 200)
def loadData(filePath, series, cropUppLeft=-1, cropDownRight=-1):
    """Extract every channel and zoom level of one series from a .lif file.

    For each of 2 channels x 4 zoom levels, shells out to bfconvert to
    write a working .tif, then reads its frames back with OpenCV.

    Returns: matList[channel][zoomLevel] -> list of frame images.
    NOTE(review): cropUppLeft/cropDownRight are accepted but unused; the
    crop is hard-coded to 0,0,512,512.
    """
    matList = []
    numZoomIn = 4
    numChan = 2
    workingTif = "./YeastTrack/VideoData/WorkingData/working.tif"
    for channel in range(numChan):
        # List containing all zoom-in levels for this channel.
        zoomList = []
        for zoomLevel in range(numZoomIn):
            # Clear previous working data before each conversion.
            os.system("rm ./YeastTrack/VideoData/WorkingData/*")
            # Bug fix: the original re-bound ``filePath = " " + filePath``
            # on every iteration, prepending an extra space each pass;
            # build the command without mutating the parameter.
            cmd = (
                "./bftools/bfconvert -nolookup"
                + " -series " + str(series)
                + " -crop 0,0,512,512"
                + " -channel " + str(channel)
                + " -z " + str(zoomLevel)
                + " " + filePath
                + " " + workingTif
            )
            os.system(cmd)
            retval, mats = cv2.imreadmulti(workingTif)
            zoomList.append(mats)
        matList.append(zoomList)
    return(matList)
def skrap():
    """Debug helper: display every frame of the working .tif one at a time."""
    path = "/home/klas/Documents/Chalmers/ExamensArbete/YeastTrack/VideoData/WorkingData/working.tif"
    retval, mats = cv2.imreadmulti(path)
    for frame in mats:
        cv2.imshow("Funka", frame)
        cv2.waitKey()
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,627
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/VisulizeLinage.py
|
from matplotlib import pyplot as plt
from scipy.cluster import hierarchy
import numpy as np
import networkx as nx
from networkx.drawing.nx_agraph import graphviz_layout
import matplotlib.pyplot as plt
from Anlysis.visulizeLinNetworkX import plotNxTree
#import PyQt5
#from ete3 import Tree
#'from ete3 import TreeStyle
# from igraph import *;
from networkx.drawing.nx_agraph import graphviz_layout
# Plots the lineage tree of the tracked cells.
def PlotLinageTree(trackedCells):
    """Draw the cell lineage as a directed tree using a graphviz layout.

    Nodes are cell IDs; each edge mother -> daughter is labelled with the
    relatability factor rounded to two decimals.  Cells without a known
    mother are attached to the placeholder node "-1".

    trackedCells: list of tracked-cell objects exposing getCellID(),
        getMotherCell() and getRelatabelityFactor().
    """
    G = nx.DiGraph()
    # One node per tracked cell, labelled by its ID.
    for trCell in trackedCells:
        G.add_node(str(trCell.getCellID()))
    # One edge per mother/daughter relation.
    for trCell in trackedCells:
        motherID = trCell.getMotherCell()
        if motherID is None:  # idiom fix: was ``== None``
            motherID = -1
        relFactor = trCell.getRelatabelityFactor()
        G.add_edge(str(motherID), str(trCell.getCellID()),
                   object=str(round(relFactor, 2)))
    pos = graphviz_layout(G, prog='dot')
    nx.draw(G, pos, with_labels=True, arrows=True)
    edge_labels = nx.get_edge_attributes(G, 'object')
    nx.draw_networkx_edge_labels(G, pos=pos, edge_labels=edge_labels)
    plt.show()
def PlotLinageTreeOLD(trackedCells):
    """Legacy demo: draw a fixed three-generation tree and save it as PNG.

    The *trackedCells* argument is accepted but not used; the tree is
    synthetic.  Writes 'test.dot' and 'nx_test.png' to the working dir.
    """
    G = nx.DiGraph()
    G.add_node("ROOT")
    for i in range(5):
        child = "Child_%i" % i
        grandchild = "Grandchild_%i" % i
        greatgrandchild = "Greatgrandchild_%i" % i
        for node in (child, grandchild, greatgrandchild):
            G.add_node(node)
        G.add_edge("ROOT", child)
        G.add_edge(child, grandchild)
        G.add_edge(grandchild, greatgrandchild)
    # Dot file usable with graphviz: "dot -Tpng test.dot >test.png"
    nx.nx_agraph.write_dot(G, 'test.dot')
    # Same layout rendered with matplotlib, without labels.
    plt.title('draw_networkx')
    pos = graphviz_layout(G, prog='dot')
    nx.draw(G, pos, with_labels=False, arrows=False)
    plt.savefig('nx_test.png')
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,628
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/PlotTrackedCellsSize.py
|
from matplotlib import pyplot as plt
from Anlysis.getDevisionFrameNum import getDevisionFrameNum
def plotTrackedCellsSize(trackedCells):
    """Plot each tracked cell's size trace up to its division frame."""
    for cell in trackedCells:
        divisionFrame = getDevisionFrameNum(cell)
        sizes = cell.getSizesTraceFromBegining()[:divisionFrame]
        plt.plot(range(len(sizes)), sizes)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,629
|
Klas96/YeastTrack
|
refs/heads/master
|
/Analysis/plotFunctions.py
|
from matplotlib import pyplot as plt
from Anlysis.plotSize import plotTrackCellSizeBudToMother
from Anlysis.VisulizeLinage import PlotLinageTree
def plotFunction(trackedCells):
    """Entry point for plotting: plot Whi5 traces for every tracked cell."""
    allCells = range(len(trackedCells))
    plotTrackCellWhi5(allCells, trackedCells)
def plotSizeLineage(cellID, trackedCells):
    """Plot size and Whi5 traces for *cellID* and its direct daughters.

    cellID: ID number of the mother cell.
    trackedCells: list of tracked cells searched for daughters.
    """
    cellsInLinage = [cellID]
    for trackCell in trackedCells:
        if trackCell.getMotherCell() == cellID:
            cellsInLinage.append(trackCell.getCellID())
    # Bug fix: a debugging leftover (``cellsInLinage = [0, 6, 13]``)
    # overwrote the computed lineage; the computed list is now used.
    plotTrackCellSize(cellsInLinage, trackedCells)
    plotTrackCellWhi5(cellsInLinage, trackedCells)
def plotTrackCellSize(cellToPlot, trackedCells):
    """Plot the size trace of every tracked cell whose ID is in *cellToPlot*.

    Each trace starts at the cell's detection frame so the x-axis aligns
    across cells discovered at different times.
    """
    for trackedCell in trackedCells:
        cellID = trackedCell.getCellID()
        # Idiom fix: plain membership test instead of ``any(cellID == i ...)``.
        if cellID in cellToPlot:
            sizeTrace = trackedCell.getSizesTrace()  # local was misnamed whi5Trace
            discoveryFrame = trackedCell.getDetectionFrameNum()
            plt.plot(range(discoveryFrame, discoveryFrame + len(sizeTrace)),
                     sizeTrace, label="ID " + str(cellID))
    plt.ylabel('Growth Curves')
    plt.xlabel('Time')
    plt.title("Size")
    plt.xticks([])
    plt.yticks([])
    plt.legend()
    plt.show()
def plotTrackCellWhi5(cellToPlot, trackedCells):
    """Plot the Whi5 activity trace of every cell whose ID is in *cellToPlot*.

    Each trace starts at the cell's detection frame so the x-axis aligns
    across cells discovered at different times.
    """
    for trackedCell in trackedCells:
        cellID = trackedCell.getCellID()
        # Idiom fix: plain membership test instead of ``any(cellID == i ...)``.
        if cellID in cellToPlot:
            whi5Trace = trackedCell.getWhi5Trace()
            discoveryFrame = trackedCell.getDetectionFrameNum()
            plt.plot(range(discoveryFrame, discoveryFrame + len(whi5Trace)),
                     whi5Trace, label="ID " + str(cellID))
    plt.ylabel('Whi5 Activity')
    plt.xlabel('Time')
    plt.title("Whi5 Activity")
    plt.xticks([])
    plt.yticks([])
    plt.legend()
    plt.show()
def plotPositions(cellToPlot, trackedCells):
    """Plot the (x, y) position trace of every cell whose ID is in *cellToPlot*."""
    for trackedCell in trackedCells:
        cellID = trackedCell.getCellID()
        # Idiom fix: plain membership test instead of ``any(cellID == i ...)``.
        if cellID in cellToPlot:
            xPosTrace, yPosTrace = trackedCell.getPosTrace()
            plt.plot(xPosTrace, yPosTrace)
    plt.ylabel('y Position')
    plt.xlabel('x Position')
    plt.title("Position Trace")
    plt.xticks([])
    plt.yticks([])
    # NOTE(review): no artist carries a label here, so legend() renders
    # empty and may warn -- consider restoring a per-cell label.
    plt.legend()
    plt.show()
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,630
|
Klas96/YeastTrack
|
refs/heads/master
|
/Tracking/GetPositionFromContour.py
|
import cv2
def getPositionFromContour(contour):
    """Return the centroid (cx, cy) of a contour via its image moments."""
    # TODO: consider switching to dedicated helper functions instead.
    m = cv2.moments(contour)
    cx = int(m['m10'] / m['m00'])
    cy = int(m['m01'] / m['m00'])
    return (cx, cy)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,631
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/frameClass.py
|
from Segmentation.cellInstance import cellInstance
import cv2
import numpy as np
from Tracking.centroidTracker import CentroidTracker
from Segmentation.OstuBinarizartion import OtsuBinarization
from Segmentation.watershed import watershed
from Segmentation.cellInstance import cellInstance
from Segmentation.LaplacianGausian import laplacianGausian
from Segmentation.ThersholdingSegmentation import segementThreshold
from Segmentation.RandomForestSegmentaion import rfSegmentetion
from UserInterface.getInstantSegmentImage import getCellInstImage
from UserInterface.rescaleImageToUser import rescaleImageToUser
class Frame:
    """One time-point of the video: an optical image plus a fluorescence
    image, segmented into cell instances on construction.
    """

    def __init__(self, optImage, floImage, frameNum=-1):
        """Store the two channel images and immediately segment the frame.

        optImage: optical image for this frame.
        floImage: fluorescence image for this frame.
        frameNum: index of this frame in the video (-1 if unknown).
        """
        # TODO: load as gray images.
        self.optImg = optImage
        self.floImg = floImage
        self.frameNum = frameNum
        self.xSz = self.optImg.shape[0]
        self.ySz = self.optImg.shape[1]
        self.scaling = 1000
        # TODO: make these factors of ``scaling``.
        self.pixelToMiccron = 1000
        self.classFrame = 0
        self.idFrame = 0
        self.analyseFrame()

    def addZoomLevels(self, zom0Img, zom1Img):
        """Attach the two additional zoom-level optical images."""
        self.optImgZom0 = zom0Img
        self.optImgZom1 = zom1Img

    # --- getters ------------------------------------------------------
    def getOptImage(self):
        return(self.optImg)

    def getFloImage(self):
        return(self.floImg)

    def getZoom0Image(self):
        return(self.optImgZom0)

    def getZoom1Image(self):
        return(self.optImgZom1)

    def getFrameNum(self):
        return(self.frameNum)

    def getUserOptImage(self):
        """Return the optical image as a 3-channel, user-scaled image."""
        img = self.getOptImage()
        # Grey rendering: the same data in all three channels.
        userImg = cv2.merge([img, img, img])
        return(rescaleImageToUser(userImg))

    def getUserFloImage(self):
        """Return the fluorescence image rendered in the green channel only."""
        img = self.getFloImage()
        zeros = np.zeros(img.shape, np.uint8)
        userImg = cv2.merge([zeros, img, zeros])
        return(rescaleImageToUser(userImg))

    def getClassificationImage(self):
        # NOTE(review): ``classImg`` is never assigned in this class --
        # presumably set externally; confirm against callers.
        return(self.classImg)

    def getWHI5ActivImage(self):
        """Return a binary image illustrating Whi5 activation (threshold)."""
        # Whi5 detection threshold as a fraction of the 8-bit full scale.
        threshold = 0.30
        gray = self.getUserFloImage()
        gotFrame, thresh = cv2.threshold(gray, int(255 * threshold), 255,
                                         cv2.THRESH_BINARY)
        return(thresh)

    def getCellInstancesImage(self):
        """Return an image illustrating the segmented cell instances."""
        # Bug fix: original called undefined ``getCellInstImg``; the
        # imported helper is ``getCellInstImage``.
        return(getCellInstImage(self.cellInstanses))

    def getIDImage(self):
        # NOTE(review): ``idImg`` is never assigned in this class --
        # presumably set externally; confirm against callers.
        return(self.idImg)

    def showFrame(self):
        """Debug helper: display both channel images until a key is pressed."""
        # Bug fix: original read ``self.optImage``/``self.floImage`` which
        # do not exist; the stored attributes are ``optImg``/``floImg``.
        cv2.imshow("optImage", self.optImg)
        cv2.imshow("floImage", self.floImg)
        cv2.waitKey(0)

    def analyseFrame(self):
        """Segment this frame using the currently selected method."""
        self.cellInstanses = OtsuBinarization(self)
        # Alternative segmentation back-ends (disabled):
        #   segementThreshold / rfSegmentetion / watershed / laplacianGausian
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,632
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/getThreshold.py
|
import cv2
from Segmentation.FilterDetection import filterDetections
import numpy as np
#Pre: Frame object exposing getScaledOptChan()
#Ret: binary image of the optical channel with the background blob removed
def getTherholdImage(frame):
    """Binarize the optical channel of *frame* and strip oversized components.

    Blur -> grayscale -> fixed threshold at 50 -> drop large connected
    components (background).
    (Fix: removed the unused local from frame.getScaledFloChan(); the
    fluorescence channel was fetched but never read — assumes the getter is
    a pure accessor with no side effects, TODO confirm.)
    """
    optChan = frame.getScaledOptChan()
    blurred = cv2.GaussianBlur(optChan, (3, 3), 0)
    grayscale = cv2.cvtColor(blurred, cv2.COLOR_BGR2GRAY)
    _, binary = cv2.threshold(grayscale, 50, 255, cv2.THRESH_BINARY)
    #Remove background
    return removeLargestConected(binary)
def removeLargestConected(image):
    """Redraw only the small-enough contours of a binary image.

    Despite the name, every contour whose area is below ten nominal cell
    sizes is kept (filled white on a fresh 3-channel canvas); anything
    larger — typically the background blob — is discarded.
    """
    contours, hierarchy = cv2.findContours(image, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
    canvas = np.zeros((image.shape[0], image.shape[1], 3), np.uint8)
    cellSize = 4500
    sizeThreshold = cellSize * 10  # ten times the nominal single-cell area
    for contour in contours:
        if cv2.contourArea(contour) < sizeThreshold:
            canvas = cv2.fillPoly(canvas, pts=[contour], color=(255, 255, 255))
    return canvas
def convexHull(frame):
    """Fill the convex hull of every contour found in a binary image.

    Returns a 3-channel canvas on which contour outlines (green) and hull
    outlines (blue) are drawn first, then each hull is filled solid white —
    the fills intentionally overwrite the outlines, matching the original
    drawing order.
    """
    contours, hierarchy = cv2.findContours(frame, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # One convex hull per contour, in the same order as `contours`
    hulls = [cv2.convexHull(contour, False) for contour in contours]
    canvas = np.zeros((frame.shape[0], frame.shape[1], 3), np.uint8)
    outlineColor = (0, 255, 0)  # green - contours
    hullColor = (255, 0, 0)     # blue - convex hulls
    for idx in range(len(contours)):
        cv2.drawContours(canvas, contours, idx, outlineColor, 1, 8, hierarchy)
        cv2.drawContours(canvas, hulls, idx, hullColor, 1, 8)
    # Fills go last so they cover the outlines, exactly as before
    for hull in hulls:
        canvas = cv2.fillPoly(canvas, pts=[hull], color=(255, 255, 255))
    return canvas
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,633
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/Rescaling.py
|
import cv2
#Rescale an image to a fraction of its original size for analysis.
def rescaleImage(img, portion):
    """Resize *img* by the given fraction using area interpolation."""
    newSize = (int(img.shape[1] * portion), int(img.shape[0] * portion))
    return cv2.resize(img, newSize, interpolation=cv2.INTER_AREA)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,634
|
Klas96/YeastTrack
|
refs/heads/master
|
/Tracking/filterTracking.py
|
#Pre: List of trackedCells
#Ret: Filtered list of trackedCells
def filterTrackedCells(trackedCells):
    """Apply the observation-length filter, then the mean-size filter."""
    kept = filterByOpserLen(trackedCells)
    kept = filterByMeanSize(kept)
    return kept
def filterByOpserLen(trackedCells):
    """Keep only cells observed for more than a minimum number of frames."""
    observationThreshold = 10  # minimum number of recorded size samples
    return [cell for cell in trackedCells
            if len(cell.getSizesTrace()) > observationThreshold]
def filterByMeanSize(trackedCells):
    """Keep only cells whose mean observed size exceeds 20% of nominal size.

    Pre: list of tracked cells exposing getSizesTrace()
    Ret: filtered list of tracked cells
    """
    filterdList = []
    #Filter by mean size
    cellSize = 4500
    cellThreshold = 0.2 * cellSize
    for tracked in trackedCells:
        sizes = tracked.getSizesTrace()
        # Guard: a cell with no recorded sizes has no defined mean — drop it.
        # (The original raised ZeroDivisionError on an empty trace.)
        if not sizes:
            continue
        meanSizeCell = sum(sizes) / len(sizes)
        if meanSizeCell > cellThreshold:
            filterdList.append(tracked)
    return filterdList
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,635
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/ParmeterizeImagegs.py
|
import numpy as np
import cv2
import pandas as pd
from scipy import ndimage as nd
import gc
def _flatConcat(images, transform=None):
    """Flatten each (optionally transformed) image and concatenate to one 1-D array."""
    flats = []
    for img in images:
        out = img if transform is None else transform(img)
        flats.append(out.reshape(-1))
    result = np.concatenate(flats, axis=0)
    del flats
    gc.collect()
    return result


def imagesToPrameter(optImgArr, floImgArr, maskImgArr=None):
    """Build a per-pixel feature DataFrame from paired optical/fluorescence images.

    For every image pair, the raw pixel values plus median-filter, local-
    variance and histogram-equalized features are stacked as columns
    (column names and order unchanged from the original). If maskImgArr is
    given and non-empty, the flattened masks are added as a 'Labels' column.

    Fixes: maskImgArr previously defaulted to a mutable [] (shared default
    argument anti-pattern); the five copy-pasted build loops are collapsed
    into one helper.
    """
    df = pd.DataFrame()
    # Raw pixel values
    df['optImg'] = _flatConcat(optImgArr)
    df['floImg'] = _flatConcat(floImgArr)
    # Median filters, window sizes 3 and 1
    df['MedS3C0Z3'] = _flatConcat(optImgArr, lambda im: nd.median_filter(im, size=3))
    df['MedS3C1Z2'] = _flatConcat(floImgArr, lambda im: nd.median_filter(im, size=3))
    df['MedS1C0Z3'] = _flatConcat(optImgArr, lambda im: nd.median_filter(im, size=1))
    df['MedS1C1Z2'] = _flatConcat(floImgArr, lambda im: nd.median_filter(im, size=1))
    # Local variance, window sizes 3 and 1
    df['varS3C0Z3'] = _flatConcat(optImgArr, lambda im: nd.generic_filter(im, np.var, size=3))
    df['varS3C1Z2'] = _flatConcat(floImgArr, lambda im: nd.generic_filter(im, np.var, size=3))
    df['varS1C0Z3'] = _flatConcat(optImgArr, lambda im: nd.generic_filter(im, np.var, size=1))
    df['varS1C1Z2'] = _flatConcat(floImgArr, lambda im: nd.generic_filter(im, np.var, size=1))
    # Histogram equalization
    df['histES1C0Z3'] = _flatConcat(optImgArr, cv2.equalizeHist)
    df['histES1C1Z2'] = _flatConcat(floImgArr, cv2.equalizeHist)
    # Training labels, when masks are supplied
    if maskImgArr:
        print("loading Labels")
        df['Labels'] = _flatConcat(maskImgArr)
    return df
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,636
|
Klas96/YeastTrack
|
refs/heads/master
|
/Segmentation/Denoising.py
|
#TODO
#Write Method for denoising
def denoiseImage(img):
    """Placeholder: denoise *img*. Not implemented yet; returns None."""
    return None
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,637
|
Klas96/YeastTrack
|
refs/heads/master
|
/UserInterface/videoClass.py
|
from UserInterface.frameClass import Frame
#from Segmentation.cellInstance import cellInstance
import cv2
import numpy as np
from Tracking.centroidTracker import CentroidTracker
from UserInterface.getIDImage import getIDImage
from UserInterface.getClassImage import getClassImage
from Tracking.findLineage import findLineage
from Tracking.filterTracking import filterTrackedCells
class Video:
    """A loaded recording: per-frame data plus tracking results.

    Frames are stored as Frame objects; tracking links cell instances
    across frames via a CentroidTracker.
    """
    # Class-level defaults kept for backward compatibility with code that
    # reads them off the class. NOTE: the original `frames = []` here was a
    # latent bug — a mutable class attribute shared by every Video instance,
    # so a second Video appended its frames into the first one's list.
    # __init__ now rebinds `frames` per instance.
    frames = []
    tracker = 0
    numFloFrames = 0
    maxDisappeared = 50
    #Constructor helpers
    def vidCapInit(self, optImgCap, floImgCap):
        """Load grayscale frames from optical + fluorescence video captures."""
        self.numZoom = 1
        self.numVidFrames = int(optImgCap.get(cv2.CAP_PROP_FRAME_COUNT))
        # NOTE(review): the original read this count from optImgCap as well,
        # which looks like a copy-paste slip (threeZoomInit reads it from the
        # fluorescence capture) — confirm both videos have equal length.
        self.numFloFrames = int(floImgCap.get(cv2.CAP_PROP_FRAME_COUNT))
        self.tracker = CentroidTracker()
        for i in range(self.numVidFrames):
            print("loading Frame " + str(i))
            #Read Images
            hasFrame, optImg = optImgCap.read()
            hasFrame, floImg = floImgCap.read()
            #Convert Images to grayscale
            optImg = cv2.cvtColor(optImg, cv2.COLOR_BGR2GRAY)
            floImg = cv2.cvtColor(floImg, cv2.COLOR_BGR2GRAY)
            self.frames.append(Frame(optImg, floImg, i))
    #Init object with a list of (optical, fluorescence) image pairs
    def matListInit(self, mats):
        """Load frames from a list of (optical, fluorescence) image pairs."""
        self.numFrames = len(mats)
        print("Loadling "+str(self.numFrames)+" Frames")
        self.tracker = CentroidTracker()
        for frameNum in range(self.numFrames):
            print("Loading Frame Number: " + str(frameNum))
            optImg = mats[frameNum][0]
            floImg = mats[frameNum][1]
            self.frames.append(Frame(optImg, floImg, frameNum))
        del mats
    def threeZoomInit(self, zom0Cap, zom1Cap, zom2Cap, flo1Cap):
        """Load frames from three zoom-level optical captures plus fluorescence."""
        self.numZoom = 3
        self.numVidFrames = int(zom2Cap.get(cv2.CAP_PROP_FRAME_COUNT))
        self.numFloFrames = int(flo1Cap.get(cv2.CAP_PROP_FRAME_COUNT))
        self.tracker = CentroidTracker()
        for i in range(self.numVidFrames):
            print("loading Frame " + str(i))
            #Highest zoom level drives the frame loop
            hasFrame, optImg = zom2Cap.read()
            hasFrame, floImg = flo1Cap.read()
            optImg = cv2.cvtColor(optImg, cv2.COLOR_BGR2GRAY)
            floImg = cv2.cvtColor(floImg, cv2.COLOR_BGR2GRAY)
            frame = Frame(optImg, floImg, i)
            #Extra (wider) zoom levels attached to the same frame
            hasFrame, zom0Img = zom0Cap.read()
            hasFrame, zom1Img = zom1Cap.read()
            zom0Img = cv2.cvtColor(zom0Img, cv2.COLOR_BGR2GRAY)
            zom1Img = cv2.cvtColor(zom1Img, cv2.COLOR_BGR2GRAY)
            frame.addZoomLevels(zom0Img, zom1Img)
            self.frames.append(frame)
    #TODO take a string that tells which init to use
    #Pre: captureVideo, captureFlo (dispatch depends on which args are given)
    #Ret: Video object
    def __init__(self, arg1, arg2=-1, arg3=-1, arg4=-1):
        """Dispatch on the supplied arguments: one arg -> list-of-mats init,
        four args -> three-zoom init, two args -> plain two-capture init."""
        # Per-instance state (fixes the shared class-level `frames` list).
        self.frames = []
        if(arg2 == -1):
            self.matListInit(arg1)
        elif(arg3 != -1):
            self.threeZoomInit(arg1, arg2, arg3, arg4)
        else:
            self.vidCapInit(arg1, arg2)
        self.xSz = self.frames[0].getUserOptImage().shape[0]
        self.ySz = self.frames[0].getUserOptImage().shape[1]
    #Methods
    def getNumFrmes(self):
        """Return the number of loaded frames. (Name typo kept for callers.)"""
        return len(self.frames)
    #Pre: frameNum, index of the frame being retrieved
    #Ret: Frame of the given number
    def getFrame(self, frameNum):
        """Return the Frame at index frameNum."""
        return self.frames[frameNum]
    def getTrackedCells(self):
        """Return the tracked cells computed by runTracking()."""
        return self.trackedCells
    def runTracking(self):
        """Track cell instances across all frames and build per-frame overlays."""
        for frame in self.frames:
            self.trackedCells = self.tracker.updateCellInst(frame.cellInstanses)
            frame.idImg = getIDImage(self.trackedCells, frame)
            frame.classImg = getClassImage(self.trackedCells, frame.xSz, frame.ySz)
        #self.trackedCells = filterTrackedCells(self.trackedCells)
        #TODO: rebuild the ID and segmentation overlays here after filtering
        self.findLineage()
    def findLineage(self):
        """Infer lineage relations among the tracked cells (module-level helper)."""
        findLineage(self.trackedCells)
|
{"/UserInterface/Controls.py": ["/UserInterface/UpdateFrame.py"], "/Tracking/centroidTracker.py": ["/Segmentation/cellInstance.py", "/Tracking/TrackedCell.py"], "/UserInterface/LoadData/LoadChannels.py": ["/UserInterface/videoClass.py"], "/UserInterface/UpdateFrame.py": ["/UserInterface/IncreasIntesity.py"], "/Tracking/findLineage.py": ["/Tracking/getEdgeToEdgeDist.py"], "/Segmentation/watershed.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/getThreshold.py"], "/UserInterface/getInstantSegmentImage.py": ["/UserInterface/getMaskImage.py"], "/Segmentation/RandomForestSegmentaion.py": ["/Segmentation/ParmeterizeImagegs.py", "/Segmentation/ConectedComponents.py", "/Segmentation/FilterDetection.py"], "/UserInterface/getClassImage.py": ["/UserInterface/getMaskImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/ConectedComponents.py": ["/Segmentation/getWHI5Activity.py", "/Segmentation/cellInstance.py"], "/main.py": ["/UserInterface/LoadData/LoadData.py", "/UserInterface/LoadData/LoadtifFile.py", "/UserInterface/Controls.py", "/UserInterface/LoadData/ImportThreeZoomLevel.py", "/UserInterface/LoadData/LoadChannels.py"], "/UserInterface/getIDImage.py": ["/UserInterface/rescaleImageToUser.py", "/Tracking/GetPositionFromContour.py"], "/Tracking/TrackedCell.py": ["/Segmentation/cellInstance.py"], "/Segmentation/OstuBinarizartion.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/getThreshold.py", "/Segmentation/Rescaling.py", "/Segmentation/ConvexHull.py"], "/Segmentation/ThersholdingSegmentation.py": ["/Segmentation/Preprocessing.py", "/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py", "/Segmentation/ConvexHull.py", "/Segmentation/ConectedComponents.py"], "/UserInterface/LoadData/ImportThreeZoomLevel.py": 
["/UserInterface/videoClass.py"], "/Segmentation/LaplacianGausian.py": ["/Segmentation/cellInstance.py", "/Segmentation/getWHI5Activity.py", "/Segmentation/FilterDetection.py"], "/UserInterface/LoadData/LoadtifFile.py": ["/UserInterface/videoClass.py"], "/UserInterface/LoadData/LoadData.py": ["/UserInterface/videoClass.py"], "/UserInterface/frameClass.py": ["/Segmentation/cellInstance.py", "/Tracking/centroidTracker.py", "/Segmentation/OstuBinarizartion.py", "/Segmentation/watershed.py", "/Segmentation/LaplacianGausian.py", "/Segmentation/ThersholdingSegmentation.py", "/Segmentation/RandomForestSegmentaion.py", "/UserInterface/getInstantSegmentImage.py", "/UserInterface/rescaleImageToUser.py"], "/Segmentation/getThreshold.py": ["/Segmentation/FilterDetection.py"], "/UserInterface/videoClass.py": ["/UserInterface/frameClass.py", "/Tracking/centroidTracker.py", "/UserInterface/getIDImage.py", "/UserInterface/getClassImage.py", "/Tracking/findLineage.py", "/Tracking/filterTracking.py"]}
|
17,639
|
amaralunao/api_proxy
|
refs/heads/master
|
/api/constants.py
|
# API endpoint and credentials for the demo Calendar42 service.
HOST = "https://demo.calendar42.com/api/v2/"
# NOTE(review): hard-coded credential checked into source — should be loaded
# from configuration/environment in a real deployment.
API_TOKEN = "Token 5426034f09d8463684d5de9beea93ea34d214b65"
# Common request headers. API_TOKEN already carries the "Token " scheme
# prefix, so it is used verbatim (the previous "{Token}".format(...) call
# was a no-op that produced the identical string).
headers = {"Accept": "application/json",
           "Content-type": "application/json",
           "Authorization": API_TOKEN}
|
{"/api/views.py": ["/api/utils.py"], "/api/utils.py": ["/api/constants.py"], "/api/urls.py": ["/api/views.py"]}
|
17,640
|
amaralunao/api_proxy
|
refs/heads/master
|
/api/views.py
|
from django.shortcuts import render
from .utils import get_event_title, get_event_names
from django.views.decorators.cache import cache_page
@cache_page(60 * 4.2)
def events_with_subscriptions(request, event_id):
    """Render an event plus the first names of its subscribers.

    The rendered response is cached for ~4.2 minutes via ``cache_page``,
    so the two upstream API calls are not repeated on every hit.
    """
    context = {
        'events_with_names_dict': {
            "id": event_id,
            "title": get_event_title(event_id),
            "names": get_event_names(event_id),
        }
    }
    return render(request, 'events_with_subscriptions.html', context)
|
{"/api/views.py": ["/api/utils.py"], "/api/utils.py": ["/api/constants.py"], "/api/urls.py": ["/api/views.py"]}
|
17,641
|
amaralunao/api_proxy
|
refs/heads/master
|
/api/utils.py
|
import requests
from .constants import HOST, API_TOKEN, headers
def get_event(event_id):
    """Fetch the raw event payload for *event_id* from the API as JSON."""
    endpoint = "events/{EVENT_ID}/".format(EVENT_ID=event_id)
    response = requests.get(HOST + endpoint, headers=headers)
    return response.json()
def get_event_subscriptions(event_id):
    """Fetch the subscription list for *event_id* from the API as JSON."""
    endpoint = "event-subscriptions/?event_ids=[{EVENT_ID}]".format(EVENT_ID=event_id)
    response = requests.get(HOST + endpoint, headers=headers)
    return response.json()
def get_event_title(event_id):
    """Return the title of *event_id*, or an error message on failure.

    Bug fix: the original assigned the error message and then
    unconditionally overwrote it with ``data[0]['title']`` — which on an
    error payload either crashed (no 'data' list) or discarded the
    message. Returning early keeps the error path intact.
    """
    event_details = get_event(event_id)
    if event_details.get('error'):
        # Error payloads carry no usable 'data' entry — bail out now.
        return "Error occured while getting the title"
    return event_details.get('data')[0].get('title')
def get_event_names(event_id):
    """Return the subscribers' first names for *event_id*.

    On an API error, returns a one-element list holding an error message
    instead of raising.
    """
    subscriptions = get_event_subscriptions(event_id)
    if subscriptions.get('error'):
        return ['Error occured while getting the event names']
    return [str(entry.get('subscriber').get('first_name'))
            for entry in subscriptions.get('data')]
|
{"/api/views.py": ["/api/utils.py"], "/api/utils.py": ["/api/constants.py"], "/api/urls.py": ["/api/views.py"]}
|
17,642
|
amaralunao/api_proxy
|
refs/heads/master
|
/api/urls.py
|
from django.conf.urls import url
from .views import events_with_subscriptions
# URL routes for the api app.
urlpatterns = [
    # Hex-or-underscore event ids.
    # NOTE(review): the trailing '/*' matches zero or more literal '/'
    # characters and the pattern is not anchored with '$', so longer
    # paths also match — confirm this permissiveness is intended.
    url(r'^events-with-subscriptions/(?P<event_id>[0-9a-fA-F_]+)/*',
        events_with_subscriptions, name='events-with-subscriptions'),
]
|
{"/api/views.py": ["/api/utils.py"], "/api/utils.py": ["/api/constants.py"], "/api/urls.py": ["/api/views.py"]}
|
17,644
|
wissemkhrarib/Bookstore---Django
|
refs/heads/main
|
/books/admin.py
|
from django.contrib import admin
from .models import Book, Author
class BookAdmin(admin.ModelAdmin):
    """Admin changelist for Book: show name, series number and author."""
    list_display = ('name', 'serie_number', 'author')
class AuthorAdmin(admin.ModelAdmin):
    """Admin changelist for Author: show first name and email."""
    list_display = ('firstname', 'email')
# Register both models with their customised admin classes.
admin.site.register(Book, BookAdmin)
admin.site.register(Author, AuthorAdmin)
|
{"/books/admin.py": ["/books/models.py"]}
|
17,645
|
wissemkhrarib/Bookstore---Django
|
refs/heads/main
|
/books/urls.py
|
from django.urls import path
from books import views
# URL routes for the books app: listing page and "new book" form.
urlpatterns = [
    path('', views.index),
    path('new', views.new)
]
|
{"/books/admin.py": ["/books/models.py"]}
|
17,646
|
wissemkhrarib/Bookstore---Django
|
refs/heads/main
|
/books/models.py
|
from django.db import models
class Author(models.Model):
    """A book author with contact email."""
    firstname = models.CharField(max_length=255)
    lastname = models.CharField(max_length=255)
    email = models.EmailField()

    def __str__(self):
        # Human-readable form: "firstname lastname".
        return self.firstname+' '+self.lastname
class Book(models.Model):
    """A single book, owned by exactly one Author."""
    name = models.CharField(max_length=255)
    description = models.CharField(max_length=1000)
    # Position of the book within its series.
    serie_number = models.IntegerField()
    # Deleting an Author cascades to their books.
    author = models.ForeignKey(Author, on_delete=models.CASCADE)
|
{"/books/admin.py": ["/books/models.py"]}
|
17,647
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/gpio.py
|
class switchObject():
    """Toggle driver for a single GPIO output pin (MicroPython).

    State is tracked as the strings "0"/"1". Note the inverted pairing:
    the "Turning OFF" branch calls pin.on() — presumably the attached
    load is active-low.
    """

    def __init__(self, channel):
        import machine
        self.pin = machine.Pin(channel, machine.Pin.OUT)
        self.state = "1"
        # Toggle once so the pin starts in a known state.
        self.switch()

    def switch(self):
        """Flip the switch and return the new state string."""
        if not bool(int(self.state)):
            print("Turning ON")
            self.pin.off()
            self.state = "1"
        else:
            print("Turning OFF")
            self.pin.on()
            self.state = "0"
        return self.state
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
17,648
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/TCP_socket_object.py
|
class createConnection():
    """Thin wrapper around one TCP socket, usable as server or client.

    Method names (including the 'recieve' typo) are preserved because
    other modules in this project call them.
    """

    def __init__(self):
        import socket
        self.socket = socket.socket()

    def startServer(self, port, max_con):
        """Bind to *port* on all interfaces and listen (backlog *max_con*)."""
        self.port = port
        self.socket.bind(('', self.port))
        self.socket.listen(max_con)

    def client(self, IP, port):
        """Connect this socket to a remote server at IP:port."""
        self.IP = IP
        self.port = port
        self.connection = self.socket
        self.connection.connect((self.IP, self.port))

    def send(self, message, connection=None):
        """Send *message* (str) over *connection* (default: own client link)."""
        if connection is None:
            connection = self.connection
        connection.send(message.encode())

    def recieve(self, timeoutms, maxlenght, connection=None):
        """Receive up to *maxlenght* bytes; return str, or False on timeout.

        NOTE(review): despite the name, *timeoutms* is passed straight to
        settimeout(), which takes SECONDS — confirm callers' intent.
        """
        import socket
        if connection is None:
            connection = self.connection
        connection.settimeout(timeoutms)
        try:
            msg = connection.recv(maxlenght).decode()
        except socket.timeout:
            # Bug fix: before Python 3.10 a socket timeout raises
            # socket.timeout (an OSError subclass), not TimeoutError, so
            # the original except clause never fired there. socket.timeout
            # is correct on all versions (it aliases TimeoutError on
            # 3.10+) and, unlike plain OSError, does NOT swallow
            # ConnectionResetError, which callers rely on catching.
            msg = False
        connection.settimeout(None)
        return msg
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
17,649
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/esp8266.py
|
import socket, machine
def do_connect(ESSID, password):
    """Join the given WiFi network in station mode (MicroPython).

    Disables the access-point interface, blocks until the station
    interface reports a connection, then prints the IP configuration.
    """
    import network
    # Make sure the board is not simultaneously acting as an AP.
    network.WLAN(network.AP_IF).active(False)
    station = network.WLAN(network.STA_IF)
    if not station.isconnected():
        print("connecting to network...")
        station.active(True)
        station.connect(ESSID, password)
        # Busy-wait until association completes.
        while not station.isconnected():
            pass
    print("network config:", station.ifconfig())
def credentialsRead(filename):
    """Read *filename* and return its lines as a list of strings.

    Splits on "\n" exactly like the original, so a trailing newline in
    the file yields a trailing empty string in the result.
    """
    # 'with' guarantees the handle is closed even if reading fails (the
    # original leaked it on error); the manual copy loop over the split
    # list was redundant and is gone.
    with open(filename, "r") as file:
        return file.read().split("\n")
do_connect(*credentialsRead("wifi.ini"))
class switchObject():
    # Duplicate of gpio.switchObject, inlined here so this script can be
    # flashed standalone. State is tracked as the strings "0"/"1"; note
    # the inverted pairing — "Turning OFF" calls pin.on() — presumably
    # the attached load is active-low (confirm on the hardware).
    def __init__(self, channel):
        self.pin = machine.Pin(channel, machine.Pin.OUT)
        self.state = "1"
        # Toggle once at start-up so the pin is driven to a known state.
        self.switch()

    def switch(self):
        # Flip the switch and return the new state string.
        if bool(int(self.state)):
            print("Turning OFF")
            self.pin.on()
            self.state = "0"
            return self.state
        else:
            print("Turning ON")
            self.pin.off()
            self.state = "1"
            return self.state
class socketConnection():
    """Minimal TCP listener accepting a single queued client."""

    def __init__(self, port):
        listener = socket.socket()
        listener.bind(("", port))
        listener.listen(1)
        self.server = listener

    def acceptCon(self):
        """Block until a client connects; return (conn, addr)."""
        return self.server.accept()
# --- main server loop -------------------------------------------------
# NOTE(review): indentation below is reconstructed from a whitespace-
# stripped dump; confirm the placement of data.close() relative to the
# if-branch against the original source.
server_instance = socketConnection(2198)
GPIO0Handler = switchObject(0)
while True:
    # Block until a client connects.
    data, addr = server_instance.acceptCon()
    print ("Got connection from" + str(addr))
    data.settimeout(5)
    while True:
        try:
            # A single '0' byte from the client toggles the switch and
            # echoes the new state back.
            if not bool(int(data.recv(1).decode())):
                data.send(GPIO0Handler.switch())
                # NOTE(review): switch() returns a str; CPython sockets
                # require bytes here — presumably this targets
                # MicroPython; verify send() accepts str on the device.
                data.close()
        except(ValueError):
            # Payload was not an integer digit (or empty on disconnect).
            print("Invalid Input")
            data.close()
            break
        except(OSError):
            # settimeout(5) expired, or the socket was already closed.
            print("Timed Out")
            data.close()
            break
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
17,650
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/socket_server.py
|
from TCP_socket_object import createConnection
def node(con):
    """Worker loop: accept clients one at a time and print what they send.

    When a client drops the connection, the inner loop exits and the
    worker goes back to accepting the next client.
    """
    while True:
        client, client_addr = con.socket.accept()
        print("New connection:", client_addr)
        while True:
            try:
                # Blocking receive (timeout None), up to 10 bytes.
                print(con.recieve(None, 10, client))
            except ConnectionResetError:
                print("Connection lost:", client_addr)
                break
if __name__ == "__main__":
    con = createConnection()
    # Listen on port 2198 with a backlog of 5.
    con.startServer(2198,5)
    import threading
    # Spawn five workers that all accept() on the same listening socket;
    # whichever thread wins the accept handles that client.
    for x in range(5):
        thread = threading.Thread(target=node, args=(con,))
        # Non-daemon: the process stays alive while any worker runs.
        thread.daemon = False
        thread.start()
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
17,651
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/boot.py
|
# Boot script: join WiFi, open a TCP client connection to the server,
# then poll GPIO0 and notify the server on each button press.
import read_file, TCP_socket_object, wifi_connect, machine, time

# wifi_connect.do_connect presumably returns the ifconfig tuple
# (ip, netmask, gateway, dns) — TODO confirm; index 3 would then be the
# DNS server, which is an odd choice of TCP peer (gateway, index 2, may
# have been intended).
ipconfig = wifi_connect.do_connect(*read_file.credentialsRead("wifi.ini"))
con = TCP_socket_object.createConnection()
con.client(ipconfig[3],2198)
# Input pin for the push button; the test below treats it as active-low.
pin = machine.Pin(0, machine.Pin.IN)
while True:
    if not pin.value():
        # Button pressed: notify the server.
        con.send('1')
    # NOTE(review): reconstructed indentation — the sleep may belong
    # inside the if-branch; confirm against the original source.
    time.sleep(2)
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
17,652
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/read_file.py
|
def credentialsRead(filename):
    """Read *filename* and return its lines as a list of strings.

    Splits on "\n" exactly like the original, so a trailing newline in
    the file yields a trailing empty string in the result.
    """
    # 'with' guarantees the handle is closed even if reading fails (the
    # original leaked it on error); the manual copy loop over the split
    # list was redundant and is gone.
    with open(filename, "r") as file:
        return file.read().split("\n")
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
17,653
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/esp8266 (1).py
|
# ESP8266 entry point: join WiFi, then serve switch-toggle requests over
# TCP using the project helper modules.
import gpio, read_file, TCP_socket_object, wifi_connect

# wifi.ini is expected to hold "ssid\npassword".
wifi_connect.do_connect(*read_file.credentialsRead("wifi.ini"))
# Controller for the relay/LED on GPIO0.
GPIO0Handler = gpio.switchObject(0)
while True:
    # NOTE(review): the TCP_socket_object module shown in this repo
    # defines the createConnection class but no module-level server()
    # function — this call looks broken; confirm against the deployed
    # version of that module.
    data, addr = TCP_socket_object.server(2198)
    print ("Got connection from" + str(addr))
    data.settimeout(5)
    while True:
        try:
            # A single '0' byte toggles the switch; echo the new state.
            if not bool(int(data.recv(1).decode())):
                data.send(GPIO0Handler.switch())
                data.close()
        except(ValueError):
            # Payload was not an integer digit (or empty on disconnect).
            print("Invalid Input")
            data.close()
            break
        except(OSError):
            # settimeout(5) expired, or the socket was already closed.
            print("Timed Out")
            data.close()
            break
# NOTE(review): indentation in this block is reconstructed from a
# whitespace-stripped dump; verify close() placement.
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
17,654
|
Syvokobylenko/ProjectAutoHome
|
refs/heads/master
|
/inputsocket.py
|
import socket, time
class connection:
    """One-shot TCP client used to toggle the remote switch."""

    def __init__(self, IP, port):
        self.IP = IP
        self.port = port
        self.startConnection()

    def startConnection(self):
        """Open a TCP connection to self.IP:self.port."""
        # The original wrapped connect() in `except KeyboardInterrupt`
        # and evaluated the bare name `exit` — a no-op that silently
        # swallowed Ctrl-C. Letting KeyboardInterrupt propagate restores
        # the obvious intent (terminate the program).
        self.s = socket.socket()
        self.s.connect((self.IP, self.port))

    def sendData(self, state):
        """Send *state* to the server as a UTF-8 encoded byte string."""
        # Bug fix: socket.send() requires bytes in Python 3; the original
        # passed a str and raised TypeError on every call. (The original
        # also swallowed Ctrl-C here via the same bare-`exit` no-op.)
        self.s.send(str(state).encode())
# Interactive client: each iteration opens a fresh connection, sends the
# user's input, prints the one-byte reply, and closes the socket.
# NOTE(review): the server address is hard-coded to a LAN IP — move it
# to configuration.
while True:
    state = input("Type 0 to use switch: ")
    client_soc = connection("192.168.0.39", 2198)
    client_soc.sendData(state)
    print(client_soc.s.recv(1).decode())
    client_soc.s.close()
|
{"/socket_server.py": ["/TCP_socket_object.py"], "/boot.py": ["/read_file.py", "/TCP_socket_object.py"], "/esp8266 (1).py": ["/gpio.py", "/read_file.py", "/TCP_socket_object.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.