index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
2,691
|
seosaju/SoupKitchen
|
refs/heads/master
|
/booth/views.py
|
from django.http import HttpResponse
from django.shortcuts import render
from load_csv import load
from secret import MAP_KEY
from .models import Booth, Company
'''
def make_booth(request):
booth_list = load('./data.csv')
for booth in booth_list:
name = booth[3]
try:
company = Company.objects.get(name=name)
except Company.DoesNotExist:
company = Company(name=name)
company.save()
Booth.objects.create(
name=booth[0], # μμ€λͺ
road_address=booth[1], # μμ¬μ§λλ‘λͺ
μ£Όμ
land_address=booth[2], # μμ¬μ§μ§λ²μ£Όμ
company=company, # μ΄μκΈ°κ΄λͺ
contact=booth[4], # μ νλ²νΈ
place=booth[5], # κΈμμ₯μ
target=booth[6], # κΈμλμ
time=booth[7], # κΈμμκ°
date=booth[8], # κΈμμΌ
latitude=booth[11], # μλ
longitude=booth[12] # κ²½λ
)
return HttpResponse('load complete!')
'''
def maps(request):
    """Render the booth map page with every Booth row and the map-provider API key."""
    booths = Booth.objects.all()
    return render(request, 'booth/maps.html', {'booths': booths, 'MAP_KEY': MAP_KEY})
|
{"/booth/admin.py": ["/booth/models.py"], "/booth/views.py": ["/load_csv.py", "/booth/models.py"]}
|
2,692
|
seosaju/SoupKitchen
|
refs/heads/master
|
/booth/urls.py
|
from django.urls import path
from . import views

app_name = 'booth'

# URL table for the booth app.
urlpatterns = [
    # path('make_booth/', views.make_booth, name='make_booth'),  # URL used only once, to load the csv file into the DB
    path('', views.maps, name='index'),
]
|
{"/booth/admin.py": ["/booth/models.py"], "/booth/views.py": ["/load_csv.py", "/booth/models.py"]}
|
2,693
|
seosaju/SoupKitchen
|
refs/heads/master
|
/booth/models.py
|
from django.db import models
class Company(models.Model):
    """Operator organisation that a Booth belongs to (see Booth.company)."""
    name = models.CharField(max_length=100)  # organisation name

    def __str__(self):
        return self.name
class Booth(models.Model):
    """
    A free-meal (soup kitchen) service location.

    Fields mirror the columns of the source CSV loaded by make_booth in
    booth/views.py; the original Korean column comments are translated below.
    """
    name = models.CharField(max_length=50)  # facility name
    road_address = models.CharField(max_length=100)  # road-name address
    land_address = models.CharField(max_length=100)  # lot-number address
    company = models.ForeignKey('Company', on_delete=models.CASCADE)  # operating organisation
    contact = models.CharField(max_length=20)  # phone number
    place = models.CharField(max_length=100)  # meal-service place
    target = models.CharField(max_length=100)  # meal-service target (who is served)
    time = models.CharField(max_length=50)  # meal-service time
    date = models.CharField(max_length=50)  # meal-service days
    latitude = models.DecimalField(max_digits=10, decimal_places=8)  # latitude
    longitude = models.DecimalField(max_digits=11, decimal_places=8)  # longitude

    def __str__(self):
        return self.name
|
{"/booth/admin.py": ["/booth/models.py"], "/booth/views.py": ["/load_csv.py", "/booth/models.py"]}
|
2,701
|
rossmounce/OpenArticleGauge
|
refs/heads/master
|
/openarticlegauge/recordmanager.py
|
from openarticlegauge import config
from datetime import datetime
def record_provider_url(record, url):
    """
    Add *url* to record['provider']['url'], creating the intermediate
    structure if needed. Duplicate urls are not added twice.
    """
    provider = record.setdefault("provider", {})
    known = provider.setdefault("url", [])
    if url not in known:
        known.append(url)
def record_provider_urls(record, urls):
    """Register every url in *urls* against the record's provider information."""
    for one_url in urls:
        record_provider_url(record, one_url)
def record_provider_doi(record, doi):
    """Store *doi* as the provider DOI, creating record['provider'] if absent."""
    record.setdefault("provider", {})["doi"] = doi
def add_license(record,
                description="",
                title="",
                url="",
                version="",
                jurisdiction="",
                type="",
                open_access=False,
                BY="",
                NC="",
                ND="",
                SA="",
                error_message="",
                suggested_solution="",
                category="",
                provenance_description="",
                agent=None,
                source="",
                date=None,
                handler="",
                handler_version=""):
    """
    Append a licence object (with its provenance block) to
    record['bibjson']['license'], creating the intermediate structure
    if necessary. The record is mutated in place.

    The appended entry has the shape:
    {
        "description": "",
        "title": "",
        "url": licence_url,
        "version": "",
        "jurisdiction": "",
        "type": "failed-to-obtain-license",
        "open_access": False,
        "BY": "",
        "NC": "",
        "ND": "",
        "SA": "",
        "error_message": why,
        "suggested_solution": suggested_solution,
        "provenance": {
            "category": "page_scrape",
            "description": self.gen_provenance_description_fail(source_url),
            "agent": config.agent,
            "source": source_url,
            "date": datetime.strftime(datetime.now(), config.date_format),
            "handler" : self._short_name,
            "handler_version" : self.__version__
        }
    }

    :param record: record dict to augment (mutated in place)
    :param date: provenance timestamp string; defaults to "now" in
        config.date_format, computed at call time
    :param agent: provenance agent string; defaults to config.agent,
        read at call time
    (all other parameters are copied verbatim into the licence object;
    `type` intentionally shadows the builtin to preserve the public
    keyword-argument name)
    """
    # BUGFIX: the original defaults `agent=config.agent` and
    # `date=datetime.strftime(datetime.now(), config.date_format)` were
    # evaluated ONCE at import time, so every call omitting `date` recorded
    # the module-load timestamp. Resolve both per call instead.
    if agent is None:
        agent = config.agent
    if date is None:
        date = datetime.strftime(datetime.now(), config.date_format)
    bibjson = record.setdefault("bibjson", {})
    bibjson.setdefault("license", []).append(
        {
            "description": description,
            "title": title,
            "url": url,
            "version": version,
            "jurisdiction": jurisdiction,
            "type": type,
            "open_access": open_access,
            "BY": BY,
            "NC": NC,
            "ND": ND,
            "SA": SA,
            "error_message": error_message,
            "suggested_solution": suggested_solution,
            "provenance": {
                "category": category,
                "description": provenance_description,
                "agent": agent,
                "source": source,
                "date": date,
                "handler" : handler,
                "handler_version" : handler_version
            }
        }
    )
|
{"/openarticlegauge/workflow.py": ["/openarticlegauge/slavedriver.py"]}
|
2,702
|
rossmounce/OpenArticleGauge
|
refs/heads/master
|
/openarticlegauge/cache.py
|
import redis, json, datetime, logging
import config
log = logging.getLogger(__name__)
def check_cache(key):
    """
    Fetch the cache entry stored under *key* and deserialise it from JSON.
    Returns None when the key is absent; a corrupt (unparseable) entry is
    removed from the cache and likewise reported as None.
    """
    client = redis.StrictRedis(host=config.REDIS_CACHE_HOST, port=config.REDIS_CACHE_PORT, db=config.REDIS_CACHE_DB)
    raw = client.get(key)
    if raw is None:
        return None
    try:
        return json.loads(raw)
    except ValueError:
        # cache is corrupt, just get rid of it
        invalidate(key)
        return None
def is_stale(bibjson):
    """
    Check whether the supplied bibjson record is stale.

    Looks in bibjson['license'][n]['provenance']['date'] for all n; if the
    newest such date is more than config.licence_stale_time seconds old, the
    record is stale. A record with no licence, no dates, or no parseable
    dates is always stale.

    :param bibjson: bibjson dict to inspect
    :return: True when stale, False otherwise
    """
    # check that the record has a licence at all
    if "license" not in bibjson:
        return True
    # get the date strings of all the licences
    log.debug("stale check on: " + str(bibjson))
    date_strings = [licence.get("provenance", {}).get("date")
                    for licence in bibjson.get("license", [])
                    if licence.get("provenance", {}).get("date") is not None]
    # no dates at all means the record is necessarily stale
    if not date_strings:
        return True
    # convert all the viable date strings to datetimes, skipping unparseable ones
    dates = []
    for d in date_strings:
        try:
            dates.append(datetime.datetime.strptime(d, config.date_format))
        except ValueError:
            continue
    # if nothing parsed, assume the record is stale
    if not dates:
        return True
    # compare the most recent date against the staleness horizon
    # (max() replaces the original sort(reverse=True)[0]: same result, O(n))
    most_recent = max(dates)
    stale_date = most_recent + datetime.timedelta(seconds=config.licence_stale_time)
    return stale_date < datetime.datetime.now()
def invalidate(key):
    """Drop whatever the cache currently holds under *key*."""
    conn = redis.StrictRedis(host=config.REDIS_CACHE_HOST, port=config.REDIS_CACHE_PORT, db=config.REDIS_CACHE_DB)
    conn.delete(key)
def cache(key, obj):
    """
    Serialise *obj* to a JSON string and store it under *key* with the
    configured timeout, creating a new entry or refreshing an existing one.

    Raises CacheException when *obj* cannot be serialised by json.dumps.
    """
    try:
        payload = json.dumps(obj)
    except TypeError:
        raise CacheException("can only cache python objects that can be sent through json.dumps")
    conn = redis.StrictRedis(host=config.REDIS_CACHE_HOST, port=config.REDIS_CACHE_PORT, db=config.REDIS_CACHE_DB)
    conn.setex(key, config.REDIS_CACHE_TIMEOUT, payload)
class CacheException(Exception):
    """Raised when an object cannot be stored in (or serialised for) the cache."""
    def __init__(self, message):
        self.message = message
        # BUGFIX: the original called super().__init__(self, message), which
        # put the exception INSTANCE into e.args and corrupted str(e).
        # Pass only the message, as Exception expects.
        super(CacheException, self).__init__(message)
|
{"/openarticlegauge/workflow.py": ["/openarticlegauge/slavedriver.py"]}
|
2,703
|
rossmounce/OpenArticleGauge
|
refs/heads/master
|
/openarticlegauge/plugloader.py
|
import config
import logging
log = logging.getLogger(__name__)
"""
NOTE: these might be useful to someone in the future, but we don't need them
right now, so leaving them commented out
def get_info(callable_path):
if callable_path is None:
log.debug("attempted to load plugin with no plugin path")
return None
# callable_path is a function in a module, and the module itself holds
# the info, so we need to just load the module
components = callable_path.split(".")
modpath = ".".join(components[:-1])
if modpath == "" or modpath is None:
return None, None
# ok, so now we know the path to the module, load it
module = load(modpath)
name = "unknown"
version = -1
if hasattr(module, "__name__"):
name = module.__name__.split(".")[-1]
if hasattr(module, "__version__"):
version = module.__version__
return name, version
def load_sibling(callable_path, sibling_name):
if callable_path is None:
log.debug("attempted to load plugin with no plugin path")
return None
components = callable_path.split(".")
call = components[-1:][0]
modpath = ".".join(components[:-1])
# construct the new callable
sibling = modpath + "." + sibling_name
return load(sibling)
"""
def load(callable_path):
    """
    Resolve the dotted *callable_path* to a callable and return it. If the
    module path is not importable directly, retry it under each prefix in
    config.module_search_list. Returns None when nothing can be loaded.
    """
    if callable_path is None:
        log.debug("attempted to load plugin with no plugin path")
        return None
    # split out the callable and the modpath
    parts = callable_path.split(".")
    call = parts[-1]
    modpath = ".".join(parts[:-1])
    log.debug("loading plugin from modpath: " + modpath + ", and callable: " + call)
    if modpath:
        # try to load the callable directly
        loaded = _load_callable(modpath, call)
        if loaded is not None:
            log.debug("loaded plugin from " + modpath + ": " + str(loaded))
            return loaded
    # not found directly - the callable may live in one of the other search
    # contexts defined in the config
    for search_prefix in config.module_search_list:
        loaded = _load_callable(search_prefix + "." + modpath, call)
        if loaded is not None:
            log.debug("loaded plugin from " + modpath + ": " + str(loaded))
            return loaded
    # couldn't load a plugin
    log.debug("unable to load plugin " + call + " from " + modpath)
    return None
def _load_callable(modpath, call):
    """
    Import the module at *modpath* and return its attribute named *call*.
    Returns None if the module cannot be imported or lacks the attribute.
    """
    try:
        mod = __import__(modpath, fromlist=[call])
    except ImportError:
        # possibly just a context thing - the class we're trying to load may
        # live in a different package
        log.debug("import error loading " + call + " from " + modpath + " - path may not be accessible or available in this context")
        return None
    try:
        return getattr(mod, call)
    except AttributeError:
        # found the module but the callable isn't part of it
        log.error("attribute error loading " + call + " from " + modpath + " - path is valid, but callable isn't part of that module")
        #raise e
        return None
|
{"/openarticlegauge/workflow.py": ["/openarticlegauge/slavedriver.py"]}
|
2,704
|
rossmounce/OpenArticleGauge
|
refs/heads/master
|
/openarticlegauge/plugin.py
|
from openarticlegauge import config, plugloader, recordmanager
from openarticlegauge.licenses import LICENSES
from openarticlegauge import oa_policy
import logging, requests
from copy import deepcopy
from datetime import datetime
log = logging.getLogger(__name__)
class Plugin(object):
    """
    Base class for OAG plugins.

    Sub-classes implement some subset of the capability methods
    (type_detect_verify, canonicalise, detect_provider, supports,
    license_detect) and advertise which ones via capabilities(). The
    remaining methods are shared utilities for licence scraping.
    """
    ## Capabilities that must be implemented by the sub-class ##
    __version__ = "0.0"  # version of the concrete plugin, recorded in provenance
    _short_name = "vanilla_plugin"  # plugin identifier, recorded in provenance

    def capabilities(self):
        """
        Describe the capabilities of this plugin, in the following form:
        {
            "type_detect_verify" : True,
            "canonicalise" : ["<supported type>"],
            "detect_provider" : ["<supported type>"],
            "license_detect" : True
        }
        Omit any key for any feature that the plugin does not support, or set the
        value of the key to False
        """
        return {}

    def type_detect_verify(self, bibjson_identifier):
        """
        determine if the provided bibjson identifier has the correct type for this plugin, by
        inspecting first the "type" parameter, and then by looking at the form
        of the id. If it is tagged as a DOI, then verify that it is a valid one.
        Add "type" parameter to the bibjson_identifier object if successful.
        """
        raise NotImplementedError("type_detect_verify has not been implemented")

    def canonicalise(self, bibjson_identifier):
        """
        create a canonical form of the identifier
        and insert it into the bibjson_identifier['canonical'].
        """
        raise NotImplementedError("canonicalise has not been implemented")

    def detect_provider(self, record):
        """
        Attempt to determine information regarding the provider of the identifier.
        Identifier can be found in record["identifier"].
        This function should - if successful - populate the record["provider"] field
        (create if necessary), with any information relevant to downstream plugins
        (see back-end documentation for more information)
        """
        raise NotImplementedError("detect_provider has not been implemented")

    def supports(self, provider):
        """
        Does the page_license method in this plugin support this provider
        """
        raise NotImplementedError("supports has not been implemented")

    def license_detect(self, record):
        """
        Determine the licence conditions of the record. Plugins may achieve this by
        any means, although the record['provider']['url'] and record['provider']['doi']
        fields will be key pieces of information.
        Plugins should populate (create if necessary) record['bibjson'] and populate with
        a record containing a "license" as per the back-end and API documentation
        """
        raise NotImplementedError("license_detect has not been implemented")

    ## utilities that the sub-class can take advantage of ##

    def clean_url(self, url):
        """Strip a leading http:// or https:// scheme from *url*, if present."""
        if url.startswith("http://"):
            url = url[len("http://"):]
        elif url.startswith("https://"):
            url = url[len("https://"):]
        return url

    def clean_urls(self, urls):
        """Apply clean_url to each member of *urls*, returning a new list."""
        cleaned_urls = []
        for url in urls:
            cleaned_urls.append(self.clean_url(url))
        return cleaned_urls

    def simple_extract(self, lic_statements, record, url):
        """
        Generic code which looks for a particular string in a given web page (URL),
        determines the licence conditions of the article and populates
        the record['bibjson']['license'] (note the US spelling) field.
        The URL it analyses, the statements it looks for and the resulting licenses
        are passed in. This is not a plugin for a particular publisher - it just
        contains (allows re-use) the logic that any "dumb string matching" plugin
        would use.

        :param lic_statements: licensing statements to look for on this publisher's
            pages. Take the form of {statement: meaning}
            where meaning['type'] identifies the license (see licenses.py)
            and meaning['version'] identifies the license version (if available)
            See a publisher plugin for an example, e.g. bmc.py
        :param record: a request for the OAG status of an article, see OAG docs for
            more info.
        :param url: source url of the item to be fetched. This is where the HTML
            page that's going to be scraped is expected to reside.
        """
        # get content
        r = requests.get(url)
        # see if one of the licensing statements is in content
        # and populate record with appropriate license info
        for statement_mapping in lic_statements:
            # get the statement string itself - always the first key of the dict
            # mapping statements to licensing info.
            # BUGFIX: the original used statement_mapping.keys()[0], which is
            # Python-2-only (keys() is a view in Python 3); next(iter(...))
            # yields the same first key on both versions.
            statement = next(iter(statement_mapping))
            if statement in r.content:
                # okay, statement found on the page -> get license type
                lic_type = statement_mapping[statement]['type']
                # license identified, now use that to construct the license object
                license = deepcopy(LICENSES[lic_type])
                license['open_access'] = oa_policy.oa_for_license(lic_type)
                # set some defaults which have to be there, even if empty
                license.setdefault('version','')
                license.setdefault('description','')
                license.setdefault('jurisdiction','') # TODO later (or later version of OAG!)
                # Copy over all information about the license from the license
                # statement mapping. In essence, transfer the knowledge of the
                # publisher plugin authors to the license object.
                # Consequence: Values coming from the publisher plugin overwrite
                # values specified in the licenses module.
                license.update(statement_mapping[statement])
                # add provenance information to the license object
                provenance = {
                    'date': datetime.strftime(datetime.now(), config.date_format),
                    'source': url,
                    'agent': config.agent,
                    'category': 'page_scrape', # TODO we need to think how the
                        # users get to know what the values here mean.. docs?
                    'description': self.gen_provenance_description(url, statement),
                    'handler': self._short_name, # the name of the plugin processing this record
                    'handler_version': self.__version__ # version of the plugin processing this record
                }
                license['provenance'] = provenance
                record['bibjson'].setdefault('license', [])
                record['bibjson']['license'].append(license)

    def gen_provenance_description(self, source_url, statement):
        """Human-readable provenance sentence for a successful licence match."""
        return 'License decided by scraping the resource at ' + source_url + ' and looking for the following license statement: "' + statement + '".'

    def gen_provenance_description_fail(self, source_url):
        """Human-readable provenance sentence for a failed licence lookup."""
        return 'We have found it impossible or prohibitively difficult to decide what the license of this item is by scraping the resource at ' + source_url + '. See "error_message" in the "license" object for more information.'

    def describe_license_fail(self, record, source_url, why, suggested_solution='', licence_url=""):
        """Record an explicit "failed-to-obtain-license" entry on the record."""
        recordmanager.add_license(
            record,
            source=source_url,
            error_message=why,
            suggested_solution=suggested_solution,
            url=licence_url,
            type="failed-to-obtain-license",
            open_access=False,
            category="page_scrape",
            provenance_description=self.gen_provenance_description_fail(source_url),
            handler=self._short_name,
            handler_version=self.__version__
        )
class PluginFactory(object):
    """Loads and instantiates the plugins named in the configuration for each workflow stage."""

    @classmethod
    def type_detect_verify(cls):
        # FIXME: this should be updated to utilise the "capabilities" aspect of the plugin
        instances = []
        for plugin_class in config.type_detection:
            klazz = plugloader.load(plugin_class)
            if klazz is None:
                log.warn("unable to load plugin for detecting identifier type from " + str(plugin_class))
                continue
            # append an instance of the class
            instances.append(klazz())
        return instances

    @classmethod
    def canonicalise(cls, identifier_type):
        # resolve the canonicaliser class for this identifier type and return an instance
        return plugloader.load(config.canonicalisers.get(identifier_type))()

    @classmethod
    def detect_provider(cls, identifier_type):
        # all provider plugins run, until each plugin has had a go at determining provider information
        return [plugloader.load(plugin_class)()
                for plugin_class in config.provider_detection.get(identifier_type, [])]

    @classmethod
    def license_detect(cls, provider_record):
        # return an instance of the first configured plugin that supports this provider
        for plugin_class in config.license_detection:
            log.debug("checking " + plugin_class + " for support of provider " + str(provider_record))
            klazz = plugloader.load(plugin_class)
            if klazz is None:
                continue
            inst = klazz()
            if inst.supports(provider_record):
                log.debug(plugin_class + " (" + inst._short_name + " v" + inst.__version__ + ") services provider " + str(provider_record))
                return inst
        return None
|
{"/openarticlegauge/workflow.py": ["/openarticlegauge/slavedriver.py"]}
|
2,705
|
rossmounce/OpenArticleGauge
|
refs/heads/master
|
/openarticlegauge/workflow.py
|
from celery import chain
from openarticlegauge import models, model_exceptions, config, cache, plugin, recordmanager
import logging
from openarticlegauge.slavedriver import celery
logging.basicConfig(filename='oag.log',level=logging.DEBUG)
log = logging.getLogger(__name__)
def lookup(bibjson_ids):
    """
    Take a list of bibjson id objects
    {
        "id" : "<identifier>",
        "type" : "<type>"
    }
    and process them, returning a models.ResultSet object of completed or incomplete results.

    Each id is resolved in order: type detection, canonicalisation, cache,
    archive, and finally injection into the asynchronous back-end chain.
    Per-id failures are trapped as LookupException and recorded on the
    individual result record rather than aborting the whole batch.
    """
    # FIXME: should we sanitise the inputs?
    # create a new resultset object
    log.debug("looking up ids: " + str(bibjson_ids))
    rs = models.ResultSet(bibjson_ids)
    # now run through each passed id, and either obtain a cached copy or
    # inject it into the asynchronous back-end
    for bid in bibjson_ids:
        # first, create the basic record object
        record = { "identifier" : bid }
        log.debug("initial record " + str(record))
        # trap any lookup errors
        try:
            # Step 1: identifier type detection/verification
            _detect_verify_type(record)
            log.debug("type detected record " + str(record))
            # Step 1a: if we don't find a type for the identifier, there's no point in us continuing
            if record.get("identifier", {}).get("type") is None:
                raise model_exceptions.LookupException("unable to determine the type of the identifier")
            # Step 2: create a canonical version of the identifier for cache keying
            _canonicalise_identifier(record)
            log.debug("canonicalised record " + str(record))
            # Step 3: check the cache for an existing record
            cached_copy = _check_cache(record)
            log.debug("cached record " + str(cached_copy))
            # this returns either a valid, returnable copy of the record, or None
            # if the record is not cached or is stale
            if cached_copy is not None:
                if cached_copy.get('queued', False):
                    record['queued'] = True
                elif cached_copy.has_key('bibjson'):  # NOTE: has_key - this module is Python 2
                    record['bibjson'] = cached_copy['bibjson']
                log.debug("loaded from cache " + str(record))
                rs.add_result_record(record)
                log.debug(str(bid) + " added to result, continuing ...")
                # cached result is final for this id - skip the archive/back-end steps
                continue
            # Step 4: check the archive for an existing record
            archived_bibjson = _check_archive(record)
            log.debug("archived bibjson: " + str(archived_bibjson))
            # this returns either a valid, returnable copy of the record, or None
            # if the record is not archived, or is stale
            if archived_bibjson is not None:
                record['bibjson'] = archived_bibjson
                log.debug("loaded from archive " + str(archived_bibjson))
                rs.add_result_record(record)
                continue
            # Step 5: we need to check to see if any record we have has already
            # been queued. In theory, this step is pointless, but we add it
            # in for completeness, and just in case any of the above checks change
            # in future
            if record.get("queued", False):
                # if the item is already queued, we just need to update the
                # cache (which may be a null operation anyway), and then carry on
                # to the next record
                _update_cache(record)
                log.debug("caching record " + str(record))
                continue
            # Step 6: if we get to here, we need to set the state of the record
            # queued, and then cache it.
            record['queued'] = True
            _update_cache(record)
            log.debug("caching record " + str(record))
            # Step 7: the record needs the licence looked up on it, so we inject
            # it into the asynchronous lookup workflow
            _start_back_end(record)
        except model_exceptions.LookupException as e:
            # e.message is Python-2-only; recorded on the per-id result
            record['error'] = e.message
        # write the resulting record into the result set
        rs.add_result_record(record)
    # finish by returning the result set
    return rs
def _check_archive(record):
    """
    Return the archived bibjson for the record's canonical identifier, or
    None when nothing usable is stored (either absent or stale).
    """
    if "identifier" not in record:
        raise model_exceptions.LookupException("no identifier in record object")
    if "canonical" not in record['identifier']:
        raise model_exceptions.LookupException("can't look anything up in the archive without a canonical id")
    canonical = record['identifier']['canonical']
    # obtain a copy of the archived bibjson
    log.debug("checking archive for canonical identifier: " + canonical)
    archived_bibjson = models.Record.check_archive(canonical)
    # if it's not in the archive, return
    if archived_bibjson is None:
        log.debug(canonical + " is not in the archive")
        return None
    # archived bibjson exists, but a stale copy is as good as no copy
    if _is_stale(archived_bibjson):
        log.debug(canonical + " is in the archive, but is stale")
        return None
    # otherwise, just return the archived copy
    log.debug(canonical + " is in the archive")
    return archived_bibjson
def _update_cache(record):
    """
    Write the record into the cache under its canonical identifier,
    resetting the timeout on the cached item.
    """
    if "identifier" not in record:
        raise model_exceptions.LookupException("no identifier in record object")
    if "canonical" not in record['identifier']:
        raise model_exceptions.LookupException("can't create/update anything in the cache without a canonical id")
    # update or create the cache
    cache.cache(record['identifier']['canonical'], record)
def _invalidate_cache(record):
    """
    Remove any cache entry associated with the record's canonical identifier.
    """
    if "identifier" not in record:
        raise model_exceptions.LookupException("no identifier in record object")
    if "canonical" not in record['identifier']:
        raise model_exceptions.LookupException("can't invalidate anything in the cache without a canonical id")
    cache.invalidate(record['identifier']['canonical'])
def _is_stale(bibjson):
    """
    Do a stale check on the bibjson object.

    Thin wrapper around cache.is_stale so the workflow module has a single
    staleness entry point.
    """
    return cache.is_stale(bibjson)
def _check_cache(record):
    """
    Look the record up in the live local cache by canonical id and return
    whatever is found: a queued marker, a full cached record, or None.
    A stale cached record is invalidated and treated as absent.
    """
    if "identifier" not in record:
        raise model_exceptions.LookupException("no identifier in record object")
    if "canonical" not in record['identifier']:
        raise model_exceptions.LookupException("can't look anything up in the cache without a canonical id")
    canonical = record['identifier']['canonical']
    log.debug("checking cache for key: " + canonical)
    cached_copy = cache.check_cache(canonical)
    # if it's not in the cache, then return
    if cached_copy is None:
        log.debug(canonical + " not found in cache")
        return None
    # a queued marker is returned as-is so the caller can report progress
    if cached_copy.get('queued', False):
        log.debug(canonical + " is in the cache and is queued for processing")
        return cached_copy
    # a full record is only usable if it is not stale
    if "bibjson" in cached_copy:
        if _is_stale(cached_copy['bibjson']):
            log.debug(canonical + " is in the cache but is a stale record")
            _invalidate_cache(record)
            return None
    # otherwise, just return the cached copy
    log.debug(canonical + " is in the cache")
    return cached_copy
def _canonicalise_identifier(record):
    """
    Use the type-appropriate plugin to add a "canonical" field to
    record['identifier']; the canonical form is what keys the cache and
    the archive.
    """
    # verify that we have everything required for this step
    if "identifier" not in record:
        raise model_exceptions.LookupException("no identifier in record object")
    identifier = record['identifier']
    if "id" not in identifier:
        raise model_exceptions.LookupException("bibjson identifier object does not contain an 'id' field")
    if "type" not in identifier:
        raise model_exceptions.LookupException("bibjson identifier object does not contain a 'type' field")
    # load the relevant plugin based on the "type" field, and then run it on the identifier
    p = plugin.PluginFactory.canonicalise(identifier['type'])
    if p is None:
        raise model_exceptions.LookupException("no plugin for canonicalising " + identifier['type'])
    p.canonicalise(identifier)
def _detect_verify_type(record):
    """
    Give every configured type-detection plugin a chance to augment/verify
    the "type" of record['identifier'].
    """
    # verify that the record has an identifier key, which is required for this operation
    if "identifier" not in record:
        raise model_exceptions.LookupException("no identifier in record object")
    if "id" not in record['identifier']:
        raise model_exceptions.LookupException("bibjson identifier object does not contain an 'id' field")
    # run through /all/ of the plugins and give each a chance to augment/check
    # the identifier
    for p in plugin.PluginFactory.type_detect_verify():
        p.type_detect_verify(record['identifier'])
def _start_back_end(record):
    """
    kick off the asynchronous licence lookup process. There is no need for this to return
    anything, although a handle on the asynchronous chain is provided for convenience of
    testing
    """
    log.debug("injecting record into asynchronous processing chain: " + str(record))
    # celery chain: provider detection -> licence detection -> archive/cache storage
    ch = chain(detect_provider.s(record), provider_licence.s(), store_results.s())
    r = ch.apply_async()
    return r
############################################################################
# Celery Tasks
############################################################################
@celery.task(name="openarticlegauge.workflow.detect_provider")
def detect_provider(record):
    """
    Celery task: apply every provider-detection plugin registered for the
    identifier's type to the record, then pass the record down the chain.
    Records without an identifier type are passed through untouched.
    """
    # Step 1: see if we can actually detect a provider at all?
    # as usual, this should never happen, but we should have a way to
    # handle it
    if not record.has_key("identifier"):
        return record
    if not record['identifier'].has_key("type"):
        return record
    # Step 2: get the provider plugins that are relevant, and
    # apply each one until a provider string is added
    plugins = plugin.PluginFactory.detect_provider(record['identifier']["type"])
    for p in plugins:
        log.debug("applying plugin " + str(p._short_name))
        p.detect_provider(record)
    # we have to return the record, so that the next step in the chain
    # can deal with it
    log.debug("yielded result " + str(record))
    return record
@celery.task(name="openarticlegauge.workflow.provider_licence")
def provider_licence(record):
    """
    Celery task: find the licence-detection plugin that supports the record's
    provider and run it. If no licence is detected, an explicit
    "failed-to-obtain-license" entry is recorded so the outcome is never silent.
    """
    # Step 1: check that we have a provider indicator to work from
    if not record.has_key("provider"):
        log.debug("record has no provider, so unable to look for licence: " + str(record))
        return record
    # Step 2: get the plugin that will run for the given provider
    p = plugin.PluginFactory.license_detect(record["provider"])
    if p is None:
        log.debug("No plugin to handle provider: " + str(record['provider']))
        return record
    log.debug("Plugin " + str(p) + " to handle provider " + str(record['provider']))
    # Step 3: run the plugin on the record
    if "bibjson" not in record:
        # if the record doesn't have a bibjson element, add a blank one
        record['bibjson'] = {}
    p.license_detect(record)
    # was the plugin able to detect a licence?
    # if not, we need to add an unknown licence for this provider
    if "license" not in record['bibjson'] or len(record['bibjson'].get("license", [])) == 0:
        log.debug("No licence detected by plugin " + p._short_name + " so adding unknown licence")
        recordmanager.add_license(record,
            url=config.unknown_url,
            type="failed-to-obtain-license",
            open_access=False,
            error_message="unable to detect licence",
            category="failure",
            provenance_description="a plugin ran and failed to detect a license for this record. This entry records that the license is therefore unknown",
            handler=p._short_name,
            handler_version=p.__version__
        )
        # describe_license_fail(record, "none", "unable to detect licence", "", config.unknown_url, p._short_name, p.__version__)
    # we have to return the record so that the next step in the chain can
    # deal with it
    log.debug("plugin " + str(p) + " yielded result " + str(record))
    return record
@celery.task(name="openarticlegauge.workflow.store_results")
def store_results(record):
    """
    Celery task: guarantee the record carries a licence (adding an "unknown"
    one if necessary), remove the queued flag, then write the bibjson to the
    archive and refresh the cache entry.
    """
    # Step 1: ensure that a licence was applied, and if not apply one
    if "bibjson" not in record:
        # no bibjson record, so add a blank one
        log.debug("record does not have a bibjson record.")
        record['bibjson'] = {}
    if "license" not in record['bibjson'] or len(record['bibjson'].get("license", [])) == 0:
        # the bibjson record does not contain a license list OR the license list is of zero length
        log.debug("Licence could not be detected, therefore adding 'unknown' licence to " + str(record['bibjson']))
        recordmanager.add_license(record,
            url=config.unknown_url,
            type="failed-to-obtain-license",
            open_access=False,
            error_message="unable to detect licence",
            category="failure",
            provenance_description="no plugin was found that would try to detect a licence. This entry records that the license is therefore unknown",
        )
        # describe_license_fail(record, "none", "unable to detect licence", "", config.unknown_url)
    # Step 2: unqueue the record (has_key - this module is Python 2)
    if record.has_key("queued"):
        log.debug(str(record['identifier']) + ": removing this item from the queue")
        del record["queued"]
    # Step 3: update the archive
    _add_identifier_to_bibjson(record['identifier'], record['bibjson'])
    log.debug(str(record['identifier']) + ": storing this item in the archive")
    models.Record.store(record['bibjson'])
    # Step 4: update the cache
    log.debug(str(record['identifier']) + ": storing this item in the cache")
    _update_cache(record)
    # we have to return the record so that the next step in the chain can
    # deal with it (if such a step exists)
    log.debug("yielded result " + str(record))
    return record
def _add_identifier_to_bibjson(identifier, bibjson):
# FIXME: this is pretty blunt, could be a lot smarter
if not bibjson.has_key("identifier"):
bibjson["identifier"] = []
found = False
for identifier in bibjson['identifier']:
if identifier.has_key("canonical") and identifier['canonical'] == bibjson['identifier']['canonical']:
found = True
break
if not found:
bibjson['identifier'].append(identifier)
|
{"/openarticlegauge/workflow.py": ["/openarticlegauge/slavedriver.py"]}
|
2,706
|
rossmounce/OpenArticleGauge
|
refs/heads/master
|
/openarticlegauge/slavedriver.py
|
# Celery application bootstrap.  Builds the shared ``celery`` app object
# that task modules import (workflow.py decorates its tasks with
# ``@celery.task`` -- presumably against this instance; confirm wiring).
from __future__ import absolute_import
from celery import Celery
celery = Celery()
# Apply the project's celery settings module to the app.
from openarticlegauge import celeryconfig
celery.config_from_object(celeryconfig)
# Optional configuration, see the application user guide.
celery.conf.update(
    # Expire stored task results after one hour to keep the backend small.
    CELERY_TASK_RESULT_EXPIRES=3600,
)
if __name__ == '__main__':
    # Running this module directly starts a worker.
    celery.start()
|
{"/openarticlegauge/workflow.py": ["/openarticlegauge/slavedriver.py"]}
|
2,707
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_utilities.py
|
import math
import unittest
import utilities
class UtilitiesTestCase(unittest.TestCase):
    """Checks that utilities.unit_angle wraps any radian value into the
    [0, 2*pi] range without changing the direction it represents."""

    def test_unit_circle_angle(self):
        hyp = 5
        # Expected opposite side length for each raw (possibly negative) angle.
        expected = {a: hyp * math.sin(a) for a in range(-20, 20)}
        for raw_angle, opposite in expected.items():
            with self.subTest(angle=raw_angle):
                wrapped = utilities.unit_angle(raw_angle)
                # The wrapped angle must land inside the unit-circle range...
                self.assertGreaterEqual(wrapped, 0)
                self.assertLessEqual(wrapped, math.pi * 2)
                # ...while still describing the same direction as the raw angle.
                self.assertAlmostEqual(opposite, hyp * math.sin(wrapped), 10)

    def test_unit_circle_angle_bounds(self):
        hyp = 10
        for boundary in (0, math.pi * 2):
            with self.subTest(angle=boundary):
                wrapped = utilities.unit_angle(boundary)
                # Both boundaries point along the positive x axis: the full
                # hypotenuse projects onto the adjacent side, none onto the
                # opposite side.
                self.assertAlmostEqual(hyp * math.cos(wrapped), hyp)
                self.assertAlmostEqual(hyp * math.sin(wrapped), 0)
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,708
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_weapon.py
|
import unittest
import utilities
from weapon import *
from game import *
class WeaponSimplifiedTestCase(unittest.TestCase):
    """Behavioural tests for weapon.WeaponSimplified: ammo accounting,
    fire-rate gating and bullet spawning."""
    def setUp(self):
        # One weapon with a huge ammo pool, owned by a dummy entity at
        # screen centre, so each test can fire freely.
        self.fire_rate = 3  # bullets per second
        self.world = World()
        self.owner_location = Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
        self.owner = GameEntity(self.world, 'dummy', None, self.owner_location)
        self.ammo = 9999
        self.damage = 10
        self.weapon = WeaponSimplified(self.world, self.owner, self.fire_rate, self.damage, self.ammo)
    def test_ammunition_decrease_1tick(self):
        # Firing once after a full tick consumes exactly one round.
        self.weapon.process(TICK_SECOND)
        self.weapon.fire()
        self.assertEqual(self.weapon.ammo, self.ammo - 1)
    # def test_ammunition_decrease_2sec(self):
    #     seconds = 2
    #     self.weapon.process(seconds)
    #     self.assertEqual(self.weapon.ammo, self.ammo - self.fire_rate * seconds)
    def test_after_2seconds_ready_to_fire(self):
        self.weapon.fire()
        self.assertFalse(self.weapon.ready_to_fire)
        self.weapon.process(2)
        # NOTE(review): this *assigns* ready_to_fire instead of asserting
        # it, so the test can never fail here; presumably it was meant to
        # be self.assertTrue(self.weapon.ready_to_fire) -- confirm.
        self.weapon.ready_to_fire = True
        pass
    def test_bullets_spawned_on_fire(self):
        # Firing must add at least one entity (the bullet) to the world.
        self.weapon.process(1)
        self.weapon.fire()
        self.assertGreater(self.world.entity_count(), 0)
    def test_bullets_damage(self):
        # Every spawned bullet must carry the weapon's damage value.
        # NOTE(review): no fire() call here, so the generator below may be
        # empty and the loop body never run -- confirm intent.
        self.weapon.process(1)
        bullets = (e for e in self.world.entities.values() if e.name == 'bullet')
        for b in bullets:
            with self.subTest(bullet=b):
                self.assertEqual(b.damage, self.weapon.damage)
    def test_no_ammo(self):
        # With zero ammo, firing must change nothing.
        self.weapon.ammo = 0
        self.weapon.process(TICK_SECOND)
        self.weapon.fire()
        self.assertEqual(self.weapon.ammo, 0)
        self.assertEqual(self.weapon.accumulator, 0) # accumulator = 0, since there is no more ammo
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,709
|
marvin939/ZombiePygame
|
refs/heads/master
|
/run.py
|
import pygame
from pygame.locals import *
from game import *
import sys
import mobs
from manager import ImageManager
from random import randint
image_dude = None
TITLE = 'Zombie Defence v0.0.0'
def main():
    """Initialise pygame, build the world, then run the fixed-FPS
    event/update/render loop until the window is closed."""
    pygame.init()
    screen = pygame.display.set_mode(SCREEN_SIZE)
    pygame.display.set_caption(TITLE)
    clock = pygame.time.Clock()
    world = World()
    # The image manager is module-global so setup_world() can reach it.
    global image_dude
    image_dude = ImageManager('data/images/')
    setup_world(world)
    time_passed = 0
    while True:
        # Show the measured frame rate in the window title (skip the first
        # frame to avoid dividing by zero).
        if time_passed > 0:
            pygame.display.set_caption('{title} {fps:>.0f} FPS'.format(title=TITLE, fps=1000 / time_passed))
        for event in pygame.event.get():
            if event.type == QUIT:
                quit_game()
        # Dirty way of attacking the enemy
        lmb, mmb, rmb = pygame.mouse.get_pressed()
        mouse_x, mouse_y = pygame.mouse.get_pos()
        if lmb:
            # Damage the zombie (if any) within 32px of the cursor.
            e = world.get_close_entity('zombie', Vector2(mouse_x, mouse_y), radius=32)
            if e is not None:
                print('zombie found @ {}; state: {}'.format(e.location, e.brain.active_state.name))
                e.hp -= 1
        world.process(time_passed)
        screen.fill(BLACK)
        world.render(screen)
        pygame.display.update()
        # time_passed is in milliseconds, as consumed by World.process().
        time_passed = clock.tick(FPS)
def quit_game():
    """Shut pygame down and terminate the process."""
    pygame.quit()
    sys.exit()
def setup_world(world):
    """Populate the world with 20 zombies, 5 survivors and one sentry gun.

    world -- the game World entities are added to.  Relies on the
    module-global ``image_dude`` ImageManager having been initialised by
    main() beforehand.
    """
    # Create RED sprite for zombie
    zombie_surf = image_dude['zombie.png']
    for i in range(20):
        z_width, z_height = zombie_surf.get_size()
        # Bug fix: use floor division -- random.randint requires integer
        # bounds, and plain / yields floats on Python 3 (this codebase is
        # Python 3: see super().__init__ in mobs.py), making randint raise.
        randx = randint(z_width // 2, SCREEN_WIDTH - z_width // 2)
        randy = randint(z_height // 2, SCREEN_HEIGHT - z_height // 2)
        z_location = Vector2(randx, randy)
        zombie = mobs.Zombie(world, zombie_surf, z_location)
        world.add_entity(zombie)

    survivor_surf = pygame.Surface((32, 32)).convert()
    survivor_surf.fill(GREEN)
    for i in range(5):
        s_width, s_height = survivor_surf.get_size()
        randx = randint(s_width // 2, SCREEN_WIDTH - s_width // 2)
        randy = randint(s_height // 2, SCREEN_HEIGHT - s_height // 2)
        s_location = Vector2(randx, randy)
        survivor = mobs.Survivor(world, survivor_surf, s_location)
        world.add_entity(survivor)

    sentry_gun_surf = image_dude['sentrygun.png']
    w, h = sentry_gun_surf.get_size()
    for i in range(1, 2):
        # One gun at a third of the screen width, vertically centred
        # (Vector2 accepts float coordinates, so / is fine here).
        x, y = (SCREEN_WIDTH * i / 3, SCREEN_HEIGHT / 2)
        sentry_gun = mobs.SentryGun(world, sentry_gun_surf, Vector2(x, y))
        world.add_entity(sentry_gun)

    for e in world.entities.values():
        print(e)
# Entry point guard: run the game only when executed as a script.
if __name__ == '__main__':
    main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,710
|
marvin939/ZombiePygame
|
refs/heads/master
|
/demo/demo_turret_rotate.py
|
from encodings.punycode import selective_find
import pygame
from manager import ImageManager
from game import *
from pygame.locals import *
from pygame.math import Vector2
from mobs import *
image_manager = None
def main():
    """Demo loop: a sentry gun sweeps at screen centre; pressing any
    mouse button spawns a zombie at the cursor for it to acquire."""
    pygame.init()
    screen = pygame.display.set_mode(SCREEN_SIZE)
    clock = pygame.time.Clock()
    # Shared with spawn_zombie() below via the module global.
    global image_manager
    image_manager = ImageManager('../data/images')
    world = World()
    sentry_gun = SentryGun(world, image_manager['sentrygun.png'], Vector2(SCREEN_WIDTH / 2.0, SCREEN_HEIGHT / 2.0))
    '''
    zombie = Zombie(world, image_manager['zombie.png'], Vector2(*pygame.mouse.get_pos()))
    zombie.hp = math.inf
    zombie.brain = StateMachine() # Reset brain to 0
    '''
    world.add_entity(sentry_gun)
    #world.add_entity(zombie)
    #sentry_gun.target = zombie
    time_passed = 0
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                return
        screen.fill((0, 0, 0))
        world.process(time_passed)
        mouse_x, mouse_y = mouse_pos = pygame.mouse.get_pos()
        mouse_location = Vector2(mouse_pos)
        #zombie.location = mouse_location
        # Any held mouse button spawns a zombie at the cursor (one per frame).
        if any(pygame.mouse.get_pressed()):
            spawn_zombie(world, mouse_location)
        # Draw center cross-hair lines:
        pygame.draw.line(screen, (255, 0, 0), (0, SCREEN_HEIGHT/2), (SCREEN_WIDTH, SCREEN_HEIGHT/2))
        pygame.draw.line(screen, (255, 0, 0), (SCREEN_WIDTH / 2, 0), (SCREEN_WIDTH / 2, SCREEN_HEIGHT))
        world.render(screen)
        #print(sentry_gun.brain.active_state.name)
        #print('Entity count:', len(world.entities.keys()))
        #print(sentry_gun.turret_angle)
        #print(GameEntity.get_angle(sentry_gun.location, zombie.location))
        pygame.display.update()
        time_passed = clock.tick(FPS)
def spawn_zombie(world, mouse_location):
    """Create a zombie at the given position and register it with the world."""
    new_zombie = Zombie(world, image_manager['zombie.png'], mouse_location)
    world.add_entity(new_zombie)
    print('There are {} entities in this world.'.format(len(world.entities.keys())))
# Entry point guard: run the demo only when executed as a script.
if __name__ == '__main__':
    main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,711
|
marvin939/ZombiePygame
|
refs/heads/master
|
/utilities.py
|
import math
'''
def unit_angle(angle):
"""Convert radians to unit circle radians' range of 0 to 6.28"""
one_rev = math.pi * 2
if angle > 0:
return divmod(angle, math.pi * 2)[1]
if angle < 0:
angle = divmod(angle, one_rev)[1]
if angle < 0:
return angle + one_rev
return angle
'''
def unit_angle(angle):
    """Convert radians to unit circle radians' range of 0 to 6.28"""
    full_turn = math.pi * 2
    # Python's % with a positive modulus already yields a value in
    # [0, full_turn); the correction below is a belt-and-braces guard.
    wrapped = angle % full_turn
    if wrapped < 0:
        wrapped += full_turn
    return wrapped
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,712
|
marvin939/ZombiePygame
|
refs/heads/master
|
/demo/demo_projectile.py
|
import sys
import pygame
from pygame.math import Vector2
from game import *
from pygame.locals import *
from weapon import Projectile
# Module-level demo setup: window, clock and the shared world.
pygame.init()
screen = pygame.display.set_mode(SCREEN_SIZE)
pygame.display.set_caption('Projectile object demonstration')
clock = pygame.time.Clock()
world = World()
# All projectiles are launched from the screen centre.
CENTER_VEC = Vector2(SCREEN_CENTER)
def main():
    """Event/render loop: spawn a projectile from the screen centre
    towards the mouse on click, and continuously while LMB is held."""
    time_passed = 0
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                terminate()
            elif event.type == MOUSEBUTTONDOWN:
                spawn_projectile(CENTER_VEC, event.pos)
                print(world.entity_count())
        lmb, mmb, rmb = pygame.mouse.get_pressed()
        if lmb:
            # Bug fix: the original reused the stale loop variable ``event``
            # outside the event loop; the last event of a batch need not
            # carry a .pos attribute (e.g. a key press), which raised
            # AttributeError.  Query the mouse position directly instead.
            spawn_projectile(CENTER_VEC, pygame.mouse.get_pos())
        world.process(time_passed)
        screen.fill(BLACK)
        world.render(screen)
        pygame.display.update()
        time_passed = clock.tick(FPS)
def spawn_projectile(from_pos, to_pos):
    """Fire a Projectile from the screen centre towards ``to_pos``.

    Does nothing when the two positions coincide: Vector2.normalize()
    raises ValueError on a zero-length vector, so clicking exactly on the
    centre pixel used to crash the demo.
    """
    offset = Vector2(to_pos) - Vector2(from_pos)
    if not offset.length():
        return
    direction = offset.normalize()
    print('dir', direction)
    proj = Projectile(world, 'bullet', None, CENTER_VEC, direction, max_distance=100)
    world.add_entity(proj)
def terminate():
    """Shut pygame down and exit the process."""
    pygame.quit()
    sys.exit()
# Entry point guard: run the demo only when executed as a script.
if __name__ == '__main__':
    main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,713
|
marvin939/ZombiePygame
|
refs/heads/master
|
/mobs.py
|
from random import randint
from entity import *
from game import *
from pygame.math import Vector2
import math
import utilities
from effects import *
from weapon import WeaponSimplified
class Zombie(SentientEntity):
    """A Zombie wandering aimlessly until shot, then hunting survivors."""
    NAME = 'zombie'
    def __init__(self, world, image, location):
        super().__init__(world, self.NAME, image, location)
        # State machine: wander by default, switch to 'attack' when hurt.
        self.brain.add_state(ZombieExploreState(self))
        self.brain.add_state(ZombieAttackState(self))
        self.brain.set_state('explore')
        # NOTE: speed must be assigned *after* the states above --
        # ZombieAttackState.__init__ resets it via reset_state().
        self.MAX_HP = 80
        self.hp = self.MAX_HP
        self.speed = 50
        self.sight = 50
        #self.enemies = [SentryGun.NAME, Survivor.NAME]
    def process(self, seconds_passed):
        super().process(seconds_passed)
        # Take damage from any sentry-gun bullet overlapping our sprite,
        # consuming the bullet in the process.
        bullet_entity = self.world.get_close_entity('bullet', self.location, self.rect.width / 2)
        if bullet_entity is not None and bullet_entity.owner.name == SentryGun.NAME:
            self.hp -= bullet_entity.damage
            self.world.remove_entity(bullet_entity)
        if self.hp <= 0:
            self.world.remove_entity(self)
    def shot(self):
        # Hook for being shot; currently unused.
        pass
class ZombieExploreState(State):
    """Default wandering behaviour: drift towards random on-screen points
    until the zombie takes damage, then hand over to the attack state."""

    def __init__(self, zombie):
        super().__init__('explore')
        self.entity = zombie

    def do_actions(self):
        # Roughly a 1-in-101 chance per frame of picking a new waypoint.
        if randint(0, 100) == 1:
            self.random_destination()

    def check_conditions(self):
        # Any lost HP means we were shot -> retaliate.
        if self.entity.hp < self.entity.MAX_HP:
            return 'attack'
        return None

    def random_destination(self):
        # Inset the bounds by half the sprite size so the whole sprite
        # stays on screen.
        half_w = int(self.entity.image.get_width() / 2)
        half_h = int(self.entity.image.get_height() / 2)
        x = randint(half_w, int(SCREEN_WIDTH - half_w))
        y = randint(half_h, int(SCREEN_HEIGHT - half_h))
        self.entity.destination = Vector2(x, y)
class ZombieAttackState(ZombieExploreState):
    """Select a random survivor to attack until either is dead."""
    def __init__(self, zombie):
        super().__init__(zombie)
        self.name = 'attack'
        self.zombie = zombie
        self.has_killed = False
        self.target = None
        self.original_speed = -1
        # NOTE(review): this reset_state() call assigns the -1 placeholder
        # above to zombie.speed; it only works because Zombie.__init__
        # assigns speed = 50 *after* constructing its states.
        self.reset_state()
    def entry_actions(self):
        #print('entering attack state...')
        # Remember normal speed, then sprint while attacking.
        self.original_speed = self.zombie.speed
        self.zombie.speed = 200
        self.acquire_target()
    def acquire_target(self):
        # Keep the current target if we already have one.
        if self.target is not None:
            return
        target = self.zombie.world.get_close_entity('survivor', self.zombie.location, radius=self.zombie.sight)
        if target is not None:
            self.target = target
    def do_actions(self):
        # Keep wandering until a target is found
        if self.target is None:
            if randint(1, 10) == 1:
                self.random_destination()
            self.acquire_target()
            return
        # Chase the target and bite when adjacent (< 5 px).
        self.zombie.destination = self.target.location
        if self.zombie.location.distance_to(self.target.location) < 5:
            self.target.hp -= 1
            if self.target.hp <= 0:
                self.has_killed = True
    def check_conditions(self):
        # Return to wandering once the victim is dead.
        if self.has_killed:
            return 'explore'
        return None
    def exit_actions(self):
        self.zombie.hp = self.zombie.MAX_HP # replenish zombie health
        self.reset_state()
    def reset_state(self):
        # Restore speed and forget the target / kill flag.
        self.zombie.speed = self.original_speed
        self.has_killed = False
        self.target = None
class Survivor(SentientEntity):
    """A survivor wandering the map; panics (flees) when wounded while
    zombies are present."""
    NAME = 'survivor'
    def __init__(self, world, image, location):
        super().__init__(world, self.NAME, image, location)
        # State machine: wander normally, flee once hurt.
        self.brain.add_state(SurvivorExploreState(self))
        self.brain.add_state(SurvivorPanicState(self))
        self.brain.set_state('explore')
        self.MAX_HP = 20
        self.hp = self.MAX_HP
        self.speed = 50
    def process(self, seconds_passed):
        super().process(seconds_passed)
        # Die (leave the world) at zero HP.
        if self.hp <= 0:
            self.world.remove_entity(self)
    def shot(self):
        # Hook for being shot; currently unused.
        pass
class SurvivorExploreState(ZombieExploreState):
    """Wander randomly; panic once hurt while any zombie is around."""

    def __init__(self, survivor):
        super().__init__(survivor)

    def do_actions(self):
        # Change directions at least every 100th frame
        if randint(0, 100) == 1:
            self.random_destination()

    def check_conditions(self):
        hurt = self.entity.hp < self.entity.MAX_HP
        # Consume the iterator just enough to know whether any zombie exists.
        zombies_about = any(True for _ in self.entity.world.entities_with_name('zombie'))
        if hurt and zombies_about:
            return 'panic'
        return None
class SurvivorPanicState(SurvivorExploreState):
    """Panicked flight: sprint in rapidly-changing random directions
    until no zombies remain in the world."""
    def __init__(self, survivor):
        super().__init__(survivor)
        self.name = 'panic'
        # Also captured here so exit_actions is safe even if entry_actions
        # never ran.
        self.original_speed = self.entity.speed
    def entry_actions(self):
        # Remember normal speed, then sprint while panicking.
        self.original_speed = self.entity.speed
        self.entity.speed = 300
    def do_actions(self):
        # Change directions frequently
        if randint(0, 10) == 1:
            self.random_destination()
    def check_conditions(self):
        # Survivor should stop panicking once there are no more zombies...
        zombies = tuple(self.entity.world.entities_with_name('zombie'))
        #if not any(zombies):
        if len(zombies) <= 0:
            return 'explore'
        return None
    def exit_actions(self):
        self.entity.speed = self.original_speed
class SentryGun(SentientEntity):
    """Stationary turret: sweeps its barrel until a zombie enters the
    vision cone, then tracks and shoots it until it is gone."""
    NAME = 'sentry_gun'
    def __init__(self, world, image, location):
        super().__init__(world, self.NAME, image, location)
        self.TURRET_ROTATION_RATE_DEGREES = 180
        self.turret_rotation_rate = math.radians(self.TURRET_ROTATION_RATE_DEGREES) # radians per second
        self.__turret_angle = 0
        self.speed = 0  # the gun itself never moves
        self.target = None
        self.CONE_OF_VISION_DEGREES = 60
        self.cone_of_vision = math.radians(self.CONE_OF_VISION_DEGREES) # radians
        self.brain.add_state(self.ScanEnvironment(self))
        self.brain.add_state(self.AttackTargetState(self))
        self.brain.set_state('scan')
        # Infinite ammo, 10 rounds/sec, 10 damage per round.
        self.weapon = WeaponSimplified(self.world, self, 10, 10, math.inf, spread=10)
    def process(self, seconds_passed):
        super().process(seconds_passed)
        # With no target, keep sweeping; otherwise let the weapon cycle.
        if self.target is None:
            self.turret_angle += self.turret_rotation_rate * seconds_passed
            return
        self.weapon.process(seconds_passed)
    def render(self, surface):
        # Draw the sprite rotated to the current barrel angle, centred on
        # our location, plus a sight-line to the target when attacking.
        rotated_image = pygame.transform.rotate(self.image, math.degrees(self.turret_angle))
        x, y = self.location
        w, h = rotated_image.get_size()
        surface.blit(rotated_image, (x - w / 2, y - h / 2))
        if self.target is not None:
            pygame.draw.aaline(surface, VIOLET, self.location, self.target.location)
    def turret_face_entity(self, entity):
        """Point the barrel straight at the given entity."""
        angle = SentientEntity.get_angle(self.location, entity.location)
        self.turret_angle = angle
    @property
    def turret_angle(self):
        # The barrel angle is always exposed/stored normalised to [0, 2*pi].
        return utilities.unit_angle(self.__turret_angle)
    @turret_angle.setter
    def turret_angle(self, angle):
        self.__turret_angle = utilities.unit_angle(angle)
    class ScanEnvironment(State):
        """Sweep state: rotate until a zombie falls inside the vision cone."""
        def __init__(self, turret):
            super().__init__('scan')
            self.turret = turret
        def entry_actions(self):
            #self.turret.target = None
            pass
        def check_conditions(self):
            """Scan surroundings by scanning all enemies around"""
            half_cone = self.turret.cone_of_vision / 2
            turret_angle = utilities.unit_angle(self.turret.turret_angle)
            def is_zombie(entity):
                return entity.name == 'zombie'
            zombies = filter(is_zombie, self.turret.world.entities.values())
            for zombie in zombies:
                angle = SentientEntity.get_angle(self.turret.location, zombie.location)
                # First zombie within the half-open cone becomes the target.
                if turret_angle - half_cone < angle <= turret_angle + half_cone:
                    self.turret.target = zombie
                    return 'attack'
    class AttackTargetState(State):
        """Attack state: face the current target and fire until it is gone."""
        def __init__(self, turret):
            super().__init__('attack')
            self.turret = turret
        def do_actions(self):
            # Rotate straight towards the target, then fire.
            angle = SentientEntity.get_angle(self.turret.location, self.turret.target.location)
            self.turret.turret_angle = angle
            self.turret.weapon.fire()
        def check_conditions(self):
            # Bug fix: check for None *before* dereferencing .hp -- the
            # original evaluated ``target.hp > 0 and target is not None``,
            # so a cleared target raised AttributeError instead of
            # returning to 'scan'.
            if self.turret.target is not None and self.turret.target.hp > 0:
                return
            return 'scan'
        def exit_actions(self):
            self.turret.target = None
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,714
|
marvin939/ZombiePygame
|
refs/heads/master
|
/game.py
|
import copy
import math
import pygame
from pygame.math import Vector2
FPS = 60
SCREEN_WIDTH, SCREEN_HEIGHT = SCREEN_SIZE = (640, 480)
SCREEN_CENTER = (SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
TICK_SECOND = 1000 / FPS / 1000
# Colors
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
WHITE = (255, 255, 255)
VIOLET = (128, 0, 255)
class World:
    """Container for every live game entity; owns the id counter, the
    background surface, and per-frame update/draw dispatch."""

    def __init__(self):
        self.entities = {}
        self.entity_id = 0
        self.background = pygame.Surface(SCREEN_SIZE)  # .convert()
        self.background.fill(BLACK, (0, 0, SCREEN_WIDTH, SCREEN_HEIGHT))

    def add_entity(self, entity):
        """Store an entity, give it an id and advance the current entity_id"""
        new_id = self.entity_id
        self.entities[new_id] = entity
        entity.id = new_id
        self.entity_id = new_id + 1

    def remove_entity(self, entity):
        # Quietly ignore entities that were already removed.
        self.entities.pop(entity.id, None)

    def get(self, entity_id):
        """Retrieve an entity by id (None when unknown)."""
        return self.entities.get(entity_id)

    def process(self, time_passed):
        """Update every entity in the world.

        time_passed is in milliseconds; entities receive seconds.
        """
        seconds_passed = time_passed / 1000.0
        # Snapshot the values so entities may add/remove others while
        # being processed.
        for entity in list(self.entities.values()):
            entity.process(seconds_passed)

    def render(self, surface):
        """Draw the background and all the entities"""
        surface.blit(self.background, (0, 0))
        for entity in self.entities.values():
            entity.render(surface)

    def get_close_entity(self, name, location, radius=100):
        """Find an entity within the radius of a location"""
        center = Vector2(*location)
        for entity in self.entities.values():
            if entity.name == name and center.distance_to(entity.location) < radius:
                return entity
        return None

    def entities_with_name(self, name):
        # Lazy iterator over entities whose name matches exactly.
        return (entity for entity in self.entities.values() if entity.name == name)

    def entity_count(self):
        return len(self.entities)
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,715
|
marvin939/ZombiePygame
|
refs/heads/master
|
/effects.py
|
"""This is where effects go. eg. Explosions, bullet effects, etc. that disappear in time"""
from entity import GameEntity
from game import *
import math
class BulletTravelEffect(GameEntity):
    """A travelling line segment ("tracer"): the head races from origin to
    destination, the tail follows once the segment reaches fx_length, and
    the effect removes itself when both arrive (or the duration expires)."""
    def __init__(self, world, origin, destination, color=YELLOW, speed=1000, length=50, duration=math.inf):
        super().__init__(world, 'bullet_travel', None, origin, destination)
        self.color = color
        # Lifetime cap; defaults to infinity so the effect normally dies
        # only on arrival.
        self.DURATION = duration
        self.remaining_time = self.DURATION
        # Head and tail both start at the origin.
        self.fx_head = Vector2(self.location)
        self.fx_tail = Vector2(self.location)
        self.fx_length = length
        # NOTE(review): fx_heading is computed here but process() re-derives
        # headings every frame; this attribute appears unused afterwards.
        self.fx_heading = (self.destination - self.location).normalize()
        self.fx_speed = speed
        self.stop_fx_head = False
    @property
    def fx_speed(self):
        # Alias for the inherited ``speed`` attribute.
        return self.speed
    @fx_speed.setter
    def fx_speed(self, new_value):
        self.speed = new_value
    def process(self, seconds_passed):
        # Advance the head towards the destination, clamped so it never
        # overshoots.
        if self.fx_head != self.destination:
            head_to_destination_vec = self.destination - self.fx_head
            head_heading = head_to_destination_vec.normalize()
            distance = min(self.speed * seconds_passed, head_to_destination_vec.length())
            self.fx_head += head_heading * distance
        # Start moving the tail once the segment is at full length, or the
        # head has already arrived.
        if self.fx_tail != self.destination and (self.fx_head.distance_to(self.location) >= self.fx_length or self.fx_head == self.destination):
            tail_to_destination_vec = self.destination - self.fx_tail
            tail_heading = tail_to_destination_vec.normalize()
            distance = min(tail_to_destination_vec.length(), self.speed * seconds_passed)
            self.fx_tail += tail_heading * distance
        self.remaining_time -= seconds_passed
        # Expire on timeout or when head and tail have both arrived.
        if self.remaining_time <= 0 or (self.fx_tail == self.fx_head == self.destination):
            self.world.remove_entity(self)
    def render(self, surface):
        pygame.draw.aaline(surface, self.color, self.fx_tail, self.fx_head)
class ExplosionEffect(GameEntity):
    """A filled circle that shrinks from ``radius`` to nothing over half a
    second, then removes itself from the world."""
    def __init__(self, world, location, radius, color=YELLOW):
        """Validate and store the explosion's radius and colour.

        Raises TypeError for a non-numeric radius or unsupported colour
        type, ValueError for a non-positive radius or a colour tuple/list
        that is not exactly (R, G, B).
        """
        super().__init__(world, 'explosion_effect', None, location)
        if type(radius) not in (float, int):
            raise TypeError('radius argument must be a float or int!')
        if radius <= 0:
            raise ValueError('radius value must be greater than 0.')
        if type(color) not in (pygame.Color, tuple, list):
            raise TypeError('color argument must be type tuple or pygame.Color!')
        else:
            if type(color) in (tuple, list) and len(color) != 3:
                raise ValueError('color tuple/list must have 3 values (R, G, B)')
        self.RADIUS = radius  # initial radius; also fixes the shrink rate
        self.radius = radius  # current (shrinking) radius
        self.color = color
    def process(self, seconds_passed):
        # Shrink at 2*RADIUS per second, i.e. the effect lasts 0.5 s.
        self.radius -= seconds_passed * self.RADIUS * 2
        if self.radius <= 0:
            self.world.remove_entity(self)
            return
    def render(self, surface):
        # Bug fix: removed four leftover debug print() calls that ran every
        # frame and spammed stdout.
        x = int(self.location.x)
        y = int(self.location.y)
        pygame.draw.circle(surface, self.color, (x, y), int(self.radius))
class ShockwaveEffect(GameEntity):
    """Placeholder for a shockwave visual effect; not implemented yet."""
    pass
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,716
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_mobs.py
|
import unittest
from manager import ImageManager
import time
from mobs import *
from game import *
from pygame.math import Vector2
class SentryGunTestCase(unittest.TestCase):
    """Exercises SentryGun targeting/state-machine behaviour against a
    single Zombie in a live pygame world.

    A real display surface is required because ImageManager loads images
    with convert_alpha().
    """

    def setUp(self):
        pygame.init()
        self.screen = pygame.display.set_mode(SCREEN_SIZE)
        self.image_manager = ImageManager('../data/images/')
        self.sentry_gun_image = self.image_manager['sentrygun.png']
        self.world = World()
        self.TICK_SECOND = 33 / 1000  # one ~30 FPS frame, in seconds
        # Create the sentry gun
        x = SCREEN_WIDTH / 2
        y = SCREEN_HEIGHT / 2
        self.sentry_gun = SentryGun(self.world, self.sentry_gun_image, (x, y))
        self.world.add_entity(self.sentry_gun)
        # Add a couple of zombies
        '''
        for i in range(10):
            zombie_image = self.image_manager['zombie.png']
            zombie = Zombie(self.world, zombie_image, (randint(0, SCREEN_WIDTH), randint(0, SCREEN_HEIGHT)))
            self.world.add_entity(zombie)
        '''
        # Main zombie
        self.zombie = Zombie(self.world, self.image_manager['zombie.png'], (100, 100))
        self.world.add_entity(self.zombie)
        self.world.render(self.screen)
        pygame.display.update()

    def test_turret_face_target(self):
        """Facing the zombie should let the brain pick it up as target."""
        self.sentry_gun.turret_face_entity(self.zombie)
        self.sentry_gun.brain.think()
        self.assertEqual(self.sentry_gun.target, self.zombie)

    def test_target_acquire(self):
        """Aiming the turret at the zombie's angle acquires it as target."""
        # Make the turret face the zombie
        angle = SentientEntity.get_angle(self.sentry_gun.location, self.zombie.location)
        self.sentry_gun.turret_angle = angle
        self.sentry_gun.brain.think()  # Switch states from scan to face
        print(self.sentry_gun.brain.active_state.name)
        self.assertEqual(self.sentry_gun.target, self.zombie)

    @unittest.skip
    def test_rotate_to_target(self):
        """Skipped: rotation-over-time check (loop body commented out)."""
        self.sentry_gun.target = self.zombie
        self.sentry_gun.brain.set_state('face')
        # Do a loop that will repeatedly call think
        '''
        prev_angle = self.sentry_gun.turret_angle
        for i in range(100):
            self.screen.fill((0, 0, 0))
            #with self.subTest(i=i):
            self.sentry_gun.process(self.TICK_SECOND)
            #self.assertNotEqual(self.sentry_gun.turret_angle, prev_angle)
            print('angle:',self.sentry_gun.turret_angle)
            #angle_diff = self.sentry_gun.turret_angle - prev_angle
            #self.assertAlmostEqual(angle_diff, self.sentry_gun.turret_rotation_rate * self.TICK_SECOND, 4)
            prev_angle = self.sentry_gun.turret_angle
            self.world.render(self.screen)
            pygame.display.update()
        '''

    def test_turret_angle(self):
        """turret_angle is stored/returned normalised via utilities.unit_angle."""
        self.assertAlmostEqual(self.sentry_gun.turret_angle,utilities.unit_angle(self.sentry_gun.turret_angle))
        new_angle = 100
        self.sentry_gun.turret_angle = new_angle
        angle = self.sentry_gun.turret_angle
        self.assertEqual(angle, utilities.unit_angle(new_angle))

    def test_entity_angle(self):
        """The base entity angle property is normalised the same way."""
        self.assertAlmostEqual(self.sentry_gun.angle, utilities.unit_angle(self.sentry_gun.angle))
        new_angle = 100
        self.sentry_gun.angle = new_angle
        angle = self.sentry_gun.angle
        self.assertEqual(angle, utilities.unit_angle(new_angle))

    def test_attack_target(self):
        """Full life-cycle: acquire, attack, then drop dead/unseen targets."""
        #self.sentry_gun.face_entity(self.zombie)
        self.sentry_gun.turret_angle = SentientEntity.get_angle(self.sentry_gun.location, self.zombie.location)
        for i in range(10):
            self.sentry_gun.brain.think()
        current_state_name = self.sentry_gun.brain.active_state.name
        #self.assertEqual(current_state_name, 'attack')
        self.assertEqual(self.sentry_gun.target, self.zombie)
        # Kill target and check if it returns to scan mode
        self.zombie.hp -= 10000
        self.sentry_gun.brain.think()
        current_state_name = self.sentry_gun.brain.active_state.name
        self.assertEqual(current_state_name, 'scan')
        self.assertIsNone(self.sentry_gun.target)  # it should no longer target dead zombie
        self.sentry_gun.target = None
        # Move the zombie somewhere it cannot be seen by the turret
        self.zombie.hp = 10
        x = self.sentry_gun.location.x + 100
        y = self.sentry_gun.location.y + 100
        self.zombie.location = Vector2(x, y)
        for i in range(10):
            self.sentry_gun.brain.think()
        self.assertIsNone(self.sentry_gun.target)  # No target since zombie is behind turret
        current_state_name = self.sentry_gun.brain.active_state.name
        self.assertEqual(current_state_name, 'scan')
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,717
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_entity.py
|
import unittest
from pygame.math import Vector2
from game import *
from entity import *
class GameEntityTestCase(unittest.TestCase):
    """Checks SentientEntity facing/angle helpers against a manual
    atan2-based computation."""

    def setUp(self):
        self.world = World()
        self.ENTITY_WIDTH, self.ENTITY_HEIGHT = self.ENTITY_SIZE = (32, 32)
        self.entity_image = pygame.Surface(self.ENTITY_SIZE)
        x = SCREEN_WIDTH / 2
        y = SCREEN_HEIGHT / 2
        self.entityA = SentientEntity(self.world, 'dummy', self.entity_image, location=Vector2(x, y))
        x = SCREEN_WIDTH * 3 / 4
        y = SCREEN_HEIGHT * 3 / 4
        self.entityB = SentientEntity(self.world, 'dummy', self.entity_image, location=Vector2(x, y))

    def test_face_entity(self):
        """face_entity() sets and returns the normalised angle toward the other entity."""
        rotation_a = self.entityA.face_entity(self.entityB)
        # Manually calculate rotation
        vec_diff = self.entityB.location - self.entityA.location
        angle = utilities.unit_angle(-math.atan2(vec_diff.y, vec_diff.x))
        self.assertAlmostEqual(angle, rotation_a, 4)
        self.assertAlmostEqual(angle, self.entityA.angle, 4)

    def test_face_vector(self):
        """face_vector() behaves like face_entity() but takes a raw position."""
        # Do face_vector version:
        rotation_a = self.entityA.face_vector(self.entityB.location)
        # Manually calculate rotation
        vec_diff = self.entityB.location - self.entityA.location
        angle = utilities.unit_angle(-math.atan2(vec_diff.y, vec_diff.x))
        self.assertAlmostEqual(angle, rotation_a, 4)
        self.assertAlmostEqual(angle, self.entityA.angle, 4)

    def test_get_angle(self):
        """The static get_angle() helper matches the manual atan2 computation."""
        angle = SentientEntity.get_angle(self.entityA.location, self.entityB.location)
        # Manually calculate angle
        vec_diff = self.entityB.location - self.entityA.location
        calc_angle = utilities.unit_angle(-math.atan2(vec_diff.y, vec_diff.x))
        self.assertAlmostEqual(calc_angle, angle, 4)
class GameEntityBoundaryRectTestCase(unittest.TestCase):
    """Covers set_rect()/get_rect(): explicit boundary rects, offsets,
    and the fall-back to the image's own rect."""

    def setUp(self):
        self.dummy_surf = pygame.Surface((32, 32))
        self.location = Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
        self.world = World()
        self.entity = GameEntity(self.world, 'dummy', self.dummy_surf, self.location)
        self.world.add_entity(self.entity)
        # What we're interested in:
        self.rect_width = 16  # surface may have 32px width, but entity should really be 16px when performing things
        self.rect_height = 32
        self.boundary_rect = pygame.Rect((0, 0), (self.rect_width, self.rect_height))  # note: x/y don't matter
        self.boundary_rect_offset = Vector2(-self.rect_width / 2, -self.rect_height)  # Offset from entity.location

    def test_set_boundary_rect(self):
        """set_rect() stores the rect's width/height (x/y ignored)."""
        self.entity.set_rect(self.boundary_rect)  # Should ignore rect x and y...
        self.assertEqual(self.entity._GameEntity__rect.width, self.boundary_rect.width)
        self.assertEqual(self.entity._GameEntity__rect.height, self.boundary_rect.height)

    def test_set_boundary_rect_with_offset(self):
        """set_rect() also stores an optional offset vector."""
        self.entity.set_rect(self.boundary_rect, self.boundary_rect_offset)  # Should ignore rect x and y...
        self.assertEqual(self.entity._GameEntity__rect, self.boundary_rect)
        self.assertEqual(self.entity._GameEntity__rect_offset, self.boundary_rect_offset)

    def test_get_boundary_rect(self):
        """get_rect() centres the stored rect on the entity's location."""
        self.entity.set_rect(self.boundary_rect)
        rect = self.entity.get_rect()
        self.assertEqual(self.entity._GameEntity__rect.width, rect.width)
        self.assertEqual(self.entity._GameEntity__rect.height, rect.height)
        # Because there is no offset, the rect will be centered to location
        self.assertEqual(rect.x, self.entity.location.x - rect.width / 2)
        self.assertEqual(rect.y, self.entity.location.y - rect.height / 2)

    def test_get_boundary_rect_with_offsets(self):
        """get_rect() applies the stored offset after centring."""
        self.entity.set_rect(self.boundary_rect, self.boundary_rect_offset)
        rect = self.entity.get_rect()
        loc = self.entity.location
        brect = self.boundary_rect
        self.assertEqual(rect.x, loc.x - brect.width / 2 + self.boundary_rect_offset.x)
        self.assertEqual(rect.y, loc.y - brect.height / 2 + self.boundary_rect_offset.y)

    def test_get_boundary_rect_no_rect_height_width_only(self):
        """Test the get_rect() method to return the entity's image rect instead of rect when there is none assigned.
        This test will not concern the entity's rectangle's X/Y coordinates."""
        rect = self.entity.get_rect()
        image_rect = self.entity.image.get_rect()
        self.assertEqual(rect.width, image_rect.width)
        self.assertEqual(rect.height, image_rect.height)

    def test_get_boundary_rect_no_rect(self):
        """Continuation of above, but considers x and y attributes"""
        rect = self.entity.get_rect()
        image_rect = self.entity.image.get_rect()
        self.assertEqual(rect.x, self.location.x - image_rect.width / 2)
        self.assertEqual(rect.y, self.location.y - image_rect.height / 2)
class SentientEntitySidesTestCase(unittest.TestCase):
    """Checks SentientEntity.get_close_enemy() against enemy name lists."""

    def setUp(self):
        self.world = World()
        # Faction names; each entity lists its enemies by name.
        self.good_guy_name = 'good_guy'
        self.bad_guy_name = 'bad_guy'  # fixed typo: was 'bad_fuy'
        self.other_bad_guy_name = 'bad_man'
        self.good_guy = SentientEntity(self.world, self.good_guy_name, None, Vector2(100, 100), speed=0,
                                       enemies=[self.bad_guy_name, self.other_bad_guy_name])
        self.bad_guy = SentientEntity(self.world, self.bad_guy_name, None, Vector2(150, 140), speed=0,
                                      enemies=[self.good_guy_name])
        self.bad_guy2 = SentientEntity(self.world, self.other_bad_guy_name, None,
                                       Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2), speed=0,
                                       enemies=[self.good_guy_name])
        self.world.add_entity(self.good_guy)
        self.world.add_entity(self.bad_guy)
        self.world.add_entity(self.bad_guy2)

    def test_get_enemy_entity(self):
        """An enemy within the search radius is found and is on the enemies list."""
        enemy = self.good_guy.get_close_enemy(radius=100)
        self.assertIsNotNone(enemy)
        self.assertIn(enemy.name, self.good_guy.enemies)

    def test_get_enemy_entity_other_bad_guy(self):
        """If only the second bad guy is in range, it is the one returned."""
        # Swap positions: move bad_guy far away and put bad_guy2 close by.
        temp_loc = self.bad_guy.location
        self.bad_guy.location = Vector2(*SCREEN_SIZE)
        self.bad_guy2.location = temp_loc
        enemy = self.good_guy.get_close_enemy(radius=100)
        self.assertIsNotNone(enemy)
        self.assertIn(enemy.name, self.good_guy.enemies)
        self.assertEqual(enemy.name, self.other_bad_guy_name)

    def test_get_enemy_entity_beyond_radius(self):
        """No enemy is returned when all enemies are outside the radius."""
        # Fix: use Vector2 like every other location assignment in this
        # file (a bare tuple lacks Vector2's distance methods).
        self.good_guy.location = Vector2(0, 0)
        self.bad_guy.location = Vector2(*SCREEN_SIZE)
        self.bad_guy2.location = Vector2(*SCREEN_SIZE)
        enemy = self.good_guy.get_close_enemy(radius=100)
        self.assertIsNone(enemy)
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,718
|
marvin939/ZombiePygame
|
refs/heads/master
|
/entity.py
|
from pygame.math import Vector2
import math
import pygame
import utilities
#from mobs import *
class GameEntity:
    """Base world object: name, optional image, position, destination, speed.

    ``location`` and ``destination`` are pygame ``Vector2``s; ``angle``
    (radians) is kept normalised through ``utilities.unit_angle``.
    Subclasses override ``process``/``render``.
    """

    def __init__(self, world, name, image, location=None, destination=None, speed=0):
        self.world = world
        self.name = name
        self.image = image  # pygame.Surface, or None for an invisible entity
        self.location = Vector2(location) if location is not None else Vector2(0, 0)
        self.destination = Vector2(destination) if destination is not None else Vector2(0, 0)
        self.speed = speed  # pixels per second
        self.id = 0  # presumably assigned by the world on add — confirm
        self.__angle = 0.0  # always stored normalised (see angle property)
        self.__rect = None  # represents the boundary rectangle
        self.__rect_offset = None
        self.render_offset = None  # how much to offset the image by (relative to location) when rendering to a surface

    @property
    def angle(self):
        return utilities.unit_angle(self.__angle)

    @angle.setter
    def angle(self, angle):
        self.__angle = utilities.unit_angle(angle)

    def render(self, surface):
        """Blit the image centred on location (plus render_offset if set)."""
        if self.image is None:
            return
        # Fix: removed the dead `x, y = 0, 0` pre-assignment; both
        # branches below always bind x and y.
        if self.render_offset is not None:
            x = self.location.x + self.render_offset.x
            y = self.location.y + self.render_offset.y
        else:
            x, y = self.location
        w, h = self.image.get_size()
        surface.blit(self.image, (x - w / 2, y - h / 2))

    def process(self, seconds_passed):
        """Move toward the destination at ``speed``, never overshooting."""
        if self.speed > 0 and self.location != self.destination:
            vec_to_destination = self.destination - self.location
            distance_to_destination = vec_to_destination.length()
            heading = vec_to_destination.normalize()
            travel_distance = min(distance_to_destination, seconds_passed * self.speed)
            self.location += travel_distance * heading

    def face_vector(self, vector):
        """Face the entity towards the vector's location, set the new angle, and return it"""
        # Fix: removed unused local `vec_diff` (get_angle computes it).
        new_angle = self.get_angle(self.location, vector)
        self.angle = new_angle
        return new_angle

    def face_entity(self, entity):
        """Face the entity towards the other entity's location, set the new angle, and return it"""
        return self.face_vector(entity.location)

    @staticmethod
    def get_angle(vectora, vectorb):
        """Retrieve the angle (radians) between vectora and vectorb, where vectorb is the end point, and
        vectora, the starting point"""
        vec_diff = vectorb - vectora
        # atan2 negated because the screen's y axis points downward.
        return utilities.unit_angle(-math.atan2(vec_diff.y, vec_diff.x))

    def set_rect(self, rect, vec_offset=None):
        """Assign a boundary rect (only width/height matter) plus an
        optional offset vector relative to ``location``."""
        self.__rect = rect
        if vec_offset is not None:
            self.__rect_offset = vec_offset

    def get_rect(self):
        """Return the boundary rect centred on ``location`` (offset
        applied), falling back to the image's rect when none is set."""
        if self.__rect is not None:
            new_rect = pygame.Rect(self.__rect)
            new_rect.center = self.location
            if self.__rect_offset is not None:
                new_rect.x += self.__rect_offset.x
                new_rect.y += self.__rect_offset.y
            return new_rect
        img_rect = self.image.get_rect()
        img_rect.center = self.location
        return img_rect

    @property
    def rect(self):
        return self.get_rect()
class SentientEntity(GameEntity):
    """GameEntity driven by a StateMachine brain, with friend/enemy name lists."""

    def __init__(self, world, name, image, location=None, destination=None, speed=0, friends=None, enemies=None):
        super().__init__(world, name, image, location, destination, speed)
        self.friends = friends  # list of entity names considered friendly, or None
        self.enemies = enemies  # list of entity names considered hostile, or None
        self.brain = StateMachine()

    def process(self, seconds_passed):
        """Let the brain think, then do normal GameEntity movement."""
        self.brain.think()
        super().process(seconds_passed)

    def get_close_enemy(self, radius=100):
        """Return the first enemy entity within ``radius``, or None.

        Fix: tolerate ``enemies`` being None (the constructor default)
        instead of raising TypeError when iterating.
        """
        for enemy_name in self.enemies or ():
            found = self.world.get_close_entity(enemy_name, self.location, radius)
            if found is not None:
                return found
        return None
class State:
    """Base class for a single state inside a StateMachine.

    Subclasses override the four hook methods; every default does
    nothing and returns None.
    """

    def __init__(self, name):
        self.name = name

    def do_actions(self):
        """Perform the state's per-think work (no-op by default)."""
        return None

    def check_conditions(self):
        """Return the name of the state to switch to, or None to stay."""
        return None

    def entry_actions(self):
        """Run once when this state becomes active (no-op by default)."""
        return None

    def exit_actions(self):
        """Run once when this state is left (no-op by default)."""
        return None
class StateMachine:
    """Holds named State objects and runs whichever one is active."""

    def __init__(self):
        self.states = {}  # state name -> State object
        self.active_state = None

    def add_state(self, state):
        """Register ``state`` under its own ``name`` attribute."""
        self.states[state.name] = state

    def think(self):
        """Run one tick of the active state; switch if it requests it."""
        current = self.active_state
        if current is None:
            # Nothing to do until set_state() has been called once.
            return
        current.do_actions()
        requested = current.check_conditions()
        if requested is not None:
            self.set_state(requested)

    def set_state(self, new_state_name):
        """Activate the state registered as ``new_state_name``.

        The current state's exit_actions() run first; if the requested
        name is unknown, a warning is printed and the machine remains on
        the (already exited) current state.
        """
        if self.active_state is not None:
            self.active_state.exit_actions()
        if new_state_name not in self.states.keys():
            print('Warning! "{}" not in self.states...'.format(new_state_name))
            return
        self.active_state = self.states[new_state_name]
        self.active_state.entry_actions()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,719
|
marvin939/ZombiePygame
|
refs/heads/master
|
/demo/demo_rotate_towards_mouse.py
|
import pygame
from manager import ImageManager
from game import *
from pygame.locals import *
from pygame.math import Vector2
def main():
    """Interactive demo: a sprite at screen centre rotates to face the mouse.

    Holding the left mouse button drops green marker circles at the
    cursor position.  Closing the window quits.
    """
    pygame.init()
    screen = pygame.display.set_mode(SCREEN_SIZE)
    clock = pygame.time.Clock()
    image_manager = ImageManager('../data/images')
    sprite_image = image_manager['sentrygun.png']
    sprite_location = Vector2(SCREEN_WIDTH / 2.0, SCREEN_HEIGHT / 2.0)
    circles = []  # positions clicked with the left mouse button
    print(sprite_location)
    time_passed = 0
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                return
        screen.fill((0, 0, 0))
        mouse_x, mouse_y = mouse_pos = pygame.mouse.get_pos()
        mouse_location = Vector2(mouse_pos)
        vec_diff = mouse_location - sprite_location
        angle = -math.atan2(vec_diff.y, vec_diff.x)  # atan2's result is inverted controls, so * -1
        #print(angle)
        # rotate() yields a new surface whose size depends on the angle,
        # so the blit position is recomputed each frame to keep it centred.
        rotated_image = pygame.transform.rotate(sprite_image, math.degrees(angle))
        rotated_x = (SCREEN_WIDTH - rotated_image.get_width()) / 2.0
        rotated_y = (SCREEN_HEIGHT - rotated_image.get_height()) / 2.0
        # Draw center cross-hair lines:
        pygame.draw.line(screen, (255, 0, 0), (0, SCREEN_HEIGHT/2), (SCREEN_WIDTH, SCREEN_HEIGHT/2))
        pygame.draw.line(screen, (255, 0, 0), (SCREEN_WIDTH / 2, 0), (SCREEN_WIDTH / 2, SCREEN_HEIGHT))
        if pygame.mouse.get_pressed()[0]:
            circles += [mouse_pos]
        for circle_pos in circles:
            pygame.draw.circle(screen, (0, 255, 0), circle_pos, 5)
        screen.blit(sprite_image, mouse_pos)
        screen.blit(rotated_image, (rotated_x, rotated_y))
        # Why is it the angle offset!?
        #pygame.display.update(pygame.Rect(rotated_x, rotated_y, rotated_image.get_width(), rotated_image.get_height()))
        pygame.display.update()
        time_passed = clock.tick(FPS)

if __name__ == '__main__':
    main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,720
|
marvin939/ZombiePygame
|
refs/heads/master
|
/manager.py
|
import os
import pygame
from errors import *
class ImageManager:
    """Lazily loads and caches pygame image surfaces from a directory.

    Surfaces are fetched with ``manager['name.png']``; each file is
    loaded (with per-pixel alpha) at most once and cached by name.
    """

    def __init__(self, dir='.'):
        # dir: directory the image files live in (made absolute here).
        self.image_directory = os.path.abspath(dir)
        self.surf_dict = {}  # image name -> cached pygame.Surface
        # convert_alpha() requires an initialised display surface.
        if pygame.display.get_surface() is None:
            raise ScreenNotInitialized('ImageManager instances require a screen to be already initialised!')

    def __getitem__(self, item):
        """Load the image even though it has not been loaded before.

        Raises TypeError if ``item`` is not a str, and FileNotFoundError
        if the file does not exist in the image directory.
        """
        surface = None
        try:
            surface = self.surf_dict[item]
        except KeyError:
            # Image has not been loaded before
            if not isinstance(item, str):
                raise TypeError('argument item ({}) must be str!'.format(type(item)))
            image_path = self.__get_image_path(item)
            if not os.path.exists(image_path):
                raise FileNotFoundError('Path: {}'.format(image_path))
            # Load the image and store into dictionary
            surface = pygame.image.load(image_path).convert_alpha()
            self.surf_dict[item] = surface
        return surface

    def __get_image_path(self, image_name):
        # Join the managed directory with the bare image file name.
        return os.path.join(self.image_directory, image_name)

    def __setitem__(self, image_name, surface):
        """Manually name an image surface (key-value pair)"""
        if not isinstance(surface, pygame.Surface):
            raise TypeError('surface argument ({}) must be a pygame.Surface type!'.format(surface))
        self.surf_dict[image_name] = surface
        return surface
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,721
|
marvin939/ZombiePygame
|
refs/heads/master
|
/weapon.py
|
from random import *
from entity import *
from game import *
'''
self.pistol = Weapon(self.weap_damage, \
self.weap_clip, \
self.weap_reload_rate, \
self.weap_fire_rate, \
self.weap_spread, \
self.weap_rounds_per_shot, \
self.weap_projectile_type, \
self.weap_projectile_count)
'''
class Weapon:
    """Configurable weapon: damage, clip/ammo pools, fire/reload rates, spread.

    UPPER_CASE attributes are treated as fixed configuration; ``clip``
    and ``ready`` are the mutable runtime state.
    """

    def __init__(self,
                 damage=1,
                 clip=1,
                 max_ammo=90,
                 reload_rate=1,
                 fire_rate=1,
                 spread=0,
                 rounds_per_shot=1,
                 proj_type=None,
                 num_proj=1,
                 proj_speed=100,
                 warhead=None,
                 factory=None,
                 reload_entire_clip=True,
                 projectile_factory=None):
        # An optional factory callable may pre-configure this instance;
        # note it runs before the defaults below are assigned.
        if factory is not None:
            factory(self)
        self.DAMAGE = damage
        self.clip = clip                # rounds currently in the clip
        self.MAX_CLIP = clip
        self.MAX_AMMO = max_ammo
        self.RELOAD_RATE = reload_rate  # rounds regained per second while reloading
        self.FIRE_RATE = fire_rate
        self.SPREAD = spread
        self.ROUNDS_PER_SHOT = rounds_per_shot
        self.PROJECTILE_TYPE = proj_type
        self.NUM_PROJECTILES = num_proj
        self.PROJECTILE_SPEED = proj_speed
        self.WARHEAD = warhead
        self.ready = True
        self.reload_entire_clip = reload_entire_clip

    def is_ready(self):
        """Return True when the weapon can fire.

        Fix: ``process`` called this method although it did not exist,
        so every call raised AttributeError.
        """
        return self.ready

    def shoot_angled(self, world, angle):
        """Shoot the projectiles at an angle, and add them into the world"""
        pass

    def process(self, seconds_passed):
        """Per-frame update: reload while the clip is empty."""
        if self.clip == 0:
            self.reload(seconds_passed)
        if self.is_ready():
            pass

    def reload(self, seconds_passed):
        """Refill the clip at RELOAD_RATE rounds/second, capped at MAX_CLIP.

        Fix: the cap was missing (only a commented-out stub), so the
        clip could grow without bound.
        """
        self.clip = min(self.clip + self.RELOAD_RATE * seconds_passed, self.MAX_CLIP)
class ProjectileFactory:
    """Callable factory meant to hand out a fresh projectile per use.

    An instance of it will reside in a weapon object.  Construction
    logic is not implemented yet; the parameters are currently ignored.
    """

    def __init__(self, ptype, speed, image, warhead):
        pass
class Projectile(GameEntity):
    """A projectile that flies along a fixed direction until it has
    travelled ``max_distance`` from its spawn point, then despawns."""

    def __init__(self, world, name, image, location, direction_vec, speed=200, damage=0, max_distance=300, owner=None):
        super().__init__(world, name, image, location, None, speed)
        self.direction = direction_vec  # unit vector of travel
        self.damage = damage
        # Fix: copy the spawn point.  The passed-in vector was stored
        # directly before, so when callers pass a live reference such as
        # owner.location (as Projectile.factory does), a moving owner
        # would drag the origin along and corrupt the travelled-distance
        # check in process().
        self.origin = Vector2(location)
        self.max_distance = max_distance
        self.owner = owner

    def process(self, seconds_passed):
        """Advance along ``direction``; remove once max_distance is exceeded."""
        if self.location.distance_to(self.origin) >= self.max_distance:
            self.world.remove_entity(self)
            return
        self.location += self.direction * self.speed * seconds_passed

    def render(self, surface):
        """Draw the image if there is one, else a 1px yellow dot."""
        if self.image is not None:
            super().render(surface)
            return
        pygame.draw.circle(surface, YELLOW, (int(self.location.x), int(self.location.y)), 1)

    @staticmethod
    def factory(type_name, world, owner, weapon):
        """Build a projectile of ``type_name`` fired by ``owner`` with ``weapon``.

        Raises ValueError for unknown type names.
        """
        # Use the turret angle when the owner has one (e.g. a sentry gun).
        angle = owner.angle if not hasattr(owner, 'turret_angle') else owner.turret_angle
        angle *= -1  # Multiply by -1 to fix direction vector
        direction = Vector2(1, 0).rotate(math.degrees(angle) + uniform(-weapon.spread/2, weapon.spread/2))
        if type_name == 'bullet':
            return Projectile(world, 'bullet', None, owner.location, direction, speed=500, damage=weapon.damage, owner=owner)
        raise ValueError('Unknown projectile type name {}'.format(type_name))
class Warhead:
    """Placeholder for warhead behaviour; not implemented yet."""
    pass
class WeaponSimplified(SentientEntity):
    """A simple weapon that fires without reload; just a delay in between.

    NOTE(review): SentientEntity.__init__ is not called here, so base
    attributes (location, image, brain, ...) are never created on this
    object — it only tracks firing state and spawns projectiles on
    behalf of ``owner``.  Confirm the subclassing is intentional.
    """
    def __init__(self, world, owner, fire_rate, damage, ammo, spread=0):
        self.world = world
        self.owner = owner          # entity the projectiles originate from
        self.fire_rate = fire_rate  # shots per second
        self.damage = damage
        self.ammo = ammo
        self.accumulator = 0        # seconds elapsed since the last shot
        self.spread = spread        # max angular spread, in degrees (see Projectile.factory)
        self.ready_to_fire = True
    def render(self, surface):
        # The weapon itself has no visual representation.
        return
    def process(self, seconds_passed):
        """Advance the refire timer; ready again after 1/fire_rate seconds."""
        if self.ammo <= 0:
            self.accumulator = 0
            return
        if self.ready_to_fire:
            # The timer only runs between a shot and the next ready state.
            return
        if self.accumulator >= 1 / self.fire_rate:
            self.accumulator = 0
            self.ready_to_fire = True
        self.accumulator += seconds_passed
    def fire(self):
        """Spawn one bullet via Projectile.factory if ready and ammo remains."""
        if not self.ready_to_fire or self.ammo <= 0:
            return
        self.ready_to_fire = False
        bullet = Projectile.factory('bullet', self.world, self.owner, self)
        self.world.add_entity(bullet)
        self.ammo -= 1
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,722
|
marvin939/ZombiePygame
|
refs/heads/master
|
/demo/demo_weapon.py
|
import sys
import pygame
from pygame.math import Vector2
from game import *
from pygame.locals import *
from weapon import Projectile, WeaponSimplified
from entity import GameEntity
import utilities
# Module-level setup: pygame, window, shared world and demo constants.
pygame.init()
screen = pygame.display.set_mode(SCREEN_SIZE)
pygame.display.set_caption('Projectile object demonstration')
clock = pygame.time.Clock()
world = World()
CENTER_VEC = Vector2(SCREEN_CENTER)
AMMO = 10000     # starting (and refill) ammo count
SPREAD = 10      # angular spread handed to the weapon
FIRE_RATE = 10   # shots per second
def main():
    """Weapon demo: aim with the mouse, hold any button to fire, R refills ammo."""
    time_passed = 0
    player = GameEntity(world, 'player', None, CENTER_VEC)
    world.add_entity(player)
    weapon = WeaponSimplified(world, player, FIRE_RATE, 0, AMMO, spread=SPREAD)
    ready2fire_surf = pygame.Surface((32, 32))  # status square: green=ready, red=loading
    #font_obj = pygame.SysFont()
    #print('\n'.join(pygame.font.get_fonts()))
    font_obj = pygame.font.SysFont('freesans', 32)
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                terminate()
            elif event.type == MOUSEBUTTONDOWN:
                pass
                #print(world.entity_count())
            elif event.type == MOUSEMOTION:
                # Rotate the (invisible) player to face the mouse cursor.
                angle = GameEntity.get_angle(player.location, Vector2(event.pos))
                player.angle = angle
            elif event.type == KEYDOWN:
                if event.key == K_r:
                    weapon.ammo = AMMO  # refill
        seconds_passed = time_passed / 1000
        lmb, mmb, rmb = pygame.mouse.get_pressed()
        if any((lmb, mmb, rmb)):
            weapon.fire()
        # NOTE(review): world.process receives milliseconds here while
        # weapon.process receives seconds — confirm which unit World expects.
        world.process(time_passed)
        weapon.process(seconds_passed)
        screen.fill(BLACK)
        world.render(screen)
        ready2fire_surf.fill(GREEN if weapon.ready_to_fire else RED)
        screen.blit(ready2fire_surf, (0, 0))
        ready2fire_text = font_obj.render('ready' if weapon.ready_to_fire else 'loading',
                                          True,
                                          WHITE)
        screen.blit(ready2fire_text, (32, 0))
        pygame.display.set_caption('Weapon demo; Ammo: {ammo}'.format(ammo=weapon.ammo))
        pygame.display.update()
        time_passed = clock.tick(FPS)
    pass
# def spawn_projectile(from_pos, to_pos):
# direction = (Vector2(to_pos) - Vector2(from_pos)).normalize()
# proj = Projectile(world, 'bullet', None, CENTER_VEC, direction, max_distance=100)
# world.add_entity(proj)
def terminate():
    """Shut pygame down cleanly, then exit the interpreter."""
    pygame.quit()
    sys.exit()
if __name__ == '__main__':
main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,723
|
marvin939/ZombiePygame
|
refs/heads/master
|
/demo/demo_effects.py
|
import time
import sys
import pygame
from game import *
from effects import *
from pygame.locals import *
from manager import ImageManager
# Demo-local colour and frame-rate constants.
GREEN = (0, 255, 0)
FPS = 30
"""
bullet_travel = BulletTravelEffect(world, Vector2(0, 0), Vector2(320, 240))
world.add_entity(bullet_travel)
"""
# Module-level ImageManager handle; assigned in main() once a display exists.
image_dude = None
def main():
    """Run the effects demo.

    Left-click (or hold right button) spawns a bullet-travel effect from
    the screen centre towards the mouse; 'e' spawns an explosion at the
    mouse; 'i' dumps the world's entities. Runs until the window closes.
    """
    pygame.init()
    clock = pygame.time.Clock()
    screen = pygame.display.set_mode((640, 480))
    world = World()
    global image_dude
    image_dude = ImageManager('../data/images')
    time_passed = 0  # milliseconds consumed by the previous frame
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            elif event.type == MOUSEBUTTONDOWN:
                print(event)
                # BUGFIX: compare the button number with ==, not 'is'.
                # 'is' tests object identity; it only "worked" because
                # CPython caches small ints, and may be False elsewhere.
                if event.button == 1:
                    spawn_effect(world)
                    print('fx added')
            elif event.type == KEYDOWN:
                if event.key == K_e:
                    spawn_explosion_effect(world)
                elif event.key == K_i:
                    # Show entities
                    print(world.entities.values())
        # Right mouse button held: spawn an effect every frame.
        if pygame.mouse.get_pressed()[2]:
            spawn_effect(world)
        world.process(time_passed)
        screen.fill((0, 0, 0))
        world.render(screen)
        pygame.display.update()
        # simulate FPS drop
        #time.sleep(0.2)
        time_passed = clock.tick(FPS)
def spawn_effect(world):
    """Add a green bullet-travel effect from the screen centre to the mouse."""
    origin = Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
    target = Vector2(*pygame.mouse.get_pos())
    world.add_entity(BulletTravelEffect(world, origin, target, GREEN, speed=500))
def spawn_explosion_effect(world):
    """Add a violet, radius-50 explosion effect at the mouse position."""
    where = Vector2(*pygame.mouse.get_pos())
    explosion = ExplosionEffect(world, where, 50, color=VIOLET)
    world.add_entity(explosion)
if __name__ == '__main__':
main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,724
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_projectile.py
|
from weapon import Weapon, Projectile, Warhead
from unittest import TestCase
from game import *
import utilities
class DestinationProjectileTestCase(TestCase):
    """Projectile constructed from a destination point (rather than a heading).

    NOTE(review): this constructor call passes one positional argument fewer
    than AngledProjectileTestCase below (no name string before the image) --
    confirm it matches Projectile's actual signature.
    """

    def setUp(self):
        self.warhead = None
        self.speed = 100
        self.world = World()
        # Fire from the screen centre towards the bottom-right corner.
        self.location = Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
        self.destination = Vector2(SCREEN_WIDTH, SCREEN_HEIGHT)
        self.projectile = Projectile(self.world, None, self.location, self.destination, self.speed, self.warhead)
        self.world.add_entity(self.projectile)

    def test_instance(self):
        # Construction in setUp is the assertion: it must not raise.
        pass
class AngledProjectileTestCase(TestCase):
    """Projectile constructed from a unit heading vector and a maximum range."""

    def setUp(self):
        self.speed = 100
        self.world = World()
        self.location = Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
        # math comes via game's star import; unit_angle presumably wraps the
        # angle into a single revolution -- TODO confirm in utilities.py.
        self.angle = utilities.unit_angle(math.radians(300))
        self.direction = Vector2(1, 0).rotate(self.angle)
        self.max_distance = 200
        self.projectile = Projectile(self.world, 'bullet', None, self.location, self.direction, speed=self.speed, damage=0, max_distance=self.max_distance)
        self.world.add_entity(self.projectile)

    def test_instance(self):
        # Construction in setUp is the assertion: it must not raise.
        pass

    def test_max_distance_remove_from_world(self):
        # After travelling exactly max_distance (distance = speed * time),
        # the projectile must remove itself from the world.
        seconds = self.max_distance / self.speed
        self.projectile.process(seconds)
        self.assertNotIn(self.projectile, self.world.entities.values())
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,725
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_world.py
|
from mobs import *
from random import randint, random
import unittest
from game import *
import pygame
NUM_ZOMBIES = 10
NUM_SURVIVORS = 5
NUM_SENTRY_GUNS = 2
class WorldTestCase(unittest.TestCase):
    """Populates a World with a fixed census of mobs and checks the
    name-based query helpers.

    Improvements over the original: the triplicated random-coordinate
    construction is factored into _random_point(), and the unused
    `w, h = dummy_surface.get_size()` locals are removed.
    """

    @staticmethod
    def _random_point():
        # Uniformly random on-screen coordinate.
        return Vector2(random() * SCREEN_WIDTH, random() * SCREEN_HEIGHT)

    def setUp(self):
        self.world = World()
        dummy_surface = pygame.Surface((16, 16))
        # Add zombies
        for i in range(NUM_ZOMBIES):
            self.world.add_entity(Zombie(self.world, dummy_surface, self._random_point()))
        # Add survivors
        for i in range(NUM_SURVIVORS):
            self.world.add_entity(Survivor(self.world, dummy_surface, self._random_point()))
        # Add sentry guns (keep a handle on the last one, as before)
        for i in range(NUM_SENTRY_GUNS):
            self.sentry_gun = SentryGun(self.world, dummy_surface, self._random_point())
            self.world.add_entity(self.sentry_gun)

    def test_list_all_entities_with_name(self):
        # Each mob type must be retrievable by name in the spawned count.
        zombies = tuple(self.world.entities_with_name('zombie'))
        survivors = tuple(self.world.entities_with_name('survivor'))
        sentry_guns = tuple(self.world.entities_with_name('sentry_gun'))
        self.assertEqual(len(zombies), NUM_ZOMBIES)
        self.assertEqual(len(survivors), NUM_SURVIVORS)
        self.assertEqual(len(sentry_guns), NUM_SENTRY_GUNS)

    def test_get_close_entity_type_zombie(self):
        # The nearest 'zombie' to screen centre must actually be a zombie.
        z = self.world.get_close_entity('zombie', SCREEN_CENTER)
        self.assertEqual(z.name, 'zombie')
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,726
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_image_manager.py
|
import unittest
from manager import ImageManager
import pygame
import os
from errors import *
class ImageManagerTestCaseA(unittest.TestCase):
    """Construction guard: ImageManager needs an initialised display surface."""

    def test_try_making_imagemanager(self):
        """ImageManager should raise an error if the screen surface has not been initialised yet"""
        with self.assertRaises(ScreenNotInitialized):
            ImageManager()
class ImageManagerTestCaseB(unittest.TestCase):
    """Exercises ImageManager against the real files in ../data/images/.

    Requires a working display (set_mode) because ImageManager converts
    surfaces; the fixtures backgroundA.jpg / transparent.png must exist.
    """

    def setUp(self):
        # Initialise required stuff
        pygame.init()
        self.screen = pygame.display.set_mode((640, 480))
        self.path = '../data/images/'
        self.imagedude = ImageManager(self.path)  # Load images from data/images/
        self.bg = pygame.image.load(os.path.join(self.path, 'backgroundA.jpg')).convert()  # Load image
        self.bg_width, self.bg_height = self.bg.get_size()
        self.imagedude['backgroundB.jpg'] = self.bg  # Add image

    def test_try_making_imagemanager(self):
        """ImageManager should raise an error if the screen surface has not been initialised yet"""
        # Tear pygame down and re-init without set_mode so no display exists.
        pygame.quit()
        pygame.init()
        with self.assertRaises(ScreenNotInitialized):
            imagemanager = ImageManager()

    def test_add_image_invalid_value(self):
        # Only surfaces may be stored; strings and ints must be rejected.
        with self.assertRaises(TypeError):
            self.imagedude['abc'] = '123'
            self.imagedude['edf'] = 123

    def test_add_images(self):
        # Add image
        image_name = 'bg'
        self.imagedude[image_name] = self.bg
        self.assertEqual(self.imagedude[image_name], self.bg)

    def test_get_image(self):
        # Retrieval returns the very surface stored in setUp.
        bg = self.imagedude['backgroundB.jpg']
        self.assertEqual(bg, self.bg)

    @unittest.skip
    def test_get_image_invalid_type(self):
        with self.assertRaises(TypeError):
            surf = self.imagedude[123123]

    def test_get_image_not_found(self):
        with self.assertRaises(FileNotFoundError):
            surf = self.imagedude['filenotfoundimage.png']

    def test_automatic_load_image(self):
        """Load an image that has not been loaded before"""
        # Make sure that the requested surface is not none
        background = self.imagedude['backgroundA.jpg']
        self.assertIsNotNone(background)
        # Test that the image was actually stored into the dictionary
        self.assertEqual(background, self.imagedude['backgroundA.jpg'])
        # Compare the dimensions of the loaded images
        bgB = pygame.image.load(os.path.join(self.path, 'backgroundA.jpg')).convert()
        background_size = background.get_size()
        bgB_size = bgB.get_size()
        self.assertEqual(background_size, bgB_size)
        # Test loading image that doesn't exist.
        with self.assertRaises(FileNotFoundError):
            image = self.imagedude['asdflkjoiuqeioqwe.jog']
        # Make sure that loading images with invalid image filename types is illegal
        with self.assertRaises(TypeError):
            invalid = self.imagedude[123456]
            invalid = self.imagedude[123456.3]

    def test_transparent_image(self):
        # Test loading an image with alpha
        transparent_image = self.imagedude['transparent.png']
        pixel = transparent_image.get_at((10, 10))
        self.assertNotEqual(pixel, (0, 0, 0))
        self.assertNotEqual(pixel, (255, 255, 255))
        self.assertEqual(transparent_image.get_at((70, 70)), (0, 0, 0))  # BLACK
        self.assertEqual(transparent_image.get_at((35, 70)), (149, 0, 186))  # Arbitrary purple

    def test_pre_cache_all(self):
        # Placeholder: pre-caching behaviour not implemented/asserted yet.
        pass

    def test_directory(self):
        # image_directory must be the absolute form of the constructor path
        # and must list exactly the same files.
        imagedude_path = self.imagedude.image_directory
        #print(imagedude_path)
        self.assertEqual(imagedude_path, os.path.abspath(self.path))
        all_filesA = tuple((entry.name for entry in os.scandir(imagedude_path)))
        all_filesB = tuple((entry.name for entry in os.scandir(self.path)))
        self.assertTupleEqual(all_filesA, all_filesB)
if __name__ == '__main__':
unittest.main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,727
|
marvin939/ZombiePygame
|
refs/heads/master
|
/demo/demo_image_manager.py
|
from manager import ImageManager
import sys
import os
import pygame
import time
# Add 1-dir-up to path (contains manager.py, and errors.py)
# sys.path += [os.path.join(os.getcwd(), '..')]
'''No need to do; just change the working directory of the file @ Run->Edit Configurations...
Don't forget to change relative paths of instances (eg. ImageManager('../data/images/')
to ImageManager('data/images/')'''
def main():
    """Display data/images/backgroundB.jpg fullscreen for two seconds, then quit."""
    pygame.init()
    SCREEN_WIDTH, SCREEN_HEIGHT = SCREEN_SIZE = (640, 480)
    screen = pygame.display.set_mode(SCREEN_SIZE)
    pygame.display.set_caption('[Demo] ImageManager image loading')
    imagedude = ImageManager('data/images')
    # Scale the cached image to the window size and store it back.
    background = pygame.transform.scale(imagedude['backgroundB.jpg'], SCREEN_SIZE)
    imagedude['backgroundB.jpg'] = background
    screen.blit(background, (0, 0))
    pygame.display.update()
    time.sleep(2)
    pygame.quit()
    sys.exit()
if __name__ == '__main__':
main()
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,728
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_warhead.py
|
from weapon import Weapon, Projectile, Warhead
import unittest
from game import *
class WarheadReusabilityTestCase(unittest.TestCase):
    """Warheads should be reusable for different projectiles of same type"""
    # BUGFIX: this class shared the name WarheadTestCase with the class
    # defined later in the module, so the later definition shadowed it and
    # unittest never collected these tests. Renamed to make it discoverable.

    def setUp(self):
        # Original fixture kept as a docstring until Warhead's constructor
        # signature is settled (see the note in DestinationProjectileTestCase).
        """
        self.warhead = None
        self.speed = 100
        self.world = World()
        self.location = Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
        self.projectile = Projectile(self.world, None, self.location, self.destination, self.speed, self.warhead)
        self.world.add_entity(self.projectile)
        """

    def test_instance(self):
        # Placeholder: no construction is performed yet.
        pass
class WarheadTestCase(unittest.TestCase):
    """Holds the default warhead parameters the (future) tests will exercise."""

    def setUp(self):
        # Baseline warhead configuration; no assertions are written yet.
        self.damage = 0
        self.radius = 0                  # blast radius
        self.vs_armor = 0.5              # multiplier against armour
        self.vs_flesh = 1                # multiplier against flesh
        self.weapon = None               # If there is one, the weapon will fire too
        self.attached_effect = None      # optional visual effect
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,729
|
marvin939/ZombiePygame
|
refs/heads/master
|
/tests/test_effects.py
|
import copy
from effects import BulletTravelEffect, ExplosionEffect
from game import World
import unittest
from pygame.math import Vector2
from game import *
# Seconds represented by a single frame at 30 FPS (1/30 s, ~33 ms).
TICK_SECOND = 1000 / 30 / 1000  # One tick represented by 30 frames per second; 33 milliseconds
class BulletTravelEffectTestCase(unittest.TestCase):
    """Tests the bullet tracer effect: a head point races towards the
    destination followed by a tail; when both arrive the effect removes
    itself from the world."""

    def setUp(self):
        self.world = World()
        '''
        self.origin = Vector2(0, SCREEN_HEIGHT)
        self.destination = Vector2(SCREEN_WIDTH, 0)
        self.bullet_effect = BulletTravelEffect(self.world, self.origin, self.destination)
        '''
        # Diagonal from the bottom-left to the top-right screen corner.
        self.origin = Vector2(0, SCREEN_HEIGHT)
        self.destination = Vector2(SCREEN_WIDTH, 0)
        self.color = YELLOW
        #self.duration = 1 / 10  # 1/10th of a second
        self.bullet = BulletTravelEffect(self.world, self.origin, self.destination, color=self.color)
        self.world.add_entity(self.bullet)

    def test_instance(self):
        # Constructor must faithfully store origin/destination/color/duration.
        origin = Vector2(0, SCREEN_HEIGHT)
        destination = Vector2(SCREEN_WIDTH, 0)
        color = YELLOW
        duration = 1/10  # 1/10th of a second
        bullet = BulletTravelEffect(self.world, origin, destination, color=color, duration=duration)
        self.assertEqual(bullet.location, origin)
        self.assertEqual(bullet.destination, destination)
        self.assertEqual(bullet.color, color)
        self.assertEqual(bullet.remaining_time, duration)

    def test_location_destination(self):
        # Placeholder: covered indirectly by test_location below.
        pass

    def test_fade(self):
        d = 1
        self.bullet.DURATION = d
        self.bullet.remaining_time = d  # seconds
        # Test when the bullet trail/line starts to fade
        self.bullet.process(TICK_SECOND)
        self.assertLess(self.bullet.remaining_time, self.bullet.DURATION)
        self.assertEqual(self.bullet.remaining_time, self.bullet.DURATION - TICK_SECOND)

    def test_remaining_zero(self):
        # Kill the effect
        self.bullet.remaining_time = 0
        self.bullet.process(TICK_SECOND)
        self.assertNotIn(self.bullet, self.world.entities.values())

    def test_bullet_travel(self):
        """Test the bullet_head and bullet_tail vectors"""
        # Before any tick both endpoints sit at the origin.
        self.assertEqual(self.bullet.fx_head, self.bullet.location)
        self.assertEqual(self.bullet.fx_tail, self.bullet.location)
        #self.assertEqual(self.bullet.fx_length, 100)
        heading = (self.bullet.destination - self.bullet.location).normalize()
        self.assertEqual(self.bullet.fx_heading, heading)
        # Do one TICK; the head should start moving, while the tail remains the same
        self.bullet.process(TICK_SECOND)
        travelled = (TICK_SECOND * self.bullet.fx_speed)
        self.assertEqual(self.bullet.fx_head.distance_to(self.bullet.location), travelled)
        self.assertEqual(self.bullet.fx_tail, self.bullet.location)

    def test_process_head(self):
        num_ticks = 1000
        ticks = list((TICK_SECOND for i in range(num_ticks)))
        tick_accumulate = 0
        expected_head = {}
        b = self.bullet
        # build expected head; assumptions of fx_head's whereabouts relative to tick_accumulate
        for tick in ticks:
            heading = (b.destination - b.location).normalize()
            new_location = b.fx_head + (heading * (tick_accumulate + tick) * b.speed)
            # ^ accumulate current tick since it is leading tail
            expected_head[tick_accumulate] = new_location
            tick_accumulate += tick
        tick_accumulate = 0
        for i, tick in enumerate(ticks):
            if b not in self.world.entities.values():
                # bullet is no longer in this world... but still exists as object;
                # eg. b's fx_head == fx_tail == fx_destination
                break
            with self.subTest(tick_accumulate=tick_accumulate, i=i):
                b.process(tick)
                expected = expected_head[tick_accumulate]
                if b.fx_head != b.destination:
                    self.assertEqual(expected, b.fx_head)
                tick_accumulate += tick

    def test_location(self):
        # Freshly-spawned effect: endpoints at origin, entity registered.
        b = self.bullet
        self.assertEqual(b.fx_tail, b.location)
        self.assertEqual(b.fx_head, b.location)
        self.assertNotEqual(b.fx_head, b.destination)
        self.assertNotEqual(b.fx_tail, b.destination)
        self.assertIn(b, self.world.entities.values())

    def test_process_tail(self):
        self.assertIsNotNone(self.bullet)
        num_ticks = 1000
        ticks = list((TICK_SECOND for i in range(num_ticks)))
        tick_accumulate = 0
        expected_head = {}
        expected_tail = {}
        b = self.bullet
        self.assertIn(TICK_SECOND, ticks)
        self.assertEqual(num_ticks, len(ticks))
        # build expected tail; assumptions of fx_tail's whereabouts relative to tick_accumulate
        for tick in ticks:
            tail_heading = (b.destination - b.fx_tail).normalize()
            new_tail_location = b.fx_tail + (tail_heading * tick_accumulate * b.speed)
            expected_tail[tick_accumulate] = new_tail_location
            tick_accumulate += tick
        self.assertNotEqual(id(b.fx_tail), id(b.fx_head))
        tick_accumulate = 0
        for i, tick in enumerate(ticks):
            if b not in self.world.entities.values():
                break
            with self.subTest(tick_accumulate=tick_accumulate, i=i):
                b.process(tick)
                #print(expected_tail[tick_accumulate], b.fx_tail, sep='=')
                self.assertEqual(expected_tail[tick_accumulate], b.fx_tail)
                tick_accumulate += tick

    @unittest.skip
    def test_each_tick(self):
        # There's a bug here, where the length is far less than fx_length,
        # relative to a single tick and its speed... But visually, it's not a big problem.
        num_ticks = 100
        ticks = list((TICK_SECOND for i in range(num_ticks)))
        b = self.bullet
        tick_accumulate = 0
        for tick in ticks:
            b.process(tick)
            with self.subTest(tick_accumulate=tick_accumulate):
                if b.fx_head != b.destination and b.fx_tail != b.destination and \
                        b.fx_tail != b.location and b.fx_head != b.location:
                    self.assertAlmostEqual(b.fx_head.distance_to(b.fx_tail), b.fx_length, 1)
            tick_accumulate += tick

    def test_die(self):
        """Effect should die when both fx_head/tail reaches destination"""
        self.bullet.fx_head = self.bullet.destination
        self.bullet.fx_tail = self.bullet.fx_head
        self.bullet.process(TICK_SECOND)
        self.assertNotIn(self.bullet, self.world.entities.values())
class ExplosionEffectTestCase(unittest.TestCase):
    """Lifecycle tests for the shrinking explosion effect."""

    def setUp(self):
        self.exp_radius = 50
        self.exp_duration = 1  # second
        self.world = World()
        self.exp_location = Vector2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2)
        #self.exp_image = pygame.Surface((32, 32)).fill(RED)
        self.exp_color = RED
        self.explosion = ExplosionEffect(self.world, self.exp_location, self.exp_radius, self.exp_color)
        self.world.add_entity(self.explosion)

    def test_instantiate_radius(self):
        # Negative radius
        with self.assertRaises(ValueError):
            ExplosionEffect(self.world, self.exp_location, -1)

    def test_instantiate_color(self):
        # Color argument type
        with self.assertRaises(TypeError):
            ExplosionEffect(self.world, self.exp_location, self.exp_radius, color=1)
        # Color argument length
        with self.assertRaises(ValueError):
            ExplosionEffect(self.world, self.exp_location, self.exp_radius, color=(100,200))

    def test_die_radius_zero(self):
        # Once the radius reaches zero the effect must leave the world.
        self.explosion.radius = 0
        self.explosion.process(TICK_SECOND)
        self.assertNotIn(self.explosion, self.world.entities.values())

    def test_radius_shrink(self):
        """Explosion should shrink based on TICK"""
        old_radius = self.explosion.radius
        self.explosion.process(TICK_SECOND)
        self.assertLess(self.explosion.radius, old_radius)
        # num_ticks = 0
        # while self.explosion.radius >= 0:
        #     self.explosion.process(TICK_SECOND)
        #     print('radius:', self.explosion.radius)
        #     num_ticks += 1
        # print(num_ticks)
|
{"/tests/test_utilities.py": ["/utilities.py"], "/tests/test_weapon.py": ["/utilities.py", "/weapon.py", "/game.py"], "/run.py": ["/game.py", "/mobs.py", "/manager.py"], "/demo/demo_turret_rotate.py": ["/manager.py", "/game.py", "/mobs.py"], "/demo/demo_projectile.py": ["/game.py", "/weapon.py"], "/mobs.py": ["/entity.py", "/game.py", "/utilities.py", "/effects.py", "/weapon.py"], "/effects.py": ["/entity.py", "/game.py"], "/tests/test_mobs.py": ["/manager.py", "/mobs.py", "/game.py"], "/tests/test_entity.py": ["/game.py", "/entity.py"], "/entity.py": ["/utilities.py"], "/demo/demo_rotate_towards_mouse.py": ["/manager.py", "/game.py"], "/weapon.py": ["/entity.py", "/game.py"], "/demo/demo_weapon.py": ["/game.py", "/weapon.py", "/entity.py", "/utilities.py"], "/demo/demo_effects.py": ["/game.py", "/effects.py", "/manager.py"], "/tests/test_projectile.py": ["/weapon.py", "/game.py", "/utilities.py"], "/tests/test_world.py": ["/mobs.py", "/game.py"], "/tests/test_image_manager.py": ["/manager.py"], "/demo/demo_image_manager.py": ["/manager.py"], "/tests/test_warhead.py": ["/weapon.py", "/game.py"], "/tests/test_effects.py": ["/effects.py", "/game.py"]}
|
2,732
|
AklerQ/python_training
|
refs/heads/master
|
/data/contact_data.py
|
from model.contact import Contact
import random
import string
def random_string(prefix, maxlen):
    """Return *prefix* followed by 0..maxlen-1 random letters/digits/spaces.

    Spaces are over-weighted (10x) so generated names often contain blanks.
    """
    symbols = string.ascii_letters + string.digits + " " * 10
    tail = "".join(random.choice(symbols) for _ in range(random.randrange(maxlen)))
    return prefix + tail
def random_number(maxlen):
    """Return a phone-number-like string of 0..maxlen-1 random characters
    drawn from digits, parentheses, dash and space."""
    symbols = string.digits + ")" + "(" + "-" + " "
    length = random.randrange(maxlen)
    return "".join(random.choice(symbols) for _ in range(length))
def random_email(maxlen):
    """Return a random '<local>@<domain>.ru' address; local and domain parts
    are each 0..maxlen-1 characters of lowercase letters, digits, '_', '-'."""
    symbols = string.ascii_lowercase + string.digits + "_" + "-"
    local = "".join(random.choice(symbols) for _ in range(random.randrange(maxlen)))
    domain = "".join(random.choice(symbols) for _ in range(random.randrange(maxlen)))
    return local + "@" + domain + ".ru"
def random_date(maxlen):
    """Return a random integer in [0, maxlen) as a string; used below as an
    <option> index inside the birthday/anniversary select XPaths."""
    value = random.randrange(maxlen)
    return str(value)
# Parametrised contact fixtures: one all-empty contact (date selects pointed
# at their first <option>) followed by five fully randomised ones.  The
# birth_*/anniversary_* fields carry XPath locators for <select> options,
# not plain values.
# NOTE(review): the empty contact omits email3 while the randomised ones set
# it -- confirm that is intentional.
testdata = [Contact(firstname="", middlename="", lastname="", nickname="", companyname="", address="",
                    homenumber="", worknumber="", email="", email2="",
                    birth_date="//div[@id='content']/form/select[1]//option[1]",
                    birth_month="//div[@id='content']/form/select[2]//option[1]", birth_year="",
                    anniversary_date="//div[@id='content']/form/select[3]//option[1]",
                    anniversary_month="//div[@id='content']/form/select[4]//option[1]", notes="",
                    mobilenumber="", secondarynumber="")] + [
    Contact(firstname=random_string("firstname", 10), middlename=random_string("middlename", 10), lastname=random_string
            ("lastname", 10), nickname=random_string("nickname", 10), companyname=random_string("companyname", 10), address=
            random_string("address", 25), homenumber=random_number(9), mobilenumber=random_number(12), worknumber=random_number(12),
            email=random_email(6), email2=random_email(7), email3=random_email(8),
            birth_date="//div[@id='content']/form/select[1]//option["+random_date(32)+"]",
            birth_month="//div[@id='content']/form/select[2]//option["+random_date(13)+"]", birth_year=random_number(4),
            anniversary_date="//div[@id='content']/form/select[3]//option["+random_date(32)+"]", notes=random_string("name", 30),
            anniversary_month="//div[@id='content']/form/select[4]//option["+random_date(13)+"]", secondarynumber=random_number(12))
    for i in range(5)]
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,733
|
AklerQ/python_training
|
refs/heads/master
|
/test/test_del_contact_from_group.py
|
# -*- coding: utf-8 -*-
from model.group import Group
from model.contact import Contact
from fixture.orm import ORMfixture
import random
db = ORMfixture(host="127.0.0.1", name="addressbook", user="root", password="")
def test_del_contact_from_group(app):
    """Remove a random contact from a random group; verify through the ORM."""
    # Ensure at least one group exists. (translated from Russian)
    if len(db.get_group_list()) == 0:
        app.group.create(Group(name="For adds contact", header="For adds contact", footer="For adds contact"))
    group_list = db.get_group_list()
    group = random.choice(group_list)
    # Ensure the chosen group contains at least one contact. (translated)
    if len(db.get_contacts_in_group(group)) == 0:
        app.contact.create(Contact(firstname="Тест_добавления", lastname="Тест_для_добавления",
                                   birth_date="//div[@id='content']/form/select[1]//option[1]",
                                   birth_month="//div[@id='content']/form/select[2]//option[1]",
                                   anniversary_date="//div[@id='content']/form/select[3]//option[1]",
                                   anniversary_month="//div[@id='content']/form/select[4]//option[1]",
                                   new_group="//select[@name='new_group']/option[@value='%s']" % group.id))
    app.navigation.open_group_page_by_id(group.id)
    contacts_list = db.get_contacts_in_group(group)
    contact = random.choice(contacts_list)
    app.contact.select_contact_by_id(contact.id)
    app.contact.delete_contact_from_group()
    app.navigation.open_group_page_by_id(group.id)
    # test validation: the contact must have moved out of the group.
    assert contact in list(db.get_contacts_not_in_group(group))
    assert contact not in list(db.get_contacts_in_group(group))
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,734
|
AklerQ/python_training
|
refs/heads/master
|
/test/test_edit_contact.py
|
# -*- coding: utf-8 -*-
from model.contact import Contact
import random
def test_edit_contact_by_index(app, db, check_ui):
    """Edit a randomly chosen contact and verify the change in the DB (and optionally the UI)."""
    # Precondition: at least one contact must exist.
    if app.contact.count_contacts() == 0:
        app.contact.create(Contact(firstname="For modify", birth_date="//div[@id='content']/form/select[1]//option[1]",
                                   birth_month="//div[@id='content']/form/select[2]//option[1]",
                                   anniversary_date="//div[@id='content']/form/select[3]//option[1]",
                                   anniversary_month="//div[@id='content']/form/select[4]//option[1]"))
    contacts_before = db.get_contact_list()
    target = random.choice(contacts_before)
    replacement = Contact(firstname="ΠΡΡΠ΅Π΄Π°ΠΊΡΠΈΡΠΎΠ²Π°Π½", middlename="ΠΡΡΠ΅Π΄Π°ΠΊΡΠΈΡΠΎΠ²ΠΈΡ",
                          lastname="ΠΡΡΠ΅Π΄Π°ΠΊΡΠΈΡΠΎΠ²Π°Π½ΡΠΊΠΈΠΉ", nickname="Π Π΅Π΄Π°ΠΊΡΠΎΡ",
                          companyname='ΠΠΠ "Π Π΅Π΄Π°ΠΊΡΠΈΡ ΠΈ ΠΠΈΡ"', address="ΡΠ΅Π΄Π°ΠΊΡΠΎΡΡΠΊΠΈΠΉ Π³ΠΎΡΠΎΠ΄ΠΎΠΊ",
                          homenumber="567-22-04", worknumber="45+6", email="glavred@mir.ur",
                          notes="ΠΠ΄Π΅ΡΡ ΠΌΠΎΠ³Π»Π° Π±Ρ Π±ΡΡΡ Π²Π°ΡΠ° ΡΠ΅ΠΊΠ»Π°ΠΌΠ°", email2="",
                          birth_date="//div[@id='content']/form/select[1]//option[4]",
                          birth_month="//div[@id='content']/form/select[2]//option[5]", birth_year="",
                          anniversary_date="//div[@id='content']/form/select[3]//option[6]",
                          anniversary_month="//div[@id='content']/form/select[4]//option[7]",
                          mobilenumber="12345678", secondarynumber="(098)76543")
    replacement.id = target.id  # keep the DB identity of the edited contact
    app.contact.edit_contact_by_id(target.id, replacement)
    # DB-level validation: list length unchanged, edited entry replaced in place.
    contacts_after = db.get_contact_list()
    assert len(contacts_before) == len(contacts_after)
    contacts_before[contacts_before.index(target)] = replacement
    assert contacts_before == contacts_after
    if check_ui:
        contacts_after = map(app.contact.clean, db.get_contact_list())
        assert sorted(contacts_after, key=Contact.id_or_max) == sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,735
|
AklerQ/python_training
|
refs/heads/master
|
/fixture/group.py
|
# -*- coding: utf-8 -*-
from model.group import Group
class GroupHelper:
    """UI helper ("page object") for everything related to groups in the addressbook app."""

    def __init__(self, app):
        self.app = app  # application fixture; the WebDriver is reached via app.wd

    def create(self, group):
        """Create *group* through the web form and invalidate the cached group list."""
        wd = self.app.wd
        self.app.navigation.open_groups_page()
        # init group creation
        wd.find_element_by_name("new").click()
        # fill group form
        self.fill_group_fields(group)
        # submit group creation
        wd.find_element_by_name("submit").click()
        self.app.navigation.return_to_groups_page()
        self.group_cache = None  # cached list is stale now

    def delete_group_by_index(self, index):
        """Delete the group shown at position *index* (0-based) on the groups page."""
        wd = self.app.wd
        self.app.navigation.open_groups_page()
        self.select_group_by_index(index)
        # submit deletion
        wd.find_element_by_name("delete").click()
        self.app.navigation.return_to_groups_page()
        self.group_cache = None

    def delete_first_group(self):
        """Convenience wrapper: delete the topmost group."""
        self.delete_group_by_index(0)

    def edit_group_by_index(self, index, input_group):
        """Overwrite the fields of the group at *index* with those of *input_group*."""
        wd = self.app.wd
        self.app.navigation.open_groups_page()
        self.select_group_by_index(index)
        # init group edition
        wd.find_element_by_name("edit").click()
        # fill group form
        self.fill_group_fields(input_group)
        # submit group edition
        wd.find_element_by_name("update").click()
        self.app.navigation.return_to_groups_page()
        self.group_cache = None

    def edit_first_group(self, input_group):
        """Convenience wrapper: edit the topmost group."""
        self.edit_group_by_index(0, input_group)

    def select_first_group(self):
        """Tick the checkbox of the first group in the list."""
        wd = self.app.wd
        wd.find_element_by_name("selected[]").click()

    def select_group_by_index(self, index):
        """Tick the checkbox of the group at position *index* (0-based)."""
        wd = self.app.wd
        wd.find_elements_by_name("selected[]")[index].click()

    def fill_group_fields(self, input_group):
        """Type name/header/footer from *input_group* into the group form (None = skip field)."""
        self.change_field_value("group_name", input_group.name)
        self.change_field_value("group_header", input_group.header)
        self.change_field_value("group_footer", input_group.footer)

    def change_field_value(self, field_name, text):
        """Clear the input *field_name* and type *text* into it; None means "leave as is"."""
        wd = self.app.wd
        if text is not None:
            wd.find_element_by_name(field_name).click()
            wd.find_element_by_name(field_name).clear()
            wd.find_element_by_name(field_name).send_keys(text)

    def count_groups(self):
        """Return the number of groups currently listed on the groups page."""
        wd = self.app.wd
        self.app.navigation.open_groups_page()
        return len(wd.find_elements_by_name("selected[]"))

    # Cached result of get_group_list(); reset to None by every mutating operation.
    group_cache = None

    def get_group_list(self):
        """Return the groups shown in the UI as Group(name, id) objects (cached)."""
        if self.group_cache is None:
            wd = self.app.wd
            self.app.navigation.open_groups_page()
            self.group_cache = []
            for element in wd.find_elements_by_css_selector("span.group"):
                text = element.text
                id = element.find_element_by_name("selected[]").get_attribute("value")
                self.group_cache.append(Group(name=text, id=id))
        return list(self.group_cache)  # return a copy so callers cannot mutate the cache

    def delete_group_by_id(self, id):
        """Delete the group whose checkbox value equals the database *id*."""
        wd = self.app.wd
        self.app.navigation.open_groups_page()
        self.select_group_by_id(id)
        # submit deletion
        wd.find_element_by_name("delete").click()
        self.app.navigation.return_to_groups_page()
        self.group_cache = None

    def select_group_by_id(self, id):
        """Tick the checkbox of the group with the given database *id*."""
        wd = self.app.wd
        wd.find_element_by_css_selector("input[value='%s']" % id).click()

    def select_group_by_id_for_add_to(self, id):
        """Choose group *id* in the "to_group" drop-down (used when adding a contact to a group)."""
        wd = self.app.wd
        wd.find_element_by_xpath('//select[@name="to_group"]/option[@value="%s"]' % id).click()

    def clean(self, group):
        """Normalize a DB group for comparison with UI data (trim the name)."""
        return Group(id=group.id, name=group.name.strip())

    def edit_group_by_id(self, id, input_group):
        """Overwrite the fields of the group with database *id* with those of *input_group*."""
        wd = self.app.wd
        self.app.navigation.open_groups_page()
        self.select_group_by_id(id)
        # init group edition
        wd.find_element_by_name("edit").click()
        # fill group form
        self.fill_group_fields(input_group)
        # submit group edition
        wd.find_element_by_name("update").click()
        self.app.navigation.return_to_groups_page()
        self.group_cache = None
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,736
|
AklerQ/python_training
|
refs/heads/master
|
/test/test_edit_group.py
|
# -*- coding: utf-8 -*-
from model.group import Group
import random
def test_edit_first_group_footer(app, db, check_ui):
    """Edit a random group and verify the change in the DB (and optionally the UI).

    NOTE(review): the name mentions "first group footer", but the test actually
    replaces all fields of a randomly chosen group.
    """
    # Precondition: at least one group must exist.
    if len(db.get_group_list()) == 0:
        app.group.create(Group(name="For modification", header="For modification", footer="For modification"))
    groups_before = db.get_group_list()
    target = random.choice(groups_before)
    replacement = Group(name="Modify name", header="Modify header", footer="Modify footer")
    app.group.edit_group_by_id(target.id, replacement)
    # DB-level validation: same number of groups, edited entry replaced in place.
    groups_after = db.get_group_list()
    assert len(groups_before) == len(groups_after)
    groups_before[groups_before.index(target)] = replacement
    assert groups_before == groups_after
    if check_ui:
        groups_after = map(app.group.clean, db.get_group_list())
        assert sorted(groups_after, key=Group.id_or_max) == sorted(app.group.get_group_list(), key=Group.id_or_max)
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,737
|
AklerQ/python_training
|
refs/heads/master
|
/fixture/contact.py
|
# -*- coding: utf-8 -*-
from model.contact import Contact
import re
class ContactHelper:
    """UI helper ("page object") for contact-related actions in the addressbook app."""

    def __init__(self, app):
        self.app = app  # application fixture; the WebDriver is reached via app.wd

    def create(self, contact):
        """Create *contact* through the "add new" form."""
        wd = self.app.wd
        self.app.navigation.turn_to_home_page()
        # create new contact
        wd.find_element_by_link_text("add new").click()
        # fill contact form
        self.fill_contact_fields(contact)
        # submit created contact
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
        self.app.navigation.return_to_home_page()
        self.contact_cache = None  # cached list is stale now

    def fill_contact_fields(self, contact):
        """Fill the whole contact form from *contact*.

        Text attributes set to None are skipped.  The date/month/group attributes
        are expected to be full XPath locators of <option> elements (see the tests).
        NOTE(review): contact.new_group is read unconditionally, yet edit flows
        create contacts without it — confirm that is tolerated.
        """
        wd = self.app.wd
        # fill personal data
        self.change_field_value("firstname", contact.firstname)
        self.change_field_value("middlename", contact.middlename)
        self.change_field_value("lastname", contact.lastname)
        self.change_field_value("nickname", contact.nickname)
        self.change_field_value("company", contact.companyname)
        self.change_field_value("address", contact.address)
        # fill communication data
        self.change_field_value("home", contact.homenumber)
        self.change_field_value("mobile", contact.mobilenumber)
        self.change_field_value("work", contact.worknumber)
        self.change_field_value("email", contact.email)
        self.change_field_value("email2", contact.email2)
        self.change_field_value("phone2", contact.secondarynumber)
        # fill dates (click the <option> only if it is not selected yet)
        if not wd.find_element_by_xpath(contact.birth_date).is_selected():
            wd.find_element_by_xpath(contact.birth_date).click()
        if not wd.find_element_by_xpath(contact.birth_month).is_selected():
            wd.find_element_by_xpath(contact.birth_month).click()
        self.change_field_value("byear", contact.birth_year)
        if not wd.find_element_by_xpath(contact.anniversary_date).is_selected():
            wd.find_element_by_xpath(contact.anniversary_date).click()
        if not wd.find_element_by_xpath(contact.anniversary_month).is_selected():
            wd.find_element_by_xpath(contact.anniversary_month).click()
        # fill contact commentary
        self.change_field_value("notes", contact.notes)
        if not wd.find_element_by_xpath(contact.new_group).is_selected():
            wd.find_element_by_xpath(contact.new_group).click()

    def change_field_value(self, field_name, text):
        """Clear input *field_name* and type *text*; None means "leave the field as is"."""
        wd = self.app.wd
        if text is not None:
            wd.find_element_by_name(field_name).click()
            wd.find_element_by_name(field_name).clear()
            wd.find_element_by_name(field_name).send_keys(text)

    def select_contact_by_index(self, index):
        """Tick the checkbox of the contact at position *index* (0-based)."""
        wd = self.app.wd
        wd.find_elements_by_name("selected[]")[index].click()

    def select_first_contact(self):
        """Tick the checkbox of the first contact in the list."""
        wd = self.app.wd
        wd.find_element_by_name("selected[]").click()

    def delete_contact_by_index(self, index):
        """Delete the contact at position *index* and accept the JS confirmation."""
        wd = self.app.wd
        self.app.navigation.turn_to_home_page()
        self.select_contact_by_index(index)
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # Accept the javascript confirmation dialog.
        # NOTE(review): switch_to_alert() is deprecated in newer Selenium releases
        # (use wd.switch_to.alert) — confirm the pinned selenium version.
        wd.switch_to_alert().accept()
        # turn_to_home_page (not return_to_home_page) is used on purpose: after a
        # deletion the "home page" link is not available on the resulting page.
        self.app.navigation.turn_to_home_page()
        self.contact_cache = None

    def delete_first_contact(self):
        """Convenience wrapper: delete the topmost contact."""
        self.delete_contact_by_index(0)

    def edit_contact_by_index(self, index, contact):
        """Open the edit form of the contact at *index* and overwrite it with *contact*."""
        wd = self.app.wd
        self.open_contact_to_edit_by_index(index)
        self.fill_contact_fields(contact)
        wd.find_element_by_xpath("//input[@name='update'][@value='Update']").click()
        self.app.navigation.return_to_home_page()
        self.contact_cache = None

    def edit_contact_by_id(self, id, contact):
        """Open the edit form of the contact with database *id* and overwrite it with *contact*."""
        wd = self.app.wd
        self.app.navigation.open_contact_edit_page_by_id(id)
        self.fill_contact_fields(contact)
        wd.find_element_by_xpath("//input[@name='update'][@value='Update']").click()
        self.app.navigation.return_to_home_page()
        self.contact_cache = None

    def edit_first_contact(self, contact):
        """Convenience wrapper: edit the topmost contact."""
        self.edit_contact_by_index(0, contact)

    def count_contacts(self):
        """Return the number of contacts currently listed on the home page."""
        wd = self.app.wd
        self.app.navigation.turn_to_home_page()
        return len(wd.find_elements_by_name("selected[]"))

    # Cached result of get_contact_list(); reset to None by every mutating operation.
    contact_cache = None

    def get_contact_list(self):
        """Return the contacts shown on the home page as Contact objects (cached).

        Only the columns visible on the home page are populated: id, last/first
        name, address and the merged e-mail/phone cells.
        """
        if self.contact_cache is None:
            wd = self.app.wd
            self.app.navigation.turn_to_home_page()
            self.contact_cache = []
            for row in wd.find_elements_by_css_selector('tr[name=entry]'):
                cells = row.find_elements_by_css_selector('td')
                id = cells[0].find_element_by_css_selector('input').get_attribute('value')
                lastname = cells[1].text
                firstname = cells[2].text
                address = cells[3].text
                all_email = cells[4].text
                all_phones = cells[5].text
                self.contact_cache.append(Contact(firstname=firstname, lastname=lastname, id=id, address=address,
                                                  all_phones_from_home_page=all_phones, all_email_from_home_page=all_email))
        return list(self.contact_cache)  # return a copy so callers cannot mutate the cache

    def open_contact_view_by_index(self, index):
        """Open the read-only "details" view of the contact at position *index*."""
        wd = self.app.wd
        self.app.navigation.turn_to_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cell = row.find_elements_by_tag_name("td")[6]
        cell.find_element_by_tag_name("a").click()

    def open_contact_to_edit_by_index(self, index):
        """Open the edit form of the contact at *index* via the edit icon.

        The +2 offset converts the 0-based index to 1-based XPath, presumably
        skipping a header row — confirm against the rendered table.
        """
        wd = self.app.wd
        self.app.navigation.turn_to_home_page()
        wd.find_element_by_xpath("//table[@id='maintable']/tbody/tr["+str(index+2)+"]/td[8]/a/img").click()

    def get_contact_info_from_edit_page(self, index):
        """Read the contact at *index* back from the fields of its edit form."""
        wd = self.app.wd
        self.open_contact_to_edit_by_index(index)
        firstname = wd.find_element_by_name('firstname').get_attribute('value')
        lastname = wd.find_element_by_name('lastname').get_attribute('value')
        id = wd.find_element_by_name('id').get_attribute('value')
        homenumber = wd.find_element_by_name('home').get_attribute('value')
        mobilenumber = wd.find_element_by_name('mobile').get_attribute('value')
        worknumber = wd.find_element_by_name('work').get_attribute('value')
        secondarynumber = wd.find_element_by_name('phone2').get_attribute('value')
        address = wd.find_element_by_name('address').get_attribute('value')
        email = wd.find_element_by_name('email').get_attribute('value')
        email2 = wd.find_element_by_name('email2').get_attribute('value')
        email3 = wd.find_element_by_name('email3').get_attribute('value')
        return Contact(id=id, firstname=firstname, lastname=lastname, homenumber=homenumber, mobilenumber=mobilenumber,
                       worknumber=worknumber, secondarynumber=secondarynumber, address=address, email=email,
                       email2=email2, email3=email3)

    def get_contact_from_view_page(self, index):
        """Scrape the phone numbers from the read-only view page of the contact at *index*.

        The view page prints phones as lines like "H: <home>", "W: <work>",
        "M: <mobile>" and "P: <secondary>"; missing phones stay None.
        """
        wd = self.app.wd
        self.open_contact_view_by_index(index)
        text = wd.find_element_by_id("content").text
        homenumber = re.search("H: (.*)", text)
        if homenumber is not None:
            homenumber = homenumber.group(1)
        worknumber = re.search("W: (.*)", text)
        if worknumber is not None:
            worknumber = worknumber.group(1)
        mobilenumber = re.search("M: (.*)", text)
        if mobilenumber is not None:
            mobilenumber = mobilenumber.group(1)
        secondarynumber = re.search("P: (.*)", text)
        if secondarynumber is not None:
            secondarynumber = secondarynumber.group(1)
        return Contact(homenumber=homenumber, worknumber=worknumber, mobilenumber=mobilenumber, secondarynumber=secondarynumber)

    def delete_contact_by_id(self, id):
        """Delete the contact with database *id* and accept the JS confirmation."""
        wd = self.app.wd
        self.app.navigation.turn_to_home_page()
        self.select_contact_by_id(id)
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # Accept the javascript confirmation dialog (see NOTE in delete_contact_by_index).
        wd.switch_to_alert().accept()
        # turn_to_home_page (not return_to_home_page) is used on purpose: after a
        # deletion the "home page" link is not available on the resulting page.
        self.app.navigation.turn_to_home_page()
        self.contact_cache = None

    def select_contact_by_id(self, id):
        """Tick the checkbox of the contact with database *id* (checkbox element id == contact id)."""
        wd = self.app.wd
        wd.find_element_by_id(id).click()

    def clean(self, contact):
        """Normalize a DB contact for comparison with UI data (trim the names)."""
        return Contact(id=contact.id, firstname=contact.firstname.strip(), lastname=contact.lastname.strip())

    def add_contact_to_group(self):
        """Press the "add to group" button for the currently selected contact(s)."""
        wd = self.app.wd
        wd.find_element_by_name("add").click()
        self.contact_cache = None

    def delete_contact_from_group(self):
        """Press the "remove from group" button for the currently selected contact(s)."""
        wd = self.app.wd
        wd.find_element_by_xpath('//input[@name="remove"]').click()
        self.contact_cache = None
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,738
|
AklerQ/python_training
|
refs/heads/master
|
/test/test_contact_data_validation.py
|
import re
from random import randrange
from model.contact import Contact
def test_random_contact_data_on_home_page(app):
    """Pick a random contact and compare its home-page row with the edit form."""
    all_contacts = app.contact.get_contact_list()
    idx = randrange(len(all_contacts))
    from_home_page = app.contact.get_contact_list()[idx]
    from_edit_page = app.contact.get_contact_info_from_edit_page(idx)
    # The home page merges all phones / e-mails into a single cell each.
    assert from_home_page.all_phones_from_home_page == merge_phones_like_on_home_page(from_edit_page)
    assert from_home_page.all_email_from_home_page == merge_email_like_on_home_page(from_edit_page)
    assert from_home_page.firstname == from_edit_page.firstname
    assert from_home_page.lastname == from_edit_page.lastname
    assert from_home_page.address == from_edit_page.address
def clear(s):
    """Remove the decorative phone-number characters '(', ')', ' ' and '-' from *s*."""
    return s.translate(str.maketrans("", "", "() -"))
def merge_phones_like_on_home_page(contact):
    """Join home/mobile/work/secondary phones the way the home page displays them.

    None phones are dropped, each remaining number is stripped of decoration via
    clear(), empty results are dropped, and the rest are newline-joined.
    """
    numbers = [contact.homenumber, contact.mobilenumber, contact.worknumber, contact.secondarynumber]
    cleaned = [clear(number) for number in numbers if number is not None]
    return "\n".join(value for value in cleaned if value != "")
def merge_email_like_on_home_page(contact):
    """Join the three e-mail fields the way the home page displays them.

    None and empty e-mails are dropped; the rest are newline-joined in order.
    """
    emails = [contact.email, contact.email2, contact.email3]
    return "\n".join(e for e in emails if e is not None and e != "")
def test_full_contacts_data_on_home_page(app, db):
    """Compare every contact shown in the UI with its database counterpart."""
    count = len(app.contact.get_contact_list())
    from_db = sorted(list(db.get_contact_list()), key=Contact.id_or_max)
    from_ui = sorted(list(app.contact.get_contact_list()), key=Contact.id_or_max)
    # Index loop (not zip) so that a missing DB entry fails loudly with IndexError.
    for i in range(count):
        ui_contact = from_ui[i]
        db_contact = from_db[i]
        assert ui_contact.firstname.strip() == db_contact.firstname.strip()
        assert ui_contact.lastname.strip() == db_contact.lastname.strip()
        assert ui_contact.address.strip() == db_contact.address.strip()
        assert ui_contact.all_email_from_home_page == merge_email_like_on_home_page(db_contact)
        assert ui_contact.all_phones_from_home_page == merge_phones_like_on_home_page(db_contact)
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,739
|
AklerQ/python_training
|
refs/heads/master
|
/test/test_del_contact.py
|
# -*- coding: utf-8 -*-
from model.contact import Contact
import random
def test_delete_first_contact(app, db, check_ui):
    """Delete a random contact and verify the DB (and optionally the UI).

    NOTE(review): despite its name, this test deletes a *random* contact.
    """
    # Precondition: at least one contact must exist.
    if app.contact.count_contacts() == 0:
        app.contact.create(Contact(firstname="Π’Π΅ΡΡ_ΠΈΠΌΠ΅Π½ΠΈ", lastname="Π’Π΅ΡΡ_ΡΠ°ΠΌΠΈΠ»ΠΈΠΈ",
                                   birth_date="//div[@id='content']/form/select[1]//option[1]",
                                   birth_month="//div[@id='content']/form/select[2]//option[1]",
                                   anniversary_date="//div[@id='content']/form/select[3]//option[1]",
                                   anniversary_month="//div[@id='content']/form/select[4]//option[1]"))
    contacts_before = db.get_contact_list()
    victim = random.choice(contacts_before)
    app.contact.delete_contact_by_id(victim.id)
    # DB-level validation: exactly one contact disappeared, and it is the chosen one.
    contacts_after = db.get_contact_list()
    assert len(contacts_before) - 1 == len(contacts_after)
    contacts_before.remove(victim)
    assert contacts_before == contacts_after
    if check_ui:
        contacts_after = map(app.contact.clean, db.get_contact_list())
        assert sorted(contacts_after, key=Contact.id_or_max) == sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,740
|
AklerQ/python_training
|
refs/heads/master
|
/model/contact.py
|
from sys import maxsize
class Contact:
    """Value object describing one address-book entry.

    Every constructor argument defaults to None and is stored under the same
    name.  Equality compares id (a None id on the left side matches anything)
    plus first and last name, which makes ``==`` intentionally asymmetric.
    """

    def __init__(self, firstname=None, middlename=None, lastname=None, nickname=None, companyname=None, address=None,
                 homenumber=None, worknumber=None, mobilenumber=None, faxnumber=None, email=None, email2=None,
                 birth_date=None, birth_month=None, birth_year=None, anniversary_date=None, anniversary_month=None,
                 secondarynumber=None, notes=None, id=None, email3=None, all_phones_from_home_page=None,
                 all_email_from_home_page=None, new_group=None):
        # Store every constructor argument as a same-named instance attribute.
        for name, value in list(locals().items()):
            if name != "self":
                setattr(self, name, value)

    def __repr__(self):
        return "{}:{}:{}".format(self.id, self.firstname, self.lastname)

    def __eq__(self, other):
        # A missing id on *self* acts as a wildcard; otherwise ids must match.
        same_id = self.id is None or self.id == other.id
        return same_id and self.firstname == other.firstname and self.lastname == other.lastname

    def id_or_max(self):
        """Sort key: the numeric id, or sys.maxsize for objects without one."""
        return int(self.id) if self.id else maxsize
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,741
|
AklerQ/python_training
|
refs/heads/master
|
/test/test_db_matches_ui.py
|
from model.group import Group
def test_group_list(app, db):
    """The group list shown in the UI must match the database contents."""
    from_ui = app.group.get_group_list()
    from_db = [app.group.clean(group) for group in db.get_group_list()]
    assert sorted(from_ui, key=Group.id_or_max) == sorted(from_db, key=Group.id_or_max)
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,742
|
AklerQ/python_training
|
refs/heads/master
|
/test/test_add_contact_to_group.py
|
# -*- coding: utf-8 -*-
from model.group import Group
from model.contact import Contact
from fixture.orm import ORMfixture
import random
# Dedicated ORM connection for this module.  NOTE(review): credentials are
# hard-coded, and the test below mixes this connection with the ``db`` fixture —
# presumably they point at the same database; confirm.
orm = ORMfixture(host="127.0.0.1", name="addressbook", user="root", password="root")
def test_add_contact_to_group(app, db):
    """Add a random group-less contact to a random group and verify via the DB."""
    # Precondition: at least one group must exist.
    if len(db.get_group_list()) == 0:
        app.group.create(Group(name="For adds contact", header="For adds contact", footer="For adds contact"))
    # Precondition: at least one contact that belongs to no group must exist.
    if len(db.get_contacts_out_groups()) == 0:
        app.contact.create(Contact(firstname="Π’Π΅ΡΡ_Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΡ", lastname="Π’Π΅ΡΡ_Π΄Π»Ρ_Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΡ",
                                   birth_date="//div[@id='content']/form/select[1]//option[1]",
                                   birth_month="//div[@id='content']/form/select[2]//option[1]",
                                   anniversary_date="//div[@id='content']/form/select[3]//option[1]",
                                   anniversary_month="//div[@id='content']/form/select[4]//option[1]",
                                   new_group="//select[@name='new_group']/option[@value='[none]']"))
    contact = random.choice(db.get_contacts_out_groups())
    group = random.choice(db.get_group_list())
    app.navigation.turn_to_home_page()
    app.contact.select_contact_by_id(contact.id)
    app.group.select_group_by_id_for_add_to(group.id)
    app.contact.add_contact_to_group()
    app.navigation.open_group_page_by_id(group.id)
    # Validation.  NOTE(review): the first check uses the module-level ``orm``
    # connection while the second uses the ``db`` fixture — confirm this is intended.
    assert contact in list(orm.get_contacts_in_group(group))
    assert contact not in list(db.get_contacts_out_groups())
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,743
|
AklerQ/python_training
|
refs/heads/master
|
/generator/contact_gen.py
|
from model.contact import Contact
import random
import string
import os.path
import jsonpickle
import getopt
import sys
# Command-line options: -n <count of random contacts>, -f <output file>.
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of contacts", "file"])
except getopt.GetoptError as err:
    # BUG FIX: the getopt module has no usage() function, so the old error path
    # crashed with AttributeError.  Report the parsing error and exit instead.
    print(err, file=sys.stderr)
    sys.exit(2)
# NOTE(review): the long-option names above contain spaces and are never matched
# below; only the short -n/-f forms are actually usable — confirm intent.

n = 5  # default number of random contacts to generate
f = "data/contacts.json"  # default output file, relative to the project root

for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a
def random_string(prefix, maxlen):
    """Return *prefix* followed by 0..maxlen-1 random letters, digits or spaces.

    Spaces are deliberately over-weighted (x10) to exercise whitespace handling.
    """
    alphabet = string.ascii_letters + string.digits + " "*10
    length = random.randrange(maxlen)
    return prefix + "".join(random.choice(alphabet) for _ in range(length))
def random_number(maxlen):
    """Return a phone-like string of 0..maxlen-1 random digits and ')(- ' characters."""
    alphabet = string.digits + ")(- "
    length = random.randrange(maxlen)
    return "".join(random.choice(alphabet) for _ in range(length))
def random_email(maxlen):
    """Return a random e-mail-like string '<local>@<domain>.ru'.

    Both the local part and the domain are 0..maxlen-1 random characters from
    lowercase letters, digits, '_' and '-' (so either side may be empty).
    """
    alphabet = string.ascii_lowercase + string.digits + "_-"
    local = "".join(random.choice(alphabet) for _ in range(random.randrange(maxlen)))
    domain = "".join(random.choice(alphabet) for _ in range(random.randrange(maxlen)))
    return local + "@" + domain + ".ru"
def random_date(maxlen):
    """Return a random integer in [0, maxlen) as a string (used as an <option> index).

    NOTE(review): randrange may return 0, but XPath option indices start at 1 —
    confirm callers tolerate the resulting non-matching locator.
    """
    value = random.randrange(maxlen)
    return str(value)
# Test data: one contact with all-empty fields (boundary case) followed by five
# randomly generated ones.  The date/month/group values are XPath locators of
# the corresponding <option> elements, as expected by the contact form filler.
testdata = [Contact(firstname="", middlename="", lastname="", nickname="", companyname="", address="",
                    homenumber="", worknumber="", email="", email2="", mobilenumber="",
                    birth_date="//div[@id='content']/form/select[1]//option[1]",
                    birth_month="//div[@id='content']/form/select[2]//option[1]", birth_year="",
                    anniversary_date="//div[@id='content']/form/select[3]//option[1]",
                    anniversary_month="//div[@id='content']/form/select[4]//option[1]", notes="",
                    secondarynumber="", new_group="//select[@name='new_group']/option[@value='[none]']")] + [
    Contact(firstname=random_string("firstname", 10), middlename=random_string("middlename", 10), lastname=random_string
    ("lastname", 10), nickname=random_string("nickname", 10), companyname=random_string("companyname", 10), address=
    random_string("address", 25), homenumber=random_number(9), mobilenumber=random_number(12), worknumber=random_number(12),
    email=random_email(6), email2=random_email(7), email3=random_email(8),
    birth_date="//div[@id='content']/form/select[1]//option["+random_date(32)+"]",
    birth_month="//div[@id='content']/form/select[2]//option["+random_date(13)+"]", birth_year=random_number(4),
    anniversary_date="//div[@id='content']/form/select[3]//option["+random_date(32)+"]", notes=random_string("name", 30),
    anniversary_month="//div[@id='content']/form/select[4]//option["+random_date(13)+"]", secondarynumber=random_number(12),
    new_group="//select[@name='new_group']/option[@value='[none]']")
    for i in range(5)]

# Serialize the generated contacts as pretty-printed JSON; the output path is
# resolved relative to the project root (one level above this script).
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,744
|
AklerQ/python_training
|
refs/heads/master
|
/fixture/navigation.py
|
# -*- coding: utf-8 -*-
class NavigationHelper:
    """Centralizes page navigation; each method is a no-op if the target page is already open."""

    def __init__(self, app):
        self.app = app  # application fixture; the WebDriver is reached via app.wd

    def open_home_page(self):
        """Open the application start page unless it is already displayed.

        The start page is recognized by its "Create account" / "Forgot password" links.
        """
        wd = self.app.wd
        if not ((len(wd.find_elements_by_link_text("Create account")) > 0)
                and (len(wd.find_elements_by_link_text("Forgot password")) > 0)):
            wd.get(self.app.base_url)

    def turn_to_home_page(self):
        """Go to the contact list via the "home" link unless it is already shown.

        NOTE(review): find_element_by_xpath raises when the element is missing, so
        the second half of the condition never evaluates to False — confirm intent.
        """
        wd = self.app.wd
        if not (len(wd.find_elements_by_name("add")) > 0
                and wd.find_element_by_xpath("//*[contains(text(), 'Number of results')]")):
            wd.find_element_by_link_text("home").click()

    def return_to_home_page(self):
        """Go back to the contact list via the "home page" link (shown after form submits)."""
        wd = self.app.wd
        if not (len(wd.find_elements_by_name("add")) > 0
                and wd.find_element_by_xpath("//*[contains(text(), 'Number of results')]")):
            wd.find_element_by_link_text("home page").click()

    def open_groups_page(self):
        """Open the groups page via the "groups" link unless it is already shown."""
        wd = self.app.wd
        if not (wd.current_url.endswith("/group.php") and len(wd.find_elements_by_name("new")) > 0):
            wd.find_element_by_link_text("groups").click()

    def return_to_groups_page(self):
        """Go back to the groups page via the "group page" link (shown after form submits)."""
        wd = self.app.wd
        if not (wd.current_url.endswith("/group.php") and len(wd.find_elements_by_name("new")) > 0):
            wd.find_element_by_link_text("group page").click()

    def open_contact_edit_page_by_id(self, id):
        """Open edit.php for the contact with database *id* directly by URL."""
        wd = self.app.wd
        if not wd.current_url.endswith("/edit.php?id=%s" % id):
            wd.get(self.app.base_url+"/edit.php?id=%s" % id)

    def open_group_page_by_id(self, id):
        """Open the home page filtered by group *id* directly by URL.

        NOTE(review): the endswith check expects base_url to end with '/' so the
        resulting URL matches '/?group=<id>' — confirm the fixture configuration.
        """
        wd = self.app.wd
        if not wd.current_url.endswith("/?group=%s" % id):
            wd.get(self.app.base_url+"?group=%s" % id)
|
{"/data/contact_data.py": ["/model/contact.py"], "/test/test_del_contact_from_group.py": ["/model/contact.py"], "/test/test_edit_contact.py": ["/model/contact.py"], "/fixture/contact.py": ["/model/contact.py"], "/test/test_contact_data_validation.py": ["/model/contact.py"], "/test/test_del_contact.py": ["/model/contact.py"], "/test/test_add_contact_to_group.py": ["/model/contact.py"], "/generator/contact_gen.py": ["/model/contact.py"]}
|
2,765
|
peteramazonian/simulation_project
|
refs/heads/master
|
/movement.py
|
import time_management
from time_management import add_to_fel
from system_arrival import SystemArrival
# Shared list of all ServiceStation instances; fetched via __import__ (presumably
# to avoid a circular import with service_station — TODO confirm).
ss_list = __import__('service_station').ServiceStation.list
class Movement():
    """Routes a customer either to the next service station or out of the system."""

    # All Movement instances ever created, in creation order.
    list = []

    @classmethod
    def check(cls):
        """Ensure exactly len(ss_list) + 1 movements exist; raise ValueError otherwise."""
        expected = len(ss_list) + 1
        actual = len(cls.list)
        if actual == expected:
            return
        if actual < expected:
            raise ValueError("Movement objects should be more")
        raise ValueError("Movement objects are more than needed")

    def __init__(self, moving_time_generator):
        self.time_generator = moving_time_generator
        self.position = len(Movement.list) + 1  # 1-based slot in the route
        self.name = "m{}".format(self.position)
        Movement.list.append(self)

    # Overriding Python's original __repr__ function
    def __repr__(self):
        return self.name

    def move(self, costumer_id):
        """Schedule the next event for *costumer_id* on the future event list."""
        event_time = self.time_generator.generate() + time_management.clock
        if self.position <= len(ss_list):
            # Still inside the route: arrival event at the next service station.
            add_to_fel((event_time, "A" + str(self.position), costumer_id,
                        ss_list[self.position - 1].arrival))
        else:
            # Past the last station: departure event from the system.
            add_to_fel((event_time, "D", costumer_id, SystemArrival.departure))
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,766
|
peteramazonian/simulation_project
|
refs/heads/master
|
/logger_multi_run.py
|
import xlsxwriter
from datetime import datetime
class LoggerMR:
    """Excel logger for multi-run simulations (one summary row per replication).

    Builds the workbook and a two-row header up front; replication_logger
    appends a row of statistics per replication and result_logger writes a
    final table of parameter averages across all replications.
    """
    def __init__(self, ss_names, replications):
        """Create the workbook, the cell formats and the two-row header.

        ss_names -- display names of the ServiceStations (one column group each).
        replications -- total replication count (used in averages and file name).
        """
        self.total_replications = replications
        self.ss_names = ss_names # Setting list of service ServiceStations
        time = datetime.now().strftime("%d-%m-%Y--%H-%M-%S")
        self.wb = xlsxwriter.Workbook('LOG_MR/LOG-' + str(self.total_replications) + 'R--' + time + '.xlsx') # Creating Excel report
        # file in LOG_MR folder in the project directory
        self.ws = self.wb.add_worksheet("all logs") # Creating a sheet inside the Excel file
        # Creating default format object
        self.default_format = self.wb.add_format(dict(font_name='Century Gothic', align='center', valign='vcenter'))
        # Defining a dictionary so it can be edited easily before creating a new format object
        self.header_format_dict = dict(font_name='Century Gothic', align='center', valign='vcenter', bold=True,
                                       font_color='navy', text_wrap=True, bg_color='silver', border=1)
        # Setting default format and height=14 for first 50 columns in all rows
        self.ws.set_column(0, 50, 14, self.default_format)
        # Freezing first 2 rows and first column
        self.ws.freeze_panes(2, 1)
        # Writing header for first column
        format_tmp = self.wb.add_format(self.header_format_dict) # Creating a temporary format object
        self.ws.merge_range(0, 0, 1, 0, "Replication", format_tmp) # Writing first row in merged cell
        # Writing header for column 2
        format_tmp = self.wb.add_format(self.header_format_dict) # Creating a temporary format object
        format_tmp.set_bg_color('#CCFF99') # Changing background color of the format object
        self.ws.write(0, 1, "System", format_tmp) # Writing first row
        system_parameters = ['Average Time in System']
        for col_num, cell_name in enumerate(system_parameters): # Writing second row
            self.ws.write(1, col_num + 1, cell_name, format_tmp)
        # Writing header for columns after 5
        # One section for each ServiceStation. It will cover all ServiceStations automatically.
        color_list = ['#FF5050', '#FFFF99'] # Defining a color list to choose in a loop for each
        # ServiceStation so it can be separated easily
        for num, ss in enumerate(self.ss_names):
            format_tmp = self.wb.add_format(self.header_format_dict)
            format_tmp.set_bg_color(color_list[int(num % len(color_list))]) # Setting background color of the
            # format object used for this ServiceStation's header, from the color list
            # Parameters names list. you need to edit this if you want to change what parameters are printed in log file
            # Also you should change ServiceStation's "final_calculations" function
            # Order of parameters in ss_parameters and result dict in ServiceStations should be the same
            ss_parameters = ['Total Wait Time', 'Average Queue Delay', 'Average Queue Length', 'Maximum Queue Length',
                             'Servers Efficiency', 'Queue Busy Percentage']
            i = num * len(ss_parameters) + 2 # Choose starting column
            self.ws.merge_range(0, i, 0, i + len(ss_parameters) - 1, ss, format_tmp) # Writing first row in
            # merged cell
            for index, cell_name in enumerate(ss_parameters): # Writing second row
                self.ws.write(1, index + i, cell_name, format_tmp)
        self.row = 2 # Setting the starting row to write logs. 3rd row is the row after header.
        self.replication_number = 1
    def replication_logger(self, s_list, SystemArrival): # It will write the system evaluation parameters for each replication in a new row
        """Append one row with this replication's system and station statistics.

        s_list -- list of ServiceStation objects (each with a filled .result dict).
        SystemArrival -- the SystemArrival class object (its .result is read).
        """
        column = 0
        format_tmp = self.wb.add_format(self.header_format_dict) # Creating a temporary format object
        self.ws.write(self.row, column, self.replication_number, format_tmp)
        self.replication_number += 1
        column += 1
        for key, value in SystemArrival.result.items():
            self.ws.write(self.row, column, value)
            column += 1
        for ss in s_list:
            for key, value in ss.result.items():
                self.ws.write(self.row, column, value)
                column += 1
        self.row += 1
    def result_logger(self, ss_names, result): # It will write the system evaluation parameters in a table at the end of the log file
        """Write the cross-replication averages table below the log rows.

        ss_names -- scope labels (station names, plus 'System' appended by the caller).
        result -- list of dicts of summed parameter values, divided here by
        total_replications to obtain the average.
        """
        self.row += 3 # The table starts 3 rows after where log table ends
        column = 4 # The table starts from 5th column
        format_tmp = self.wb.add_format(self.header_format_dict) # Creating a temporary format object
        format_tmp.set_bg_color('#29A8FF') # Changing it's background color to blue
        # Writing the header:
        self.ws.write(self.row, column, 'Scope', format_tmp)
        self.ws.merge_range(self.row, column + 1, self.row, column + 2, "Parameter Average", format_tmp)
        self.ws.write(self.row, column + 3, 'Value', format_tmp)
        self.row += 1
        color_list = ['#FF5050', '#FFFF99'] # Used to separate parts with two colors in loop
        for num, ss in enumerate(ss_names): # Writing ServiceStations evaluation parameters
            format_tmp = self.wb.add_format(self.header_format_dict)
            format_tmp.set_bg_color(color_list[int(num % len(color_list))])
            if len(result[num]) > 1:
                self.ws.merge_range(self.row, column, self.row + len(result[num]) - 1, column, ss, format_tmp)
            else:
                self.ws.write(self.row, column, ss, format_tmp)
            for key, value in result[num].items(): # Writing parameters name and value
                self.ws.merge_range(self.row, column + 1, self.row, column + 2, key, format_tmp)
                self.ws.write(self.row, column + 3, value / self.total_replications, format_tmp)
                self.row += 1
    def close_file(self): # It will close and save the Excel file in the project directory
        """Close and save the workbook; must be called exactly once at the end."""
        self.wb.close()
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,767
|
peteramazonian/simulation_project
|
refs/heads/master
|
/logger_single_run.py
|
import xlsxwriter
from datetime import datetime
class LoggerSR:
    """Excel logger for single-run simulations.

    Logs every handled FEL event together with the system's and each
    ServiceStation's state/cumulative variables, then writes a final
    results table at the bottom of the sheet.
    """
    def __init__(self, s_list):
        """Create the workbook, the cell formats and the two-row header.

        s_list -- list of ServiceStation objects whose variables are logged.
        """
        self.s_list = s_list # Setting list of service ServiceStations
        self.system_arrival = __import__('system_arrival').SystemArrival # Importing SystemArrival class. It
        # should be imported inside init to avoid circular imports
        time = datetime.now().strftime("%d-%m-%Y--%H-%M-%S")
        self.wb = xlsxwriter.Workbook('LOG_SR/LOG-SR--' + time + '.xlsx') # Creating Excel report file in LOG_SR
        # folder in the project directory
        self.ws = self.wb.add_worksheet("all logs") # Creating a sheet inside the Excel file
        # Creating default format object
        self.default_format = self.wb.add_format(dict(font_name='Century Gothic', align='center', valign='vcenter'))
        # Defining a dictionary so it can be edited easily before creating a new format object
        self.header_format_dict = dict(font_name='Century Gothic', align='center', valign='vcenter', bold=True,
                                       font_color='navy', text_wrap=True, bg_color='silver', border=1)
        # Setting default format and height=14 for first 50 columns in all rows
        self.ws.set_column(0, 50, 14, self.default_format)
        # Freezing first 2 rows and first 3 columns
        self.ws.freeze_panes(2, 3)
        # Writing header for first 3 columns
        format_tmp = self.wb.add_format(self.header_format_dict) # Creating a temporary format object
        self.ws.merge_range(0, 0, 0, 2, "FEL", format_tmp) # Writing first row in merged cell
        fel_parameters = ["Clock", "Event Type", "Costumer_ID"]
        for col_num, cell_name in enumerate(fel_parameters): # Writing second row
            self.ws.write(1, col_num, cell_name, format_tmp)
        # Writing header for columns 4-5
        format_tmp = self.wb.add_format(self.header_format_dict) # Creating a temporary format object
        format_tmp.set_bg_color('#CCFF99') # Changing background color of the format object
        self.ws.merge_range(0, 3, 0, 4, "System", format_tmp) # Writing first row in merged cell
        system_parameters = ["Costumers Total Time", "Costumers Departured"]
        for col_num, cell_name in enumerate(system_parameters): # Writing second row
            self.ws.write(1, col_num + 3, cell_name, format_tmp)
        # Writing header for columns after 5
        # One section for each ServiceStation. It will cover all ServiceStations automatically.
        color_list = ['#FF5050', '#FFFF99'] # Defining a color list to choose in a loop for each
        # ServiceStation so it can be separated easily
        for num, ss in enumerate(self.s_list):
            format_tmp = self.wb.add_format(self.header_format_dict)
            format_tmp.set_bg_color(color_list[int(num % len(color_list))]) # Setting background color of the
            # format object used for this ServiceStation's header, from the color list
            # Parameters names list. you need to edit this if you want to change what parameters are printed in log file
            # Also you should change ServiceStation's "return_printables" function
            # Order of parameters in ss_parameters and printables list in ServiceStations should be the same
            ss_parameters = ['Available Servers', 'Busy Servers', 'Queue Len', 'Rest in Waiting',
                             'Cumulative Queue Len', 'Max Queue Len', 'Total Service Time', 'Total Service Count',
                             'Queue Delay Cumulative', 'Queue Total Time', 'Servers Total Busy Time',
                             'Servers Total Available Time']
            i = num * len(ss_parameters) + 5 # Choose starting column
            self.ws.merge_range(0, i, 0, i + len(ss_parameters) - 1, ss.name, format_tmp) # Writing first row in
            # merged cell
            for index, cell_name in enumerate(ss_parameters): # Writing second row
                self.ws.write(1, index + i, cell_name, format_tmp)
        self.row = 2 # Setting the starting row to write logs. 3rd row is the row after header.
    def fel_logger(self, event_notice): # It will write the event notice passed, into in the next blank row
        """Log one handled event notice (clock, type, costumer id) plus all variables."""
        for col_num, item in enumerate(event_notice[0: -1]):
            self.ws.write(self.row, col_num, item)
        self.variable_logger() # Calling variable_logger function to log cumulative and state variables
        self.row += 1 # Moving to next row
    def variable_logger(self): # It will log cumulative and state variables for SystemArrivals and ServiceStations in
        # columns after 3 (where fel ends)
        """Write the current system-wide and per-station variables into self.row."""
        column = 3
        # Writing System variables
        self.ws.write(self.row, column, self.system_arrival.costumers_total_time)
        column += 1
        self.ws.write(self.row, column, self.system_arrival.costumers_departured)
        column += 1
        # Writing ServiceStation variables
        for ss in self.s_list:
            for item in ss.return_printables():
                self.ws.write(self.row, column, item)
                column += 1
    def result_logger(self): # It will write the system evaluation parameters in a table at the end of the log file
        """Write the final evaluation-parameters table below the event log."""
        self.row += 3 # The table starts 3 rows after where log table ends
        column = 4 # The table starts from 5th column
        format_tmp = self.wb.add_format(self.header_format_dict) # Creating a temporary format object
        format_tmp.set_bg_color('#29A8FF') # Changing it's background color to blue
        # Writing the header:
        self.ws.write(self.row, column, 'Scope', format_tmp)
        self.ws.merge_range(self.row, column + 1, self.row, column + 2, "Parameter", format_tmp)
        self.ws.write(self.row, column + 3, 'Value', format_tmp)
        self.row += 1
        color_list = ['#FF5050', '#FFFF99'] # Used to separate parts with two colors in loop
        for num, ss in enumerate(self.s_list): # Writing ServiceStations evaluation parameters
            format_tmp = self.wb.add_format(self.header_format_dict)
            format_tmp.set_bg_color(color_list[int(num % len(color_list))])
            result = ss.result # ss.result is calculated in final_calculations method in ServiceStations at the end
            # of simulation
            self.ws.merge_range(self.row, column, self.row + len(result) - 1, column, ss.name, format_tmp)
            for key, value in result.items(): # Writing parameters name and value
                self.ws.merge_range(self.row, column + 1, self.row, column + 2, key, format_tmp)
                self.ws.write(self.row, column + 3, value, format_tmp)
                self.row += 1
        # Writing ServiceStations evaluation parameters:
        result = self.system_arrival.result # SystemArrival.result is calculated in final_calculations method in
        # SystemArrival at the end of simulation
        format_tmp = self.wb.add_format(self.header_format_dict)
        self.ws.write(self.row, column, "System", format_tmp) # Writing the scope column
        for key, value in result.items(): # Writing parameters name and value
            self.ws.merge_range(self.row, column + 1, self.row, column + 2, key, format_tmp)
            self.ws.write(self.row, column + 3, value, format_tmp)
            self.row += 1
    def close_file(self): # It will close and save the Excel file in the project directory
        """Close and save the workbook; must be called exactly once at the end."""
        self.wb.close()
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,768
|
peteramazonian/simulation_project
|
refs/heads/master
|
/number_generator.py
|
import random
from math import exp
class NumberGenerator:
    """Random integer generators used for "number of arrivals" in simulation models."""

    class Discrete(random.Random):
        """Draws values from an arbitrary discrete distribution.

        x  -- tuple of the possible values.
        fx -- tuple of their probabilities (should sum to 1).
        Optional keyword argument ``seed`` fixes the RNG seed for reproducibility.

        Raises ValueError when x and fx differ in length.
        """

        def __init__(self, x: tuple, fx: tuple, **kwargs):
            self.x = kwargs.get("seed")  # RNG seed (None -> system entropy)
            self.fx_list = fx
            self.x_list = x
            if len(self.x_list) != len(self.fx_list):
                raise ValueError("x_list and fx_list should have same number of elements")
            super().__init__(self.x)

        def generate(self):
            """Return one random value via inverse-CDF lookup."""
            rnd = self.random()
            cumulative = 0.0
            # Running sum instead of re-summing the prefix on every step (was O(n^2)).
            for value, probability in zip(self.x_list, self.fx_list):
                cumulative += probability
                if rnd < cumulative:
                    return value
            # Float rounding can leave the cumulative sum slightly below 1.0, which
            # previously made this method fall through and return None for rnd
            # values near 1; map those to the last value instead.
            return self.x_list[-1]

    class Static:
        """Always returns the same fixed number."""

        def __init__(self, x=0):
            self.x = x

        def generate(self):
            return self.x

    class Poisson(random.Random):
        """Poisson-distributed integers with the given mean (Knuth's product method).

        Optional keyword argument ``seed`` fixes the RNG seed for reproducibility.
        """

        def __init__(self, mean=1, **kwargs):
            self.x = kwargs.get("seed")  # RNG seed (None -> system entropy)
            self.mean = mean
            self.e = exp(-1 * mean)  # e^-mean: the acceptance threshold
            super().__init__(self.x)

        def generate(self):
            """Return one Poisson variate (non-negative int)."""
            n = -1
            p = 1
            # Multiply uniform variates until the product drops below e^-mean.
            while p > self.e:
                p = p * self.random()
                n += 1
            return n
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,769
|
peteramazonian/simulation_project
|
refs/heads/master
|
/main_single_run.py
|
from service_station import ServiceStation
from system_arrival import SystemArrival
from movement import Movement
from time_generator import TimeGenerator
from number_generator import NumberGenerator
import time_management
from logger_single_run import LoggerSR
# It's our simulation's main file.
# Here we import classes and functions from other project files.
# Then we need to make objects from our classes and set attributes.
# These objects are used to setup the system in a modular way.
# You can make as many service stations as you need with their own attributes, then arrange the whole system together.
# ---------------------------------------------------------------------
# Creating SystemArrival objects
# ---------------------------------------------------------------------
# -------- First SystemArrival object --------
t_generator = TimeGenerator.Exponential(3) # Creating its TimeGenerator
n_generator = NumberGenerator.Static(1) # Creating its NumberGenerator
ief1 = SystemArrival("ief1", t_generator, n_generator) # Creating first SystemArrival object
del n_generator, t_generator
# -------- Second SystemArrival object --------
t_generator = TimeGenerator.Exponential(5) # Creating its TimeGenerator
n_generator = NumberGenerator.Discrete((1, 2, 3, 4), (0.2, 0.3, 0.3, 0.2)) # Creating its NumberGenerator
ief2 = SystemArrival("ief2", t_generator, n_generator) # Creating second SystemArrival object
del n_generator, t_generator
# -------- Third SystemArrival object --------
t_generator = TimeGenerator.Uniform(0, 120) # Creating its TimeGenerator
n_generator = NumberGenerator.Poisson(30) # Creating its NumberGenerator
ief3 = SystemArrival("ief3", t_generator, n_generator) # Creating third SystemArrival object
del n_generator, t_generator
# ---------------------------------------------------------------------
# Creating ServiceStation objects
# ---------------------------------------------------------------------
# -------- First ServiceStation object --------
t_generator = TimeGenerator.DoubleTriangular(1, 2, 4, 1, 2, 3) # Creating its TimeGenerator
ss1 = ServiceStation("ss1", t_generator, 5) # Creating first ServiceStation object
del t_generator
# -------- Second ServiceStation object --------
t_generator = TimeGenerator.Uniform(0.5, 2) # Creating its TimeGenerator
ss2 = ServiceStation("ss2", t_generator, 2) # Creating second ServiceStation object
del t_generator
# -------- Third ServiceStation object --------
t_generator = TimeGenerator.Triangular(10, 20, 30) # Creating its TimeGenerator
ss3 = ServiceStation("ss3", t_generator, 30) # Creating third ServiceStation object
del t_generator
# ---------------------------------------------------------------------
# Creating Movement objects
# One movement per service station plus a final one that exits the system.
# ---------------------------------------------------------------------
m1 = Movement(TimeGenerator.Static(0))
m2 = Movement(TimeGenerator.Exponential(0.5))
m3 = Movement(TimeGenerator.Exponential(0.5))
m4 = Movement(TimeGenerator.Exponential(1))
Movement.check()  # Verify movement count matches the station layout
# ---------------------------------------------------------------------
# Creating Loggers
# ---------------------------------------------------------------------
# time_management.logger_set_list(ServiceStation.list)
logger = LoggerSR(ServiceStation.list)
# ---------------------------------------------------------------------
# Creating Preliminary FEL
# Seed the Future Event List with the first arrivals and server rest times.
# ---------------------------------------------------------------------
ief1.set_first_arrival(0)
ief2.set_first_arrival(0)
ief3.set_single_arrival(60)
ss1.set_rest_times([50, 110, 230, 290])
ss2.set_rest_times([50, 110, 230, 290])
# ---------------------------------------------------------------------
# Set Duration
# ---------------------------------------------------------------------
es = 300  # End-of-simulation clock
time_management.set_end_of_simulation(es)
# ---------------------------------------------------------------------
# RUN!
# Handle events until the end-of-simulation event raises SimulationDone.
# ---------------------------------------------------------------------
try:
    while True:
        logger.fel_logger(time_management.advance_time())
except time_management.SimulationDone:
    # Compute and write the final statistics once the run is over.
    for ss in ServiceStation.list:
        ss.final_calculations()
    SystemArrival.final_calculations()
    logger.fel_logger((es, "ES", 0))  # Log the closing "ES" pseudo-event at the end clock
    logger.result_logger()
    print("Simulation DONE!")
logger.close_file()
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,770
|
peteramazonian/simulation_project
|
refs/heads/master
|
/main_multi_run.py
|
import sys
import importlib
from service_station import ServiceStation
from system_arrival import SystemArrival
from movement import Movement
from time_generator import TimeGenerator
from number_generator import NumberGenerator
import time_management
from logger_multi_run import LoggerMR
replications = 100  # Number of independent simulation runs to average over
result = []  # Per-scope dicts of summed statistics across replications
ss_names = ['ss1', 'ss2', 'ss3']  # Must match the station names created below
logger = LoggerMR(ss_names, replications)
i = 0  # Completed-replication counter
while i < replications:
    # Reload the stateful modules so class-level counters, FEL and clock are
    # reset to a clean state before each replication.
    importlib.reload(sys.modules['service_station'])
    importlib.reload(sys.modules['system_arrival'])
    importlib.reload(sys.modules['movement'])
    importlib.reload(sys.modules['time_management'])
    from service_station import ServiceStation
    from system_arrival import SystemArrival
    from movement import Movement
    # It's our simulation's main file.
    # Here we import classes and functions from other project files.
    # Then we need to make objects from our classes and set attributes.
    # These objects are used to setup the system in a modular way.
    # You can make as many service stations as you need with their own attributes, then arrange the whole system
    # ---------------------------------------------------------------------
    # Creating SystemArrival objects
    # ---------------------------------------------------------------------
    # -------- First SystemArrival object --------
    t_generator = TimeGenerator.Exponential(3) # Creating its TimeGenerator
    n_generator = NumberGenerator.Static(1) # Creating its NumberGenerator
    ief1 = SystemArrival("ief1", t_generator, n_generator) # Creating first SystemArrival object
    del n_generator, t_generator
    # -------- Second SystemArrival object --------
    t_generator = TimeGenerator.Exponential(5) # Creating its TimeGenerator
    n_generator = NumberGenerator.Discrete((1, 2, 3, 4), (0.2, 0.3, 0.3, 0.2)) # Creating its NumberGenerator
    ief2 = SystemArrival("ief2", t_generator, n_generator) # Creating second SystemArrival object
    del n_generator, t_generator
    # -------- Third SystemArrival object --------
    t_generator = TimeGenerator.Uniform(0, 120) # Creating its TimeGenerator
    n_generator = NumberGenerator.Poisson(30) # Creating its NumberGenerator
    ief3 = SystemArrival("ief3", t_generator, n_generator) # Creating third SystemArrival object
    del n_generator, t_generator
    # ---------------------------------------------------------------------
    # Creating ServiceStation objects
    # ---------------------------------------------------------------------
    # -------- First ServiceStation object --------
    t_generator = TimeGenerator.DoubleTriangular(1, 2, 4, 1, 2, 3) # Creating its TimeGenerator
    ss1 = ServiceStation("ss1", t_generator, 5) # Creating first ServiceStation object
    del t_generator
    # -------- Second ServiceStation object --------
    t_generator = TimeGenerator.Uniform(0.5, 2) # Creating its TimeGenerator
    ss2 = ServiceStation("ss2", t_generator, 2) # Creating second ServiceStation object
    del t_generator
    # -------- Third ServiceStation object --------
    t_generator = TimeGenerator.Triangular(10, 20, 30) # Creating its TimeGenerator
    ss3 = ServiceStation("ss3", t_generator, 30) # Creating third ServiceStation object
    del t_generator
    # ---------------------------------------------------------------------
    # Creating Movement objects
    # One movement per service station plus a final one that exits the system.
    # ---------------------------------------------------------------------
    m1 = Movement(TimeGenerator.Static(0))
    m2 = Movement(TimeGenerator.Exponential(0.5))
    m3 = Movement(TimeGenerator.Exponential(0.5))
    m4 = Movement(TimeGenerator.Exponential(1))
    Movement.check()  # Verify movement count matches the station layout
    # ---------------------------------------------------------------------
    # Creating Preliminary FEL
    # Seed the Future Event List with the first arrivals and server rest times.
    # ---------------------------------------------------------------------
    ief1.set_first_arrival(0)
    ief2.set_first_arrival(0)
    ief3.set_single_arrival(60)
    ss1.set_rest_times([50, 110, 230, 290])
    ss2.set_rest_times([50, 110, 230, 290])
    # ---------------------------------------------------------------------
    # Set Duration
    # ---------------------------------------------------------------------
    es = 300  # End-of-simulation clock
    time_management.set_end_of_simulation(es)
    # ---------------------------------------------------------------------
    # RUN!
    # Handle events until the end-of-simulation event raises SimulationDone.
    # ---------------------------------------------------------------------
    try:
        while True:
            time_management.advance_time()
    except time_management.SimulationDone:
        for ss in ServiceStation.list:
            ss.final_calculations()
        SystemArrival.final_calculations()
        logger.replication_logger(ServiceStation.list, SystemArrival)
        i += 1
        print('#' + str(i) + ' : Simulation DONE!')
        # Accumulate per-scope statistics: first replication seeds the dicts,
        # later replications sum into them (divided by `replications` at print time).
        if i == 1:
            for ss in ServiceStation.list:
                result.append(ss.result)
            result.append(SystemArrival.result)
        else:
            for j, ss in enumerate(ServiceStation.list):
                for key, value in ss.result.items():
                    result[j][key] += value
            for key, value in SystemArrival.result.items():
                result[-1][key] += value
ss_names.append('System')  # Last entry of `result` holds system-wide statistics
for num, scope in enumerate(result):
    for key, value in scope.items():
        print("%s: %s = %s" %(ss_names[num], key, round(value / replications, 10)))
logger.result_logger(ss_names, result)
logger.close_file()
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,771
|
peteramazonian/simulation_project
|
refs/heads/master
|
/time_management.py
|
import bisect
# ----------------------------------------------------------------------------------------------------------------
# In this module we handle anything related to FEL and clock. in another word this module is the engine that makes
# the code to move.
# ----------------------------------------------------------------------------------------------------------------
fel = []  # The simulation's main Future Event List, kept ordered by event clock.
clock = 0  # The current simulation time while handling future events.
def add_to_fel(event_notice: tuple):
    """Insert *event_notice* into the FEL, keeping it sorted by event clock.

    An event notice is a tuple whose first element is the event clock and
    whose last element is the handler function.
    """
    try:
        # bisect keeps an already-sorted list sorted with a binary search.
        bisect.insort_left(fel, event_notice)
    except TypeError:
        # Two notices tied on every comparable element, so Python tried to
        # compare their (unorderable) handler functions. Fall back to an
        # append followed by a stable sort on the clock only.
        fel.append(event_notice)
        fel.sort(key=lambda notice: notice[0])
class SimulationDone(Exception):
    """Raised when the end-of-simulation event is handled, stopping the run."""
def es(*args):
    """End-of-Simulation event handler: stop the run by raising SimulationDone.

    Accepts (and ignores) any arguments so it can be called like any other
    event handler from the FEL.
    """
    raise SimulationDone
def set_end_of_simulation(es_time):
    """Schedule the end-of-simulation ("es") event at clock == *es_time*."""
    add_to_fel((es_time, es))
def advance_time():
    """Pop and handle the earliest event in the FEL, advancing the clock.

    Returns the handled event notice (loggers use it). The notice is removed
    from the FEL *before* its handler runs so that handlers may schedule
    follow-up events at the very same clock (e.g. zero-duration movements).
    """
    global clock
    notice = fel.pop(0)
    clock = notice[0]  # Jump the simulation clock to this event's time.
    handler = notice[-1]   # Last element: the event's handler function.
    handler(notice[-2])    # Second-to-last element: its argument (usually a costumer id).
    return notice
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,772
|
peteramazonian/simulation_project
|
refs/heads/master
|
/system_arrival.py
|
import time_management
from time_management import add_to_fel
__id__ = 10000  # Last costumer id handed out; the first generated id is 10001.
# TODO new arrivals in fel dont have id?!?
def id_generator():
    """Return the next unique, monotonically increasing costumer id."""
    global __id__
    __id__ += 1
    return __id__
class SystemArrival:
    """An arrival source that feeds costumers into the system.

    Class-level attributes hold system-wide statistics shared by every
    arrival source; instances differ only in their time/number generators.
    """
    list = []  # Every SystemArrival instance created so far.
    costumers_inside_dict = {}  # costumer_id -> arrival clock, for costumers still inside.
    costumers_departured = 0  # Count of costumers that have left the system.
    costumers_total_time = 0  # Accumulated (departure - arrival) time over all departures.
    result = {}  # Final statistics, filled by final_calculations().
    @classmethod
    def departure(cls, costumer_id):
        """Event handler: register that *costumer_id* leaves the system at the current clock."""
        cls.costumers_departured += 1
        cls.costumers_total_time += time_management.clock - cls.costumers_inside_dict[costumer_id]
        cls.costumers_inside_dict.pop(costumer_id)
    def __init__(self, name, inter_arrival_time_generator, number_of_arrivals_generator):
        self.name = name
        self.time_generator = inter_arrival_time_generator  # inter-arrival time generator
        self.number_generator = number_of_arrivals_generator  # batch-size generator
        SystemArrival.list.append(self)
        # Imported here (not at module top) to avoid a circular import with movement.py.
        self.m_list = __import__('movement').Movement.list
    # Overriding Python's original __repr__ function
    def __repr__(self):
        return self.name
    def set_first_arrival(self, beginning_time):
        """Schedule this source's first recurring-arrival event at/after *beginning_time*."""
        event_notice = (
            self.time_generator.generate() + beginning_time, self.name, self.number_generator.generate(),
            self.new_arrival)
        add_to_fel(event_notice)
    def new_arrival(self, number_of_arrivals):
        """Event handler: admit a batch of costumers, then schedule the next batch."""
        for i in range(number_of_arrivals):
            id_tmp = id_generator()
            SystemArrival.costumers_inside_dict[id_tmp] = time_management.clock
            self.m_list[0].move(id_tmp)  # Send each new costumer down the first movement leg.
        # generating next arrival event
        event_notice = (
            self.time_generator.generate() + time_management.clock, self.name, self.number_generator.generate(),
            self.new_arrival)
        add_to_fel(event_notice)
    def set_single_arrival(self, beginning_time):
        """Schedule a one-off arrival batch (no follow-up arrivals are generated)."""
        event_notice = (
            self.time_generator.generate() + beginning_time, self.name, self.number_generator.generate(),
            self.new_single_arrival)
        add_to_fel(event_notice)
    def new_single_arrival(self, number_of_arrivals):
        """Event handler: admit one batch of costumers without rescheduling."""
        for i in range(number_of_arrivals):
            id_tmp = id_generator()
            SystemArrival.costumers_inside_dict[id_tmp] = time_management.clock
            self.m_list[0].move(id_tmp)
    @classmethod
    def final_calculations(cls):
        """Compute the end-of-run statistics into cls.result.

        Raises ZeroDivisionError if no costumer has departed by the end of the run.
        """
        cls.result = dict(average_time_in_system=cls.costumers_total_time / cls.costumers_departured)
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,773
|
peteramazonian/simulation_project
|
refs/heads/master
|
/time_generator.py
|
""" Random time generators to be used for inter arrival time or activity time in simulation models.
"""
import random
from math import sqrt, log
class TimeGenerator:
    """Namespace grouping random time generators (inter-arrival / activity times).

    Every generator exposes generate() -> number, rounded to 3 decimals for
    the continuous distributions. Subclasses of random.Random accept an
    optional ``seed`` keyword argument for reproducible streams.
    """

    class Uniform(random.Random):
        """Uniform variate on [lower_limit, upper_limit]."""

        def __init__(self, lower_limit=0, upper_limit=1, **kwargs):
            self.x = kwargs.get("seed")  # RNG seed (None -> system entropy)
            self.lower_limit = lower_limit
            self.upper_limit = upper_limit
            super().__init__(self.x)

        def generate(self):
            span = self.upper_limit - self.lower_limit
            return round(self.random() * span + self.lower_limit, 3)

    class Static:
        """Always returns the same fixed time (e.g. for zero-length movements)."""

        def __init__(self, x=0):
            self.x = x

        def generate(self):
            return self.x

    class Exponential(random.Random):
        """Exponential variate with the given mean."""

        def __init__(self, mean=1, **kwargs):
            self.x = kwargs.get("seed")  # RNG seed (None -> system entropy)
            self.rate = 1 / mean  # lambda parameter of the distribution
            super().__init__(self.x)

        def generate(self):
            # expovariate computes -log(1 - random()) / lambda, whose log
            # argument stays in (0, 1]; the previous -log(random()) raised a
            # math domain error whenever random() returned exactly 0.0.
            return round(self.expovariate(self.rate), 3)

    class Triangular(random.Random):
        """Triangular variate with limits and mode, via the inverse-CDF method."""

        def __init__(self, lower_limit=0, mode=.5, upper_limit=1, **kwargs):
            self.x = kwargs.get("seed")  # RNG seed (None -> system entropy)
            self.a = lower_limit
            self.b = upper_limit
            self.c = mode
            self.Fc = (self.c - self.a) / (self.b - self.a)  # CDF value at the mode
            super().__init__(self.x)

        def generate(self):
            rnd = self.random()
            if rnd < self.Fc:  # sample falls on the rising side of the density
                return round(self.a + sqrt(rnd * (self.b - self.a) * (self.c - self.a)), 3)
            return round(self.b - sqrt((1 - rnd) * (self.b - self.a) * (self.b - self.c)), 3)

    class DoubleTriangular(random.Random):
        """Sum of two independent triangular variates drawn from one RNG stream."""

        def __init__(self, lower_limit_1=0, mode_1=0.5, upper_limit_1=1, lower_limit_2=0, mode_2=.5, upper_limit_2=1, **kwargs):
            self.x = kwargs.get("seed")  # RNG seed (None -> system entropy)
            self.a1 = lower_limit_1
            self.b1 = upper_limit_1
            self.c1 = mode_1
            self.a2 = lower_limit_2
            self.b2 = upper_limit_2
            self.c2 = mode_2
            self.Fc1 = (self.c1 - self.a1) / (self.b1 - self.a1)
            self.Fc2 = (self.c2 - self.a2) / (self.b2 - self.a2)
            super().__init__(self.x)

        @staticmethod
        def _triangular_sample(rnd, a, b, c, f_c):
            # Shared inverse-CDF transform for one triangular component
            # (replaces the duplicated per-component formulas).
            if rnd < f_c:
                return round(a + sqrt(rnd * (b - a) * (c - a)), 3)
            return round(b - sqrt((1 - rnd) * (b - a) * (b - c)), 3)

        def generate(self):
            # Two draws in the same order as before: first component, then second.
            t1 = self._triangular_sample(self.random(), self.a1, self.b1, self.c1, self.Fc1)
            t2 = self._triangular_sample(self.random(), self.a2, self.b2, self.c2, self.Fc2)
            return t1 + t2

    class DT:
        """Sum of two pre-built Triangular generator objects."""

        def __init__(self, triangular_obj_1, triangular_obj_2):
            self.t1 = triangular_obj_1
            self.t2 = triangular_obj_2

        def generate(self):
            return self.t1.generate() + self.t2.generate()
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,774
|
peteramazonian/simulation_project
|
refs/heads/master
|
/service_station.py
|
import time_management
from time_management import add_to_fel, postponed_rest_log_editor
# ----------------------------------------------------------------
# Creating class ServiceStation
# Our service stations are objects of this class
# Costumer arrivals, departures, servers leaving for rest and getting back to work are handled here
# ----------------------------------------------------------------
# event notices created here are as follow:
# station departure: (time, Di, costumer_id, method)
# server rest: (time, Ri, method)
# server back: (time, Bi, method)
class ServiceStation:
    """One multi-server service station of the discrete-event simulation.

    Handles costumer arrivals and departures, servers leaving for rest and
    coming back, and accumulates the time-weighted sums that
    final_calculations() turns into the station's evaluation measures.
    Event notices pushed onto the FEL follow the shapes documented in the
    module-level comment above this class.
    """
    # Class-level registry of every station, in creation order.  Shadows the
    # builtin name `list` inside this class body only; kept because external
    # code references ServiceStation.list.
    list = []
    def __init__(self, name, service_time_generator, num_of_servers):
        self.name = name  # What you call this station in real world
        self.service_time_generator = service_time_generator  # Service_time_generator is an object of TimeGenerator cls
        self.num_of_servers = num_of_servers  # Number of servers working in this ServiceStation
        self.available_servers = num_of_servers  # Servers currently on duty (not resting).
        self.busy_servers = 0  # Number of busy servers at the beginning of the simulation. Usually equals to 0
        self.queue_list = []  # List of costumers waiting in queue for this station. queue_list elements:
        # (queue_joined_time, costumer_id)
        self.rest_in_waiting = 0  # When there is a server waiting to finish the serve, then go to rest, this will be
        # equal to 1
        self.server_rest_duration = 10  # How long is each server's rest duration
        self.position = len(ServiceStation.list) + 1  # 1-based index; used in event labels ("D1", "R2", ...).
        ServiceStation.list.append(self)
        # Runtime __import__ of movement — presumably to dodge a circular
        # import between service_station and movement; confirm before refactoring.
        self.m_list = __import__('movement').Movement.list
        self.result = {}  # Filled by final_calculations() with the summary measures.
        # --------------------------------------------------------
        # Variables to measure system evaluation parameters:
        self.q_len_cumulative = 0  # Time-weighted sum of queue length.
        self.q_len_last_clock = 0  # Last clock at which queue length changed.
        self.q_len_max = 0  # Longest queue observed so far.
        # ---
        self.service_total_time = 0  # Sum of all generated service durations.
        self.service_total_count = 0  # Number of services started.
        # ---
        self.servers_total_busy_t = 0  # Sum of busy servers * time in different periods
        self.servers_busy_last_clock = 0  # Last time the busy servers number changed
        self.servers_total_available_t = 0  # Sum of available servers * time in different periods
        self.servers_available_last_clock = 0  # Last time the available servers number changed
        # ---
        self.queue_delay_cumulative = 0  # Total time costumers waited in queue
        # ---
        # TODO edit this
        self.queue_total_time = 0  # Total time during which the queue was non-empty.
    # Overriding Python's original __repr__ function
    def __repr__(self):
        return self.name
    def return_printables(self):
        """Return the raw state/statistics values consumed by the loggers."""
        return([self.available_servers, self.busy_servers, len(self.queue_list), self.rest_in_waiting,
                self.q_len_cumulative, self.q_len_max, self.service_total_time, self.service_total_count,
                self.queue_delay_cumulative, self.queue_total_time, self.servers_total_busy_t,
                self.servers_total_available_t])
    # Handles arrivals to this station.
    def arrival(self, costumer_id):
        """Start service immediately if an on-duty server is free, else enqueue."""
        if self.busy_servers < self.available_servers:  # No waiting in Queue
            # Close the current busy-servers*time period before changing state.
            self.servers_total_busy_t += self.busy_servers * (time_management.clock - self.servers_busy_last_clock)
            self.servers_busy_last_clock = time_management.clock
            self.busy_servers += 1
            event_duration = self.service_time_generator.generate()
            event_notice = (
                event_duration + time_management.clock, "D" + str(self.position), costumer_id, self.departure)
            add_to_fel(event_notice)  # Generating departure event for this costumer.
            self.service_total_time += event_duration
            self.service_total_count += 1
        else:  # Waiting in queue
            # Close the current queue-length*time period before the queue grows.
            self.q_len_cumulative += len(self.queue_list) * (time_management.clock - self.q_len_last_clock)
            self.queue_total_time += int(bool(len(self.queue_list))) * (time_management.clock - self.q_len_last_clock)
            self.q_len_last_clock = time_management.clock
            self.queue_list.append((time_management.clock, costumer_id))  # Adding costumer to queue
            if len(self.queue_list) > self.q_len_max:
                self.q_len_max = len(self.queue_list)
    # Handles all departures from this station. departure will happen when service ends for one costumer.
    def departure(self, costumer_id):
        """Finish one service: pull the next costumer from the queue, free the
        server, or release a server whose rest was postponed — then move the
        departing costumer to the next leg."""
        if not self.rest_in_waiting:  # If there is no server waiting to get rest.
            if self.queue_list.__len__() > 0:
                # Server stays busy and immediately serves the head of the queue.
                event_duration = self.service_time_generator.generate()
                event_notice = (
                    event_duration + time_management.clock, "D" + str(self.position), self.queue_list[0][1],
                    self.departure)
                add_to_fel(event_notice)  # Generating departure event for next costumer waiting in queue.
                self.service_total_time += event_duration
                self.service_total_count += 1
                self.q_len_cumulative += len(self.queue_list) * (time_management.clock - self.q_len_last_clock)
                self.queue_total_time += int(bool(len(self.queue_list))) * (
                        time_management.clock - self.q_len_last_clock)
                self.q_len_last_clock = time_management.clock
                self.queue_delay_cumulative += time_management.clock - self.queue_list[0][0]
                del self.queue_list[0]  # Deleting the costumer which starts getting service, from queue.
            else:
                self.servers_total_busy_t += self.busy_servers * (time_management.clock - self.servers_busy_last_clock)
                self.servers_busy_last_clock = time_management.clock
                self.busy_servers -= 1
        else:  # If there is a server waiting to get rest
            self.servers_total_busy_t += self.busy_servers * (time_management.clock - self.servers_busy_last_clock)
            self.servers_busy_last_clock = time_management.clock
            self.busy_servers -= 1  # The server is no longer busy
            self.rest_in_waiting = 0  # so there is no busy server, waiting to get rest
            event_notice = (time_management.clock, "R" + str(self.position), self.server_rest)
            add_to_fel(event_notice)  # Generating the new server rest event notice
            # Adding this new event notice to fel is necessary for fel logging
        # The departing costumer continues its route.  m_list is indexed with
        # this station's 1-based position — implies movement.Movement.list has
        # an entry preceding station 1; confirm against movement.py.
        self.m_list[self.position].move(costumer_id)
    # Handles server rest periods. in this model, server rest event notices are initialized in fel.
    def server_rest(self, *args):
        """Send one server to rest now, or postpone the rest until the current
        service ends when every on-duty server is busy."""
        if self.busy_servers < self.available_servers:
            self.servers_total_available_t += self.available_servers * (time_management.clock - self.servers_available_last_clock)
            self.servers_available_last_clock = time_management.clock
            self.available_servers -= 1
            event_notice = (self.server_rest_duration + time_management.clock, "B" + str(self.position), self.server_back)
            add_to_fel(event_notice)  # Generates event notice for server coming back from rest after 10 mins.
        else:
            self.rest_in_waiting = 1  # It's used in departure() method.
            postponed_rest_log_editor()
    # Handles when a server is back from rest and starts serving a new costumer if queue is not empty.
    def server_back(self, *args):
        """Return a rested server to duty; if costumers are queued, the server
        starts serving the head of the queue immediately."""
        self.servers_total_available_t += self.available_servers * (time_management.clock - self.servers_available_last_clock)
        self.servers_available_last_clock = time_management.clock
        self.available_servers += 1
        if self.queue_list.__len__() > 0:
            self.servers_total_busy_t += self.busy_servers * (time_management.clock - self.servers_busy_last_clock)
            self.servers_busy_last_clock = time_management.clock
            self.busy_servers += 1
            event_duration = self.service_time_generator.generate()
            event_notice = (
                event_duration + time_management.clock, "D" + str(self.position), self.queue_list[0][1],
                self.departure)
            add_to_fel(event_notice)  # Generating departure event for next costumer waiting in queue.
            self.service_total_time += event_duration
            self.service_total_count += 1
            self.q_len_cumulative += len(self.queue_list) * (time_management.clock - self.q_len_last_clock)
            self.queue_total_time += int(bool(len(self.queue_list))) * (time_management.clock - self.q_len_last_clock)
            self.q_len_last_clock = time_management.clock
            self.queue_delay_cumulative += time_management.clock - self.queue_list[0][0]
            del self.queue_list[0]  # Deleting the costumer which starts getting service, from queue.
    def set_rest_times(self, rest_times_list):
        """Schedule one server-rest event notice per time in rest_times_list."""
        for t in rest_times_list:
            event_notice = (t, "R" + str(self.position), self.server_rest)
            add_to_fel(event_notice)
    def final_calculations(self):
        """Close all open time-weighted accumulation periods at the current
        clock and populate self.result with the station's summary measures.
        Divides by service_total_count and clock — assumes at least one
        service occurred and clock > 0."""
        self.q_len_cumulative += len(self.queue_list) * (time_management.clock - self.q_len_last_clock)
        self.queue_total_time += int(bool(len(self.queue_list))) * (time_management.clock - self.q_len_last_clock)
        self.servers_total_busy_t += self.busy_servers * (time_management.clock - self.servers_busy_last_clock)
        self.servers_total_available_t += self.available_servers * (
                time_management.clock - self.servers_available_last_clock)
        self.result = dict(
            total_wait_time=(self.service_total_time + self.queue_delay_cumulative) / self.service_total_count,
            average_queue_delay=self.queue_delay_cumulative / self.service_total_count,
            average_queue_length=self.q_len_cumulative / time_management.clock,
            maximum_queue_length=self.q_len_max,
            servers_efficiency=self.servers_total_busy_t / self.servers_total_available_t,
            queue_busy_percentage=self.queue_total_time / time_management.clock
        )
|
{"/movement.py": ["/time_management.py", "/system_arrival.py"], "/main_single_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_single_run.py"], "/main_multi_run.py": ["/service_station.py", "/system_arrival.py", "/movement.py", "/time_generator.py", "/number_generator.py", "/time_management.py", "/logger_multi_run.py"], "/system_arrival.py": ["/time_management.py"], "/service_station.py": ["/time_management.py"]}
|
2,785
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/data_curated/annotate_proteins.py
|
"""
Given a CSV where some column indicates peptides, add a column indicating which
protein(s) from some specified proteome contain that peptide.
"""
import argparse
import time
import sys
import tqdm
import pandas
import numpy
import shellinford
from mhc2flurry.fasta import read_fasta_to_dataframe
# Command-line interface for the peptide -> protein annotation script.
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
    "reference",
    metavar="FASTA",
    help="Fasta proteome to search.")
parser.add_argument(
    "--annotate",
    action="append",
    default=[],
    nargs=2,
    metavar="CSV",
    help="Input and output file pairs. Specify this argument multiple times "
    "to process multiple input files, each of which will be written to its "
    "respective output file. The output file can be specified as '-' to "
    "overwrite the input file.")
parser.add_argument(
    "--peptide-column",
    default="peptide",
    help="Name of column that gives peptides. Default: %(default)s")
parser.add_argument(
    "--protein-column",
    default="proteins",
    help="Name of column to write proteins. Default: %(default)s")
parser.add_argument(
    "--full-descriptions",
    default=False,
    action="store_true",
    help="Write the full protein descriptions, not just the IDs.")
parser.add_argument(
    "--join-character",
    default=" ",
    help="Separator to use between protein names. Default: '%(default)s'")
parser.add_argument(
    "--fm-index-suffix",
    metavar="SUFFIX",
    help="Use a pre-existing fm index found by concatenating SUFFIX onto each "
    "input fasta filename.")
def run():
    """Entry point: annotate peptides in each --annotate input CSV with the
    proteins (from the reference FASTA) that contain them, writing one output
    CSV per input.

    Loop variables renamed from input/output to avoid shadowing the builtins.
    """
    args = parser.parse_args(sys.argv[1:])
    peptides = set()
    input_filename_df_and_output_filename = []
    for (input_path, output_path) in args.annotate:
        if output_path.strip() == "-":
            # "-" means write back over the input file.
            output_path = input_path
        df = pandas.read_csv(input_path)
        print("Read peptides", input_path)
        print(df)
        input_filename_df_and_output_filename.append((input_path, df, output_path))
        peptides.update(df[args.peptide_column].unique())
    print("Read %d peptides to annotate" % len(peptides))
    proteome_df = read_fasta_to_dataframe(
        args.reference, full_descriptions=args.full_descriptions)
    print("Read proteome:")
    print(proteome_df)
    fm = shellinford.FMIndex()
    start = time.time()
    if args.fm_index_suffix:
        # Reuse a pre-built FM index instead of rebuilding from the proteome.
        name = args.reference + args.fm_index_suffix
        print("Using pre-existing fm index", name)
        fm.read(name)
        print("Read in %0.3f sec." % (time.time() - start))
    else:
        print("Building FM index")
        fm.build(proteome_df.sequence.tolist())
        print("Built index of %d sequences in %0.3f sec." % (
            len(proteome_df), time.time() - start))
    print("Annotating peptides")
    peptide_to_matches = {}
    for peptide in tqdm.tqdm(peptides):
        # doc_id is used as a positional row index into proteome_df — assumes
        # index build order matches row order; TODO confirm for indexes
        # loaded from disk via --fm-index-suffix.
        matches = [item.doc_id for item in fm.search(peptide)]
        names = args.join_character.join(
            proteome_df.loc[matches, "sequence_id"].values)
        peptide_to_matches[peptide] = names
    print("Writing files")
    for (input_path, df, output_path) in input_filename_df_and_output_filename:
        print(input_path)
        df[args.protein_column] = df[args.peptide_column].map(
            peptide_to_matches)
        df.to_csv(output_path, index=False)
        print("Wrote", output_path)
if __name__ == '__main__':
    run()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,786
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/data_curated/curate_ms_by_pmid.py
|
"""
Filter and combine various peptide/MHC datasets to derive a composite training set,
optionally including eluted peptides identified by mass-spec.
The handle_pmid_XXXX functions should return a DataFrame with columns:
- peptide
- sample_id
- hla [space separated list of alleles]
- pulldown_antibody
- format [monoallelic, multiallelic, DR-specific]
- mhc_class [should be II]
- sample type [an expression group, e.g. "spleen" or "expi293"]
- cell_line [for samples deriving from a single known cell line]
"""
import sys
import argparse
import os
import json
import collections
from six.moves import StringIO
from mhc2flurry.common import normalize_allele_name
import pandas
# Command-line interface for the mass-spec / expression curation script.
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
    "--ms-item",
    nargs="+",
    action="append",
    metavar="PMID FILE, ... FILE",
    default=[],
    help="Mass spec item to curate: PMID and list of files")
parser.add_argument(
    "--expression-item",
    nargs="+",
    action="append",
    metavar="LABEL FILE, ... FILE",
    default=[],
    help="Expression data to curate: dataset label and list of files")
parser.add_argument(
    "--ms-out",
    metavar="OUT.csv",
    help="Out file path (MS data)")
parser.add_argument(
    "--expression-out",
    metavar="OUT.csv",
    help="Out file path (RNA-seq expression)")
parser.add_argument(
    "--expression-metadata-out",
    metavar="OUT.csv",
    help="Out file path for expression metadata, i.e. which samples used")
parser.add_argument(
    "--debug",
    action="store_true",
    default=False,
    help="Leave user in pdb if PMID is unsupported")
# Registries mapping a PMID / dataset label to its handler function.
# NOTE(review): both are empty here — presumably populated later in the file
# (outside this chunk), e.g. from the module's handle_* functions; confirm.
PMID_HANDLERS = {}
EXPRESSION_HANDLERS = {}
def load(filenames, **kwargs):
    """Load each filename into a value keyed by its path.

    CSVs are read with pandas.read_csv and Excel files (.xlsx/.xls) with
    pandas.read_excel; any other filename is passed through unchanged (the
    path string itself becomes the value).  Extra keyword arguments are
    forwarded to the pandas reader.
    """
    loaded = {}
    for path in filenames:
        if path.endswith(".csv"):
            value = pandas.read_csv(path, **kwargs)
        elif path.endswith((".xlsx", ".xls")):
            value = pandas.read_excel(path, **kwargs)
        else:
            value = path
        loaded[path] = value
    return loaded
def debug(*filenames):
    # Developer helper: load the given files and drop into an interactive
    # ipdb shell so their contents ("loaded") can be inspected by hand.
    loaded = load(filenames)
    import ipdb
    ipdb.set_trace()
# Sample id -> expression group ("sample type") for PMID 31495665
# (Abelin et al. 2019).  Values of the form "mix:a,b,..." denote material
# pooled from several cell lines.
PMID_31495665_SAMPLE_TYPES = {
    "HLA-DR_A375": "a375",
    "HLA-DR_Lung": "lung",
    "HLA-DR_PBMC_HDSC": "pbmc",
    "HLA-DR_PBMC_RG1095": "pbmc",
    "HLA-DR_PBMC_RG1104": "pbmc",
    "HLA-DR_PBMC_RG1248": "pbmc",
    "HLA-DR_SILAC_Donor1_10minLysate": "pbmc",
    "HLA-DR_SILAC_Donor1_5hrLysate": "pbmc",
    "HLA-DR_SILAC_Donor1_DConly": "pbmc",
    "HLA-DR_SILAC_Donor1_UVovernight": "pbmc",
    "HLA-DR_SILAC_Donor2_DC_UV_16hr": "pbmc",
    "HLA-DR_SILAC_Donor2_DC_UV_24hr": "pbmc",
    "HLA-DR_Spleen": "spleen",
    "MAPTAC_A*02:01": "mix:a375,expi293,hek293,hela",
    "MAPTAC_A*11:01": "mix:expi293,hela",
    "MAPTAC_A*32:01": "mix:a375,expi293,hela",
    "MAPTAC_B*07:02": "mix:a375,expi293,hela",
    "MAPTAC_B*45:01": "expi293",
    "MAPTAC_B*52:01": "mix:a375,expi293",
    "MAPTAC_C*03:03": "expi293",
    "MAPTAC_C*06:02": "mix:a375,expi293",
    "MAPTAC_DPB1*06:01/DPA1*01:03_dm+": "expi293",
    "MAPTAC_DPB1*06:01/DPA1*01:03_dm-": "expi293",
    "MAPTAC_DQB1*06:04/DQA1*01:02_dm+": "expi293",
    "MAPTAC_DQB1*06:04/DQA1*01:02_dm-": "expi293",
    "MAPTAC_DRB1*01:01": "mix:a375,b721,expi293,kg1,k562",
    "MAPTAC_DRB1*03:01": "expi293",
    "MAPTAC_DRB1*04:01": "expi293",
    "MAPTAC_DRB1*07:01": "mix:expi293,hek293",
    "MAPTAC_DRB1*11:01": "mix:expi293,k562,kg1",
    "MAPTAC_DRB1*12:01_dm+": "expi293",
    "MAPTAC_DRB1*12:01_dm-": "expi293",
    "MAPTAC_DRB1*15:01": "expi293",
    "MAPTAC_DRB3*01:01_dm+": "expi293",
    "MAPTAC_DRB3*01:01_dm-": "expi293",
}
# All distinct cell-line mixture labels above, sorted for determinism.
CELL_LINE_MIXTURES = sorted(
    set(
        x for x in PMID_31495665_SAMPLE_TYPES.values()
        if x.startswith("mix:")))
def handle_pmid_25502872(filename):
    """Bergseng, ..., Sollid. Immunogenetics 2015 [PMID 25502872]"""
    # Not curated — returns None (presumably treated as "skip" by the caller,
    # which is not visible in this chunk).
    return None
def handle_pmid_26495903(*filenames):
    """Sofron, ..., Fugmann. Eur. J. Immunol. 2015 [PMID 26495903]"""
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_26740625(*filenames):
    """Clement, ..., Santambrogio. J. Biol. Chem. 2016 [PMID 26740625]"""
    # Mouse with transgenic DRB*01:01, collected about 3,000 peptides.
    # Peptides are mouse-derived, MHC II is human.
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_27452731(*filenames):
    """Heyder, ..., Ytterberg. Mol. Cell. Proteomics 2016 [PMID 27452731]"""
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_27726376(*filenames):
    """Wang, ..., Costello. J. Proteom. Res. 2017"""
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_28329770(*filenames):
    """Khodadoust, ..., Alizadeh. Nature 2017 [PMID 28329770]"""
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_28467828(filename):
    """Ooi, ..., Kitching. Nature 2017 [PMID 28467828]"""
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_29314611(filename):
    """Ritz, ..., Fugmann. Proteomics 2018 [PMID 29314611]

    Each Excel sheet is named CELLLINE_RESTRICTION (e.g. "MAVER-1_DR") and
    lists peptide sequences in its index; one standard per-peptide frame is
    built per sheet and all are concatenated.
    """
    # Per-cell-line class II typing, space-separated allele lists.
    hla_types = {
        "MAVER-1": "DRB1*01:01 DRB1*13:01 DRB3*02:02 DQA1*01:01 DQB1*05:01 DQA1*01:03 DQB1*06:03",
        "DOHH2": "DRB1*01:01 DRB1*15:01 DRB5*01:01 DQA1*01:01 DQB1*05:01 DQB1*06:02 DQA1*01:02",
    }
    antibody_by_restriction = {
        "DR": "L243 (HLA-DR)",
        "DQ": "SPVL3 (HLA-DQ)",
    }
    format_by_restriction = {
        "DR": "DR-specific",
        "DQ": "DQ-specific",
    }
    sheets = pandas.read_excel(
        filename, sheet_name=None, skiprows=1, index_col="Sequence")
    frames = []
    for (sheet_name, sheet) in sheets.items():
        sheet_name = sheet_name.upper()
        (cell_line, restriction) = sheet_name.split("_")
        frame = pandas.DataFrame({"peptide": sheet.index.values})
        frame["sample_id"] = sheet_name
        frame["cell_line"] = cell_line
        frame["sample_type"] = "B-CELL"
        frame["mhc_class"] = "II"
        frame["hla"] = hla_types[cell_line]
        frame["pulldown_antibody"] = antibody_by_restriction[restriction]
        frame["format"] = format_by_restriction[restriction]
        frames.append(frame)
    return pandas.concat(frames, ignore_index=True)
def handle_pmid_29317506(*filenames):
    """Ting, ..., Rossjohn. J. Biol. Chem. 2018 [PMID 29317506]"""
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_29632711(*filenames):
    """Nelde, ..., Walz. Oncoimmunology 2018 [PMID 29632711]"""
    # Not curated — returns None (presumably treated as "skip" by the caller).
    return None
def handle_pmid_31495665(filename):
    """Abelin, ..., Rooney Immunity 2019 [PMID 31495665]

    Reads sheet DataS1B (one column of peptides per sample id), builds the
    standard per-peptide table from the per-sample metadata dicts below, and
    returns only the MHC class II samples.

    Fix: removed a duplicate "HLA-DR_Spleen" key from the cell_line dict that
    held an antibody string (copy-paste slip); Python kept only the later
    entry anyway, so behavior is unchanged.
    """
    hla_type = {
        "HLA-DR_A375": "DRB1*07:01 DRB4*01:01 DRB1*04:05",
        "HLA-DR_Lung": "DRB1*01:01 DRB1*03:01 DRB3*01:01",
        "HLA-DR_PBMC_HDSC": "DRB1*03:01 DRB1*11:01 DRB3*01:01 DRB3*02:02",
        "HLA-DR_PBMC_RG1095": "DRB1*03:01 DRB1*11:01 DRB3*01:01 DRB3*02:02",
        "HLA-DR_PBMC_RG1104": "DRB1*01:01 DRB1*11:01 DRB3*02:02",
        "HLA-DR_PBMC_RG1248": "DRB1*03:01 DRB1*03:01 DRB3*01:01 DRB3*01:01",
        # Note: the paper and Data S1 are pretty confusing regarding the donor1
        # and donor2 SILAC experiments. These HLA types are a best guess but
        # I am not 100% confident.
        "HLA-DR_SILAC_Donor1_10minLysate": "DRB1*07:01 DRB4*01:01",
        "HLA-DR_SILAC_Donor1_5hrLysate": "DRB1*07:01 DRB4*01:01",
        "HLA-DR_SILAC_Donor1_DConly": "DRB1*07:01 DRB4*01:01",
        "HLA-DR_SILAC_Donor1_UVovernight": "DRB1*07:01 DRB4*01:01",
        "HLA-DR_SILAC_Donor2_DC_UV_16hr": "DRB1*04:01 DRB4*01:03 DRB1*15:03 DRB5*01:01 DQB1*03:02 DQA1*01:02 DQB1*06:02 DQA1*03:01 DPB1*02:01 DPA1*01:03 DPB1*04:01",
        "HLA-DR_SILAC_Donor2_DC_UV_24hr": "DRB1*04:01 DRB4*01:03 DRB1*15:03 DRB5*01:01 DQB1*03:02 DQA1*01:02 DQB1*06:02 DQA1*03:01 DPB1*02:01 DPA1*01:03 DPB1*04:01",
        "HLA-DR_Spleen": "DRB1*04:01 DRB4*01:03 DRB1*15:03 DRB5*01:01",
        "MAPTAC_A*02:01": "HLA-A*02:01",
        "MAPTAC_A*11:01": "HLA-A*11:01",
        "MAPTAC_A*32:01": "HLA-A*32:01",
        "MAPTAC_B*07:02": "HLA-B*07:02",
        "MAPTAC_B*45:01": "HLA-B*45:01",
        "MAPTAC_B*52:01": "HLA-B*52:01",
        "MAPTAC_C*03:03": "HLA-C*03:03",
        "MAPTAC_C*06:02": "HLA-C*06:02",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm+": "DPA1*01:03 DPB1*06:01",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm-": "DPA1*01:03 DPB1*06:01",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm+": "DQA1*01:02 DQB1*06:04",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm-": "DQA1*01:02 DQB1*06:04",
        "MAPTAC_DRB1*01:01": "DRB1*01:01",
        "MAPTAC_DRB1*03:01": "DRB1*03:01",
        "MAPTAC_DRB1*04:01": "DRB1*04:01",
        "MAPTAC_DRB1*07:01": "DRB1*07:01",
        "MAPTAC_DRB1*11:01": "DRB1*11:01",
        "MAPTAC_DRB1*12:01_dm+": "DRB1*12:01",
        "MAPTAC_DRB1*12:01_dm-": "DRB1*12:01",
        "MAPTAC_DRB1*15:01": "DRB1*15:01",
        "MAPTAC_DRB3*01:01_dm+": "DRB3*01:01",
        "MAPTAC_DRB3*01:01_dm-": "DRB3*01:01",
    }
    pulldown_antibody = {
        "HLA-DR_A375": "L243+tal1b5 (HLA-DR)",
        "HLA-DR_Lung": "L243 (HLA-DR)",
        "HLA-DR_PBMC_HDSC": "tal1b5 (HLA-DR)",
        "HLA-DR_PBMC_RG1095": "tal1b5 (HLA-DR)",
        "HLA-DR_PBMC_RG1104": "tal1b5 (HLA-DR)",
        "HLA-DR_PBMC_RG1248": "tal1b5 (HLA-DR)",
        "HLA-DR_SILAC_Donor1_10minLysate": "L243 (HLA-DR)",
        "HLA-DR_SILAC_Donor1_5hrLysate": "L243 (HLA-DR)",
        "HLA-DR_SILAC_Donor1_DConly": "L243 (HLA-DR)",
        "HLA-DR_SILAC_Donor1_UVovernight": "L243 (HLA-DR)",
        "HLA-DR_SILAC_Donor2_DC_UV_16hr": "L243 (HLA-DR)",
        "HLA-DR_SILAC_Donor2_DC_UV_24hr": "L243 (HLA-DR)",
        "HLA-DR_Spleen": "L243 (HLA-DR)",
        "MAPTAC_A*02:01": "MAPTAC",
        "MAPTAC_A*11:01": "MAPTAC",
        "MAPTAC_A*32:01": "MAPTAC",
        "MAPTAC_B*07:02": "MAPTAC",
        "MAPTAC_B*45:01": "MAPTAC",
        "MAPTAC_B*52:01": "MAPTAC",
        "MAPTAC_C*03:03": "MAPTAC",
        "MAPTAC_C*06:02": "MAPTAC",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm+": "MAPTAC",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm-": "MAPTAC",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm+": "MAPTAC",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm-": "MAPTAC",
        "MAPTAC_DRB1*01:01": "MAPTAC",
        "MAPTAC_DRB1*03:01": "MAPTAC",
        "MAPTAC_DRB1*04:01": "MAPTAC",
        "MAPTAC_DRB1*07:01": "MAPTAC",
        "MAPTAC_DRB1*11:01": "MAPTAC",
        "MAPTAC_DRB1*12:01_dm+": "MAPTAC",
        "MAPTAC_DRB1*12:01_dm-": "MAPTAC",
        "MAPTAC_DRB1*15:01": "MAPTAC",
        "MAPTAC_DRB3*01:01_dm+": "MAPTAC",
        "MAPTAC_DRB3*01:01_dm-": "MAPTAC",
    }
    format = {
        "HLA-DR_A375": "DR-specific",
        "HLA-DR_Lung": "DR-specific",
        "HLA-DR_PBMC_HDSC": "DR-specific",
        "HLA-DR_PBMC_RG1095": "DR-specific",
        "HLA-DR_PBMC_RG1104": "DR-specific",
        "HLA-DR_PBMC_RG1248": "DR-specific",
        "HLA-DR_SILAC_Donor1_10minLysate": "DR-specific",
        "HLA-DR_SILAC_Donor1_5hrLysate": "DR-specific",
        "HLA-DR_SILAC_Donor1_DConly": "DR-specific",
        "HLA-DR_SILAC_Donor1_UVovernight": "DR-specific",
        "HLA-DR_SILAC_Donor2_DC_UV_16hr": "DR-specific",
        "HLA-DR_SILAC_Donor2_DC_UV_24hr": "DR-specific",
        "HLA-DR_Spleen": "DR-specific",
        "MAPTAC_A*02:01": "monoallelic",
        "MAPTAC_A*11:01": "monoallelic",
        "MAPTAC_A*32:01": "monoallelic",
        "MAPTAC_B*07:02": "monoallelic",
        "MAPTAC_B*45:01": "monoallelic",
        "MAPTAC_B*52:01": "monoallelic",
        "MAPTAC_C*03:03": "monoallelic",
        "MAPTAC_C*06:02": "monoallelic",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm+": "monoallelic",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm-": "monoallelic",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm+": "monoallelic",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm-": "monoallelic",
        "MAPTAC_DRB1*01:01": "monoallelic",
        "MAPTAC_DRB1*03:01": "monoallelic",
        "MAPTAC_DRB1*04:01": "monoallelic",
        "MAPTAC_DRB1*07:01": "monoallelic",
        "MAPTAC_DRB1*11:01": "monoallelic",
        "MAPTAC_DRB1*12:01_dm+": "monoallelic",
        "MAPTAC_DRB1*12:01_dm-": "monoallelic",
        "MAPTAC_DRB1*15:01": "monoallelic",
        "MAPTAC_DRB3*01:01_dm+": "monoallelic",
        "MAPTAC_DRB3*01:01_dm-": "monoallelic",
    }
    mhc_class = {
        "HLA-DR_A375": "II",
        "HLA-DR_Lung": "II",
        "HLA-DR_PBMC_HDSC": "II",
        "HLA-DR_PBMC_RG1095": "II",
        "HLA-DR_PBMC_RG1104": "II",
        "HLA-DR_PBMC_RG1248": "II",
        "HLA-DR_SILAC_Donor1_10minLysate": "II",
        "HLA-DR_SILAC_Donor1_5hrLysate": "II",
        "HLA-DR_SILAC_Donor1_DConly": "II",
        "HLA-DR_SILAC_Donor1_UVovernight": "II",
        "HLA-DR_SILAC_Donor2_DC_UV_16hr": "II",
        "HLA-DR_SILAC_Donor2_DC_UV_24hr": "II",
        "HLA-DR_Spleen": "II",
        "MAPTAC_A*02:01": "I",
        "MAPTAC_A*11:01": "I",
        "MAPTAC_A*32:01": "I",
        "MAPTAC_B*07:02": "I",
        "MAPTAC_B*45:01": "I",
        "MAPTAC_B*52:01": "I",
        "MAPTAC_C*03:03": "I",
        "MAPTAC_C*06:02": "I",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm+": "II",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm-": "II",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm+": "II",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm-": "II",
        "MAPTAC_DRB1*01:01": "II",
        "MAPTAC_DRB1*03:01": "II",
        "MAPTAC_DRB1*04:01": "II",
        "MAPTAC_DRB1*07:01": "II",
        "MAPTAC_DRB1*11:01": "II",
        "MAPTAC_DRB1*12:01_dm+": "II",
        "MAPTAC_DRB1*12:01_dm-": "II",
        "MAPTAC_DRB1*15:01": "II",
        "MAPTAC_DRB3*01:01_dm+": "II",
        "MAPTAC_DRB3*01:01_dm-": "II",
    }
    cell_line = {
        "HLA-DR_A375": "A375",
        "HLA-DR_Lung": "",
        "HLA-DR_PBMC_HDSC": "",
        "HLA-DR_PBMC_RG1095": "",
        "HLA-DR_PBMC_RG1104": "",
        "HLA-DR_PBMC_RG1248": "",
        "HLA-DR_SILAC_Donor1_10minLysate": "",
        "HLA-DR_SILAC_Donor1_5hrLysate": "",
        "HLA-DR_SILAC_Donor1_DConly": "",
        "HLA-DR_SILAC_Donor1_UVovernight": "",
        "HLA-DR_SILAC_Donor2_DC_UV_16hr": "",
        "HLA-DR_SILAC_Donor2_DC_UV_24hr": "",
        "HLA-DR_Spleen": "",
        "MAPTAC_A*02:01": "",
        "MAPTAC_A*11:01": "",
        "MAPTAC_A*32:01": "",
        "MAPTAC_B*07:02": "",
        "MAPTAC_B*45:01": "expi293",
        "MAPTAC_B*52:01": "",
        "MAPTAC_C*03:03": "expi293",
        "MAPTAC_C*06:02": "",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm+": "expi293",
        "MAPTAC_DPB1*06:01/DPA1*01:03_dm-": "expi293",
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm+": "expi293",  # don't actually see this in DataS1A!
        "MAPTAC_DQB1*06:04/DQA1*01:02_dm-": "expi293",
        "MAPTAC_DRB1*01:01": "",
        "MAPTAC_DRB1*03:01": "expi293",
        "MAPTAC_DRB1*04:01": "expi293",
        "MAPTAC_DRB1*07:01": "",
        "MAPTAC_DRB1*11:01": "",
        "MAPTAC_DRB1*12:01_dm+": "expi293",
        "MAPTAC_DRB1*12:01_dm-": "expi293",
        "MAPTAC_DRB1*15:01": "expi293",
        "MAPTAC_DRB3*01:01_dm+": "expi293",
        "MAPTAC_DRB3*01:01_dm-": "expi293",
    }
    df = pandas.read_excel(filename, sheet_name="DataS1B")
    results = []
    for sample_id in df.columns:
        # No hla_type value is currently None; the check remains so a sample
        # can be disabled by setting its entry to None.  An unknown column
        # still raises KeyError, which is intentional (fail loudly).
        if hla_type[sample_id] is None:
            print("Intentionally skipping", sample_id)
            continue
        result_df = pandas.DataFrame({
            "peptide": df[sample_id].dropna().values,
        })
        result_df["sample_id"] = sample_id
        result_df["hla"] = hla_type[sample_id]
        result_df["pulldown_antibody"] = pulldown_antibody[sample_id]
        result_df["format"] = format[sample_id]
        result_df["mhc_class"] = mhc_class[sample_id]
        result_df["sample_type"] = PMID_31495665_SAMPLE_TYPES[sample_id]
        result_df["cell_line"] = cell_line[sample_id]
        results.append(result_df)
    result_df = pandas.concat(results, ignore_index=True)
    # Class I MAPTAC samples are dropped: this curation pipeline is class II.
    result_df = result_df.loc[
        result_df.mhc_class == "II"
    ]
    return result_df
def handle_pmid_31611696(data_s1_filename, data_s2_filename):
    """Racle, ..., Gfeller. Nature Biotechnology 2019 [PMID 31611696]

    Both supplementary tables are peptide-by-sample intensity matrices; they
    are melted to long form, joined to the per-sample HLA typing table below,
    and concatenated into one standard per-peptide DataFrame.
    """
    data_s1 = pandas.read_csv(
        data_s1_filename, sep=None, engine="python").set_index("Sequence")
    data_s2 = pandas.read_csv(
        data_s2_filename, sep=None, engine="python").set_index("Sequence")
    # HLA typing is given as a PDF in Supplementary Table 1.
    # In cases of ambiguous assignment we use the primary assignment.
    # Columns: sample kind, sample type, then the space-separated allele list.
    text = """
3808_HMC	MENINGIOMA	DRB1*03:01 DRB1*07:01 DRB3*01:01 DRB4*01:01 DPA1*01:03 DPA1*02:01 DPB1*03:01 DPB1*11:01 DQA1*02:01 DQA1*05:01 DQB1*02:01 DQB1*02:02
3830_NJF	MENINGIOMA	DRB1*04:04 DRB1*11:01 DRB3*02:02 DRB4*01:03 DPA1*01:03 DPB1*02:01 DPB1*06:01 DQA1*03:01 DQA1*05:05 DQB1*03:01 DQB1*03:02
3849BR	MENINGIOMA	DRB1*11:04 DRB3*02:02 DPA1*01:03 DPB1*02:01 DPB1*04:01 DQA1*05:05 DQB1*03:01
3865_DM	MENINGIOMA	DRB1*01:01 DRB1*07:01 DRB4*01:03 DPA1*01:03 DPB1*04:01 DPB1*20:01 DQA1*01:01 DQA1*02:01 DQB1*03:03 DQB1*05:01
3869_GA	MENINGIOMA	DRB1*01:03 DRB1*04:04 DRB4*01:03 DPA1*01:03 DPB1*04:01 DPB1*126:01 DQA1*03:01 DQA1*05:05 DQB1*03:01 DQB1*03:02
3911_ME	MENINGIOMA	DRB1*11:01 DRB3*02:02 DPA1*01:03 DPB1*04:01 DQA1*05:05 DQB1*03:01
3912_BAM	MENINGIOMA	DRB1*03:01 DRB1*04:01 DRB3*01:01 DRB4*01:03 DPA1*01:03 DPB1*04:01 DQA1*03:01 DQA1*05:01 DQB1*02:01 DQB1*03:02
3947_GA	MENINGIOMA	DRB1*01:01 DRB1*13:01 DRB3*01:01 DPA1*01:03 DPB1*02:01 DPB1*04:02 DQA1*01:01 DQA1*01:03 DQB1*05:01 DQB1*06:03
3971_ORA	MENINGIOMA	DRB1*13:03 DRB1*07:01 DRB3*01:01 DRB4*01:01 DPA1*01:03 DPA1*02:02 DPB1*04:01 DQA1*02:01 DQA1*05:05 DQB1*02:02 DQB1*03:01
3993	MENINGIOMA	DRB1*07:01 DRB1*15:01 DRB4*01:03 DRB5*01:01 DPA1*01:03 DPA1*02:01 DPB1*04:01 DPB1*17:01 DQA1*01:02 DQA1*02:01 DQB1*02:02 DQB1*06:02
4001	MENINGIOMA	DRB1*13:01 DRB1*14:01 DRB3*01:01 DRB3*02:02 DPA1*01:03 DPB1*04:01 DPB1*04:02 DQA1*01:03 DQA1*01:04 DQB1*05:03 DQB1*06:03
4021	MENINGIOMA	DRB1*11:01 DRB1*04:05 DRB3*02:02 DRB4*01:03 DPA1*01:03 DPB1*03:01 DPB1*104:01 DQA1*03:03 DQA1*05:05 DQB1*02:02 DQB1*03:01
4037_DC	MENINGIOMA	DRB1*01:01 DPA1*01:03 DPB1*04:01 DPB1*06:01 DQA1*01:01 DQB1*05:01
4052_BA	MENINGIOMA	DRB1*03:01 DRB1*11:04 DRB3*01:01 DRB3*02:02 DPA1*01:03 DPB1*04:01 DQA1*05:01 DQA1*05:05 DQB1*02:01 DQB1*03:01
BP455	B-CELL	DRB1*10:01 DRB1*13:01 DRB3*01:01 DPA1*01:03 DPB1*02:01 DQA1*01:05 DQA1*01:10 DQB1*05:01 DQB1*06:03
CD165	B-CELL	DRB1*11:01 DRB3*02:02 DPA1*01:03 DPB1*04:01 DPB1*04:02 DQA1*05:05 DQB1*03:01
CM647	B-CELL	DRB1*07:01 DRB1*16:01 DRB4*01:03 DRB5*02:02 DPA1*01:03 DPB1*02:01 DPB1*23:01 DQA1*01:02 DQA1*02:01 DQB1*02:02 DQB1*05:02
GD149	B-CELL	DRB1*07:01 DRB1*13:01 DRB3*01:01 DRB4*01:01 DPA1*01:03 DPA1*02:01 DPB1*03:01 DPB1*04:01 DQA1*01:10 DQA1*02:01 DQB1*02:02 DQB1*06:03
JY	B-CELL	DRB1*04:04 DRB1*13:01 DRB3*01:01 DRB4*01:03 DPA1*01:03 DPB1*02:01 DPB1*04:01 DQA1*01:03 DQA1*03:01 DQB1*03:02 DQB1*06:03
PD42	B-CELL	DRB1*01:02 DRB1*15:01 DRB5*01:01 DPA1*01:03 DPA1*02:02 DPB1*04:01 DPB1*05:01 DQA1*01:01 DQA1*01:02 DQB1*05:01 DQB1*06:02
RA957	B-CELL	DRB1*04:01 DRB1*08:01 DRB4*01:03 DPA1*01:03 DPB1*04:01 DPB1*04:02 DQA1*03:03 DQA1*04:01 DQB1*03:01 DQB1*04:02
TIL1	TIL	DRB1*01:01 DRB1*04:08 DRB4*01:03 DPA1*01:03 DPB1*02:01 DPB1*04:01 DQA1*01:01 DQA1*03:03 DQB1*03:01 DQB1*05:01
TIL3	TIL	DRB1*12:01 DRB1*15:01 DRB3*02:02 DRB5*01:01 DPA1*01:03 DPB1*03:01 DPB1*04:01 DQA1*01:02 DQA1*05:05 DQB1*03:01 DQB1*05:02
    """
    rows = [
        row.split() for row in text.strip().split("\n")
    ]
    # Normalize sample names ("_" -> "-") so they match the intensity columns.
    rows = [
        (row[0].replace("_", "-"), row[1], " ".join(row[2:])) for row in rows
    ]
    info_df = pandas.DataFrame(rows, columns=["kind", "sample_type", "hla"])
    info_df = info_df.set_index("kind")
    # Data S1
    # Keep only the "Intensity ..." columns, stripped down to bare sample names.
    renames = {
        c : c.replace("Intensity", "").replace("_II", "").strip()
        for c in data_s1.columns if c.startswith("Intensity")
    }
    data_s1 = data_s1[sorted(renames)].rename(columns=renames).rename(columns={
        "3830NJF": "3830-NJF",
        "3865DM": "3865-DM",
        "3912BAM": "3912-BAM",
        # NOTE(review): "3865DM" below duplicates the entry two lines up —
        # redundant but harmless (identical mapping).
        "3865DM": "3865-DM",
        "CD165_ IFNg": "CD165_IFNg",
    })
    # Melt to long form: one (peptide, sample_id, intensity) row per detection.
    result1_df = data_s1.stack().reset_index()
    result1_df.columns = ["peptide", "sample_id", "intensity"]
    result1_df = result1_df.loc[result1_df.intensity > 0]
    # "kind" joins back to info_df; a couple of sample ids need normalizing.
    result1_df["kind"] = result1_df.sample_id.map(lambda s: {
        "JY_DR": "JY",
        "CD165_IFNg": "CD165",
    }.get(s, s))
    result1_df["hla"] = result1_df.kind.map(info_df.hla)
    result1_df["pulldown_antibody"] = "HB145"
    result1_df["format"] = "MULTIALLELIC"
    result1_df.loc[
        result1_df.sample_id == "JY_DR",
        "format"
    ] = "DR-specific"
    result1_df["mhc_class"] = "II"
    result1_df["sample_type"] = result1_df.kind.map(info_df.sample_type)
    result1_df["cell_line"] = [
        row.kind if row.sample_type == "B-CELL" else ""
        for _, row in result1_df.iterrows()
    ]
    del result1_df["kind"]
    # Data S2
    renames = {
        c : c.replace("Intensity", "").replace("_II", "").strip()
        for c in data_s2.columns if c.startswith("Intensity")
    }
    data_s2 = data_s2[sorted(renames)].rename(columns=renames).rename(columns={
        "3830NJF": "3830-NJF",
        "3865DM": "3865-DM",
        "3912BAM": "3912-BAM",
        # NOTE(review): duplicate "3865DM" again — redundant but harmless.
        "3865DM": "3865-DM",
        "CD165_ IFNg": "CD165_IFNg",
    })
    result2_df = data_s2.stack().reset_index()
    result2_df.columns = ["peptide", "sample_id", "intensity"]
    # Strip the DR-suffixes to recover the base sample name for the join.
    result2_df["kind"] = result2_df.sample_id.str.replace(
        "-HLA-DR", "").str.replace("-depleted", "").str.replace("_", "-")
    result2_df["hla"] = result2_df.kind.map(info_df.hla)
    result2_df["pulldown_antibody"] = ""
    assert all(result2_df.sample_id.map(
        lambda s: s.endswith("DR-depleted") or s.endswith("-DR")))
    result2_df["format"] = result2_df.sample_id.map(
        lambda s: "DR-depleted" if "DR-depleted" in s else "DR-specific")
    result2_df["mhc_class"] = "II"
    result2_df["sample_type"] = result2_df.kind.map(info_df.sample_type)
    result2_df["cell_line"] = [
        row.kind if row.sample_type == "B-CELL" else "" for _, row in
        result2_df.iterrows()
    ]
    del result2_df["kind"]
    result_df = pandas.concat([result1_df, result2_df], ignore_index=True)
    # DR-specific samples used HB298 antibody
    result_df.loc[
        result_df.format == "DR-specific",
        "pulldown_antibody"
    ] = "HB298"
    # Subsample alleles to just DR alleles for DR-specific samples.
    result_df.loc[
        result_df.format == "DR-specific",
        "hla"
    ] = result_df.loc[result_df.format == "DR-specific", "hla"].map(
        lambda s: " ".join([allele for allele in s.split() if "DR" in allele])
    )
    del result_df["intensity"]
    return result_df
def handle_pmid_27869121(filename):
    """Bassani-Sternberg, ..., Krackhardt Nature Comm. 2016 [PMID 27869121]"""
    # Although this publication reports class II ligands, its HLA typing
    # (Supp Table 2) appears to cover class I only, so the dataset is
    # intentionally skipped. Returning None signals "no data" to the caller.
    return None
# Accumulates (dataset_identifier, label, columns) records for every group
# built, so the metadata can be written out at the end of the run.
EXPRESSION_GROUPS_ROWS = []


def make_expression_groups(dataset_identifier, df, groups):
    """Average expression columns into named groups.

    Parameters:
        dataset_identifier: string tag recorded alongside each group.
        df: expression DataFrame, genes as index, samples as columns.
        groups: dict mapping group label -> list of column names to average.

    Returns a DataFrame (same index as df) with one column per group label.
    Raises ValueError on the first group column absent from df.
    """
    averaged = pandas.DataFrame(index=df.index)
    for label, columns in groups.items():
        absent = [c for c in columns if c not in df.columns]
        if absent:
            raise ValueError(
                "Missing: %s. Available: %s" % (
                    absent[0], df.columns.tolist()))
        averaged[label] = df[columns].mean(1)
        EXPRESSION_GROUPS_ROWS.append((dataset_identifier, label, columns))
    return averaged
def handle_expression_GSE113126(*filenames):
    """
    Barry, ..., Krummel Nature Medicine 2018 [PMID 29942093]
    This is the melanoma met RNA-seq dataset.

    Each input file is a per-sample quantification table with a "TPM"
    column. Returns a one-element list containing the grouped expression
    DataFrame (all samples pooled into a single MELANOMA_MET group).
    """
    # Use the first file only to establish the gene index ...
    df = pandas.read_csv(filenames[0], sep="\t", index_col=0)
    df = df[[]]  # no columns
    # ... then add one TPM column per file, keyed by the file's basename.
    # Assumes basenames are unique across inputs -- duplicates would
    # silently overwrite a column and trip the assert below. TODO confirm.
    for filename in filenames:
        df[os.path.basename(filename)] = pandas.read_csv(
            filename, sep="\t", index_col=0)["TPM"]
    assert len(df.columns) == len(filenames)
    # All samples are pooled into one expression group.
    groups = {
        "sample_type:MELANOMA_MET": df.columns.tolist(),
    }
    return [make_expression_groups("GSE113126", df, groups)]
def handle_expression_expression_atlas_22460905(filename):
    """Build expression groups from an Expression Atlas cell-line TPM table.

    Returns a one-element list containing the grouped DataFrame. Group
    membership is resolved by substring match against the (lowercased)
    Expression Atlas column names.
    """
    # skiprows=4: skip the metadata header block of the Expression Atlas
    # export before the real column row.
    df = pandas.read_csv(filename, sep="\t", skiprows=4, index_col=0)
    del df["Gene Name"]
    df.columns = df.columns.str.lower()
    df = df.fillna(0.0)

    def matches(*strings):
        # Columns whose lowercased name contains ALL of the given substrings.
        return [c for c in df.columns if all(s in c for s in strings)]

    groups = {
        "sample_type:B-LCL": (
            matches("b-cell", "lymphoblast") + matches("b acute lymphoblastic")),
        "sample_type:B-CELL": matches("b-cell"),
        "sample_type:B721-LIKE": matches("b-cell"),
        "sample_type:MELANOMA_CELL_LINE": matches("melanoma"),
        "sample_type:MELANOMA": matches("melanoma"),
        "sample_type:KG1-LIKE": matches("myeloid leukemia"),
        # Using a fibrosarcoma cell line for our fibroblast sample.
        "sample_type:FIBROBLAST": ['fibrosarcoma, ht-1080'],
        # For GBM tissue we are just using a mixture of cell lines.
        "sample_type:GLIOBLASTOMA_TISSUE": matches("glioblastoma"),
        "cell_line:A375": ['amelanotic melanoma, a-375'],
        "cell_line:THP-1": ["childhood acute monocytic leukemia, thp-1"],
        "cell_line:HL-60": ["adult acute myeloid leukemia, hl-60"],
        "cell_line:U-87": ['glioblastoma, u-87 mg'],
        "cell_line:LNT-229": ['glioblastoma, ln-229'],
        "cell_line:T98G": ['glioblastoma, t98g'],
        "cell_line:SK-MEL-5": ['cutaneous melanoma, sk-mel-5'],
        'cell_line:MEWO': ['melanoma, mewo'],
        "cell_line:HCC1937": ['breast ductal adenocarcinoma, hcc1937'],
        "cell_line:HCT116": ['colon carcinoma, hct 116'],
        "cell_line:HCC1143": ['breast ductal adenocarcinoma, hcc1143'],
    }
    return [make_expression_groups("expression_atlas_22460905", df, groups)]
def handle_expression_human_protein_atlas(*filenames):
    """Build expression groups from three Human Protein Atlas exports.

    Expects exactly one filename each whose path contains "celline",
    "blood", and "gtex"; the single-element tuple unpacking raises
    ValueError otherwise. Returns a list of three grouped DataFrames
    (blood, cell line, GTEx tissue).
    """
    (cell_line_filename,) = [f for f in filenames if "celline" in f]
    (blood_filename,) = [f for f in filenames if "blood" in f]
    (gtex_filename,) = [f for f in filenames if "gtex" in f]
    cell_line_df = pandas.read_csv(cell_line_filename, sep="\t")
    blood_df = pandas.read_csv(blood_filename, sep="\t", index_col=0)
    gtex_df = pandas.read_csv(gtex_filename, sep="\t")
    # Long -> wide: one column per cell line / tissue, indexed by gene.
    cell_line_df = cell_line_df.pivot(
        index="Gene", columns="Cell line", values="TPM")
    gtex_df = gtex_df.pivot(
        index="Gene", columns="Tissue", values="TPM")
    return [
        make_expression_groups(
            "human_protein_atlas:%s" % os.path.basename(blood_filename),
            blood_df,
            groups={
                "sample_type:PBMC": [
                    c for c in blood_df.columns if "total PBMC" in c
                ],
                # for samples labeled leukapheresis we also use PBMC
                "sample_type:LEUKAPHERESIS": [
                    c for c in blood_df.columns if "total PBMC" in c
                ],
                # for samples labeled TIL we are also using PBMC
                "sample_type:TIL": [
                    c for c in blood_df.columns if "total PBMC" in c
                ],
            }),
        make_expression_groups(
            "human_protein_atlas:%s" % os.path.basename(cell_line_filename),
            cell_line_df,
            groups={
                "cell_line:HELA": ['HeLa'],
                "cell_line:K562": ["K-562"],
                "cell_line:HEK293": ['HEK 293'],
                "cell_line:RPMI8226": ['RPMI-8226'],
                "cell_line:EXPI293": ['HEK 293'],  # EXPI293 derived from HEK293
            }),
        make_expression_groups(
            "human_protein_atlas:%s" % os.path.basename(gtex_filename),
            gtex_df,
            groups={
                "sample_type:LUNG": ["lung"],
                "sample_type:SPLEEN": ["spleen"],
                "sample_type:OVARY": ["ovary"],
                "sample_type:KIDNEY": ["kidney"],
                # This is bad! I just can't find anything better currently.
                # We should find some meningioma RNA-seq and switch to that.
                "sample_type:MENINGIOMA": [
                    "amygdala", "basal ganglia", "cerebellum", "cerebral cortex",
                    "midbrain", "spinal cord",
                ],
            }),
    ]
def make_expression_mixtures(expression_df):
    """Build averaged expression columns for each cell-line mixture.

    For every entry in the module-level CELL_LINE_MIXTURES, resolve each
    component to an existing "cell_line:NAME" column, falling back to the
    "sample_type:NAME-LIKE" column when the exact cell line is absent.
    Raises ValueError if any resolved component column is still missing.
    """
    # NOTE: CELL_LINE_MIXTURES is only read here, so no `global` statement
    # is needed.
    groups = {}
    for mix in CELL_LINE_MIXTURES:
        components = []
        for item in mix.replace("mix:", "").upper().split(","):
            cell_line_column = "cell_line:%s" % item
            if cell_line_column in expression_df.columns:
                components.append(cell_line_column)
            else:
                print("No cell line, falling back on similar: ", item)
                components.append("sample_type:%s-LIKE" % item)
        groups["sample_type:" + mix.upper()] = components
    missing = {
        component
        for component_list in groups.values()
        for component in component_list
        if component not in expression_df.columns
    }
    if missing:
        raise ValueError(
            "Missing [%d]: %s. Available: %s" % (
                len(missing), missing, expression_df.columns.tolist()))
    return make_expression_groups("mixtures", expression_df, groups)
# Add all functions with names like handle_pmid_XXXX to PMID_HANDLERS dict.
# At module level, locals() is globals(), so this picks up every handler
# defined above keyed by its PMID / expression-dataset suffix. list() takes
# a snapshot since the handler dicts live in the namespace being iterated.
# PMID_HANDLERS / EXPRESSION_HANDLERS are presumably dicts defined earlier
# in the file -- verify against the top of the module.
for (key, value) in list(locals().items()):
    if key.startswith("handle_pmid_"):
        PMID_HANDLERS[key.replace("handle_pmid_", "")] = value
    elif key.startswith("handle_expression_"):
        EXPRESSION_HANDLERS[key.replace("handle_expression_", "")] = value
def run():
    """Entry point: curate MS ligand data plus expression data, write CSVs.

    Reads --expression-item and --ms-item argument groups (first token is
    the dataset label / PMID, the rest are filenames), dispatches each to
    its registered handler, merges the results, assigns an expression
    dataset to every MS sample, and writes the expression matrix, the MS
    table, and (optionally) expression-group metadata.
    """
    args = parser.parse_args(sys.argv[1:])

    # ---- Expression data -------------------------------------------------
    expression_dfs = []
    for (i, item_tpl) in enumerate(args.expression_item):
        (label, filenames) = (item_tpl[0], item_tpl[1:])
        label = label.replace("-", "_")
        print(
            "Processing expression item %d of %d" % (i + 1, len(args.expression_item)),
            label,
            *[os.path.abspath(f) for f in filenames])
        expression_dfs_for_item = []
        handler = None
        if label in EXPRESSION_HANDLERS:
            handler = EXPRESSION_HANDLERS[label]
            expression_dfs_for_item = handler(*filenames)
        elif args.debug:
            debug(*filenames)
        else:
            raise NotImplementedError(label)
        if expression_dfs_for_item:
            print(
                "Processed expression data",
                label,
                "result dataframes",
                len(expression_dfs_for_item))
            print(*[e.columns for e in expression_dfs_for_item])
            expression_dfs.extend(expression_dfs_for_item)
    # Outer-join all expression frames on their gene index.
    expression_df = expression_dfs[0]
    for other in expression_dfs[1:]:
        expression_df = pandas.merge(
            expression_df, other, how='outer', left_index=True, right_index=True)
    print("Genes in each expression dataframe: ",
          *[len(e) for e in expression_dfs])
    print("Genes in merged expression dataframe", len(expression_df))
    if CELL_LINE_MIXTURES:
        print("Generating cell line mixtures.")
        expression_mixture_df = make_expression_mixtures(expression_df)
        expression_df = pandas.merge(
            expression_df,
            expression_mixture_df,
            how='outer',
            left_index=True,
            right_index=True)

    # ---- Mass spec data --------------------------------------------------
    ms_dfs = []
    for (i, item_tpl) in enumerate(args.ms_item):
        (pmid, filenames) = (item_tpl[0], item_tpl[1:])
        print(
            "Processing MS item %d of %d" % (i + 1, len(args.ms_item)),
            pmid,
            *[os.path.abspath(f) for f in filenames])
        ms_df = None
        handler = None
        if pmid in PMID_HANDLERS:
            handler = PMID_HANDLERS[pmid]
            ms_df = handler(*filenames)
        elif args.debug:
            debug(*filenames)
        else:
            raise NotImplementedError(pmid)
        if ms_df is not None:
            ms_df["pmid"] = pmid
            if "original_pmid" not in ms_df.columns:
                ms_df["original_pmid"] = pmid
            if "expression_dataset" not in ms_df.columns:
                ms_df["expression_dataset"] = ""
            # Normalize: everything becomes an upper-cased string.
            ms_df = ms_df.applymap(str).applymap(str.upper)
            ms_df["sample_id"] = ms_df.sample_id.str.replace(" ", "")
            print("*** PMID %s: %d peptides ***" % (pmid, len(ms_df)))
            if handler is not None:
                print(handler.__doc__)
            print("Counts by sample id:")
            print(ms_df.groupby("sample_id").peptide.nunique())
            print("")
            print("Counts by sample type:")
            print(ms_df.groupby("sample_type").peptide.nunique())
            print("****************************")
            # Handlers may pre-assign expression datasets; validate them.
            for value in ms_df.expression_dataset.unique():
                if value and value not in expression_df.columns:
                    raise ValueError("No such expression dataset", value)
            ms_dfs.append(ms_df)
        else:
            print("Skipping MS item", pmid)
    ms_df = pandas.concat(ms_dfs, ignore_index=True, sort=False)
    ms_df["cell_line"] = ms_df["cell_line"].fillna("")
    # Collapse whitespace, then canonicalize every allele name (raises on
    # unparseable alleles).
    ms_df["hla"] = ms_df["hla"].str.strip().str.replace(r'\s+', ' ').map(
        lambda hla: " ".join(
            [
                normalize_allele_name(a, raise_on_error=True)
                for a in hla.split()
            ]))
    for _, row in ms_df.drop_duplicates("hla").iterrows():
        alleles = row.hla.split()
        for allele in alleles:
            # Catch pairs like HLA-DQA*01:01-DQB1*01:01.
            # We want only single alleles. They get paired up in analysis code.
            if "-" in allele.replace("HLA-", ""):
                raise ValueError(
                    "Allele pair present: %s. In: %s\n%s" % (
                        allele, row.hla, row))

    # ---- Assign expression datasets to samples ---------------------------
    sample_table = ms_df[
        [
            "sample_id",
            "pmid",
            "format",
            "expression_dataset",
            "cell_line",
            "sample_type",
        ]
    ].drop_duplicates().set_index("sample_id")
    sample_id_to_expression_dataset = sample_table.expression_dataset.to_dict()
    for (sample_id, value) in sorted(sample_id_to_expression_dataset.items()):
        if value:
            print("Expression dataset for sample", sample_id, "already assigned")
            continue
        cell_line_col = "cell_line:" + sample_table.loc[sample_id, "cell_line"]
        sample_type_col = "sample_type:" + (
            sample_table.loc[sample_id, "sample_type"])
        expression_dataset = None
        # Prefer an exact cell-line column; fall back to the sample type.
        for col in [cell_line_col, sample_type_col]:
            if col in expression_df.columns:
                expression_dataset = col
                break
        if not expression_dataset:
            print("*" * 20)
            print("No expression dataset for sample ", sample_id)
            print("Sample info:")
            print(sample_table.loc[sample_id])
            print("*" * 20)
        sample_id_to_expression_dataset[sample_id] = expression_dataset
        print(
            "Sample", sample_id, "assigned exp. dataset", expression_dataset)
    print("Expression dataset usage:")
    print(pandas.Series(sample_id_to_expression_dataset).value_counts())
    print("PMIDs by format:")
    print(sample_table.groupby("format").pmid.unique())
    missing = [
        key for (key, value) in
        sample_id_to_expression_dataset.items()
        if value is None
    ]
    if missing:
        print("Missing expression data for samples", *missing)
        print(
            "Missing cell lines: ",
            *sample_table.loc[missing, "cell_line"].dropna().drop_duplicates().tolist())
        print("Missing sample types: ", *sample_table.loc[
            missing, "sample_type"].dropna().drop_duplicates().tolist())
        if args.debug:
            import ipdb; ipdb.set_trace()
        else:
            raise ValueError("Missing expression data for samples: ", missing)
    ms_df["expression_dataset"] = ms_df.sample_id.map(
        sample_id_to_expression_dataset)

    # ---- Write results ---------------------------------------------------
    # Fixed leading columns, remaining columns alphabetical.
    cols = [
        "pmid",
        "sample_id",
        "peptide",
        "format",
        "mhc_class",
        "hla",
        "expression_dataset",
    ]
    cols += [c for c in sorted(ms_df.columns) if c not in cols]
    ms_df = ms_df[cols]
    null_df = ms_df.loc[ms_df.isnull().any(1)]
    if len(null_df) > 0:
        print("Nulls:")
        print(null_df)
    else:
        print("No nulls.")
    # Each sample should be coming from only one experiment.
    assert ms_df.groupby("sample_id").pmid.nunique().max() == 1, (
        ms_df.groupby("sample_id").pmid.nunique().sort_values())
    expression_df.to_csv(args.expression_out, index=True)
    print("Wrote: %s" % os.path.abspath(args.expression_out))
    ms_df.to_csv(args.ms_out, index=False)
    print("Wrote: %s" % os.path.abspath(args.ms_out))
    if args.expression_metadata_out is not None:
        expression_metadata_df = pandas.DataFrame(
            EXPRESSION_GROUPS_ROWS,
            columns=["expression_dataset", "label", "samples"])
        # Serialize the column lists so they round-trip through CSV.
        expression_metadata_df["samples"] = expression_metadata_df[
            "samples"
        ].map(json.dumps)
        expression_metadata_df.to_csv(args.expression_metadata_out, index=False)
        print("Wrote: %s" % os.path.abspath(args.expression_metadata_out))


if __name__ == '__main__':
    run()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,787
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/data_curated/curate_t_cell_epitopes.py
|
"""
Curate IEDB T cell epitopes. Currently this doesn't do much except rename the
peptide column from "Description" to "peptide".
"""
import sys
import argparse
import pandas
from mhc2flurry.amino_acid import COMMON_AMINO_ACIDS
# Command-line interface. --data-iedb is the raw IEDB export; --out-csv
# receives the filtered epitope table.
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
    "--data-iedb",
    metavar="tcell_full_v3.csv",
    help="Path to IEDB-style T cell epitope data")
parser.add_argument(
    "--max-epitopes",
    metavar="N",
    type=int,
    help="Process first N epitopes (for debugging)")
parser.add_argument(
    "--out-csv",
    required=True,
    help="Result file")
def run():
    """Load the IEDB T cell export, keep rows whose Description is a
    plausible peptide (standard amino acids only, length >= 5), and write
    the result CSV with a new leading "peptide" column."""
    args = parser.parse_args(sys.argv[1:])
    # skiprows=1: IEDB exports carry an extra header line above the real
    # column row -- TODO confirm against the current export format.
    epitopes_df = pandas.read_csv(
        args.data_iedb, skiprows=1, nrows=args.max_epitopes)
    print("Read epitopes", *epitopes_df.shape)
    print(epitopes_df)
    # Copy "Description" into a new first column named "peptide".
    epitopes_df.insert(0, "peptide", epitopes_df.Description)
    # Regex matching strings composed solely of the common amino acids.
    aa_regex = "^[%s]+$" % "".join(sorted(COMMON_AMINO_ACIDS))
    epitopes_df = epitopes_df.loc[
        epitopes_df.peptide.str.match(aa_regex) &
        (epitopes_df.peptide.str.len() >= 5)
    ]
    print("Epitopes with valid peptides", len(epitopes_df))
    print("Generated result", *epitopes_df.shape)
    print(epitopes_df)
    epitopes_df.to_csv(args.out_csv, index=False)
    print("Wrote", args.out_csv)


if __name__ == '__main__':
    run()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,788
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/data_pdb/make_pdb_query.py
|
# Just print a JSON PDB query to stdout
# Doing this in a python script so we have comments.
import json

# Full-length MHC class II chain protein sequences used as search baits
# against the PDB. One representative allele per locus of interest.
sequences = []
# DRA1*01:01
sequences.append(
    "MAISGVPVLGFFIIAVLMSAQESWAIKEEHVIIQAEFYLNPDQSGEFMFDFDGDEIFHVDMAKKETVWRLEEFGRF"
    "ASFEAQGALANIAVDKANLEIMTKRSNYTPITNVPPEVTVLTNSPVELREPNVLICFIDKFTPPVVNVTWLRNGKP"
    "VTTGVSETVFLPREDHLFRKFHYLPFLPSTEDVYDCRVEHWGLDEPLLKHWEFDAPSPLPETTENVVCALGLTVGL"
    "VGIIIGTIFIIKGVRKSNAAERRGPL")
# DRB1*01:01
sequences.append(
    "MVCLKLPGGSCMTALTVTLMVLSSPLALAGDTRPRFLWQLKFECHFFNGTERVRLLERCIYNQEESVRFDSDVGEY"
    "RAVTELGRPDAEYWNSQKDLLEQRRAAVDTYCRHNYGVGESFTVQRRVEPKVTVYPSKTQPLQHHNLLVCSVSGFY"
    "PGSIEVRWFRNGQEEKAGVVSTGLIQNGDWTFQTLVMLETVPRSGEVYTCQVEHPSVTSPLTVEWRARSESAQSKM"
    "LSGVGGFVLGLLFLGAGLFIYFRNQKGHSGLQPTGFLS")
# DRB3*01:01
sequences.append(
    "MVCLKLPGGSSLAALTVTLMVLSSRLAFAGDTRPRFLELRKSECHFFNGTERVRYLDRYFHNQEEFLRFDSDVGEY"
    "RAVTELGRPVAESWNSQKDLLEQKRGRVDNYCRHNYGVGESFTVQRRVHPQVTVYPAKTQPLQHHNLLVCSVSGFY"
    "PGSIEVRWFRNGQEEKAGVVSTGLIQNGDWTFQTLVMLETVPRSGEVYTCQVEHPSVTSALTVEWRARSESAQSKM"
    "LSGVGGFVLGLLFLGAGLFIYFRNQKGHSGLQPTGFLS")
# DRB4*01:01
sequences.append(
    "MVCLKLPGGSCMAALTVTLTVLSSPLALAGDTQPRFLEQAKCECHFLNGTERVWNLIRYI"
    "YNQEEYARYNSDLGEYQAVTELGRPDAEYWNSQKDLLERRRAEVDTYCRYNYGVVESFTV"
    "QRRVQPKVTVYPSKTQPLQHHNLLVCSVNGFYPGSIEVRWFRNSQEEKAGVVSTGLIQNG"
    "DWTFQTLVMLETVPRSGEVYTCQVEHPSMMSPLTVQWSARSESAQSKMLSGVGGFVLGLL"
    "FLGTGLFIYFRNQKGHSGLQPTGLLS")
# DRB5*01:01
sequences.append(
    "MVCLKLPGGSYMAKLTVTLMVLSSPLALAGDTRPRFLQQDKYECHFFNGTERVRFLHRDIYNQEEDLRFDSDVGEY"
    "RAVTELGRPDAEYWNSQKDFLEDRRAAVDTYCRHNYGVGESFTVQRRVEPKVTVYPARTQTLQHHNLLVCSVNGFY"
    "PGSIEVRWFRNSQEEKAGVVSTGLIQNGDWTFQTLVMLETVPRSGEVYTCQVEHPSVTSPLTVEWRAQSESAQSKM"
    "LSGVGGFVLGLLFLGAGLFIYFKNQKGHSGLHPTGLVS")
# HLA-DQB1*02:01
sequences.append(
    "MSWKKALRIPGGLRAATVTLMLSMLSTPVAEGRDSPEDFVYQFKGMCYFTNGTERVRLVS"
    "RSIYNREEIVRFDSDVGEFRAVTLLGLPAAEYWNSQKDILERKRAAVDRVCRHNYQLELR"
    "TTLQRRVEPTVTISPSRTEALNHHNLLVCSVTDFYPAQIKVRWFRNDQEETAGVVSTPLI"
    "RNGDWTFQILVMLEMTPQRGDVYTCHVEHPSLQSPITVEWRAQSESAQSKMLSGIGGFVL"
    "GLIFLGLGLIIHHRSQKGLLH")
# HLA-DPB1*01:01
sequences.append(
    "MMVLQVSAAPRTVALTALLMVLLTSVVQGRATPENYVYQGRQECYAFNGTQRFLERYIYN"
    "REEYARFDSDVGEFRAVTELGRPAAEYWNSQKDILEEKRAVPDRVCRHNYELDEAVTLQR"
    "RVQPKVNVSPSKKGPLQHHNLLVCHVTDFYPGSIQVRWFLNGQEETAGVVSTNLIRNGDW"
    "TFQILVMLEMTPQQGDVYICQVEHTSLDSPVTVEWKAQSDSAQSKTLTGAGGFVLGLIIC"
    "GVGIFMHRRSKKVQRGSA")
# Should be distinct; a duplicate would indicate a copy/paste error above.
assert len(sequences) == len(set(sequences))
def node_from_sequence(sequence):
    """Build one RCSB search-API "terminal" sequence node for *sequence*."""
    parameters = {
        "evalue_cutoff": 10,
        "identity_cutoff": 0.5,
        "target": "pdb_protein_sequence",
        "value": sequence,
    }
    return {
        "type": "terminal",
        "service": "sequence",
        "parameters": parameters,
    }
# Combine per-sequence nodes with logical OR: an entry matches if any of
# its chains resembles any bait sequence. return_all_hits disables the
# API's default pagination.
query = {
    "query": {
        "type": "group",
        "logical_operator": "or",
        "nodes": [node_from_sequence(sequence) for sequence in sequences],
    },
    "request_options": {
        "return_all_hits": True
    },
    "return_type": "entry"
}
print(json.dumps(query))
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,789
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/data_proteomes/index_fasta.py
|
"""
Write a shellinford index for a fasta.
"""
import argparse
import time
import sys
import shellinford
from mhc2flurry.fasta import read_fasta_to_dataframe
# Command-line interface: positional input FASTA and output index path.
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
    "input",
    metavar="FASTA",
    help="Input file")
parser.add_argument(
    "output",
    metavar="FM",
    help="Output file")
def run():
    """Build a shellinford FM-index over the FASTA's sequences and save it."""
    args = parser.parse_args(sys.argv[1:])
    df = read_fasta_to_dataframe(args.input)
    print("Read")
    print(df)
    print("Building FM index")
    start = time.time()
    fm = shellinford.FMIndex()
    # Index all sequences at once; document order follows the FASTA order.
    fm.build(df.sequence.tolist())
    print("Built index of %d sequences in %0.3f sec." % (
        len(df), time.time() - start))
    print("Writing index")
    fm.write(args.output)
    print("Wrote", args.output)


if __name__ == '__main__':
    run()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,790
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/data_pdb/parse_results.py
|
# From a PDB results json, print out a comma separated list of PDB IDs
import argparse
import sys
import json

parser = argparse.ArgumentParser()
parser.add_argument("results", metavar="JSON")
parser.add_argument("out", metavar="FILE")
args = parser.parse_args(sys.argv[1:])

parsed = json.load(open(args.results))
# Sanity output: result count plus first/last entries.
print("Loaded %d results" % len(parsed['result_set']))
print("First result")
print(parsed['result_set'][0])
print("Last result")
print(parsed['result_set'][-1])
# Write the identifiers as one comma-separated line.
with open(args.out, "w") as fd:
    identifiers = [entry['identifier'] for entry in parsed['result_set']]
    fd.write(",".join(identifiers))
    fd.write("\n")
print("Wrote: ", args.out)
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,791
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/allele_sequences/make_pseudosequences.py
|
"""
Select allele sequences for pan-class II models by analyzing distances between
each MHC residue and the peptide across a set of structures from PDB.
"""
from __future__ import print_function
import sys
import argparse
import collections
import os
import operator
import numpy
import pandas
import tqdm
import atomium
from mhc2flurry.fasta import read_fasta_to_dataframe
# Command-line interface. Positional arguments: the two chain alignments
# and the directory of PDB structures; --criteria drives the position
# selection and may be given multiple times.
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
    "alpha_aligned_fasta",
    metavar="FASTA",
    help="Aligned sequences")
parser.add_argument(
    "beta_aligned_fasta",
    metavar="FASTA",
    help="Aligned sequences")
parser.add_argument(
    "pdb_dir",
    metavar="DIR",
    help="Directory containing PDB structures")
parser.add_argument(
    "--reference-allele",
    nargs=2,
    help="Alpha and beta alleles to use for position numbering.")
parser.add_argument(
    "--reference-structure",
    action="append",
    default=[],
    help="Structures to write out with b-factors rewritten according to "
    "inclusion in pseudosequences(for visualization).")
parser.add_argument(
    "--out-csv",
    help="Result file for sequences")
parser.add_argument(
    "--out-aux-dir",
    help="Result DIR for extra information")
parser.add_argument(
    "--cutoffs",
    default=[2.0, 4.0, 6.0, 8.0, 10.0],
    nargs="+",
    type=float,
    metavar="X",
    help="Cutoff distances to evaluate. Default: %(default)s.")
parser.add_argument(
    "--criteria",
    nargs=3,
    type=float,
    action="append",
    default=[],
    required=True,
    metavar="X",
    help="Criteria for selecting a position. Triple of: min minor allele "
    "fraction, cutoff distance, fraction of structures with a contact at "
    "the given cutoff. May be specified any number of times.")
parser.add_argument(
    "--peptide-chain-min-length",
    default=5,
    metavar="N",
    type=int,
    help="Default: %(default)s.")
parser.add_argument(
    "--peptide-chain-max-length",
    default=50,
    metavar="N",
    type=int,
    help="Default: %(default)s.")
parser.add_argument(
    "--subsample-pdb",
    metavar="N",
    type=int,
    help="Subsample to at most N PDB structures. For debugging.")
def make_position_to_aligned_position_dict(aligned_sequence):
    """Map unaligned residue index -> column index in the aligned sequence.

    Gap characters ("-") occupy alignment columns but no residue index,
    so they are skipped.
    """
    non_gap_columns = (
        column for column, aa in enumerate(aligned_sequence) if aa != "-")
    return dict(enumerate(non_gap_columns))
def make_aligned_position_to_position_dict(aligned_sequence):
    """Map aligned column index -> unaligned residue index.

    Inverse of make_position_to_aligned_position_dict: gap columns ("-")
    carry no residue and are absent from the result.
    """
    non_gap_columns = (
        column for column, aa in enumerate(aligned_sequence) if aa != "-")
    return {
        column: residue_index
        for residue_index, column in enumerate(non_gap_columns)
    }
def run():
    """Select pseudosequence positions from peptide-MHC contact statistics.

    Pipeline: (1) compute minor-allele fractions per aligned MHC position,
    (2) tally peptide contacts per position across PDB structures at the
    requested distance cutoffs, (3) select positions satisfying each
    --criteria triple, (4) extract per-allele pseudosequences, and
    optionally write auxiliary tables and b-factor-annotated reference
    structures.
    """
    args = parser.parse_args(sys.argv[1:])
    print(args)

    # ---- Load alignments (alpha + beta chains) ---------------------------
    alpha_aligned_df = read_fasta_to_dataframe(
        args.alpha_aligned_fasta,
        full_descriptions=True)
    alpha_aligned_df["kind"] = "alpha"
    beta_aligned_df = read_fasta_to_dataframe(
        args.beta_aligned_fasta,
        full_descriptions=True)
    beta_aligned_df["kind"] = "beta"
    aligned_df = pandas.concat(
        [alpha_aligned_df, beta_aligned_df], ignore_index=True)
    aligned_df["unaligned"] = aligned_df.sequence.str.replace("-", "")
    aligned_df = aligned_df.rename(columns={
        "sequence": "aligned_sequence",
    }).set_index("sequence_id")

    # Sequences NOT derived from PDB entries, i.e. the allele database.
    non_pdb_aligned_df = aligned_df.loc[
        ~aligned_df.index.str.startswith("pdb")
    ].copy()

    # ---- Minor allele fraction per aligned position ----------------------
    minor_allele_fraction_df = []
    for kind, sub_df in non_pdb_aligned_df.groupby("kind"):
        print("Calculating minor allelic fractions: ", kind)
        # All aligned sequences of a kind must share one length.
        (length,) = sub_df.aligned_sequence.str.len().unique()
        for pos in tqdm.tqdm(range(length)):
            s = sub_df.aligned_sequence.str.get(pos)
            mode = s.mode()[0]
            # Fraction of alleles differing from the most common residue.
            maf = (s != mode).mean()
            minor_allele_fraction_df.append((kind, pos, mode, maf))
    minor_allele_fraction_df = pandas.DataFrame(
        minor_allele_fraction_df,
        columns=[
            "mhc_chain_kind",
            "mhc_residue_aligned",
            "major_allele",
            "minor_allele_fraction",
        ])
    minor_allele_fraction_df = minor_allele_fraction_df.set_index(
        ["mhc_chain_kind", "mhc_residue_aligned"])
    print(minor_allele_fraction_df)

    # ---- PDB-derived chains ----------------------------------------------
    pdb_aligned_df = aligned_df.loc[
        aligned_df.index.str.startswith("pdb")
    ].copy()
    # Accession and chain are parsed out of the sequence id -- presumably
    # formatted like "pdb.<ACCESSION>_..._<CHAIN>". TODO confirm format.
    pdb_aligned_df["accession"] = pdb_aligned_df.index.str.split(".").str.get(
        1).str.split("_").str.get(0)
    pdb_aligned_df["chain"] = pdb_aligned_df.index.str.split("_").str.get(-1)
    if args.subsample_pdb:
        # Keep requested reference structures even when subsampling.
        keep_accessions = list(
            pandas.Series(
                pdb_aligned_df.accession.unique()).sample(
                n=args.subsample_pdb)) + args.reference_structure
        pdb_aligned_df = pdb_aligned_df.loc[
            pdb_aligned_df.accession.isin(keep_accessions)
        ].copy()

    # ---- Contact tally across structures ---------------------------------
    info_by_accession = {}
    contacts_df = []
    for accession, sub_df in tqdm.tqdm(
            pdb_aligned_df.groupby("accession"),
            total=pdb_aligned_df.accession.nunique()):
        sub_df = sub_df.set_index("chain")
        alpha_chains = sub_df.loc[sub_df.kind == "alpha"].index.values
        beta_chains = sub_df.loc[sub_df.kind == "beta"].index.values
        mhc_chain_to_kind = {}
        for chain in alpha_chains:
            mhc_chain_to_kind[chain] = "alpha"
        for chain in beta_chains:
            mhc_chain_to_kind[chain] = "beta"
        if len(alpha_chains) != len(beta_chains):
            print(
                "Skipping", accession,
                "because num chains for alpha != beta",
                len(alpha_chains), len(beta_chains))
            continue
        structure = atomium.open(
            os.path.join(
                args.pdb_dir, "%s.cif.gz" % accession)).model
        # Peptides are identified purely by chain-length bounds.
        peptides = [
            c for c in structure.chains()
            if len(c) >= args.peptide_chain_min_length and
            len(c) <= args.peptide_chain_max_length
        ]
        if len(peptides) == 0:
            print("Skipping", accession, "because no peptides")
            continue
        structure.optimise_distances()
        if accession in args.reference_structure:
            # Save for later
            info_by_accession[accession] = {
                "structure": structure,
                "peptides": peptides,
                "mhc_chain_to_kind": mhc_chain_to_kind,
                "aligned_df": sub_df.copy(),
            }
        mhc_chain_to_position_map = {}
        for chain in mhc_chain_to_kind:
            mhc_chain_to_position_map[chain] = make_position_to_aligned_position_dict(
                sub_df.loc[chain, "aligned_sequence"])
        for peptide in peptides:
            seen = set()
            # Cutoffs ascending: each residue is recorded only at the
            # smallest cutoff where it first appears.
            for cutoff in sorted(args.cutoffs):
                nearby = [
                    r for r in peptide.nearby_hets(
                        cutoff=cutoff, residues=True, ligands=False)
                    if r not in seen
                ]
                seen.update(nearby)
                for residue in nearby:
                    kind = mhc_chain_to_kind.get(residue.chain.id)
                    if kind is not None:
                        index = residue.chain.residues().index(residue)
                        row = sub_df.loc[residue.chain.id]
                        # Sanity: structure residue matches the alignment
                        # at both unaligned and aligned coordinates.
                        numpy.testing.assert_equal(
                            residue.code,
                            row.unaligned[index])
                        aligned_position = (
                            mhc_chain_to_position_map[residue.chain.id][index])
                        numpy.testing.assert_equal(
                            residue.code,
                            row.aligned_sequence[aligned_position])
                        contacts_df.append((
                            accession,
                            cutoff,
                            peptide.id,
                            residue.chain.id,
                            kind,
                            index,
                            aligned_position,
                            residue.code))
    contacts_df = pandas.DataFrame(
        contacts_df, columns=[
            "accession",
            "cutoff",
            "peptide_chain",
            "mhc_chain",
            "mhc_chain_kind",
            "mhc_residue_aligned",
            "mhc_residue_unaligned",
            "mhc_residue_aligned",
            "mhc_residue",
        ])

    # Fraction of structures with a contact at each position, cumulative
    # over increasing cutoff (cumsum makes each column include all closer
    # cutoffs).
    num_accessions = contacts_df.accession.nunique()
    positional_contact_rates_df = contacts_df.groupby(
        ["mhc_chain_kind", "mhc_residue_aligned", "cutoff"]
    ).accession.nunique().unstack().reindex(
        sorted(args.cutoffs), axis=1).fillna(0.0).cumsum(1) / num_accessions
    positional_df = minor_allele_fraction_df.merge(
        positional_contact_rates_df,
        how="left",
        left_index=True,
        right_index=True).fillna(0)

    # ---- Apply selection criteria ----------------------------------------
    # Criteria name -> alpha or beta -> list of positions
    criteria_to_positions = collections.OrderedDict()
    for (maf, cutoff, fraction) in args.criteria:
        name = "maf_%s_and_%s_within_%s_angstrom" % (maf, fraction, cutoff)
        positional_df[name] = (
            (positional_df.minor_allele_fraction >= maf) &
            (positional_df[cutoff] >= fraction)
        )
        positions = positional_df.loc[
            positional_df[name]
        ].index.to_frame().reset_index(drop=True).groupby(
            "mhc_chain_kind"
        ).mhc_residue_aligned.unique().map(sorted).to_dict()
        criteria_to_positions[name] = positions
        print("Criteria", name, "selected:")
        for (k, v) in criteria_to_positions[name].items():
            print(k, len(v))

    # ---- Extract pseudosequences -----------------------------------------
    pseudosequences_df = non_pdb_aligned_df.copy()
    for (criteria, d) in criteria_to_positions.items():
        for kind in ["alpha", "beta"]:
            positions = d.get(kind, [])
            sub = pseudosequences_df.loc[
                pseudosequences_df.kind == kind,
            ]
            # Pull the selected columns out of each aligned sequence;
            # gaps ("-") at selected positions are encoded as "X".
            pseudosequences_df.loc[
                sub.index,
                criteria
            ] = sub.aligned_sequence.map(
                operator.itemgetter(*positions)
            ).map("".join).str.replace("-", "X")
    # Re-key by the second whitespace token of the description --
    # presumably the allele name; must be unique.
    pseudosequences_df.index = pseudosequences_df.index.str.split().str.get(1)
    assert pseudosequences_df.index.value_counts().max() == 1
    main_result_df = pseudosequences_df[
        list(criteria_to_positions) + ["kind"]
    ].copy()
    main_result_df.to_csv(args.out_csv, index=True)
    print("Wrote %s: " % str(main_result_df.shape), args.out_csv)

    # ---- Auxiliary outputs -----------------------------------------------
    if args.out_aux_dir:
        if not os.path.exists(args.out_aux_dir):
            os.mkdir(args.out_aux_dir)
        filename = os.path.join(args.out_aux_dir, "aligned_sequences.csv")
        pseudosequences_df.to_csv(filename, index=True)
        print("Wrote: ", filename)
        filename = os.path.join(args.out_aux_dir, "contacts.csv")
        contacts_df.to_csv(filename, index=True)
        print("Wrote: ", filename)
        # Positional. We add reference allele position numbering and amino acids.
        if args.reference_allele:
            write_df = positional_df.copy()
            (alpha_reference, beta_reference) = args.reference_allele
            reference_name = "%s/%s" % (alpha_reference, beta_reference)
            reference_alleles = {
                "alpha": alpha_reference,
                "beta": beta_reference,
            }
            for kind in ["alpha", "beta"]:
                reference_allele = reference_alleles[kind]
                reference_sequence = pseudosequences_df.loc[
                    reference_allele, "aligned_sequence"
                ]
                position_map = make_aligned_position_to_position_dict(
                    reference_sequence)
                write_df.loc[
                    kind,
                    reference_name + " position"
                ] = write_df.loc[
                    kind
                ].index.map(position_map)
                write_df.loc[
                    kind,
                    reference_name + " aa"
                ] = write_df.loc[
                    kind
                ].index.map(lambda pos: reference_sequence[pos])
            filename = os.path.join(args.out_aux_dir, "positional.csv")
            write_df.to_csv(filename, index=True)
            print("Wrote: ", filename)
        # Reference structures
        # Write out reference structures with the "bvalue" atom property used
        # to indicate minor allele fractions / fraction of residues within a
        # given distance of the peptide / inclusion in pseudosequences.
        # This can be used to generate colored renderings showing these
        # properties, e.g. in pymol.
        # This "b-factor" hack is commonly used to store arbitrary user data
        # in a PDB file. There may be a better way for CIF files but I don't
        # know of one.
        for accession in args.reference_structure:
            positional_with_residues_df = positional_df.copy()
            # Each position accumulates its structure residues in a list.
            positional_with_residues_df[
                "residues"
            ] = positional_with_residues_df.index.map(lambda i: [])
            info = info_by_accession.get(accession)
            if not info:
                print("No info for reference structure", accession)
                continue
            structure = info['structure']
            for chain, row in info['aligned_df'].iterrows():
                position_map = make_position_to_aligned_position_dict(
                    row.aligned_sequence)
                residues_df = pandas.DataFrame({
                    "residue": structure.chain(chain).residues(),
                })
                residues_df["aligned_position"] = residues_df.index.map(
                    position_map)
                for _, residue_row in residues_df.iterrows():
                    positional_with_residues_df.loc[
                        (row.kind, residue_row.aligned_position),
                        "residues"
                    ].append(residue_row.residue)
            positional_with_residues_df = positional_with_residues_df.loc[
                positional_with_residues_df.residues.str.len() > 0
            ]
            # Only numeric/boolean columns are meaningful as b-factors.
            quantitative_columns = positional_with_residues_df.dtypes.loc[
                (positional_with_residues_df.dtypes == float) |
                (positional_with_residues_df.dtypes == bool)
            ].index
            for atom in structure.atoms():
                atom.bvalue = 0
            for col in quantitative_columns:
                # Assign bfactors based on the particular column.
                for _, row in positional_with_residues_df.iterrows():
                    for residue in row.residues:
                        for atom in residue.atoms():
                            atom.bvalue = float(row[col]) * 100.0
                # Write out the file with modified bvalues.
                filename = os.path.join(
                    args.out_aux_dir,
                    "%s.%s.cif" % (accession, col))
                structure.save(filename)
                print("Wrote:", filename)


if __name__ == '__main__':
    run()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,792
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/mhc2flurry/downloads.py
|
"""
Manage local downloaded data.
"""
from __future__ import (
print_function,
division,
absolute_import,
)
import logging
import yaml
from os.path import join, exists
from os import environ
from pipes import quote
from collections import OrderedDict
from appdirs import user_data_dir
from pkg_resources import resource_string
import pandas
# Environment variables that influence where downloads live and which
# release / default models directory is used.
ENVIRONMENT_VARIABLES = [
    "MHC2FLURRY_DATA_DIR",
    "MHC2FLURRY_DOWNLOADS_CURRENT_RELEASE",
    "MHC2FLURRY_DOWNLOADS_DIR",
    "MHC2FLURRY_DEFAULT_MODELS_DIR",
    "MHC2FLURRY_DOWNLOADS_GITHUB_AUTH_TOKEN"
]

# Module-level state, populated by configure() / lazily by accessors.
_DOWNLOADS_DIR = None
_CURRENT_RELEASE = None
_METADATA = None  # cached parsed contents of downloads.yml
# Snapshot of the override env var taken at import time.
_MHC2FLURRY_DEFAULT_MODELS_DIR = environ.get(
    "MHC2FLURRY_DEFAULT_MODELS_DIR")
def get_downloads_dir():
    """
    Return the path to local downloaded data.

    This is the module-level value set by configure(); None if configure()
    has not run or no directory was resolved.
    """
    return _DOWNLOADS_DIR
def get_current_release():
    """
    Return the current downloaded data release.

    This is the module-level value set by configure(); None before then.
    """
    return _CURRENT_RELEASE
def get_downloads_metadata():
    """
    Return the contents of downloads.yml as a dict.

    Parsed once via pkg_resources and cached in _METADATA for the life of
    the process.
    """
    global _METADATA
    if _METADATA is None:
        _METADATA = yaml.safe_load(resource_string(__name__, "downloads.yml"))
    return _METADATA
def get_default_class2_models_dir(test_exists=True):
    """
    Return the absolute path to the default class2 models dir.

    If environment variable MHC2FLURRY_DEFAULT_MODELS_DIR is set to an
    absolute path, return that path. If it's set to a relative path (i.e. does
    not start with /) then return that path taken to be relative to the
    mhc2flurry downloads dir.

    If environment variable MHC2FLURRY_DEFAULT_MODELS_DIR is NOT set,
    then return the path to downloaded models in the "models_class2" download.

    Parameters
    ----------
    test_exists : boolean, optional
        Whether to raise an exception of the path does not exist

    Returns
    -------
    string : absolute path
    """
    if _MHC2FLURRY_DEFAULT_MODELS_DIR:
        # join() returns the second argument unchanged when it is absolute,
        # which gives the absolute-path override behavior described above.
        result = join(get_downloads_dir(), _MHC2FLURRY_DEFAULT_MODELS_DIR)
        if test_exists and not exists(result):
            raise IOError("No such directory: %s" % result)
        return result
    return get_path(
        "models_class2", "models", test_exists=test_exists)
def get_current_release_downloads():
    """
    Describe every download defined for the current release.

    Returns an OrderedDict keyed by download name. Each value is a dict:

    downloaded : bool
        Whether the download is currently available locally
    up_to_date : bool or None
        Whether the recorded source URL(s) match downloads.yml. None when
        this cannot be determined.
    metadata : dict
        The raw entry for this download from downloads.yml.
    """
    metadata = get_downloads_metadata()
    downloads = metadata['releases'][get_current_release()]['downloads']

    def up_to_date(dir, urls):
        # DOWNLOAD_INFO.csv records the URLs that were actually fetched; a
        # missing/unreadable file means we cannot tell.
        try:
            recorded = pandas.read_csv(join(dir, "DOWNLOAD_INFO.csv"))
        except IOError:
            return None
        return list(recorded.url) == list(urls)

    result = OrderedDict()
    for entry in downloads:
        local_dir = join(get_downloads_dir(), entry["name"])
        urls = [entry['url']] if 'url' in entry else entry['part_urls']
        result[entry["name"]] = {
            'downloaded': exists(local_dir),
            'up_to_date': up_to_date(local_dir, urls),
            'metadata': entry,
        }
    return result
def get_path(download_name, filename='', test_exists=True):
    """
    Resolve a file inside a MHC2flurry download to a local absolute path.

    Parameters
    -----------
    download_name : string
    filename : string
        Relative path within the download to the file of interest
    test_exists : boolean
        If True (default), raise an error telling the user how to fetch the
        download when the file does not exist

    Returns
    -----------
    string giving local absolute path
    """
    assert '/' not in download_name, "Invalid download: %s" % download_name
    result = join(get_downloads_dir(), download_name, filename)
    if test_exists and not exists(result):
        raise RuntimeError(
            "Missing MHC2flurry downloadable file: %s. "
            "To download this data, run:\n\tmhc2flurry-downloads fetch %s\n"
            "in a shell."
            % (quote(result), download_name))
    return result
def configure():
    """
    Setup various global variables based on environment variables.

    Resolution order for the downloads dir:
      1. MHC2FLURRY_DOWNLOADS_DIR, used verbatim (release stays None);
      2. otherwise <data_dir>/<release>, where data_dir comes from
         MHC2FLURRY_DATA_DIR or an OS-appropriate user data dir, and the
         release comes from MHC2FLURRY_DOWNLOADS_CURRENT_RELEASE or
         downloads.yml's 'current-release'.
    """
    global _DOWNLOADS_DIR
    global _CURRENT_RELEASE
    _CURRENT_RELEASE = None
    _DOWNLOADS_DIR = environ.get("MHC2FLURRY_DOWNLOADS_DIR")
    if not _DOWNLOADS_DIR:
        metadata = get_downloads_metadata()
        _CURRENT_RELEASE = environ.get("MHC2FLURRY_DOWNLOADS_CURRENT_RELEASE")
        if not _CURRENT_RELEASE:
            _CURRENT_RELEASE = metadata['current-release']
        # Warn (but proceed) when the selected release was built for a
        # different compatibility version of the codebase.
        current_release_compatability = (
            metadata["releases"][_CURRENT_RELEASE]["compatibility-version"])
        current_compatability = metadata["current-compatibility-version"]
        if current_release_compatability != current_compatability:
            logging.warning(
                "The specified downloads are not compatible with this version "
                "of the MHC2flurry codebase. Downloads: release %s, "
                "compatability version: %d. Code compatability version: %d",
                _CURRENT_RELEASE,
                current_release_compatability,
                current_compatability)
        data_dir = environ.get("MHC2FLURRY_DATA_DIR")
        if not data_dir:
            # increase the version every time we make a breaking change in
            # how the data is organized. For changes to e.g. just model
            # serialization, the downloads release numbers should be used.
            data_dir = user_data_dir("mhc2flurry", version="1")
        _DOWNLOADS_DIR = join(data_dir, _CURRENT_RELEASE)
    logging.debug("Configured MHC2FLURRY_DOWNLOADS_DIR: %s", _DOWNLOADS_DIR)
# Populate the module-level globals from the environment at import time.
configure()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,793
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/allele_sequences/filter_sequences.py
|
"""
Filter and combine class II sequence fastas.
"""
from __future__ import print_function
import sys
import argparse
from mhc2flurry.common import normalize_allele_name
import Bio.SeqIO # pylint: disable=import-error
# Command-line interface: one or more input fastas, the chain kind, and the
# output fasta path.
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
    "fastas",
    nargs="+",
    help="Unaligned fastas")
parser.add_argument(
    "--kind",
    required=True,
    choices=("alpha", "beta"),
    help="Chain")
parser.add_argument(
    "--out",
    required=True,
    help="Fasta output")
# Minimum accepted sequence length per chain kind; shorter records are
# treated as fragments and skipped.
min_lengths = {
    "alpha": 200,
    "beta": 200,
}
def run():
    """
    Read the input fastas, keep one (longest) record per normalized allele
    name that meets the minimum length, and write the survivors to the
    output fasta with ids prefixed by the chain kind.
    """
    args = parser.parse_args(sys.argv[1:])
    print(args)
    min_length = min_lengths[args.kind]
    output_records = []
    seen = set()        # normalized allele names already emitted
    sequences = set()   # distinct sequences, for the summary line only
    input_records = []
    for fasta in args.fastas:
        reader = Bio.SeqIO.parse(fasta, "fasta")
        input_records.extend(reader)
    # Iterate longest records first so that when multiple records have the
    # same two digit normalized allele, we use the longest one.
    for record in sorted(input_records, key=lambda r: len(r.seq), reverse=True):
        # Assumes the allele name is the second whitespace-separated token
        # of the fasta description — TODO confirm against the input files.
        original_name = record.description.split()[1]
        name = normalize_allele_name(original_name)
        if not name:
            print("Skipping due to parsing", original_name)
            continue
        if name in seen:
            continue
        if len(record.seq) < min_length:
            print("Skipping due to short length", name, record.description)
            continue
        seen.add(name)
        sequences.add(record.seq)
        # Rewrite id/description in place; the original description is used
        # above before being overwritten here.
        record.id = "%s.%s" % (args.kind, record.id)
        record.description = "%s %s" % (name, record.description)
        output_records.append(record)
    with open(args.out, "w") as fd:
        Bio.SeqIO.write(output_records, fd, "fasta")
    print("Wrote %d / %d [%d unique] sequences: %s" % (
        len(output_records), len(input_records), len(sequences), args.out))
if __name__ == '__main__':
    run()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,794
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/mhc2flurry/allele_encoding_pair.py
|
from .allele_encoding import AlleleEncoding
class AlleleEncodingPair(object):
    """
    Bundle of two AlleleEncoding objects: one for the alpha chain and one
    for the beta chain of class II MHC allele pairs.
    """
    def __init__(
            self,
            alpha_allele_encoding,
            beta_allele_encoding):
        """
        Parameters
        ----------
        alpha_allele_encoding : AlleleEncoding
        beta_allele_encoding : AlleleEncoding
        """
        self.alpha_allele_encoding = alpha_allele_encoding
        self.beta_allele_encoding = beta_allele_encoding

    def from_pairs(self, allele_pairs):
        """
        Build a new AlleleEncodingPair for the given (alpha, beta) allele
        name pairs, borrowing from this instance's encodings.
        """
        alphas = [pair[0] for pair in allele_pairs]
        betas = [pair[1] for pair in allele_pairs]
        new_alpha = AlleleEncoding(
            alphas, borrow_from=self.alpha_allele_encoding)
        new_beta = AlleleEncoding(
            betas, borrow_from=self.beta_allele_encoding)
        return AlleleEncodingPair(new_alpha, new_beta)

    @property
    def allele_encodings(self):
        """List of ("alpha"/"beta", AlleleEncoding) tuples."""
        return [
            ("alpha", self.alpha_allele_encoding),
            ("beta", self.beta_allele_encoding)
        ]

    @property
    def allele_pairs(self):
        """List of (alpha allele, beta allele) tuples, zipped positionally."""
        alphas = self.alpha_allele_encoding.alleles
        betas = self.beta_allele_encoding.alleles
        return list(zip(alphas, betas))
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,795
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/test/test_common.py
|
from mhc2flurry.common import make_allele_pairs
def test_allele_pairs():
    """
    make_allele_pairs should expand DR/DP/DQ loci into every valid
    alpha/beta pairing, with DRA fixed to DRA*01:01.
    """
    input_alleles = [
        "HLA-DRB1*07:01",
        "HLA-DRB1*16:01",
        "HLA-DRB4*01:03",
        "HLA-DRB5*02:02",
        "HLA-DPA1*01:03",
        "HLA-DPB1*02:01",
        "HLA-DPB1*23:01",
        "HLA-DQA1*01:02",
        "HLA-DQA1*02:01",
        "HLA-DQB1*02:02",
        "HLA-DQB1*05:02",
    ]
    expected = [
        'HLA-DRA*01:01-DRB1*07:01',
        'HLA-DRA*01:01-DRB1*16:01',
        'HLA-DRA*01:01-DRB4*01:03',
        'HLA-DRA*01:01-DRB5*02:02',
        'HLA-DPA1*01:03-DPB1*02:01',
        'HLA-DPA1*01:03-DPB1*23:01',
        'HLA-DQA1*01:02-DQB1*02:02',
        'HLA-DQA1*01:02-DQB1*05:02',
        'HLA-DQA1*02:01-DQB1*02:02',
        'HLA-DQA1*02:01-DQB1*05:02',
    ]
    assert make_allele_pairs(input_alleles) == expected
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,796
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/allele_sequences/assign_pdb_sequences_to_alpha_or_beta.py
|
# Assign PDB sequences (searched by mmseqs against IMGT sequences)
# to alpha vs beta based on mmseqs results
import argparse
import sys
import pandas
import os
from mhc2flurry.fasta import read_fasta_to_dataframe
parser = argparse.ArgumentParser()
parser.add_argument(
    "pdb_sequences",
    metavar="FASTA",
    help='PDB sequences')
parser.add_argument(
    "search_results",
    metavar="TXT",
    help='mmseqs search results')
parser.add_argument(
    "--mmseqs-output-format",
    metavar="A,B,C",
    required=True,
    help='mmseqs output format (comma separated list of fields)')
parser.add_argument(
    "--out-alpha",
    metavar="FASTA",
    help='Output file')
parser.add_argument(
    "--out-beta",
    metavar="FASTA",
    help='Output file')
args = parser.parse_args(sys.argv[1:])
print(args)
sequences_df = read_fasta_to_dataframe(args.pdb_sequences).set_index("sequence_id")
# The caller must pass the same column list used when running mmseqs.
# NOTE(review): sep=None makes pandas sniff the delimiter (python engine) —
# confirm this is the intended parsing of the mmseqs output.
search_df = pandas.read_csv(
    args.search_results,
    names=args.mmseqs_output_format.split(","),
    sep=None)
# Target ids are expected to be "<kind>.<id>" (alpha/beta prefix).
search_df["kind"] = search_df.target.str.split(".").str.get(0)
# Keep only confident hits (query and target coverage thresholds), then the
# best (lowest e-value) hit per query.
df = search_df.loc[
    (search_df.qcov > 0.7) &
    (search_df.tcov > 0.5)
].sort_values("evalue").drop_duplicates("query").set_index("query")
print(df)
print("Breakdown by kind [should be equal or nearly equal]")
print(df.kind.value_counts())
def write_fasta(filename, sub_df):
    """
    Write the PDB sequences for the given subset of hits to a fasta file,
    prefixing each record id with "pdb.".
    """
    with open(filename, "w") as fd:
        for name, row in sub_df.iterrows():
            seq = sequences_df.loc[name].sequence
            fd.write(">pdb.%s\n" % name)
            fd.write("%s\n" % seq)
    print("Wrote", filename, "with", len(sub_df), "sequences")
# Emit the alpha/beta fastas only when the corresponding output path was
# supplied on the command line.
if args.out_alpha:
    write_fasta(args.out_alpha, df.loc[df.kind == "alpha"])
if args.out_beta:
    write_fasta(args.out_beta, df.loc[df.kind == "beta"])
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,797
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/test/test_class2_neural_network.py
|
import logging
# Silence noisy framework loggers before anything else imports them.
logging.getLogger('tensorflow').disabled = True
logging.getLogger('matplotlib').disabled = True
import numpy
import tensorflow.random
# Fix both RNGs so the tests are reproducible.
numpy.random.seed(0)
tensorflow.random.set_seed(0)
import pandas
from sklearn.metrics import roc_auc_score
import mhcgnomes
from mhc2flurry.allele_encoding_pair import AlleleEncodingPair
from mhc2flurry.allele_encoding import AlleleEncoding
from mhc2flurry.class2_neural_network import Class2NeuralNetwork
from mhc2flurry.common import random_peptides
from mhc2flurry.testing_utils import cleanup, startup
# Module-level fixtures — presumably picked up by the test runner as
# setup/teardown hooks for this module.
teardown = cleanup
setup = startup
def make_allele_encoding_pair(allele_names, alpha_sequences, beta_sequences):
    """
    Build an AlleleEncodingPair for a list of allele names, letting
    mhcgnomes infer the alpha chain where only a beta is given.
    """
    parsed = pandas.Series([
        mhcgnomes.parse(name, infer_class2_pairing=True)
        for name in allele_names
    ])
    alpha_names = parsed.map(lambda pair: pair.alpha.to_string())
    beta_names = parsed.map(lambda pair: pair.beta.to_string())
    return AlleleEncodingPair(
        AlleleEncoding(alpha_names, allele_to_sequence=alpha_sequences),
        AlleleEncoding(beta_names, allele_to_sequence=beta_sequences))
def test_simple():
    """
    Smoke test: two alleles whose binders are defined by simple regex
    motifs over random 15-mers; the model must separate binders from
    non-binders on both train and held-out data.
    """
    # Fake pseudosequences
    alpha_sequences = {
        "HLA-DRA*01:01": "AAAN",
    }
    beta_sequences = {
        "HLA-DRB1*01:01": "AAAQ",
        "HLA-DRB1*03:01": "AAAK",
    }
    # Synthetic ground truth: a peptide binds an allele iff it matches the
    # allele's regex motif.
    motifs = {
        "HLA-DRB1*01:01": "A.K",
        "HLA-DRB1*03:01": "Q.Q",
    }
    df = pandas.DataFrame(
        {"peptide": random_peptides(200000, length=15)}
    ).set_index("peptide")
    for (allele, motif) in motifs.items():
        df[allele] = (df.index.str.contains(motif)).astype(int)
    # Resample to have 1:1 binder / non-binder
    positive_train_df = df.loc[df.max(1) > 0.8]
    train_df = pandas.concat([
        positive_train_df,
        df.loc[~df.index.isin(positive_train_df.index)].sample(
            n=len(positive_train_df))
    ])
    model = Class2NeuralNetwork(
        minibatch_size=1024,
        random_negative_rate=1.0,
        layer_sizes=[4],
        allele_positionwise_embedding_size=4,
        patience=10,
        max_epochs=500,
        peptide_convolutions=[
            {'kernel_size': 3, 'filters': 8, 'activation': "relu"},
        ],
        peptide_encoding={
            'vector_encoding_name': 'BLOSUM62',
            'alignment_method': 'right_pad',
            'max_length': 20,
        },
    )
    train_and_check(train_df, model, alpha_sequences, beta_sequences)
def test_combination():
    """
    Harder variant of test_simple: four alleles whose pseudosequences are
    combinations of two features, so the model must combine allele-sequence
    positions rather than memorize alleles.
    """
    # Fake pseudosequences
    alpha_sequences = {
        "HLA-DRA*01:01": "AAAN",
    }
    beta_sequences = {
        "HLA-DRB1*01:01": "AAAA",
        "HLA-DRB1*03:01": "CAAA",
        "HLA-DRB1*04:01": "AAAC",
        "HLA-DRB1*05:01": "CAAC",
    }
    # Synthetic ground truth: motif depends on both sequence features.
    motifs = {
        "HLA-DRB1*01:01": "K.AK",
        "HLA-DRB1*03:01": "Q.CK",
        "HLA-DRB1*04:01": "K.DQ",
        "HLA-DRB1*05:01": "Q.EQ",
    }
    df = pandas.DataFrame(
        {"peptide": random_peptides(500000, length=15)}
    ).set_index("peptide")
    for (allele, motif) in motifs.items():
        df[allele] = (df.index.str.contains(motif)).astype(int)
    # Resample to have 1:1 binder / non-binder
    positive_train_df = df.loc[df.max(1) > 0.8]
    df = pandas.concat([
        positive_train_df,
        df.loc[~df.index.isin(positive_train_df.index)].sample(
            n=int(len(positive_train_df) / df.shape[1]))
    ])
    model = Class2NeuralNetwork(
        minibatch_size=1024,
        random_negative_rate=1.0,
        layer_sizes=[4],
        allele_positionwise_embedding_size=4,
        patience=10,
        peptide_convolutions=[
            {'kernel_size': 4, 'filters': 12, 'activation': "relu"},
        ],
        max_epochs=500,
        peptide_encoding={
            'vector_encoding_name': 'BLOSUM62',
            'alignment_method': 'right_pad',
            'max_length': 15,
        },
    )
    train_df = df.sample(frac=0.8).copy()
    # Can we generalize to an unseen allele?
    # So far, haven't gotten this to work, so leaving this line commented.
    #train_df["HLA-DRB1*05:01"] = numpy.nan
    train_and_check(
        df, model, alpha_sequences, beta_sequences, train_df=train_df)
def train_and_check(df, model, alpha_sequences, beta_sequences, train_df=None):
    """
    Fit `model` on a train split of `df` (peptides x alleles of 0/1
    affinities) and assert acceptable AUCs on both train and test splits.
    """
    print("Binders")
    print((df > 0.8).sum())
    print("Binder rate")
    print((df > 0.8).mean())
    if train_df is None:
        train_df = df.sample(frac=0.5)
    test_df = df.loc[~df.index.isin(train_df.index)]
    # Convert the wide matrix into long (peptide, allele, value) form.
    long_df = train_df.stack().reset_index().dropna()
    long_df.columns = ['peptide', 'allele', 'measurement_value']
    encoding = make_allele_encoding_pair(
        long_df.allele, alpha_sequences, beta_sequences)
    print(model.hyperparameters)
    model.fit(
        long_df.peptide.values,
        affinities=long_df["measurement_value"].values,
        allele_encoding_pair=encoding)
    check_accuracy(
        train_df, model, alpha_sequences, beta_sequences, message="TRAIN")
    check_accuracy(
        test_df, model, alpha_sequences, beta_sequences, message="TEST")
def check_accuracy(df, network, alpha_sequences, beta_sequences, message=""):
    """
    Assert the network separates binders from non-binders: overall AUC over
    all (peptide, allele) pairs, and per-allele AUC restricted to peptides
    that bind at least one allele.
    """
    long_df = df.stack().reset_index().dropna()
    long_df.columns = ['peptide', 'allele', 'measurement_value']
    encoding = make_allele_encoding_pair(
        long_df.allele, alpha_sequences, beta_sequences)
    long_df["prediction"] = network.predict(
        long_df.peptide, allele_encoding_pair=encoding)
    # Overall AUC
    long_df["binder"] = long_df.measurement_value > 0.8
    overall_auc = roc_auc_score(long_df.binder, long_df.prediction)
    print(message, "Overall AUC", overall_auc)
    assert overall_auc > 0.7, message
    # Can we discern a binder for one allele from another?
    binder_peptides = long_df.loc[long_df.binder].peptide.unique()
    binders_only = long_df.loc[long_df.peptide.isin(binder_peptides)]
    per_allele = []
    for (allele, sub_df) in binders_only.groupby("allele"):
        print(allele)
        print(sub_df)
        allele_auc = roc_auc_score(
            sub_df.binder.values, sub_df.prediction.values)
        per_allele.append((allele, allele_auc))
    per_allele = pandas.DataFrame(per_allele, columns=["allele", "auc"])
    print(message, "allele specific AUCs:")
    print(per_allele)
    print(message, "Mean predictions")
    print(binders_only.groupby(["allele", "binder"]).prediction.mean())
    for _, row in per_allele.iterrows():
        assert row.auc > 0.8, (message, row.allele)
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,798
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/downloads-generation/allele_sequences/extract_pdb_sequences.py
|
# Given a set of PDB .cif.gz files, write out a fasta with the sequences of
# each chain. This will be used to align MHC II PDB structures against
# sequences from IMDB and other sources.
import argparse
import sys
import json
import os
import glob
import atomium
parser = argparse.ArgumentParser()
# NOTE(review): the help text says "Director" (sic) and metavar says JSON,
# but the argument is actually a directory of .cif.gz files (see the glob
# below). Runtime strings left untouched here.
parser.add_argument(
    "input", metavar="JSON", help='Director of .cif.gz files')
parser.add_argument("out", metavar="FILE.fasta", help="Out fasta file")
args = parser.parse_args(sys.argv[1:])
print(args)
files = glob.glob(args.input + "/*.cif.gz")
print("Found %d files" % len(files))
with open(args.out, "w") as fd:
    for file in files:
        structure = atomium.open(file)
        # One fasta record per chain, named "<pdb code>_<chain id>".
        for chain in structure.model.chains():
            fd.write(">%s_%s %s\n" % (
                structure.code, chain.id, os.path.basename(file)))
            fd.write("".join(c.code for c in chain.residues()))
            fd.write("\n")
print("Wrote: ", args.out)
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,799
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/mhc2flurry/testing_utils.py
|
"""
Utilities used in MHC2flurry unit tests.
"""
from .common import configure_tensorflow
def startup():
    """
    Configure the TensorFlow/Keras backend (CPU, 2 threads) before unit
    tests run.
    """
    configure_tensorflow("tensorflow-cpu", num_threads=2)
def cleanup():
    """
    Release process-wide resources by clearing the Keras session.
    """
    # Imported lazily so merely loading this module does not pull in
    # tensorflow.
    from tensorflow.keras import backend
    backend.clear_session()
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,800
|
luoyuan3316/mhc2flurry
|
refs/heads/master
|
/mhc2flurry/__init__.py
|
"""
Class II MHC ligand prediction package
"""
#from .class2_affinity_predictor import Class2AffinityPredictor
#from .class2_neural_network import Class2NeuralNetwork
from .version import __version__
# Public API of the package; the predictor classes are re-exported here once
# their (currently commented-out) imports are re-enabled.
__all__ = [
    "__version__",
    # "Class2AffinityPredictor",
    # "Class2NeuralNetwork",
]
|
{"/test/test_class2_neural_network.py": ["/mhc2flurry/allele_encoding_pair.py", "/mhc2flurry/testing_utils.py"]}
|
2,801
|
shym98/Recognizer
|
refs/heads/master
|
/imageTools.py
|
from PIL import Image
import numpy as np
def getProcessedData(image, imageSize):
    """
    Resize a PIL image to imageSize x imageSize and return it as a float
    array of shape (imageSize, imageSize, 1) scaled into [0, 1].

    Assumes a single-channel (grayscale) input image — TODO confirm; the
    reshape would fail on RGB data.
    """
    # Image.ANTIALIAS was removed in Pillow 10; Image.LANCZOS is the
    # documented equivalent filter (an alias since Pillow 2.7).
    image = image.resize((imageSize, imageSize), resample=Image.LANCZOS)
    imageData = np.asarray(image, dtype=np.uint8).reshape(imageSize, imageSize, 1)
    # Normalize 0-255 pixel values into [0, 1] floats for the network.
    imageData = imageData/255.
    return imageData
def getImageData(filename, imageSize):
    """Load an image from disk and return its processed array form."""
    return getProcessedData(Image.open(filename), imageSize)
|
{"/songConverting.py": ["/config.py"], "/main.py": ["/songConverting.py", "/networkModel.py"]}
|
2,802
|
shym98/Recognizer
|
refs/heads/master
|
/config.py
|
# Paths
# NOTE(review): absolute, machine-specific paths — consider making these
# configurable (e.g. via environment variables).
path = '/home/maxim/PycharmProjects/Recognizer/Songs/'
spectPath = '/home/maxim/PycharmProjects/Recognizer/Spect/'
slicePath = '/home/maxim/PycharmProjects/Recognizer/Spect/Slices/'
#Model parameters
batchSize = 128          # samples per training minibatch
numberOfEpoch = 20       # number of training epochs
#Slice parameters
sliceSize = 128          # spectrogram slice edge length, in pixels
#Dataset parameters
filesPerGenre = 4000     # presumably slices per genre — confirm in dataset.py
validationRatio = 0.3    # fraction of data held out for validation
testRatio = 0.1          # fraction of data held out for testing
#Spectrogram resolution
pixelPerSecond = 50      # horizontal resolution for sox spectrograms (-X)
|
{"/songConverting.py": ["/config.py"], "/main.py": ["/songConverting.py", "/networkModel.py"]}
|
2,803
|
shym98/Recognizer
|
refs/heads/master
|
/songConverting.py
|
from subprocess import Popen, PIPE, STDOUT
import os
from PIL import Image
from config import *
# Directory containing this script; used as the cwd for the sox subprocesses.
currentPath = os.path.dirname(os.path.realpath(__file__))
def createSpectrogram(filename, newFilename):
    """
    Downmix an mp3 to mono and render its spectrogram PNG via sox.

    filename -- base name (without .mp3) of the source file under `path`
    newFilename -- base name for the temporary mono mp3 and the output PNG
    """
    # NOTE(security): these commands are built by string formatting and run
    # with shell=True; filenames containing quotes would break out of the
    # quoting. Acceptable only for trusted local files.
    command = "sox '{}' '/tmp/{}.mp3' remix 1,2".format(path + filename + '.mp3', newFilename)
    p = Popen(command, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True, cwd=currentPath)
    output, errors = p.communicate()
    # Use the configured horizontal resolution instead of a hard-coded 50,
    # so config.pixelPerSecond actually takes effect (same value today).
    command = "sox '/tmp/{}.mp3' -n spectrogram -Y 200 -X {} -m -r -o '{}.png'".format(newFilename, pixelPerSecond, spectPath + newFilename)
    p = Popen(command, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True, cwd=currentPath)
    output, errors = p.communicate()
    # Remove the temporary mono mp3.
    os.remove("/tmp/{}.mp3".format(newFilename))
def createSlicesFromSpectrograms(desiredSize):
    """Slice every spectrogram PNG found in the spectrogram directory."""
    pngs = [name for name in os.listdir(spectPath) if name.endswith(".png")]
    for filename in pngs:
        sliceSpectrogram(filename, desiredSize)
def sliceSpectrogram(filename, desiredSize):
    """
    Cut a spectrogram PNG into square slices of desiredSize pixels, save
    them under the genre's slice directory, then delete the source PNG.

    The genre is taken from the filename prefix before the first "_".
    """
    genre = filename.split("_")[0]
    img = Image.open(spectPath + filename)
    width, height = img.size
    nbSamples = int(width / desiredSize)
    # (Removed a stray no-op statement here: `width - desiredSize` computed
    # a value and discarded it.)
    myslicePath = slicePath + "{}/".format(genre)
    if not os.path.exists(os.path.dirname(myslicePath)):
        try:
            os.makedirs(os.path.dirname(myslicePath))
        except OSError as exc:
            print('error')
    for i in range(nbSamples):
        startPixel = i * desiredSize
        # Crop starts at y=1 — presumably to skip a border row; confirm.
        img.crop((startPixel, 1, startPixel + desiredSize, desiredSize + 1)).save(
            slicePath + "{}/{}_{}.png".format(genre, filename[:-4], i))
    try:
        os.remove(spectPath + filename)
    except OSError as exc:
        print('No such file')
def songsToData():
    """
    Convert every mp3 under `path` into spectrogram slices: render each
    spectrogram, then slice all of them.
    """
    mp3s = [name for name in os.listdir(path) if name.endswith(".mp3")]
    total = len(mp3s)
    if not os.path.exists(os.path.dirname(spectPath)):
        try:
            os.makedirs(os.path.dirname(spectPath))
        except OSError as exc:
            print("error")
    for index, filename in enumerate(mp3s):
        print("Creating spectrogram for file {}/{}...".format(index + 1, total))
        # Filenames look like "<genre>_<track>.mp3".
        genre = filename.split("_")[0]
        track = filename.split("_")[1].split(".")[0]
        newFilename = genre + "_" + str(track)
        createSpectrogram(newFilename, newFilename + "mono")
    createSlicesFromSpectrograms(sliceSize)
|
{"/songConverting.py": ["/config.py"], "/main.py": ["/songConverting.py", "/networkModel.py"]}
|
2,804
|
shym98/Recognizer
|
refs/heads/master
|
/main.py
|
import string
import argparse
import random
from songConverting import *
from networkModel import *
from dataset import *
from tkinter.filedialog import *
from tkinter import messagebox
from shutil import copyfile, rmtree
def toFixed(numObj, digits=0):
    """Format a number with a fixed count of decimal places."""
    return format(numObj, ".{}f".format(digits))
#List genres
# Each subdirectory of the slice directory is treated as one genre label.
genres = os.listdir(slicePath)
genres = [filename for filename in genres if os.path.isdir(slicePath+filename)]
nbClasses = len(genres)
#Create model
# The network's output size matches the number of genre directories found.
model = createModel(nbClasses, sliceSize)
# Choosing file to recognize
def chooseFile():
    """
    Load the trained weights, prompt the user for an mp3 file, and display
    its path in the file label (or an error dialog for other extensions).
    """
    model.load('musicDNN.tflearn')
    filename = askopenfilename()
    if not filename.endswith(".mp3"):
        messagebox.showinfo("Error", "Incorrect file extension. Must be *.mp3")
        return
    fileLabel.config(text=filename)
# Recognizing song
def recognize():
    """
    Classify the currently selected mp3: copy it to a temp name, render and
    slice its spectrogram, average the model over the slices, and show the
    predicted genre in a dialog. Cleans up temp files afterwards.
    """
    filePath = fileLabel['text']
    copyfile(filePath, path + "test.mp3")
    createSpectrogram("test", "test_mono")
    sliceSpectrogram("test_mono.png", sliceSize)
    data = []
    for filename in os.listdir(slicePath + "test/"):
        if filename.endswith(".png"):
            data.append(getImageData(slicePath + "test/" + filename, sliceSize))
    # NOTE(review): only the first slice's softmax is used, and the debug
    # print below assumes exactly 4 genres (indexes 0..3) — confirm.
    predictionSoftmax = model.predict(data)[0]
    print(toFixed(predictionSoftmax[0],3),toFixed(predictionSoftmax[1],3), toFixed(predictionSoftmax[2],3), toFixed(predictionSoftmax[3],3))
    predictedIndex = max(enumerate(predictionSoftmax), key=lambda x: x[1])[0]
    text = genres[predictedIndex]
    messagebox.showinfo("Result", text)
    # Remove the temporary slice directory and mp3 copy.
    rmtree(slicePath + "test/")
    try:
        os.remove(path + "test.mp3")
    except OSError as exc:
        print('No such file')
# Open main form
if len(sys.argv) == 1:
root = Tk()
root.title("Recognizer")
nameLabel = Label(root, text = "File path: ")
nameLabel.grid(row = 1, column = 1)
fileLabel = Label(root, text = " ", bg = "white", justify = "center")
fileLabel.grid(row = 1, column = 2)
choseButton = Button(root, text = "Browse", bg = "white", command = chooseFile).grid(row = 1, column = 3)
recognizeButton = Button(root, text = "Recognize", bg = "white", command = recognize).grid(row = 2, column = 1, columnspan = 3)
root.mainloop()
exit(0)
# Parsing arguments
parser = argparse.ArgumentParser()
parser.add_argument("mode", nargs='+', choices=["train","test","slice"])
args = parser.parse_args()
# Converting songs into spectrogram and slicing them
if "slice" in args.mode:
songsToData()
sys.exit()
# Train model
if "train" in args.mode:
    #Create or load new dataset
    train_X, train_y, validation_X, validation_y = getDataset(filesPerGenre, genres, sliceSize, validationRatio, testRatio, mode="train")
    #Define run id for graphs
    run_id = "MusicGenres - "+str(batchSize)+" "+''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(10))
    #Train the model
    print("[+] Training the model...")
    model.fit(train_X, train_y, n_epoch=numberOfEpoch, batch_size=batchSize, shuffle=True, validation_set=(validation_X, validation_y), snapshot_step=100, show_metric=True, run_id=run_id)
    print(" Model trained!")
    #Save trained model
    print("[+] Saving the weights...")
    model.save('musicDNN.tflearn')
    print("[+] Weights saved!")
# Test model
if "test" in args.mode:
    #Create or load new dataset
    test_X, test_y = getDataset(filesPerGenre, genres, sliceSize, validationRatio, testRatio, mode="test")
    #Load weights
    print("[+] Loading weights...")
    model.load('musicDNN.tflearn')
    # NOTE(review): the string below contains a mis-encoded character ("β"
    # followed by a line break) — likely a checkmark lost in a past encoding
    # conversion. Left byte-identical since it is runtime output.
    print(" Weights loaded! β
")
    testAccuracy = model.evaluate(test_X, test_y)[0]
    print("[+] Test accuracy: {} ".format(testAccuracy))
    #rename()
|
{"/songConverting.py": ["/config.py"], "/main.py": ["/songConverting.py", "/networkModel.py"]}
|
2,805
|
shym98/Recognizer
|
refs/heads/master
|
/networkModel.py
|
import tflearn
from tflearn import input_data, conv_2d, max_pool_2d, fully_connected, dropout, regression
def createModel(classesNumber, imageSize):
    """
    Build the tflearn CNN: four conv/max-pool stages with doubling filter
    counts, a 1024-unit dense layer with dropout, and a softmax over the
    genre classes.
    """
    print("[+] Creating model ...")
    net = input_data(shape=[None, imageSize, imageSize, 1], name='input')
    # Filter counts double at each stage: 64 -> 128 -> 256 -> 512.
    for filters in (64, 128, 256, 512):
        net = conv_2d(net, filters, 2, activation='elu', weights_init="Xavier")
        net = max_pool_2d(net, 2)
    net = fully_connected(net, 1024, activation='elu')
    net = dropout(net, 0.5)
    net = fully_connected(net, classesNumber, activation='softmax')
    net = regression(net, optimizer='rmsprop', loss='categorical_crossentropy')
    dnn = tflearn.DNN(net)
    print("[+] Model created")
    return dnn
|
{"/songConverting.py": ["/config.py"], "/main.py": ["/songConverting.py", "/networkModel.py"]}
|
2,823
|
shacharr/roomba_sim
|
refs/heads/master
|
/arena_model.py
|
import pygame
from helper_functions import *
class RoomModel(object):
    """
    Pixel-grid model of the room being cleaned.

    The room is rasterized onto a pygame Surface: dirty floor is green,
    cleaned floor is blue, and anything outside the room polygon (or inside
    an obstacle) is black ("dead zone").
    """
    DIRTY_COLOR = (0,255,0)
    CLEAN_COLOR = (0,0,255)
    DEAD_ZONE_COLOR = (0,0,0)
    def __init__(self, polygon, obstacles=[]):
        """
        polygon -- list of (x, y) vertices bounding the room
        obstacles -- list of polygons (vertex lists) the robot cannot enter

        NOTE(review): the mutable default [] is shared across calls; safe
        only as long as callers never mutate it.
        """
        self.polygon = polygon
        self.obstacles = obstacles
        max_x = max([x[0] for x in polygon])
        max_y = max([x[1] for x in polygon])
        self.state = pygame.Surface((max_x,max_y))
        self.state.fill(self.DEAD_ZONE_COLOR)
        pygame.draw.polygon(self.state,self.DIRTY_COLOR,polygon)
        for p in obstacles:
            pygame.draw.polygon(self.state,self.DEAD_ZONE_COLOR,p)
        # Running totals, maintained incrementally by clean_polygon().
        self.clean_count, self.dirty_count = self.count_clean_dirty(0,0,max_x,max_y)
    def clean_box(self, len_x, len_y, direction, mid_point):
        """Mark a rotated len_x-by-len_y rectangle centered at mid_point clean."""
        # Start at zero-coords
        coords = [(-len_x/2,-len_y/2),( len_x/2,-len_y/2),
                  ( len_x/2, len_y/2),(-len_x/2, len_y/2)]
        #Rotate
        coords = rotate_polygon(coords,direction)
        #Move
        coords = transpose_polygon(coords,mid_point)
        self.clean_polygon(coords)
    def clean_polygon(self, corners):
        """Paint a polygon clean and update the clean/dirty running totals."""
        bbox = polygon_bbox(corners)
        orig_clean,orig_dirty = self.count_clean_dirty(*bbox)
        pygame.draw.polygon(self.state,self.CLEAN_COLOR,corners)
        new_clean,new_dirty = self.count_clean_dirty(*bbox)
        self.clean_count += (new_clean - orig_clean)
        self.dirty_count += (new_dirty - orig_dirty)
    def is_coliding(self, loc, size):
        """True if a circle of radius size at loc touches a wall or obstacle."""
        for p in [self.polygon] + self.obstacles:
            if is_circle_coliding_with_poligon(p, loc, size):
                return True
        return False
    def count_clean_dirty(self,start_x,start_y,end_x,end_y):
        """
        Count clean and dirty pixels inside the given bounding box (expanded
        by one pixel and clipped to the surface). Returns (clean, dirty).
        """
        clean_count = 0
        dirty_count = 0
        start_x = int(max(start_x-1,0))
        max_x = self.state.get_clip().width
        delta_x = int(min(end_x+1,max_x)) - start_x
        start_y = int(max(start_y-1,0))
        max_y = self.state.get_clip().height
        delta_y = int(min(end_y+1,max_y)) - start_y
        if delta_x <= 0 or delta_y <= 0:
            return (0,0)
        rect = pygame.Rect(start_x,start_y, delta_x,delta_y)
        sub_surf = self.state.subsurface(rect)
        ar = pygame.PixelArray(sub_surf)
        # PERF: map the two colors once instead of calling map_rgb() for
        # every pixel of the doubly-nested loop (it is loop-invariant).
        dirty_rgb = self.state.map_rgb(self.DIRTY_COLOR)
        clean_rgb = self.state.map_rgb(self.CLEAN_COLOR)
        for x in range(delta_x):
            for y in range(delta_y):
                pixel = ar[x,y]
                if pixel == dirty_rgb:
                    dirty_count += 1
                elif pixel == clean_rgb:
                    clean_count += 1
        del ar,sub_surf
        return (clean_count,dirty_count)
    def is_good_start_point(self, loc, size):
        """True if loc is inside the room (not dead zone) and collision-free."""
        ar = pygame.PixelArray(self.state)
        if ar[loc[0],loc[1]] == self.state.map_rgb(self.DEAD_ZONE_COLOR):
            return False
        if self.is_coliding(loc, size):
            return False
        return True
|
{"/arena_model.py": ["/helper_functions.py"], "/simulator.py": ["/arena_model.py", "/arena_view.py", "/roomba_model.py"], "/cleaning_robot_model.py": ["/helper_functions.py"], "/controller.py": ["/simulator.py", "/helper_functions.py"], "/arena_view.py": ["/helper_functions.py"]}
|
2,824
|
shacharr/roomba_sim
|
refs/heads/master
|
/roomba_model.py
|
import math
import random
from cleaning_robot_model import CleaningRobotModel
from helper_functions import *
class RoombaModel(CleaningRobotModel):
MODE_TIME_LIMIT = [500,2000]
TURN_SIZE_ON_WALL_FOLLOW = math.pi/180.
MAX_TURN_STEPS = 360
SPIRAL_ANGLE_INIT = math.pi/18.
SPIRAL_ANGLE_RATIO = 0.995
def __init__(self, *args, **kwargs):
super(RoombaModel,self).__init__(*args, **kwargs)
self.in_random_direction_mode = False
self.looking_for_wall = False
self.spiral_mode = True
self.spiral_angle = self.SPIRAL_ANGLE_INIT
self.time_in_mode = 0
if "MODE_TIME_LIMIT" in kwargs:
self.MODE_TIME_LIMIT = kwargs["MODE_TIME_LIMIT"]
if "TURN_SIZE_ON_WALL_FOLLOW" in kwargs:
self.TURN_SIZE_ON_WALL_FOLLOW = kwargs["TURN_SIZE_ON_WALL_FOLLOW"]
self.MAX_TURN_STEPS = (2*math.pi)/self.TURN_SIZE_ON_WALL_FOLLOW
def left_hand_tracking(self):
found_wall = False
for i in range(self.MAX_TURN_STEPS):
self.turn(-self.TURN_SIZE_ON_WALL_FOLLOW)
if self.check_move():
found_wall = True
break
if not found_wall:
self.looking_for_wall = True
self.turn(self.TURN_SIZE_ON_WALL_FOLLOW)
def spiral_step(self):
self.turn(self.spiral_angle)
self.spiral_angle = self.spiral_angle * self.SPIRAL_ANGLE_RATIO
def step(self):
if not self.in_random_direction_mode and not self.looking_for_wall:
self.left_hand_tracking()
if self.spiral_mode:
self.spiral_step()
collided = self.move()
self.time_in_mode += 1
if collided:
self.looking_for_wall = False
self.spiral_mode = False
if self.in_random_direction_mode:
self.turn(random.randint(0,360)*math.pi/180.)
else:
while self.check_move():
self.turn(self.TURN_SIZE_ON_WALL_FOLLOW)
if not self.spiral_mode and self.time_in_mode > self.MODE_TIME_LIMIT[self.in_random_direction_mode]:
self.in_random_direction_mode = not self.in_random_direction_mode
self.time_in_mode = 0
print "Switched to mode",self.in_random_direction_mode
|
{"/arena_model.py": ["/helper_functions.py"], "/simulator.py": ["/arena_model.py", "/arena_view.py", "/roomba_model.py"], "/cleaning_robot_model.py": ["/helper_functions.py"], "/controller.py": ["/simulator.py", "/helper_functions.py"], "/arena_view.py": ["/helper_functions.py"]}
|
2,825
|
shacharr/roomba_sim
|
refs/heads/master
|
/simulator.py
|
import time
import pygame
import math
import random
import itertools
import arena_model
import arena_view
import roomba_model
def run_simulation(robot_params={}, room_params={}, stop_conditions={}, visual_feedback=True, draw_final_result=True):
    """
    Run one roomba cleaning simulation and return the coverage time series.

    robot_params -- ROBOT_SIZE, HEAD_SIZE, SPEED, optional INITIAL_POS
    room_params -- ROOM_POLYGON, OBSTECLES
    stop_conditions -- optional MIN_COVERAGE_TO_EXIT, MAX_NO_GAIN_STEPS,
        MAX_TIME
    visual_feedback -- animate every step on screen
    draw_final_result -- when not animating, render the final state once

    Returns a list of the coverage fraction at every simulation step.

    NOTE(review): the mutable {} defaults are shared across calls; safe only
    because they are never mutated here.
    """
    stats = []
    room_polygon = room_params["ROOM_POLYGON"]
    obstecles = room_params["OBSTECLES"]
    max_x = max(x[0] for x in room_polygon)
    max_y = max(x[1] for x in room_polygon)
    robot_size = robot_params["ROBOT_SIZE"]
    if visual_feedback:
        view = arena_view.ScreenView(robot_size, [max_x,max_y])
    room_model = arena_model.RoomModel(room_polygon,obstecles)
    # Pick a start pose: as given, or a random collision-free point.
    if "INITIAL_POS" in robot_params:
        start_x,start_y,direction = robot_params["INITIAL_POS"]
    else:
        start_x,start_y=random.randint(0,max_x),random.randint(0,max_y)
        while not room_model.is_good_start_point((start_x,start_y),robot_size):
            start_x,start_y=random.randint(0,max_x),random.randint(0,max_y)
        direction = random.randint(0,360)*math.pi/180.
    roomba = roomba_model.RoombaModel((start_x,start_y), robot_size, robot_params["HEAD_SIZE"],
                                      direction, robot_params["SPEED"], room_model)
    done = False
    last_coverage = 0
    steps_with_no_improvement = 0
    min_coverage = None
    if "MIN_COVERAGE_TO_EXIT" in stop_conditions:
        min_coverage = stop_conditions["MIN_COVERAGE_TO_EXIT"]
    max_no_gain_steps = 0
    if "MAX_NO_GAIN_STEPS" in stop_conditions:
        max_no_gain_steps = stop_conditions["MAX_NO_GAIN_STEPS"]
    max_time = None
    if "MAX_TIME" in stop_conditions:
        max_time = stop_conditions["MAX_TIME"]
    for t in itertools.count():
        coverage = float(room_model.clean_count)/(room_model.clean_count + room_model.dirty_count)
        stats.append(coverage)
        # Stop once coverage stalls above the minimum threshold.
        # NOTE(review): steps_with_no_improvement is never reset when
        # coverage improves again — possibly intentional; worth confirming.
        if coverage == last_coverage and min_coverage != None and coverage > min_coverage:
            steps_with_no_improvement += 1
            if steps_with_no_improvement > max_no_gain_steps:
                done = True
        last_coverage = coverage
        if max_time != None and t > max_time:
            done = True
        if visual_feedback:
            view.clear_screen(room_model.state)
            for event in pygame.event.get(): # User did something
                if event.type == pygame.QUIT: # If user clicked close
                    done=True
        if done:
            break
        roomba.step()
        if visual_feedback:
            view.draw_roomba(*roomba.get_draw_info())
    if not visual_feedback and draw_final_result:
        view = arena_view.ScreenView(robot_size, [max_x,max_y])
        view.clear_screen(room_model.state)
        view.draw_roomba(*roomba.get_draw_info())
    # BUGFIX: the final refresh previously ran unconditionally and raised
    # NameError when visual_feedback and draw_final_result were both False
    # (no view was ever created on that path).
    if visual_feedback or draw_final_result:
        view.clear_screen(room_model.state)
    return stats
|
{"/arena_model.py": ["/helper_functions.py"], "/simulator.py": ["/arena_model.py", "/arena_view.py", "/roomba_model.py"], "/cleaning_robot_model.py": ["/helper_functions.py"], "/controller.py": ["/simulator.py", "/helper_functions.py"], "/arena_view.py": ["/helper_functions.py"]}
|
2,826
|
shacharr/roomba_sim
|
refs/heads/master
|
/cleaning_robot_model.py
|
import math
from helper_functions import *
class CleaningRobotModel(object):
    """Base model for a cleaning robot moving inside a RoomModel.

    Tracks position, heading (radians), speed and the travelled trace, and
    marks cleaned areas on the room as the robot moves or turns.
    Subclasses must implement step().
    """

    # Angular resolution used to sweep the cleaning head while turning.
    TURN_STEP_FOR_DRAWING = math.pi/18.

    def __init__(self, location, size, cleaning_head_size, direction, speed, room):
        self.loc = location                           # current (x, y)
        self.direction = direction                    # heading in radians
        self.speed = speed                            # distance covered per move()
        self.size = size                              # robot radius
        self.room = room                              # RoomModel collision/clean API
        self.cleaning_head_size = cleaning_head_size  # head width in robot radii
        self.trace = [location]                       # visited points, for drawing

    def calc_move_next_loc(self):
        """Return the location one speed-step ahead along the current heading."""
        x, y = self.loc
        step_x = -self.speed * math.sin(self.direction)
        step_y = self.speed * math.cos(self.direction)
        return (x + step_x, y + step_y)

    def check_move(self):
        """Return True when the next forward step would collide."""
        new_loc = self.calc_move_next_loc()
        return self.room.is_coliding(new_loc, self.size)

    def move(self):
        """Advance one step, cleaning the swept box. Return True on collision.

        Assumes speed is slow enough to prevent quantum tunneling of the
        roomba through thin walls.
        """
        new_loc = self.calc_move_next_loc()
        if not self.room.is_coliding(new_loc, self.size):
            mid_point = [(a + b) / 2. for a, b in zip(new_loc, self.loc)]
            # Fix: use the configured head size instead of the hard-coded 1.9
            # (which only matched the callers' default HEAD_SIZE of 1.9 and
            # was inconsistent with clean_step below).
            self.room.clean_box(self.size * self.cleaning_head_size, self.speed,
                                self.direction, mid_point)
            self.loc = new_loc
            self.trace.append(new_loc)
            return False
        return True

    def clean_step(self, initial_step, step_size):
        """Clean the two triangles swept by the head rotating by step_size."""
        delta_x = self.size * self.cleaning_head_size / 2.
        cleaned_triangle_1 = [(0, 0), (delta_x, 0), rotate((delta_x, 0), step_size)]
        cleaned_triangle_2 = [(0, 0), (-delta_x, 0), rotate((-delta_x, 0), step_size)]
        cleaned_triangle_1 = rotate_polygon(cleaned_triangle_1,
                                            self.direction + initial_step)
        cleaned_triangle_2 = rotate_polygon(cleaned_triangle_2,
                                            self.direction + initial_step)
        cleaned_triangle_1 = transpose_polygon(cleaned_triangle_1, self.loc)
        cleaned_triangle_2 = transpose_polygon(cleaned_triangle_2, self.loc)
        self.room.clean_polygon(cleaned_triangle_1)
        self.room.clean_polygon(cleaned_triangle_2)

    def turn(self, relative_direction):
        """Turn in place by relative_direction radians, sweeping the head."""
        step = 1
        if relative_direction < 0:
            step = -1
        target_step = abs(int(relative_direction / self.TURN_STEP_FOR_DRAWING))
        for turn_step in range(0, target_step + 1):
            self.clean_step(step * turn_step * self.TURN_STEP_FOR_DRAWING,
                            step * self.TURN_STEP_FOR_DRAWING)
        # Sweep the remaining fraction of a drawing step.
        self.clean_step(step * target_step * self.TURN_STEP_FOR_DRAWING,
                        relative_direction - step * target_step * self.TURN_STEP_FOR_DRAWING)
        self.direction += relative_direction

    def step(self):
        """One control step; must be implemented by subclasses."""
        raise Exception("Pure virtual function called")

    def get_draw_info(self):
        """Return (integer location, heading, trace) for the view layer."""
        return ([int(x) for x in self.loc], self.direction, self.trace)
|
{"/arena_model.py": ["/helper_functions.py"], "/simulator.py": ["/arena_model.py", "/arena_view.py", "/roomba_model.py"], "/cleaning_robot_model.py": ["/helper_functions.py"], "/controller.py": ["/simulator.py", "/helper_functions.py"], "/arena_view.py": ["/helper_functions.py"]}
|
2,827
|
shacharr/roomba_sim
|
refs/heads/master
|
/controller.py
|
import matplotlib.pyplot
from simulator import run_simulation
from helper_functions import *
#ROOM_POLYGON = [(0,0),(640,0),(640,480),(0,480)]
#ROOM_POLYGON = [(0,0),(640,0),(640,480),(320,480),(320,240),(0,240)]
# Non-convex demo room in pixel coordinates; the two commented variants
# above are simpler alternative layouts.
ROOM_POLYGON = [(0,0),(640,0),(640,480),(320,480),(250,240),(0,240)]
SMALL_SQUARE = [(0,0),(10,0),(10,10),(0,10)]
# Four small square obstacles placed inside the room.  Note: the "OBSTECLES"
# spelling is the key the simulator expects in room_params.
OBSTECLES = [transpose_polygon(SMALL_SQUARE,(200,45)),
             transpose_polygon(SMALL_SQUARE,(270,45)),
             transpose_polygon(SMALL_SQUARE,(200,125)),
             transpose_polygon(SMALL_SQUARE,(270,125)),]
ROOMBA_SIZE = 20  # robot radius in pixels
MIN_COVERAGE_TO_EXIT = 0.988  # coverage ratio at which stalling may end the run
MAX_NO_GAIN_STEPS = 3000  # stalled-step budget once MIN_COVERAGE_TO_EXIT is passed
def main():
    """Configure the sample room and robot, run one simulation and plot coverage."""
    robot_params = dict(ROBOT_SIZE=ROOMBA_SIZE, HEAD_SIZE=1.9, SPEED=3)
    room_params = dict(ROOM_POLYGON=ROOM_POLYGON, OBSTECLES=OBSTECLES)
    stop_conditions = dict(
        MIN_COVERAGE_TO_EXIT=MIN_COVERAGE_TO_EXIT,
        MAX_NO_GAIN_STEPS=MAX_NO_GAIN_STEPS,
        MAX_TIME=9000,
    )
    coverage_history = run_simulation(robot_params, room_params,
                                      stop_conditions, visual_feedback=True)
    matplotlib.pyplot.plot(coverage_history)
    matplotlib.pyplot.show()
if __name__ == "__main__":
    # Run the demo when this module is executed directly.
    main()
|
{"/arena_model.py": ["/helper_functions.py"], "/simulator.py": ["/arena_model.py", "/arena_view.py", "/roomba_model.py"], "/cleaning_robot_model.py": ["/helper_functions.py"], "/controller.py": ["/simulator.py", "/helper_functions.py"], "/arena_view.py": ["/helper_functions.py"]}
|
2,828
|
shacharr/roomba_sim
|
refs/heads/master
|
/helper_functions.py
|
import math
class Point(object):
    """A 2D point with the vector helpers used by the collision tests."""

    def __init__(self, coords):
        self.x, self.y = coords[0], coords[1]

    def delta(self, other):
        """Return self - other as a new Point."""
        return Point((self.x - other.x, self.y - other.y))

    def dot(self, other):
        """Return the scalar (dot) product of self and other."""
        return self.x * other.x + self.y * other.y
def rotate(coords, direction):
    """Rotate the 2D point *coords* by *direction* radians around the origin.

    Standard rotation-matrix formula; derivation at
    https://www.siggraph.org/education/materials/HyperGraph/modeling/mod_tran/2drota.htm
    """
    px, py = coords
    c, s = math.cos(direction), math.sin(direction)
    return (px * c - py * s, py * c + px * s)
def line_circle_intersect(line_details, circle_details):
    """Return True when the segment [E, L] intersects the circle (C, r).

    line_details: pair of Point endpoints (E, L).
    circle_details: pair (Point centre, radius).
    Solves the quadratic in the segment parameter t; an intersection exists
    when a real root falls inside [0, 1].  Based on
    http://stackoverflow.com/questions/1073336/circle-line-segment-collision-detection-algorithm
    """
    seg_start, seg_end = line_details[0], line_details[1]
    centre, radius = circle_details[0], circle_details[1]
    seg_vec = seg_end.delta(seg_start)
    centre_to_start = seg_start.delta(centre)
    a = seg_vec.dot(seg_vec)
    b = 2 * centre_to_start.dot(seg_vec)
    c = centre_to_start.dot(centre_to_start) - radius * radius
    disc = b * b - 4 * a * c
    if disc < 0:
        # No real roots: the infinite line misses the circle entirely.
        return False
    root = math.sqrt(disc)
    t1 = (-b - root) / (2 * a)
    t2 = (-b + root) / (2 * a)
    return (0 <= t1 <= 1) or (0 <= t2 <= 1)
def rotate_polygon(poly,direction):
    # Rotate every vertex of *poly* by *direction* radians around the origin.
    return [rotate(p,direction) for p in poly]
def transpose_polygon(poly, delta_coords):
    """Translate every vertex of *poly* by the offset *delta_coords*.

    Vertices come back as lists, matching the original implementation.
    """
    shifted = []
    for vertex in poly:
        shifted.append([coord + offset
                        for coord, offset in zip(vertex, delta_coords)])
    return shifted
def polygon_bbox(poly):
    """Return the axis-aligned bounding box [min_x, min_y, max_x, max_y] of *poly*."""
    xs = [vertex[0] for vertex in poly]
    ys = [vertex[1] for vertex in poly]
    return [min(xs), min(ys), max(xs), max(ys)]
def is_circle_coliding_with_poligon(polygon, center, radius):
    """Return True when the circle (center, radius) touches any polygon edge.

    Walks every edge, including the closing edge back to the first vertex.
    """
    return any(
        line_circle_intersect([Point(edge_start), Point(edge_end)],
                              [Point(center), radius])
        for edge_start, edge_end in zip(polygon, polygon[1:] + [polygon[0]]))
|
{"/arena_model.py": ["/helper_functions.py"], "/simulator.py": ["/arena_model.py", "/arena_view.py", "/roomba_model.py"], "/cleaning_robot_model.py": ["/helper_functions.py"], "/controller.py": ["/simulator.py", "/helper_functions.py"], "/arena_view.py": ["/helper_functions.py"]}
|
2,829
|
shacharr/roomba_sim
|
refs/heads/master
|
/arena_view.py
|
import time
import pygame
import math
from helper_functions import *
class ScreenView(object):
    """Pygame window that renders the arena state and the roomba on top of it."""

    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0)
    BLUE = (0, 0, 255)
    GREEN = (0, 255, 0)
    RED = (255, 0, 0)
    # Heading-arrow polygon in units of the roomba radius, pointing "up".
    ARROW_RELATIVE_COORDS = ((0, 0.8), (0.4, 0.5), (0.2, 0.5), (0.2, -0.6),
                             (-0.2, -0.6), (-0.2, 0.5), (-0.4, 0.5), (0, 0.8))

    def __init__(self, roomba_size, screen_size):
        self.screen = pygame.display.set_mode(screen_size)
        self.roomba_size = roomba_size
        # Pre-scale the unit arrow to the roomba's size once.
        self.arrow_scaled_coords = tuple(
            tuple(coord * roomba_size for coord in vertex)
            for vertex in self.ARROW_RELATIVE_COORDS)

    def clear_screen(self, room_surface):
        """Show the previous frame, then start a fresh one from the room surface."""
        pygame.display.flip()
        self.screen.fill(self.WHITE)
        self.screen.blit(room_surface, (0, 0))

    def draw_roomba(self, mid_point, direction, trace):
        """Draw the roomba disc, its heading arrow and the path travelled."""
        pygame.draw.circle(self.screen, self.RED,
                           mid_point, self.roomba_size)
        arrow = []
        for vertex in self.arrow_scaled_coords:
            rx, ry = rotate(vertex, direction)
            arrow.append((rx + mid_point[0], ry + mid_point[1]))
        pygame.draw.polygon(self.screen, self.BLACK, tuple(arrow))
        pygame.draw.aalines(self.screen, self.RED, False, trace)
def testView():
    """Manual smoke test: spin the roomba arrow in place until the window closes.

    Fixes over the original: ScreenView requires a screen size, draw_roomba a
    trace, and clear_screen a background surface — the old calls raised
    TypeError immediately.
    """
    pygame.init()
    clock = pygame.time.Clock()
    view = ScreenView(50, [640, 480])
    background = pygame.Surface((640, 480))
    background.fill(ScreenView.WHITE)
    trace = [(100, 100), (100, 100)]  # aalines needs at least two points
    done = False
    for i in range(0, 360 * 10):
        clock.tick(30)  # cap at 30 FPS
        for event in pygame.event.get():  # User did something
            if event.type == pygame.QUIT:  # If user clicked close
                done = True
        if done:
            break
        view.draw_roomba((100, 100), i * math.pi / 180., trace)
        view.clear_screen(background)  # flips the drawn frame onto the display
if __name__ == "__main__":
    # Manual visual smoke test; requires a display.
    testView()
|
{"/arena_model.py": ["/helper_functions.py"], "/simulator.py": ["/arena_model.py", "/arena_view.py", "/roomba_model.py"], "/cleaning_robot_model.py": ["/helper_functions.py"], "/controller.py": ["/simulator.py", "/helper_functions.py"], "/arena_view.py": ["/helper_functions.py"]}
|
2,844
|
gambler1541/book-mark
|
refs/heads/master
|
/app/bookmark/urls.py
|
from django.urls import path
from .views import BookmarkListView, BookmarkCreateView, BookmarkDetail, BookmarkUpdate, BookmarkDeleteView
urlpatterns = [
    # Bookmark CRUD routes.  The route names are referenced by
    # reverse_lazy('list') in the views and by templates.
    path('', BookmarkListView.as_view(), name='list'),
    path('add/', BookmarkCreateView.as_view(), name='add'),
    path('detail/<int:pk>/', BookmarkDetail.as_view(), name='detail'),
    path('update/<int:pk>/', BookmarkUpdate.as_view(), name='update'),
    path('delete/<int:pk>/', BookmarkDeleteView.as_view(), name='delete'),
]
|
{"/app/bookmark/urls.py": ["/app/bookmark/views.py"]}
|
2,845
|
gambler1541/book-mark
|
refs/heads/master
|
/app/bookmark/views.py
|
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import ListView, CreateView, DetailView, UpdateView, DeleteView
from .models import Bookmark
class BookmarkListView(ListView):
    # Exposed to the template through the default object/object_list context.
    model = Bookmark
    # Number of bookmarks shown per page.
    paginate_by = 6
class BookmarkCreateView(CreateView):
    model = Bookmark
    # Fields accepted by the create form.
    fields = ['site_name',
              'url',
              ]
    # Page to redirect to after a successful create (commonly the detail
    # page; here the list page).
    success_url = reverse_lazy('list')
    # Default templates are named <model>_<suffix>; CreateView/UpdateView use
    # the '_form' suffix, overridden here so bookmark_create.html is used.
    template_name_suffix = '_create'
class BookmarkDetail(DetailView):
    # Read-only view of a single Bookmark.
    model = Bookmark
class BookmarkUpdate(UpdateView):
    model = Bookmark
    # Editable fields on the update form.
    fields = ['site_name',
              'url',
              ]
    # Use bookmark_update.html instead of the default '_form' template.
    template_name_suffix = '_update'
class BookmarkDeleteView(DeleteView):
    model = Bookmark
    # Return to the list page after a deletion.
    success_url = reverse_lazy('list')
|
{"/app/bookmark/urls.py": ["/app/bookmark/views.py"]}
|
2,846
|
welloderx/wechat-2021-BigDataChallenge
|
refs/heads/master
|
/src/deepctr_ext/utils.py
|
from collections import OrderedDict
from .feat import SparseFeat, DenseFeat, VarLenSparseFeat
import torch.nn as nn
import numpy as np
import torch
from .layers import SequencePoolingLayer
def get_feature_names(feature_columns):
    """Return the ordered list of input-feature names for *feature_columns*."""
    # Iterating the OrderedDict from build_input_features yields its keys.
    return list(build_input_features(feature_columns))
def build_input_features(feature_columns):
    """Map each feature name to its (start, end) column slice of the input matrix.

    Returns an OrderedDict {feature_name: (start, start + width)} where width
    is 1 for sparse features, ``dimension`` for dense features and ``maxlen``
    for variable-length sparse features; an optional length feature gets one
    extra column right after its sequence.
    """
    features = OrderedDict()
    cursor = 0
    for column in feature_columns:
        name = column.name
        if name in features:
            # First occurrence of a name wins; duplicates are skipped.
            continue
        if isinstance(column, SparseFeat):
            width = 1
        elif isinstance(column, DenseFeat):
            width = column.dimension
        elif isinstance(column, VarLenSparseFeat):
            width = column.maxlen
        else:
            raise TypeError("Invalid feature column type,got", type(column))
        features[name] = (cursor, cursor + width)
        cursor += width
        if isinstance(column, VarLenSparseFeat):
            if column.length_name is not None and column.length_name not in features:
                features[column.length_name] = (cursor, cursor + 1)
                cursor += 1
    return features
def create_embedding_matrix(feature_columns, init_std=0.0001, linear=False, sparse=False, device='cpu'):
    """Build one nn.Embedding per (var-len) sparse feature, keyed by embedding name.

    For a linear term every embedding gets dimension 1, otherwise the feature's
    own ``embedding_dim``.  Weights are normal-initialised with std *init_std*
    and the whole ModuleDict is moved to *device*.
    """
    if feature_columns:
        sparse_feats = [fc for fc in feature_columns if isinstance(fc, SparseFeat)]
        varlen_feats = [fc for fc in feature_columns if isinstance(fc, VarLenSparseFeat)]
    else:
        sparse_feats, varlen_feats = [], []
    embedding_dict = nn.ModuleDict()
    for feat in sparse_feats + varlen_feats:
        dim = 1 if linear else feat.embedding_dim
        # Later features with the same embedding_name overwrite earlier ones,
        # matching the original dict-comprehension behaviour.
        embedding_dict[feat.embedding_name] = nn.Embedding(
            feat.vocabulary_size, dim, sparse=sparse)
    for embedding in embedding_dict.values():
        nn.init.normal_(embedding.weight, mean=0, std=init_std)
    return embedding_dict.to(device)
# ----------------------------------
def get_varlen_pooling_list(embedding_dict, features, feature_index, varlen_sparse_feature_columns, device):
    """Embed and pool every variable-length sparse feature.

    embedding_dict: ModuleDict of nn.Embedding keyed by embedding name.
    features: 2D input tensor; feature_index maps names to column slices.
    Returns a list with one pooled embedding tensor per var-len feature.
    """
    varlen_sparse_embedding_list = []
    for feat in varlen_sparse_feature_columns:
        # Look up embeddings for this feature's padded id sequence.
        seq_emb = embedding_dict[feat.embedding_name](
            features[:, feature_index[feat.name][0]:feature_index[feat.name][1]].long())
        if feat.length_name is None:
            # No companion length feature: treat id 0 as padding.
            seq_mask = features[:, feature_index[feat.name][0]:feature_index[feat.name][1]].long() != 0
            emb = SequencePoolingLayer(mode=feat.combiner, supports_masking=True, device=device)(
                [seq_emb, seq_mask])
        else:
            # The explicit length feature drives the pooling mask.
            seq_length = features[:, feature_index[feat.length_name][0]:feature_index[feat.length_name][1]].long()
            emb = SequencePoolingLayer(mode=feat.combiner, supports_masking=False, device=device)(
                [seq_emb, seq_length])
        varlen_sparse_embedding_list.append(emb)
    return varlen_sparse_embedding_list
# -------------------------------
def combined_dnn_input(sparse_embedding_list, dense_value_list):
    """Concatenate sparse embeddings and dense values into one flat DNN input.

    Each group is concatenated along the last axis and flattened to
    (batch, features); when both groups are present they are joined together.
    Raises NotImplementedError when both lists are empty.
    """
    def _flat(tensors):
        # (batch, ..., d) -> (batch, total_features)
        return torch.flatten(torch.cat(tensors, dim=-1), start_dim=1)

    has_sparse = len(sparse_embedding_list) > 0
    has_dense = len(dense_value_list) > 0
    if has_sparse and has_dense:
        return concat_fun([_flat(sparse_embedding_list), _flat(dense_value_list)])
    if has_sparse:
        return _flat(sparse_embedding_list)
    if has_dense:
        return _flat(dense_value_list)
    raise NotImplementedError
def concat_fun(inputs, axis=-1):
    """Concatenate *inputs* along *axis*; a single tensor passes through untouched."""
    return inputs[0] if len(inputs) == 1 else torch.cat(inputs, dim=axis)
def slice_arrays(arrays, start=None, stop=None):
    """Slice an array or a list of arrays.

    Given an array-like, or a list of array-likes, returns:
    - ``arrays[start:stop]`` when *arrays* is a single array-like
    - ``[x[start:stop] for x in arrays]`` when *arrays* is a list

    *start* may instead be a list/array of indices (fancy indexing), in which
    case *stop* must be None: ``slice_arrays(x, indices)``.

    Arguments:
        arrays: single array or list of arrays (None yields ``[None]``).
        start: integer start index, or a list/array of indices.
        stop: integer stop index; must be None when *start* is a list.

    Returns:
        The slice of the array(s).

    Raises:
        ValueError: when *start* is a list and *stop* is not None.
    """
    if arrays is None:
        return [None]
    if isinstance(arrays, np.ndarray):
        arrays = [arrays]
    if isinstance(start, list) and stop is not None:
        raise ValueError('The stop argument has to be None if the value of start '
                         'is a list.')
    fancy = hasattr(start, '__len__')
    if fancy and hasattr(start, 'shape'):
        # hdf5 datasets only support plain list objects as indices
        start = start.tolist()
    if isinstance(arrays, list):
        if fancy:
            return [None if x is None else x[start] for x in arrays]
        if len(arrays) == 1:
            return arrays[0][start:stop]
        return [None if x is None else x[start:stop] for x in arrays]
    if fancy:
        return arrays[start]
    if hasattr(start, '__getitem__'):
        return arrays[start:stop]
    return [None]
|
{"/src/deepctr_ext/utils.py": ["/src/deepctr_ext/feat.py", "/src/deepctr_ext/layers.py"]}
|
2,847
|
welloderx/wechat-2021-BigDataChallenge
|
refs/heads/master
|
/src/core/entrypoint.py
|
from core.tasks.deepfm import DeepFM_Manager
from core.tasks.lgb import LightGBM_Manager
class EntryPoint(object):
    """Dispatches a run configuration to the task manager named by cfg.task."""

    def __init__(self, cfg):
        self.cfg = cfg

    def start(self):
        """Instantiate and start the manager registered for cfg.task."""
        task_map = {
            'DeepFM': DeepFM_Manager,
            'LightGBM': LightGBM_Manager,
        }
        if self.cfg.task not in task_map:
            raise ValueError("unknown task name")
        task_map[self.cfg.task](self.cfg).start()
|
{"/src/deepctr_ext/utils.py": ["/src/deepctr_ext/feat.py", "/src/deepctr_ext/layers.py"]}
|
2,848
|
welloderx/wechat-2021-BigDataChallenge
|
refs/heads/master
|
/src/core/tasks/lgb.py
|
"""
LightGBM
"""
import lightgbm as lgb
import pandas
from utils import DecoratorTimer
class LightGBM_Manager(object):
    """Task manager for the LightGBM baseline on the wechat1 dataset.

    NOTE(review): handle_dataset currently only loads the raw CSVs and
    declares column groups; feature building and training are not
    implemented yet, so several locals are intentionally unused.
    """
    model_name = 'LightGBM'
    def __init__(self, cfg):
        self.cfg = cfg
        self.yml_cfg = self.cfg.yml_cfg
        # Task-specific section of the YAML configuration.
        self.model_cfg = self.yml_cfg[self.model_name]
        # Only the wechat1 dataset layout is supported.
        assert self.cfg.dataset_name == 'wechat1'
    @DecoratorTimer()
    def handle_dataset(self):
        """Load the raw wechat1 CSV files and define the column groups."""
        # config
        data_folder_path = self.cfg.data_folder_path
        # columns
        common_columns = ['userid', 'feedid']
        # The four user actions the challenge asks us to predict.
        pred_columns = ['read_comment', 'like', 'click_avatar', 'forward']
        action_columns = ['play', 'stay', 'device', 'date_', 'follow', 'favorite', 'comment']
        feed_columns = [
            'authorid', 'videoplayseconds', 'description', 'ocr', 'asr', 'description_char', 'ocr_char',
            'asr_char', 'bgm_song_id', 'bgm_singer_id', 'manual_keyword_list', 'machine_keyword_list',
            'manual_tag_list', 'machine_tag_list', 'feed_embedding'
        ]
        # feat types
        sparse_feat_names = common_columns + \
            ['follow', 'favorite', 'comment', 'authorid', 'bgm_song_id', 'bgm_singer_id']
        dense_feat_names = ['videoplayseconds', 'play', 'stay']
        # handle
        raw_feed_info = pandas.read_csv(data_folder_path + "/feed_info.csv")
        raw_user_action = pandas.read_csv(data_folder_path + "/user_action.csv")
    def start(self):
        # Entry point invoked by EntryPoint.start().
        self.handle_dataset()
|
{"/src/deepctr_ext/utils.py": ["/src/deepctr_ext/feat.py", "/src/deepctr_ext/layers.py"]}
|
2,849
|
welloderx/wechat-2021-BigDataChallenge
|
refs/heads/master
|
/src/deepctr_ext/layers.py
|
import torch.nn as nn
import torch
class FM(nn.Module):
    """Factorization Machine pairwise (order-2) feature-interaction term.

    No linear term and no bias.

    Input shape
        - 3D tensor ``(batch_size, field_size, embedding_size)``.
    Output shape
        - 2D tensor ``(batch_size, 1)``.
    References
        - [Factorization Machines](https://www.csie.ntu.edu.tw/~b97053/paper/Rendle2010FM.pdf)
    """

    def __init__(self):
        super(FM, self).__init__()

    def forward(self, inputs):
        # 0.5 * sum_k [ (sum_i v_ik)^2 - sum_i v_ik^2 ], summed over the
        # embedding dimension.
        summed = torch.sum(inputs, dim=1, keepdim=True)
        square_of_sum = summed * summed
        sum_of_square = torch.sum(torch.square(inputs), dim=1, keepdim=True)
        return 0.5 * torch.sum(square_of_sum - sum_of_square, dim=2, keepdim=False)
class Identity(nn.Module):
    """No-op module: returns its input unchanged (used for the 'linear' activation)."""
    def __init__(self, **kwargs):
        super(Identity, self).__init__()
    def forward(self, X):
        # Deliberate pass-through.
        return X
def activation_layer(act_name, hidden_size=None, dice_dim=2):
"""Construct activation layers
Args:
act_name: str or nn.Module, name of activation function
hidden_size: int, used for Dice activation
dice_dim: int, used for Dice activation
Return:
act_layer: activation layer
"""
act_layer = None
if isinstance(act_name, str):
if act_name.lower() == 'sigmoid':
act_layer = nn.Sigmoid()
elif act_name.lower() == 'linear':
act_layer = Identity()
elif act_name.lower() == 'relu':
act_layer = nn.ReLU(inplace=True)
elif act_name.lower() == 'prelu':
act_layer = nn.PReLU()
elif issubclass(act_name, nn.Module):
act_layer = act_name()
else:
raise NotImplementedError
return act_layer
class DNN(nn.Module):
    """Multi-layer perceptron.

    Input shape
        - nD tensor ``(batch_size, ..., input_dim)``; most commonly a 2D
          input of shape ``(batch_size, input_dim)``.
    Output shape
        - nD tensor ``(batch_size, ..., hidden_units[-1])``.

    Arguments
        - **inputs_dim**: input feature dimension.
        - **hidden_units**: list of positive ints, layer count and widths.
        - **activation**: activation function to use.
        - **l2_reg**: L2 regularizer strength (stored; applied by the caller).
        - **dropout_rate**: fraction of units to drop, in [0, 1).
        - **use_bn**: apply BatchNormalization before the activation.
        - **seed**: random seed (stored).
    """

    def __init__(self, inputs_dim, hidden_units, activation='relu', l2_reg=0, dropout_rate=0, use_bn=False,
                 init_std=0.0001, dice_dim=3, seed=1024, device='cpu'):
        super(DNN, self).__init__()
        if len(hidden_units) == 0:
            raise ValueError("hidden_units is empty!!")
        self.dropout_rate = dropout_rate
        self.dropout = nn.Dropout(dropout_rate)
        self.seed = seed
        self.l2_reg = l2_reg
        self.use_bn = use_bn
        dims = [inputs_dim] + list(hidden_units)
        layer_pairs = list(zip(dims[:-1], dims[1:]))
        self.linears = nn.ModuleList(
            [nn.Linear(fan_in, fan_out) for fan_in, fan_out in layer_pairs])
        if use_bn:
            self.bn = nn.ModuleList(
                [nn.BatchNorm1d(fan_out) for _, fan_out in layer_pairs])
        self.activation_layers = nn.ModuleList(
            [activation_layer(activation, fan_out, dice_dim)
             for _, fan_out in layer_pairs])
        # Only weights get the custom init; biases keep the Linear default.
        for name, tensor in self.linears.named_parameters():
            if 'weight' in name:
                nn.init.normal_(tensor, mean=0, std=init_std)
        self.to(device)

    def forward(self, inputs):
        hidden = inputs
        for layer_idx, linear in enumerate(self.linears):
            hidden = linear(hidden)
            if self.use_bn:
                hidden = self.bn[layer_idx](hidden)
            hidden = self.activation_layers[layer_idx](hidden)
            hidden = self.dropout(hidden)
        return hidden
class PredictionLayer(nn.Module):
    """Final output transform.

    Arguments
        - **task**: ``"binary"`` (sigmoid output), ``"multiclass"`` or
          ``"regression"`` (raw output).
        - **use_bias**: whether to add a learnable scalar bias term.
    """

    def __init__(self, task='binary', use_bias=True, **kwargs):
        if task not in ["binary", "multiclass", "regression"]:
            raise ValueError("task must be binary,multiclass or regression")
        super(PredictionLayer, self).__init__()
        self.use_bias = use_bias
        self.task = task
        if self.use_bias:
            # Scalar bias, broadcast over the batch.
            self.bias = nn.Parameter(torch.zeros((1,)))

    def forward(self, X):
        # Bug fix: the original used ``output += self.bias`` which mutates the
        # caller's tensor in place (and fails on leaf tensors requiring grad).
        output = X + self.bias if self.use_bias else X
        if self.task == "binary":
            output = torch.sigmoid(output)
        return output
class SequencePoolingLayer(nn.Module):
    """Apply sum/mean/max pooling over a variable-length sequence feature.

    Input shape
        - list ``[seq_value, seq_len]``:
          - seq_value: 3D tensor ``(batch_size, T, embedding_size)``
          - seq_len: 2D tensor ``(batch_size, 1)`` of valid lengths, or a
            boolean mask ``(batch_size, T)`` when ``supports_masking`` is True.
    Output shape
        - 3D tensor ``(batch_size, 1, embedding_size)``.

    Arguments
        - **mode**: pooling operation, one of 'sum', 'mean', 'max'.
    """

    def __init__(self, mode='mean', supports_masking=False, device='cpu'):
        super(SequencePoolingLayer, self).__init__()
        if mode not in ['sum', 'mean', 'max']:
            raise ValueError('parameter mode should in [sum, mean, max]')
        self.supports_masking = supports_masking
        self.mode = mode
        self.device = device
        # Guards against division by zero for empty sequences in 'mean'.
        self.eps = torch.FloatTensor([1e-8]).to(device)
        self.to(device)

    def _sequence_mask(self, lengths, maxlen=None, dtype=torch.bool):
        # Mask tensor marking the first N valid positions of each row.
        if maxlen is None:
            maxlen = lengths.max()
        row_vector = torch.arange(0, maxlen, 1).to(lengths.device)
        matrix = torch.unsqueeze(lengths, dim=-1)
        mask = row_vector < matrix
        # Bug fix: the original called mask.type(dtype) but discarded the
        # result, leaking a bool mask that made ``1 - mask`` fail in 'max'.
        return mask.type(dtype)

    def forward(self, seq_value_len_list):
        if self.supports_masking:
            uiseq_embed_list, mask = seq_value_len_list  # [B, T, E], [B, T]
            mask = mask.float()
            user_behavior_length = torch.sum(mask, dim=-1, keepdim=True)
            mask = mask.unsqueeze(2)
        else:
            uiseq_embed_list, user_behavior_length = seq_value_len_list  # [B, T, E], [B, 1]
            mask = self._sequence_mask(user_behavior_length, maxlen=uiseq_embed_list.shape[1],
                                       dtype=torch.float32)  # [B, 1, maxlen]
            mask = torch.transpose(mask, 1, 2)  # [B, maxlen, 1]
        embedding_size = uiseq_embed_list.shape[-1]
        mask = torch.repeat_interleave(mask, embedding_size, dim=2)  # [B, maxlen, E]
        if self.mode == 'max':
            # Push padded positions far below any real value before the max.
            hist = uiseq_embed_list - (1 - mask) * 1e9
            hist = torch.max(hist, dim=1, keepdim=True)[0]
            return hist
        hist = uiseq_embed_list * mask.float()
        hist = torch.sum(hist, dim=1, keepdim=False)
        if self.mode == 'mean':
            self.eps = self.eps.to(user_behavior_length.device)
            hist = torch.div(hist, user_behavior_length.type(torch.float32) + self.eps)
        hist = torch.unsqueeze(hist, dim=1)
        return hist
|
{"/src/deepctr_ext/utils.py": ["/src/deepctr_ext/feat.py", "/src/deepctr_ext/layers.py"]}
|
2,850
|
welloderx/wechat-2021-BigDataChallenge
|
refs/heads/master
|
/src/main.py
|
from utils import UnionConfig, LoggerUtil, DecoratorTimer, PathUtil, add_argument_from_dict_format
from conf import settings
from core.entrypoint import EntryPoint
import os
import argparse
import logging
import shutil
import traceback
import copy
import sys
registered_task_list = ['DeepFM', 'LightGBM']
def get_config_object_and_parse_args():
    """Build the merged run configuration from settings.py, CLI args and YAML.

    sys.argv is parsed twice: first for the dataset/task selection, then
    again with flags generated from the chosen task's YAML section so that
    the CLI can override individual model parameters.
    Returns the merged UnionConfig object.
    """
    # first time resolve sys.argv
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset_name', type=str, default='wechat1', help='dataset name')
    parser.add_argument('--task', type=str, default='LightGBM',
                        choices=registered_task_list,
                        help='task_name: {}'.format(registered_task_list))
    args, unknown_args = parser.parse_known_args()
    config = UnionConfig.from_py_module(settings)  # get config from settings.py
    config.merge_asdict(args.__dict__)  # merge config from argparse
    yml_cfg = UnionConfig.from_yml_file(
        config.CONFIG_FOLDER_PATH + "/datasets/{}.yml".format(args.dataset_name)
    )  # get config from {dataset_name}.yml
    # Drop the YAML sections belonging to the tasks we are NOT running.
    tasks = copy.copy(registered_task_list)
    tasks.remove(config.task)
    [yml_cfg.__delitem__(task) for task in tasks if task in yml_cfg.keys()]
    config.yml_cfg = yml_cfg
    # second time resolve sys.argv: expose the task's YAML keys as CLI flags
    model_cfg = yml_cfg[config.task]
    parser2 = add_argument_from_dict_format(model_cfg, filter_keys=list(args.__dict__.keys()))
    args2 = parser2.parse_args(unknown_args)
    # CLI-supplied values override the YAML defaults.
    for key in model_cfg.keys():
        if key in args2.__dict__:
            model_cfg[key] = args2.__dict__[key]
    return config
def init_all(cfg: UnionConfig):
    """Resolve per-dataset/per-task folder paths, create them, set up logging.

    Mutates *cfg* in place: adds data/tmpout/output folder paths and a
    configured logger.  Raises ValueError for an unregistered task name.
    """
    cfg.data_folder_path = cfg.DATA_FOLDER_PATH + "/{}".format(cfg.dataset_name)
    cfg.TMPOUT_FOLDER_PATH += "/{}".format(cfg.dataset_name)
    cfg.OUTPUT_FOLDER_PATH += "/{}".format(cfg.dataset_name)
    cfg.TMPOUT_FOLDER_PATH = os.path.realpath(cfg.TMPOUT_FOLDER_PATH)
    cfg.OUTPUT_FOLDER_PATH = os.path.realpath(cfg.OUTPUT_FOLDER_PATH)
    PathUtil.check_path_exist(cfg.data_folder_path)
    if cfg.task in registered_task_list:
        # tmpout holds this run's artifacts; __main__ promotes it to the
        # output folder only after a successful run.
        cfg.tmpout_folder_path = cfg.TMPOUT_FOLDER_PATH + "/{}/{}".format(cfg.task, cfg.ID)
        cfg.output_folder_path = cfg.OUTPUT_FOLDER_PATH + "/{}".format(cfg.task)
        PathUtil.auto_create_folder_path(
            cfg.tmpout_folder_path,
            cfg.output_folder_path
        )
    else:
        raise ValueError("unknown task name")
    log_filepath = cfg.tmpout_folder_path + "/{ID}.log".format(ID=cfg.ID)
    cfg.logger = LoggerUtil(logfile=log_filepath, disableFile=False).get_logger()
    # Timers log through this run's logger from now on.
    DecoratorTimer.logger = cfg.logger
def main(config):
    """Log the run header, dump the config and hand off to the task EntryPoint."""
    config.logger.info("====" * 15)
    config.logger.info("[ID]: " + config.ID)
    config.logger.info("[DATASET]: " + config.dataset_name)
    config.logger.info("[TASK]: " + config.task)
    config.logger.info("[ARGV]: {}".format(sys.argv))
    config.logger.info("[ALL_CFG]: \n" + config.dump_fmt())
    # Persist the fully-merged config next to the run's artifacts.
    config.dump_file(config.tmpout_folder_path + "/" + "config.json")
    config.logger.info("====" * 15)
    entrypoint = EntryPoint(config)
    entrypoint.start()
    config.logger.info("Task Completed!")
if __name__ == '__main__':
    config = get_config_object_and_parse_args()
    init_all(config)  # init config
    try:
        main(config)
        logging.shutdown()
        # A successful run's tmpout folder is promoted to the output folder.
        shutil.move(config.tmpout_folder_path, config.output_folder_path)
    except Exception as e:
        config.logger.error(traceback.format_exc())
        raise e
|
{"/src/deepctr_ext/utils.py": ["/src/deepctr_ext/feat.py", "/src/deepctr_ext/layers.py"]}
|
2,851
|
welloderx/wechat-2021-BigDataChallenge
|
refs/heads/master
|
/src/deepctr_ext/feat.py
|
from collections import namedtuple
class SparseFeat(namedtuple('SparseFeat',
                            ['name', 'vocabulary_size', 'embedding_dim', 'use_hash', 'dtype', 'embedding_name'])):
    """Describes a single-valued categorical feature column."""
    __slots__ = ()

    def __new__(cls, name, vocabulary_size, embedding_dim=4, use_hash=False, dtype="int32", embedding_name=None):
        # The embedding table defaults to the feature's own name.
        embedding_name = name if embedding_name is None else embedding_name
        # "auto" derives a dimension from the vocabulary size: 6 * |V|^0.25.
        if embedding_dim == "auto":
            embedding_dim = 6 * int(pow(vocabulary_size, 0.25))
        if use_hash:
            print(
                "Notice! Feature Hashing on the fly currently is not supported in torch version,you can use tensorflow version!")
        return super(SparseFeat, cls).__new__(cls, name, vocabulary_size, embedding_dim, use_hash, dtype,
                                              embedding_name)

    def __hash__(self):
        # Features are identified by name alone.
        return self.name.__hash__()
class VarLenSparseFeat(namedtuple('VarLenSparseFeat',
                                  ['sparsefeat', 'maxlen', 'combiner', 'length_name'])):
    """Describes a multi-valued (variable-length) categorical feature column.

    Wraps a SparseFeat and adds the padded sequence length, the pooling
    combiner and an optional companion length-feature name; scalar
    attributes are delegated to the wrapped SparseFeat.
    """
    __slots__ = ()

    def __new__(cls, sparsefeat, maxlen, combiner="mean", length_name=None):
        return super(VarLenSparseFeat, cls).__new__(cls, sparsefeat, maxlen, combiner, length_name)

    @property
    def name(self):
        return self.sparsefeat.name

    @property
    def vocabulary_size(self):
        return self.sparsefeat.vocabulary_size

    @property
    def embedding_dim(self):
        return self.sparsefeat.embedding_dim

    @property
    def dtype(self):
        return self.sparsefeat.dtype

    @property
    def embedding_name(self):
        return self.sparsefeat.embedding_name

    @property
    def group_name(self):
        # Bug fix: this project's SparseFeat defines no group_name field, so
        # delegating unconditionally raised AttributeError.  Fall back to
        # deepctr's default group name when the wrapped feature lacks one.
        return getattr(self.sparsefeat, 'group_name', 'default_group')

    def __hash__(self):
        # Identified by the wrapped feature's name.
        return self.name.__hash__()
class DenseFeat(namedtuple('DenseFeat', ['name', 'dimension', 'dtype'])):
    """Describes a dense (numeric) feature column spanning *dimension* values."""
    __slots__ = ()
    def __new__(cls, name, dimension=1, dtype="float32"):
        return super(DenseFeat, cls).__new__(cls, name, dimension, dtype)
    def __hash__(self):
        # Features are identified by name alone, mirroring the sparse types.
        return self.name.__hash__()
|
{"/src/deepctr_ext/utils.py": ["/src/deepctr_ext/feat.py", "/src/deepctr_ext/layers.py"]}
|
2,860
|
jeeva-srinivasan/sentimentHeroku
|
refs/heads/main
|
/app.py
|
from flask import Flask,render_template,request
import pickle
from predict import predict
# Load the pre-computed recommendation matrix once at startup.
# Fix: close the pickle file via a context manager instead of leaking the
# handle opened inline.
with open("recom_engine_cosine.pickle", "rb") as fp:
    recom_df = pickle.load(fp)
app = Flask(__name__)
@app.route("/", methods=["POST", "GET"])
def home():
    """Render the landing page; on POST, look up recommendations for a user."""
    if request.method != "POST":
        return render_template('base.html')
    user_name = request.form.get("userName").lower().strip()
    if not user_name:
        return render_template('base.html') + 'Please enter a user name'
    if user_name not in recom_df.index:
        return render_template('base.html') + 'Please enter a valid user name'
    result_df = predict(user_name, recom_df)
    return render_template('home.html', predict=result_df.head(5), user=user_name)
if __name__ == "__main__":
    # Development server only; use a WSGI server in production.
    app.run(debug=True)
|
{"/app.py": ["/predict.py"]}
|
2,861
|
jeeva-srinivasan/sentimentHeroku
|
refs/heads/main
|
/predict.py
|
import pandas as pd
import time
def predict(user_name,recom_df):
    """Return the products among *user_name*'s top-20 recommendations.

    Takes the 20 highest-scoring entries of the user's row in *recom_df*
    and filters the preprocessed product table down to those products.
    """
    # NOTE(review): re-reads the CSV on every call; consider loading it once
    # at startup like the recommendation matrix.
    predict_df=pd.read_csv('preprocessing_sample30.csv',index_col='Product')
    dataframe_df=predict_df[predict_df.index.isin(recom_df.loc[user_name].sort_values(ascending=False)[0:20].index)]
    # NOTE(review): this looks like a demo artifact — it delays every request
    # by 6 seconds; confirm whether it can be removed.
    time.sleep(6)
    return dataframe_df
|
{"/app.py": ["/predict.py"]}
|
2,862
|
SwannSG/womansSheltersZApython
|
refs/heads/master
|
/geoJsonAddPropName.py
|
"""
geoJsonAddPropName.py
feature.properties = {key_1: value_1, ...}
add new properties {key_N: value_N} for wardId=NNNNNNN
feature.properties = {key_1: value_1, ..., key_N: value:N}
ADD_PROP = {wardId: {key_1: value_1, ...}
additional key-value pairs will be added to
feature.properties where feature.properties.wardId = wardId
"""
import json
import pickle
import pprint
# Input GeoJSON file updated in place, and the pickled
# {wardId: {key: value, ...}} property map merged into it.
SRC_FILE = '/home/swannsg/development/womansSheleterPy/data/geoJson/WC/merge/WCmergedTest.geojson'
PKL = '/home/swannsg/development/womansSheleterPy/data/femalePopulationFromKirsty/female18-120.pkl'
def add():
    """Merge per-ward properties from PKL into matching features of SRC_FILE.

    For every GeoJSON feature whose properties.WardID appears in the pickled
    mapping, the mapped key/value pairs are merged into that feature's
    properties; the GeoJSON file is then rewritten in place.
    """
    # Fix: use context managers so the three file handles are always closed,
    # even when loading or dumping raises.
    with open(PKL, 'rb') as fp:
        add_prop = pickle.load(fp)
    with open(SRC_FILE, 'r') as fp:
        geo = json.load(fp)
    for feature in geo['features']:
        feature_properties = feature['properties']
        ward_id = feature_properties['WardID']
        if ward_id in add_prop:
            feature_properties.update(add_prop[ward_id])
        feature['properties'] = feature_properties
    # show result
    #for each in geo['features']:
    #    pprint.pprint(each['properties'])
    with open(SRC_FILE, 'w') as fp:
        json.dump(geo, fp)
|
{"/automate.py": ["/geoJsonAddPropName.py", "/geoJsonChgPropName.py", "/geoJsonDelPropName.py"], "/multiFilesKmlToJson.py": ["/kmlToJson.py", "/mergeGeoJsonFiles.py"]}
|
2,863
|
SwannSG/womansSheltersZApython
|
refs/heads/master
|
/analyseWardPop.py
|
"""
analyse ward population
"""
import pprint
import pickle
# Pickled {wardId: population} mapping produced by wardPopulation.py.
file = '/home/swannsg/development/womansSheleterPy/data/femalePopulationFromKirsty/wardPop.pkl'

with open(file, 'rb') as handle:
    wardPops = pickle.load(handle)

# Sorted list of per-ward populations.
populations = sorted(int(wardPops[ward]) for ward in wardPops)

# Histogram over five buckets of width bin_size:
#   bucket 0: pop <= bin_size, ..., bucket 4: pop > 4*bin_size
bin_size = 2000  # ok (3500 was too coarse)

counts = [0, 0, 0, 0, 0]
for pop in populations:
    # (pop - 1) // bin_size maps (k*bin_size, (k+1)*bin_size] to bucket k;
    # clamp into [0, 4] so everything above 4*bin_size lands in the last bucket.
    counts[min(max((pop - 1) // bin_size, 0), 4)] += 1

print(counts[0], counts[1], counts[2], counts[3], counts[4])
|
{"/automate.py": ["/geoJsonAddPropName.py", "/geoJsonChgPropName.py", "/geoJsonDelPropName.py"], "/multiFilesKmlToJson.py": ["/kmlToJson.py", "/mergeGeoJsonFiles.py"]}
|
2,864
|
SwannSG/womansSheltersZApython
|
refs/heads/master
|
/wardPopulation.py
|
"""
Statistics South Africa
Descriptive_Electoral_Wards
Table 1
Geography by Gender
for Person weighted
,"Male","Female","Grand Total"
"21001001: Ward 1",4242,4500,8742
National data is mapped to hash map (dict) called 'result'
key: value
wardId: #females
21001001: 4500
'result' is pickled
"""
import pickle
# Stats SA export: "Geography by Gender" CSV (see module docstring for layout).
filename = '/home/swannsg/development/womansSheleterPy/data/femalePopulationFromKirsty/South African population data by most detailed wards and gender.csv'
# Output pickle: {wardId: #females}.
pkl = '/home/swannsg/development/womansSheleterPy/data/femalePopulationFromKirsty/wardPop.pkl'

result = {}
with open(filename, 'r') as src:
    in_table = False
    for line in src:
        # Data rows start right after the column-header line.
        if line == ',"Male","Female","Grand Total"\n':
            in_table = True
            continue
        if not in_table:
            continue
        ward, male, female, total = line.split(',')
        # The totals row terminates the table.
        if ward == '"Grand Total"':
            break
        ward = ward.replace('"', '')
        # '"21001001: Ward 1"' -> key '21001001', value = female count.
        result[ward.split(':')[0]] = int(female)

with open(pkl, 'wb') as out:
    pickle.dump(result, out)
|
{"/automate.py": ["/geoJsonAddPropName.py", "/geoJsonChgPropName.py", "/geoJsonDelPropName.py"], "/multiFilesKmlToJson.py": ["/kmlToJson.py", "/mergeGeoJsonFiles.py"]}
|
2,865
|
SwannSG/womansSheltersZApython
|
refs/heads/master
|
/mergeGeoJsonFiles.py
|
"""
Merge ZA ward geoJson files into one output file
"""
import pprint
import json
# global settings
# ---temporary working directory
TEMP_WDIR = '/home/swannsg/development/womansSheleterPy/temp'
DST_FILENAME = 'merge.geojson'
# end global settings

srcFiles = [
    '/home/swannsg/development/womansSheleterPy/data/geoJson/WC/WC021.geojson',
    '/home/swannsg/development/womansSheleterPy/data/geoJson/WC/WC052.geojson'
]


def mergeGeoJsonFiles(srcFiles, dstFile=None):
    """Merge several GeoJSON FeatureCollection files into one output file.

    srcFiles: list of fully-qualified filenames to merge; their 'features'
              lists are concatenated in order and their names joined.
    dstFile:  output path; defaults to TEMP_WDIR/DST_FILENAME, resolved
              lazily at call time (the original evaluated it at def time).
    """
    if dstFile is None:
        dstFile = TEMP_WDIR + '/' + DST_FILENAME
    pprint.pprint(srcFiles)
    result = {'type': 'FeatureCollection', 'name': '', 'features': []}
    for path in srcFiles:
        # 'with' closes each source even if json.load raises.
        with open(path, 'r') as fp:
            part = json.load(fp)
        result['name'] = result['name'] + ' ' + part['name']
        result['features'] = result['features'] + part['features']
    # BUG FIX: str.strip() returns a NEW string; the original discarded the
    # return value, leaving a leading space in the merged name.
    result['name'] = result['name'].strip()
    # dict 'result' to json
    with open(dstFile, 'w') as fp:
        json.dump(result, fp)
|
{"/automate.py": ["/geoJsonAddPropName.py", "/geoJsonChgPropName.py", "/geoJsonDelPropName.py"], "/multiFilesKmlToJson.py": ["/kmlToJson.py", "/mergeGeoJsonFiles.py"]}
|
2,866
|
SwannSG/womansSheltersZApython
|
refs/heads/master
|
/geoJsonChgPropName.py
|
"""
geoJsonChgPropName.py

Rename keys inside each GeoJSON feature's properties dict.

feature.properties = {key_1: value_1, ...}
Change key_oldName to key_newName, keeping the value the same.
An existing key_newName value will be overwritten.

CHANGE_PROP_NAME = [(oldName, newName), ...]
"""
import json
import pickle
import pprint

# GeoJSON file that chg() rewrites in place.
SRC_FILE = '/home/swannsg/development/womansSheleterPy/data/geoJson/WC/merge/WCmergedTest.geojson'
# (old property name, new property name) pairs applied to every feature.
CHANGE_PROP_NAME = [('Province', 'Pr'), ('MunicName', 'Mn')]
def chg(src_file=None, change_prop_name=None):
    """Rename property keys on every feature of a GeoJSON file, in place.

    src_file:         GeoJSON file to read, rewrite and save back
                      (defaults to the module-level SRC_FILE).
    change_prop_name: iterable of (old_key, new_key) pairs
                      (defaults to the module-level CHANGE_PROP_NAME).

    An existing new_key value is overwritten; features lacking old_key
    are left untouched. Parameters default to the module constants so
    existing chg() callers are unaffected, while tests/other callers can
    supply their own paths and mappings.
    """
    if src_file is None:
        src_file = SRC_FILE
    if change_prop_name is None:
        change_prop_name = CHANGE_PROP_NAME
    # 'with' guarantees the file is closed even if parsing fails
    # (the original left the handle open on error).
    with open(src_file, 'r') as fp:
        data = json.load(fp)
    for feature in data['features']:
        props = feature['properties']
        for key_old, key_new in change_prop_name:
            if key_old in props:
                # pop + reinsert moves the value under the new key.
                props[key_new] = props.pop(key_old)
    with open(src_file, 'w') as fp:
        json.dump(data, fp)
|
{"/automate.py": ["/geoJsonAddPropName.py", "/geoJsonChgPropName.py", "/geoJsonDelPropName.py"], "/multiFilesKmlToJson.py": ["/kmlToJson.py", "/mergeGeoJsonFiles.py"]}
|
2,867
|
SwannSG/womansSheltersZApython
|
refs/heads/master
|
/view_mfp.py
|
"""
View missing female populations for specific wardId
"""
import pprint
import pickle
# Pickle holding the list of wardIds that had no female-population entry
# (written by kmlToJson.py).
MFP = '/home/swannsg/development/womansSheleterPy/data/femalePopulationFromKirsty/mfp.pkl'

with open(MFP, 'rb') as handle:
    missing_wards = pickle.load(handle)
pprint.pprint(missing_wards)
|
{"/automate.py": ["/geoJsonAddPropName.py", "/geoJsonChgPropName.py", "/geoJsonDelPropName.py"], "/multiFilesKmlToJson.py": ["/kmlToJson.py", "/mergeGeoJsonFiles.py"]}
|
2,868
|
SwannSG/womansSheltersZApython
|
refs/heads/master
|
/kmlToJson.py
|
"""
kml to geojson
shapefiles WC
handles all files
does not handle WC.kml format ????
Read population statistics at the same time
feature.properties.woman = #woman
see wardPopulation.py
Still needed/ to be checked:
can we minimise the file further eg. drop 3rd coord
Questions
do we merge all these files into one provincial file,
or national file ?
missing female populations for certain wardIds - why ?
"""
import kml2geojson as kml
import json
from bs4 import BeautifulSoup as bs
import pickle
import os
import ntpath
# global settings
# ---temporary working directory for kml2geojson's interim output
temp_wdir = '/home/swannsg/development/womansSheleterPy/temp'
# ---pickled {wardId: #females}, merged into feature.properties.females
#    (produced by wardPopulation.py)
PKL = '/home/swannsg/development/womansSheleterPy/data/femalePopulationFromKirsty/wardPop.pkl'
# ---pickled list of wardIds with a missing female population (MFP)
MFP = '/home/swannsg/development/womansSheleterPy/data/femalePopulationFromKirsty/mfp.pkl'
# end global settings
# load female population
fp = open(PKL, 'rb')
females = pickle.load(fp)
fp.close()
# end load female population
# load wardIds with missing female population; start with an empty list
# on the first run, before the pickle exists
if os.path.isfile(MFP):
    # missing-female-population pickle file exists
    fp = open(MFP, 'rb')
    mfp = pickle.load(fp)
    fp.close()
else:
    mfp = []
# end load wardIds with missing female population
def parse_update(descHTML, ref):
    """Parse a KML description HTML table into the dict *ref*, in place.

    descHTML: HTML fragment whose <tr> rows each read "key: value".
    ref:      dict updated with one entry per table row.
    """
    soup = bs(descHTML, 'html.parser')
    for row in soup.findAll('tr'):
        # BUG FIX: maxsplit=1 — a value containing ':' previously made the
        # two-name unpack raise ValueError (too many values to unpack).
        key, value = row.text.split(':', 1)
        ref[key] = value
def runKmlToJson(srcFile, dstDir):
    """Convert one KML ward file to a cleaned, minimised GeoJSON file.

    srcFile: input .kml file to process.
    dstDir:  final destination directory; the output filename is
             '<converted name>.geojson'.

    Side effects: merges the module-level 'females' population data into
    feature.properties.females, appends unknown wardIds to the module-level
    'mfp' list, and re-pickles that list to MFP.
    """
    # convert to GeoJSON; kml2geojson writes
    # <srcFile basename without extension>.geojson into temp_wdir
    kml.main.convert(srcFile, temp_wdir)
    infer_filename = ntpath.basename(srcFile).split('.')[0] + '.geojson'
    print (infer_filename)
    # read, then delete, the interim geojson file
    interim = temp_wdir + '/' + infer_filename
    with open(interim) as fp:
        x = json.load(fp)
    os.remove(interim)
    # clean & minimise: rebuild each feature with only what we keep
    result = {'type': x['type'], 'name': x['name'], 'features': []}
    # feature.properties keys that are not required downstream
    # (hoisted out of the loop — it is loop-invariant)
    DEL_KEYS = ['CAT_B', 'MapCode', 'OBJECTID', 'Shape_Area', 'Shape_Leng',
                'WardNo', 'name', 'shpFID']
    for each in x['features']:
        # initialise feature
        feature = {'type': each['type'], 'geometry': {}, 'properties': {}}
        # properties come from the KML description HTML table
        feature['properties']['name'] = each['properties']['name']
        parse_update(each['properties']['description'], feature['properties'])
        if each['geometry']['type'] == 'GeometryCollection':
            feature['geometry']['type'] = each['geometry']['type']
            feature['geometry']['geometries'] = each['geometry']['geometries']
        else:
            # TODO: consider dropping the 3rd coordinate to shrink the output
            feature['geometry']['coordinates'] = each['geometry']['coordinates']
            feature['geometry']['type'] = each['geometry']['type']
        # remove feature.properties keys that are not required
        for item in DEL_KEYS:
            del feature['properties'][item]
        # add external feature.properties.females
        ward_id = feature['properties']['WardID']
        if ward_id in females:
            feature['properties']['females'] = females[ward_id]
        else:
            # record each missing wardId once; this membership test replaces
            # the original try/except around list.index (index never returns
            # -1 — it raises ValueError when absent)
            if ward_id not in mfp:
                mfp.append(ward_id)
            # WARNING: arbitrarily sets feature.properties.females to zero
            feature['properties']['females'] = 0
        # only keep Polygon / GeometryCollection features
        if feature['geometry']['type'] in ('Polygon', 'GeometryCollection'):
            result['features'].append(feature)
    # dict 'result' to json ('with' closes the file even on a dump error)
    with open(dstDir + '/' + result['name'] + '.geojson', 'w') as fp:
        json.dump(result, fp)
    # persist the updated missing-female-population list
    with open(MFP, 'wb') as fp:
        pickle.dump(mfp, fp)
|
{"/automate.py": ["/geoJsonAddPropName.py", "/geoJsonChgPropName.py", "/geoJsonDelPropName.py"], "/multiFilesKmlToJson.py": ["/kmlToJson.py", "/mergeGeoJsonFiles.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.