code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import random
import time
import json
import hashlib
import requests
def generateNonceStr():
    """Return a random 16-character nonce drawn without replacement from [0-9a-zA-Z]."""
    alphabet = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    picked = random.sample(alphabet, 16)
    return "".join(picked)
# def change_type(byte):
# if isinstance(byte, bytes):
# return str(byte, encoding="utf-8")
# return json.JSONEncoder.default(byte)
def SubmitMessageRequest(sm):
    """POST the signed message payload to the launch.im message API.

    Args:
        sm: dict payload (push_id, nonce, timestamp, message, sign).

    Returns:
        The decoded JSON response, or None when the request failed.
    """
    # Compact, key-sorted JSON with all whitespace stripped, matching the
    # form that was signed in Message.sign().
    json_str = json.dumps(sm, sort_keys=True, indent=4, separators=(',', ':')).replace("\n", "").replace(
        " ", "")
    headers = {'content-type': 'application/json'}
    try:
        resp = requests.post(
            "https://api.msg.launch.im/message",
            data=json_str,
            headers=headers,
            timeout=10,  # don't hang forever on an unreachable API
        )
    except requests.exceptions.RequestException as err:
        # Bug fix: the original caught the builtin ConnectionError (which
        # requests does not raise) and then fell through to use the unbound
        # `resp`, producing a NameError on any network failure.
        print(err)
        return None
    # examine response
    data = json.loads(resp.content)
    print(data)
    return data
class Message:
    """A push message (title, type, content, optional group) that can be
    validated, signed with a push secret, and submitted to the API.

    Bug fix: ``msg_dict``/``p``/``sm`` were previously *class* attributes, so
    every ``Message`` instance shared (and overwrote) the same dictionaries.
    They are now created per instance in ``__init__``.
    """

    def __init__(self, title, msg_type, content, group=""):
        self.msg_dict = {}
        self.p = {}
        self.sm = {}
        self.msg_dict['title'] = title
        self.msg_dict['msg_type'] = msg_type
        self.msg_dict['content'] = content
        if len(group) != 0:
            self.msg_dict['group'] = group

    def check(self):
        """Validate field lengths and the message type; raise on any violation."""
        if len(self.msg_dict["title"]) > 100 or len(self.msg_dict["title"]) < 1:
            raise Exception("title count error")
        elif len(self.msg_dict["content"]) > 4000 or len(self.msg_dict["content"]) < 1:
            raise Exception("content count error")
        elif "group" in self.msg_dict and len(self.msg_dict["group"]) > 20:
            raise Exception("group count error")
        elif self.msg_dict["msg_type"] < 0 or self.msg_dict["msg_type"] > 4:
            raise Exception("msg type error")

    def sign(self, push_secret):
        """Return the SHA-256 hex digest over ``k=v`` pairs of ``self.p``
        (sorted by key, empty values skipped) followed by ``secret=<push_secret>``.
        """
        parts = []
        for key in sorted(self.p.keys()):
            if len(self.p[key]) == 0:
                continue  # empty values are excluded from the signed string
            parts.append(key + "=" + self.p[key])
        parts.append("secret=" + push_secret)
        sign_str = "&".join(parts)
        return hashlib.sha256(sign_str.encode("utf-8")).hexdigest()

    def send_message(self, push_id, push_secret):
        """Validate, sign and submit this message via SubmitMessageRequest."""
        timestamp = int(time.time())
        nonceStr = generateNonceStr()
        self.check()
        # Compact JSON of the message body; this exact string is what gets signed.
        msgJson = json.dumps(self.msg_dict, indent=4, separators=(',', ':'), ensure_ascii=False).replace("\n",
                                                                                               "").replace(
            " ", "")
        self.p = {"push_id": push_id, "nonce": nonceStr, "timestamp": str(timestamp),
                  "message": msgJson}
        signStr = self.sign(push_secret)
        self.sm = {"push_id": push_id, "nonce": nonceStr, "timestamp": timestamp,
                   "message": self.msg_dict, "sign": signStr}
        SubmitMessageRequest(self.sm)
if __name__ == '__main__':
    # Smoke test: build a sample message and push it with placeholder credentials.
    try:
        demo = Message(title="test title", msg_type=0, content="test content", group="test group")
        demo.send_message("your_id", "your_secret")
    except Exception as exc:
        print(exc)
| [
"random.sample",
"json.loads",
"requests.post",
"json.dumps",
"time.time"
] | [((773, 797), 'json.loads', 'json.loads', (['resp.content'], {}), '(resp.content)\n', (783, 797), False, 'import json\n'), ((114, 201), 'random.sample', 'random.sample', (['"""0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"""', '(16)'], {}), "('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ',\n 16)\n", (127, 201), False, 'import random\n'), ((580, 667), 'requests.post', 'requests.post', (['"""https://api.msg.launch.im/message"""'], {'data': 'json_str', 'headers': 'headers'}), "('https://api.msg.launch.im/message', data=json_str, headers=\n headers)\n", (593, 667), False, 'import requests\n'), ((2095, 2106), 'time.time', 'time.time', ([], {}), '()\n', (2104, 2106), False, 'import time\n'), ((396, 459), 'json.dumps', 'json.dumps', (['sm'], {'sort_keys': '(True)', 'indent': '(4)', 'separators': "(',', ':')"}), "(sm, sort_keys=True, indent=4, separators=(',', ':'))\n", (406, 459), False, 'import json\n'), ((2186, 2264), 'json.dumps', 'json.dumps', (['self.msg_dict'], {'indent': '(4)', 'separators': "(',', ':')", 'ensure_ascii': '(False)'}), "(self.msg_dict, indent=4, separators=(',', ':'), ensure_ascii=False)\n", (2196, 2264), False, 'import json\n')] |
'''
@version: Python 3.7.3
@Author: Louis
@Date: 2020-06-15 13:27:40
LastEditors: Louis
LastEditTime: 2020-08-19 15:06:25
'''
import os
import sys
from enum import Enum
from datetime import datetime

# Take a single timestamp so every derived constant refers to the same instant.
# (The original called datetime.now() once per constant, which could straddle a
# second/day boundary and leave e.g. TODAY and NOW_OBJ inconsistent.)
NOW_OBJ = datetime.now()
TODAY_OBJ = NOW_OBJ.date()
TIME_OBJ = NOW_OBJ.time()
TODAY = NOW_OBJ.strftime("%Y%m%d")
TODAY_ = NOW_OBJ.strftime("%Y-%m-%d")
TIME = NOW_OBJ.strftime("%H:%M:%S")
NOW = NOW_OBJ.strftime("%Y-%m-%d %H:%M:%S")
# Column separator for each supported spreadsheet/CSV extension.
SEP_MAP = {"xls": "\t", "XLS": "\t", "CSV": ",", "csv": ",", "xlsx": "\t", "XLSX": "\t"}
EQUITY_TRADE_DAYS_PATH = "/dat/all/Equity/Wind/Daily/list/tradedays.csv"
FUTURE_TRADE_DAYS_PATH = "/dat/all/Future/WIND/list/tradedays.csv"
# Exchange code -> ticker suffix.
MARKET_MAP = {"SZSE": "SZ", "SSE": "SH", "SZE": "SZ"}
| [
"datetime.datetime.now"
] | [((460, 474), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (472, 474), False, 'from datetime import datetime\n'), ((209, 223), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (221, 223), False, 'from datetime import datetime\n'), ((252, 266), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (264, 266), False, 'from datetime import datetime\n'), ((295, 309), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (307, 309), False, 'from datetime import datetime\n'), ((337, 351), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (349, 351), False, 'from datetime import datetime\n'), ((395, 409), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (407, 409), False, 'from datetime import datetime\n'), ((428, 442), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (440, 442), False, 'from datetime import datetime\n')] |
import datetime
import json
import os
import pathlib
import uuid
from django.conf import settings
from django.db import models
from django_celery_results.models import TaskResult
class Country(models.Model):
    """Country lookup table: name, ISO 3166-1 alpha-2 code and continent."""

    class Meta:
        db_table = "country"
        ordering = ("country",)

    country = models.CharField(max_length=100, unique=True, help_text="Country name.")
    alpha2 = models.CharField(
        max_length=3, unique=True, help_text="ISO 3166-1 Alpha-2 Code"
    )
    continent = models.CharField(max_length=50, help_text="Continent associated.")

    def __str__(self):
        # Display the country name in string contexts (e.g. admin lists).
        return f"{self.country}"

    def __repr__(self):
        return self.__str__()
class DatabaseType(models.Model):
    """Distinct data-source types; rows are auto-created by DataSource.save()."""

    class Meta:
        db_table = "database_type"

    type = models.CharField(
        max_length=100, unique=True, help_text="Defines the database type."
    )

    def __str__(self):
        return self.type

    def __repr__(self):
        return self.__str__()
def hash_generator():
    """Default factory for DataSource.hash: a random 32-character hex token."""
    token = uuid.uuid4()
    return token.hex
# Not following the relational rules in the database_type field, but it will simplify the SQL queries in the SQL Lab
class DataSource(models.Model):
    """A registered data source with location and release metadata.

    ``database_type`` is deliberately stored as a plain string rather than a
    foreign key to DatabaseType (to simplify queries in the SQL Lab);
    ``save()`` keeps the DatabaseType table in sync instead.
    """

    class Meta:
        db_table = "data_source"

    name = models.CharField(
        max_length=100, unique=True, help_text="Name of the data source."
    )
    acronym = models.CharField(
        max_length=50,
        unique=True,
        help_text="Short label for the data source, containing only letters, numbers, underscores or hyphens.",
    )
    # Random 32-char hex token (see hash_generator); used to build the upload
    # directory paths for this data source.
    hash = models.CharField(
        blank=True,
        default=hash_generator,
        max_length=255,
        null=False,
        unique=True,
    )
    release_date = models.CharField(
        max_length=50,
        help_text="Date at which DB is available for research for current release.",
        null=True,
        blank=True,
    )
    database_type = models.CharField(
        max_length=100, help_text="Type of the data source. You can create a new type."
    )
    country = models.ForeignKey(
        Country,
        on_delete=models.SET_NULL,
        null=True,
        help_text="Country where the data source is located.",
    )
    latitude = models.FloatField()
    longitude = models.FloatField()
    link = models.URLField(help_text="Link to home page of the data source", blank=True)
    draft = models.BooleanField(default=True)

    def save(
        self, force_insert=False, force_update=False, using=None, update_fields=None
    ):
        """Save the data source, creating the DatabaseType row if it is new."""
        if DatabaseType.objects.filter(type=self.database_type).count() == 0:
            db_type = DatabaseType(type=self.database_type)
            db_type.save()
        super().save(force_insert, force_update, using, update_fields)

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.__str__()
def failure_data_source_directory(instance, filename):
    """Upload path for result files whose processing failed.

    Builds ``<storage>/<data-source-hash>/failure/<timestamp><suffixes>``.
    Note that strftime() is applied to the *whole* joined path so that the
    ``%Y%m%d%H%M%S%f`` placeholder expands to the current timestamp while the
    original file's suffixes (e.g. ``.tar.gz``) are preserved.
    """
    file_path = os.path.join(
        settings.ACHILLES_RESULTS_STORAGE_PATH,
        instance.data_source.hash,
        "failure",
        "%Y%m%d%H%M%S%f" + "".join(pathlib.Path(filename).suffixes),
    )
    return datetime.datetime.now().strftime(file_path)
class PendingUpload(models.Model):
    """An uploaded results file that has not (yet) been processed successfully."""

    # Processing states of an upload.
    STATE_PENDING = 1
    STATE_STARTED = 2
    STATE_CANCELED = 3
    STATE_FAILED = 4

    STATES = (
        (STATE_PENDING, "Pending"),
        (STATE_STARTED, "Started"),
        (STATE_CANCELED, "Canceled"),  # currently not being used
        (STATE_FAILED, "Failed"),
    )

    class Meta:
        ordering = ("-upload_date",)

    data_source = models.ForeignKey(DataSource, on_delete=models.CASCADE)
    upload_date = models.DateTimeField(auto_now_add=True)
    status = models.IntegerField(choices=STATES, default=STATE_PENDING)
    uploaded_file = models.FileField(upload_to=failure_data_source_directory)
    # Celery task id of the processing job; used to look up its TaskResult.
    task_id = models.CharField(max_length=255, null=True)

    def get_status(self):
        """Return the human-readable label for the current status."""
        for status_id, name in self.STATES:
            if self.status == status_id:
                return name
        return None  # should never happen

    def failure_message(self):
        """Return a user-facing explanation of why this upload failed.

        Returns None when the upload is not in the FAILED state.
        """
        if self.status != self.STATE_FAILED:
            return None
        try:
            task = TaskResult.objects.get(
                task_id=self.task_id, task_name="uploader.tasks.upload_results_file"
            )
        except TaskResult.DoesNotExist:
            # Old task results are periodically purged, so the details may be gone.
            return (
                "The information about this failure was deleted. Probably because this upload history "
                "record is an old one. If not please contact the system administrator for more details. "
            )
        result = json.loads(task.result)
        # Validation errors raised by the upload checks carry a message meant
        # for the end user; any other exception is reported generically.
        if result["exc_module"] == "uploader.file_handler.checks":
            return result["exc_message"][0]
        return (
            "An unexpected error occurred while processing your file. Please contact the "
            "system administrator for more details."
        )
def success_data_source_directory(instance, filename):
    """Upload path for successfully processed result files.

    Builds ``<storage>/<data-source-hash>/success/<timestamp><suffixes>``;
    strftime() is applied to the whole joined path to expand the
    ``%Y%m%d%H%M%S%f`` placeholder into the current timestamp.
    """
    file_path = os.path.join(
        settings.ACHILLES_RESULTS_STORAGE_PATH,
        instance.data_source.hash,
        "success",
        "%Y%m%d%H%M%S%f" + "".join(pathlib.Path(filename).suffixes),
    )
    return datetime.datetime.now().strftime(file_path)
class UploadHistory(models.Model):
    """
    Successful uploads only: one record per results file that was processed
    without errors, together with the metadata extracted from it.
    """

    class Meta:
        get_latest_by = "upload_date"
        ordering = ("-upload_date",)
        db_table = "upload_history"

    data_source = models.ForeignKey(DataSource, on_delete=models.CASCADE)
    upload_date = models.DateTimeField(auto_now_add=True)
    r_package_version = models.CharField(max_length=50, null=True)
    generation_date = models.CharField(max_length=50, null=True)
    cdm_release_date = models.CharField(max_length=50, null=True)
    cdm_version = models.CharField(max_length=50, null=True)
    vocabulary_version = models.CharField(max_length=50, null=True)
    uploaded_file = models.FileField(
        null=True, upload_to=success_data_source_directory
    )  # For backwards compatibility its easier to make this null=True
    pending_upload_id = models.IntegerField(
        null=True,
        help_text="The id of the PendingUpload record that originated this successful upload."
        # aspedrosa: A foreign key is not used here since a PendingUpload record is erased once is successful. This
        # is field is then only used to get the result data of pending upload through the get_upload_task_status view
    )

    def __repr__(self):
        return self.__str__()

    def __str__(self):
        return f"{self.data_source.name} - {self.upload_date}"

    def get_status(self):
        # An UploadHistory record only exists for finished uploads.
        return "Done"
class AchillesResults(models.Model):
    """Uploaded analysis results: one row per (data source, analysis, strata)
    combination with its count and distribution statistics.
    """

    class Meta:
        db_table = "achilles_results"
        indexes = [
            models.Index(fields=("data_source",)),
            models.Index(fields=("analysis_id",)),
        ]

    data_source = models.ForeignKey(DataSource, on_delete=models.CASCADE)
    analysis_id = models.BigIntegerField()
    # Up to five free-form stratification values for the analysis.
    stratum_1 = models.TextField(null=True)
    stratum_2 = models.TextField(null=True)
    stratum_3 = models.TextField(null=True)
    stratum_4 = models.TextField(null=True)
    stratum_5 = models.TextField(null=True)
    count_value = models.BigIntegerField()
    # Optional distribution statistics (null for simple count-only analyses).
    min_value = models.BigIntegerField(null=True)
    max_value = models.BigIntegerField(null=True)
    avg_value = models.FloatField(null=True)
    stdev_value = models.FloatField(null=True)
    median_value = models.BigIntegerField(null=True)
    p10_value = models.BigIntegerField(null=True)
    p25_value = models.BigIntegerField(null=True)
    p75_value = models.BigIntegerField(null=True)
    p90_value = models.BigIntegerField(null=True)
class AchillesResultsArchive(models.Model):
    """Archived copy of AchillesResults rows, linked to the UploadHistory
    record they came from (same columns plus ``upload_info``).
    """

    class Meta:
        db_table = "achilles_results_archive"
        indexes = [
            models.Index(fields=("data_source",)),
            models.Index(fields=("analysis_id",)),
        ]

    # The upload these archived rows belong to.
    upload_info = models.ForeignKey(UploadHistory, on_delete=models.CASCADE)
    data_source = models.ForeignKey(DataSource, on_delete=models.CASCADE)
    analysis_id = models.BigIntegerField()
    # Up to five free-form stratification values for the analysis.
    stratum_1 = models.TextField(null=True)
    stratum_2 = models.TextField(null=True)
    stratum_3 = models.TextField(null=True)
    stratum_4 = models.TextField(null=True)
    stratum_5 = models.TextField(null=True)
    count_value = models.BigIntegerField()
    # Optional distribution statistics (null for simple count-only analyses).
    min_value = models.BigIntegerField(null=True)
    max_value = models.BigIntegerField(null=True)
    avg_value = models.FloatField(null=True)
    stdev_value = models.FloatField(null=True)
    median_value = models.BigIntegerField(null=True)
    p10_value = models.BigIntegerField(null=True)
    p25_value = models.BigIntegerField(null=True)
    p75_value = models.BigIntegerField(null=True)
    p90_value = models.BigIntegerField(null=True)
| [
"django.db.models.Index",
"json.loads",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"pathlib.Path",
"django.db.models.FileField",
"django.db.models.DateTimeField",
"django.db.models.BooleanField",
"uuid.uuid4",
"d... | [((303, 375), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)', 'help_text': '"""Country name."""'}), "(max_length=100, unique=True, help_text='Country name.')\n", (319, 375), False, 'from django.db import models\n'), ((389, 474), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'unique': '(True)', 'help_text': '"""ISO 3166-1 Alpha-2 Code"""'}), "(max_length=3, unique=True, help_text='ISO 3166-1 Alpha-2 Code'\n )\n", (405, 474), False, 'from django.db import models\n'), ((500, 566), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'help_text': '"""Continent associated."""'}), "(max_length=50, help_text='Continent associated.')\n", (516, 566), False, 'from django.db import models\n'), ((778, 868), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)', 'help_text': '"""Defines the database type."""'}), "(max_length=100, unique=True, help_text=\n 'Defines the database type.')\n", (794, 868), False, 'from django.db import models\n'), ((1246, 1334), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)', 'help_text': '"""Name of the data source."""'}), "(max_length=100, unique=True, help_text=\n 'Name of the data source.')\n", (1262, 1334), False, 'from django.db import models\n'), ((1358, 1516), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'unique': '(True)', 'help_text': '"""Short label for the data source, containing only letters, numbers, underscores or hyphens."""'}), "(max_length=50, unique=True, help_text=\n 'Short label for the data source, containing only letters, numbers, underscores or hyphens.'\n )\n", (1374, 1516), False, 'from django.db import models\n'), ((1549, 1647), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': 'hash_generator', 'max_length': '(255)', 'null': '(False)', 'unique': 
'(True)'}), '(blank=True, default=hash_generator, max_length=255, null=\n False, unique=True)\n', (1565, 1647), False, 'from django.db import models\n'), ((1709, 1850), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'help_text': '"""Date at which DB is available for research for current release."""', 'null': '(True)', 'blank': '(True)'}), "(max_length=50, help_text=\n 'Date at which DB is available for research for current release.', null\n =True, blank=True)\n", (1725, 1850), False, 'from django.db import models\n'), ((1900, 2002), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'help_text': '"""Type of the data source. You can create a new type."""'}), "(max_length=100, help_text=\n 'Type of the data source. You can create a new type.')\n", (1916, 2002), False, 'from django.db import models\n'), ((2026, 2150), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Country'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'help_text': '"""Country where the data source is located."""'}), "(Country, on_delete=models.SET_NULL, null=True, help_text=\n 'Country where the data source is located.')\n", (2043, 2150), False, 'from django.db import models\n'), ((2200, 2219), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (2217, 2219), False, 'from django.db import models\n'), ((2236, 2255), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (2253, 2255), False, 'from django.db import models\n'), ((2267, 2344), 'django.db.models.URLField', 'models.URLField', ([], {'help_text': '"""Link to home page of the data source"""', 'blank': '(True)'}), "(help_text='Link to home page of the data source', blank=True)\n", (2282, 2344), False, 'from django.db import models\n'), ((2357, 2390), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2376, 2390), False, 'from django.db import models\n'), ((3551, 3606), 
'django.db.models.ForeignKey', 'models.ForeignKey', (['DataSource'], {'on_delete': 'models.CASCADE'}), '(DataSource, on_delete=models.CASCADE)\n', (3568, 3606), False, 'from django.db import models\n'), ((3625, 3664), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3645, 3664), False, 'from django.db import models\n'), ((3678, 3736), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'STATES', 'default': 'STATE_PENDING'}), '(choices=STATES, default=STATE_PENDING)\n', (3697, 3736), False, 'from django.db import models\n'), ((3757, 3814), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': 'failure_data_source_directory'}), '(upload_to=failure_data_source_directory)\n', (3773, 3814), False, 'from django.db import models\n'), ((3829, 3872), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)'}), '(max_length=255, null=True)\n', (3845, 3872), False, 'from django.db import models\n'), ((5471, 5526), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DataSource'], {'on_delete': 'models.CASCADE'}), '(DataSource, on_delete=models.CASCADE)\n', (5488, 5526), False, 'from django.db import models\n'), ((5545, 5584), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (5565, 5584), False, 'from django.db import models\n'), ((5609, 5651), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (5625, 5651), False, 'from django.db import models\n'), ((5674, 5716), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (5690, 5716), False, 'from django.db import models\n'), ((5740, 5782), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', 
(5756, 5782), False, 'from django.db import models\n'), ((5801, 5843), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (5817, 5843), False, 'from django.db import models\n'), ((5869, 5911), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (5885, 5911), False, 'from django.db import models\n'), ((5932, 6000), 'django.db.models.FileField', 'models.FileField', ([], {'null': '(True)', 'upload_to': 'success_data_source_directory'}), '(null=True, upload_to=success_data_source_directory)\n', (5948, 6000), False, 'from django.db import models\n'), ((6104, 6232), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'help_text': '"""The id of the PendingUpload record that originated this successful upload."""'}), "(null=True, help_text=\n 'The id of the PendingUpload record that originated this successful upload.'\n )\n", (6123, 6232), False, 'from django.db import models\n'), ((6915, 6970), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DataSource'], {'on_delete': 'models.CASCADE'}), '(DataSource, on_delete=models.CASCADE)\n', (6932, 6970), False, 'from django.db import models\n'), ((6989, 7013), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (7011, 7013), False, 'from django.db import models\n'), ((7030, 7057), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (7046, 7057), False, 'from django.db import models\n'), ((7074, 7101), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (7090, 7101), False, 'from django.db import models\n'), ((7118, 7145), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (7134, 7145), False, 'from django.db import models\n'), ((7162, 7189), 'django.db.models.TextField', 'models.TextField', 
([], {'null': '(True)'}), '(null=True)\n', (7178, 7189), False, 'from django.db import models\n'), ((7206, 7233), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (7222, 7233), False, 'from django.db import models\n'), ((7252, 7276), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (7274, 7276), False, 'from django.db import models\n'), ((7293, 7326), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7315, 7326), False, 'from django.db import models\n'), ((7343, 7376), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7365, 7376), False, 'from django.db import models\n'), ((7393, 7421), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (7410, 7421), False, 'from django.db import models\n'), ((7440, 7468), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (7457, 7468), False, 'from django.db import models\n'), ((7488, 7521), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7510, 7521), False, 'from django.db import models\n'), ((7538, 7571), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7560, 7571), False, 'from django.db import models\n'), ((7588, 7621), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7610, 7621), False, 'from django.db import models\n'), ((7638, 7671), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7660, 7671), False, 'from django.db import models\n'), ((7688, 7721), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7710, 7721), False, 'from django.db import models\n'), ((7981, 8039), 
'django.db.models.ForeignKey', 'models.ForeignKey', (['UploadHistory'], {'on_delete': 'models.CASCADE'}), '(UploadHistory, on_delete=models.CASCADE)\n', (7998, 8039), False, 'from django.db import models\n'), ((8058, 8113), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DataSource'], {'on_delete': 'models.CASCADE'}), '(DataSource, on_delete=models.CASCADE)\n', (8075, 8113), False, 'from django.db import models\n'), ((8132, 8156), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (8154, 8156), False, 'from django.db import models\n'), ((8173, 8200), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (8189, 8200), False, 'from django.db import models\n'), ((8217, 8244), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (8233, 8244), False, 'from django.db import models\n'), ((8261, 8288), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (8277, 8288), False, 'from django.db import models\n'), ((8305, 8332), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (8321, 8332), False, 'from django.db import models\n'), ((8349, 8376), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (8365, 8376), False, 'from django.db import models\n'), ((8395, 8419), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (8417, 8419), False, 'from django.db import models\n'), ((8436, 8469), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8458, 8469), False, 'from django.db import models\n'), ((8486, 8519), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8508, 8519), False, 'from django.db import models\n'), ((8536, 8564), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), 
'(null=True)\n', (8553, 8564), False, 'from django.db import models\n'), ((8583, 8611), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (8600, 8611), False, 'from django.db import models\n'), ((8631, 8664), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8653, 8664), False, 'from django.db import models\n'), ((8681, 8714), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8703, 8714), False, 'from django.db import models\n'), ((8731, 8764), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8753, 8764), False, 'from django.db import models\n'), ((8781, 8814), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8803, 8814), False, 'from django.db import models\n'), ((8831, 8864), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8853, 8864), False, 'from django.db import models\n'), ((1017, 1029), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1027, 1029), False, 'import uuid\n'), ((4617, 4640), 'json.loads', 'json.loads', (['task.result'], {}), '(task.result)\n', (4627, 4640), False, 'import json\n'), ((3115, 3138), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3136, 3138), False, 'import datetime\n'), ((4191, 4288), 'django_celery_results.models.TaskResult.objects.get', 'TaskResult.objects.get', ([], {'task_id': 'self.task_id', 'task_name': '"""uploader.tasks.upload_results_file"""'}), "(task_id=self.task_id, task_name=\n 'uploader.tasks.upload_results_file')\n", (4213, 4288), False, 'from django_celery_results.models import TaskResult\n'), ((5199, 5222), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5220, 5222), False, 'import datetime\n'), ((6796, 6833), 'django.db.models.Index', 'models.Index', 
([], {'fields': "('data_source',)"}), "(fields=('data_source',))\n", (6808, 6833), False, 'from django.db import models\n'), ((6847, 6884), 'django.db.models.Index', 'models.Index', ([], {'fields': "('analysis_id',)"}), "(fields=('analysis_id',))\n", (6859, 6884), False, 'from django.db import models\n'), ((7862, 7899), 'django.db.models.Index', 'models.Index', ([], {'fields': "('data_source',)"}), "(fields=('data_source',))\n", (7874, 7899), False, 'from django.db import models\n'), ((7913, 7950), 'django.db.models.Index', 'models.Index', ([], {'fields': "('analysis_id',)"}), "(fields=('analysis_id',))\n", (7925, 7950), False, 'from django.db import models\n'), ((3063, 3085), 'pathlib.Path', 'pathlib.Path', (['filename'], {}), '(filename)\n', (3075, 3085), False, 'import pathlib\n'), ((5147, 5169), 'pathlib.Path', 'pathlib.Path', (['filename'], {}), '(filename)\n', (5159, 5169), False, 'import pathlib\n')] |
from repeated_word import find_first_repeat
import pytest
def test_regular_str():
    """An ordinary sentence: the article 'a' is the first repeated word."""
    sample = "Once upon a time, there was a brave princess who..."
    result = find_first_repeat(sample)
    assert result == "a"
def test_bigger_different_case():
    """A long passage where the repeat ('It'/'it') differs in letter case."""
    sample = "It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, it was the epoch of belief, it was the epoch of incredulity, it was the season of Light, it was the season of Darkness, it was the spring of hope, it was the winter of despair, we had everything before us, we had nothing before us, we were all going direct to Heaven, we were all going direct the other way – in short, the period was so far like the present period, that some of its noisiest authorities insisted on its being received, for good or for evil, in the superlative degree of comparison only..."
    result = find_first_repeat(sample)
    assert result == "it"
def test_no_tepeat():
    """No word repeats: the function should return None.

    (Name kept as-is — note the pre-existing typo 'tepeat' — so test
    discovery/selection by name is unaffected.)
    """
    text = "a b c, d."
    # `is None` instead of `== None`: identity is the idiomatic (PEP 8) check.
    assert find_first_repeat(text) is None
def test_punktuation_case():
    """Punctuation (commas, apostrophes) must not hide the repeated word."""
    sample = "It was a queer, sultry summer, the summer they electrocuted the Rosenbergs, and I didn’t know what I was doing in New York..."
    result = find_first_repeat(sample)
    assert result == 'summer'
def test_numbers_input():
    """A non-string (int) input must raise TypeError."""
    text = 135
    with pytest.raises(TypeError):
        # Call for its side effect only; binding the unused `result` was dead code.
        find_first_repeat(text)
def test_repeat_in_the_end():
    """The first *repeated* word wins even when later words also repeat."""
    sample = "red blue yellow green green blue blue"
    result = find_first_repeat(sample)
    assert result == "green"
def test_list_input():
    """A non-string (list) input must raise TypeError."""
    text = ['a', 'f', 'a', 'vf']
    with pytest.raises(TypeError):
        # Call for its side effect only; binding the unused `result` was dead code.
        find_first_repeat(text)
| [
"repeated_word.find_first_repeat",
"pytest.raises"
] | [((159, 182), 'repeated_word.find_first_repeat', 'find_first_repeat', (['text'], {}), '(text)\n', (176, 182), False, 'from repeated_word import find_first_repeat\n'), ((865, 888), 'repeated_word.find_first_repeat', 'find_first_repeat', (['text'], {}), '(text)\n', (882, 888), False, 'from repeated_word import find_first_repeat\n'), ((954, 977), 'repeated_word.find_first_repeat', 'find_first_repeat', (['text'], {}), '(text)\n', (971, 977), False, 'from repeated_word import find_first_repeat\n'), ((1166, 1189), 'repeated_word.find_first_repeat', 'find_first_repeat', (['text'], {}), '(text)\n', (1183, 1189), False, 'from repeated_word import find_first_repeat\n'), ((1253, 1277), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1266, 1277), False, 'import pytest\n'), ((1296, 1319), 'repeated_word.find_first_repeat', 'find_first_repeat', (['text'], {}), '(text)\n', (1313, 1319), False, 'from repeated_word import find_first_repeat\n'), ((1413, 1436), 'repeated_word.find_first_repeat', 'find_first_repeat', (['text'], {}), '(text)\n', (1430, 1436), False, 'from repeated_word import find_first_repeat\n'), ((1514, 1538), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1527, 1538), False, 'import pytest\n'), ((1557, 1580), 'repeated_word.find_first_repeat', 'find_first_repeat', (['text'], {}), '(text)\n', (1574, 1580), False, 'from repeated_word import find_first_repeat\n')] |
# coding=utf-8
import urllib2
import random

target_url = "http://www.baidu.com/"

# Pool of User-Agent strings to rotate through (a proxy list would work the same way).
agent_pool = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/5.0 (Windows NT 6.1; rv2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"
]

# Pick one User-Agent at random for this request.
picked_agent = random.choice(agent_pool)

# Build the request object.
req = urllib2.Request(target_url)
# add_header() adds or replaces an HTTP request header.
req.add_header("User-Agent", picked_agent)
# get_header() reads a header back; note only the first letter may be capitalised,
# the rest must be lower case.
print("User-Agent: %s" % req.get_header("User-agent"))

# Send the request; the server response is a file-like object.
resp = urllib2.urlopen(req)
# read() returns the entire response body as a string.
page = resp.read()
# HTTP status code: 200 on success, 4xx for client errors, 5xx for server errors.
status_code = resp.getcode()
# Final URL of the response data (reveals any redirect that happened).
final_url = resp.geturl()
# Raw HTTP response headers sent by the server.
header_info = resp.info()

print("code = %d" % status_code)
print("response_url = %s" % final_url)
print("response_head =\n %s" % header_info)
print(page)
| [
"urllib2.Request",
"urllib2.urlopen",
"random.choice"
] | [((629, 651), 'random.choice', 'random.choice', (['ua_list'], {}), '(ua_list)\n', (642, 651), False, 'import random\n'), ((675, 695), 'urllib2.Request', 'urllib2.Request', (['url'], {}), '(url)\n', (690, 695), False, 'import urllib2\n'), ((938, 962), 'urllib2.urlopen', 'urllib2.urlopen', (['request'], {}), '(request)\n', (953, 962), False, 'import urllib2\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 29 15:44:44 2019
@author: liuhongbing
"""
# coding=utf-8
import tensorflow as tf
import time
import os
from charCNN_model import CharCNN
import datetime
from read_data import Config,Dataset
# Load data
print("正在载入数据...")
# 函数dataset_read:输入文件名,返回训练集,测试集标签
# 注:embedding_w大小为vocabulary_size × embedding_size
#
#train_data = Dataset()
#train_data.dataset_read()
#
print("load the data finished....")
with tf.Graph().as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=True,
log_device_placement=False)
sess = tf.Session(config=session_conf)
with sess.as_default():
cnn = CharCNN(
l0=Config.l0,
num_classes=Config.nums_classes,
alphabet_size=Config.alphabet_size,
convLayers=Config.convLayers,
fcLayers=Config.fcLayers,
l2_reg_lambda=0)
# cnn = CharConvNet()
global_step = tf.Variable(0, name="global_step", trainable=False)
optimizer = tf.train.AdamOptimizer(Config.learningRate)
grads_and_vars = optimizer.compute_gradients(cnn.loss)
train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)
# Keep track of gradient values and sparsity (optional)
grad_summaries = []
for g, v in grads_and_vars:
if g is not None:
grad_hist_summary = tf.summary.histogram("{}/grad/hist".format(v.name), g)
sparsity_summary = tf.summary.scalar("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
grad_summaries.append(grad_hist_summary)
grad_summaries.append(sparsity_summary)
grad_summaries_merged = tf.summary.merge(grad_summaries)
# Output directory for models and summaries
timestamp = str(int(time.time()))
out_dir = os.path.abspath(os.path.join(os.path.curdir, "runs", timestamp))
print("Writing to {}\n".format(out_dir))
# Summaries for loss and accuracy
loss_summary = tf.summary.scalar("loss", cnn.loss)
acc_summary = tf.summary.scalar("accuracy", cnn.accuracy)
# Train Summaries
train_summary_op = tf.summary.merge([loss_summary, acc_summary, grad_summaries_merged])
train_summary_dir = os.path.join(out_dir, "summaries", "train")
train_summary_writer = tf.summary.FileWriter(train_summary_dir, sess.graph)
# Dev summaries
dev_summary_op = tf.summary.merge([loss_summary, acc_summary])
dev_summary_dir = os.path.join(out_dir, "summaries", "dev")
dev_summary_writer = tf.summary.FileWriter(dev_summary_dir, sess.graph)
# Checkpoint directory. Tensorflow assumes this directory already exists so we need to create it
checkpoint_dir = os.path.abspath(os.path.join(out_dir, "checkpoints"))
checkpoint_prefix = os.path.join(checkpoint_dir, "model")
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
saver = tf.train.Saver(tf.global_variables())
# Initialize all variables
sess.run(tf.global_variables_initializer())
def train_step(x_batch, y_batch):
"""
A single training step
"""
feed_dict = {
cnn.input_x: x_batch,
cnn.input_y: y_batch,
cnn.dropout_keep_prob: Config.dropoutKeepProb
}
_, step, summaries, loss, accuracy = sess.run(
[train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
train_summary_writer.add_summary(summaries, step)
def dev_step(x_batch, y_batch, writer=None):
"""
Evaluates model on a dev set
"""
feed_dict = {
cnn.input_x: x_batch,
cnn.input_y: y_batch,
cnn.dropout_keep_prob: 1.0
}
step, summaries, loss, accuracy = sess.run(
[global_step, dev_summary_op, cnn.loss, cnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
if writer:
writer.add_summary(summaries, step)
print("初始化完毕,开始训练")
for i in range(Config.epoches):
for j in range(train_data.example_nums// Config.batch_size):
batch_train = train_data.next_batch()
# 训练模型
train_step(batch_train[0], batch_train[1])
current_step = tf.train.global_step(sess, global_step)
# train_step.run(feed_dict={x: batch_train[0], y_actual: batch_train[1], keep_prob: 0.5})
# 对结果进行记录
if current_step % Config.evaluateEvery == 0:
print("\nEvaluation:", current_step)
dev_step(train_data.dev_image, train_data.dev_label, writer=dev_summary_writer)
print("")
if current_step % Config.checkpoint_every == 0:
path = saver.save(sess, checkpoint_prefix, global_step=current_step)
print("Saved model checkpoint to {}\n".format(path))
"""
Evaluation: 100
2019-06-29T22:01:32.604023: step 100, loss 1.21269, acc 0.508651
Evaluation: 200
2019-06-29T22:08:41.874223: step 200, loss 1.18934, acc 0.517814
Evaluation: 300
2019-06-29T22:16:12.802301: step 300, loss 1.18706, acc 0.523965
Evaluation: 400
2019-06-29T22:23:01.126583: step 400, loss 1.18321, acc 0.532712
Evaluation: 500
2019-06-29T22:29:55.398533: step 500, loss 1.15435, acc 0.528835
Evaluation: 600
2019-06-29T22:37:12.734557: step 600, loss 1.1356, acc 0.539023
Evaluation: 700
2019-06-29T22:43:42.253525: step 700, loss 1.11152, acc 0.547898
Evaluation: 800
2019-06-29T22:50:57.971248: step 800, loss 1.10185, acc 0.55453
Evaluation: 1200
2019-06-29T23:18:18.294406: step 1200, loss 1.07737, acc 0.562636
Evaluation: 1500
2019-06-29T23:39:26.627900: step 1500, loss 1.0239, acc 0.582532
valuation: 1800
2019-06-29T23:59:47.539924: step 1800, loss 1.01305, acc 0.587402
""" | [
"tensorflow.Graph",
"os.path.exists",
"os.makedirs",
"tensorflow.Variable",
"tensorflow.Session",
"tensorflow.summary.merge",
"os.path.join",
"tensorflow.nn.zero_fraction",
"tensorflow.global_variables",
"charCNN_model.CharCNN",
"tensorflow.summary.scalar",
"tensorflow.global_variables_initial... | [((524, 593), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'allow_soft_placement': '(True)', 'log_device_placement': '(False)'}), '(allow_soft_placement=True, log_device_placement=False)\n', (538, 593), True, 'import tensorflow as tf\n'), ((618, 649), 'tensorflow.Session', 'tf.Session', ([], {'config': 'session_conf'}), '(config=session_conf)\n', (628, 649), True, 'import tensorflow as tf\n'), ((693, 865), 'charCNN_model.CharCNN', 'CharCNN', ([], {'l0': 'Config.l0', 'num_classes': 'Config.nums_classes', 'alphabet_size': 'Config.alphabet_size', 'convLayers': 'Config.convLayers', 'fcLayers': 'Config.fcLayers', 'l2_reg_lambda': '(0)'}), '(l0=Config.l0, num_classes=Config.nums_classes, alphabet_size=Config\n .alphabet_size, convLayers=Config.convLayers, fcLayers=Config.fcLayers,\n l2_reg_lambda=0)\n', (700, 865), False, 'from charCNN_model import CharCNN\n'), ((992, 1043), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'name': '"""global_step"""', 'trainable': '(False)'}), "(0, name='global_step', trainable=False)\n", (1003, 1043), True, 'import tensorflow as tf\n'), ((1064, 1107), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['Config.learningRate'], {}), '(Config.learningRate)\n', (1086, 1107), True, 'import tensorflow as tf\n'), ((1764, 1796), 'tensorflow.summary.merge', 'tf.summary.merge', (['grad_summaries'], {}), '(grad_summaries)\n', (1780, 1796), True, 'import tensorflow as tf\n'), ((2090, 2125), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""loss"""', 'cnn.loss'], {}), "('loss', cnn.loss)\n", (2107, 2125), True, 'import tensorflow as tf\n'), ((2148, 2191), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""accuracy"""', 'cnn.accuracy'], {}), "('accuracy', cnn.accuracy)\n", (2165, 2191), True, 'import tensorflow as tf\n'), ((2246, 2314), 'tensorflow.summary.merge', 'tf.summary.merge', (['[loss_summary, acc_summary, grad_summaries_merged]'], {}), '([loss_summary, acc_summary, 
grad_summaries_merged])\n', (2262, 2314), True, 'import tensorflow as tf\n'), ((2343, 2386), 'os.path.join', 'os.path.join', (['out_dir', '"""summaries"""', '"""train"""'], {}), "(out_dir, 'summaries', 'train')\n", (2355, 2386), False, 'import os\n'), ((2418, 2470), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['train_summary_dir', 'sess.graph'], {}), '(train_summary_dir, sess.graph)\n', (2439, 2470), True, 'import tensorflow as tf\n'), ((2521, 2566), 'tensorflow.summary.merge', 'tf.summary.merge', (['[loss_summary, acc_summary]'], {}), '([loss_summary, acc_summary])\n', (2537, 2566), True, 'import tensorflow as tf\n'), ((2593, 2634), 'os.path.join', 'os.path.join', (['out_dir', '"""summaries"""', '"""dev"""'], {}), "(out_dir, 'summaries', 'dev')\n", (2605, 2634), False, 'import os\n'), ((2664, 2714), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['dev_summary_dir', 'sess.graph'], {}), '(dev_summary_dir, sess.graph)\n', (2685, 2714), True, 'import tensorflow as tf\n'), ((2928, 2965), 'os.path.join', 'os.path.join', (['checkpoint_dir', '"""model"""'], {}), "(checkpoint_dir, 'model')\n", (2940, 2965), False, 'import os\n'), ((480, 490), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (488, 490), True, 'import tensorflow as tf\n'), ((1926, 1973), 'os.path.join', 'os.path.join', (['os.path.curdir', '"""runs"""', 'timestamp'], {}), "(os.path.curdir, 'runs', timestamp)\n", (1938, 1973), False, 'import os\n'), ((2862, 2898), 'os.path.join', 'os.path.join', (['out_dir', '"""checkpoints"""'], {}), "(out_dir, 'checkpoints')\n", (2874, 2898), False, 'import os\n'), ((2981, 3011), 'os.path.exists', 'os.path.exists', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (2995, 3011), False, 'import os\n'), ((3025, 3052), 'os.makedirs', 'os.makedirs', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (3036, 3052), False, 'import os\n'), ((3084, 3105), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (3103, 3105), True, 'import 
tensorflow as tf\n'), ((3160, 3193), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (3191, 3193), True, 'import tensorflow as tf\n'), ((1878, 1889), 'time.time', 'time.time', ([], {}), '()\n', (1887, 1889), False, 'import time\n'), ((4864, 4903), 'tensorflow.train.global_step', 'tf.train.global_step', (['sess', 'global_step'], {}), '(sess, global_step)\n', (4884, 4903), True, 'import tensorflow as tf\n'), ((1595, 1617), 'tensorflow.nn.zero_fraction', 'tf.nn.zero_fraction', (['g'], {}), '(g)\n', (1614, 1617), True, 'import tensorflow as tf\n'), ((3669, 3692), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3690, 3692), False, 'import datetime\n'), ((4318, 4341), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4339, 4341), False, 'import datetime\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 27 11:26:25 2019
@author: engelen
"""
import xarray as xr
import pandas as pd
from glob import glob
import os
import numpy as np
from collections import defaultdict
def get_first_true(df, condition):
time = df[condition].iloc[0:1].index.values
if time.size == 0:
time = df.iloc[-2:-1].index.values
return(time)
#%%Path management
fw_path = r"./plots/FW_volumes/*-S_fw.nc"
fw_paths = glob(fw_path)
or_path = r"./plots/FW_volumes/*-S_origins.csv"
or_paths = glob(or_path)
#%%Read fresh water volumes
d_fw = {}
open_opt = dict(decode_times=False,
drop_variables = ["total_fw_pumpable", "total_onshore"])
for p in fw_paths:
name = os.path.basename(p).split("_fw.nc")[0]
d_fw[name] = xr.open_dataset(p, **open_opt)
#%%Differentiate
for name, ds in d_fw.items():
ds["fw_norm_diff"] = (
ds["total_fw"]/ds["total_fw"].max()
# ds["total_fw"]/8734.5725
).isel(time=slice(None, -7)).differentiate("time")
#%%time to reach steady state fw_vol
diff = xr.merge(
[ds["fw_norm_diff"].rename(name) for name, ds in d_fw.items()]
).drop(["dx", "dy"]).to_dataframe()
diff = np.log10(np.abs(diff))
time_steady={}
for name in diff.columns:
time_steady[name]=get_first_true(diff[name], diff[name] < -6)
#%%Read origins
colnames = []
d_or = defaultdict()
for csv in or_paths:
name = os.path.basename(csv).split("_origins.csv")[0]
d_or[name] = pd.read_csv(csv, header=0).set_index("time").drop(columns=["dx", "dy"])
colnames.extend([(name, var) for var in d_or[name].columns])
d_or = pd.concat(d_or, axis=1)
#%%Differentiate
#Use xarray to differentiate, as it automatically differentiates properly
tot_vol = d_or.loc[:, ("C-F-B-S", slice(None))].sum(axis=1).iloc[0]
diff_or = xr.Dataset(d_or/tot_vol).differentiate("time").to_dataframe()
diff_or = np.log10(np.abs(diff_or))
time_steady_or={}
for name in diff_or.columns:
time_steady_or[name]=get_first_true(diff_or[name], diff_or[name] < -6.25)
#All this stacking, reseting and dropping is to get rid the table in the right format
time_steady_or=pd.DataFrame(time_steady_or).stack().reset_index(level=[0]).drop(columns="level_0")
mx_time_steady_or = time_steady_or[time_steady_or.index=="River"].max(axis=0)
mx_time_steady_or.to_csv(os.path.join(or_path, "..", "time_to_steady.csv"))
#%% | [
"numpy.abs",
"pandas.read_csv",
"os.path.join",
"xarray.Dataset",
"collections.defaultdict",
"os.path.basename",
"pandas.DataFrame",
"xarray.open_dataset",
"pandas.concat",
"glob.glob"
] | [((454, 467), 'glob.glob', 'glob', (['fw_path'], {}), '(fw_path)\n', (458, 467), False, 'from glob import glob\n'), ((528, 541), 'glob.glob', 'glob', (['or_path'], {}), '(or_path)\n', (532, 541), False, 'from glob import glob\n'), ((1383, 1396), 'collections.defaultdict', 'defaultdict', ([], {}), '()\n', (1394, 1396), False, 'from collections import defaultdict\n'), ((1639, 1662), 'pandas.concat', 'pd.concat', (['d_or'], {'axis': '(1)'}), '(d_or, axis=1)\n', (1648, 1662), True, 'import pandas as pd\n'), ((778, 808), 'xarray.open_dataset', 'xr.open_dataset', (['p'], {}), '(p, **open_opt)\n', (793, 808), True, 'import xarray as xr\n'), ((1213, 1225), 'numpy.abs', 'np.abs', (['diff'], {}), '(diff)\n', (1219, 1225), True, 'import numpy as np\n'), ((1915, 1930), 'numpy.abs', 'np.abs', (['diff_or'], {}), '(diff_or)\n', (1921, 1930), True, 'import numpy as np\n'), ((2349, 2398), 'os.path.join', 'os.path.join', (['or_path', '""".."""', '"""time_to_steady.csv"""'], {}), "(or_path, '..', 'time_to_steady.csv')\n", (2361, 2398), False, 'import os\n'), ((722, 741), 'os.path.basename', 'os.path.basename', (['p'], {}), '(p)\n', (738, 741), False, 'import os\n'), ((1430, 1451), 'os.path.basename', 'os.path.basename', (['csv'], {}), '(csv)\n', (1446, 1451), False, 'import os\n'), ((1834, 1860), 'xarray.Dataset', 'xr.Dataset', (['(d_or / tot_vol)'], {}), '(d_or / tot_vol)\n', (1844, 1860), True, 'import xarray as xr\n'), ((1494, 1520), 'pandas.read_csv', 'pd.read_csv', (['csv'], {'header': '(0)'}), '(csv, header=0)\n', (1505, 1520), True, 'import pandas as pd\n'), ((2161, 2189), 'pandas.DataFrame', 'pd.DataFrame', (['time_steady_or'], {}), '(time_steady_or)\n', (2173, 2189), True, 'import pandas as pd\n')] |
from src.features.preprocessing.huawei_traces import HuaweiTracePreprocessor
import dataclass_cli
import dataclasses
import logging
import pandas as pd
from pathlib import Path
from tqdm import tqdm
from typing import List, Dict, Set
import http
import re
from .base import Preprocessor
from collections import Counter
from .drain import Drain, DrainParameters
import numpy as np
@dataclass_cli.add
@dataclasses.dataclass
class HuaweiPreprocessorConfig:
    """Configuration for all Huawei log/trace preprocessors in this module.

    Fields are exposed as CLI options via ``dataclass_cli.add``; defaults
    below are what is used when no option is supplied.
    """

    # Input CSV with the aggregated concurrent logs.
    aggregated_log_file: Path = Path("data/logs_aggregated_concurrent.csv")
    # Root directory containing the raw trace files (read by HuaweiTracePreprocessor).
    traces_root_directory: Path = Path("data/concurrent_data/traces/")
    # Pickle cache for the fully merged log+trace DataFrame (see load_full_data).
    final_log_file: Path = Path("data/huawei.pkl")
    # Log columns that become categorical event attributes.
    relevant_aggregated_log_columns: List[str] = dataclasses.field(
        default_factory=lambda: [
            "Hostname",
            "log_level",
            "programname",
            "python_module",
            "http_status",
            "http_method",
        ],
    )
    # Trace columns that become event attributes when use_trace_data is set.
    relevant_trace_columns: List[str] = dataclasses.field(
        default_factory=lambda: [
            "Hostname",
            "trace_name",
            "trace_service",
            "python_module",
            "trace_project",
            "payload",
            "etype",
            "http_method",
            "function",
        ],
    )
    # Whether to merge trace rows into the event stream at all.
    use_trace_data: bool = False
    # Aggregation strategy selectors (checked in this order by load_data):
    # per trace id, per fixed-size chunk, per time window, else one big group.
    aggregate_per_trace: bool = False
    aggregate_per_max_number: int = -1
    aggregate_per_time_frequency: str = ""
    # Column names as they appear in the aggregated log CSV.
    log_datetime_column_name: str = "@timestamp"
    log_payload_column_name: str = "Payload"
    use_log_hierarchy: bool = False
    # Drain parameters for the fine-grained log template clustering.
    fine_drain_log_depth: int = 10
    fine_drain_log_st: float = 0.75
    # Drain parameters for the coarse-grained log template clustering.
    coarse_drain_log_depth: int = 4
    coarse_drain_log_st: float = 0.2
    # Optional extra Drain configurations; depths and similarity thresholds
    # are matched by index, so both lists should have the same length.
    drain_log_depths: List[int] = dataclasses.field(default_factory=lambda: [],)
    drain_log_sts: List[float] = dataclasses.field(default_factory=lambda: [],)
    url_column_name: str = "http_url"
    # Drain parameters for clustering request URLs.
    drain_url_depth: int = 10
    drain_url_st: float = 0.5
    add_log_clusters: bool = True
    # Traces with fewer events than this are dropped in per-trace aggregation.
    min_logs_per_trace: int = 2
    min_causality: float = 0.0
    log_only_causality: bool = False
    # Which template column counts as "the" log event (num_logs, hierarchies).
    relevant_log_column: str = "fine_log_cluster_template"
    # Optional CSV mapping Payload -> precomputed template columns.
    log_template_file: Path = Path("data/attention_log_templates.csv")
class ConcurrentAggregatedLogsPreprocessor(Preprocessor):
    """Turns the aggregated Huawei log CSV (and optionally trace data) into
    sequences of categorical events.

    Each raw log row is enriched with Drain cluster templates for its payload
    and URL; rows are then aggregated (per trace / chunk / time window) into
    lists of ``column#value`` event tokens.
    """

    # Name of the column holding the per-group event-token lists.
    sequence_column_name: str = "all_events"
    # Characters replaced by spaces before Drain clustering of payloads/URLs.
    request_drain_regex: str = "[^a-zA-Z0-9\-\.]"

    def __init__(self, config: HuaweiPreprocessorConfig):
        self.config = config
        # NOTE(review): relevant_columns is a set, so any iteration over it
        # (e.g. in _aggregate_per) has no guaranteed order across runs —
        # confirm downstream consumers do not rely on a stable column order.
        self.relevant_columns = set(
            [x for x in self.config.relevant_aggregated_log_columns]
        )
        self.relevant_columns.add("fine_log_cluster_template")
        self.relevant_columns.add("coarse_log_cluster_template")
        self.relevant_columns.add("url_cluster_template")
        # One extra template column per additional Drain configuration.
        for i in range(len(self.config.drain_log_depths)):
            self.relevant_columns.add(str(i) + "_log_cluster_template")
        if self.config.use_trace_data:
            self.relevant_columns.update(self.config.relevant_trace_columns)

    def load_data(self) -> pd.DataFrame:
        """Load and aggregate events according to the configured strategy.

        Priority: per trace, then per fixed-size chunk, then per time
        window (keeping only windows with >1 event), else one single group.
        """
        if self.config.aggregate_per_trace:
            return self._load_data_per_trace()
        elif self.config.aggregate_per_max_number > 0:
            # Double reset_index materializes the positional index as an
            # "index" column so rows can be chunked by position.
            log_only_data = (
                self._load_log_only_data()
                .sort_values(by="timestamp")
                .reset_index(drop=True)
                .reset_index(drop=False)
            )
            log_only_data["grouper"] = log_only_data["index"].apply(
                lambda x: int(x / self.config.aggregate_per_max_number)
            )
            return self._aggregate_per(log_only_data, aggregation_column="grouper")
        elif len(self.config.aggregate_per_time_frequency) > 0:
            log_only_data = (
                self._load_log_only_data()
                .sort_values(by="timestamp")
                .reset_index(drop=True)
                .reset_index(drop=False)
            )
            # pd.Grouper buckets rows into fixed-frequency time windows.
            aggregated_log_data = self._aggregate_per(
                log_only_data,
                aggregation_column=pd.Grouper(
                    key="timestamp", freq=self.config.aggregate_per_time_frequency
                ),
            )
            return aggregated_log_data[aggregated_log_data["num_events"] > 1]
        else:
            # Fallback: everything in one group.
            log_only_data = self._load_log_only_data()
            log_only_data["grouper"] = 1
            return self._aggregate_per(log_only_data, aggregation_column="grouper")

    def _load_data_per_trace(self) -> pd.DataFrame:
        """Aggregate events per parent trace id, dropping short traces."""
        full_df = self.load_full_data()
        aggregated_df = self._aggregate_per(full_df)
        aggregated_df = aggregated_df[
            aggregated_df["num_events"] >= self.config.min_logs_per_trace
        ]
        logging.info(
            "Summary of num_events:\n %s",
            aggregated_df["num_events"].describe().to_string(),
        )
        return aggregated_df

    def _load_log_only_data(self) -> pd.DataFrame:
        """Read log rows, add URL clusters, and backfill empty templates."""
        log_df = self._read_log_df()
        log_df = self._add_url_drain_clusters(log_df)
        for column in [x for x in log_df.columns if "log_cluster_template" in x]:
            # Empty templates get a sentinel so they survive later filtering.
            log_df[column] = (
                log_df[column]
                .fillna("")
                .astype(str)
                .replace(np.nan, "", regex=True)
                .apply(lambda x: x if len(x) > 0 else "___empty___")
            )
        return log_df

    def load_full_data(self) -> pd.DataFrame:
        """Return the merged log+trace DataFrame, cached as a pickle file."""
        logging.info(
            "Trying to read full huawei_df from %s", self.config.final_log_file
        )
        if not self.config.final_log_file.is_file():
            full_df = self._load_full_data()
            full_df.to_pickle(self.config.final_log_file)
        return pd.read_pickle(self.config.final_log_file)

    def _load_full_data(self) -> pd.DataFrame:
        """Build the merged log+trace DataFrame from scratch (no cache)."""
        log_df = self._read_log_df()
        trace_df = self._read_trace_df()
        merged_df = self._merge_logs_traces(log_df, trace_df)
        return self._add_url_drain_clusters(merged_df)

    def _aggregate_per(
        self, merged_df: pd.DataFrame, aggregation_column: str = "parent_trace_id"
    ) -> pd.DataFrame:
        """Collapse rows into per-group event sequences.

        Mutates ``merged_df`` in place: attribute values become
        ``column#value`` tokens, template columns become single-element
        lists, and helper columns are added before the groupby.
        """
        logging.debug("Aggregating huawei data per %s", aggregation_column)
        for column in self.relevant_columns:
            merged_df[column] = merged_df[column].apply(
                lambda x: column + "#" + x.lower() if len(x) > 0 else ""
            )
        # Per-row list of all event tokens (attributes + templates).
        merged_df["all_events"] = merged_df[self.relevant_columns].values.tolist()
        merged_df["attributes"] = merged_df[
            [x for x in self.relevant_columns if not "log_cluster_template" in x]
        ].values.tolist()
        # Wrap each template in a list so the groupby yields lists of lists.
        for log_template_column in [
            x for x in self.relevant_columns if "log_cluster_template" in x
        ]:
            merged_df[log_template_column] = merged_df[log_template_column].apply(
                lambda x: [x]
            )
        events_per_trace = (
            merged_df.sort_values(by="timestamp")
            .groupby(aggregation_column)
            .agg(
                {
                    column_name: lambda x: list(x)
                    for column_name in ["all_events", "attributes",]
                    + [x for x in self.relevant_columns if "log_cluster_template" in x]
                }
            )
            .reset_index()
        )
        # num_logs counts only non-empty templates; num_events counts all rows.
        events_per_trace["num_logs"] = events_per_trace[
            self.config.relevant_log_column
        ].apply(lambda x: len([loglist for loglist in x if len(loglist[0]) > 0]))
        events_per_trace["num_events"] = events_per_trace[
            self.config.relevant_log_column
        ].apply(lambda x: len(x))
        return events_per_trace[
            ["num_logs", "num_events", "all_events", "attributes",]
            + [x for x in self.relevant_columns if "log_cluster_template" in x]
        ]

    def _merge_logs_traces(self, log_df: pd.DataFrame, trace_df: pd.DataFrame):
        """Attach trace ids to log rows; optionally append trace rows too."""
        log_df_with_trace_id = self._match_logs_to_traces(log_df, trace_df)
        if self.config.use_trace_data:
            return pd.concat(
                [log_df_with_trace_id, trace_df], ignore_index=True
            ).reset_index(drop=True)
        else:
            return log_df_with_trace_id.reset_index(drop=True)

    def _match_logs_to_traces(self, log_df: pd.DataFrame, trace_df: pd.DataFrame):
        """Assign each log row the id of any trace whose time span covers it.

        NOTE(review): a log row falling inside several overlapping trace
        spans is duplicated once per matching trace (deduplicated only when
        the resulting rows are fully identical) — confirm this is intended.
        """
        max_timestamp_by_trace = trace_df.groupby(by="parent_trace_id").agg(
            {"timestamp": max,}
        )
        min_timestamp_by_trace = trace_df.groupby(by="parent_trace_id").agg(
            {"timestamp": min,}
        )
        timestamps_merged = pd.merge(
            max_timestamp_by_trace,
            min_timestamp_by_trace,
            left_index=True,
            right_index=True,
            suffixes=("_max", "_min"),
        )
        merged_dfs = []
        for idx, row in tqdm(
            timestamps_merged.iterrows(),
            total=len(timestamps_merged),
            desc="Matching logs to traces...",
        ):
            rel_df = log_df.loc[
                (log_df["timestamp"] >= row["timestamp_min"])
                & (log_df["timestamp"] <= row["timestamp_max"])
            ].copy()
            rel_df["parent_trace_id"] = idx
            merged_dfs.append(rel_df)
        return pd.concat(merged_dfs).drop_duplicates().reset_index(drop=True)

    def _read_trace_df(self) -> pd.DataFrame:
        """Load trace data via the dedicated trace preprocessor."""
        preprocessor = HuaweiTracePreprocessor(
            trace_base_directory=self.config.traces_root_directory
        )
        trace_df = preprocessor.load_data()
        return trace_df

    def _read_log_df(self) -> pd.DataFrame:
        """Read the aggregated log CSV and add all log template columns."""
        df = (
            pd.read_csv(self.config.aggregated_log_file)
            .fillna("")
            .astype(str)
            .replace(np.nan, "", regex=True)
        )
        rel_df = df[
            self.config.relevant_aggregated_log_columns
            + [self.config.log_datetime_column_name]
            + [self.config.log_payload_column_name]
            + [self.config.url_column_name]
        ]
        rel_df = self._add_log_drain_clusters(rel_df)
        # Optionally overlay precomputed templates from a template file.
        if self.config.log_template_file.exists():
            rel_df = self._add_precalculated_log_templates(rel_df)
        rel_df["timestamp"] = pd.to_datetime(
            rel_df[self.config.log_datetime_column_name]
        )
        return rel_df

    def _add_url_drain_clusters(self, df: pd.DataFrame) -> pd.DataFrame:
        """Cluster distinct URLs with Drain and join templates back onto df."""
        url_df = pd.DataFrame(
            df[self.config.url_column_name].dropna().drop_duplicates()
        )
        drain = Drain(
            DrainParameters(
                depth=self.config.drain_url_depth,
                st=self.config.drain_url_st,
                rex=[(self.request_drain_regex, " "),],
            ),
            data_df=url_df,
            data_df_column_name=self.config.url_column_name,
        )
        drain_result_df = (
            drain.load_data().drop_duplicates(ignore_index=False).set_index("log_idx")
        )
        # Inner merge maps url -> cluster; outer merge joins back on the URL.
        url_result_df = (
            pd.merge(
                df,
                pd.merge(
                    url_df,
                    drain_result_df,
                    left_index=True,
                    right_index=True,
                    how="left",
                )
                .drop_duplicates()
                .reset_index(drop=True),
                on=self.config.url_column_name,
                how="left",
            )
            .rename(
                columns={
                    "cluster_template": "url_cluster_template",
                    "cluster_path": "url_cluster_path",
                }
            )
            .drop(columns=["cluster_id"])
        )
        url_result_df["url_cluster_template"] = (
            url_result_df["url_cluster_template"]
            .fillna("")
            .astype(str)
            .replace(np.nan, "", regex=True)
        )
        return url_result_df

    def _add_log_drain_clusters_prefix(
        self, log_df: pd.DataFrame, depth: int, st: float, prefix: str
    ) -> pd.DataFrame:
        """Cluster distinct payloads with Drain and add ``<prefix>log_cluster_*``
        columns to ``log_df``."""
        all_logs_df = pd.DataFrame(
            log_df[self.config.log_payload_column_name].dropna().drop_duplicates()
        )
        drain = Drain(
            DrainParameters(
                depth=depth,
                st=st,
                # Strip IPv4(:port) tokens, then non-word chars, before clustering.
                rex=[
                    ("(/|)([0-9]+\.){3}[0-9]+(:[0-9]+|)(:|)", ""),
                    (self.request_drain_regex, " "),
                    ("[^a-zA-Z\d\s:]", ""),
                ],
            ),
            data_df=all_logs_df,
            data_df_column_name=self.config.log_payload_column_name,
        )
        drain_result_df = drain.load_data().drop_duplicates().set_index("log_idx")
        log_result_df = (
            pd.merge(
                log_df,
                pd.merge(
                    all_logs_df,
                    drain_result_df,
                    left_index=True,
                    right_index=True,
                    how="left",
                )
                .drop_duplicates()
                .reset_index(drop=True),
                on=self.config.log_payload_column_name,
                how="left",
            )
            .rename(
                columns={
                    "cluster_template": prefix + "log_cluster_template",
                    "cluster_path": prefix + "log_cluster_path",
                }
            )
            .drop(columns=["cluster_id"])
        )
        log_result_df[prefix + "log_cluster_template"] = (
            log_result_df[prefix + "log_cluster_template"]
            .fillna("")
            .astype(str)
            .replace(np.nan, "", regex=True)
        )
        return log_result_df

    def _add_precalculated_log_templates(self, log_df: pd.DataFrame) -> pd.DataFrame:
        """Merge precomputed template columns from the template file (keyed on
        Payload); registers any new template columns as relevant."""
        precalculated_templates_df = pd.read_csv(self.config.log_template_file)
        if not "Payload" in precalculated_templates_df.columns:
            logging.error("Invalid log template file - does not contain Payload column!")
            return log_df
        self.relevant_columns.update(
            [x for x in precalculated_templates_df.columns if "log_cluster_template" in x]
        )
        return pd.merge(log_df, precalculated_templates_df, on="Payload", how="left")

    def _add_log_drain_clusters(self, log_df: pd.DataFrame) -> pd.DataFrame:
        """Run all configured Drain passes (fine, coarse, extras) over payloads."""
        log_result_df = self._add_log_drain_clusters_prefix(
            log_df=log_df,
            depth=self.config.fine_drain_log_depth,
            st=self.config.fine_drain_log_st,
            prefix="fine_",
        )
        log_result_df = self._add_log_drain_clusters_prefix(
            log_df=log_result_df,
            depth=self.config.coarse_drain_log_depth,
            st=self.config.coarse_drain_log_st,
            prefix="coarse_",
        )
        for i in range(len(self.config.drain_log_depths)):
            log_result_df = self._add_log_drain_clusters_prefix(
                log_df=log_result_df,
                depth=self.config.drain_log_depths[i],
                st=self.config.drain_log_sts[i],
                prefix=str(i) + "_",
            )
        return log_result_df
class ConcurrentAggregatedLogsDescriptionPreprocessor(Preprocessor):
    """Builds a human-readable description for every event label.

    For each relevant column of the log data, every distinct (lowercased)
    value becomes one record ``{"label": "<column>#<value>",
    "description": "..."}``.
    """

    def __init__(
        self, config: HuaweiPreprocessorConfig,
    ):
        self.config = config

    def load_data(self) -> pd.DataFrame:
        """Return the deduplicated label/description table."""
        log_preprocessor = ConcurrentAggregatedLogsPreprocessor(self.config)
        log_df = log_preprocessor._load_log_only_data()
        return self._load_column_descriptions(
            log_df, log_preprocessor.relevant_columns
        )

    def _load_column_descriptions(
        self, huawei_df: pd.DataFrame, relevant_columns: Set[str]
    ) -> pd.DataFrame:
        """Collect one description record per distinct column value."""
        http_descriptions = self._load_http_descriptions()
        column_descriptions = self._get_column_descriptions()
        records = []
        for column in relevant_columns:
            raw_values = set(
                huawei_df[column].dropna().astype(str).replace(np.nan, "", regex=True)
            )
            values = {str(v).lower() for v in raw_values if len(str(v)) > 0}
            for value in tqdm(values, desc="Loading descriptions for column " + column):
                description = self._describe_value(column, value, http_descriptions)
                # Prepend the column's own description when one is defined.
                if column in column_descriptions:
                    description = column_descriptions[column] + " " + description
                records.append(
                    {"label": column + "#" + value, "description": description,}
                )
        return (
            pd.DataFrame.from_records(records)
            .drop_duplicates()
            .reset_index(drop=True)
        )

    def _describe_value(
        self, column: str, value: str, http_descriptions: Dict[str, str]
    ) -> str:
        """Turn a single column value into descriptive words."""
        if column == "Hostname":
            # Split "compute003" into name + trailing number.
            stem = value.rstrip("0123456789")
            return stem + " " + value[len(stem) :]
        if column == "http_status":
            return http_descriptions[value]
        # Generic case: split the value on common separator characters.
        return " ".join(re.split("[,._\-\*]+", value))

    def _get_column_descriptions(self) -> Dict[str, str]:
        """Static prefix descriptions for the known columns."""
        return {
            "Hostname": "Host name",
            "log_level": "Log level",
            "programname": "Program name",
            "python_module": "Python module",
            "http_status": "HTTP status",
            "http_method": "HTTP method",
            "trace_name": "Trace name",
            "trace_service": "Trace service",
            "trace_project": "Trace project",
            "etype": "Error type",
            "function": "Function",
            "url_cluster_template": "Url Cluster",
        }

    def _load_http_descriptions(self) -> Dict[str, str]:
        """Map "<code>.0" keys to lowercased HTTP status phrase names."""
        logging.debug("Initializing HTTP Status descriptions")
        return {
            str(status.value) + ".0": status.name.lower().replace("_", " ")
            for status in http.HTTPStatus
        }
class ConcurrentAggregatedLogsHierarchyPreprocessor(Preprocessor):
    """Builds a parent/child hierarchy table over event labels.

    The result has columns ``parent_id``, ``child_id``, ``parent_name``,
    ``child_name``, linking a ``root`` node to columns, columns to split
    value components, and (optionally) attribute values to log templates.

    Fix over the original implementation: rows are accumulated as plain
    dicts and materialized once, instead of repeated
    ``DataFrame.append(..., ignore_index=True)`` calls — that method was
    deprecated in pandas 1.4 and removed in pandas 2.0, and grew the frame
    quadratically.
    """

    def __init__(
        self, config: HuaweiPreprocessorConfig,
    ):
        self.config = config

    def load_data(self) -> pd.DataFrame:
        """Return the hierarchy, log-based or attribute-based per config."""
        if self.config.use_log_hierarchy:
            return self._load_log_only_hierarchy()
        else:
            return self._load_attribute_only_hierarchy()

    def _load_log_only_hierarchy(self) -> pd.DataFrame:
        """Hierarchy over log template columns plus the coarse cluster path."""
        preprocessor = ConcurrentAggregatedLogsPreprocessor(self.config)
        huawei_df = preprocessor._load_log_only_data()
        relevant_log_columns = set(
            [x for x in preprocessor.relevant_columns if "log_cluster_template" in x]
            + ["coarse_log_cluster_path"]
        )
        attribute_hierarchy = self._load_attribute_hierarchy(
            huawei_df, set(["coarse_log_cluster_path"])
        )
        log_hierarchy = self._load_log_hierarchy(huawei_df, relevant_log_columns)
        # pd.concat replaces the removed DataFrame.append (pandas >= 2.0).
        return (
            pd.concat([attribute_hierarchy, log_hierarchy], ignore_index=True)
            .drop_duplicates()
            .reset_index(drop=True)
        )

    def _load_attribute_only_hierarchy(self) -> pd.DataFrame:
        """Hierarchy over the non-template attribute columns."""
        preprocessor = ConcurrentAggregatedLogsPreprocessor(self.config)
        huawei_df = preprocessor._load_log_only_data()
        relevant_columns = set(
            [
                x
                for x in preprocessor.relevant_columns
                if "log_cluster_template" not in x
            ]
        )
        attribute_hierarchy = self._load_attribute_hierarchy(
            huawei_df, relevant_columns
        )
        log_hierarchy = self._load_log_hierarchy(huawei_df, relevant_columns)
        return (
            pd.concat([attribute_hierarchy, log_hierarchy], ignore_index=True)
            .drop_duplicates()
            .reset_index(drop=True)
        )

    def _load_log_hierarchy(
        self, huawei_df: pd.DataFrame, relevant_columns: Set[str]
    ) -> pd.DataFrame:
        """Link each attribute value of a row to that row's log template."""
        hierarchy_records = []
        for _, row in tqdm(
            huawei_df.iterrows(),
            desc="Adding huawei log hierarchy",
            total=len(huawei_df),
        ):
            log_template = str(row[self.config.relevant_log_column]).lower()
            for column in relevant_columns:
                row_value = (
                    column + "#" + str(row[column]).lower()
                    if len(str(row[column])) > 0
                    else ""
                )
                if len(row_value) == 0:
                    continue
                hierarchy_records.append(
                    {
                        "parent_id": row_value,
                        "parent_name": row_value.split("#")[1],
                        "child_id": self.config.relevant_log_column + "#" + log_template,
                        "child_name": log_template,
                    },
                )
        return (
            pd.DataFrame.from_records(hierarchy_records)
            .drop_duplicates()
            .reset_index(drop=True)
        )

    def _load_attribute_hierarchy(
        self, huawei_df: pd.DataFrame, relevant_columns: Set[str]
    ) -> pd.DataFrame:
        """Build root -> column -> value-component chains for each column."""
        records = []
        for column in relevant_columns:
            records.append(
                {
                    "parent_id": "root",
                    "parent_name": "root",
                    "child_id": column,
                    "child_name": column,
                }
            )
            values = set(
                str(x).lower()
                for x in huawei_df[column]
                .dropna()
                .astype(str)
                .replace(np.nan, "", regex=True)
                if len(str(x)) > 0 and str(x).lower() != "nan"
            )
            for value in tqdm(values, desc="Loading hierarchy for column " + column):
                records.extend(self._hierarchy_records_for_value(column, value))
        # Build the frame once; explicit columns keep the empty case shaped.
        return pd.DataFrame.from_records(
            records, columns=["parent_id", "child_id", "parent_name", "child_name"]
        )

    def _hierarchy_records_for_value(
        self, column: str, value: str
    ) -> List[Dict[str, str]]:
        """Produce the parent/child edge records for one column value."""
        hierarchy_elements = [column]
        if column == "Hostname":
            hierarchy_elements.append(value.rstrip("0123456789"))
        elif column == "http_status":
            # Group status codes by their class, e.g. 404 -> 400.
            hierarchy_elements.append(value[0] + "00")
        elif "cluster" in column:
            hierarchy_elements = hierarchy_elements + value.split()
        else:
            hierarchy_elements = hierarchy_elements + re.split(
                "[,._\-\*]+", value
            )
        hierarchy_elements = [
            x.strip() for x in hierarchy_elements if len(x.strip()) > 0
        ]
        # Avoid duplicating the full value as an intermediate node.
        if hierarchy_elements[len(hierarchy_elements) - 1] == value:
            hierarchy_elements = hierarchy_elements[: len(hierarchy_elements) - 1]
        hierarchy = [
            "->".join(hierarchy_elements[0:i])
            for i in range(1, len(hierarchy_elements) + 1)
        ]
        hierarchy.append(column + "#" + value)
        records = []
        parent_id = column
        parent_name = column
        for child_id in hierarchy:
            child_name = child_id.split("->")[-1]
            if not parent_id == child_id:
                records.append(
                    {
                        "parent_id": parent_id,
                        "parent_name": parent_name,
                        "child_id": child_id,
                        "child_name": child_name,
                    }
                )
            # Advance unconditionally, mirroring the original traversal.
            parent_id = child_id
            parent_name = child_name
        return records

    def _generate_hostname_hierarchy(self, hostname: str) -> List[str]:
        """Return the hostname with trailing digits stripped."""
        name = hostname.rstrip("0123456789")
        return [name]

    def _generate_http_hierarchy(self, http_code: str) -> List[str]:
        """Return the status-class bucket for a code, e.g. "4XX"."""
        return [http_code[0] + "XX"]
class ConcurrentAggregatedLogsCausalityPreprocessor(Preprocessor):
    """Derives a causality graph from consecutive Huawei log rows.

    Two attribute values are considered causally linked when the second
    appears in the row directly following the first with a relative
    frequency above ``config.min_causality``.
    """

    def __init__(
        self, config: HuaweiPreprocessorConfig,
    ):
        self.config = config

    def load_data(self) -> pd.DataFrame:
        """Return a parent/child DataFrame of causality links over threshold."""
        preprocessor = ConcurrentAggregatedLogsPreprocessor(self.config)
        huawei_df = preprocessor._load_log_only_data().fillna("")
        relevant_columns = set(
            [
                x
                for x in preprocessor.relevant_columns
                if not self.config.log_only_causality or "log" in x
            ]
        )
        counted_causality = self._generate_counted_causality(
            huawei_df, relevant_columns
        )
        causality_records = []
        for from_value, to_values in tqdm(
            counted_causality.items(),
            desc="Generating causality df from counted causality",
        ):
            total_to_counts = len(to_values)
            to_values_counter: Dict[str, int] = Counter(to_values)
            for to_value, to_count in to_values_counter.items():
                # Keep only links whose relative frequency clears the threshold.
                if to_count / total_to_counts > self.config.min_causality:
                    causality_records.append(
                        {
                            "parent_id": from_value,
                            "parent_name": from_value.split("#")[1],
                            "child_id": to_value,
                            "child_name": to_value.split("#")[1],
                        },
                    )
        return (
            pd.DataFrame.from_records(causality_records)
            .drop_duplicates()
            .reset_index(drop=True)
        )

    def _generate_counted_causality(
        self, df: pd.DataFrame, relevant_columns: Set[str]
    ) -> Dict[str, List[str]]:
        """Collect, per attribute value, all values seen in the next row.

        Repetitions are kept on purpose: ``load_data`` counts them with a
        Counter to compute relative link frequencies.
        """
        causality: Dict[str, List[str]] = {}
        previous_row = None
        for _, row in tqdm(
            df.iterrows(),
            desc="Generating counted causality for Huawei log data",
            total=len(df),
        ):
            if previous_row is None:
                previous_row = row
                continue
            for previous_column in relevant_columns:
                previous_column_value = (
                    previous_column + "#" + str(previous_row[previous_column]).lower()
                    if len(str(previous_row[previous_column])) > 0
                    else ""
                )
                if len(previous_column_value) < 1:
                    continue
                if previous_column_value not in causality:
                    causality[previous_column_value] = []
                for current_column in relevant_columns:
                    current_column_value = (
                        current_column + "#" + str(row[current_column]).lower()
                        if len(str(row[current_column])) > 0
                        else ""
                    )
                    if len(current_column_value) < 1:
                        continue
                    # The original branched on membership here but appended in
                    # both branches; a single unconditional append is equivalent.
                    causality[previous_column_value].append(current_column_value)
            previous_row = row
        return causality
| [
"pandas.read_pickle",
"pandas.DataFrame.from_records",
"re.split",
"logging.debug",
"pandas.read_csv",
"pathlib.Path",
"pandas.Grouper",
"pandas.merge",
"tqdm.tqdm",
"collections.Counter",
"pandas.concat",
"src.features.preprocessing.huawei_traces.HuaweiTracePreprocessor",
"logging.error",
... | [((488, 531), 'pathlib.Path', 'Path', (['"""data/logs_aggregated_concurrent.csv"""'], {}), "('data/logs_aggregated_concurrent.csv')\n", (492, 531), False, 'from pathlib import Path\n'), ((566, 602), 'pathlib.Path', 'Path', (['"""data/concurrent_data/traces/"""'], {}), "('data/concurrent_data/traces/')\n", (570, 602), False, 'from pathlib import Path\n'), ((630, 653), 'pathlib.Path', 'Path', (['"""data/huawei.pkl"""'], {}), "('data/huawei.pkl')\n", (634, 653), False, 'from pathlib import Path\n'), ((703, 838), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': "(lambda : ['Hostname', 'log_level', 'programname', 'python_module',\n 'http_status', 'http_method'])"}), "(default_factory=lambda : ['Hostname', 'log_level',\n 'programname', 'python_module', 'http_status', 'http_method'])\n", (720, 838), False, 'import dataclasses\n'), ((972, 1148), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': "(lambda : ['Hostname', 'trace_name', 'trace_service', 'python_module',\n 'trace_project', 'payload', 'etype', 'http_method', 'function'])"}), "(default_factory=lambda : ['Hostname', 'trace_name',\n 'trace_service', 'python_module', 'trace_project', 'payload', 'etype',\n 'http_method', 'function'])\n", (989, 1148), False, 'import dataclasses\n'), ((1735, 1781), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (1752, 1781), False, 'import dataclasses\n'), ((1815, 1861), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (1832, 1861), False, 'import dataclasses\n'), ((2183, 2223), 'pathlib.Path', 'Path', (['"""data/attention_log_templates.csv"""'], {}), "('data/attention_log_templates.csv')\n", (2187, 2223), False, 'from pathlib import Path\n'), ((5444, 5530), 'logging.info', 'logging.info', (['"""Trying to read full huawei_df from %s"""', 'self.config.final_log_file'], {}), "('Trying to read full 
huawei_df from %s', self.config.\n final_log_file)\n", (5456, 5530), False, 'import logging\n'), ((5720, 5762), 'pandas.read_pickle', 'pd.read_pickle', (['self.config.final_log_file'], {}), '(self.config.final_log_file)\n', (5734, 5762), True, 'import pandas as pd\n'), ((6145, 6212), 'logging.debug', 'logging.debug', (['"""Aggregating huawei data per %s"""', 'aggregation_column'], {}), "('Aggregating huawei data per %s', aggregation_column)\n", (6158, 6212), False, 'import logging\n'), ((8592, 8714), 'pandas.merge', 'pd.merge', (['max_timestamp_by_trace', 'min_timestamp_by_trace'], {'left_index': '(True)', 'right_index': '(True)', 'suffixes': "('_max', '_min')"}), "(max_timestamp_by_trace, min_timestamp_by_trace, left_index=True,\n right_index=True, suffixes=('_max', '_min'))\n", (8600, 8714), True, 'import pandas as pd\n'), ((9388, 9467), 'src.features.preprocessing.huawei_traces.HuaweiTracePreprocessor', 'HuaweiTracePreprocessor', ([], {'trace_base_directory': 'self.config.traces_root_directory'}), '(trace_base_directory=self.config.traces_root_directory)\n', (9411, 9467), False, 'from src.features.preprocessing.huawei_traces import HuaweiTracePreprocessor\n'), ((10217, 10277), 'pandas.to_datetime', 'pd.to_datetime', (['rel_df[self.config.log_datetime_column_name]'], {}), '(rel_df[self.config.log_datetime_column_name])\n', (10231, 10277), True, 'import pandas as pd\n'), ((13779, 13821), 'pandas.read_csv', 'pd.read_csv', (['self.config.log_template_file'], {}), '(self.config.log_template_file)\n', (13790, 13821), True, 'import pandas as pd\n'), ((14156, 14226), 'pandas.merge', 'pd.merge', (['log_df', 'precalculated_templates_df'], {'on': '"""Payload"""', 'how': '"""left"""'}), "(log_df, precalculated_templates_df, on='Payload', how='left')\n", (14164, 14226), True, 'import pandas as pd\n'), ((17673, 17727), 'logging.debug', 'logging.debug', (['"""Initializing HTTP Status descriptions"""'], {}), "('Initializing HTTP Status descriptions')\n", (17686, 17727), False, 
'import logging\n'), ((21176, 21252), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['parent_id', 'child_id', 'parent_name', 'child_name']"}), "(columns=['parent_id', 'child_id', 'parent_name', 'child_name'])\n", (21188, 21252), True, 'import pandas as pd\n'), ((13898, 13975), 'logging.error', 'logging.error', (['"""Invalid log template file - does not contain Payload column!"""'], {}), "('Invalid log template file - does not contain Payload column!')\n", (13911, 13975), False, 'import logging\n'), ((16092, 16154), 'tqdm.tqdm', 'tqdm', (['values'], {'desc': "('Loading descriptions for column ' + column)"}), "(values, desc='Loading descriptions for column ' + column)\n", (16096, 16154), False, 'from tqdm import tqdm\n'), ((21981, 22040), 'tqdm.tqdm', 'tqdm', (['values'], {'desc': "('Loading hierarchy for column ' + column)"}), "(values, desc='Loading hierarchy for column ' + column)\n", (21985, 22040), False, 'from tqdm import tqdm\n'), ((25250, 25268), 'collections.Counter', 'Counter', (['to_values'], {}), '(to_values)\n', (25257, 25268), False, 'from collections import Counter\n'), ((8049, 8111), 'pandas.concat', 'pd.concat', (['[log_df_with_trace_id, trace_df]'], {'ignore_index': '(True)'}), '([log_df_with_trace_id, trace_df], ignore_index=True)\n', (8058, 8111), True, 'import pandas as pd\n'), ((9255, 9276), 'pandas.concat', 'pd.concat', (['merged_dfs'], {}), '(merged_dfs)\n', (9264, 9276), True, 'import pandas as pd\n'), ((16895, 16941), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (['description_records'], {}), '(description_records)\n', (16920, 16941), True, 'import pandas as pd\n'), ((20906, 20950), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (['hierarchy_records'], {}), '(hierarchy_records)\n', (20931, 20950), True, 'import pandas as pd\n'), ((25798, 25842), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (['causality_records'], {}), '(causality_records)\n', (25823, 25842), True, 'import pandas as 
pd\n'), ((4054, 4128), 'pandas.Grouper', 'pd.Grouper', ([], {'key': '"""timestamp"""', 'freq': 'self.config.aggregate_per_time_frequency'}), "(key='timestamp', freq=self.config.aggregate_per_time_frequency)\n", (4064, 4128), True, 'import pandas as pd\n'), ((16556, 16587), 're.split', 're.split', (['"""[,._\\\\-\\\\*]+"""', 'value'], {}), "('[,._\\\\-\\\\*]+', value)\n", (16564, 16587), False, 'import re\n'), ((9630, 9674), 'pandas.read_csv', 'pd.read_csv', (['self.config.aggregated_log_file'], {}), '(self.config.aggregated_log_file)\n', (9641, 9674), True, 'import pandas as pd\n'), ((22514, 22545), 're.split', 're.split', (['"""[,._\\\\-\\\\*]+"""', 'value'], {}), "('[,._\\\\-\\\\*]+', value)\n", (22522, 22545), False, 'import re\n'), ((11035, 11120), 'pandas.merge', 'pd.merge', (['url_df', 'drain_result_df'], {'left_index': '(True)', 'right_index': '(True)', 'how': '"""left"""'}), "(url_df, drain_result_df, left_index=True, right_index=True, how='left'\n )\n", (11043, 11120), True, 'import pandas as pd\n'), ((12756, 12845), 'pandas.merge', 'pd.merge', (['all_logs_df', 'drain_result_df'], {'left_index': '(True)', 'right_index': '(True)', 'how': '"""left"""'}), "(all_logs_df, drain_result_df, left_index=True, right_index=True,\n how='left')\n", (12764, 12845), True, 'import pandas as pd\n')] |
#!/usr/bin/env python
#
# This script is meant to be run when new security hub events are added or we need
# to regenerate sample securityhub event files
#
import json
import os
import boto3
# To get all supported TTPs, it's easiest to fetch it from the AWS console:
# https://eu-west-1.console.aws.amazon.com/securityhub/home?region=eu-west-1#/findings?search=ProductName%3D%255Coperator%255C%253AEQUALS%255C%253ASecurity%2520Hub&groupbyfield=Type
# Maps a Security Hub ProductName to the finding Type values sampled by the
# download loop below; keys must match the ProductName filter exactly.
SUPPORTED_TTPS = {
    "Security Hub": [
        "Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark",
        "Software and Configuration Checks/Industry and Regulatory Standards/AWS-Foundational-Security-Best-Practices",
        "Effects/Data Exposure/AWS-Foundational-Security-Best-Practices",
    ],
    "Macie": [
        "Policy:IAMUser/S3BucketPublic",
        "Policy:IAMUser/S3BlockPublicAccessDisabled",
        "Policy:IAMUser/S3BucketSharedExternally",
        "Policy:IAMUser/S3BucketEncryptionDisabled",
    ],
    "IAM Access Analyzer": [
        "Software and Configuration Checks/AWS Security Best Practices/External Access Granted",
        "Effects/Data Exposure/External Access Granted",
    ],
    "Inspector": [
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Recognized port reachable from a Peered VPC",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Network exposure from the internet",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Recognized port with no listener reachable from internet",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Network exposure from a Peered VPC",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Recognized port reachable from internet",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Recognized port with listener reachable from a Peered VPC",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Recognized port with listener reachable from internet",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Recognized port with no listener reachable from a Peered VPC",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Unrecognized port with listener reachable from a Peered VPC",
        "Software and Configuration Checks/AWS Security Best Practices/Network Reachability - Unrecognized port with listener reachable from internet",
    ],
    "GuardDuty": [
        "TTPs/Command and Control/Backdoor:EC2-C&CActivity.B!DNS",
        "Software and Configuration Checks/Network Reachability/Recon:EC2-PortProbeUnprotectedPort",
        "TTPs/Initial Access/Recon:EC2-PortProbeUnprotectedPort",
        "TTPs/Defense Evasion/Stealth:IAMUser-CloudTrailLoggingDisabled",
        "Software and Configuration Checks/Policy:S3.BucketBlockPublicAccessDisabled",
        "Unusual Behaviors/User/UnauthorizedAccess:IAMUser-ConsoleLogin",
        "TTPs/Command and Control/Trojan:EC2-DGADomainRequest.B",
        "TTPs/Initial Access/UnauthorizedAccess:EC2-SSHBruteForce",
        "TTPs/Initial Access/Trojan:EC2-DriveBySourceTraffic!DNS",
        "Unusual Behaviors/User/Recon:IAMUser-UserPermissions",
        "TTPs/Command and Control/Trojan:EC2-BlackholeTraffic!DNS",
        "TTPs/Discovery/Recon:IAMUser-ResourcePermissions",
        "TTPs/Discovery/Recon:IAMUser-UserPermissions",
        "Unusual Behaviors/User/Recon:IAMUser-ResourcePermissions",
        "Effects/Data Exfiltration/Trojan:EC2-DropPoint!DNS",
        "Effects/Resource Consumption/CryptoCurrency:EC2-BitcoinTool.B!DNS",
        "TTPs/Command and Control/CryptoCurrency:EC2-BitcoinTool.B!DNS",
        "TTPs/Discovery/Recon:IAMUser-NetworkPermissions",
        "Unusual Behaviors/User/Recon:IAMUser-NetworkPermissions",
        "Effects/Data Exposure/Policy:S3-BucketBlockPublicAccessDisabled",
        "TTPs/Initial Access/Recon:EC2-Portscan",
        "Unusual Behaviors/VM/Behavior:EC2-NetworkPortUnusual",
        "TTPs/Command and Control/Trojan:EC2-DNSDataExfiltration!DNS",
        "TTPs/Discovery/Recon:EC2-Portscan",
        "TTPs/Command and Control/UnauthorizedAccess:EC2-TorIPCaller",
        "TTPs/Discovery/Recon:EC2-PortProbeUnprotectedPort",
        "TTPs/Persistence/Persistence:IAMUser-UserPermissions",
        "Software and Configuration Checks/Policy:S3.BucketAnonymousAccessGranted",
        "TTPs/Persistence/Persistence:IAMUser-NetworkPermissions",
        "Unusual Behaviors/User/Persistence:IAMUser-NetworkPermissions",
        "Unusual Behaviors/User/Persistence:IAMUser-UserPermissions",
        "Effects/Denial of Service/Backdoor:EC2-DenialOfService.UdpOnTcpPorts",
        "Effects/Resource Consumption/ResourceConsumption:IAMUser-ComputeResources",
        "Software and Configuration Checks/AWS Security Best Practices/Policy:IAMUser-RootCredentialUsage",
        "Unusual Behaviors/User/ResourceConsumption:IAMUser-ComputeResources",
        "Effects/Data Exfiltration/Trojan:EC2-DropPoint",
        "Effects/Data Exfiltration/UnauthorizedAccess:EC2-TorClient",
        "Effects/Data Exfiltration/UnauthorizedAccess:IAMUser-InstanceCredentialExfiltration",
        "Effects/Denial of Service/Backdoor:EC2-DenialOfService.Dns",
        "Effects/Denial of Service/Backdoor:EC2-DenialOfService.Tcp",
        "Effects/Denial of Service/Backdoor:EC2-DenialOfService.Udp",
        "Effects/Denial of Service/Backdoor:EC2-DenialOfService.UnusualProtocol",
        "Effects/Resource Consumption/CryptoCurrency:EC2-BitcoinTool.B",
        "Software and Configuration Checks/Network Reachability/Recon:EC2-PortProbeEMRUnprotectedPort",
        "Software and Configuration Checks/PrivilegeEscalation:IAMUser.AdministrativePermissions",
        "Software and Configuration Checks/Stealth:S3.ServerAccessLoggingDisabled",
        "Software and Configuration Checks/UnauthorizedAccess:EC2.MetadataDNSRebind",
        "TTPs/Command and Control/Backdoor:EC2-Spambot",
        "TTPs/Command and Control/CryptoCurrency:EC2-BitcoinTool.B",
        "TTPs/Command and Control/Trojan:EC2-BlackholeTraffic",
        "TTPs/Command and Control/Trojan:EC2-DGADomainRequest.C!DNS",
        "TTPs/Command and Control/Trojan:EC2-PhishingDomainRequest!DNS",
        "TTPs/Command and Control/UnauthorizedAccess:EC2-MaliciousIPCaller.Custom",
        "TTPs/Command and Control/UnauthorizedAccess:EC2-TorClient",
        "TTPs/Command and Control/UnauthorizedAccess:EC2-TorRelay",
        "TTPs/Command and Control/UnauthorizedAccess:IAMUser-TorIPCaller",
        "TTPs/Defense Evasion/Stealth:IAMUser-LoggingConfigurationModified",
        "TTPs/Defense Evasion/Stealth:IAMUser-PasswordPolicyChange",
        "TTPs/Discovery/PenTest:IAMUser-KaliLinux",
        "TTPs/Discovery/PenTest:IAMUser-ParrotLinux",
        "TTPs/Discovery/PenTest:IAMUser-PenTooLinux",
        "TTPs/Initial Access/Recon:EC2-PortProbeEMRUnprotectedPort",
        "TTPs/Initial Access/Recon:IAMUser-MaliciousIPCaller",
        "TTPs/Initial Access/Recon:IAMUser-MaliciousIPCaller.Custom",
        "TTPs/Initial Access/Recon:IAMUser-TorIPCaller",
        "TTPs/Initial Access/UnauthorizedAccess:EC2-RDPBruteForce",
        "TTPs/Persistence/Persistence:IAMUser-ResourcePermissions",
        "TTPs/UnauthorizedAccess:IAMUser-ConsoleLoginSuccess.B",
        "TTPs/UnauthorizedAccess:IAMUser-MaliciousIPCaller",
        "TTPs/UnauthorizedAccess:IAMUser-MaliciousIPCaller.Custom",
        "Unusual Behaviors/User/Persistence:IAMUser-ResourcePermissions",
        "Unusual Behaviors/User/Stealth:IAMUser-LoggingConfigurationModified",
        "Unusual Behaviors/VM/Backdoor:EC2-Spambot",
        "Unusual Behaviors/VM/Behavior:EC2-TrafficVolumeUnusual",
    ],
    "Systems Manager Patch Manager": [
        "Software & Configuration Checks/Patch Management/Compliance"
    ],
}
def download_seurityhub_findings(product_name, type, client=None, max_results=1):
    """Fetch the first Security Hub finding matching product and finding type.

    Pages through ``get_findings`` results and returns the first finding
    of the first non-empty page, or None when nothing matches.
    """
    kwargs = {
        "Filters": {
            "ProductName": [{"Value": product_name, "Comparison": "EQUALS"}],
            "Type": [{"Value": type, "Comparison": "EQUALS"}],
        },
        "MaxResults": max_results,
    }
    paginator = client.get_paginator("get_findings")
    for page in paginator.paginate(**kwargs):
        page_findings = page["Findings"]
        if page_findings:
            return page_findings[0]
    return None
# Download one sample finding per (product, type) pair and persist it under
# tests/data/events/<product>/<type>.json; stale files with no current
# finding are deleted so the fixture set mirrors what is actually available.
security_hub = boto3.client("securityhub", region_name="eu-west-1")
for product_name in SUPPORTED_TTPS:
    for ttp in SUPPORTED_TTPS[product_name]:
        print(f"Fetching {ttp} for {product_name}")
        # e.g. "Security Hub" -> "securityhub"; "/" in the type would break paths.
        normalized_product_name = product_name.replace(" ", "").lower()
        normalized_ttp = ttp.replace("/", "-")
        file = (
            "tests/data/events/{normalized_product_name}/{normalized_ttp}.json".format(
                normalized_product_name=normalized_product_name,
                normalized_ttp=normalized_ttp,
            )
        )
        finding = download_seurityhub_findings(
            product_name=product_name, type=ttp, client=security_hub
        )
        if finding:
            if not os.path.exists(os.path.dirname(file)):
                os.makedirs(os.path.dirname(file))
            print(f"Writing {file}")
            with open(file, "w") as f:
                # sort_keys keeps diffs stable across regeneration runs
                data = json.dumps(finding, sort_keys=True, indent=4)
                f.write(data)
        else:
            print(f"No finding available for {ttp} for {product_name}")
            # Remove a previously generated sample that no longer has a finding.
            if os.path.exists(file):
                os.remove(file)
| [
"os.path.exists",
"boto3.client",
"json.dumps",
"os.path.dirname",
"os.remove"
] | [((8706, 8758), 'boto3.client', 'boto3.client', (['"""securityhub"""'], {'region_name': '"""eu-west-1"""'}), "('securityhub', region_name='eu-west-1')\n", (8718, 8758), False, 'import boto3\n'), ((9789, 9809), 'os.path.exists', 'os.path.exists', (['file'], {}), '(file)\n', (9803, 9809), False, 'import os\n'), ((9612, 9657), 'json.dumps', 'json.dumps', (['finding'], {'sort_keys': '(True)', 'indent': '(4)'}), '(finding, sort_keys=True, indent=4)\n', (9622, 9657), False, 'import json\n'), ((9827, 9842), 'os.remove', 'os.remove', (['file'], {}), '(file)\n', (9836, 9842), False, 'import os\n'), ((9438, 9459), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (9453, 9459), False, 'import os\n'), ((9490, 9511), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (9505, 9511), False, 'import os\n')] |
# -*- coding: utf-8 -*-
import datetime
import time
import math
# Algorithm-backed assets this script can compute price feeds for.
ABA = ["HERTZ", "HERO"]
SECONDS_PER_DAY = 60 * 60 * 24
def _get_hertz_feed(reference_timestamp, current_timestamp, period_days, phase_days, reference_asset_value, amplitude):
    """Return the HERTZ feed value at `current_timestamp`.

    The value oscillates sinusoidally around `reference_asset_value`
    (e.g. 1.00) with relative amplitude `amplitude` (> 0 and < 1) and a
    wavelength of `period_days`, shifted by `phase_days` from the
    reference timestamp (the Bitshares 2.0 genesis block). The same
    formula can drive alternative HERTZ-style assets, but extreme
    amplitude/period choices create high volatility — tread carefully.
    """
    genesis = datetime.datetime.strptime(reference_timestamp, "%Y-%m-%dT%H:%M:%S").timestamp()
    period = SECONDS_PER_DAY * period_days
    phase = SECONDS_PER_DAY * phase_days
    elapsed = current_timestamp - (genesis + phase)
    # Fraction of the current cycle completed, mapped onto one sine period.
    cycle_fraction = (elapsed / period) % 1
    waveform = math.sin((cycle_fraction * period) * ((2 * math.pi) / period))
    return reference_asset_value + ((amplitude * reference_asset_value) * waveform)
def compute_hertz():
    """Return the current HERTZ feed value for the Bitshares 2.0 parameters."""
    reference_timestamp = "2015-10-13T14:12:24"  # Bitshares 2.0 genesis block timestamp
    now = datetime.datetime.now()  # evaluate the feed at the current moment
    amplitude = 0.14  # +-14% price fluctuation (about 2% per day)
    period_days = 28  # wavelength: days for one full sine cycle
    phase_days = 0.908056  # offset from genesis to the first Wednesday (peak day)
    reference_asset_value = 1.00  # $1.00 USD reference price
    return _get_hertz_feed(
        reference_timestamp,
        now.timestamp(),
        period_days,
        phase_days,
        reference_asset_value,
        amplitude,
    )
def compute_hero():
    """Return the current HERO value: $1 of 1913-12-23 grown at 5% per year."""
    inception = datetime.date(1913, 12, 23)
    elapsed_days = (datetime.date.today() - inception).days
    days_per_year = 365.2425  # Gregorian mean year length
    annual_growth = 1.05
    return annual_growth ** (elapsed_days / days_per_year)
| [
"datetime.datetime.strptime",
"datetime.datetime.now",
"datetime.date",
"datetime.date.today",
"math.sin"
] | [((965, 1092), 'math.sin', 'math.sin', (['((current_timestamp - (hz_reference_timestamp + hz_phase)) / hz_period % 1 *\n hz_period * (2 * math.pi / hz_period))'], {}), '((current_timestamp - (hz_reference_timestamp + hz_phase)) /\n hz_period % 1 * hz_period * (2 * math.pi / hz_period))\n', (973, 1092), False, 'import math\n'), ((1393, 1416), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1414, 1416), False, 'import datetime\n'), ((2119, 2146), 'datetime.date', 'datetime.date', (['(1913)', '(12)', '(23)'], {}), '(1913, 12, 23)\n', (2132, 2146), False, 'import datetime\n'), ((2171, 2192), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2190, 2192), False, 'import datetime\n'), ((722, 790), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['reference_timestamp', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(reference_timestamp, '%Y-%m-%dT%H:%M:%S')\n", (748, 790), False, 'import datetime\n')] |
# -*- coding: UTF-8 -*-
import json
import logging
import os
import re
import requests
import schedule
import sys
import threading
import time
import yaml
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('VegaOps2N9e')
# Python 2 only: reload(sys) restores sys.setdefaultencoding (removed by
# site.py) so implicit str/unicode conversions use UTF-8 in this script.
reload(sys)
sys.setdefaultencoding('utf8')
def _push_metrics(cfg, metrics):
    """POST metric records to the N9E transfer endpoint.

    cfg is the 'n9e' config section; must contain 'token' and url.base.
    Raises when the base URL is missing or the HTTP status is not
    200/201; an application-level 'err' in the response is only logged.
    """
    headers = {
        "X-User-Token": cfg.get('token'),
        "Content-Type": "Application/json"
    }
    url = cfg.get('url', {}).get('base')
    if not url:
        raise Exception("N9E URL could not be empty.")
    uri = url + '/api/transfer/push'
    resp = requests.post(uri, headers=headers, data=json.dumps(metrics))
    if resp.status_code not in [200, 201]:
        logger.error(resp.text)
        raise Exception("Bad request status[%s] for "
                        "%s" % (resp.status_code, uri))
    cont = resp.json()
    # HTTP succeeded but the server reported an application-level error.
    if cont.get('err'):
        logger.error(resp.text)
def _register_resource(cfg, resource):
    """POST resource records to the N9E RDB registration endpoint.

    cfg is the 'n9e' config section; must contain 'token' and url.rdb.
    Raises when the RDB URL is missing or the HTTP status is not
    200/201; an application-level 'err' in the response is only logged.
    """
    headers = {
        "X-User-Token": cfg.get('token'),
        "Content-Type": "Application/json"
    }
    url = cfg.get('url', {}).get('rdb')
    if not url:
        raise Exception("N9E URL could not be empty.")
    uri = url + '/v1/rdb/resources/register'
    resp = requests.post(uri, headers=headers, data=json.dumps(resource))
    if resp.status_code not in [200, 201]:
        logger.error(resp.text)
        raise Exception("Bad request status[%s] for "
                        "%s" % (resp.status_code, uri))
    cont = resp.json()
    # HTTP succeeded but the server reported an application-level error.
    if cont.get('err'):
        logger.error(resp.text)
def _build_item(item, key):
    """Resolve a target spec value: 'vm.<field>' reads field from item, else literal.

    Non-string values pass through unchanged. (Python 2: both str and
    unicode count as strings.)
    """
    if not isinstance(key, (str, unicode)):
        return key
    return item.get(key[3:]) if key.startswith('vm.') else key
def _build_metrics(res, target):
    """Map raw result items onto N9E metric dicts according to the target spec.

    Dict/list-valued spec entries are resolved per inner key and
    serialized to a JSON string; scalar entries are resolved directly.
    """
    metrics = []
    for item in res:
        metric = {}
        for key, spec in target.items():
            if isinstance(spec, (dict, list)):
                nested = {}
                for inner_key in spec:
                    nested[inner_key] = _build_item(item, spec[inner_key])
                metric[key] = json.dumps(nested)
            else:
                metric[key] = _build_item(item, spec)
        metrics.append(metric)
    return metrics
def _build_resources(res, target):
    """Map raw result items onto N9E resource dicts according to the target spec.

    The transformation is identical to _build_metrics (the two functions
    were duplicated line for line), so delegate to keep a single
    implementation.
    """
    return _build_metrics(res, target)
def _job(n9e, polling):
    """Dispatch one polling job to the resource or metric handler.

    Unknown job types are silently ignored; the default is 'resource'.
    """
    job_type = polling.get('type', 'resource')
    if job_type == 'resource':
        _job_resource(n9e, polling)
    elif job_type == 'metric':
        _job_metric(n9e, polling)
def _job_metric(n9e, polling):
    """Run one metric-collection cycle for every region of a polling entry.

    For each region: writes a vegaops task YAML, invokes the vegaops
    binary, parses its out.yaml, and pushes each successful list-typed
    node result to N9E as metrics. Returns early when the binary
    produced no (or unparsable) output.
    """
    if not os.path.exists('./tasks'):
        os.system('mkdir -p ./tasks')
    regions = polling.get('regions', [])
    for region in regions:
        task = "%s_%s" % (polling.get('task'), region.get('name'))
        logger.info("Start to run task: %s" % task)
        task_file = './tasks/Task-%s.yaml' % task
        output_dir = './tasks/%s' % task
        task_d = {
            "componentId": task,
            "credentials": polling.get('credentials', {}),
            "vendor": polling.get('vendor'),
            "version": polling.get('version'),
            "nodes": polling.get('nodes', [])
        }
        task_d['credentials']['regionId'] = region.get('name')
        # NOTE(review): unlike _job_resource, the task file is rewritten on
        # every run; fd is never closed explicitly and a write failure is
        # only logged before continuing — confirm this is intentional.
        try:
            fd = open(task_file, "w")
            yaml.dump(task_d, fd)
        except Exception as e:
            logger.error("Failed to create task file %s" % task)
        if not os.path.exists(output_dir):
            os.system('mkdir %s' % output_dir)
        os.system('/opt/vegaops/bin/vegaops %s %s' % (task_file, output_dir))
        output = '%s/out.yaml' % output_dir
        if not os.path.isfile(output):
            logger.error("Could not find output file %s" % output)
            return
        try:
            out = yaml.safe_load(open(output, 'r').read())
        except Exception as e:
            logger.error("Failed to load output as %s" % e)
            return
        for node in polling.get('nodes', []):
            target = node.get('target')
            component = node.get('componentId')
            if component not in out:
                continue
            if not out[component].get('success'):
                continue
            # resultType looks like "list:<key>"; the payload list lives
            # under <key> in the component's result.
            dt = out[component].get('resultType')
            if not dt.startswith('list:'):
                continue
            metrics = _build_metrics(
                out[component].get(dt[5:], []), target)
            if not len(metrics):
                continue
            _push_metrics(n9e, metrics)
def _job_resource(n9e, polling):
    """Run one resource-registration cycle for every region of a polling entry.

    For each region: writes a vegaops task YAML (only if it does not
    already exist), invokes the vegaops binary, parses its out.yaml, and
    registers each successful list-typed node result with N9E's RDB.
    Returns early when the binary produced no (or unparsable) output.
    """
    if not os.path.exists('./tasks'):
        os.system('mkdir -p ./tasks')
    regions = polling.get('regions', [])
    for region in regions:
        task = "%s_%s" % (polling.get('task'), region.get('name'))
        logger.info("Start to run task: %s" % task)
        task_file = './tasks/Task-%s.yaml' % task
        output_dir = './tasks/%s' % task
        task_d = {
            "componentId": task,
            "credentials": polling.get('credentials', {}),
            "vendor": polling.get('vendor'),
            "version": polling.get('version'),
            "nodes": polling.get('nodes', [])
        }
        task_d['credentials']['regionId'] = region.get('name')
        # Task file is reused across runs (written only once); fd is never
        # closed explicitly and a write failure is only logged.
        if not os.path.isfile(task_file):
            try:
                fd = open(task_file, "w")
                yaml.dump(task_d, fd)
            except Exception as e:
                logger.error("Failed to create task file %s" % task)
        if not os.path.exists(output_dir):
            os.system('mkdir %s' % output_dir)
        os.system('/opt/vegaops/bin/vegaops %s %s' % (task_file, output_dir))
        output = '%s/out.yaml' % output_dir
        if not os.path.isfile(output):
            logger.error("Could not find output file %s" % output)
            return
        try:
            out = yaml.safe_load(open(output, 'r').read())
        except Exception as e:
            logger.error("Failed to load output as %s" % e)
            return
        for node in polling.get('nodes', []):
            target = node.get('target')
            component = node.get('componentId')
            if component not in out:
                continue
            if not out[component].get('success'):
                continue
            # resultType looks like "list:<key>"; the payload list lives
            # under <key> in the component's result.
            dt = out[component].get('resultType')
            if not dt.startswith('list:'):
                continue
            resources = _build_resources(
                out[component].get(dt[5:], []), target)
            if not len(resources):
                continue
            _register_resource(n9e, resources)
def _run_threaded(cfg):
job_thread = threading.Thread(
target=cfg['func'], args=(cfg['n9e'], cfg['job']))
job_thread.start()
def _load_jobs(config):
    """Register one scheduled, threaded _job per polling entry in config.

    Each job fires every polling['interval'] seconds (default 1800).
    """
    for polling in config.get('pollings', []):
        cfg = {
            'n9e': config.get('n9e', {}),
            'job': polling,
            'func': _job
        }
        interval = polling.get('interval', 1800)
        schedule.every(interval).seconds.do(_run_threaded, cfg)
def cron_job(config):
    """Register all polling jobs, then run the scheduler loop forever."""
    _load_jobs(config)
    while True:
        schedule.run_pending()
        time.sleep(1)
def once_job(config):
    """One-shot execution mode; not implemented yet (accepted for CLI parity)."""
    pass
def main():
    """CLI entry point: load a YAML config and run jobs in cron or once mode.

    Usage: script [config_path] [cron|once]. Defaults to ./config.yaml
    and cron mode. Exits with status 1 on a missing config file or an
    unknown job type; re-raises when the config cannot be parsed.
    """
    argv = sys.argv
    config_path = "./config.yaml"
    _type = 'cron'
    if len(argv) <= 1:
        logger.info("Use %s as config file" % config_path)
    else:
        config_path = argv[1]
    if not os.path.isfile(config_path):
        logger.error("Could not find file %s" % config_path)
        # Bug fix: the os module has no exit(); os.exit raised
        # AttributeError here. sys.exit is the correct call.
        sys.exit(1)
    if len(argv) >= 3:
        _type = argv[2]
    try:
        config = yaml.safe_load(open(config_path, 'r').read())
    except Exception as e:
        logger.error("Failed to load config file as "
                     "error %s" % e)
        raise e
    if _type == 'cron':
        cron_job(config)
    elif _type == 'once':
        once_job(config)
    else:
        logger.error("Bad job type %s, only support "
                     "cron, once job" % _type)
        sys.exit(1)
if __name__ == "__main__":
main()
| [
"logging.basicConfig",
"logging.getLogger",
"os.path.exists",
"sys.setdefaultencoding",
"yaml.dump",
"schedule.run_pending",
"json.dumps",
"time.sleep",
"os.exit",
"os.path.isfile",
"threading.Thread",
"os.system"
] | [((157, 196), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (176, 196), False, 'import logging\n'), ((206, 238), 'logging.getLogger', 'logging.getLogger', (['"""VegaOps2N9e"""'], {}), "('VegaOps2N9e')\n", (223, 238), False, 'import logging\n'), ((252, 282), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf8"""'], {}), "('utf8')\n", (274, 282), False, 'import sys\n'), ((7120, 7187), 'threading.Thread', 'threading.Thread', ([], {'target': "cfg['func']", 'args': "(cfg['n9e'], cfg['job'])"}), "(target=cfg['func'], args=(cfg['n9e'], cfg['job']))\n", (7136, 7187), False, 'import threading\n'), ((3056, 3081), 'os.path.exists', 'os.path.exists', (['"""./tasks"""'], {}), "('./tasks')\n", (3070, 3081), False, 'import os\n'), ((3091, 3120), 'os.system', 'os.system', (['"""mkdir -p ./tasks"""'], {}), "('mkdir -p ./tasks')\n", (3100, 3120), False, 'import os\n'), ((4000, 4069), 'os.system', 'os.system', (["('/opt/vegaops/bin/vegaops %s %s' % (task_file, output_dir))"], {}), "('/opt/vegaops/bin/vegaops %s %s' % (task_file, output_dir))\n", (4009, 4069), False, 'import os\n'), ((5052, 5077), 'os.path.exists', 'os.path.exists', (['"""./tasks"""'], {}), "('./tasks')\n", (5066, 5077), False, 'import os\n'), ((5087, 5116), 'os.system', 'os.system', (['"""mkdir -p ./tasks"""'], {}), "('mkdir -p ./tasks')\n", (5096, 5116), False, 'import os\n'), ((6058, 6127), 'os.system', 'os.system', (["('/opt/vegaops/bin/vegaops %s %s' % (task_file, output_dir))"], {}), "('/opt/vegaops/bin/vegaops %s %s' % (task_file, output_dir))\n", (6067, 6127), False, 'import os\n'), ((7650, 7672), 'schedule.run_pending', 'schedule.run_pending', ([], {}), '()\n', (7670, 7672), False, 'import schedule\n'), ((7681, 7694), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7691, 7694), False, 'import time\n'), ((7948, 7975), 'os.path.isfile', 'os.path.isfile', (['config_path'], {}), '(config_path)\n', (7962, 7975), False, 'import os\n'), 
((8046, 8056), 'os.exit', 'os.exit', (['(1)'], {}), '(1)\n', (8053, 8056), False, 'import os\n'), ((626, 645), 'json.dumps', 'json.dumps', (['metrics'], {}), '(metrics)\n', (636, 645), False, 'import json\n'), ((1267, 1287), 'json.dumps', 'json.dumps', (['resource'], {}), '(resource)\n', (1277, 1287), False, 'import json\n'), ((3784, 3805), 'yaml.dump', 'yaml.dump', (['task_d', 'fd'], {}), '(task_d, fd)\n', (3793, 3805), False, 'import yaml\n'), ((3917, 3943), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (3931, 3943), False, 'import os\n'), ((3957, 3991), 'os.system', 'os.system', (["('mkdir %s' % output_dir)"], {}), "('mkdir %s' % output_dir)\n", (3966, 3991), False, 'import os\n'), ((4129, 4151), 'os.path.isfile', 'os.path.isfile', (['output'], {}), '(output)\n', (4143, 4151), False, 'import os\n'), ((5732, 5757), 'os.path.isfile', 'os.path.isfile', (['task_file'], {}), '(task_file)\n', (5746, 5757), False, 'import os\n'), ((5975, 6001), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (5989, 6001), False, 'import os\n'), ((6015, 6049), 'os.system', 'os.system', (["('mkdir %s' % output_dir)"], {}), "('mkdir %s' % output_dir)\n", (6024, 6049), False, 'import os\n'), ((6187, 6209), 'os.path.isfile', 'os.path.isfile', (['output'], {}), '(output)\n', (6201, 6209), False, 'import os\n'), ((8530, 8540), 'os.exit', 'os.exit', (['(1)'], {}), '(1)\n', (8537, 8540), False, 'import os\n'), ((2087, 2102), 'json.dumps', 'json.dumps', (['tmp'], {}), '(tmp)\n', (2097, 2102), False, 'import json\n'), ((2629, 2644), 'json.dumps', 'json.dumps', (['tmp'], {}), '(tmp)\n', (2639, 2644), False, 'import json\n'), ((5834, 5855), 'yaml.dump', 'yaml.dump', (['task_d', 'fd'], {}), '(task_d, fd)\n', (5843, 5855), False, 'import yaml\n')] |
import numpy
from AnyQt.QtGui import QColor, QRadialGradient, QPainterPathStroker
def saturated(color, factor=150):
    """Return a copy of *color* with its HSV saturation scaled by *factor* percent.

    The scaled saturation is clamped to [0, 1] and the result is converted
    back to the spec of the input color.
    """
    hue = color.hsvHueF()
    value = color.valueF()
    alpha = color.alphaF()
    saturation = min(max(color.hsvSaturationF() * factor / 100.0, 0.0), 1.0)
    return QColor.fromHsvF(hue, saturation, value, alpha).convertTo(color.spec())
def sample_path(path, num=10):
    """Sample `num` equidistant points from the `path` (`QPainterPath`).

    Both end points (0% and 100%) are included in the sample.
    """
    percentages = numpy.linspace(0.0, 1.0, num, endpoint=True)
    return [path.pointAtPercent(float(pct)) for pct in percentages]
def radial_gradient(color, color_light=50):
    """
    radial_gradient(QColor, QColor)
    radial_gradient(QColor, int)
    Build a radial gradient fading from a light center color to `color`.
    `color_light` may be a QColor, or an int which is passed to
    `saturated(color, color_light)` to derive the light color.
    """
    if not isinstance(color_light, QColor):
        color_light = saturated(color, color_light)
    gradient = QRadialGradient(0.5, 0.5, 0.5)
    for stop, stop_color in ((0.0, color_light), (0.5, color_light), (1.0, color)):
        gradient.setColorAt(stop, stop_color)
    gradient.setCoordinateMode(QRadialGradient.ObjectBoundingMode)
    return gradient
def toGraphicsObjectIfPossible(item):
    """Return `item.toGraphicsObject()` when it yields an object, else `item`.

    Intended as a workaround for a problem with older versions of PyQt
    (< 4.9), where methods returning 'QGraphicsItem *' lose the type of
    QGraphicsObject subclasses and return generic QGraphicsItem wrappers.
    ``None`` input is passed through unchanged.
    """
    if item is None:
        return None
    graphics_object = item.toGraphicsObject()
    if graphics_object is None:
        return item
    return graphics_object
def linspace(count):
    """Return `count` evenly spaced floats strictly inside the 0..1 interval.

    Both end points are excluded, e.g. `linspace(3) == [0.25, 0.5, 0.75]`.
    """
    interior = numpy.linspace(0.0, 1.0, count + 2, endpoint=True)[1:-1]
    return [float(value) for value in interior]
def uniform_linear_layout(points):
    """Lay out `points` (floats in the 0..1 range) on a uniform linear space
    while preserving their existing sorting order.
    """
    sort_order = numpy.argsort(points)
    uniform = numpy.asarray(linspace(len(points)))
    # Inverting the permutation maps each original point to its rank slot.
    placement = invert_permutation_indices(sort_order)
    return uniform[placement].tolist()
def invert_permutation_indices(indices):
    """Return the inverse of the permutation given by `indices`."""
    inverse = [0] * len(indices)
    for position, index in enumerate(indices):
        inverse[index] = position
    return inverse
def stroke_path(path, pen):
    """Create a QPainterPath stroke from the `path` drawn with `pen`."""
    outline = QPainterPathStroker()
    # The stroker requires a strictly positive width.
    outline.setWidth(max(pen.widthF(), 1e-9))
    outline.setCapStyle(pen.capStyle())
    outline.setJoinStyle(pen.joinStyle())
    outline.setMiterLimit(pen.miterLimit())
    return outline.createStroke(path)
| [
"AnyQt.QtGui.QPainterPathStroker",
"AnyQt.QtGui.QColor.fromHsvF",
"numpy.argsort",
"numpy.linspace",
"AnyQt.QtGui.QRadialGradient"
] | [((507, 551), 'numpy.linspace', 'numpy.linspace', (['(0.0)', '(1.0)', 'num'], {'endpoint': '(True)'}), '(0.0, 1.0, num, endpoint=True)\n', (521, 551), False, 'import numpy\n'), ((1030, 1060), 'AnyQt.QtGui.QRadialGradient', 'QRadialGradient', (['(0.5)', '(0.5)', '(0.5)'], {}), '(0.5, 0.5, 0.5)\n', (1045, 1060), False, 'from AnyQt.QtGui import QColor, QRadialGradient, QPainterPathStroker\n'), ((2186, 2207), 'numpy.argsort', 'numpy.argsort', (['points'], {}), '(points)\n', (2199, 2207), False, 'import numpy\n'), ((2812, 2833), 'AnyQt.QtGui.QPainterPathStroker', 'QPainterPathStroker', ([], {}), '()\n', (2831, 2833), False, 'from AnyQt.QtGui import QColor, QRadialGradient, QPainterPathStroker\n'), ((329, 356), 'AnyQt.QtGui.QColor.fromHsvF', 'QColor.fromHsvF', (['h', 's', 'v', 'a'], {}), '(h, s, v, a)\n', (344, 356), False, 'from AnyQt.QtGui import QColor, QRadialGradient, QPainterPathStroker\n'), ((1934, 1984), 'numpy.linspace', 'numpy.linspace', (['(0.0)', '(1.0)', '(count + 2)'], {'endpoint': '(True)'}), '(0.0, 1.0, count + 2, endpoint=True)\n', (1948, 1984), False, 'import numpy\n')] |
#!/usr/bin/env python3
# coding: utf8
__author__ = "<NAME>"
# Librería bot telegram
# doc: https://github.com/python-telegram-bot/python-telegram-bot
from telegram import InlineKeyboardButton, InlineKeyboardMarkup
from telegram.ext import Updater, CommandHandler, MessageHandler, CallbackQueryHandler, Filters
import sys
sys.path.append('../')
class TelegramApi:
    """Thin wrapper around python-telegram-bot's ``Updater``.

    ``config`` is a dict with optional keys (each may be ``None``):
      * ``commands``:   list of ``{'command': str, 'function': callable}``
      * ``buttons``:    callback invoked for inline-keyboard button presses
      * ``nocommands``: list of ``{'filter': 'text'|'photo', 'function': callable}``
    """
    def __init__(self, token, config):
        """Store the bot token and the handler configuration."""
        self.token = token
        self.config = config

    def startBot(self):
        """Register all configured handlers and poll until interrupted."""
        # Create the Updater and pass it your bot's token.
        updater = Updater(self.token)
        dispatcher = updater.dispatcher
        # Command handlers (e.g. /start).
        if self.config['commands'] is not None:
            for command in self.config['commands']:
                dispatcher.add_handler(
                    CommandHandler(command['command'], command['function']))
        # Inline keyboard button presses.
        if self.config['buttons'] is not None:
            dispatcher.add_handler(CallbackQueryHandler(self.config['buttons']))
        # Plain (non-command) message handlers.
        if self.config['nocommands'] is not None:
            filters_by_name = {'text': Filters.text, 'photo': Filters.photo}
            for nocommand in self.config['nocommands']:
                # Unknown filter names fall back to text, as before.
                msg_filter = filters_by_name.get(nocommand['filter'], Filters.text)
                dispatcher.add_handler(
                    MessageHandler(msg_filter, nocommand['function']))
        # Log all errors raised while handling updates.
        dispatcher.add_error_handler(self.__error)
        # Start the Bot.
        updater.start_polling()
        # Run the bot until the user presses Ctrl-C or the process receives
        # SIGINT, SIGTERM or SIGABRT.
        updater.idle()

    def __error(self, bot, update, error):
        """Swallow handler errors; hook point for real error logging."""
        pass
| [
"telegram.ext.MessageHandler",
"telegram.ext.CallbackQueryHandler",
"sys.path.append",
"telegram.ext.Updater",
"telegram.ext.CommandHandler"
] | [((323, 345), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (338, 345), False, 'import sys\n'), ((564, 583), 'telegram.ext.Updater', 'Updater', (['self.token'], {}), '(self.token)\n', (571, 583), False, 'from telegram.ext import Updater, CommandHandler, MessageHandler, CallbackQueryHandler, Filters\n'), ((932, 976), 'telegram.ext.CallbackQueryHandler', 'CallbackQueryHandler', (["self.config['buttons']"], {}), "(self.config['buttons'])\n", (952, 976), False, 'from telegram.ext import Updater, CommandHandler, MessageHandler, CallbackQueryHandler, Filters\n'), ((759, 814), 'telegram.ext.CommandHandler', 'CommandHandler', (["command['command']", "command['function']"], {}), "(command['command'], command['function'])\n", (773, 814), False, 'from telegram.ext import Updater, CommandHandler, MessageHandler, CallbackQueryHandler, Filters\n'), ((1439, 1484), 'telegram.ext.MessageHandler', 'MessageHandler', (['filter', "nocommand['function']"], {}), "(filter, nocommand['function'])\n", (1453, 1484), False, 'from telegram.ext import Updater, CommandHandler, MessageHandler, CallbackQueryHandler, Filters\n')] |
# [START maps_http_places_textsearch_incomplete_address]
import requests

# Places API Text Search request for the ambiguous query "123 main street".
# Replace YOUR_API_KEY with a valid Google Maps Platform API key before running.
url = "https://maps.googleapis.com/maps/api/place/textsearch/json?query=123%20main%20street&key=YOUR_API_KEY"

payload={}
headers = {}

# Plain GET; the response body is the JSON search result.
response = requests.request("GET", url, headers=headers, data=payload)

print(response.text)
# [END maps_http_places_textsearch_incomplete_address] | [
"requests.request"
] | [((221, 280), 'requests.request', 'requests.request', (['"""GET"""', 'url'], {'headers': 'headers', 'data': 'payload'}), "('GET', url, headers=headers, data=payload)\n", (237, 280), False, 'import requests\n')] |
import six
import unittest2
from gcloud.datastore import helpers, key, set_default_dataset_id
from gcloudorm import model, properties
class TestProperties(unittest2.TestCase):
    """Exercise gcloudorm property descriptors against a stub dataset.

    Each test declares a throwaway ``TestModel`` subclass so the property
    under test is checked both through attribute access (``m.attr``) and
    through item access (``m['attr']``).
    """
    # Dataset id installed by set_default_dataset_id() in setUp().
    _DATASET_ID = 'DATASET'
    def setUp(self):
        """Point the gcloud datastore layer at the stub dataset."""
        set_default_dataset_id(self._DATASET_ID)
    def testBooleanProperty(self):
        """BooleanProperty: None when unset, settable, honours default."""
        class TestModel(model.Model):
            test_bool = properties.BooleanProperty()
        m = TestModel()
        self.assertEqual(m.test_bool, None)
        self.assertEqual(m['test_bool'], None)
        m = TestModel(test_bool=False)
        self.assertEqual(m.test_bool, False)
        self.assertEqual(m['test_bool'], False)
        m.test_bool = True
        self.assertEqual(m.test_bool, True)
        self.assertEqual(m['test_bool'], True)
        # Re-declare the model with a default and check it is applied.
        class TestModel(model.Model):
            test_bool = properties.BooleanProperty(default=True)
        m = TestModel()
        self.assertEqual(m.test_bool, True)
        self.assertEqual(m['test_bool'], True)
    def testIdProperty(self):
        """IdProperty: string-valued and identical to the entity key id/name."""
        class TestModel(model.Model):
            test_id = properties.IdProperty()
        m = TestModel()
        self.assertIsInstance(m.test_id, six.string_types)
        self.assertIs(m.test_id, m.key.id_or_name)
    def testIntegerProperty(self):
        """IntegerProperty: None when unset, honours default, settable."""
        class TestModel(model.Model):
            test_int = properties.IntegerProperty()
        m = TestModel()
        self.assertEqual(m.test_int, None)
        self.assertEqual(m['test_int'], None)
        class TestModel(model.Model):
            test_int = properties.IntegerProperty(default=3)
        m = TestModel()
        self.assertEqual(m['test_int'], 3)
        m.test_int = 4
        self.assertEqual(m.test_int, 4)
        self.assertEqual(m['test_int'], 4)
    def testFloatproperty(self):
        """FloatProperty: None when unset, honours default, settable."""
        class TestModel(model.Model):
            test_float = properties.FloatProperty()
        m = TestModel()
        self.assertEqual(m.test_float, None)
        self.assertEqual(m['test_float'], None)
        class TestModel(model.Model):
            test_float = properties.FloatProperty(default=0.1)
        m = TestModel()
        self.assertEqual(m['test_float'], 0.1)
        m.test_float = 0.2
        self.assertEqual(m['test_float'], 0.2)
    def testTextProperty(self):
        """TextProperty: None when unset; default may be a value or a callable."""
        class TestModel(model.Model):
            test_text = properties.TextProperty()
        m = TestModel()
        self.assertEqual(m.test_text, None)
        class TestModel(model.Model):
            test_text = properties.TextProperty(default="")
        m = TestModel()
        self.assertEqual(m['test_text'], "")
        # A callable default should be invoked to produce the value.
        class TestModel(model.Model):
            test_text = properties.TextProperty(default=lambda: "")
        m = TestModel()
        self.assertEqual(m['test_text'], "")
    def testPickleProperty(self):
        """PickleProperty: round-trips arbitrary picklable values."""
        class TestModel(model.Model):
            test_pickle = properties.PickleProperty()
        m = TestModel()
        self.assertEqual(m.test_pickle, None)
        m = TestModel(test_pickle={"123": "456"})
        self.assertEqual(m.test_pickle, {"123": "456"})
        m.test_pickle = {'456': '789'}
        self.assertEqual(m.test_pickle, {'456': '789'})
    def testJsonProperty(self):
        """JsonProperty: round-trips JSON-serializable values."""
        # NOTE(review): the field is named test_pickle although it is a
        # JsonProperty -- likely a copy/paste leftover from the test above.
        class TestModel(model.Model):
            test_pickle = properties.JsonProperty()
        m = TestModel()
        self.assertEqual(m.test_pickle, None)
        m = TestModel(test_pickle={"123": "456"})
        self.assertEqual(m.test_pickle, {"123": "456"})
        m.test_pickle = {'456': '789'}
        self.assertEqual(m.test_pickle, {'456': '789'})
    def testDataTimeProperty(self):
        """DateTimeProperty: None when unset, stores datetime values."""
        import datetime
        class TestModel(model.Model):
            test_datetime = properties.DateTimeProperty()
        m = TestModel()
        self.assertEqual(m.test_datetime, None)
        utcnow = datetime.datetime.utcnow()
        m.test_datetime = utcnow
        self.assertEqual(m.test_datetime, utcnow)
    def testDateProperty(self):
        """DateProperty: None when unset, stores date values."""
        import datetime
        class TestModel(model.Model):
            test_date = properties.DateProperty()
        m = TestModel()
        self.assertEqual(m.test_date, None)
        today = datetime.date.today()
        m.test_date = today
        self.assertEqual(m.test_date, today)
    def testTimeProperty(self):
        """TimeProperty: None when unset, stores time values."""
        import datetime
        class TestModel(model.Model):
            test_time = properties.TimeProperty()
        m = TestModel()
        self.assertEqual(m.test_time, None)
        t = datetime.time()
        m.test_time = t
        self.assertEqual(m.test_time, t)
| [
"gcloudorm.properties.JsonProperty",
"datetime.time",
"gcloudorm.properties.FloatProperty",
"datetime.datetime.utcnow",
"gcloudorm.properties.PickleProperty",
"gcloudorm.properties.DateProperty",
"gcloudorm.properties.TimeProperty",
"gcloudorm.properties.IntegerProperty",
"gcloudorm.properties.IdPro... | [((238, 278), 'gcloud.datastore.set_default_dataset_id', 'set_default_dataset_id', (['self._DATASET_ID'], {}), '(self._DATASET_ID)\n', (260, 278), False, 'from gcloud.datastore import helpers, key, set_default_dataset_id\n'), ((3831, 3857), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3855, 3857), False, 'import datetime\n'), ((4173, 4194), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (4192, 4194), False, 'import datetime\n'), ((4496, 4511), 'datetime.time', 'datetime.time', ([], {}), '()\n', (4509, 4511), False, 'import datetime\n'), ((377, 405), 'gcloudorm.properties.BooleanProperty', 'properties.BooleanProperty', ([], {}), '()\n', (403, 405), False, 'from gcloudorm import model, properties\n'), ((837, 877), 'gcloudorm.properties.BooleanProperty', 'properties.BooleanProperty', ([], {'default': '(True)'}), '(default=True)\n', (863, 877), False, 'from gcloudorm import model, properties\n'), ((1085, 1108), 'gcloudorm.properties.IdProperty', 'properties.IdProperty', ([], {}), '()\n', (1106, 1108), False, 'from gcloudorm import model, properties\n'), ((1341, 1369), 'gcloudorm.properties.IntegerProperty', 'properties.IntegerProperty', ([], {}), '()\n', (1367, 1369), False, 'from gcloudorm import model, properties\n'), ((1546, 1583), 'gcloudorm.properties.IntegerProperty', 'properties.IntegerProperty', ([], {'default': '(3)'}), '(default=3)\n', (1572, 1583), False, 'from gcloudorm import model, properties\n'), ((1856, 1882), 'gcloudorm.properties.FloatProperty', 'properties.FloatProperty', ([], {}), '()\n', (1880, 1882), False, 'from gcloudorm import model, properties\n'), ((2065, 2102), 'gcloudorm.properties.FloatProperty', 'properties.FloatProperty', ([], {'default': '(0.1)'}), '(default=0.1)\n', (2089, 2102), False, 'from gcloudorm import model, properties\n'), ((2345, 2370), 'gcloudorm.properties.TextProperty', 'properties.TextProperty', ([], {}), '()\n', (2368, 2370), False, 'from 
gcloudorm import model, properties\n'), ((2503, 2538), 'gcloudorm.properties.TextProperty', 'properties.TextProperty', ([], {'default': '""""""'}), "(default='')\n", (2526, 2538), False, 'from gcloudorm import model, properties\n'), ((2672, 2716), 'gcloudorm.properties.TextProperty', 'properties.TextProperty', ([], {'default': "(lambda : '')"}), "(default=lambda : '')\n", (2695, 2716), False, 'from gcloudorm import model, properties\n'), ((2885, 2912), 'gcloudorm.properties.PickleProperty', 'properties.PickleProperty', ([], {}), '()\n', (2910, 2912), False, 'from gcloudorm import model, properties\n'), ((3283, 3308), 'gcloudorm.properties.JsonProperty', 'properties.JsonProperty', ([], {}), '()\n', (3306, 3308), False, 'from gcloudorm import model, properties\n'), ((3710, 3739), 'gcloudorm.properties.DateTimeProperty', 'properties.DateTimeProperty', ([], {}), '()\n', (3737, 3739), False, 'from gcloudorm import model, properties\n'), ((4061, 4086), 'gcloudorm.properties.DateProperty', 'properties.DateProperty', ([], {}), '()\n', (4084, 4086), False, 'from gcloudorm import model, properties\n'), ((4388, 4413), 'gcloudorm.properties.TimeProperty', 'properties.TimeProperty', ([], {}), '()\n', (4411, 4413), False, 'from gcloudorm import model, properties\n')] |
# TODO: figure out how to put this in the app/ folder and still use serverless
# This line: `handler: main.handler`
# How do we specify a path here, as per uvicorn?
import os
from enum import Enum
from typing import Optional
from pydantic import BaseModel
from fastapi import FastAPI, Query
# for lambda; see https://adem.sh/blog/tutorial-fastapi-aws-lambda-serverless
from mangum import Mangum
from ontology_term_usage.term_usage import OntologyClient, ResultSet, TermUsage, TERM, ServiceMetadataCollection
# necessary for serverless/lambda
stage = os.environ.get('STAGE', None)
# Prefix the OpenAPI docs with the deployment stage when one is set.
openapi_prefix = f"/{stage}" if stage else "/"
# Single shared client used by all route handlers below.
client = OntologyClient()
description = """
Wraps multiple endpoints to query for all usages of a term, including
* Terms used in logical definitions in external ontologies
* Terms used in annotation of entities like genes and proteins
* Terms used in specialized annotation such as GO-CAMs
"""
# NOTE(review): openapi_prefix is deprecated in newer FastAPI releases in
# favour of root_path -- confirm the pinned FastAPI version still supports it.
app = FastAPI(title='Ontology Usage API',
              description=description,
              contact = {
                  "name": "<NAME>",
                  "url": "https://github.com/cmungall/ontology-term-usage",
                  "email": "cjmungall AT lbl DOT gov",
              },
              openapi_prefix=openapi_prefix)
# Tag descriptions intended for the generated OpenAPI docs.
# NOTE(review): tags_metadata is never passed to FastAPI
# (openapi_tags=tags_metadata), so these descriptions do not appear in the
# generated docs -- confirm whether that is intended.
tags_metadata = [
    {
        "name": "usages",
        "description": "Operations on term usages",
        "externalDocs": {
            "description": "External docs",
            "url": "https://github.com/cmungall/ontology-term-usage",
        },
    },
    {
        "name": "metadata",
        "description": "Operations to discover more information about system configuration.",
        "externalDocs": {
            "description": "External docs",
            "url": "https://github.com/cmungall/ontology-term-usage",
        },
    },
]
@app.get("/")
async def root():
    # Simple landing endpoint; no docstring so the OpenAPI description stays empty.
    greeting = {"message": "Hello World"}
    return greeting
@app.get("/usage/{term}", response_model=ResultSet, summary='Find usages of a term', tags=["usages"])
async def usage(term: TERM, limit: Optional[int] = None) -> ResultSet:
    """
    Find all usages of an ontology term across multiple services.
    To obtain metadata on all services called, use the services endpoint
    Example terms: GO:0006915 (apoptotic process), RO:0000057 (has participant)
    \f
    :param term: URI or CURIE of a term.
    :param limit: maximum number of usages
    :return: usages broken down by service
    """
    # PEP 484 deprecates the implicit Optional spelled `limit: int = None`;
    # Optional[int] is the explicit, equivalent annotation for FastAPI.
    return client.term_usage(term, limit=limit)
@app.get("/metadata", response_model=ServiceMetadataCollection, tags=["metadata"])
async def metadata() -> ServiceMetadataCollection:
    # Describe every backing service queried by the /usage endpoint.
    return client.get_services()
# AWS Lambda entry point: Mangum adapts API Gateway events to the ASGI app.
handler = Mangum(app)
"ontology_term_usage.term_usage.OntologyClient",
"mangum.Mangum",
"fastapi.FastAPI",
"os.environ.get"
] | [((556, 585), 'os.environ.get', 'os.environ.get', (['"""STAGE"""', 'None'], {}), "('STAGE', None)\n", (570, 585), False, 'import os\n'), ((643, 659), 'ontology_term_usage.term_usage.OntologyClient', 'OntologyClient', ([], {}), '()\n', (657, 659), False, 'from ontology_term_usage.term_usage import OntologyClient, ResultSet, TermUsage, TERM, ServiceMetadataCollection\n'), ((938, 1165), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Ontology Usage API"""', 'description': 'description', 'contact': "{'name': '<NAME>', 'url': 'https://github.com/cmungall/ontology-term-usage',\n 'email': 'cjmungall AT lbl DOT gov'}", 'openapi_prefix': 'openapi_prefix'}), "(title='Ontology Usage API', description=description, contact={\n 'name': '<NAME>', 'url':\n 'https://github.com/cmungall/ontology-term-usage', 'email':\n 'cjmungall AT lbl DOT gov'}, openapi_prefix=openapi_prefix)\n", (945, 1165), False, 'from fastapi import FastAPI, Query\n'), ((2663, 2674), 'mangum.Mangum', 'Mangum', (['app'], {}), '(app)\n', (2669, 2674), False, 'from mangum import Mangum\n')] |
from typing import Dict, List
import bcrypt
from app.session import SessionManager
from app.models.user import User
from app.models.validator import Validator
class UserService:
    """Persistence operations for ``User`` rows and their validators."""
    def __init__(self) -> None:
        pass
    def create(self, payload: Dict[str, str]) -> User:
        """Create a user with a bcrypt-hashed password and persist it."""
        digest = bcrypt.hashpw(payload['password'].encode(), bcrypt.gensalt())
        new_user = User(username=payload['username'], password=digest.decode())
        with SessionManager.session() as session:
            session.add(new_user)
        return new_user
    def get_all_validators(self, user_id: int) -> List[Validator]:
        """Return the validators owned by *user_id*."""
        with SessionManager.session() as session:
            owned = session.query(Validator).filter_by(user_id=user_id)
        return owned
    def update(self, user_id: int, payload: Dict[str, str]) -> None:
        """Update the user's Expo push token and notification flag."""
        with SessionManager.session() as session:
            row = session.query(User).get(user_id)
            row.expo_token = payload['expoToken']
            row.notification_enabled = payload['notificationEnabled']
| [
"bcrypt.gensalt",
"app.session.SessionManager.session"
] | [((346, 362), 'bcrypt.gensalt', 'bcrypt.gensalt', ([], {}), '()\n', (360, 362), False, 'import bcrypt\n'), ((457, 481), 'app.session.SessionManager.session', 'SessionManager.session', ([], {}), '()\n', (479, 481), False, 'from app.session import SessionManager\n'), ((626, 650), 'app.session.SessionManager.session', 'SessionManager.session', ([], {}), '()\n', (648, 650), False, 'from app.session import SessionManager\n'), ((850, 874), 'app.session.SessionManager.session', 'SessionManager.session', ([], {}), '()\n', (872, 874), False, 'from app.session import SessionManager\n')] |
# -*- encoding: utf-8 -*-
import glob
import io
import re
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
    """Read a file located relative to this setup.py and return its text.

    Parameters
    ----------
    *names : str
        Path components joined under the directory containing this file.
    **kwargs
        Only ``encoding`` is honoured (default: ``"utf8"``).
    """
    # Use a context manager so the handle is closed promptly; the original
    # left the file open for the garbage collector to reclaim.
    with io.open(
        join(dirname(__file__), *names),
        encoding=kwargs.get("encoding", "utf8")
    ) as handle:
        return handle.read()
setup(
    name="sampleMangler",
    version="0.1.1",
    license="BSD",
    description="Adapter layer between sampleManager and legacy api.",
    # README + CHANGELOG (with :obj:`...` roles rewritten to ``...``) form the long description.
    long_description="%s\n%s" % (read("README.rst"), re.sub(":obj:`~?(.*?)`", r"``\1``", read("CHANGELOG.rst"))),
    author="<NAME>",
    author_email="<EMAIL>",
    url="https://github.com/cowanml/sampleMangler",
    packages=find_packages("src"),
    package_dir={"": "src"},
    # Expose every top-level module under src/ as a py_module.
    py_modules=[splitext(basename(i))[0] for i in glob.glob("src/*.py")],
    include_package_data=True,
    ## zip_safe=False,
    # zip_safe=True,
    classifiers=[
        # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
        # NOTE(review): "0 - Fetal" is not a valid trove classifier; PyPI
        # uploads would reject it -- confirm intended value.
        "Development Status :: 0 - Fetal",
        "Intended Audience :: Developers",
        "Operating System :: Unix",
        "Operating System :: POSIX",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Utilities",
    ],
    keywords=[
        # eg: "keyword1", "keyword2", "keyword3",
    ],
    install_requires=[
        # eg: "aspectlib==1.1.1", "six>=1.7",
        "pymongo",
        "sampleManager"
    ],
    extras_require={
        # eg: 'rst': ["docutils>=0.11"],
    },
    entry_points={
        "console_scripts": [
            "sampleMangler = sampleMangler.__main__:main"
        ]
    },
    # don't do this...? just abstract dependencies here, concrete in requirements.txt...
    # dependency_links = [
    # "git+https://github.com/NSLS-II/sampleManager.git"
    # ],
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.basename",
"glob.glob"
] | [((761, 781), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {}), "('src')\n", (774, 781), False, 'from setuptools import find_packages\n'), ((862, 883), 'glob.glob', 'glob.glob', (['"""src/*.py"""'], {}), "('src/*.py')\n", (871, 883), False, 'import glob\n'), ((299, 316), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (306, 316), False, 'from os.path import dirname\n'), ((837, 848), 'os.path.basename', 'basename', (['i'], {}), '(i)\n', (845, 848), False, 'from os.path import basename\n')] |
"""
pyexcel_io.writers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
file writers
:copyright: (c) 2014-2020 by Onni Software Ltd.
:license: New BSD License, see LICENSE for more details
"""
from pyexcel_io.plugins import IOPluginInfoChainV2
chain = IOPluginInfoChainV2(__name__)
# Physical CSV/TSV files and file-like content.
chain = chain.add_a_writer(
    relative_plugin_class_path="csv_in_file.CsvFileWriter",
    locations=["file", "content"],
    file_types=["csv", "tsv"],
    stream_type="text",
)
# In-memory CSV/TSV streams.
chain = chain.add_a_writer(
    relative_plugin_class_path="csv_in_memory.CsvMemoryWriter",
    locations=["memory"],
    file_types=["csv", "tsv"],
    stream_type="text",
)
# Zipped CSV/TSV archives (csvz/tsvz), written as binary.
chain.add_a_writer(
    relative_plugin_class_path="csvz_writer.CsvZipWriter",
    locations=["memory", "file", "content"],
    file_types=["csvz", "tsvz"],
    stream_type="binary",
)
| [
"pyexcel_io.plugins.IOPluginInfoChainV2"
] | [((253, 282), 'pyexcel_io.plugins.IOPluginInfoChainV2', 'IOPluginInfoChainV2', (['__name__'], {}), '(__name__)\n', (272, 282), False, 'from pyexcel_io.plugins import IOPluginInfoChainV2\n')] |
from collections import Counter
import typing
from pathlib import Path
import subprocess
import os
def run_muscle(guide_alignment, in_file, out_file):
    """Profile-align *in_file* against *guide_alignment* with MUSCLE.

    The merged alignment is written to *out_file*; raises
    CalledProcessError if muscle exits non-zero.
    """
    subprocess.check_call([
        'muscle', '-quiet',
        '-profile', '-in1', guide_alignment, '-in2', in_file,
        '-out', out_file,
    ])
def add_sequences_to_alignment(new_fasta_file, old_fasta_file, out_file):
    """Merge new sequences into an existing alignment and return the result.

    Runs MUSCLE in profile mode, then parses *out_file* into a
    {accession: aligned sequence} dict.
    """
    run_muscle(old_fasta_file, new_fasta_file, out_file)
    return get_sequences_from_fasta(out_file)
def get_sequences_from_fasta_yield(fasta_file: typing.Union[str, Path]) -> tuple:
    """
    Yield (accession, sequence) pairs parsed from a fasta file.

    Parameters
    ----------
    fasta_file

    Returns
    -------
    (accession, sequence) iterator
    """
    with open(fasta_file) as handle:
        key = None
        chunks = []
        for raw_line in handle:
            stripped = raw_line.strip()
            # Blank lines are ignored wherever they appear.
            if not stripped:
                continue
            if ">" in raw_line:
                # A new header: emit the record accumulated so far (if any).
                if key is not None:
                    yield key, "".join(chunks)
                    chunks = []
                key = raw_line.split(">")[1].strip()
            else:
                chunks.append(stripped)
        yield key, "".join(chunks)
def get_sequences_from_fasta(fasta_file: typing.Union[str, Path]) -> dict:
    """
    Parse a fasta file into a dict.

    Parameters
    ----------
    fasta_file

    Returns
    -------
    {accession: sequence}
    """
    return dict(get_sequences_from_fasta_yield(fasta_file))
def get_alignment_subselection(
    alignment: typing.Dict[str, str], keys: typing.List[str], error: bool = True
):
    """
    Restrict a full sequence alignment to a subset of keys.

    Columns that are a gap ("-") in every selected sequence are dropped.

    Parameters
    ----------
    alignment
        dict of key: aligned sequence
    keys
        keys to include in the output alignment
    error
        if True, raise when a key is missing from the alignment;
        otherwise silently skip missing keys

    Returns
    -------
    dictionary of aligned sequences over the selected keys
    """
    if error:
        for wanted in keys:
            assert wanted in alignment, f"{wanted} not found in alignment"
    else:
        keys = [wanted for wanted in keys if wanted in alignment]
    length = len(alignment[keys[0]])
    # Columns that are gaps in every selected sequence.
    all_gap_columns = {
        column
        for column in range(length)
        if all(alignment[wanted][column] == "-" for wanted in keys)
    }
    return {
        wanted: "".join(
            residue
            for column, residue in enumerate(alignment[wanted])
            if column not in all_gap_columns
        )
        for wanted in keys
    }
def alignment_to_fasta(alignment: typing.Dict[str, str]) -> str:
    """
    Render an alignment dict in fasta format.

    Parameters
    ----------
    alignment
        dict of key: aligned sequence

    Returns
    -------
    fasta-formatted string
    """
    return "\n".join(f">{name}\n{sequence}" for name, sequence in alignment.items())
def alignment_conservation(alignment: typing.Dict[str, str]) -> typing.List[float]:
    """
    Per-column residue conservation of a sequence alignment.

    Conservation is the count of the most common non-gap residue in the
    column divided by the total number of sequences.

    Parameters
    ----------
    alignment
        dict of key: aligned sequence

    Returns
    -------
    list of conservation values
    """
    total = len(alignment)
    length = len(next(iter(alignment.values())))
    conservations = []
    for column in range(length):
        # Gaps ("-") are excluded from the residue counts.
        residues = Counter(
            sequence[column] for sequence in alignment.values() if sequence[column] != "-"
        )
        conservations.append(residues.most_common(1)[0][1] / total)
    return conservations
def format_as_fasta(alignment):
    """Render an alignment dict as fasta text, one trailing newline per record."""
    return "".join(f">{name}\n{alignment[name]}\n" for name in alignment)
| [
"collections.Counter",
"subprocess.check_call"
] | [((267, 297), 'subprocess.check_call', 'subprocess.check_call', (['command'], {}), '(command)\n', (288, 297), False, 'import subprocess\n'), ((3704, 3789), 'collections.Counter', 'Counter', (["[alignment[key][pos] for key in alignment if alignment[key][pos] != '-']"], {}), "([alignment[key][pos] for key in alignment if alignment[key][pos] !=\n '-'])\n", (3711, 3789), False, 'from collections import Counter\n')] |
from perfrunner.helpers.cbmonitor import timeit, with_stats
from perfrunner.tests import PerfTest
from perfrunner.workloads.kvgen import kvgen
class IndexTest(PerfTest):
    """Measure secondary-index initial build and incremental update phases."""
    # Stats collectors enabled for every run of this test.
    COLLECTORS = {
        'secondary_stats': True,
        'secondary_debugstats': True,
        'secondary_debugstats_bucket': True,
        'secondary_debugstats_index': True,
    }
    # @timeit makes these methods return their elapsed time
    # (see InitialIndexTest.run below); @with_stats collects stats around them.
    @with_stats
    @timeit
    def init_index(self):
        """Create all indexes and wait for the initial build to finish."""
        self.create_indexes()
        self.wait_for_indexing()
    @with_stats
    @timeit
    def incr_index(self):
        """Run the access phase and wait for incremental indexing to catch up."""
        self.access()
        self.wait_for_indexing()
    def _report_kpi(self, indexing_time: float):
        """Post the indexing-time metric to the reporter."""
        self.reporter.post(
            *self.metrics.indexing_time(indexing_time)
        )
    def run(self):
        """Load data, then exercise initial and incremental indexing."""
        self.load()
        self.wait_for_persistence()
        self.init_index()
        self.incr_index()
class InitialIndexTest(IndexTest):
    """Report only the initial index build time as the KPI."""
    def run(self):
        self.load()
        self.wait_for_persistence()
        # init_index() is wrapped with @timeit, so it returns the elapsed time.
        time_elapsed = self.init_index()
        self.report_kpi(time_elapsed)
class FastIndexTest(PerfTest):
    """Index test variant that generates documents with the kvgen workload."""
    def load(self, *args):
        # wait=True: block until kvgen finishes creating all items.
        kvgen(self.master_node, self.test_config.load_settings.items, wait=True)
    def access(self, *args):
        # wait=False -- presumably non-blocking; confirm kvgen semantics.
        kvgen(self.master_node, self.test_config.load_settings.items, wait=False)
class FastInitialIndexTest(FastIndexTest, InitialIndexTest):
    """InitialIndexTest that uses the kvgen-based load/access phases."""
    pass
| [
"perfrunner.workloads.kvgen.kvgen"
] | [((1122, 1194), 'perfrunner.workloads.kvgen.kvgen', 'kvgen', (['self.master_node', 'self.test_config.load_settings.items'], {'wait': '(True)'}), '(self.master_node, self.test_config.load_settings.items, wait=True)\n', (1127, 1194), False, 'from perfrunner.workloads.kvgen import kvgen\n'), ((1233, 1306), 'perfrunner.workloads.kvgen.kvgen', 'kvgen', (['self.master_node', 'self.test_config.load_settings.items'], {'wait': '(False)'}), '(self.master_node, self.test_config.load_settings.items, wait=False)\n', (1238, 1306), False, 'from perfrunner.workloads.kvgen import kvgen\n')] |
import os
import logging
import conjugatedescent
import coordinatedescent
import gradientdescent
# Module-wide logger; DEBUG level so optimizer selection messages are emitted.
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
def _choose_operation(requested_operation: str):
    """Instantiate the optimizer matching *requested_operation*.

    Any name other than CONJUGATE_GRADIENT or COORDINATE_DESCENT falls
    back to plain gradient descent.
    """
    factories = {
        'CONJUGATE_GRADIENT': conjugatedescent.ConjugateGradientDescent,
        'COORDINATE_DESCENT': coordinatedescent.CoordinateDescent,
    }
    factory = factories.get(requested_operation, gradientdescent.GradientDescent)
    optimizer = factory()
    log.debug(f'Done activating {type(optimizer).__name__} optimizer for algorithm={requested_operation}')
    return optimizer
def main():
    """Run the optimizer chosen via the ALGORITHM env var and print the result."""
    algorithm = os.getenv('ALGORITHM')
    optimizer = _choose_operation(algorithm)
    minimiser, min_value, gradient = optimizer.execute()
    result = f'''
    =====Function F(X1, X2) has a local minimum at {minimiser}=========
    - Min Value = {min_value}
    - Slope = {gradient}
    '''
    print(result.strip())
if __name__ == '__main__':
    main()
| [
"logging.basicConfig",
"logging.getLogger",
"os.getenv",
"conjugatedescent.ConjugateGradientDescent",
"gradientdescent.GradientDescent",
"coordinatedescent.CoordinateDescent"
] | [((99, 139), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (118, 139), False, 'import logging\n'), ((146, 173), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (163, 173), False, 'import logging\n'), ((297, 340), 'conjugatedescent.ConjugateGradientDescent', 'conjugatedescent.ConjugateGradientDescent', ([], {}), '()\n', (338, 340), False, 'import conjugatedescent\n'), ((694, 716), 'os.getenv', 'os.getenv', (['"""ALGORITHM"""'], {}), "('ALGORITHM')\n", (703, 716), False, 'import os\n'), ((415, 452), 'coordinatedescent.CoordinateDescent', 'coordinatedescent.CoordinateDescent', ([], {}), '()\n', (450, 452), False, 'import coordinatedescent\n'), ((483, 516), 'gradientdescent.GradientDescent', 'gradientdescent.GradientDescent', ([], {}), '()\n', (514, 516), False, 'import gradientdescent\n')] |
import unittest
from dll import Node, DoubleLinkedList
class TestIt(unittest.TestCase):
    """Behavioural tests for Node and DoubleLinkedList."""
    def test_1(self):
        """A Node stores the value it was constructed with."""
        n1 = Node('A')
        self.assertEqual(n1._value, 'A')
    def test_2(self):
        """Appending the first node makes it the list head."""
        n1 = Node('A')
        dl = DoubleLinkedList()
        dl.append(n1)
        self.assertEqual(dl.head._value, 'A')
    def test_3(self):
        """Appending a second node moves the tail to the new node."""
        n1 = Node('A')
        n2 = Node('B')
        dl = DoubleLinkedList()
        dl.append(n1)
        dl.append(n2)
        self.assertEqual(dl.tail._value, 'B')
    def test_4(self):
        """The head remains the first node after further appends."""
        n1 = Node('A')
        n2 = Node('B')
        dl = DoubleLinkedList()
        dl.append(n1)
        dl.append(n2)
        self.assertEqual(dl.head._value, 'A')
    def test_5(self):
        """push() inserts at the front, becoming the new head."""
        n1 = Node('A')
        n2 = Node('B')
        dl = DoubleLinkedList()
        dl.append(n1)
        dl.append(n2)
        n3 = Node('C')  # will push C to the front (head)
        dl.push(n3)
        self.assertEqual(dl.head._value, 'C')
    def test_6(self):
        """pop() removes the head; the next node becomes the head."""
        n1 = Node('A')
        n2 = Node('B')
        dl = DoubleLinkedList()
        dl.append(n1)  # head and tail at this point
        dl.append(n2)  # A is head and B is now tail
        dl.pop()  # removes A so head value should be B
        self.assertEqual(dl.head._value, 'B')
    def test_7(self):
        """shift() removes the tail node."""
        n1 = Node('A')
        n2 = Node('B')
        n3 = Node('C')
        dl = DoubleLinkedList()
        dl.append(n1)
        dl.append(n2)
        dl.append(n3)
        dl.shift()
        self.assertEqual(dl.tail._value, 'B')
    def test_8(self):
        """remove('C') drops the tail node; 'B' becomes the new tail."""
        n1 = Node('A')
        n2 = Node('B')
        n3 = Node('C')
        dl = DoubleLinkedList()
        dl.append(n1)
        dl.append(n2)
        dl.append(n3)
        dl.remove('C')  # removing 'C' leaves 'B' as the tail
        self.assertEqual(dl.tail._value, 'B')
| [
"dll.Node",
"dll.DoubleLinkedList"
] | [((137, 146), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (141, 146), False, 'from dll import Node, DoubleLinkedList\n'), ((262, 271), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (266, 271), False, 'from dll import Node, DoubleLinkedList\n'), ((279, 297), 'dll.DoubleLinkedList', 'DoubleLinkedList', ([], {}), '()\n', (295, 297), False, 'from dll import Node, DoubleLinkedList\n'), ((458, 467), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (462, 467), False, 'from dll import Node, DoubleLinkedList\n'), ((475, 484), 'dll.Node', 'Node', (['"""B"""'], {}), "('B')\n", (479, 484), False, 'from dll import Node, DoubleLinkedList\n'), ((492, 510), 'dll.DoubleLinkedList', 'DoubleLinkedList', ([], {}), '()\n', (508, 510), False, 'from dll import Node, DoubleLinkedList\n'), ((674, 683), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (678, 683), False, 'from dll import Node, DoubleLinkedList\n'), ((691, 700), 'dll.Node', 'Node', (['"""B"""'], {}), "('B')\n", (695, 700), False, 'from dll import Node, DoubleLinkedList\n'), ((708, 726), 'dll.DoubleLinkedList', 'DoubleLinkedList', ([], {}), '()\n', (724, 726), False, 'from dll import Node, DoubleLinkedList\n'), ((866, 875), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (870, 875), False, 'from dll import Node, DoubleLinkedList\n'), ((883, 892), 'dll.Node', 'Node', (['"""B"""'], {}), "('B')\n", (887, 892), False, 'from dll import Node, DoubleLinkedList\n'), ((900, 918), 'dll.DoubleLinkedList', 'DoubleLinkedList', ([], {}), '()\n', (916, 918), False, 'from dll import Node, DoubleLinkedList\n'), ((958, 967), 'dll.Node', 'Node', (['"""C"""'], {}), "('C')\n", (962, 967), False, 'from dll import Node, DoubleLinkedList\n'), ((1116, 1125), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (1120, 1125), False, 'from dll import Node, DoubleLinkedList\n'), ((1133, 1142), 'dll.Node', 'Node', (['"""B"""'], {}), "('B')\n", (1137, 1142), False, 'from dll import Node, DoubleLinkedList\n'), ((1150, 1168), 
'dll.DoubleLinkedList', 'DoubleLinkedList', ([], {}), '()\n', (1166, 1168), False, 'from dll import Node, DoubleLinkedList\n'), ((1422, 1431), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (1426, 1431), False, 'from dll import Node, DoubleLinkedList\n'), ((1439, 1448), 'dll.Node', 'Node', (['"""B"""'], {}), "('B')\n", (1443, 1448), False, 'from dll import Node, DoubleLinkedList\n'), ((1456, 1465), 'dll.Node', 'Node', (['"""C"""'], {}), "('C')\n", (1460, 1465), False, 'from dll import Node, DoubleLinkedList\n'), ((1473, 1491), 'dll.DoubleLinkedList', 'DoubleLinkedList', ([], {}), '()\n', (1489, 1491), False, 'from dll import Node, DoubleLinkedList\n'), ((1672, 1681), 'dll.Node', 'Node', (['"""A"""'], {}), "('A')\n", (1676, 1681), False, 'from dll import Node, DoubleLinkedList\n'), ((1689, 1698), 'dll.Node', 'Node', (['"""B"""'], {}), "('B')\n", (1693, 1698), False, 'from dll import Node, DoubleLinkedList\n'), ((1706, 1715), 'dll.Node', 'Node', (['"""C"""'], {}), "('C')\n", (1710, 1715), False, 'from dll import Node, DoubleLinkedList\n'), ((1723, 1741), 'dll.DoubleLinkedList', 'DoubleLinkedList', ([], {}), '()\n', (1739, 1741), False, 'from dll import Node, DoubleLinkedList\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Spectrassembler main program
@author: <NAME>
"""
from __future__ import print_function
from time import time
import sys
import argparse
from functools import partial
from multiprocessing import Pool
import numpy as np
from Bio import SeqIO
from scipy.sparse import coo_matrix
from scipy.stats.mstats import mquantiles
from overlaps import compute_positions, compute_overlaps
from spectral import sym_max, remove_bridge_reads, reorder_mat_par, reorder_mat
from consensus import run_spoa_in_cc, merge_windows_in_cc
from ioandplots import fill_args_opts, make_dir, oprint, write_layout_to_file, plot_cc_pos_v_ref
# Parse arguments and define global variables
t0 = time()  # reference instant for all elapsed-time log messages
# NOTE: adjacent string literals concatenate with no separator, so each line
# break needs a trailing space; the original produced run-together help text
# such as "assemblerbased".
parser = argparse.ArgumentParser(description="De novo experimental assembler "
                                    "based on a spectral algorithm to reorder the reads")
parser.add_argument("-r", "--root", default="./",
                    help="directory where to store layout and consensus files.")
parser.add_argument("-f", "--READS_FN", required=True,
                    help="path to reads file (fasta or fastq)")
parser.add_argument("-m", "--minimapfn", required=True,
                    help="overlap file path (from minimap in PAF format).")
parser.add_argument("--min_cc_len", type=int, default=10,
                    help="minimum number of reads for a contig to be considered")
parser.add_argument("--w_len", type=int, default=3000,
                    help="length of consensus windows for POA.")
parser.add_argument("--w_ovl_len", type=int, default=2000,
                    help="overlap length between two successive consensus windows.")
parser.add_argument("--len_thr", type=int, default=3500,
                    help="threshold on length of overlaps (similarity matrix preprocessing).")
parser.add_argument("--sim_qtile", type=float, default=0.4,
                    help="quantile threshold on overlap score (similarity matrix preprocessing.) " \
                    "0.5 means you keep only overlaps with num_match > quantile(num_matches, 0.5)")
parser.add_argument("-v", "--verbosity", action="count", default=1,
                    help="verbosity level (-v, -vv or none)")
parser.add_argument("--ref_pos_csvf",
                    help="csv file with position of reads (in same order as in READS_FN) " \
                    "obtained from BWA, in order to plot reads position found vs reference.")
parser.add_argument("--spoapath", default="tools/spoa/spoa",
                    help="path to spoa executable")
parser.add_argument("--nproc", help="number of parallel processes", type=int,
                    default=1)
parser.add_argument("--margin", type=int, default=1250,
                    help="number of bases to add to current consensus to make sure it overlaps next window")
parser.add_argument("--trim_margin", type=int, default=200,
                    help="length to cut in beginning and end of consensus sequences from spoa (where the consensus is " \
                    "less good)")
parser.add_argument("--julia", default=None,
                    help="path to Julia (optional, " \
                    "though eigenvector computations are clearly faster in Julia than in Python)")
args = parser.parse_args()
opts = fill_args_opts(args)
ROOT_DIR = opts['ROOT_DIR']
VERB = opts['VERB']
# Load reads.
# FIX: the "rU" (universal-newline) open mode was deprecated in Python 3 and
# removed in 3.11; plain text mode already handles universal newlines.  A
# `with` block also guarantees the handle is closed if parsing raises.
with open(args.READS_FN) as reads_fh:
    record_list = list(SeqIO.parse(reads_fh, opts['READS_FMT']))
oprint("Reads loaded. Compute overlaps from files...", dt=(time() - t0), cond=(VERB >= 2))
# Compute overlaps from the files
(read_nb2id, ovl_list, I, J, K, num_match, ovl_len, n_reads) = compute_overlaps(args.minimapfn, record_list)
# Threshold based on overlaps value (number of matches) and length
THR = mquantiles(num_match, args.sim_qtile)
oprint("THR = %1.1f " % THR)
cond1 = (num_match > THR)
cond2 = (ovl_len > opts['LEN_THR'])
idxok = np.argwhere(cond1 * cond2)[:, 0]
# Keep the full (unfiltered) match scores: they are re-quantiled later to
# build the threshold ladder for the spectral reordering.
num_match_l = num_match
I = I[idxok]
J = J[idxok]
num_match = num_match[idxok]
# ovl_len = ovl_len[idxok]
K = K[idxok]
# Construct similarity matrix
oprint("Construct thresholded similarity matrix...", dt=(time() - t0), cond=(VERB >= 2))
# Sparse read-vs-read similarity: entry (i, j) holds the match score of the
# overlap between reads i and j (CSR for efficient row slicing later on).
sim_mat = coo_matrix((num_match, (I, J)), shape=(n_reads, n_reads), dtype=int).tocsr()
oprint("Pre-process similarity matrix...", dt=(time() - t0), cond=(VERB >= 2))
# Overlap index array : overlap(i,j) = ovl_list[k], with k = ovl_idx_arr[i,j]
ovl_idx_arr = coo_matrix((K, (I, J)), shape=(n_reads, n_reads), dtype=int).tocsr()
# sym_max keeps the larger of the (i, j)/(j, i) entries so lookups work in
# either orientation.
ovl_idx_arr = sym_max(ovl_idx_arr)
# Symmetrize the matrix when it is not already symmetric
sim_mat = sym_max(sim_mat)
# sim_mat = (sim_mat + sim_mat.T)
# Remove "connecting reads"
# (reads that would spuriously bridge otherwise-separate components).
sim_mat = remove_bridge_reads(sim_mat)
# The raw coordinate arrays are no longer needed once the sparse matrices exist.
del I, J, K, ovl_len, num_match
oprint("Similarity matrix built and preprocessed. Reorder it with spectral ordering...", dt=(time() - t0),
       cond=(VERB >= 1))
# Reorder connected components with spectral ordering
ccs_list = []  # will hold one ordered list of read indices per connected component
cc = range(sim_mat.shape[0])  # initially, all reads form a single candidate component
qtile = args.sim_qtile
t_start_layout = time()
# reorder_submat(sim_mat, cc, num_match_l, qtile, ccs_list, opts)
# Build a ladder of 40 increasingly strict similarity thresholds.  Each step
# raises the quantile by 0.1, or by half the remaining distance to 1 when that
# is smaller, so the thresholds approach (but never reach) the maximum score.
thr_list = []
new_qtile = qtile
for _ in range(40):  # the loop index itself is unused; we only need 40 steps
    thr_sub = float(mquantiles(num_match_l, new_qtile))
    thr_list.append(thr_sub)
    new_qtile += min(0.1, 0.5 * (1. - new_qtile))
del num_match_l  # the unfiltered match scores are no longer needed
# Spectral reordering of the similarity matrix, parallel when several worker
# processes were requested on the command line.
if opts['N_PROC'] > 1:
    ccs_list = reorder_mat_par(sim_mat, thr_list, opts)
else:
    ccs_list = reorder_mat(sim_mat, thr_list, opts['MIN_CC_LEN'], opts['VERB'])
t_rough_layout = time() - t_start_layout
oprint("Rough layout computed in %3.3f." % (t_rough_layout),
       dt=(time() - t0), cond=(VERB >= 1))
# Sort by length of connected component
ccs_list.sort(key=len, reverse=True)
oprint("Compute fine grained layout and run spoa in connected components...", dt=(time() - t0), cond=(VERB >= 1))
# If root_dir does not exist, create it
make_dir(ROOT_DIR)
t_total_finegrained = 0  # accumulated fine-grained layout time, for the final report
# Get fine-grained layout with dictionary of overlaps in each connected component
for (cc_idx, cc) in enumerate(ccs_list):
    # Restrict overlap index array to reads in the connected component (contig)
    # ovl_idx_cc = ovl_idx_arr.copy().tocsc()[:, cc]
    # ovl_idx_cc = ovl_idx_cc.tocsr()[cc, :]
    ovl_idx_cc = ovl_idx_arr[cc,:][:,cc]
    # symmetrize if the overlapper does not count overlap twice for (i,j) and (j,i)
    # ovl_idx_cc = sym_max(ovl_idx_cc)
    # Compute fine-grained position and strand of each read in connected component
    t_start_fg_layout = time()
    (strand_list, bpos_list, epos_list) = compute_positions(cc, read_nb2id, ovl_list, ovl_idx_cc)
    t_finegrained = time() - t_start_fg_layout
    t_total_finegrained += t_finegrained
    # NOTE(review): the two adjacent literals below concatenate without a
    # separator, so the logged message reads "...component%d/%d..." — confirm
    # whether a space was intended.
    msg = "Positions computed in connected component"\
        "%d/%d in %3.3f.\n Now run spoa if provided." % (cc_idx,
        len(ccs_list) - 1, t_finegrained)
    oprint(msg, dt=(time() - t0), cond=(VERB >= 2))
    # Write file with layout
    layout_fn = "%s/cc%d.layout" % (ROOT_DIR, cc_idx)
    write_layout_to_file(layout_fn, strand_list, bpos_list, epos_list, cc, read_nb2id)
    msg = "layout written to file %s" % (layout_fn)
    oprint(msg, dt=(time() - t0), cond=(VERB >= 2))
    if opts['DO_PLOT_POS_V_REF']:
        msg = "Edit graphic : position of reads found by algorithm vs reference"
        oprint(msg, dt=(time() - t0), cond=(VERB >= 2))
        figpath = ROOT_DIR + "/pos_found_vs_ref_cc%d.eps" % (cc_idx)
        plot_cc_pos_v_ref(opts['REF_POS_CSVF'], cc, bpos_list, figpath)
    # Generate contigs through multiple sequence alignment
    if opts['DO_SPOA']:
        # Compute consensus in windows
        run_spoa_in_cc(record_list, cc_idx, cc, strand_list, bpos_list,
                       epos_list, opts)
        if opts['N_PROC'] == 1:
            # Merge windows to get consensus
            # (with several workers, merging is deferred to the parallel pass below)
            cons_in_cc = merge_windows_in_cc(cc_idx, opts)
            print(">contig_%d\n%s" % (cc_idx, cons_in_cc), file=sys.stdout)
            msg = "Consensus computed in connected component %d/%d. " % (cc_idx, len(ccs_list) - 1)
            oprint(msg, dt=(time() - t0), cond=(VERB >= 1))
    # Release the per-component arrays before handling the next contig.
    del strand_list, bpos_list, epos_list, ovl_idx_cc
# Parallelize the merging of consensus windows if several cores
if (opts['N_PROC'] > 1) and opts['DO_SPOA']:
    partial_merge = partial(merge_windows_in_cc, opts=opts)
    pool = Pool(processes=opts['N_PROC'])
    consensi_in_cc = pool.map(partial_merge, range(len(ccs_list)))
    pool.close()
    pool.join()
    for (cc_idx, cons_in_cc) in enumerate(consensi_in_cc):
        print(">contig_%d\n%s" % (cc_idx, cons_in_cc), file=sys.stdout)
oprint("Finished.\nRough layout computed in %4.3f.\n Fine-grained layout computed in %4.3f." % (
    t_rough_layout, t_total_finegrained),
       dt=(time() - t0), cond=(VERB >= 1))
| [
"spectral.remove_bridge_reads",
"argparse.ArgumentParser",
"scipy.sparse.coo_matrix",
"ioandplots.make_dir",
"spectral.sym_max",
"overlaps.compute_positions",
"consensus.run_spoa_in_cc",
"ioandplots.oprint",
"spectral.reorder_mat",
"time.time",
"spectral.reorder_mat_par",
"ioandplots.fill_args... | [((715, 721), 'time.time', 'time', ([], {}), '()\n', (719, 721), False, 'from time import time\n'), ((732, 861), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""De novo experimental assemblerbased on a spectral algorithm to reorder the reads"""'}), "(description=\n 'De novo experimental assemblerbased on a spectral algorithm to reorder the reads'\n )\n", (755, 861), False, 'import argparse\n'), ((3242, 3262), 'ioandplots.fill_args_opts', 'fill_args_opts', (['args'], {}), '(args)\n', (3256, 3262), False, 'from ioandplots import fill_args_opts, make_dir, oprint, write_layout_to_file, plot_cc_pos_v_ref\n'), ((3629, 3674), 'overlaps.compute_overlaps', 'compute_overlaps', (['args.minimapfn', 'record_list'], {}), '(args.minimapfn, record_list)\n', (3645, 3674), False, 'from overlaps import compute_positions, compute_overlaps\n'), ((3749, 3786), 'scipy.stats.mstats.mquantiles', 'mquantiles', (['num_match', 'args.sim_qtile'], {}), '(num_match, args.sim_qtile)\n', (3759, 3786), False, 'from scipy.stats.mstats import mquantiles\n'), ((3787, 3815), 'ioandplots.oprint', 'oprint', (["('THR = %1.1f ' % THR)"], {}), "('THR = %1.1f ' % THR)\n", (3793, 3815), False, 'from ioandplots import fill_args_opts, make_dir, oprint, write_layout_to_file, plot_cc_pos_v_ref\n'), ((4500, 4520), 'spectral.sym_max', 'sym_max', (['ovl_idx_arr'], {}), '(ovl_idx_arr)\n', (4507, 4520), False, 'from spectral import sym_max, remove_bridge_reads, reorder_mat_par, reorder_mat\n'), ((4589, 4605), 'spectral.sym_max', 'sym_max', (['sim_mat'], {}), '(sim_mat)\n', (4596, 4605), False, 'from spectral import sym_max, remove_bridge_reads, reorder_mat_par, reorder_mat\n'), ((4679, 4707), 'spectral.remove_bridge_reads', 'remove_bridge_reads', (['sim_mat'], {}), '(sim_mat)\n', (4698, 4707), False, 'from spectral import sym_max, remove_bridge_reads, reorder_mat_par, reorder_mat\n'), ((5010, 5016), 'time.time', 'time', ([], {}), '()\n', (5014, 5016), False, 
'from time import time\n'), ((5831, 5849), 'ioandplots.make_dir', 'make_dir', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (5839, 5849), False, 'from ioandplots import fill_args_opts, make_dir, oprint, write_layout_to_file, plot_cc_pos_v_ref\n'), ((3381, 3421), 'Bio.SeqIO.parse', 'SeqIO.parse', (['reads_fh', "opts['READS_FMT']"], {}), "(reads_fh, opts['READS_FMT'])\n", (3392, 3421), False, 'from Bio import SeqIO\n'), ((3886, 3912), 'numpy.argwhere', 'np.argwhere', (['(cond1 * cond2)'], {}), '(cond1 * cond2)\n', (3897, 3912), True, 'import numpy as np\n'), ((5324, 5364), 'spectral.reorder_mat_par', 'reorder_mat_par', (['sim_mat', 'thr_list', 'opts'], {}), '(sim_mat, thr_list, opts)\n', (5339, 5364), False, 'from spectral import sym_max, remove_bridge_reads, reorder_mat_par, reorder_mat\n'), ((5386, 5450), 'spectral.reorder_mat', 'reorder_mat', (['sim_mat', 'thr_list', "opts['MIN_CC_LEN']", "opts['VERB']"], {}), "(sim_mat, thr_list, opts['MIN_CC_LEN'], opts['VERB'])\n", (5397, 5450), False, 'from spectral import sym_max, remove_bridge_reads, reorder_mat_par, reorder_mat\n'), ((5469, 5475), 'time.time', 'time', ([], {}), '()\n', (5473, 5475), False, 'from time import time\n'), ((6450, 6456), 'time.time', 'time', ([], {}), '()\n', (6454, 6456), False, 'from time import time\n'), ((6499, 6554), 'overlaps.compute_positions', 'compute_positions', (['cc', 'read_nb2id', 'ovl_list', 'ovl_idx_cc'], {}), '(cc, read_nb2id, ovl_list, ovl_idx_cc)\n', (6516, 6554), False, 'from overlaps import compute_positions, compute_overlaps\n'), ((6937, 7023), 'ioandplots.write_layout_to_file', 'write_layout_to_file', (['layout_fn', 'strand_list', 'bpos_list', 'epos_list', 'cc', 'read_nb2id'], {}), '(layout_fn, strand_list, bpos_list, epos_list, cc,\n read_nb2id)\n', (6957, 7023), False, 'from ioandplots import fill_args_opts, make_dir, oprint, write_layout_to_file, plot_cc_pos_v_ref\n'), ((8212, 8251), 'functools.partial', 'partial', (['merge_windows_in_cc'], {'opts': 'opts'}), '(merge_windows_in_cc, 
opts=opts)\n', (8219, 8251), False, 'from functools import partial\n'), ((8263, 8293), 'multiprocessing.Pool', 'Pool', ([], {'processes': "opts['N_PROC']"}), "(processes=opts['N_PROC'])\n", (8267, 8293), False, 'from multiprocessing import Pool\n'), ((4168, 4236), 'scipy.sparse.coo_matrix', 'coo_matrix', (['(num_match, (I, J))'], {'shape': '(n_reads, n_reads)', 'dtype': 'int'}), '((num_match, (I, J)), shape=(n_reads, n_reads), dtype=int)\n', (4178, 4236), False, 'from scipy.sparse import coo_matrix\n'), ((4417, 4477), 'scipy.sparse.coo_matrix', 'coo_matrix', (['(K, (I, J))'], {'shape': '(n_reads, n_reads)', 'dtype': 'int'}), '((K, (I, J)), shape=(n_reads, n_reads), dtype=int)\n', (4427, 4477), False, 'from scipy.sparse import coo_matrix\n'), ((5155, 5189), 'scipy.stats.mstats.mquantiles', 'mquantiles', (['num_match_l', 'new_qtile'], {}), '(num_match_l, new_qtile)\n', (5165, 5189), False, 'from scipy.stats.mstats import mquantiles\n'), ((6575, 6581), 'time.time', 'time', ([], {}), '()\n', (6579, 6581), False, 'from time import time\n'), ((7373, 7436), 'ioandplots.plot_cc_pos_v_ref', 'plot_cc_pos_v_ref', (["opts['REF_POS_CSVF']", 'cc', 'bpos_list', 'figpath'], {}), "(opts['REF_POS_CSVF'], cc, bpos_list, figpath)\n", (7390, 7436), False, 'from ioandplots import fill_args_opts, make_dir, oprint, write_layout_to_file, plot_cc_pos_v_ref\n'), ((7568, 7653), 'consensus.run_spoa_in_cc', 'run_spoa_in_cc', (['record_list', 'cc_idx', 'cc', 'strand_list', 'bpos_list', 'epos_list', 'opts'], {}), '(record_list, cc_idx, cc, strand_list, bpos_list, epos_list, opts\n )\n', (7582, 7653), False, 'from consensus import run_spoa_in_cc, merge_windows_in_cc\n'), ((3499, 3505), 'time.time', 'time', ([], {}), '()\n', (3503, 3505), False, 'from time import time\n'), ((4126, 4132), 'time.time', 'time', ([], {}), '()\n', (4130, 4132), False, 'from time import time\n'), ((4292, 4298), 'time.time', 'time', ([], {}), '()\n', (4296, 4298), False, 'from time import time\n'), ((4833, 4839), 
'time.time', 'time', ([], {}), '()\n', (4837, 4839), False, 'from time import time\n'), ((5566, 5572), 'time.time', 'time', ([], {}), '()\n', (5570, 5572), False, 'from time import time\n'), ((5758, 5764), 'time.time', 'time', ([], {}), '()\n', (5762, 5764), False, 'from time import time\n'), ((7756, 7789), 'consensus.merge_windows_in_cc', 'merge_windows_in_cc', (['cc_idx', 'opts'], {}), '(cc_idx, opts)\n', (7775, 7789), False, 'from consensus import run_spoa_in_cc, merge_windows_in_cc\n'), ((8685, 8691), 'time.time', 'time', ([], {}), '()\n', (8689, 8691), False, 'from time import time\n'), ((6817, 6823), 'time.time', 'time', ([], {}), '()\n', (6821, 6823), False, 'from time import time\n'), ((7092, 7098), 'time.time', 'time', ([], {}), '()\n', (7096, 7098), False, 'from time import time\n'), ((7264, 7270), 'time.time', 'time', ([], {}), '()\n', (7268, 7270), False, 'from time import time\n'), ((7995, 8001), 'time.time', 'time', ([], {}), '()\n', (7999, 8001), False, 'from time import time\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 18 16:59:08 2021
@author: Hatlab_3
"""
from data_processing.ddh5_Plotting.utility_modules.FS_utility_functions import fit_fluxsweep
from data_processing.Helper_Functions import find_all_ddh5
from plottr.apps.autoplot import autoplotDDH5, script, main
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import argrelextrema, savgol_filter
from scipy.interpolate import interp1d
def find_quanta(currents, res_freqs, show=True, smooth_window=11, order=2):
    """Locate two flux-quantum maxima in a resonance-vs-current sweep.

    The resonance trace is smoothed with a Savitzky-Golay filter
    (polynomial order 2, window ``smooth_window``) and local maxima are
    detected with ``argrelextrema`` (comparison window ``order``).
    Exactly two maxima must be found: their current separation defines
    the quantum size and the smaller of the two currents the offset.

    Returns a 4-tuple ``(quanta_size, quanta_offset, current_to_quanta,
    quanta_to_current)`` where the last two entries are the forward and
    inverse linear conversion functions between bias current and flux
    expressed in quanta.

    Raises ``Exception`` when the number of detected maxima is not two.
    """
    smoothed = savgol_filter(res_freqs, smooth_window, 2)
    ext = argrelextrema(smoothed, np.greater, order=order)[0]
    if show:
        plt.plot(currents, res_freqs)
        for idx in ext:
            plt.plot(currents[idx], res_freqs[idx], 'r*')
    if np.size(ext) != 2:
        raise Exception(f'Two extrema not found: {ext}')
    quanta_size = np.abs(currents[ext[1]] - currents[ext[0]])
    quanta_offset = min(currents[ext])

    def current_to_quanta(c):
        """Convert a bias current to flux in units of quanta."""
        return (c - quanta_offset) / quanta_size

    def quanta_to_current(q):
        """Convert a flux value in quanta back to a bias current."""
        return q * quanta_size + quanta_offset

    return quanta_size, quanta_offset, current_to_quanta, quanta_to_current
if __name__ == '__main__':
    # adapting an old file to a new file
    #%%
    datadir = r'Z:/Data/SA_2X_B1/fluxsweep/2021-07-09/2021-07-09_0001_B1_FS1/2021-07-09_0001_B1_FS1.ddh5'
    savedir = r'Z:/Data/SA_2X_B1/fluxsweep/fits'
    # datadir = r'E:\Data\Cooldown_20210104\fluxsweep\2021-01-04_0003_Recentering_FS.ddh5'
    # savedir = r'E:\Data\Cooldown_20210104\fluxsweep'
    FS = fit_fluxsweep(datadir, savedir, 'SA_2X_B1')
    #%%
    # Seed the fitter with a hand-tuned starting point (frequency in Hz).
    FS.initial_fit(8.25e9, QextGuess = 1e2, QintGuess=20e4, magBackGuess = 0.01, phaseOffGuess = 0, debug = False, smooth = False, smooth_win = 15, adaptive_window = False, adapt_win_size = 100e6)
    #%% Automatic Fitting (be sure initial fit is good!)
    currents, res_freqs, Qints, Qexts, magBacks = FS.semiauto_fit(FS.currents, FS.vna_freqs/(2*np.pi), FS.undriven_vna_power, FS.undriven_vna_phase, FS.initial_popt, debug = False, savedata = True, smooth = False, smooth_win = 5, adaptive_window = True, adapt_win_size = 300e6, fourier_filter = False, pconv_tol = 7)
    #%%reloading an old file
    #%%plotting the resonant frequency
    fig = plt.figure(0)
    ax = fig.add_subplot(111)
    ax.plot(currents*1000, res_freqs/1e6)
    ax.set_xlabel('Bias Currents (mA)')
    ax.set_ylabel('Resonant Frequencies (MHz)')
    ax.title.set_text('ChemPot Resonant Frequency vs. Bias Current')
    #%%Finding and plotting flux quanta and flux variables, interpolating resonance frequencies to generate resonance functions wrt bias current and flux
    quanta_size, quanta_offset, conv_func, conv_func_inverse = find_quanta(currents, res_freqs, show = False, smooth_window = 221)
    res_func = interp1d(currents, res_freqs, 'linear')
    print(f"Quanta size: {quanta_size}\nQuanta_offset: {quanta_offset}")
    filt = (conv_func(currents)<0)*(conv_func(currents)>-0.52)
    plt.plot(conv_func(currents)[filt], res_freqs[filt])
    plt.figure(2)
    plt.plot(currents, res_freqs, label = 'fitted data')
    plt.plot(currents, res_func(currents), label = 'quadratic interpolation')
    plt.legend()
    plt.figure(3)
    #%%
    # BUGFIX: the original plotted `res_func1(currents)`, a name never defined
    # in this file (apparently left over from an interactive session) that
    # would raise NameError.  Plot the smoothing residual of res_func instead.
    plt.plot(currents, res_func(currents) - savgol_filter(res_func(currents), 21, 2))
"numpy.abs",
"data_processing.ddh5_Plotting.utility_modules.FS_utility_functions.fit_fluxsweep",
"matplotlib.pyplot.plot",
"numpy.size",
"scipy.signal.savgol_filter",
"scipy.interpolate.interp1d",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.legend"
] | [((1611, 1654), 'data_processing.ddh5_Plotting.utility_modules.FS_utility_functions.fit_fluxsweep', 'fit_fluxsweep', (['datadir', 'savedir', '"""SA_2X_B1"""'], {}), "(datadir, savedir, 'SA_2X_B1')\n", (1624, 1654), False, 'from data_processing.ddh5_Plotting.utility_modules.FS_utility_functions import fit_fluxsweep\n'), ((2312, 2325), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {}), '(0)\n', (2322, 2325), True, 'import matplotlib.pyplot as plt\n'), ((2855, 2894), 'scipy.interpolate.interp1d', 'interp1d', (['currents', 'res_freqs', '"""linear"""'], {}), "(currents, res_freqs, 'linear')\n", (2863, 2894), False, 'from scipy.interpolate import interp1d\n'), ((3092, 3105), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (3102, 3105), True, 'import matplotlib.pyplot as plt\n'), ((3110, 3160), 'matplotlib.pyplot.plot', 'plt.plot', (['currents', 'res_freqs'], {'label': '"""fitted data"""'}), "(currents, res_freqs, label='fitted data')\n", (3118, 3160), True, 'import matplotlib.pyplot as plt\n'), ((3245, 3257), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3255, 3257), True, 'import matplotlib.pyplot as plt\n'), ((3262, 3275), 'matplotlib.pyplot.figure', 'plt.figure', (['(3)'], {}), '(3)\n', (3272, 3275), True, 'import matplotlib.pyplot as plt\n'), ((647, 676), 'matplotlib.pyplot.plot', 'plt.plot', (['currents', 'res_freqs'], {}), '(currents, res_freqs)\n', (655, 676), True, 'import matplotlib.pyplot as plt\n'), ((764, 776), 'numpy.size', 'np.size', (['ext'], {}), '(ext)\n', (771, 776), True, 'import numpy as np\n'), ((806, 849), 'numpy.abs', 'np.abs', (['(currents[ext[1]] - currents[ext[0]])'], {}), '(currents[ext[1]] - currents[ext[0]])\n', (812, 849), True, 'import numpy as np\n'), ((551, 593), 'scipy.signal.savgol_filter', 'savgol_filter', (['res_freqs', 'smooth_window', '(2)'], {}), '(res_freqs, smooth_window, 2)\n', (564, 593), False, 'from scipy.signal import argrelextrema, savgol_filter\n'), ((713, 756), 
'matplotlib.pyplot.plot', 'plt.plot', (['currents[pt]', 'res_freqs[pt]', '"""r*"""'], {}), "(currents[pt], res_freqs[pt], 'r*')\n", (721, 756), True, 'import matplotlib.pyplot as plt\n')] |
"""app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from . import views
urlpatterns = [
    # Manager landing page, plus a per-object view selected by integer id.
    path('', views.manager, name='manager'),
    path('<int:id>/', views.manager1, name='manager1'),
    # Statistics and account pages.
    path('stats/', views.stats, name='stats'),
    path('account/', views.account, name='manager-account'),
    # Mutating actions on the object with the given id: cancel, remove a
    # sub-item (second integer `r`), and add.
    path('<int:id>/cancel/', views.cancel, name='manager-cancel'),
    path('<int:id>/<int:r>/remove', views.remove, name='manager-remove'),
    path('<int:id>/add', views.add, name='manager-add'),
    # Guest management — presumably tied to a booking; `t` looks like a
    # table/booking discriminator.  TODO(review): confirm against the view
    # signatures in views.py.
    path('add-guest/<int:id>', views.add_guest, name='add_guest'),
    path('delete-guest/<int:t>/<int:id>', views.delete_guest, name='delete-guest'),
    # Read-only detail page for a single booking.
    path('booking/<int:id>/details/', views.booking_details, name='booking-details'),
]
| [
"django.urls.path"
] | [((697, 736), 'django.urls.path', 'path', (['""""""', 'views.manager'], {'name': '"""manager"""'}), "('', views.manager, name='manager')\n", (701, 736), False, 'from django.urls import path\n'), ((742, 792), 'django.urls.path', 'path', (['"""<int:id>/"""', 'views.manager1'], {'name': '"""manager1"""'}), "('<int:id>/', views.manager1, name='manager1')\n", (746, 792), False, 'from django.urls import path\n'), ((798, 839), 'django.urls.path', 'path', (['"""stats/"""', 'views.stats'], {'name': '"""stats"""'}), "('stats/', views.stats, name='stats')\n", (802, 839), False, 'from django.urls import path\n'), ((845, 900), 'django.urls.path', 'path', (['"""account/"""', 'views.account'], {'name': '"""manager-account"""'}), "('account/', views.account, name='manager-account')\n", (849, 900), False, 'from django.urls import path\n'), ((906, 967), 'django.urls.path', 'path', (['"""<int:id>/cancel/"""', 'views.cancel'], {'name': '"""manager-cancel"""'}), "('<int:id>/cancel/', views.cancel, name='manager-cancel')\n", (910, 967), False, 'from django.urls import path\n'), ((973, 1041), 'django.urls.path', 'path', (['"""<int:id>/<int:r>/remove"""', 'views.remove'], {'name': '"""manager-remove"""'}), "('<int:id>/<int:r>/remove', views.remove, name='manager-remove')\n", (977, 1041), False, 'from django.urls import path\n'), ((1047, 1098), 'django.urls.path', 'path', (['"""<int:id>/add"""', 'views.add'], {'name': '"""manager-add"""'}), "('<int:id>/add', views.add, name='manager-add')\n", (1051, 1098), False, 'from django.urls import path\n'), ((1104, 1165), 'django.urls.path', 'path', (['"""add-guest/<int:id>"""', 'views.add_guest'], {'name': '"""add_guest"""'}), "('add-guest/<int:id>', views.add_guest, name='add_guest')\n", (1108, 1165), False, 'from django.urls import path\n'), ((1171, 1249), 'django.urls.path', 'path', (['"""delete-guest/<int:t>/<int:id>"""', 'views.delete_guest'], {'name': '"""delete-guest"""'}), "('delete-guest/<int:t>/<int:id>', views.delete_guest, 
name='delete-guest')\n", (1175, 1249), False, 'from django.urls import path\n'), ((1255, 1340), 'django.urls.path', 'path', (['"""booking/<int:id>/details/"""', 'views.booking_details'], {'name': '"""booking-details"""'}), "('booking/<int:id>/details/', views.booking_details, name='booking-details'\n )\n", (1259, 1340), False, 'from django.urls import path\n')] |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Function:
Test mindinsight.datavisual.data_transform.data_loader.
Usage:
pytest tests/ut/datavisual
"""
import os
import shutil
import tempfile
import pytest
from mindinsight.datavisual.common.exceptions import SummaryLogPathInvalid
from mindinsight.datavisual.data_transform import data_loader
from mindinsight.datavisual.data_transform.data_loader import DataLoader
from ..mock import MockLogger
class TestDataLoader:
    """Test data_loader."""

    @classmethod
    def setup_class(cls):
        # Route data_loader's log output to the mock so the tests can assert
        # on the emitted messages.
        data_loader.logger = MockLogger

    def setup_method(self):
        # mkdtemp() already creates a fresh, empty, uniquely named directory,
        # so the previous exists/rmtree/mkdir dance was redundant.
        self._summary_dir = tempfile.mkdtemp()

    def teardown_method(self):
        if os.path.exists(self._summary_dir):
            shutil.rmtree(self._summary_dir)

    def _generate_files(self, dir_path, file_list):
        """Create an empty file under `dir_path` for every name in `file_list`."""
        for file_name in file_list:
            with open(os.path.join(dir_path, file_name), 'w'):
                pass

    def test_load_with_not_file_list(self):
        """Test loading method with empty file list."""
        loader = DataLoader(self._summary_dir)
        with pytest.raises(SummaryLogPathInvalid):
            loader.load()
        assert 'No valid files can be loaded' in str(MockLogger.log_msg['warning'])

    def test_load_with_invalid_file_list(self):
        """Test loading method with valid path and invalid file_list."""
        file_list = ['summary.abc01', 'summary.abc02']
        self._generate_files(self._summary_dir, file_list)
        loader = DataLoader(self._summary_dir)
        with pytest.raises(SummaryLogPathInvalid):
            loader.load()
        assert 'No valid files can be loaded' in str(MockLogger.log_msg['warning'])

    def test_load_success(self):
        """Test loading method with valid path and file_list."""
        # FIX: the original used tempfile.NamedTemporaryFile().name as a
        # directory path.  The unreferenced file object can be garbage
        # collected (deleting its file) at any moment, making the subsequent
        # exists()/mkdir() sequence race-prone; mkdtemp() is the safe way to
        # obtain a fresh directory.
        dir_path = tempfile.mkdtemp()
        try:
            file_list = ['summary.001', 'summary.002']
            self._generate_files(dir_path, file_list)
            dataloader = DataLoader(dir_path)
            dataloader.load()
            assert dataloader._loader is not None
        finally:
            # Clean up even if the assertions above fail.
            shutil.rmtree(dir_path)
| [
"os.path.exists",
"os.path.join",
"tempfile.NamedTemporaryFile",
"tempfile.mkdtemp",
"os.mkdir",
"pytest.raises",
"shutil.rmtree",
"mindinsight.datavisual.data_transform.data_loader.DataLoader"
] | [((1273, 1291), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1289, 1291), False, 'import tempfile\n'), ((1303, 1336), 'os.path.exists', 'os.path.exists', (['self._summary_dir'], {}), '(self._summary_dir)\n', (1317, 1336), False, 'import os\n'), ((1391, 1418), 'os.mkdir', 'os.mkdir', (['self._summary_dir'], {}), '(self._summary_dir)\n', (1399, 1418), False, 'import os\n'), ((1462, 1495), 'os.path.exists', 'os.path.exists', (['self._summary_dir'], {}), '(self._summary_dir)\n', (1476, 1495), False, 'import os\n'), ((1833, 1862), 'mindinsight.datavisual.data_transform.data_loader.DataLoader', 'DataLoader', (['self._summary_dir'], {}), '(self._summary_dir)\n', (1843, 1862), False, 'from mindinsight.datavisual.data_transform.data_loader import DataLoader\n'), ((2277, 2306), 'mindinsight.datavisual.data_transform.data_loader.DataLoader', 'DataLoader', (['self._summary_dir'], {}), '(self._summary_dir)\n', (2287, 2306), False, 'from mindinsight.datavisual.data_transform.data_loader import DataLoader\n'), ((2815, 2835), 'mindinsight.datavisual.data_transform.data_loader.DataLoader', 'DataLoader', (['dir_path'], {}), '(dir_path)\n', (2825, 2835), False, 'from mindinsight.datavisual.data_transform.data_loader import DataLoader\n'), ((2916, 2939), 'shutil.rmtree', 'shutil.rmtree', (['dir_path'], {}), '(dir_path)\n', (2929, 2939), False, 'import shutil\n'), ((1350, 1382), 'shutil.rmtree', 'shutil.rmtree', (['self._summary_dir'], {}), '(self._summary_dir)\n', (1363, 1382), False, 'import shutil\n'), ((1509, 1541), 'shutil.rmtree', 'shutil.rmtree', (['self._summary_dir'], {}), '(self._summary_dir)\n', (1522, 1541), False, 'import shutil\n'), ((1876, 1912), 'pytest.raises', 'pytest.raises', (['SummaryLogPathInvalid'], {}), '(SummaryLogPathInvalid)\n', (1889, 1912), False, 'import pytest\n'), ((2320, 2356), 'pytest.raises', 'pytest.raises', (['SummaryLogPathInvalid'], {}), '(SummaryLogPathInvalid)\n', (2333, 2356), False, 'import pytest\n'), ((2586, 2615), 
'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (2613, 2615), False, 'import tempfile\n'), ((2636, 2660), 'os.path.exists', 'os.path.exists', (['dir_path'], {}), '(dir_path)\n', (2650, 2660), False, 'import os\n'), ((2674, 2692), 'os.mkdir', 'os.mkdir', (['dir_path'], {}), '(dir_path)\n', (2682, 2692), False, 'import os\n'), ((1653, 1686), 'os.path.join', 'os.path.join', (['dir_path', 'file_name'], {}), '(dir_path, file_name)\n', (1665, 1686), False, 'import os\n')] |
from linked_list import LinkedList
from node import Node
from mergeList import ll_merge
# import pytest
def test_linked_list_creation():
"""Validating if Linked List was Created."""
ll = LinkedList([2, 3, 4, 5])
assert Node(2).val is ll.head.val
assert isinstance(ll.head.val, int)
def test_insert_into_linkedlist():
"""Validating if the value inserted into a Linked-List was the same as\
inputed inside a Node object."""
ll = LinkedList()
assert (4 is not ll.head)
ll.insert(4)
assert True == (Node(4).val == ll.head.val)
def test_find_value():
"""Validates if value was found by find function."""
books = LinkedList(['Python The Hard Way', 'The Pragmatic Programmer', 'The\
Hunger Games', 'Mr. Robot'])
books.insert(4)
assert 4 is books.head.val
assert True is books.find('The Pragmatic Programmer')
def test_current_node_value():
"""Checking for attributes inside __init__"""
node = Node([1, 2, 3, 4, 5])
assert isinstance(node.val, list)
def test_append():
ll = LinkedList(['book one', 'book two'])
ll.append('a')
assert ll.find('a') is True
assert 'a' == ll.head._next.val
def test_insert_before():
ll = LinkedList(['a', 'b', 'c'])
ll.insert_before('a', 'z')
assert 'z' == ll.head.val
assert ll.find('z') is True
def test_insert_after():
ll = LinkedList(['a', 'b', 'c'])
ll.insert_after('a', 'z')
assert 'z' == ll.head._next.val
def test_kkthFromEnd():
"""
Checks if the linked-list is empty
Checks if out of range in Linked-List
Returns node with correct key value
"""
nodes1 = LinkedList(['a', 1, 'hello world', 5.0, True])
assert nodes1.kthFromEnd(3) == 1
nodes2 = LinkedList([])
assert nodes2.kthFromEnd(5) == 'LinkedList is empty.'
nodes2.insert([5, 6, 7, 8, 9, 10])
assert nodes2.kthFromEnd(100) is False
def test_ll_merge():
"""
Validates if LinkedList is empty.
Returns Correct head node of LinkedList.
Returns the whole LinkedList merged.
"""
arr1 = LinkedList()
arr2 = LinkedList()
LL = ll_merge(arr1, arr2)
assert LL is False
# Adding 6 to LinkedList
arr1 = LinkedList([6])
LL = ll_merge(arr1, arr2)
assert LL.head.val == 6
# Combining TWO LinkedList
arr1 = LinkedList([1, 3, 5, 7])
arr2 = LinkedList([2, 4, 6, 8])
LL = ll_merge(arr1, arr2)
assert LL.head.val.val == 1
assert LL.head._next.val == 2
assert LL.head._next._next.val.val == 3
assert LL.head._next._next._next.val == 4
| [
"linked_list.LinkedList",
"node.Node",
"mergeList.ll_merge"
] | [((198, 222), 'linked_list.LinkedList', 'LinkedList', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (208, 222), False, 'from linked_list import LinkedList\n'), ((460, 472), 'linked_list.LinkedList', 'LinkedList', ([], {}), '()\n', (470, 472), False, 'from linked_list import LinkedList\n'), ((662, 765), 'linked_list.LinkedList', 'LinkedList', (["['Python The Hard Way', 'The Pragmatic Programmer', 'The Hunger Games',\n 'Mr. Robot']"], {}), "(['Python The Hard Way', 'The Pragmatic Programmer',\n 'The Hunger Games', 'Mr. Robot'])\n", (672, 765), False, 'from linked_list import LinkedList\n'), ((967, 988), 'node.Node', 'Node', (['[1, 2, 3, 4, 5]'], {}), '([1, 2, 3, 4, 5])\n', (971, 988), False, 'from node import Node\n'), ((1057, 1093), 'linked_list.LinkedList', 'LinkedList', (["['book one', 'book two']"], {}), "(['book one', 'book two'])\n", (1067, 1093), False, 'from linked_list import LinkedList\n'), ((1218, 1245), 'linked_list.LinkedList', 'LinkedList', (["['a', 'b', 'c']"], {}), "(['a', 'b', 'c'])\n", (1228, 1245), False, 'from linked_list import LinkedList\n'), ((1375, 1402), 'linked_list.LinkedList', 'LinkedList', (["['a', 'b', 'c']"], {}), "(['a', 'b', 'c'])\n", (1385, 1402), False, 'from linked_list import LinkedList\n'), ((1645, 1691), 'linked_list.LinkedList', 'LinkedList', (["['a', 1, 'hello world', 5.0, True]"], {}), "(['a', 1, 'hello world', 5.0, True])\n", (1655, 1691), False, 'from linked_list import LinkedList\n'), ((1743, 1757), 'linked_list.LinkedList', 'LinkedList', (['[]'], {}), '([])\n', (1753, 1757), False, 'from linked_list import LinkedList\n'), ((2072, 2084), 'linked_list.LinkedList', 'LinkedList', ([], {}), '()\n', (2082, 2084), False, 'from linked_list import LinkedList\n'), ((2096, 2108), 'linked_list.LinkedList', 'LinkedList', ([], {}), '()\n', (2106, 2108), False, 'from linked_list import LinkedList\n'), ((2118, 2138), 'mergeList.ll_merge', 'll_merge', (['arr1', 'arr2'], {}), '(arr1, arr2)\n', (2126, 2138), False, 'from mergeList import 
ll_merge\n'), ((2203, 2218), 'linked_list.LinkedList', 'LinkedList', (['[6]'], {}), '([6])\n', (2213, 2218), False, 'from linked_list import LinkedList\n'), ((2228, 2248), 'mergeList.ll_merge', 'll_merge', (['arr1', 'arr2'], {}), '(arr1, arr2)\n', (2236, 2248), False, 'from mergeList import ll_merge\n'), ((2320, 2344), 'linked_list.LinkedList', 'LinkedList', (['[1, 3, 5, 7]'], {}), '([1, 3, 5, 7])\n', (2330, 2344), False, 'from linked_list import LinkedList\n'), ((2356, 2380), 'linked_list.LinkedList', 'LinkedList', (['[2, 4, 6, 8]'], {}), '([2, 4, 6, 8])\n', (2366, 2380), False, 'from linked_list import LinkedList\n'), ((2390, 2410), 'mergeList.ll_merge', 'll_merge', (['arr1', 'arr2'], {}), '(arr1, arr2)\n', (2398, 2410), False, 'from mergeList import ll_merge\n'), ((234, 241), 'node.Node', 'Node', (['(2)'], {}), '(2)\n', (238, 241), False, 'from node import Node\n'), ((540, 547), 'node.Node', 'Node', (['(4)'], {}), '(4)\n', (544, 547), False, 'from node import Node\n')] |
# -*- coding: utf-8 -*-
import scrapy
from bs4 import BeautifulSoup
from cultureBigdata.items import CultureNewsItem, CultureBasicItem, CultureEventItem
from selenium import webdriver
import re
import time
class ChengdulibSpider(scrapy.Spider):
name = 'chengdulib'
# 爬去机构动态所需的参数
news_url = ''
news_base_url = ''
news_count = 1
news_page_end = 221
# 爬去机构介绍所需的参数
intro_url = 'http://www.jnlib.net.cn/gyjt/201311/t20131105_2257.html'
# 爬去机构活动所需的参数
def start_requests(self):
# 请求机构介绍信息
#yield scrapy.Request(self.intro_url, callback=self.intro_parse)
# 请求机构动态信息
for i in range(1,360):
page = (i-1)
url = 'https://act.cdclib.org/action/web/publish.do?actionCmd=listSearch&offset=' + str(page)
yield scrapy.Request(url, callback=self.event_parse)
# 请求机构活动信息
#yield scrapy.Request(self.event_url, callback=self.event_parse)
'''
def news_parse(self, response):
origin_url = 'http://www.zslib.com.cn/TempletPage/List.aspx?dbid=2&page=1'
data = response.body
soup = BeautifulSoup(data, 'html.parser')
article_lists = soup.find('div', {"class": "gl_list"})
for article in article_lists.find_all("li"):
item = CultureNewsItem()
try:
item['pav_name'] = '广东省立中山图书馆'
item['title'] = article.a.string
item['url'] = origin_url + article.a.attrs['href'][2:]
item['time'] = re.findall(r'(\d{4}-\d{1,2}-\d{1,2})', article.span.string)[0]
yield scrapy.Request(item['url'], meta={'item': item}, callback=self.news_text_parse)
except Exception as err:
print(err)
if self.news_count < self.news_page_end:
self.news_count = self.news_count + 1
yield scrapy.Request(self.news_base_url + str(self.news_count) + '.html', callback=self.news_parse)
else:
return None
def news_text_parse(self, response):
item = response.meta['item']
data = response.body
soup = BeautifulSoup(data, "html.parser")
content = soup.find("div", {"class": "xl_show"})
item['content'] = str(content.text).replace('\u3000', '').replace('\xa0', '').replace('\n', '')
return item
'''
def event_parse(self, response):
data = response.body
soup = BeautifulSoup(data, 'html.parser')
event_all = soup.find_all('div', {'class': 'hdzx_search'})[0].find_all('div',{'class':'item'})
for event in event_all:
item = CultureEventItem()
try:
item['pav_name'] = '成都市图书馆'
item['activity_name'] = event.find_all('div',{'class':'name'})[0].text.strip()
print(item['activity_name'])
item['activity_time']=event.find_all('div',{'class':'p'})[1].text.strip()[5:]
item['place'] = event.find_all('div',{'class':'p'})[3].text.strip()[5:]
print(item['place'])
item['url'] = 'https://act.cdclib.org/action/web/' + event.find_all('div',{'class':'name'})[0].a.attrs['href']
print(item['url'])
#item['remark'] = event.find('div',{'class':'hdzx_layer_2'}).text.replace(' ', '').replace('\n', '').replace('\r', '').replace('\xa0', '')
item['organizer'] = '成都市图书馆'
yield scrapy.Request(item['url'], meta={'item': item}, callback=self.event_text_parse)
break
except Exception as err:
print('event_parse')
print(err)
def event_text_parse(self, response):
item = response.meta['item']
data = response.body
soup = BeautifulSoup(data, 'html.parser')
#print(soup.find_all('td',{'valign':'top'})[1])
info = soup.find_all('td',{'valign':'top'})[1].find_all('tr')
print(len(info))
item['activity_time'] = info[6].text[6:17].strip()
print(item['activity_time'])
content = soup.find('div',{'class':'grid_item'}).find_all('table')[-1]
full_text = str(content.text).replace('\u3000', '').replace('\xa0', '')
p_tags = content.find_all('p')
p_content = []
for p in p_tags:
p_content.append(str(p.text).replace('\u3000', '').replace('\xa0', ''))
# print(p_content)
########################################################################################
item['remark'] = full_text.replace('\n', '')
########################################################################################
########################################################################################
item['activity_type'] = ''
try:
if '展览' in full_text:
item['activity_type'] = '展览'
elif '讲座' in full_text:
item['activity_type'] = '讲座'
elif '培训' in full_text:
item['activity_type'] = '培训'
elif '阅读' in full_text:
item['activity_type'] = '阅读'
except:
pass
########################################################################################
item['presenter'] = ''
for i in range(len(p_content)):
if '一、活动主讲人:' in p_content[i]:
item['presenter'] = p_content[i + 1]
break
elif '主 讲 人:' in p_content[i]:
item['presenter'] = p_content[i].split(':')[1]
break
elif '主讲人:' in p_content[i]:
item['presenter'] = p_content[i].split(':')[1]
break
try:
if re.findall(r'(...)老师', content.text)[0] and item['presenter'] == '':
item['presenter'] = re.findall(r'(...)老师', content.text)[0]
except:
pass
try:
if re.findall(r'(...)先生', content.text)[0] and item['presenter'] == '':
item['presenter'] = re.findall(r'(...)先生', content.text)[0]
except:
pass
try:
if re.findall(r'(...)姐姐', content.text)[0] and item['presenter'] == '':
item['presenter'] = re.findall(r'(...)姐姐', content.text)[0]
except:
pass
########################################################################################
item['organizer'] = ''
for i in range(len(p_content)):
if '主办单位:' in p_content[i]:
item['organizer'] = p_content[i].split(':')[1]
break
elif '举办单位:' == p_content[i] or '主办单位' == p_content[i]:
item['organizer'] = p_content[i + 1]
break
elif '举办单位:' in p_content[i]:
item['organizer'] = p_content[i].split(':')[1]
break
elif '主 办:' in p_content[i]:
item['organizer'] = p_content[i].split(':')[1]
break
elif '举办:' in p_content[i]:
item['organizer'] = p_content[i].split(':')[1]
break
# 举办
########################################################################################
item['age_limit'] = ''
try:
if re.findall(r'不限年龄', content.text)[0] and item['age_limit'] == '':
item['age_limit'] = re.findall(r'不限年龄', content.text)[0]
except:
pass
try:
if re.findall(r'([1‐9]?\d~[1‐9]?\d岁)', content.text)[0] and item['age_limit'] == '':
item['age_limit'] = re.findall(r'([1‐9]?\d~[1‐9]?\d岁)', content.text)[0]
except:
pass
try:
if re.findall(r'([1‐9]?\d岁-[1‐9]?\d岁)', content.text)[0] and item['age_limit'] == '':
item['age_limit'] = re.findall(r'([1‐9]?\d岁-[1‐9]?\d岁)', content.text)[0]
except:
pass
try:
if re.findall(r'([1‐9]?\d-[1‐9]?\d岁)', content.text)[0] and item['age_limit'] == '':
item['age_limit'] = re.findall(r'([1‐9]?\d-[1‐9]?\d岁)', content.text)[0]
except:
pass
########################################################################################
item['presenter_introduction'] = ''
for i in range(len(p_content)):
if '作者简介:' == p_content[i] or '主讲人简介:' == p_content[i]:
item['presenter_introduction'] = p_content[i + 1]
break
elif '讲师简介:' in p_content[i]:
item['presenter_introduction'] = p_content[i].split(":")[1]
break
########################################################################################
item['contact'] = ''
for i in range(len(p_content)):
if '预约电话:' in p_content[i]:
item['contact'] = p_content[i].split(':')[1]
break
try:
if re.findall(r'\d{4}—\d{8}', content.text)[0] and item['age_limit'] == '':
item['contact'] = re.findall(r'\d{4}—\d{8}', content.text)[0]
except:
pass
try:
if re.findall(r'\d{8}', content.text)[0] and item['age_limit'] == '':
item['contact'] = re.findall(r'\d{8}', content.text)[0]
except:
pass
########################################################################################
item['participation_number'] = ''
########################################################################################
item['click_number'] = ''
########################################################################################
item['source'] = ''
########################################################################################
item['activity_introduction'] = ''
########################################################################################
return item
def intro_parse(self, response):
item = CultureBasicItem()
data = response.body
soup = BeautifulSoup(data, 'html.parser')
intro = str(soup.find('div', {"class": 'TRS_Editor'}).text).strip()
item['pav_name'] = '海南省图书馆'
item['pav_introduction'] = intro.replace('\u3000\u3000', '')
item['region'] = '海南'
item['area_number'] = '2.5万平方米'
item['collection_number'] = '164万余册'
item['branch_number'] = ''
item['librarian_number'] = ''
item['client_number'] = '17万'
item['activity_number'] = ''
yield item
| [
"cultureBigdata.items.CultureBasicItem",
"bs4.BeautifulSoup",
"scrapy.Request",
"cultureBigdata.items.CultureEventItem",
"re.findall"
] | [((2515, 2549), 'bs4.BeautifulSoup', 'BeautifulSoup', (['data', '"""html.parser"""'], {}), "(data, 'html.parser')\n", (2528, 2549), False, 'from bs4 import BeautifulSoup\n'), ((3890, 3924), 'bs4.BeautifulSoup', 'BeautifulSoup', (['data', '"""html.parser"""'], {}), "(data, 'html.parser')\n", (3903, 3924), False, 'from bs4 import BeautifulSoup\n'), ((10324, 10342), 'cultureBigdata.items.CultureBasicItem', 'CultureBasicItem', ([], {}), '()\n', (10340, 10342), False, 'from cultureBigdata.items import CultureNewsItem, CultureBasicItem, CultureEventItem\n'), ((10389, 10423), 'bs4.BeautifulSoup', 'BeautifulSoup', (['data', '"""html.parser"""'], {}), "(data, 'html.parser')\n", (10402, 10423), False, 'from bs4 import BeautifulSoup\n'), ((2707, 2725), 'cultureBigdata.items.CultureEventItem', 'CultureEventItem', ([], {}), '()\n', (2723, 2725), False, 'from cultureBigdata.items import CultureNewsItem, CultureBasicItem, CultureEventItem\n'), ((847, 893), 'scrapy.Request', 'scrapy.Request', (['url'], {'callback': 'self.event_parse'}), '(url, callback=self.event_parse)\n', (861, 893), False, 'import scrapy\n'), ((3547, 3632), 'scrapy.Request', 'scrapy.Request', (["item['url']"], {'meta': "{'item': item}", 'callback': 'self.event_text_parse'}), "(item['url'], meta={'item': item}, callback=self.event_text_parse\n )\n", (3561, 3632), False, 'import scrapy\n'), ((5896, 5931), 're.findall', 're.findall', (['"""(...)老师"""', 'content.text'], {}), "('(...)老师', content.text)\n", (5906, 5931), False, 'import re\n'), ((6002, 6037), 're.findall', 're.findall', (['"""(...)老师"""', 'content.text'], {}), "('(...)老师', content.text)\n", (6012, 6037), False, 'import re\n'), ((6107, 6142), 're.findall', 're.findall', (['"""(...)先生"""', 'content.text'], {}), "('(...)先生', content.text)\n", (6117, 6142), False, 'import re\n'), ((6213, 6248), 're.findall', 're.findall', (['"""(...)先生"""', 'content.text'], {}), "('(...)先生', content.text)\n", (6223, 6248), False, 'import re\n'), ((6318, 6353), 
're.findall', 're.findall', (['"""(...)姐姐"""', 'content.text'], {}), "('(...)姐姐', content.text)\n", (6328, 6353), False, 'import re\n'), ((6424, 6459), 're.findall', 're.findall', (['"""(...)姐姐"""', 'content.text'], {}), "('(...)姐姐', content.text)\n", (6434, 6459), False, 'import re\n'), ((7511, 7543), 're.findall', 're.findall', (['"""不限年龄"""', 'content.text'], {}), "('不限年龄', content.text)\n", (7521, 7543), False, 'import re\n'), ((7614, 7646), 're.findall', 're.findall', (['"""不限年龄"""', 'content.text'], {}), "('不限年龄', content.text)\n", (7624, 7646), False, 'import re\n'), ((7716, 7766), 're.findall', 're.findall', (['"""([1‐9]?\\\\d~[1‐9]?\\\\d岁)"""', 'content.text'], {}), "('([1‐9]?\\\\d~[1‐9]?\\\\d岁)', content.text)\n", (7726, 7766), False, 'import re\n'), ((7835, 7885), 're.findall', 're.findall', (['"""([1‐9]?\\\\d~[1‐9]?\\\\d岁)"""', 'content.text'], {}), "('([1‐9]?\\\\d~[1‐9]?\\\\d岁)', content.text)\n", (7845, 7885), False, 'import re\n'), ((7953, 8004), 're.findall', 're.findall', (['"""([1‐9]?\\\\d岁-[1‐9]?\\\\d岁)"""', 'content.text'], {}), "('([1‐9]?\\\\d岁-[1‐9]?\\\\d岁)', content.text)\n", (7963, 8004), False, 'import re\n'), ((8073, 8124), 're.findall', 're.findall', (['"""([1‐9]?\\\\d岁-[1‐9]?\\\\d岁)"""', 'content.text'], {}), "('([1‐9]?\\\\d岁-[1‐9]?\\\\d岁)', content.text)\n", (8083, 8124), False, 'import re\n'), ((8192, 8242), 're.findall', 're.findall', (['"""([1‐9]?\\\\d-[1‐9]?\\\\d岁)"""', 'content.text'], {}), "('([1‐9]?\\\\d-[1‐9]?\\\\d岁)', content.text)\n", (8202, 8242), False, 'import re\n'), ((8311, 8361), 're.findall', 're.findall', (['"""([1‐9]?\\\\d-[1‐9]?\\\\d岁)"""', 'content.text'], {}), "('([1‐9]?\\\\d-[1‐9]?\\\\d岁)', content.text)\n", (8321, 8361), False, 'import re\n'), ((9210, 9251), 're.findall', 're.findall', (['"""\\\\d{4}—\\\\d{8}"""', 'content.text'], {}), "('\\\\d{4}—\\\\d{8}', content.text)\n", (9220, 9251), False, 'import re\n'), ((9318, 9359), 're.findall', 're.findall', (['"""\\\\d{4}—\\\\d{8}"""', 'content.text'], {}), 
"('\\\\d{4}—\\\\d{8}', content.text)\n", (9328, 9359), False, 'import re\n'), ((9427, 9461), 're.findall', 're.findall', (['"""\\\\d{8}"""', 'content.text'], {}), "('\\\\d{8}', content.text)\n", (9437, 9461), False, 'import re\n'), ((9529, 9563), 're.findall', 're.findall', (['"""\\\\d{8}"""', 'content.text'], {}), "('\\\\d{8}', content.text)\n", (9539, 9563), False, 'import re\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" calibration_rig.py: trilateration microphone calibration
Since trilateration doesn't account for the acoustic properties of the sound
source, the results will be skewed. To account for this, it's possible
to use machine learning to build a lookup table-like model which takes
the inputs of the microphones and outputputs the cartesian coordinates
without actually touching any of the math behind trilateration
A 3D printer is used as a precise calibration rig to train the model. A
cellphone is zip tied to the head which produces the approprate tone, and
data is captured every 20mm
https://kevinponce.com/blog/python/send-gcode-through-serial-to-a-3d-printer-using-python/
"""
__version__ = "1.0"
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, <NAME>"
__license__ = "Apache 2.0"
import serial
import time
from acoustic_fixture import AcousticFixture as AF, BUFFER, AMPLITUDE_SIZE
import pickle
start_time = time.time()
# Initialize the acoustic fixture in calibration mode
af = AF(cal_mode=True)
# Assume lower left corner of the fixture is located at (0, 0, 0)
FIXTURE_HEIGHT = 56 # Distance to the top of the microphones
PHONE_CENTER = (170, 125, 8)
STEP = 20
X_MAX = 80
Y_MAX = 80
Z_MAX = 120
# Wait for the machine to return ok
def waitFor(ser, response):
ret = ""
while True:
if ser.in_waiting:
while ser.in_waiting:
ret = ser.readline().decode("utf-8")
print("[%12.6f] %s" % (time.time() - start_time, ret.replace("\n", "").replace("\r", "")))
if ret == response:
break
# Write a gcode command to the printer
def sendCommand(ser, gcode):
# Make sure we terminate our gcode
if gcode[-2:] != "\r\n":
gcode += "\r\n"
# Send the command
print("> %s" % (gcode.replace("\n", "").replace("\r", "")))
ser.write(str.encode(gcode))
time.sleep(0.1)
waitFor(ser, "ok\n")
print("Connecting to printer...")
ser = serial.Serial("COM4", 115200)
waitFor(ser, "LCD status changed\n") # Wait for the system to initialize
prompt = "Remove the acoustic fixture and phone, then press enter to home all axis"
sendCommand(ser, "M0 %s" % (prompt))
sendCommand(ser, "G28 0 W") # Home the system
sendCommand(ser, "G1 F9000") # Set the feed rate
# Move to the phone installation position
sendCommand(ser, "G1 X%d Y%d Z%d" % PHONE_CENTER)
sendCommand(ser, "M400") # Wait for moves to finish
prompt = "Install the phone into the holder with the bottom flush to the bed"
sendCommand(ser, "M0 %s" % (prompt))
# Make the bed accessable to the user so we can install the acoustic fixture
sendCommand(ser, "G1 X0 Y200 Z100")
sendCommand(ser, "M400") # Wait for moves to finish
prompt = "Install the acoustic fixture onto the print table"
sendCommand(ser, "M0 %s" % (prompt))
HEIGHT_OFFSET = 20
center_x, center_y, center_z = PHONE_CENTER
center_z += FIXTURE_HEIGHT + HEIGHT_OFFSET # start some distance above the mics
cal_dict = {}
num_datapoints = 0
FWD = True
REV = False
x_dir = FWD
y_dir = FWD
z_dir = FWD
# Cycle through the training at least 3 times
for i in range(3):
# Step through each calibration point
for z in range(0, Z_MAX + STEP, STEP):
y_range = range(-Y_MAX, Y_MAX + STEP, STEP) if x_dir == FWD else range(Y_MAX, -(Y_MAX + STEP), -STEP)
for y in y_range:
x_range = range(-X_MAX, X_MAX + STEP, STEP) if x_dir == FWD else range(X_MAX, -(X_MAX + STEP), -STEP)
for x in x_range:
sendCommand(ser, "G1 X%d Y%d Z%d" % (x + center_x, y + center_y, z + center_z))
sendCommand(ser, "M400")
time.sleep(0.5) # Wait 500ms for the fixture to stabalize before capturing data
# initialize the array of data if it doesn't exit
k = (x, y, z + HEIGHT_OFFSET)
if k not in cal_dict:
cal_dict[k] = []
# discard the first buffer since the fixture class is asynchronous
af.update()
# Take 10 samples of data
for i in range(AMPLITUDE_SIZE):
# fill the buffer with new data
af.update()
# copy the buffer into our training data set
cal_dict[k].append(af.buf_copy)
num_datapoints += 1
# print the average for debug purposes only
print("[%12.6f] M1: %.4f, M2: %.4f, M3: %.4f, n: %d" % (time.time() - start_time, af.amplitude_avg[0], af.amplitude_avg[1], af.amplitude_avg[2], num_datapoints))
# Reverse the direction of x
x_dir = ~x_dir
# Reverse the direction of y
y_dir = ~y_dir
# Dump the training data in binary format
pickle.dump(cal_dict, open("training_data.db", 'wb'))
end_time = time.time() - start_time
print("All done! Captured %d samples in %d minutes and %d seconds." % (num_datapoints, int(end_time/60), int(end_time) % 60))
ser.close | [
"serial.Serial",
"acoustic_fixture.AcousticFixture",
"time.time",
"time.sleep"
] | [((1010, 1021), 'time.time', 'time.time', ([], {}), '()\n', (1019, 1021), False, 'import time\n'), ((1082, 1099), 'acoustic_fixture.AcousticFixture', 'AF', ([], {'cal_mode': '(True)'}), '(cal_mode=True)\n', (1084, 1099), True, 'from acoustic_fixture import AcousticFixture as AF, BUFFER, AMPLITUDE_SIZE\n'), ((2033, 2062), 'serial.Serial', 'serial.Serial', (['"""COM4"""', '(115200)'], {}), "('COM4', 115200)\n", (2046, 2062), False, 'import serial\n'), ((1951, 1966), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1961, 1966), False, 'import time\n'), ((4964, 4975), 'time.time', 'time.time', ([], {}), '()\n', (4973, 4975), False, 'import time\n'), ((3765, 3780), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (3775, 3780), False, 'import time\n'), ((1550, 1561), 'time.time', 'time.time', ([], {}), '()\n', (1559, 1561), False, 'import time\n'), ((4611, 4622), 'time.time', 'time.time', ([], {}), '()\n', (4620, 4622), False, 'import time\n')] |
import unittest
from programy.clients.events.console.config import ConsoleConfiguration
from programy.config.brain.dynamic import BrainDynamicsConfiguration
from programy.config.file.yaml_file import YamlConfigurationFile
class BrainDynamicsConfigurationTests(unittest.TestCase):
def test_with_data(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
dynamic:
variables:
gettime: programy.dynamic.variables.datetime.GetTime
sets:
number: programy.dynamic.sets.numeric.IsNumeric
roman: programy.dynamic.sets.roman.IsRomanNumeral
maps:
romantodec: programy.dynamic.maps.roman.MapRomanToDecimal
dectoroman: programy.dynamic.maps.roman.MapDecimalToRoman
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.load_config_section(yaml, brain_config, ".")
self.assertEquals({'GETTIME': 'programy.dynamic.variables.datetime.GetTime'}, dynamic_config.dynamic_vars)
self.assertEquals({'NUMBER': 'programy.dynamic.sets.numeric.IsNumeric', 'ROMAN': 'programy.dynamic.sets.roman.IsRomanNumeral'}, dynamic_config.dynamic_sets)
self.assertEquals({'ROMANTODEC': 'programy.dynamic.maps.roman.MapRomanToDecimal', 'DECTOROMAN': 'programy.dynamic.maps.roman.MapDecimalToRoman'}, dynamic_config.dynamic_maps)
def test_with_missing_vars_sets_maps(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
dynamic:
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.load_config_section(yaml, brain_config, ".")
self.assertEquals({}, dynamic_config.dynamic_vars)
self.assertEquals({}, dynamic_config.dynamic_sets)
self.assertEquals({}, dynamic_config.dynamic_maps)
def test_with_missing_vars_sets_maps2(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
dynamic:
something: else
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.load_config_section(yaml, brain_config, ".")
self.assertEquals({}, dynamic_config.dynamic_vars)
self.assertEquals({}, dynamic_config.dynamic_sets)
self.assertEquals({}, dynamic_config.dynamic_maps)
def test_without_data(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
dynamic:
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.load_config_section(yaml, brain_config, ".")
self.assertEquals({}, dynamic_config.dynamic_vars)
self.assertEquals({}, dynamic_config.dynamic_sets)
self.assertEquals({}, dynamic_config.dynamic_maps)
def test_with_no_data(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.load_config_section(yaml, brain_config, ".")
self.assertEquals({}, dynamic_config.dynamic_vars)
self.assertEquals({}, dynamic_config.dynamic_sets)
self.assertEquals({}, dynamic_config.dynamic_maps)
def test_to_yaml_defaults(self):
yaml = {}
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.to_yaml(yaml, defaults=True)
self.assertEquals({'GETTIME': 'programy.dynamic.variables.datetime.GetTime'}, yaml['variables'])
self.assertEquals({'NUMBER': 'programy.dynamic.sets.numeric.IsNumeric',
'ROMAN': 'programy.dynamic.sets.roman.IsRomanNumeral',
'STOPWORD': 'programy.dynamic.sets.stopword.IsStopWord',
'SYNSETS': 'programy.dynamic.sets.synsets.IsSynset'}, yaml['sets'])
self.assertEquals({'ROMANTODDEC': 'programy.dynamic.maps.roman.MapRomanToDecimal',
'DECTOROMAN': 'programy.dynamic.maps.roman.MapDecimalToRoman',
'LEMMATIZE': 'programy.dynamic.maps.lemmatize.LemmatizeMap',
'STEMMER': 'programy.dynamic.maps.stemmer.StemmerMap'}, yaml['maps'])
def test_to_yaml_no_defaults(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
dynamic:
variables:
gettime: programy.dynamic.variables.datetime.GetTime
sets:
number: programy.dynamic.sets.numeric.IsNumeric
roman: programy.dynamic.sets.roman.IsRomanNumeral
maps:
romantodec: programy.dynamic.maps.roman.MapRomanToDecimal
dectoroman: programy.dynamic.maps.roman.MapDecimalToRoman
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.load_config_section(yaml, brain_config, ".")
data = {}
dynamic_config.to_yaml(data, defaults=False)
self.assertEquals({'GETTIME': 'programy.dynamic.variables.datetime.GetTime'}, data['variables'])
self.assertEquals({'NUMBER': 'programy.dynamic.sets.numeric.IsNumeric', 'ROMAN': 'programy.dynamic.sets.roman.IsRomanNumeral'}, data['sets'])
self.assertEquals({'ROMANTODEC': 'programy.dynamic.maps.roman.MapRomanToDecimal', 'DECTOROMAN': 'programy.dynamic.maps.roman.MapDecimalToRoman'}, data['maps'])
def test_to_yaml_no_defaults_no_data(self):
yaml = {}
dynamic_config = BrainDynamicsConfiguration()
dynamic_config.to_yaml(yaml, defaults=False)
self.assertEquals({}, yaml['variables'])
self.assertEquals({}, yaml['sets'])
self.assertEquals({}, yaml['maps'])
def test_defaults(self):
dynamic_config = BrainDynamicsConfiguration()
data = {}
dynamic_config.to_yaml(data, True)
BrainDynamicsConfigurationTests.assert_defaults(self, data)
@staticmethod
def assert_defaults(test, data):
test.assertTrue('sets' in data)
test.assertEqual(data['sets']['NUMBER'], 'programy.dynamic.sets.numeric.IsNumeric')
test.assertEqual(data['sets']['ROMAN'], 'programy.dynamic.sets.roman.IsRomanNumeral')
test.assertEqual(data['sets']['STOPWORD'], 'programy.dynamic.sets.stopword.IsStopWord')
test.assertEqual(data['sets']['SYNSETS'], 'programy.dynamic.sets.synsets.IsSynset')
test.assertTrue('maps' in data)
test.assertEqual(data['maps']['ROMANTODDEC'], 'programy.dynamic.maps.roman.MapRomanToDecimal')
test.assertEqual(data['maps']['DECTOROMAN'], 'programy.dynamic.maps.roman.MapDecimalToRoman')
test.assertEqual(data['maps']['LEMMATIZE'], 'programy.dynamic.maps.lemmatize.LemmatizeMap')
test.assertEqual(data['maps']['STEMMER'], 'programy.dynamic.maps.stemmer.StemmerMap')
test.assertTrue('variables' in data)
test.assertEqual(data['variables']['GETTIME'], 'programy.dynamic.variables.datetime.GetTime')
| [
"programy.config.file.yaml_file.YamlConfigurationFile",
"programy.config.brain.dynamic.BrainDynamicsConfiguration",
"programy.clients.events.console.config.ConsoleConfiguration"
] | [((329, 352), 'programy.config.file.yaml_file.YamlConfigurationFile', 'YamlConfigurationFile', ([], {}), '()\n', (350, 352), False, 'from programy.config.file.yaml_file import YamlConfigurationFile\n'), ((1014, 1042), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (1040, 1042), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((1639, 1662), 'programy.config.file.yaml_file.YamlConfigurationFile', 'YamlConfigurationFile', ([], {}), '()\n', (1660, 1662), False, 'from programy.config.file.yaml_file import YamlConfigurationFile\n'), ((1884, 1912), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (1910, 1912), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((2224, 2247), 'programy.config.file.yaml_file.YamlConfigurationFile', 'YamlConfigurationFile', ([], {}), '()\n', (2245, 2247), False, 'from programy.config.file.yaml_file import YamlConfigurationFile\n'), ((2501, 2529), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (2527, 2529), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((2825, 2848), 'programy.config.file.yaml_file.YamlConfigurationFile', 'YamlConfigurationFile', ([], {}), '()\n', (2846, 2848), False, 'from programy.config.file.yaml_file import YamlConfigurationFile\n'), ((3070, 3098), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (3096, 3098), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((3394, 3417), 'programy.config.file.yaml_file.YamlConfigurationFile', 'YamlConfigurationFile', ([], {}), '()\n', (3415, 3417), False, 'from programy.config.file.yaml_file import YamlConfigurationFile\n'), ((3618, 3646), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 
'BrainDynamicsConfiguration', ([], {}), '()\n', (3644, 3646), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((3974, 4002), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (4000, 4002), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((4926, 4949), 'programy.config.file.yaml_file.YamlConfigurationFile', 'YamlConfigurationFile', ([], {}), '()\n', (4947, 4949), False, 'from programy.config.file.yaml_file import YamlConfigurationFile\n'), ((5611, 5639), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (5637, 5639), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((6296, 6324), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (6322, 6324), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((6573, 6601), 'programy.config.brain.dynamic.BrainDynamicsConfiguration', 'BrainDynamicsConfiguration', ([], {}), '()\n', (6599, 6601), False, 'from programy.config.brain.dynamic import BrainDynamicsConfiguration\n'), ((909, 931), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (929, 931), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((1779, 1801), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (1799, 1801), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((2396, 2418), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (2416, 2418), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((2965, 2987), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', 
(2985, 2987), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((3513, 3535), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (3533, 3535), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((5506, 5528), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (5526, 5528), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n')] |
from setuptools import setup, find_packages
import codecs
import os
# Resolve the directory containing this setup.py so the README can be
# read regardless of the current working directory.
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, "README.md"), encoding="utf-8") as fh:
    long_description = "\n" + fh.read()
VERSION = '0.0.3'
DESCRIPTION = 'Communicate with other machines via the local network.'
# Setting up
# Package metadata for PyPI; the long description is the README above.
setup(
    name="hoist3",
    version=VERSION,
    author="ZeroIntensity",
    author_email="<<EMAIL>>",
    description=DESCRIPTION,
    long_description_content_type="text/markdown",
    long_description=long_description,
    packages=find_packages(),
    install_requires=['flask', 'requests'],
    keywords=['python', 'web', 'server', 'flask', 'communication', 'network', 'webserver'],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3",
        "Operating System :: Unix",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
    ])
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
] | [((92, 117), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (107, 117), False, 'import os\n'), ((137, 168), 'os.path.join', 'os.path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (149, 168), False, 'import os\n'), ((576, 591), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (589, 591), False, 'from setuptools import setup, find_packages\n')] |
#! python
#
#
from renderer.shape import Shape
from renderer.bolts import Vector, EPSILON
class Plane(Shape):
    """An infinite plane lying in the local xz-plane (y == 0)."""

    def LocalNormal( self, localPoint ):
        # A plane's normal is the same everywhere: straight up in +y.
        return Vector( 0, 1, 0 )

    def LocalIntersect(self, localRay):
        """Return the intersection of *localRay* with the plane.

        A ray whose y direction is (near) zero runs parallel to the
        plane and never hits it, yielding no intersections.
        """
        directionY = localRay.Direction()[1]
        if abs(directionY) < EPSILON:
            return []
        # Solve origin.y + t * direction.y == 0 for t.
        hitTime = -localRay.Origin()[1] / directionY
        return [{'time': hitTime, 'object': self}]
| [
"renderer.bolts.Vector"
] | [((168, 183), 'renderer.bolts.Vector', 'Vector', (['(0)', '(1)', '(0)'], {}), '(0, 1, 0)\n', (174, 183), False, 'from renderer.bolts import Vector, EPSILON\n')] |
#!/usr/bin/env python3
# coding=utf-8
#
# Copyright (c) 2020 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import hashlib
from _core.logger import platform_logger
__all__ = ["check_pub_key_exist", "do_rsa_encrypt", "do_rsa_decrypt",
"generate_key_file", "get_file_summary"]
PUBLIC_KEY_FILE = "config/pub.key"
LOG = platform_logger("Encrypt")
def check_pub_key_exist():
    """Locate the RSA public key file and cache its path on Variables.

    Checks a previously cached path first, then the exec and top
    directories.  Returns a truthy value when a key is available and a
    falsy one otherwise (the cached empty string means "known absent").
    """
    from xdevice import Variables

    cached = Variables.report_vars.pub_key_file
    if cached is not None:
        # "" is the negative cache marker set on a previous miss.
        if cached == "":
            return False
        if os.path.exists(cached):
            return True
        # Cached file disappeared; forget it and report failure.
        Variables.report_vars.pub_key_file = None
        return False
    candidate = os.path.join(Variables.exec_dir, PUBLIC_KEY_FILE)
    if os.path.exists(candidate):
        Variables.report_vars.pub_key_file = candidate
        return True
    candidate = os.path.join(Variables.top_dir, PUBLIC_KEY_FILE)
    if os.path.exists(candidate):
        Variables.report_vars.pub_key_file = candidate
    else:
        Variables.report_vars.pub_key_file = ""
    return Variables.report_vars.pub_key_file
def do_rsa_encrypt(content):
    """Encrypt *content* with the configured RSA public key.

    Returns the content unchanged when no public key is configured or
    the content is empty.  On success returns the cipher text as bytes;
    on any crypto/encoding failure, logs and returns the error message
    encoded as bytes (callers receive bytes either way).
    """
    try:
        if not check_pub_key_exist() or not content:
            return content
        plain_text = content
        if not isinstance(plain_text, bytes):
            plain_text = str(content).encode(encoding='utf-8')
        import rsa
        from xdevice import Variables
        with open(Variables.report_vars.pub_key_file, 'rb') as key_content:
            # get params
            public_key = rsa.PublicKey.load_pkcs1(key_content.read())
            # PKCS#1 v1.5 padding costs 11 bytes per block, so each
            # fragment must be that much smaller than the modulus.
            max_encrypt_len = int(public_key.n.bit_length() / 8) - 11
            try:
                # encrypt fragment by fragment and concatenate
                cipher_text = b""
                for frag in _get_frags(plain_text, max_encrypt_len):
                    cipher_text_frag = rsa.encrypt(frag, public_key)
                    cipher_text += cipher_text_frag
                return cipher_text
            except rsa.pkcs1.CryptoError as error:
                error_msg = "rsa encryption error occurs, %s" % error.args[0]
                LOG.error(error_msg)
                return bytes(error_msg, 'utf-8')
    except (ModuleNotFoundError, ValueError, TypeError, UnicodeError) as error:
        # Covers a missing `rsa` package and malformed key/content.
        error_msg = "rsa encryption error occurs, %s" % error.args[0]
        LOG.error(error_msg)
        return bytes(error_msg, 'utf-8')
def do_rsa_decrypt(content):
    """Decrypt *content* with the private key next to the public key.

    Returns the content unchanged when no public key is configured, the
    content is empty, or pri.key does not exist.  On success returns the
    decoded UTF-8 plain text (str); on failure, logs and returns the
    error message string.
    """
    try:
        if not check_pub_key_exist() or not content:
            return content
        cipher_text = content
        if not isinstance(cipher_text, bytes):
            cipher_text = str(content).encode()
        import rsa
        from xdevice import Variables
        # pri.key is expected to live alongside the configured pub.key.
        pri_key_file = os.path.join(os.path.dirname(
            Variables.report_vars.pub_key_file), "pri.key")
        if not os.path.exists(pri_key_file):
            return content
        with open(pri_key_file, "rb") as key_content:
            # get params
            pri_key = rsa.PrivateKey.load_pkcs1(key_content.read())
            # Each cipher fragment is exactly one modulus wide.
            max_decrypt_len = int(pri_key.n.bit_length() / 8)
            try:
                # decrypt fragment by fragment and concatenate
                plain_text = b""
                for frag in _get_frags(cipher_text, max_decrypt_len):
                    plain_text_frag = rsa.decrypt(frag, pri_key)
                    plain_text += plain_text_frag
                return plain_text.decode(encoding='utf-8')
            except rsa.pkcs1.CryptoError as error:
                error_msg = "rsa decryption error occurs, %s" % error.args[0]
                LOG.error(error_msg)
                return error_msg
    except (ModuleNotFoundError, ValueError, TypeError, UnicodeError) as error:
        # Covers a missing `rsa` package and malformed key/content.
        error_msg = "rsa decryption error occurs, %s" % error.args[0]
        LOG.error(error_msg)
        return error_msg
def generate_key_file(length=2048):
    """Generate an RSA key pair and write it to ./pri.key and ./pub.key.

    length: key size in bits; must be 1024, 2048, 3072 or 4096.
    Silently does nothing when the `rsa` package is not installed.
    """
    try:
        from rsa import key
    except ModuleNotFoundError:
        return
    if int(length) not in [1024, 2048, 3072, 4096]:
        LOG.error("length should be 1024, 2048, 3072 or 4096")
        return
    pub_key, pri_key = key.newkeys(int(length))
    with open("pri.key", "w") as file_pri:
        file_pri.write(pri_key.save_pkcs1().decode())
    with open("pub.key", "w") as file_pub:
        file_pub.write(pub_key.save_pkcs1().decode())
def get_file_summary(src_file, algorithm="sha256", buffer_size=100 * 1024):
    """Return the hex digest of *src_file* computed with *algorithm*.

    The file is consumed in ``buffer_size`` chunks so arbitrarily large
    files never need to fit in memory.  Returns "" when the file is
    missing, the algorithm is unknown to hashlib, or reading fails --
    logging the reason in each case.
    """
    if not os.path.exists(src_file):
        LOG.error("file '%s' not exists!" % src_file)
        return ""
    if not hasattr(hashlib, algorithm):
        LOG.error("the algorithm '%s' not in hashlib!" % algorithm)
        return ""
    digest = hashlib.new(algorithm)
    try:
        with open(file=src_file, mode="rb") as _file:
            while True:
                chunk = _file.read(buffer_size)
                if not chunk:
                    break
                digest.update(chunk)
    except ValueError as error:
        LOG.error("read data from '%s' error: %s " % (
            src_file, error.args))
        return ""
    return digest.hexdigest()
def _get_frags(text, max_len):
_text = text
while _text:
if len(_text) > max_len:
frag, _text = _text[:max_len], _text[max_len:]
else:
frag, _text = _text, ""
yield frag
| [
"os.path.exists",
"rsa.decrypt",
"hashlib.new",
"os.path.join",
"rsa.encrypt",
"os.path.dirname",
"_core.logger.platform_logger"
] | [((865, 891), '_core.logger.platform_logger', 'platform_logger', (['"""Encrypt"""'], {}), "('Encrypt')\n", (880, 891), False, 'from _core.logger import platform_logger\n'), ((1274, 1323), 'os.path.join', 'os.path.join', (['Variables.exec_dir', 'PUBLIC_KEY_FILE'], {}), '(Variables.exec_dir, PUBLIC_KEY_FILE)\n', (1286, 1323), False, 'import os\n'), ((1331, 1359), 'os.path.exists', 'os.path.exists', (['pub_key_path'], {}), '(pub_key_path)\n', (1345, 1359), False, 'import os\n'), ((1459, 1507), 'os.path.join', 'os.path.join', (['Variables.top_dir', 'PUBLIC_KEY_FILE'], {}), '(Variables.top_dir, PUBLIC_KEY_FILE)\n', (1471, 1507), False, 'import os\n'), ((1515, 1543), 'os.path.exists', 'os.path.exists', (['pub_key_path'], {}), '(pub_key_path)\n', (1529, 1543), False, 'import os\n'), ((5112, 5136), 'os.path.exists', 'os.path.exists', (['src_file'], {}), '(src_file)\n', (5126, 5136), False, 'import os\n'), ((5500, 5522), 'hashlib.new', 'hashlib.new', (['algorithm'], {}), '(algorithm)\n', (5511, 5522), False, 'import hashlib\n'), ((1103, 1153), 'os.path.exists', 'os.path.exists', (['Variables.report_vars.pub_key_file'], {}), '(Variables.report_vars.pub_key_file)\n', (1117, 1153), False, 'import os\n'), ((3344, 3395), 'os.path.dirname', 'os.path.dirname', (['Variables.report_vars.pub_key_file'], {}), '(Variables.report_vars.pub_key_file)\n', (3359, 3395), False, 'import os\n'), ((3436, 3464), 'os.path.exists', 'os.path.exists', (['pri_key_file'], {}), '(pri_key_file)\n', (3450, 3464), False, 'import os\n'), ((2451, 2480), 'rsa.encrypt', 'rsa.encrypt', (['frag', 'public_key'], {}), '(frag, public_key)\n', (2462, 2480), False, 'import rsa\n'), ((3887, 3913), 'rsa.decrypt', 'rsa.decrypt', (['frag', 'pri_key'], {}), '(frag, pri_key)\n', (3898, 3913), False, 'import rsa\n')] |
from base64 import b64encode
from hashlib import sha1
from Crypto.Hash import SHA
from Crypto.Signature import PKCS1_v1_5
from Crypto.PublicKey import RSA
class Rsa:
    """Helpers around PyCrypto for SHA1-based RSA signing and digests."""

    @classmethod
    def sign(cls, text, privateKeyContent):
        """Sign *text* (PKCS#1 v1.5 over SHA-1) and return base64 bytes.

        privateKeyContent: PEM/DER private key material accepted by
        RSA.importKey.
        """
        key = RSA.importKey(privateKeyContent)
        signer = PKCS1_v1_5.new(key)
        return b64encode(signer.sign(SHA.new(text)))

    @classmethod
    def digest(cls, text):
        """Return the base64-encoded SHA-1 digest of *text* (bytes)."""
        sha = sha1()
        sha.update(text)
        return b64encode(sha.digest())
| [
"base64.b64encode",
"Crypto.Signature.PKCS1_v1_5.new",
"Crypto.Hash.SHA.new",
"hashlib.sha1",
"Crypto.PublicKey.RSA.importKey"
] | [((247, 260), 'Crypto.Hash.SHA.new', 'SHA.new', (['text'], {}), '(text)\n', (254, 260), False, 'from Crypto.Hash import SHA\n'), ((278, 310), 'Crypto.PublicKey.RSA.importKey', 'RSA.importKey', (['privateKeyContent'], {}), '(privateKeyContent)\n', (291, 310), False, 'from Crypto.PublicKey import RSA\n'), ((328, 350), 'Crypto.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['rsaKey'], {}), '(rsaKey)\n', (342, 350), False, 'from Crypto.Signature import PKCS1_v1_5\n'), ((406, 426), 'base64.b64encode', 'b64encode', (['signature'], {}), '(signature)\n', (415, 426), False, 'from base64 import b64encode\n'), ((489, 495), 'hashlib.sha1', 'sha1', ([], {}), '()\n', (493, 495), False, 'from hashlib import sha1\n'), ((572, 589), 'base64.b64encode', 'b64encode', (['digest'], {}), '(digest)\n', (581, 589), False, 'from base64 import b64encode\n')] |
from tornado import gen, httpclient
from tornado.escape import json_decode
import logging
@gen.coroutine
def receiver(args):
    """Plugin entry point: asynchronously fetch CouchDB session info.

    args: the payload published on the subscribed channel; it is only
    logged here.  Returns (via gen.Return) the decoded JSON body of
    GET http://localhost:5984/_session.
    """
    logging.info("Enter simple_plugin")
    logging.debug("args: %s", str(args))
    http_client = httpclient.AsyncHTTPClient()
    response = yield http_client.fetch("http://localhost:5984/_session")
    logging.debug("reponse: %s", str(response))
    # gen.Return is the Python-2-compatible way to return a value from
    # a tornado generator coroutine.
    raise gen.Return(json_decode(response.body))


# Registration record consumed by the plugin host: subscribe to
# "start", publish results on "simple3".
PLUGIN = {"name": "SimplePlugin2", "receiver": receiver, "sub": "start",
          "pub": "simple3"}
| [
"tornado.escape.json_decode",
"logging.info",
"tornado.httpclient.AsyncHTTPClient"
] | [((130, 165), 'logging.info', 'logging.info', (['"""Enter simple_plugin"""'], {}), "('Enter simple_plugin')\n", (142, 165), False, 'import logging\n'), ((225, 253), 'tornado.httpclient.AsyncHTTPClient', 'httpclient.AsyncHTTPClient', ([], {}), '()\n', (251, 253), False, 'from tornado import gen, httpclient\n'), ((396, 422), 'tornado.escape.json_decode', 'json_decode', (['response.body'], {}), '(response.body)\n', (407, 422), False, 'from tornado.escape import json_decode\n')] |
from setuptools import find_packages, setup
# Runtime dependencies come straight from requirements.txt; the README
# becomes the long description rendered on PyPI.
with open("requirements.txt") as f:
    requirements = f.read().splitlines()
with open("README.md") as f:
    readme = f.read()
# Package metadata; requirements and readme are loaded above.
setup(
    name="kubespec",
    version="0.1.dev20200203",
    url="https://github.com/machinezone/kubespec",
    author="<NAME>",
    author_email="<EMAIL>",
    description="Kubespec is a set of foundational libraries for expressing Kubernetes resource specifications as code.",
    long_description=readme,
    long_description_content_type="text/markdown",
    license="BSD 3",
    install_requires=requirements,
    python_requires=">=3.7",
    packages=find_packages(),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Intended Audience :: Information Technology",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries",
        "Topic :: System",
        "Topic :: System :: Systems Administration",
        "Topic :: Utilities",
        "Typing :: Typed",
    ],
)
| [
"setuptools.find_packages"
] | [((635, 650), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (648, 650), False, 'from setuptools import find_packages, setup\n')] |
import logging
from django import forms
from tantalus.services import get_tantalus_client, TantalusException
class TantalusProductAdminForm(forms.ModelForm):
    """Admin form whose tantalus_id field lists products from Tantalus.

    When Tantalus cannot be reached, the dropdown degrades to a plain
    integer field so the admin can still enter an id by hand.
    """

    tantalus_id = forms.ChoiceField(required=True)

    def __init__(self, *args, **kwargs):
        """Populate the tantalus_id choices from the Tantalus API."""
        super(TantalusProductAdminForm, self).__init__(*args, **kwargs)
        try:
            products = get_tantalus_client().get_products()
            self.fields["tantalus_id"].choices = [
                (product["id"], product["name"]) for product in products
            ]
        except TantalusException as e:
            logging.error(
                "The following Exception occurred while trying to access Tantalus on the administration "
                "dashboard: {}".format(e)
            )
            # Fall back to free-form id entry when Tantalus is down.
            self.fields["tantalus_id"] = forms.IntegerField(
                min_value=1,
                help_text="Could not retrieve data from Tantalus, you are still able to enter the Tantalus id "
                "yourself. Check the logs for more information.",
            )
class TantalusOrderVenueAdminForm(forms.ModelForm):
    """Admin form whose endpoint_id field lists endpoints from Tantalus.

    When Tantalus cannot be reached, the dropdown degrades to a plain
    integer field so the admin can still enter an id by hand.
    """

    endpoint_id = forms.ChoiceField(required=True)

    def __init__(self, *args, **kwargs):
        """Populate the endpoint_id choices from the Tantalus API."""
        super(TantalusOrderVenueAdminForm, self).__init__(*args, **kwargs)
        try:
            endpoints = get_tantalus_client().get_endpoints()
            self.fields["endpoint_id"].choices = [
                (endpoint["id"], endpoint["name"]) for endpoint in endpoints
            ]
        except TantalusException as e:
            logging.error(
                "The following Exception occurred while trying to access Tantalus on the administration "
                "dashboard: {}".format(e)
            )
            # Fall back to free-form id entry when Tantalus is down.
            self.fields["endpoint_id"] = forms.IntegerField(
                min_value=1,
                help_text="Could not retrieve data from Tantalus, you are still able to enter the endpoint id "
                "yourself. Check the logs for more information.",
            )
| [
"django.forms.ChoiceField",
"django.forms.IntegerField",
"tantalus.services.get_tantalus_client"
] | [((220, 252), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'required': '(True)'}), '(required=True)\n', (237, 252), False, 'from django import forms\n'), ((1292, 1324), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'required': '(True)'}), '(required=True)\n', (1309, 1324), False, 'from django import forms\n'), ((502, 523), 'tantalus.services.get_tantalus_client', 'get_tantalus_client', ([], {}), '()\n', (521, 523), False, 'from tantalus.services import get_tantalus_client, TantalusException\n'), ((1577, 1598), 'tantalus.services.get_tantalus_client', 'get_tantalus_client', ([], {}), '()\n', (1596, 1598), False, 'from tantalus.services import get_tantalus_client, TantalusException\n'), ((935, 1119), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(1)', 'help_text': '"""Could not retrieve data from Tantalus, you are still able to enter the Tantalus id yourself. Check the logs for more information."""'}), "(min_value=1, help_text=\n 'Could not retrieve data from Tantalus, you are still able to enter the Tantalus id yourself. Check the logs for more information.'\n )\n", (953, 1119), False, 'from django import forms\n'), ((2011, 2195), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(1)', 'help_text': '"""Could not retrieve data from Tantalus, you are still able to enter the endpoint id yourself. Check the logs for more information."""'}), "(min_value=1, help_text=\n 'Could not retrieve data from Tantalus, you are still able to enter the endpoint id yourself. Check the logs for more information.'\n )\n", (2029, 2195), False, 'from django import forms\n')] |
import matplotlib.pyplot as plt
#---------------------------------------------------------------
# Function plot:
# Plot cost x generation graphic
# Parameters:
# itens - cost of each generation
# itens2 - number of feasible solutions at each generation
#---------------------------------------------------------------
def plot(itens=None, type='cost', itens2=None):
    """Plot a cost or feasible-percentage curve over generations.

    itens:  y-values for the main curve, one per generation.
    type:   'cost' (blue curve) or 'feasible' (red curve); selects the
            y-axis label.
    itens2: secondary series, drawn only when type == 'feasible'.
    """
    # Avoid mutable default arguments ([] is shared across calls);
    # None defaults keep the call signature backward-compatible.
    if itens is None:
        itens = []
    if itens2 is None:
        itens2 = []
    if type == 'cost':
        plt.plot(itens,'b')
        plt.ylabel('cost')
        plt.xlabel('generations')
    if type == 'feasible':
        plt.plot(itens,'r')
        plt.ylabel('feasible %')
        plt.xlabel('generations')
        if itens2:
            plt.plot(itens2,'r')
    plt.show()
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.show"
] | [((672, 682), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (680, 682), True, 'import matplotlib.pyplot as plt\n'), ((405, 425), 'matplotlib.pyplot.plot', 'plt.plot', (['itens', '"""b"""'], {}), "(itens, 'b')\n", (413, 425), True, 'import matplotlib.pyplot as plt\n'), ((433, 451), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""cost"""'], {}), "('cost')\n", (443, 451), True, 'import matplotlib.pyplot as plt\n'), ((460, 485), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""generations"""'], {}), "('generations')\n", (470, 485), True, 'import matplotlib.pyplot as plt\n'), ((531, 551), 'matplotlib.pyplot.plot', 'plt.plot', (['itens', '"""r"""'], {}), "(itens, 'r')\n", (539, 551), True, 'import matplotlib.pyplot as plt\n'), ((559, 583), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""feasible %"""'], {}), "('feasible %')\n", (569, 583), True, 'import matplotlib.pyplot as plt\n'), ((592, 617), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""generations"""'], {}), "('generations')\n", (602, 617), True, 'import matplotlib.pyplot as plt\n'), ((642, 663), 'matplotlib.pyplot.plot', 'plt.plot', (['itens2', '"""r"""'], {}), "(itens2, 'r')\n", (650, 663), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python example script showing SecureX Cloud Analytics Alerts.
Copyright (c) 2020 Cisco and/or its affiliates.
This software is licensed to you under the terms of the Cisco Sample
Code License, Version 1.1 (the "License"). You may obtain a copy of the
License at
https://developer.cisco.com/docs/licenses
All use of the material herein must be in accordance with the terms of
the License. All rights not expressly granted by the License are
reserved. Unless required by applicable law or agreed to separately in
writing, software distributed under the License is distributed on an "AS
IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied.
"""
import json
import requests
from datetime import datetime
from datetime import timedelta
from requests.packages.urllib3.exceptions import InsecureRequestWarning
#
import environment as env
# Disable insecure request warnings
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# FUNCTIONS
# Get Open Alerts from SecureX Cloud Analytics
def get_swc_open_alerts(
    start_time,
    end_time,
    host=env.SWC.get("host"),
    api_key=env.SWC_API_KEY
):
    """Fetch open SecureX Cloud Analytics alerts in [start_time, end_time).

    Returns the decoded JSON payload on HTTP 200; otherwise prints an
    error and returns None (callers must handle a missing result).
    """
    # Build URL
    url = f"https://{host}/api/v3/alerts/alert"
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json','Authorization': api_key
    }
    query_params = {"status": "open", "time__gte": start_time, "time__lt": end_time}
    # HTTP Get Request
    response = requests.get(url, headers=headers, params=query_params)
    # If response code is 200, then return the json response
    if response.status_code == 200:
        return response.json()
    # Fixed typo in the original message ("ocurred" -> "occurred").
    print(f"An error has occurred, while fetching alerts, with the following code {response.status_code}")
def get_swc_alert_observables(alert_id,
                              start_time,
                              end_time,
                              host=env.SWC.get("host"),
                              api_key=env.SWC_API_KEY
                              ):
    """Fetch all observations tied to one SWC alert in the time window.

    alert_id: id of the alert whose observations are requested.
    Raises requests.HTTPError on a non-2xx response (raise_for_status);
    returns the decoded JSON payload otherwise.
    """
    url = f"https://{host}/api/v3/observations/all"
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json', 'Authorization': api_key
    }
    query_params = {"alert": alert_id, "time__gte": start_time, "time__lt": end_time}
    response = requests.get(url, headers=headers, params=query_params)
    response.raise_for_status()
    swc_obsv_alert = response.json()
    return swc_obsv_alert
def create_alert_message(alert_id, alert_type, alert_time, alert_url):
    """Build a human-readable notification line for one SWC alert.

    alert_url: the raw alert text; its second line (after the first
    newline) carries the portal URL that is embedded in the message.
    """
    url = alert_url.split("\n")[1]
    return (
        f"\nSecureX Cloud Analytics has triggered Alert {alert_id} - {alert_type}"
        f" at {alert_time}. \nFor further details please go to {url}"
    )
def post_webex_teams_message(message,
                             host=env.WXT.get("host"),
                             api_key=env.WEBEX_TEAMS_ACCESS_TOKEN,
                             room_id=env.WEBEX_TEAMS_ROOM_ID
                             ):
    """Post *message* as plain text to a Webex Teams room.

    Raises requests.HTTPError on a non-2xx response; returns the
    decoded JSON response of the messages API otherwise.
    """
    # Build URL
    url = f"https://{host}/v1/messages"
    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {api_key}'
    }
    payload = {
        "roomId": room_id,
        "text": message
    }
    json_payload = json.dumps(payload)
    # HTTP Get Request
    response = requests.post(url, headers=headers, data=json_payload)
    response.raise_for_status()
    # JSON Response
    post_res = response.json()
    return post_res
# If this script is the "main" script, run...
if __name__ == "__main__":
    # Set time variables: look back over the last 24 hours.
    end_time = datetime.utcnow()
    start_time = end_time - timedelta(days=1)
    # print(start_time, end_time)
    # Get SWC Open Alerts
    open_alerts = get_swc_open_alerts(start_time, end_time)
    # print(json.dumps(open_alerts, indent=4))
    # Query the total number of alerts
    swc_alert_count = open_alerts["meta"]["total_count"]
    # print(f"Alert!!! There are {swc_alert_count} total SecureX Cloud Analytics Alerts")
    # Array for all Alert Types
    swc_alert_table = []
    # If no Alerts exit send message and exit the script
    if swc_alert_count == 0:
        print(f"There are {swc_alert_count} alerts. Exiting the script")
        exit()
    # If there are Alerts, add alert info to swc_alert_table
    else:
        # Flatten the fields we need from each alert into a dict.
        for alerts in open_alerts["objects"]:
            swc_alert_list = {}
            alert_id = alerts["id"]
            swc_alert_list.update({"alert_id": alert_id})
            alert_type = alerts["type"]
            swc_alert_list.update({"alert_type": alert_type})
            alert_time = alerts["time"]
            swc_alert_list.update({"alert_time": alert_time})
            alert_url = alerts["text"]
            swc_alert_list.update({"alert_url": alert_url})
            source_name = alerts["source_name"]
            swc_alert_list.update({"source_name": source_name})
            swc_alert_table.append(swc_alert_list)
    # print(json.dumps(swc_alert_table, indent=4))
    # Loop through Alert Table and create action per Alert Type
    for alert_type in swc_alert_table:
        if alert_type["alert_type"] == "Inbound Port Scanner":
            port_scan_alert_id = alert_type["alert_id"]
            port_scan_alert_type = alert_type["alert_type"]
            # The portal URL is the second line of the alert text.
            split_text = alert_type["alert_url"].split("\n")
            port_scan_alert_url = split_text[1]
            port_scan_observ = get_swc_alert_observables(alert_type["alert_id"], start_time, end_time)
            # print(json.dumps(port_scan_observ, indent=4))
            for observ in port_scan_observ["objects"]:
                time = observ["time"]
                name = observ["observation_name"]
                scanner_ip = observ["scanner_ip"]
                scanned_ip = observ["scanned_ip"]
                port_scan_alert_message = f"\n SecureX Cloud Analytics has detected an {name} alarm on your network" \
                    f"\n where internal host(s) {scanned_ip} was scanned by external host(s) {scanner_ip} at {time}." \
                    f"\n Alert ID {port_scan_alert_id} - {port_scan_alert_type} has been generated" \
                    f"\n For additional information please go to {port_scan_alert_url}"
                print(port_scan_alert_message)
                # OPTIONAL - uncomment if testing Webex Teams integration
                # Send message to Webex Teams
                # post_webex_teams_message(port_scan_alert_message)
        elif alert_type["alert_type"] == "Excessive Access Attempts (External)":
            access_attempt_alert_id = alert_type["alert_id"]
            access_attempt_alert_type = alert_type["alert_type"]
            # The portal URL is the second line of the alert text.
            split_text = alert_type["alert_url"].split("\n")
            access_attempt_alert_url = split_text[1]
            access_attempt_time = alert_type["alert_time"]
            access_attempt_source_name = alert_type["source_name"]
            access_attempt_observ = get_swc_alert_observables(alert_type["alert_id"], start_time, end_time)
            # print(json.dumps(access_attempt_observ, indent=4))
            connected_ips = []
            for observ in access_attempt_observ["objects"]:
                connected_ip = observ["connected_ip"]
                connected_ips.append(connected_ip)
            # Set list to remove duplicates ip addresses
            attacker = []
            connected_ips_set = set(connected_ips)
            for ip in connected_ips_set:
                attacker.append(ip)
            # print(attacker)
            access_attempt_alert_message = f"\n SecureX Cloud Analytics has detected Multiple Access Failures" \
                f" targeted towards internal host {access_attempt_source_name} by attacker IP address(es) " \
                f"\n {attacker}. " \
                f"\n **Alert ID {access_attempt_alert_id} - {access_attempt_alert_type}** has been generated" \
                f"\n For additional information please go to " \
                f"\n {access_attempt_alert_url}"
            print(access_attempt_alert_message)
            # OPTIONAL - uncomment if testing Webex Teams integration
            # Send message to Webex Teams
            # post_webex_teams_message(access_attempt_alert_message)
        else:
            # Generic fallback for all other alert types.
            alert_message = create_alert_message(alert_type["alert_id"],
                                                 alert_type["alert_type"],
                                                 alert_type["alert_time"],
                                                 alert_type["alert_url"])
            print(alert_message)
            # OPTIONAL - uncomment if testing Webex Teams integration
            # Send message to Webex Teams
            # post_webex_teams_message(alert_message)
# End of File
| [
"requests.post",
"environment.SWC.get",
"requests.packages.urllib3.disable_warnings",
"environment.WXT.get",
"datetime.datetime.utcnow",
"json.dumps",
"requests.get",
"datetime.timedelta"
] | [((966, 1032), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', (['InsecureRequestWarning'], {}), '(InsecureRequestWarning)\n', (1008, 1032), False, 'import requests\n'), ((1168, 1187), 'environment.SWC.get', 'env.SWC.get', (['"""host"""'], {}), "('host')\n", (1179, 1187), True, 'import environment as env\n'), ((1533, 1588), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'params': 'query_params'}), '(url, headers=headers, params=query_params)\n', (1545, 1588), False, 'import requests\n'), ((2134, 2153), 'environment.SWC.get', 'env.SWC.get', (['"""host"""'], {}), "('host')\n", (2145, 2153), True, 'import environment as env\n'), ((2516, 2571), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'params': 'query_params'}), '(url, headers=headers, params=query_params)\n', (2528, 2571), False, 'import requests\n'), ((3071, 3090), 'environment.WXT.get', 'env.WXT.get', (['"""host"""'], {}), "('host')\n", (3082, 3090), True, 'import environment as env\n'), ((3511, 3530), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (3521, 3530), False, 'import json\n'), ((3570, 3624), 'requests.post', 'requests.post', (['url'], {'headers': 'headers', 'data': 'json_payload'}), '(url, headers=headers, data=json_payload)\n', (3583, 3624), False, 'import requests\n'), ((3847, 3864), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3862, 3864), False, 'from datetime import datetime\n'), ((3893, 3910), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3902, 3910), False, 'from datetime import timedelta\n')] |
from __future__ import print_function
import argparse
import os
import re
import nltk.data
# Command-line configuration: where to read the raw wiki dump and where
# to write the pre-processed dataset.
parser = argparse.ArgumentParser()
parser.add_argument('--source-dir', help="Source data directory", default="./dataset/original_data")
# Fixed typo in the help string ("Traget" -> "Target").
parser.add_argument('--target-dir', help="Target data directory", default="./dataset/pre_processed_data")
args = parser.parse_args()
# All inflected forms of "to be" that get masked out of sentences.
be_verb_form = "am are were was is been being be".split()
def load_text():
    """Concatenate the text of every wiki_* file in the source directory.

    Newlines are stripped while reading each file, and anything that
    looks like an HTML/XML tag is removed from the combined text.
    Returns the cleaned text as one string.
    """
    path = args.source_dir
    directory = os.fsencode(path)
    # Collect pieces and join once: the original += loop was quadratic
    # and leaked file handles (open without close).
    pieces = []
    for entry in os.listdir(directory):
        filename = os.fsdecode(entry)
        print(filename)
        if not filename.startswith("wiki_"):
            continue
        with open(path + "/" + filename, "r", encoding='utf-8') as dump:
            pieces.append(dump.read().replace('\n', ''))
    accumulated_text = "".join(pieces)
    # Strip tag-like spans such as <doc ...> left by the extractor.
    return re.sub('<[^<]+>', "", accumulated_text)
def split_sentences(text): return nltk.sent_tokenize(text)
def split_words(text):
    """Tokenize *text* and keep only purely alphabetic tokens."""
    tokens = nltk.word_tokenize(text)
    return [token for token in tokens if token.isalpha()]
def split_words_regexp(text):
    # NOTE(review): this pattern matches literal newline characters, so
    # the call returns the newline tokens themselves rather than the
    # text between them -- confirm this is intended (the function does
    # not appear to be called in this file).
    pattern = r'''\n'''
    return nltk.regexp_tokenize(text, pattern)
def to_example(words):
    """Turn a token list into a (masked_sentence, be_verbs) example.

    Every be-verb token is replaced by the mask "----" in the sentence;
    the removed verbs, in order, are joined into the second element.
    Returns the empty string "" (not a tuple) when the sentence contains
    no be verb -- callers test for this sentinel explicitly.
    """
    # words that are not be verbs
    new_context = []
    # be verbs in sentence
    be_verbs = []
    # split "be" verbs and other words
    for x in words:
        if x in be_verb_form:
            new_context.append("----")
            be_verbs.append(x)
        else:
            new_context.append(x)
    if not be_verbs:
        return ""
    # One example per sentence: masked text plus all its be verbs.
    return " ".join(new_context), " ".join(be_verbs)
def generate_dataset(text, root):
    """Write masked-sentence examples extracted from *text* to root/data.txt.

    Each output line is the masked sentence and its be verbs, joined
    with a tab.  Sentences without be verbs are skipped.
    """
    sentences = split_sentences(text)
    # generate data file
    data_path = os.path.join(root, 'data.txt')
    with open(data_path, mode='w', encoding='utf-8') as fout:
        for sentence in sentences:
            words = split_words(sentence)
            new_example = to_example(words)
            # to_example returns "" as a no-be-verb sentinel.
            if new_example == "":
                continue
            else:
                # new_example is a 2-tuple; list() lets join() consume it.
                list1 = list(new_example)
                fout.write("\t".join(list1))
                fout.write('\n')
if __name__ == '__main__':
    target_dir = args.target_dir
    # Create the output directory on first run.
    if not os.path.exists(target_dir):
        os.mkdir(target_dir)
    generate_dataset(load_text(), target_dir)
| [
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"os.path.join",
"os.fsencode",
"os.mkdir",
"os.fsdecode",
"re.sub"
] | [((103, 128), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (126, 128), False, 'import argparse\n'), ((484, 501), 'os.fsencode', 'os.fsencode', (['path'], {}), '(path)\n', (495, 501), False, 'import os\n'), ((545, 566), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (555, 566), False, 'import os\n'), ((891, 930), 're.sub', 're.sub', (['"""<[^<]+>"""', '""""""', 'accumulated_text'], {}), "('<[^<]+>', '', accumulated_text)\n", (897, 930), False, 'import re\n'), ((1838, 1868), 'os.path.join', 'os.path.join', (['root', '"""data.txt"""'], {}), "(root, 'data.txt')\n", (1850, 1868), False, 'import os\n'), ((587, 604), 'os.fsdecode', 'os.fsdecode', (['file'], {}), '(file)\n', (598, 604), False, 'import os\n'), ((2322, 2348), 'os.path.exists', 'os.path.exists', (['target_dir'], {}), '(target_dir)\n', (2336, 2348), False, 'import os\n'), ((2358, 2378), 'os.mkdir', 'os.mkdir', (['target_dir'], {}), '(target_dir)\n', (2366, 2378), False, 'import os\n')] |
from logging import StreamHandler
from ipware import get_client_ip
class EnhancedStreamHandler(StreamHandler, object):
    """StreamHandler that enriches records with client IP and user email.

    Expects the logging call to pass the Django request object as the
    first (and only) positional argument; the argument is consumed here
    (record.args is cleared) so the message needs no %-placeholders.
    """

    def emit(self, record):
        record.ip = ''
        record.email = ''
        try:
            request = record.args[0]
            record.ip, _ = get_client_ip(request)
            record.args = None
            record.email = request.user.email
        except Exception:  # noqa
            # Best effort: logging must never raise; leave the defaults.
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate.
            pass
        super(EnhancedStreamHandler, self).emit(record)
| [
"ipware.get_client_ip"
] | [((277, 299), 'ipware.get_client_ip', 'get_client_ip', (['request'], {}), '(request)\n', (290, 299), False, 'from ipware import get_client_ip\n')] |
from genetic_algo import Genetic_algorithm, Network_info
import train_cfar10 as cfar10
import tensorflow as tf
print("GPU Available: ", tf.test.is_gpu_available())
# Load CIFAR-10 and package everything the evolver needs in one dict.
nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test = cfar10.get_cifar10()
dataset = {
    # 'name': 'cifar10',
    'num_classes': nb_classes,
    'batch_size': batch_size,
    'input_shape': input_shape,
    'x_train': x_train,
    'x_test': x_test,
    'y_train': y_train,
    'y_test': y_test,
}
# Seed the search with a single default network description.
starting_population = [Network_info()]
# NOTE: "max_poulation" [sic] is the keyword name declared by
# Genetic_algorithm; it cannot be renamed here.
evo = Genetic_algorithm(starting_population=starting_population,
                        num_generations=2,
                        train_epochs=1,
                        max_poulation=10,
                        learning_threshold=0.8)
final_models = evo.evolve(dataset)
# print('models: ', final_models)
# print(len(final_models))
| [
"genetic_algo.Network_info",
"train_cfar10.get_cifar10",
"tensorflow.test.is_gpu_available",
"genetic_algo.Genetic_algorithm"
] | [((238, 258), 'train_cfar10.get_cifar10', 'cfar10.get_cifar10', ([], {}), '()\n', (256, 258), True, 'import train_cfar10 as cfar10\n'), ((569, 709), 'genetic_algo.Genetic_algorithm', 'Genetic_algorithm', ([], {'starting_population': 'starting_population', 'num_generations': '(2)', 'train_epochs': '(1)', 'max_poulation': '(10)', 'learning_threshold': '(0.8)'}), '(starting_population=starting_population, num_generations=\n 2, train_epochs=1, max_poulation=10, learning_threshold=0.8)\n', (586, 709), False, 'from genetic_algo import Genetic_algorithm, Network_info\n'), ((137, 163), 'tensorflow.test.is_gpu_available', 'tf.test.is_gpu_available', ([], {}), '()\n', (161, 163), True, 'import tensorflow as tf\n'), ((547, 561), 'genetic_algo.Network_info', 'Network_info', ([], {}), '()\n', (559, 561), False, 'from genetic_algo import Genetic_algorithm, Network_info\n')] |
"""Half-Cheetah Environment with full observation."""
import gym.error
from .locomotion import LocomotionEnv
try:
from gym.envs.mujoco.half_cheetah_v3 import HalfCheetahEnv
except (ModuleNotFoundError, gym.error.DependencyNotInstalled):
HalfCheetahEnv = object
class MBHalfCheetahEnv(LocomotionEnv, HalfCheetahEnv):
    """Half-Cheetah Environment.

    Model-based wrapper combining ``LocomotionEnv`` with gym's mujoco
    ``HalfCheetahEnv``. Each base class is initialized explicitly (rather
    than via ``super()``) because ``HalfCheetahEnv`` falls back to a plain
    ``object`` when the mujoco dependency is unavailable (see the
    try/except at import time).
    """
    def __init__(self, ctrl_cost_weight=0.1):
        # Name of the underlying gym environment this class mirrors.
        self.base_mujoco_name = "HalfCheetah-v3"
        LocomotionEnv.__init__(
            self,
            dim_pos=1,  # number of leading position coordinates -- presumably excluded from the observation; confirm in LocomotionEnv
            dim_action=(6,),
            ctrl_cost_weight=ctrl_cost_weight,
            forward_reward_weight=1.0,
            healthy_reward=0.0,  # no alive bonus is applied
        )
        HalfCheetahEnv.__init__(
            self, ctrl_cost_weight=ctrl_cost_weight, forward_reward_weight=1.0
        )
| [
"gym.envs.mujoco.half_cheetah_v3.HalfCheetahEnv.__init__"
] | [((698, 793), 'gym.envs.mujoco.half_cheetah_v3.HalfCheetahEnv.__init__', 'HalfCheetahEnv.__init__', (['self'], {'ctrl_cost_weight': 'ctrl_cost_weight', 'forward_reward_weight': '(1.0)'}), '(self, ctrl_cost_weight=ctrl_cost_weight,\n forward_reward_weight=1.0)\n', (721, 793), False, 'from gym.envs.mujoco.half_cheetah_v3 import HalfCheetahEnv\n')] |
"""
m3u.py --- Jen Plugin for accessing m3u data
Copyright (C) 2018
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Version:
2018-05-14
Latest version to include with a Jen Release
Usage Examples:
<dir>
<title>M3U NAME</title>
<thumbnail></thumbnail>
<m3u>M3U LINK</m3u>
<fanart></fanart>
<info> </info>
</dir>
"""
import urllib2
import re
import xbmcaddon
from koding import route
from resources.lib.plugin import Plugin
from resources.lib.util.context import get_context_items
from resources.lib.util.xml import JenItem, JenList, display_list
CACHE_TIME = 86400 # change to wanted cache time in seconds
addon_fanart = xbmcaddon.Addon().getAddonInfo('fanart')
addon_icon = xbmcaddon.Addon().getAddonInfo('icon')
class M3U(Plugin):
    """Jen plugin that turns <m3u> items into playable playlist folders."""
    name = "m3u"

    def process_item(self, item_xml):
        """Build the list-item dict for an <m3u> entry; None for anything else."""
        if "<m3u>" not in item_xml:
            return None
        item = JenItem(item_xml)
        fanart = item.get("fanart", addon_fanart)
        result_item = {
            'label': item["title"],
            'icon': item.get("thumbnail", addon_icon),
            'fanart': fanart,
            'mode': "m3u",
            'url': item.get("m3u", ""),
            'folder': True,
            'imdb': "0",
            'content': "files",
            'season': "0",
            'episode': "0",
            'info': {},
            'year': "0",
            'context': get_context_items(item),
            "summary": item.get("summary", None),
            "properties": {'fanart_image': fanart},
            'fanart_small': fanart,
        }
        return result_item
@route(mode='m3u', args=["url"])
def m3u(url):
    """Parse an .m3u playlist at ``url`` and display its entries.

    Each ``#EXTINF`` entry in the playlist becomes an ``<item>`` in the
    Jen XML list; non-.m3u URLs yield an empty list.

    :param url: link to the playlist file.
    """
    # Fixes over the previous version: removed the duplicate ``xml = ""``
    # assignment and the dead ``if not xml`` guard, removed the no-op
    # ``name = name`` / ``url = url`` reassignments, and renamed the loop
    # variables so they no longer shadow the ``url`` parameter.
    xml = ""
    if '.m3u' in url:
        listhtml = getHtml(url)
        # A playlist entry looks like: "#EXTINF:<duration>,<name>\n<stream-url>\n"
        match = re.compile('#EXTINF:.+?,(.+?)\n([^"]+)\n',
                           re.IGNORECASE | re.DOTALL).findall(listhtml)
        for title, link in match:
            xml += "<item>"\
                   "<title>%s</title>"\
                   "<link>%s</link>"\
                   "<thumbnail></thumbnail>"\
                   "</item>" % (title, link)
    jenlist = JenList(xml)
    display_list(jenlist.get_list(), jenlist.get_content_type())
def getHtml(url, referer=None, hdr=None, data=None):
    """Fetch ``url`` and return the raw response body.

    :param referer: optional Referer header value.
    :param hdr: optional headers dict; a browser-like default is used otherwise.
    :param data: optional POST payload.
    """
    default_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3',
        'Accept': '*/*',
        'Connection': 'keep-alive'
    }
    request = urllib2.Request(url, data, hdr if hdr else default_headers)
    if referer:
        request.add_header('Referer', referer)
    response = urllib2.urlopen(request, timeout=60)
    try:
        return response.read()
    finally:
        response.close()
| [
"urllib2.urlopen",
"resources.lib.util.context.get_context_items",
"re.compile",
"resources.lib.util.xml.JenList",
"koding.route",
"urllib2.Request",
"xbmcaddon.Addon",
"resources.lib.util.xml.JenItem"
] | [((2310, 2341), 'koding.route', 'route', ([], {'mode': '"""m3u"""', 'args': "['url']"}), "(mode='m3u', args=['url'])\n", (2315, 2341), False, 'from koding import route\n'), ((2923, 2935), 'resources.lib.util.xml.JenList', 'JenList', (['xml'], {}), '(xml)\n', (2930, 2935), False, 'from resources.lib.util.xml import JenItem, JenList, display_list\n'), ((3512, 3544), 'urllib2.urlopen', 'urllib2.urlopen', (['req'], {'timeout': '(60)'}), '(req, timeout=60)\n', (3527, 3544), False, 'import urllib2\n'), ((1295, 1312), 'xbmcaddon.Addon', 'xbmcaddon.Addon', ([], {}), '()\n', (1310, 1312), False, 'import xbmcaddon\n'), ((1349, 1366), 'xbmcaddon.Addon', 'xbmcaddon.Addon', ([], {}), '()\n', (1364, 1366), False, 'import xbmcaddon\n'), ((3346, 3381), 'urllib2.Request', 'urllib2.Request', (['url', 'data', 'headers'], {}), '(url, data, headers)\n', (3361, 3381), False, 'import urllib2\n'), ((3406, 3437), 'urllib2.Request', 'urllib2.Request', (['url', 'data', 'hdr'], {}), '(url, data, hdr)\n', (3421, 3437), False, 'import urllib2\n'), ((1516, 1533), 'resources.lib.util.xml.JenItem', 'JenItem', (['item_xml'], {}), '(item_xml)\n', (1523, 1533), False, 'from resources.lib.util.xml import JenItem, JenList, display_list\n'), ((2040, 2063), 'resources.lib.util.context.get_context_items', 'get_context_items', (['item'], {}), '(item)\n', (2057, 2063), False, 'from resources.lib.util.context import get_context_items\n'), ((2484, 2555), 're.compile', 're.compile', (['"""#EXTINF:.+?,(.+?)\n([^"]+)\n"""', '(re.IGNORECASE | re.DOTALL)'], {}), '("""#EXTINF:.+?,(.+?)\n([^"]+)\n""", re.IGNORECASE | re.DOTALL)\n', (2494, 2555), False, 'import re\n')] |
from safenotes.helpers import display_colored_text
from safenotes.colors import red, blue
from typing import Callable, Dict
from os import system
from sys import exit
import safenotes.files_accessor as files_accessor
import questionary
class Displayer:
    """
    Console UI for the note store: shows menus and creates, edits,
    deletes and (re-)encrypts notes based on the user's choices.
    """

    def __init__(self, password: str) -> None:
        self.password = password

    def display_initial_menu(self) -> None:
        """ Show the top-level menu: saved notes plus the special actions. """
        system('clear')
        special_choices = ['New note', 'Refresh encryptions', 'Quit']
        menu_entries = special_choices + files_accessor.get_saved_notes_filenames()
        choice = questionary.select(
            'Available options',
            choices=menu_entries,
            qmark=''
        ).ask()
        self.handle_choice_initial_menu(choice)

    def display_menu_for_note(self, note_name: str) -> None:
        """ Show the menu of actions available for a single note. """
        system('clear')
        display_colored_text(f'Note: {note_name}\n', blue)
        display_colored_text('What would you like to do?', blue)
        choice = questionary.select(
            note_name,
            choices=['Read/Edit', 'Delete', 'Go back to initial menu'],
            qmark=''
        ).ask()
        self.handle_choice_for_note(note_name, choice)

    def create_new_note(self) -> None:
        """ Prompt for a file name, then create and encrypt a new note. """
        display_colored_text('Please decide on a name for the file ', blue)
        display_colored_text('(this name WILL be publicly accessible): ', red)
        chosen_name = input()
        full_path = files_accessor.note_full_path(chosen_name)
        files_accessor.edit_file_and_encrypt(full_path, self.password)
        self.display_initial_menu()

    def edit_note(self, note_path: str) -> None:
        """ Decrypt a note, open it for editing, then encrypt it again. """
        files_accessor.decrypt_file(note_path, self.password)
        plain_path = note_path.replace('.gpg', '')
        files_accessor.edit_file_and_encrypt(plain_path, self.password)
        self.display_initial_menu()

    def delete_note(self, note_path: str) -> None:
        """ Remove an encrypted note. Raises if the note is not encrypted. """
        files_accessor.delete_file(note_path)
        self.display_initial_menu()

    def refresh_encryptions(self) -> None:
        """ Encrypt every saved note that is still stored in plain text. """
        pending = [
            name for name in files_accessor.get_saved_notes_filenames()
            if not files_accessor.is_file_encrypted(name)
        ]
        for name in pending:
            files_accessor.encrypt_file(files_accessor.note_full_path(name), self.password)
        self.display_initial_menu()

    def handle_choice_initial_menu(self, choice: str) -> None:
        """ Dispatch the user's selection from the initial menu. """
        if choice == 'New note':
            self.create_new_note()
        elif choice == 'Refresh encryptions':
            self.refresh_encryptions()
        elif choice == 'Quit':
            exit()
        else:
            # Any other selection is the name of a saved note.
            self.display_menu_for_note(choice)

    def handle_choice_for_note(self, note_name: str, choice: str) -> None:
        """ Dispatch the user's selection from a note's menu. """
        note_path = files_accessor.note_full_path(note_name)
        note_actions: Dict[str, Callable] = {
            'Read/Edit': self.edit_note,
            'Delete': self.delete_note,
        }
        action = note_actions.get(choice)
        if action is not None:
            action(note_path)
        else:
            self.display_initial_menu()
| [
"safenotes.files_accessor.edit_file_and_encrypt",
"safenotes.files_accessor.get_saved_notes_filenames",
"safenotes.files_accessor.decrypt_file",
"safenotes.files_accessor.delete_file",
"safenotes.files_accessor.note_full_path",
"safenotes.files_accessor.encrypt_file",
"safenotes.files_accessor.is_file_e... | [((568, 583), 'os.system', 'system', (['"""clear"""'], {}), "('clear')\n", (574, 583), False, 'from os import system\n'), ((1066, 1081), 'os.system', 'system', (['"""clear"""'], {}), "('clear')\n", (1072, 1081), False, 'from os import system\n'), ((1091, 1141), 'safenotes.helpers.display_colored_text', 'display_colored_text', (['f"""Note: {note_name}\n"""', 'blue'], {}), "(f'Note: {note_name}\\n', blue)\n", (1111, 1141), False, 'from safenotes.helpers import display_colored_text\n'), ((1150, 1206), 'safenotes.helpers.display_colored_text', 'display_colored_text', (['"""What would you like to do?"""', 'blue'], {}), "('What would you like to do?', blue)\n", (1170, 1206), False, 'from safenotes.helpers import display_colored_text\n'), ((1531, 1598), 'safenotes.helpers.display_colored_text', 'display_colored_text', (['"""Please decide on a name for the file """', 'blue'], {}), "('Please decide on a name for the file ', blue)\n", (1551, 1598), False, 'from safenotes.helpers import display_colored_text\n'), ((1607, 1677), 'safenotes.helpers.display_colored_text', 'display_colored_text', (['"""(this name WILL be publicly accessible): """', 'red'], {}), "('(this name WILL be publicly accessible): ', red)\n", (1627, 1677), False, 'from safenotes.helpers import display_colored_text\n'), ((1726, 1766), 'safenotes.files_accessor.note_full_path', 'files_accessor.note_full_path', (['file_name'], {}), '(file_name)\n', (1755, 1766), True, 'import safenotes.files_accessor as files_accessor\n'), ((1775, 1837), 'safenotes.files_accessor.edit_file_and_encrypt', 'files_accessor.edit_file_and_encrypt', (['note_path', 'self.password'], {}), '(note_path, self.password)\n', (1811, 1837), True, 'import safenotes.files_accessor as files_accessor\n'), ((2014, 2067), 'safenotes.files_accessor.decrypt_file', 'files_accessor.decrypt_file', (['note_path', 'self.password'], {}), '(note_path, self.password)\n', (2041, 2067), True, 'import 
safenotes.files_accessor as files_accessor\n'), ((2126, 2188), 'safenotes.files_accessor.edit_file_and_encrypt', 'files_accessor.edit_file_and_encrypt', (['note_path', 'self.password'], {}), '(note_path, self.password)\n', (2162, 2188), True, 'import safenotes.files_accessor as files_accessor\n'), ((2367, 2404), 'safenotes.files_accessor.delete_file', 'files_accessor.delete_file', (['note_path'], {}), '(note_path)\n', (2393, 2404), True, 'import safenotes.files_accessor as files_accessor\n'), ((3659, 3699), 'safenotes.files_accessor.note_full_path', 'files_accessor.note_full_path', (['note_name'], {}), '(note_name)\n', (3688, 3699), True, 'import safenotes.files_accessor as files_accessor\n'), ((2696, 2731), 'safenotes.files_accessor.note_full_path', 'files_accessor.note_full_path', (['file'], {}), '(file)\n', (2725, 2731), True, 'import safenotes.files_accessor as files_accessor\n'), ((2744, 2797), 'safenotes.files_accessor.encrypt_file', 'files_accessor.encrypt_file', (['file_path', 'self.password'], {}), '(file_path, self.password)\n', (2771, 2797), True, 'import safenotes.files_accessor as files_accessor\n'), ((1224, 1327), 'questionary.select', 'questionary.select', (['note_name'], {'choices': "['Read/Edit', 'Delete', 'Go back to initial menu']", 'qmark': '""""""'}), "(note_name, choices=['Read/Edit', 'Delete',\n 'Go back to initial menu'], qmark='')\n", (1242, 1327), False, 'import questionary\n'), ((2537, 2579), 'safenotes.files_accessor.get_saved_notes_filenames', 'files_accessor.get_saved_notes_filenames', ([], {}), '()\n', (2577, 2579), True, 'import safenotes.files_accessor as files_accessor\n'), ((2587, 2622), 'safenotes.files_accessor.is_file_encrypted', 'files_accessor.is_file_encrypted', (['f'], {}), '(f)\n', (2619, 2622), True, 'import safenotes.files_accessor as files_accessor\n'), ((763, 805), 'safenotes.files_accessor.get_saved_notes_filenames', 'files_accessor.get_saved_notes_filenames', ([], {}), '()\n', (803, 805), True, 'import 
safenotes.files_accessor as files_accessor\n')] |
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, List, Optional
import numpy as np
import numpy.typing as npt
from nuplan.common.actor_state.agent import Agent
from nuplan.common.actor_state.ego_state import EgoState
from nuplan.common.actor_state.vehicle_parameters import get_pacifica_parameters
from nuplan.common.geometry.compute import signed_lateral_distance, signed_longitudinal_distance
from nuplan.planning.metrics.evaluation_metrics.base.metric_base import MetricBase
from nuplan.planning.metrics.metric_result import MetricStatistics, MetricStatisticsType, Statistic, TimeSeries
from nuplan.planning.scenario_builder.abstract_scenario import AbstractScenario
from nuplan.planning.simulation.history.simulation_history import SimulationHistory
from nuplan.planning.simulation.observation.observation_type import DetectionsTracks
@dataclass
class EgoAgentPair:
    """Pairs the ego state with one agent observed in the same history sample."""
    ego_state: EgoState  # Ego state taken from the history sample
    agent: Agent  # Agent observed in that same sample
@dataclass
class EgoToAgentDistances:
    """
    Class to keep track of the history of projected distances from ego to an agent.
    It also contains the length of the agent.

    All three lists are built from the same sequence of samples, so element
    i of each list refers to the same timestep.
    """
    agent_lengths: List[float]  # A list of Length of agents [m]
    longitudinal_distances: List[float]  # Longitudinal distance from ego to the agent [m]
    lateral_distances: List[float]  # Lateral distance from ego to the agent [m]
class ClearanceFromStaticAgentsStatistics(MetricBase):
    """Metric on clearance while passing static vehicles.

    For every agent that never moves during the scenario, this metric looks
    at the lateral distance kept by ego while driving past it, and reports
    max/min/p90 clearance over all such passes.
    """
    def __init__(self, name: str, category: str, lateral_distance_threshold: float) -> None:
        """
        Initializes the ClearanceFromStaticAgentsStatistics class
        :param name: Metric name
        :param category: Metric category
        :param lateral_distance_threshold: Agents laterally further away than this threshold are not considered.
        """
        super().__init__(name=name, category=category)
        self._lateral_distance_threshold = lateral_distance_threshold
        self._ego_half_length = get_pacifica_parameters().half_length
    def compute_score(
        self,
        scenario: AbstractScenario,
        metric_statistics: Dict[str, Statistic],
        time_series: Optional[TimeSeries] = None,
    ) -> float:
        """Inherited, see superclass."""
        # TODO: Define the metric score
        return 0.0
    def compute(self, history: SimulationHistory, scenario: AbstractScenario) -> List[MetricStatistics]:
        """
        Returns the estimated metric
        :param history: History from a simulation engine
        :param scenario: Scenario running this metric
        :return the estimated metric.
        """
        # Compute projected distances
        agents_distances = self._extract_agent_projected_distances(history)
        clearances_during_passing = self._extract_passing_clearances(agents_distances)
        # No static agent was overtaken in this scenario: nothing to report.
        if not clearances_during_passing:
            return []
        statistics = {
            MetricStatisticsType.MAX: Statistic(
                name='max_clearance_overtaking_static_agent', unit='meters', value=np.amax(clearances_during_passing)
            ),
            MetricStatisticsType.MIN: Statistic(
                name='min_clearance_overtaking_static_agent', unit='meters', value=np.amin(clearances_during_passing)
            ),
            MetricStatisticsType.P90: Statistic(
                name='p90_clearance_overtaking_static_agent',
                unit='meters',
                value=np.percentile(np.abs(clearances_during_passing), 90),
            ),
        }
        results = self._construct_metric_results(metric_statistics=statistics, time_series=None, scenario=scenario)
        return results  # type: ignore
    def get_overtake_start_idx(
        self, longitudinal_dist: List[float], idx_overtake: int, critical_dist_abs: float
    ) -> int:
        """
        Finds the index of the element which represents the start of the overtake
        :param longitudinal_dist: longitudinal distances
        :param idx_overtake: index of the distance closest to zero
        :param critical_dist_abs: critical distance which represent start of overtake
        :return index of the start of overtake.
        """
        # Walk backwards in time from idx_overtake (inclusive) until the
        # longitudinal distance first exceeds the critical distance.
        offset = self._get_overtake_edge(longitudinal_dist[idx_overtake::-1], critical_dist_abs)
        return idx_overtake - offset if offset is not None else 0
    def get_overtake_end_idx(self, longitudinal_dist: List[float], idx_overtake: int, critical_dist_abs: float) -> int:
        """
        Finds the index of the element which represents the end of the overtake
        :param longitudinal_dist: longitudinal distances
        :param idx_overtake: index of the distance closest to zero
        :param critical_dist_abs: critical distance which represent end of overtake
        :return index of the end of overtake.
        """
        # Walk forwards in time from idx_overtake until the longitudinal
        # distance first exceeds the critical distance.
        offset = self._get_overtake_edge(longitudinal_dist[idx_overtake:], critical_dist_abs)
        return idx_overtake + offset if offset is not None else -1
    @staticmethod
    def _get_overtake_edge(distances: List[float], critical_distance: float) -> Optional[int]:
        """
        Finds the index of the first element which exceeds the given amount in a list
        :param distances: list of distances
        :param critical_distance: threshold distance
        :return index of the first element exceeding the given amount, None if it doesn't happen.
        """
        for idx_start, d in enumerate(distances):
            if abs(d) > critical_distance:
                return idx_start
        return None
    def _extract_agent_projected_distances(self, history: SimulationHistory) -> Dict[str, EgoToAgentDistances]:
        """
        Computes the projected distances, for inactive agents only
        :param history: The history of the scenario
        :return A dict containing the projected distances to each inactive track in the entire scenario.
        """
        agents_distances: Dict[str, EgoToAgentDistances] = {}
        inactive_agents_scenario = self._get_inactive_agents_scenario(history)
        for track_token, ego_agent_pairs in inactive_agents_scenario.items():
            # Distances are measured from ego's rear axle to the agent's
            # bounding-box geometry, one value per history sample.
            lateral_dist = [
                signed_lateral_distance(ego_agent_pair.ego_state.rear_axle, ego_agent_pair.agent.box.geometry)
                for ego_agent_pair in ego_agent_pairs
            ]
            longitudinal_dist = [
                signed_longitudinal_distance(ego_agent_pair.ego_state.rear_axle, ego_agent_pair.agent.box.geometry)
                for ego_agent_pair in ego_agent_pairs
            ]
            lengths = [ego_agent_pair.agent.box.length for ego_agent_pair in ego_agent_pairs]
            agents_distances[track_token] = EgoToAgentDistances(
                agent_lengths=lengths, longitudinal_distances=longitudinal_dist, lateral_distances=lateral_dist
            )
        return agents_distances
    def _extract_passing_clearances(self, agents_distances: Dict[str, EgoToAgentDistances]) -> List[float]:
        """
        Extracts the portion of projected distances relative to the passing of every agent and saves them to a list
        :param agents_distances: The projected distances to each inactive agent
        :return A list containing the lateral clearance of all inactive agents while ego is passing them.
        """
        clearances_during_overtake = []
        for distances in agents_distances.values():
            max_longitudinal_dist = max(distances.longitudinal_distances)
            idx_max = distances.longitudinal_distances.index(max_longitudinal_dist)
            min_longitudinal_dist = min(distances.longitudinal_distances)
            idx_min = distances.longitudinal_distances.index(min_longitudinal_dist)
            # A pass happened when the longitudinal distance changed sign from
            # positive to negative over time (sign convention per
            # signed_longitudinal_distance -- presumably agent ahead first,
            # then behind; confirm against that helper).
            if max_longitudinal_dist > 0 > min_longitudinal_dist and idx_max < idx_min:
                # Sample where ego is longitudinally closest to the agent.
                overtake_idx = int(np.argmin(np.abs(distances.longitudinal_distances)))
                if abs(distances.lateral_distances[overtake_idx]) < self._lateral_distance_threshold:
                    # Sum of the two half-lengths: beyond this longitudinal
                    # distance the vehicles no longer overlap lengthwise.
                    threshold = self._ego_half_length + distances.agent_lengths[overtake_idx] / 2.0
                    start_idx = self.get_overtake_start_idx(
                        distances.longitudinal_distances, int(overtake_idx), threshold
                    )
                    end_idx = self.get_overtake_end_idx(distances.longitudinal_distances, int(overtake_idx), threshold)
                    clearances_during_overtake.extend(np.abs(distances.lateral_distances[start_idx : end_idx + 1]))
        return clearances_during_overtake
    @staticmethod
    def _get_inactive_agents_scenario(history: SimulationHistory) -> Dict[str, List[EgoAgentPair]]:
        """
        Get a set of agents which are inactive for the full length of the scenario
        An inactive agents in this context is an agent that for the entire scenario never moves
        :param history: The history from the scenario
        :return A dict of inactive tracks and their ego poses with agents.
        """
        # Collect a series of agents to their tracks
        agent_tracks = defaultdict(list)
        for sample in history.data:
            ego_state = sample.ego_state
            if not isinstance(sample.observation, DetectionsTracks):
                continue
            for tracked_object in sample.observation.tracked_objects.get_agents():
                agent_tracks[tracked_object.track_token].append(EgoAgentPair(ego_state=ego_state, agent=tracked_object))
        inactive_track_agents = defaultdict(list)
        for track_token, ego_agent_pairs in agent_tracks.items():
            velocities: npt.NDArray[np.float64] = np.asarray(
                [ego_agent_pair.agent.velocity.magnitude() for ego_agent_pair in ego_agent_pairs]
            )
            inactive_status = np.isclose(velocities, 0.0)
            # Must all inactive
            if np.sum(inactive_status) != len(velocities):
                continue
            inactive_track_agents[track_token] = ego_agent_pairs
        return inactive_track_agents
| [
"numpy.abs",
"numpy.isclose",
"numpy.amin",
"nuplan.common.geometry.compute.signed_longitudinal_distance",
"numpy.sum",
"collections.defaultdict",
"nuplan.common.actor_state.vehicle_parameters.get_pacifica_parameters",
"nuplan.common.geometry.compute.signed_lateral_distance",
"numpy.amax"
] | [((9182, 9199), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9193, 9199), False, 'from collections import defaultdict\n'), ((9608, 9625), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9619, 9625), False, 'from collections import defaultdict\n'), ((2093, 2118), 'nuplan.common.actor_state.vehicle_parameters.get_pacifica_parameters', 'get_pacifica_parameters', ([], {}), '()\n', (2116, 2118), False, 'from nuplan.common.actor_state.vehicle_parameters import get_pacifica_parameters\n'), ((9896, 9923), 'numpy.isclose', 'np.isclose', (['velocities', '(0.0)'], {}), '(velocities, 0.0)\n', (9906, 9923), True, 'import numpy as np\n'), ((6285, 6384), 'nuplan.common.geometry.compute.signed_lateral_distance', 'signed_lateral_distance', (['ego_agent_pair.ego_state.rear_axle', 'ego_agent_pair.agent.box.geometry'], {}), '(ego_agent_pair.ego_state.rear_axle, ego_agent_pair.\n agent.box.geometry)\n', (6308, 6384), False, 'from nuplan.common.geometry.compute import signed_lateral_distance, signed_longitudinal_distance\n'), ((6498, 6601), 'nuplan.common.geometry.compute.signed_longitudinal_distance', 'signed_longitudinal_distance', (['ego_agent_pair.ego_state.rear_axle', 'ego_agent_pair.agent.box.geometry'], {}), '(ego_agent_pair.ego_state.rear_axle,\n ego_agent_pair.agent.box.geometry)\n', (6526, 6601), False, 'from nuplan.common.geometry.compute import signed_lateral_distance, signed_longitudinal_distance\n'), ((9972, 9995), 'numpy.sum', 'np.sum', (['inactive_status'], {}), '(inactive_status)\n', (9978, 9995), True, 'import numpy as np\n'), ((3159, 3193), 'numpy.amax', 'np.amax', (['clearances_during_passing'], {}), '(clearances_during_passing)\n', (3166, 3193), True, 'import numpy as np\n'), ((3341, 3375), 'numpy.amin', 'np.amin', (['clearances_during_passing'], {}), '(clearances_during_passing)\n', (3348, 3375), True, 'import numpy as np\n'), ((3569, 3602), 'numpy.abs', 'np.abs', (['clearances_during_passing'], {}), 
'(clearances_during_passing)\n', (3575, 3602), True, 'import numpy as np\n'), ((7961, 8001), 'numpy.abs', 'np.abs', (['distances.longitudinal_distances'], {}), '(distances.longitudinal_distances)\n', (7967, 8001), True, 'import numpy as np\n'), ((8550, 8608), 'numpy.abs', 'np.abs', (['distances.lateral_distances[start_idx:end_idx + 1]'], {}), '(distances.lateral_distances[start_idx:end_idx + 1])\n', (8556, 8608), True, 'import numpy as np\n')] |
import logging
from six.moves.urllib.parse import urlsplit
from django.conf import settings
from django.conf.urls import url, re_path
from django.core.exceptions import PermissionDenied
from django.urls import reverse
from django.utils.html import escape, format_html_join
from wagtail.admin.menu import MenuItem
from wagtail.core import hooks
from wagtail.core.models import Page
from wagtail.core.rich_text.pages import PageLinkHandler
from cosme.v1.util import util
# Module-level logger named after this module's dotted path.
logger = logging.getLogger(__name__)
@hooks.register('before_delete_page')
def raise_delete_error(request, page):
    """Block every page deletion attempted through the Wagtail admin."""
    raise PermissionDenied('Deletion via POST is disabled')
@hooks.register('after_delete_page')
def log_page_deletion(request, page):
    """Record who deleted which page, at WARNING level."""
    message = (
        u'User {user} with ID {user_id} deleted page {title} '
        u'with ID {page_id} at URL {url}'
    ).format(
        user=request.user,
        user_id=request.user.id,
        title=page.title,
        page_id=page.id,
        url=page.url_path,
    )
    logger.warning(message)
def check_permissions(parent, user, is_publishing, is_sharing):
    # NOTE(review): the recomputed ``is_publishing`` is neither returned nor
    # stored anywhere, and ``is_sharing`` is unused, so this function
    # currently has no observable effect. Confirm whether a return statement
    # (or further body) was lost.
    parent_perms = parent.permissions_for_user(user)
    if parent.slug != 'root':
        is_publishing = is_publishing and parent_perms.can_publish()
@hooks.register('insert_editor_js')
def editor_js():
    """Inject extra JavaScript files into the Wagtail page editor."""
    scripts = [
        'js/table-block.js',
    ]
    return format_html_join(
        '\n',
        '<script src="{0}{1}"></script>',
        ((settings.STATIC_URL, name) for name in scripts)
    )
@hooks.register('insert_editor_css')
def editor_css():
    """Inject extra stylesheets into the Wagtail page editor."""
    stylesheets = [
        'css/general-enhancements.css',
        'css/table-block.css',
        'css/bureau-structure.css',
        'css/heading-block.css',
        'css/info-unit-group.css',
    ]
    # NOTE(review): the trailing "<link>" in the template looks like a typo
    # (link is a void element); kept byte-identical to preserve output.
    return format_html_join(
        '\n',
        '<link rel="stylesheet" href="{0}{1}"><link>',
        ((settings.STATIC_URL, name) for name in stylesheets)
    )
@hooks.register('cfgovpage_context_handlers')
def form_module_handlers(page, request, context, *args, **kwargs):
    """
    Hook function that iterates over every Streamfield's blocks on a page and
    sets the context for any form modules.

    Adds ``context['form_modules']``, mapping each streamfield name to a
    dict of ``{block index: module context}`` for blocks that expose
    ``get_result``. Nothing is added for POST requests.
    """
    form_modules = {}
    streamfields = util.get_streamfields(page)
    for fieldname, blocks in streamfields.items():
        for index, child in enumerate(blocks):
            # Only blocks implementing get_result() count as form modules.
            if hasattr(child.block, 'get_result'):
                if fieldname not in form_modules:
                    form_modules[fieldname] = {}
                # Context is only pre-populated on non-POST requests --
                # presumably the POST path is handled elsewhere; confirm.
                if not request.method == 'POST':
                    is_submitted = child.block.is_submitted(
                        request,
                        fieldname,
                        index
                    )
                    module_context = child.block.get_result(
                        page,
                        request,
                        child.value,
                        is_submitted
                    )
                    form_modules[fieldname].update({index: module_context})
    if form_modules:
        context['form_modules'] = form_modules
@hooks.register('register_admin_menu_item')
def register_django_admin_menu_item():
    """Expose a 'Django Admin' entry at the bottom of the Wagtail admin menu."""
    admin_index = reverse('admin:index')
    return MenuItem('Django Admin', admin_index,
                    classnames='icon icon-redirect', order=99999)
class RelativePageLinkHandler(PageLinkHandler):
    """
    Rich text link handler that forces all page links to be relative.
    This special page link handler makes it so that any internal Wagtail page
    links inserted into rich text fields are rendered as relative links.
    Standard Wagtail behavior stores rich text link content in the database in
    a psuedo-HTML format like this, including only a page's ID:
    <a linktype="page" id="123">foo</a>
    When this content is rendered for preview or viewing, it's replaced with
    valid HTML including the page's URL. This custom handler ensures that page
    URLs are always rendered as relative, like this:
    <a href="/path/to/page">foo</a>
    Pages rendered with this handler should never be rendered like this:
    <a href="https://my.domain/path/to/page">foo</a>
    In standard Wagtail behavior, pages will be rendered with an absolute URL
    if an installation has multiple Wagtail Sites. In our current custom usage
    we have multiple Wagtail Sites (one for production, one for staging) that
    share the same root page. So forcing the use of relative URLs would work
    fine and allow for easier navigation within a single domain.
    This will explicitly break things if users ever wanted to host some
    additional site that doesn't share the same root page.
    This code is modified from `wagtail.wagtailcore.rich_text.PageLinkHandler`.
    """
    @staticmethod
    def expand_db_attributes(attrs, for_editor):
        """Return the opening <a> tag for a stored page link.

        :param attrs: attribute dict stored in rich text (must include 'id').
        :param for_editor: when True, include the data-* attributes the
            Wagtail editor needs to round-trip the link.
        """
        try:
            page = Page.objects.get(id=attrs['id'])
            if for_editor:
                editor_attrs = 'data-linktype="page" data-id="%d" ' % page.id
                parent_page = page.get_parent()
                if parent_page:
                    editor_attrs += 'data-parent-id="%d" ' % parent_page.id
            else:
                editor_attrs = ''
            page_url = page.specific.url
            if page_url:
                # Drop scheme and host so the link is always relative.
                page_url = urlsplit(page_url).path
            # NOTE(review): if page.specific.url is falsy, escape() receives
            # it unchanged (e.g. None would render as the string 'None') --
            # confirm this is intended.
            return '<a %shref="%s">' % (editor_attrs, escape(page_url))
        except Page.DoesNotExist:
            # Unknown page ID: emit a bare anchor with no destination.
            return "<a>"
@hooks.register('register_rich_text_link_handler')
def register_cfgov_link_handler():
    """Register RelativePageLinkHandler for the 'page' rich-text link type."""
    return ('page', RelativePageLinkHandler)
# @hooks.register('register_admin_menu_item')
# def register_frank_menu_item():
# return MenuItem('CDN Tools',
# reverse('manage-cdn'),
# classnames='icon icon-cogs',
# order=10000)
#
# @hooks.register('register_admin_urls')
# def register_flag_admin_urls():
# handler = 'v1.admin_views.manage_cdn'
# return [re_path(r'^cdn/$', handler, name='manage-cdn'), ]
#
@hooks.register('before_serve_page')
def serve_latest_draft_page(page, request, args, kwargs):
    """Serve the latest draft revision for specifically configured pages.

    For page IDs listed in ``settings.SERVE_LATEST_DRAFT_PAGES``, respond
    with the latest revision instead of the published page and mark the
    response with a ``Serving-Wagtail-Draft`` header. Returns None for all
    other pages, letting Wagtail serve them normally.
    """
    if page.pk in settings.SERVE_LATEST_DRAFT_PAGES:
        latest_draft = page.get_latest_revision_as_page()
        response = latest_draft.serve(request, *args, **kwargs)
        response['Serving-Wagtail-Draft'] = '1'
        return response
| [
"logging.getLogger",
"django.core.exceptions.PermissionDenied",
"django.utils.html.format_html_join",
"six.moves.urllib.parse.urlsplit",
"cosme.v1.util.util.get_streamfields",
"wagtail.core.models.Page.objects.get",
"django.utils.html.escape",
"django.urls.reverse",
"wagtail.core.hooks.register"
] | [((484, 511), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (501, 511), False, 'import logging\n'), ((515, 551), 'wagtail.core.hooks.register', 'hooks.register', (['"""before_delete_page"""'], {}), "('before_delete_page')\n", (529, 551), False, 'from wagtail.core import hooks\n'), ((654, 689), 'wagtail.core.hooks.register', 'hooks.register', (['"""after_delete_page"""'], {}), "('after_delete_page')\n", (668, 689), False, 'from wagtail.core import hooks\n'), ((1284, 1318), 'wagtail.core.hooks.register', 'hooks.register', (['"""insert_editor_js"""'], {}), "('insert_editor_js')\n", (1298, 1318), False, 'from wagtail.core import hooks\n'), ((1580, 1615), 'wagtail.core.hooks.register', 'hooks.register', (['"""insert_editor_css"""'], {}), "('insert_editor_css')\n", (1594, 1615), False, 'from wagtail.core import hooks\n'), ((2041, 2085), 'wagtail.core.hooks.register', 'hooks.register', (['"""cfgovpage_context_handlers"""'], {}), "('cfgovpage_context_handlers')\n", (2055, 2085), False, 'from wagtail.core import hooks\n'), ((3206, 3248), 'wagtail.core.hooks.register', 'hooks.register', (['"""register_admin_menu_item"""'], {}), "('register_admin_menu_item')\n", (3220, 3248), False, 'from wagtail.core import hooks\n'), ((5583, 5632), 'wagtail.core.hooks.register', 'hooks.register', (['"""register_rich_text_link_handler"""'], {}), "('register_rich_text_link_handler')\n", (5597, 5632), False, 'from wagtail.core import hooks\n'), ((6151, 6186), 'wagtail.core.hooks.register', 'hooks.register', (['"""before_serve_page"""'], {}), "('before_serve_page')\n", (6165, 6186), False, 'from wagtail.core import hooks\n'), ((601, 650), 'django.core.exceptions.PermissionDenied', 'PermissionDenied', (['"""Deletion via POST is disabled"""'], {}), "('Deletion via POST is disabled')\n", (617, 650), False, 'from django.core.exceptions import PermissionDenied\n'), ((1406, 1527), 'django.utils.html.format_html_join', 'format_html_join', (['"""\n"""', '"""<script 
src="{0}{1}"></script>"""', '((settings.STATIC_URL, filename) for filename in js_files)'], {}), '(\'\\n\', \'<script src="{0}{1}"></script>\', ((settings.\n STATIC_URL, filename) for filename in js_files))\n', (1422, 1527), False, 'from django.utils.html import escape, format_html_join\n'), ((1852, 1987), 'django.utils.html.format_html_join', 'format_html_join', (['"""\n"""', '"""<link rel="stylesheet" href="{0}{1}"><link>"""', '((settings.STATIC_URL, filename) for filename in css_files)'], {}), '(\'\\n\', \'<link rel="stylesheet" href="{0}{1}"><link>\', ((\n settings.STATIC_URL, filename) for filename in css_files))\n', (1868, 1987), False, 'from django.utils.html import escape, format_html_join\n'), ((2331, 2358), 'cosme.v1.util.util.get_streamfields', 'util.get_streamfields', (['page'], {}), '(page)\n', (2352, 2358), False, 'from cosme.v1.util import util\n'), ((3341, 3363), 'django.urls.reverse', 'reverse', (['"""admin:index"""'], {}), "('admin:index')\n", (3348, 3363), False, 'from django.urls import reverse\n'), ((4982, 5014), 'wagtail.core.models.Page.objects.get', 'Page.objects.get', ([], {'id': "attrs['id']"}), "(id=attrs['id'])\n", (4998, 5014), False, 'from wagtail.core.models import Page\n'), ((5424, 5442), 'six.moves.urllib.parse.urlsplit', 'urlsplit', (['page_url'], {}), '(page_url)\n', (5432, 5442), False, 'from six.moves.urllib.parse import urlsplit\n'), ((5503, 5519), 'django.utils.html.escape', 'escape', (['page_url'], {}), '(page_url)\n', (5509, 5519), False, 'from django.utils.html import escape, format_html_join\n')] |
from flask import render_template, flash, redirect
from flask import session, url_for, request, g
from flask.ext.login import login_user, logout_user
from flask.ext.login import current_user, login_required
from filebarn import app, db, lm
from .forms import LoginForm
from .models import User
@app.route('/secret/<username>')
@login_required
def secret(username):
return render_template('index.html', username=username)
@app.route('/', methods=['GET', 'POST'])
@app.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or
url_for('secret', username=form.username.data))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
| [
"flask.render_template",
"flask.request.args.get",
"filebarn.app.route",
"flask.flash",
"flask.url_for",
"flask.ext.login.login_user"
] | [((297, 328), 'filebarn.app.route', 'app.route', (['"""/secret/<username>"""'], {}), "('/secret/<username>')\n", (306, 328), False, 'from filebarn import app, db, lm\n'), ((430, 469), 'filebarn.app.route', 'app.route', (['"""/"""'], {'methods': "['GET', 'POST']"}), "('/', methods=['GET', 'POST'])\n", (439, 469), False, 'from filebarn import app, db, lm\n'), ((471, 515), 'filebarn.app.route', 'app.route', (['"""/login"""'], {'methods': "['GET', 'POST']"}), "('/login', methods=['GET', 'POST'])\n", (480, 515), False, 'from filebarn import app, db, lm\n'), ((378, 426), 'flask.render_template', 'render_template', (['"""index.html"""'], {'username': 'username'}), "('index.html', username=username)\n", (393, 426), False, 'from flask import render_template, flash, redirect\n'), ((975, 1015), 'flask.render_template', 'render_template', (['"""login.html"""'], {'form': 'form'}), "('login.html', form=form)\n", (990, 1015), False, 'from flask import render_template, flash, redirect\n'), ((925, 963), 'flask.flash', 'flash', (['"""Invalid username or password."""'], {}), "('Invalid username or password.')\n", (930, 963), False, 'from flask import render_template, flash, redirect\n'), ((745, 784), 'flask.ext.login.login_user', 'login_user', (['user', 'form.remember_me.data'], {}), '(user, form.remember_me.data)\n', (755, 784), False, 'from flask.ext.login import login_user, logout_user\n'), ((813, 837), 'flask.request.args.get', 'request.args.get', (['"""next"""'], {}), "('next')\n", (829, 837), False, 'from flask import session, url_for, request, g\n'), ((869, 915), 'flask.url_for', 'url_for', (['"""secret"""'], {'username': 'form.username.data'}), "('secret', username=form.username.data)\n", (876, 915), False, 'from flask import session, url_for, request, g\n')] |
# -*- coding: utf-8 -*-
from django.db import models
from datetime import date
from django.utils import timezone
from user.models import Person,Customer
from .price_category import PriceCategory
from core.models import Address
from core.mixins import TimeStampedMixin,PartComposMixin,ThumbnailMixin
from core.utils import combine_datetime_pk
#from .relationship import Record
# Create your models here.
class Sku(models.Model):
sku = models.CharField("款号",max_length = 20)
description = models.CharField("描述",max_length = 50,blank=True)
def __str__(self):
return self.sku
class Meta:
verbose_name = "款式"
verbose_name_plural = verbose_name
class Merchandise( TimeStampedMixin,
ThumbnailMixin,
PartComposMixin,
models.Model):
description = models.CharField("描述",max_length = 50)
legacy_id = models.CharField("旧条码",max_length = 50,blank=True)
net_weight = models.FloatField("净重(g)",blank=True)
def carat(self):
return self.net_weight/0.2
carat.short_description = '克拉(Ct)'
carat.admin_order_field = 'net_weight'
sku = models.ForeignKey(
Sku,
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name='merchandise',
verbose_name='款式',
)
depots = models.ManyToManyField(
'Depot',
through = 'MerchandiseDepot',
related_name = 'merchandise',
)
position = models.CharField("库柜",max_length=20,blank=True)
price_category = models.ForeignKey(
PriceCategory,
on_delete=models.CASCADE,
verbose_name="价格类别",
)
price = models.DecimalField("标价",default = 0,max_digits = 10,decimal_places = 2)
margin = models.DecimalField("价格浮动",default = 0,max_digits = 10,decimal_places = 2)
manufacture = models.CharField("产地",max_length=10,blank=True)
records = models.ManyToManyField(
'Record',
through='MerchandiseRecord',
related_name='merchandises',
)
MT_JEWEL = 'JE'
MT_ACCESSORY = 'AC'
MT_PEARL = 'PE'
MT_DIAMOND = 'DM'
MT_COLORED_GEM = 'CG'
MT_OTHER = ''
MERCHANDISE_TYPE = (
(MT_JEWEL,'成品'),
(MT_ACCESSORY,'配件'),
(MT_PEARL,'裸珠'),
(MT_DIAMOND,'钻石'),
(MT_COLORED_GEM,'彩宝'),
(MT_OTHER,'其它')
)
merchandise_type = models.CharField("类型",max_length=4,choices=MERCHANDISE_TYPE,blank=True)
def __str__(self):
return self.description
def serialId(self):
return combine_datetime_pk(self.id,12,self.created)
def get_merchandise_params(self):
return {}
class Meta:
verbose_name = "商品"
verbose_name_plural = verbose_name
ordering = ['-id']
# chain or ring have size
class Jewel(Merchandise):
JEWEL_TYPE = (
("R","戒指"),
("项链",(
("P","项坠"),
("N","珠链"),
)),
("耳饰",(
("D","耳钉"),
("G","耳钩"),
("X","耳线"),
("J","耳夹"),
)),
("W","手链"),
("B","胸针"),
("H","头饰"),
("","其它"),
)
jewel_type = models.CharField('类别',max_length=5,choices=JEWEL_TYPE,default="")
size = models.DecimalField('长度/手寸',default = 0,max_digits = 5,decimal_places = 2)
major_gem = models.CharField('主石',max_length=20,blank=True,default='')
minor_gem = models.CharField('配石',max_length=20,blank=True,default='')
METAL_TYPE = (
("PT","铂金"),
("24KG","24K金"),
("18KY","18K黄"),
("18KW","18K白"),
("18KR","18K红"),
("14KY","14K黄"),
("14KW","14K白"),
("14KR","14K红"),
("10KY","14K黄"),
("10KW","14K白"),
("10KR","14K红"),
("SILV","纯银"),
("S925","S925"),
("GONB","铜镀金"),
("ALLO","合金"),
("","其它")
)
metal_type = models.CharField('金属',max_length=4,choices = METAL_TYPE,default="")
metal_weight = models.FloatField("金重(g)",blank=True,null=True)
def __str__(self):
return "成品"+self.description
class Meta:
verbose_name = "成品"
verbose_name_plural = verbose_name
class Gem(Merchandise):
pass
'''
size, 直径
shape, 形状
color, 颜色
luster, 光泽
surface, 表皮
nacre,珠层
'''
#大小 光泽 瑕疵 颜色
#圆珠 圆度(正圆,近圆,扁圆)
#水滴
#mabe
#巴洛克 keshi(无核)
class Pearl(Gem):
PEARL_TYPE = (
("","其它"),
("海水珍珠",(
("AWHT","南洋白珍珠"),
("SSGD","南洋金珍珠"),
("TBLK","大溪地黑珍珠"),
("AKOY","AKOYA"),
("MABE","马贝珠"),
("KESH","KESHI"),
("CONC","海螺珠"),
)
),
("淡水珍珠",(
("FRWT","淡水珍珠"),
("BARQ","巴洛克珍珠"),
("EDSN","爱迪生珍珠"),
)
)
)
pearl_type = models.CharField(max_length=4,choices=PEARL_TYPE,default="")
min_size = models.DecimalField("最小直径(mm)",default = 0,max_digits = 5,decimal_places = 2)
max_size = models.DecimalField("最大直径(mm)",default = 0,max_digits = 5,decimal_places = 2)
#color
body_color = models.CharField("体色",max_length=10)
overtone = models.CharField("伴色",max_length=10)
IRIDESCENCE = (("","N/A"),("A","强"),("B","明显"),("C","一般"))
iridescence = models.CharField("晕彩",max_length=1,choices=IRIDESCENCE,default="")
LUSTER = (("","N/A"),("A","极强"),("B","强"),("C","中"),("D","弱"))
luster = models.CharField("光泽",max_length=1,choices=LUSTER)
SURFACE = (("","N/A"),("A","无瑕"),("B","微瑕"),("C","小瑕"),("D","瑕疵"),("E","重瑕"))
surface = models.CharField("表皮",max_length=1,choices=SURFACE)
NACRE = (("","N/A"),("A","特厚"),("B","厚"),("C","中"),("D","薄"),("E","极薄"))
nacre = models.CharField("珠层",max_length=1,choices=NACRE)
def __str__(self):
return "珍珠"
class Meta:
verbose_name = "珍珠"
verbose_name_plural = verbose_name
class Diamond(Gem):
COLOR = (
("","其它"),
("D","D"),
("E","E"),
("F","F"),
("G","G"),
("H","H"),
("I","I"),
("J","J"),
("K","K"),
)
color = models.CharField("颜色",max_length=1,choices=COLOR,default="")
CLARITY = (
("","其它"),
("FL","FL"),
("IF","IF"),
("VVS1","VVS1"),
("VVS2","VVS2"),
("VS1","VS1"),
("VS2","VS2"),
("SI1","SI1"),
("SI2","SI2"),
)
clarity = models.CharField("净度",max_length=4,choices=CLARITY,default="")
CUT = (
("","其它"),
("EX","EX"),
("VG","VG"),
("G","G"),
)
cut = models.CharField("切工",max_length=2,choices=CUT,default="")
def __str__(self):
return "钻石"+"{:.2f}".format(self.net_weight/0.2)+"ct"
class Meta:
verbose_name = "钻石"
verbose_name_plural = verbose_name
class ColoredGem(Gem):
def __str__(self):
return "彩宝"
class Meta:
verbose_name = "彩宝"
verbose_name_plural = verbose_name
proxy = True
| [
"django.db.models.FloatField",
"django.db.models.ForeignKey",
"core.utils.combine_datetime_pk",
"django.db.models.ManyToManyField",
"django.db.models.DecimalField",
"django.db.models.CharField"
] | [((438, 475), 'django.db.models.CharField', 'models.CharField', (['"""款号"""'], {'max_length': '(20)'}), "('款号', max_length=20)\n", (454, 475), False, 'from django.db import models\n'), ((492, 541), 'django.db.models.CharField', 'models.CharField', (['"""描述"""'], {'max_length': '(50)', 'blank': '(True)'}), "('描述', max_length=50, blank=True)\n", (508, 541), False, 'from django.db import models\n'), ((769, 806), 'django.db.models.CharField', 'models.CharField', (['"""描述"""'], {'max_length': '(50)'}), "('描述', max_length=50)\n", (785, 806), False, 'from django.db import models\n'), ((821, 871), 'django.db.models.CharField', 'models.CharField', (['"""旧条码"""'], {'max_length': '(50)', 'blank': '(True)'}), "('旧条码', max_length=50, blank=True)\n", (837, 871), False, 'from django.db import models\n'), ((888, 926), 'django.db.models.FloatField', 'models.FloatField', (['"""净重(g)"""'], {'blank': '(True)'}), "('净重(g)', blank=True)\n", (905, 926), False, 'from django.db import models\n'), ((1058, 1181), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Sku'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)', 'related_name': '"""merchandise"""', 'verbose_name': '"""款式"""'}), "(Sku, on_delete=models.SET_NULL, null=True, blank=True,\n related_name='merchandise', verbose_name='款式')\n", (1075, 1181), False, 'from django.db import models\n'), ((1206, 1298), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""Depot"""'], {'through': '"""MerchandiseDepot"""', 'related_name': '"""merchandise"""'}), "('Depot', through='MerchandiseDepot', related_name=\n 'merchandise')\n", (1228, 1298), False, 'from django.db import models\n'), ((1320, 1369), 'django.db.models.CharField', 'models.CharField', (['"""库柜"""'], {'max_length': '(20)', 'blank': '(True)'}), "('库柜', max_length=20, blank=True)\n", (1336, 1369), False, 'from django.db import models\n'), ((1387, 1466), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PriceCategory'], {'on_delete': 
'models.CASCADE', 'verbose_name': '"""价格类别"""'}), "(PriceCategory, on_delete=models.CASCADE, verbose_name='价格类别')\n", (1404, 1466), False, 'from django.db import models\n'), ((1486, 1555), 'django.db.models.DecimalField', 'models.DecimalField', (['"""标价"""'], {'default': '(0)', 'max_digits': '(10)', 'decimal_places': '(2)'}), "('标价', default=0, max_digits=10, decimal_places=2)\n", (1505, 1555), False, 'from django.db import models\n'), ((1569, 1640), 'django.db.models.DecimalField', 'models.DecimalField', (['"""价格浮动"""'], {'default': '(0)', 'max_digits': '(10)', 'decimal_places': '(2)'}), "('价格浮动', default=0, max_digits=10, decimal_places=2)\n", (1588, 1640), False, 'from django.db import models\n'), ((1660, 1709), 'django.db.models.CharField', 'models.CharField', (['"""产地"""'], {'max_length': '(10)', 'blank': '(True)'}), "('产地', max_length=10, blank=True)\n", (1676, 1709), False, 'from django.db import models\n'), ((1720, 1815), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""Record"""'], {'through': '"""MerchandiseRecord"""', 'related_name': '"""merchandises"""'}), "('Record', through='MerchandiseRecord', related_name=\n 'merchandises')\n", (1742, 1815), False, 'from django.db import models\n'), ((2105, 2179), 'django.db.models.CharField', 'models.CharField', (['"""类型"""'], {'max_length': '(4)', 'choices': 'MERCHANDISE_TYPE', 'blank': '(True)'}), "('类型', max_length=4, choices=MERCHANDISE_TYPE, blank=True)\n", (2121, 2179), False, 'from django.db import models\n'), ((2723, 2791), 'django.db.models.CharField', 'models.CharField', (['"""类别"""'], {'max_length': '(5)', 'choices': 'JEWEL_TYPE', 'default': '""""""'}), "('类别', max_length=5, choices=JEWEL_TYPE, default='')\n", (2739, 2791), False, 'from django.db import models\n'), ((2797, 2868), 'django.db.models.DecimalField', 'models.DecimalField', (['"""长度/手寸"""'], {'default': '(0)', 'max_digits': '(5)', 'decimal_places': '(2)'}), "('长度/手寸', default=0, max_digits=5, decimal_places=2)\n", (2816, 
2868), False, 'from django.db import models\n'), ((2885, 2946), 'django.db.models.CharField', 'models.CharField', (['"""主石"""'], {'max_length': '(20)', 'blank': '(True)', 'default': '""""""'}), "('主石', max_length=20, blank=True, default='')\n", (2901, 2946), False, 'from django.db import models\n'), ((2957, 3018), 'django.db.models.CharField', 'models.CharField', (['"""配石"""'], {'max_length': '(20)', 'blank': '(True)', 'default': '""""""'}), "('配石', max_length=20, blank=True, default='')\n", (2973, 3018), False, 'from django.db import models\n'), ((3338, 3406), 'django.db.models.CharField', 'models.CharField', (['"""金属"""'], {'max_length': '(4)', 'choices': 'METAL_TYPE', 'default': '""""""'}), "('金属', max_length=4, choices=METAL_TYPE, default='')\n", (3354, 3406), False, 'from django.db import models\n'), ((3422, 3471), 'django.db.models.FloatField', 'models.FloatField', (['"""金重(g)"""'], {'blank': '(True)', 'null': '(True)'}), "('金重(g)', blank=True, null=True)\n", (3439, 3471), False, 'from django.db import models\n'), ((4074, 4136), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(4)', 'choices': 'PEARL_TYPE', 'default': '""""""'}), "(max_length=4, choices=PEARL_TYPE, default='')\n", (4090, 4136), False, 'from django.db import models\n'), ((4148, 4222), 'django.db.models.DecimalField', 'models.DecimalField', (['"""最小直径(mm)"""'], {'default': '(0)', 'max_digits': '(5)', 'decimal_places': '(2)'}), "('最小直径(mm)', default=0, max_digits=5, decimal_places=2)\n", (4167, 4222), False, 'from django.db import models\n'), ((4238, 4312), 'django.db.models.DecimalField', 'models.DecimalField', (['"""最大直径(mm)"""'], {'default': '(0)', 'max_digits': '(5)', 'decimal_places': '(2)'}), "('最大直径(mm)', default=0, max_digits=5, decimal_places=2)\n", (4257, 4312), False, 'from django.db import models\n'), ((4339, 4376), 'django.db.models.CharField', 'models.CharField', (['"""体色"""'], {'max_length': '(10)'}), "('体色', max_length=10)\n", (4355, 4376), False, 'from 
django.db import models\n'), ((4388, 4425), 'django.db.models.CharField', 'models.CharField', (['"""伴色"""'], {'max_length': '(10)'}), "('伴色', max_length=10)\n", (4404, 4425), False, 'from django.db import models\n'), ((4500, 4569), 'django.db.models.CharField', 'models.CharField', (['"""晕彩"""'], {'max_length': '(1)', 'choices': 'IRIDESCENCE', 'default': '""""""'}), "('晕彩', max_length=1, choices=IRIDESCENCE, default='')\n", (4516, 4569), False, 'from django.db import models\n'), ((4642, 4694), 'django.db.models.CharField', 'models.CharField', (['"""光泽"""'], {'max_length': '(1)', 'choices': 'LUSTER'}), "('光泽', max_length=1, choices=LUSTER)\n", (4658, 4694), False, 'from django.db import models\n'), ((4784, 4837), 'django.db.models.CharField', 'models.CharField', (['"""表皮"""'], {'max_length': '(1)', 'choices': 'SURFACE'}), "('表皮', max_length=1, choices=SURFACE)\n", (4800, 4837), False, 'from django.db import models\n'), ((4920, 4971), 'django.db.models.CharField', 'models.CharField', (['"""珠层"""'], {'max_length': '(1)', 'choices': 'NACRE'}), "('珠层', max_length=1, choices=NACRE)\n", (4936, 4971), False, 'from django.db import models\n'), ((5238, 5301), 'django.db.models.CharField', 'models.CharField', (['"""颜色"""'], {'max_length': '(1)', 'choices': 'COLOR', 'default': '""""""'}), "('颜色', max_length=1, choices=COLOR, default='')\n", (5254, 5301), False, 'from django.db import models\n'), ((5476, 5541), 'django.db.models.CharField', 'models.CharField', (['"""净度"""'], {'max_length': '(4)', 'choices': 'CLARITY', 'default': '""""""'}), "('净度', max_length=4, choices=CLARITY, default='')\n", (5492, 5541), False, 'from django.db import models\n'), ((5615, 5676), 'django.db.models.CharField', 'models.CharField', (['"""切工"""'], {'max_length': '(2)', 'choices': 'CUT', 'default': '""""""'}), "('切工', max_length=2, choices=CUT, default='')\n", (5631, 5676), False, 'from django.db import models\n'), ((2255, 2301), 'core.utils.combine_datetime_pk', 'combine_datetime_pk', (['self.id', 
'(12)', 'self.created'], {}), '(self.id, 12, self.created)\n', (2274, 2301), False, 'from core.utils import combine_datetime_pk\n')] |
import unittest
import tempfile
import numpy as np
import coremltools
import os
import shutil
import tensorflow as tf
from tensorflow.keras import backend as _keras
from tensorflow.keras import layers
from coremltools._deps import HAS_TF_2
from test_utils import generate_data, tf_transpose
class TensorFlowKerasTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
tf.keras.backend.set_learning_phase(False)
def setUp(self):
self.saved_model_dir = tempfile.mkdtemp()
_, self.model_file = tempfile.mkstemp(suffix='.h5', prefix=self.saved_model_dir)
def tearDown(self):
if os.path.exists(self.saved_model_dir):
shutil.rmtree(self.saved_model_dir)
def _get_tf_tensor_name(self, graph, name):
return graph.get_operation_by_name(name).outputs[0].name
def _test_model(self, model, data_mode='random_zero_mean', decimal=4, use_cpu_only=False, has_variables=True, verbose=False):
if not HAS_TF_2:
self._test_keras_model_tf1(model, data_mode, decimal, use_cpu_only, has_variables, verbose)
else:
self._test_keras_model_tf2(model, data_mode, decimal, use_cpu_only, has_variables, verbose)
def _test_keras_model_tf1(self, model, data_mode, decimal, use_cpu_only, has_variables, verbose):
graph_def_file = os.path.join(self.saved_model_dir, 'graph.pb')
frozen_model_file = os.path.join(self.saved_model_dir, 'frozen.pb')
core_ml_model_file = os.path.join(self.saved_model_dir, 'model.mlmodel')
input_shapes = {inp.op.name: inp.shape.as_list() for inp in model.inputs}
for name, shape in input_shapes.items():
input_shapes[name] = [dim if dim is not None else 1 for dim in shape]
output_node_names = [output.op.name for output in model.outputs]
tf_graph = _keras.get_session().graph
tf.reset_default_graph()
if has_variables:
with tf_graph.as_default():
saver = tf.train.Saver()
# note: if Keras backend has_variable is False, we're not making variables constant
with tf.Session(graph=tf_graph) as sess:
sess.run(tf.global_variables_initializer())
feed_dict = {}
for name, shape in input_shapes.items():
tensor_name = tf_graph.get_operation_by_name(name).outputs[0].name
feed_dict[tensor_name] = generate_data(shape, data_mode)
# run the result
fetches = [
tf_graph.get_operation_by_name(name).outputs[0] for name in output_node_names
]
result = sess.run(fetches, feed_dict=feed_dict)
# save graph definition somewhere
tf.train.write_graph(sess.graph, self.saved_model_dir, graph_def_file, as_text=False)
# freeze_graph() has been raising error with tf.keras models since no
# later than TensorFlow 1.6, so we're not using freeze_graph() here.
# See: https://github.com/tensorflow/models/issues/5387
output_graph_def = tf.graph_util.convert_variables_to_constants(
sess, # The session is used to retrieve the weights
tf_graph.as_graph_def(), # The graph_def is used to retrieve the nodes
output_node_names # The output node names are used to select the useful nodes
)
with tf.gfile.GFile(frozen_model_file, 'wb') as f:
f.write(output_graph_def.SerializeToString())
_keras.clear_session()
# convert to Core ML model format
core_ml_model = coremltools.converters.tensorflow.convert(
frozen_model_file,
inputs=input_shapes,
outputs=output_node_names,
use_cpu_only=use_cpu_only)
if verbose:
print('\nFrozen model saved at {}'.format(frozen_model_file))
print('\nCore ML model description:')
from coremltools.models.neural_network.printer import print_network_spec
print_network_spec(core_ml_model.get_spec(), style='coding')
core_ml_model.save(core_ml_model_file)
print('\nCore ML model saved at {}'.format(core_ml_model_file))
# transpose input data as Core ML requires
core_ml_inputs = {
name: tf_transpose(feed_dict[self._get_tf_tensor_name(tf_graph, name)])
for name in input_shapes
}
# run prediction in Core ML
core_ml_output = core_ml_model.predict(core_ml_inputs, useCPUOnly=use_cpu_only)
for idx, out_name in enumerate(output_node_names):
tf_out = result[idx]
if len(tf_out.shape) == 0:
tf_out = np.array([tf_out])
tp = tf_out.flatten()
coreml_out = core_ml_output[out_name]
cp = coreml_out.flatten()
self.assertTrue(tf_out.shape == coreml_out.shape)
for i in range(len(tp)):
max_den = max(1.0, tp[i], cp[i])
self.assertAlmostEqual(tp[i] / max_den, cp[i] / max_den, delta=10 ** -decimal)
def _test_keras_model_tf2(self, model, data_mode, decimal, use_cpu_only, has_variables, verbose):
core_ml_model_file = self.model_file.rsplit('.')[0] + '.mlmodel'
input_dict = {inp.op.name: inp.shape.as_list() for inp in model.inputs}
for name, shape in input_dict.items():
input_dict[name] = [dim if dim is not None else 1 for dim in shape]
output_list = ['Identity']
model.save(self.model_file)
# convert Keras model into Core ML model format
core_ml_model = coremltools.converters.tensorflow.convert(
filename=self.model_file,
inputs=input_dict,
outputs=output_list,
use_cpu_only=use_cpu_only)
if verbose:
print('\nKeras model saved at {}'.format(self.model_file))
print('\nCore ML model description:')
from coremltools.models.neural_network.printer import print_network_spec
print_network_spec(core_ml_model.get_spec(), style='coding')
core_ml_model.save(core_ml_model_file)
print('\nCore ML model saved at {}'.format(core_ml_model_file))
core_ml_inputs = {
name: generate_data(shape, data_mode) for name, shape in input_dict.items()
}
# run prediction and compare results
keras_output = model.predict(list(core_ml_inputs.values())[0])
core_ml_output = core_ml_model.predict(
core_ml_inputs, useCPUOnly=use_cpu_only)[output_list[0]]
if verbose:
print('\nPredictions', keras_output.shape, ' vs.', core_ml_output.shape)
print(keras_output.flatten()[:6])
print(core_ml_output.flatten()[:6])
np.testing.assert_array_equal(
keras_output.shape, core_ml_output.shape)
np.testing.assert_almost_equal(
keras_output.flatten(), core_ml_output.flatten(), decimal=decimal)
class SimpleLayerTests(TensorFlowKerasTests):
def test_dense_softmax(self):
model = tf.keras.Sequential()
model.add(layers.Dense(16, input_shape=(16,), activation=tf.nn.softmax))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_dense_elu(self):
model = tf.keras.Sequential()
model.add(layers.Dense(16, input_shape=(16,), activation=tf.nn.elu))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model, decimal=2)
def test_dense_tanh(self):
model = tf.keras.Sequential()
model.add(layers.Dense(16, input_shape=(16,), activation=tf.nn.tanh))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_housenet_random(self):
num_hidden = 2
num_features = 3
model = tf.keras.Sequential()
model.add(layers.Dense(num_hidden, input_dim=num_features))
model.add(layers.Activation(tf.nn.relu))
model.add(layers.Dense(1, input_dim=num_features))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_tiny_conv2d_random(self):
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels, kernel_height, kernel_width = 3, 5, 5
model = tf.keras.Sequential()
model.add(layers.Conv2D(
input_shape=input_shape,
filters=num_kernels, kernel_size=(kernel_height, kernel_width)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_tiny_conv2d_dilated_random(self):
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels, kernel_height, kernel_width = 3, 5, 5
model = tf.keras.Sequential()
model.add(layers.Conv2D(
input_shape=input_shape, dilation_rate=(2, 2),
filters=num_kernels, kernel_size=(kernel_height, kernel_width)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_tiny_conv1d_same_random(self):
input_dim = 2
input_length = 10
filter_length = 3
nb_filters = 4
model = tf.keras.Sequential()
model.add(layers.Conv1D(
nb_filters, kernel_size=filter_length, padding='same',
input_shape=(input_length, input_dim)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_tiny_conv1d_valid_random(self):
input_dim = 2
input_length = 10
filter_length = 3
nb_filters = 4
model = tf.keras.Sequential()
model.add(layers.Conv1D(
nb_filters, kernel_size=filter_length, padding='valid',
input_shape=(input_length, input_dim)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
@unittest.skip('non-equal block shape is not yet supported')
def test_tiny_conv1d_dilated_random(self):
input_shape = (20, 1)
num_kernels = 2
filter_length = 3
model = tf.keras.Sequential()
model.add(layers.Conv1D(
num_kernels, kernel_size=filter_length, padding='valid',
input_shape=input_shape, dilation_rate=3))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_flatten(self):
model = tf.keras.Sequential()
model.add(layers.Flatten(input_shape=(2, 2, 2)))
self._test_model(model, data_mode='linear', has_variables=False)
def test_conv_dense(self):
input_shape = (48, 48, 3)
model = tf.keras.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation=tf.nn.relu, input_shape=input_shape))
model.add(layers.Flatten())
model.add(layers.Dense(10, activation=tf.nn.softmax))
self._test_model(model)
def test_conv_batchnorm_random(self):
input_dim = 10
input_shape = (input_dim, input_dim, 3)
num_kernels = 3
kernel_height = 5
kernel_width = 5
model = tf.keras.Sequential()
model.add(layers.Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width)))
model.add(layers.BatchNormalization(epsilon=1e-5))
model.add(layers.Dense(10, activation=tf.nn.softmax))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model, decimal=2, has_variables=True)
@unittest.skip('list index out of range')
def test_tiny_deconv_random(self):
input_dim = 13
input_shape = (input_dim, input_dim, 5)
num_kernels = 16
kernel_height = 3
kernel_width = 3
model = tf.keras.Sequential()
model.add(layers.Conv2DTranspose(
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape, padding='valid', strides=(2, 2)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
@unittest.skip('Deconvolution layer has weight matrix of size 432 to encode a 3 x 4 x 3 x 3 convolution.')
def test_tiny_deconv_random_same_padding(self):
input_dim = 14
input_shape = (input_dim, input_dim, 3)
num_kernels = 16
kernel_height = 3
kernel_width = 3
model = tf.keras.Sequential()
model.add(layers.Conv2DTranspose(
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape, padding='same', strides=(2, 2)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_tiny_depthwise_conv_same_pad_depth_multiplier(self):
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 4
kernel_height = 3
kernel_width = 3
model = tf.keras.Sequential()
model.add(layers.DepthwiseConv2D(
depth_multiplier=depth_multiplier,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape, padding='same', strides=(1, 1)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_tiny_depthwise_conv_valid_pad_depth_multiplier(self):
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 2
kernel_height = 3
kernel_width = 3
model = tf.keras.Sequential()
model.add(layers.DepthwiseConv2D(
depth_multiplier=depth_multiplier,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape, padding='valid', strides=(1, 1)))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model)
def test_tiny_separable_conv_valid_depth_multiplier(self):
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 5
kernel_height = 3
kernel_width = 3
num_kernels = 40
model = tf.keras.Sequential()
model.add(layers.SeparableConv2D(
filters=num_kernels, kernel_size=(kernel_height, kernel_width),
padding='valid', strides=(1, 1), depth_multiplier=depth_multiplier,
input_shape=input_shape))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model, decimal=2)
def test_tiny_separable_conv_same_fancy_depth_multiplier(self):
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 2
kernel_height = 3
kernel_width = 3
num_kernels = 40
model = tf.keras.Sequential()
model.add(layers.SeparableConv2D(
filters=num_kernels, kernel_size=(kernel_height, kernel_width),
padding='same', strides=(2, 2), activation='relu', depth_multiplier=depth_multiplier,
input_shape=input_shape))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model, decimal=2)
def test_max_pooling_no_overlap(self):
# no_overlap: pool_size = strides
model = tf.keras.Sequential()
model.add(layers.MaxPooling2D(
input_shape=(16, 16, 3), pool_size=(2, 2),
strides=None, padding='valid'))
self._test_model(model, has_variables=False)
def test_max_pooling_overlap_multiple(self):
# input shape is multiple of pool_size, strides != pool_size
model = tf.keras.Sequential()
model.add(layers.MaxPooling2D(
input_shape=(18, 18, 3), pool_size=(3, 3),
strides=(2, 2), padding='valid'))
self._test_model(model, has_variables=False)
def test_max_pooling_overlap_odd(self):
model = tf.keras.Sequential()
model.add(layers.MaxPooling2D(
input_shape=(16, 16, 3), pool_size=(3, 3),
strides=(2, 2), padding='valid'))
self._test_model(model, has_variables=False)
def test_max_pooling_overlap_same(self):
model = tf.keras.Sequential()
model.add(layers.MaxPooling2D(
input_shape=(16, 16, 3), pool_size=(3, 3),
strides=(2, 2), padding='same'))
self._test_model(model, has_variables=False)
def test_global_max_pooling_2d(self):
model = tf.keras.Sequential()
model.add(layers.GlobalMaxPooling2D(input_shape=(16, 16, 3)))
self._test_model(model, has_variables=False)
def test_global_avg_pooling_2d(self):
model = tf.keras.Sequential()
model.add(layers.GlobalAveragePooling2D(input_shape=(16, 16, 3)))
self._test_model(model, has_variables=False)
def test_max_pooling_1d(self):
model = tf.keras.Sequential()
model.add(layers.MaxPooling1D(input_shape=(16, 3), pool_size=2))
self._test_model(model, has_variables=False)
if __name__ == '__main__':
np.random.seed(1984)
unittest.main()
| [
"numpy.random.rand",
"tensorflow.keras.layers.BatchNormalization",
"numpy.array",
"tensorflow.keras.layers.Dense",
"tensorflow.train.write_graph",
"tensorflow.keras.layers.GlobalMaxPooling2D",
"unittest.main",
"tensorflow.keras.layers.MaxPooling1D",
"tensorflow.gfile.GFile",
"tensorflow.keras.laye... | [((10165, 10224), 'unittest.skip', 'unittest.skip', (['"""non-equal block shape is not yet supported"""'], {}), "('non-equal block shape is not yet supported')\n", (10178, 10224), False, 'import unittest\n'), ((11845, 11885), 'unittest.skip', 'unittest.skip', (['"""list index out of range"""'], {}), "('list index out of range')\n", (11858, 11885), False, 'import unittest\n'), ((12432, 12547), 'unittest.skip', 'unittest.skip', (['"""Deconvolution layer has weight matrix of size 432 to encode a 3 x 4 x 3 x 3 convolution."""'], {}), "(\n 'Deconvolution layer has weight matrix of size 432 to encode a 3 x 4 x 3 x 3 convolution.'\n )\n", (12445, 12547), False, 'import unittest\n'), ((17427, 17447), 'numpy.random.seed', 'np.random.seed', (['(1984)'], {}), '(1984)\n', (17441, 17447), True, 'import numpy as np\n'), ((17452, 17467), 'unittest.main', 'unittest.main', ([], {}), '()\n', (17465, 17467), False, 'import unittest\n'), ((391, 433), 'tensorflow.keras.backend.set_learning_phase', 'tf.keras.backend.set_learning_phase', (['(False)'], {}), '(False)\n', (426, 433), True, 'import tensorflow as tf\n'), ((487, 505), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (503, 505), False, 'import tempfile\n'), ((535, 594), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".h5"""', 'prefix': 'self.saved_model_dir'}), "(suffix='.h5', prefix=self.saved_model_dir)\n", (551, 594), False, 'import tempfile\n'), ((631, 667), 'os.path.exists', 'os.path.exists', (['self.saved_model_dir'], {}), '(self.saved_model_dir)\n', (645, 667), False, 'import os\n'), ((1338, 1384), 'os.path.join', 'os.path.join', (['self.saved_model_dir', '"""graph.pb"""'], {}), "(self.saved_model_dir, 'graph.pb')\n", (1350, 1384), False, 'import os\n'), ((1413, 1460), 'os.path.join', 'os.path.join', (['self.saved_model_dir', '"""frozen.pb"""'], {}), "(self.saved_model_dir, 'frozen.pb')\n", (1425, 1460), False, 'import os\n'), ((1490, 1541), 'os.path.join', 'os.path.join', 
(['self.saved_model_dir', '"""model.mlmodel"""'], {}), "(self.saved_model_dir, 'model.mlmodel')\n", (1502, 1541), False, 'import os\n'), ((1885, 1909), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (1907, 1909), True, 'import tensorflow as tf\n'), ((3525, 3547), 'tensorflow.keras.backend.clear_session', '_keras.clear_session', ([], {}), '()\n', (3545, 3547), True, 'from tensorflow.keras import backend as _keras\n'), ((3615, 3755), 'coremltools.converters.tensorflow.convert', 'coremltools.converters.tensorflow.convert', (['frozen_model_file'], {'inputs': 'input_shapes', 'outputs': 'output_node_names', 'use_cpu_only': 'use_cpu_only'}), '(frozen_model_file, inputs=\n input_shapes, outputs=output_node_names, use_cpu_only=use_cpu_only)\n', (3656, 3755), False, 'import coremltools\n'), ((5643, 5782), 'coremltools.converters.tensorflow.convert', 'coremltools.converters.tensorflow.convert', ([], {'filename': 'self.model_file', 'inputs': 'input_dict', 'outputs': 'output_list', 'use_cpu_only': 'use_cpu_only'}), '(filename=self.model_file, inputs=\n input_dict, outputs=output_list, use_cpu_only=use_cpu_only)\n', (5684, 5782), False, 'import coremltools\n'), ((6823, 6894), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['keras_output.shape', 'core_ml_output.shape'], {}), '(keras_output.shape, core_ml_output.shape)\n', (6852, 6894), True, 'import numpy as np\n'), ((7126, 7147), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (7145, 7147), True, 'import tensorflow as tf\n'), ((7391, 7412), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (7410, 7412), True, 'import tensorflow as tf\n'), ((7664, 7685), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (7683, 7685), True, 'import tensorflow as tf\n'), ((7980, 8001), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (7999, 8001), True, 'import tensorflow as tf\n'), ((8479, 8500), 
'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (8498, 8500), True, 'import tensorflow as tf\n'), ((8957, 8978), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (8976, 8978), True, 'import tensorflow as tf\n'), ((9421, 9442), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (9440, 9442), True, 'import tensorflow as tf\n'), ((9869, 9890), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (9888, 9890), True, 'import tensorflow as tf\n'), ((10368, 10389), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (10387, 10389), True, 'import tensorflow as tf\n'), ((10707, 10728), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (10726, 10728), True, 'import tensorflow as tf\n'), ((10941, 10962), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (10960, 10962), True, 'import tensorflow as tf\n'), ((11391, 11412), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (11410, 11412), True, 'import tensorflow as tf\n'), ((12088, 12109), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (12107, 12109), True, 'import tensorflow as tf\n'), ((12753, 12774), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (12772, 12774), True, 'import tensorflow as tf\n'), ((13324, 13345), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (13343, 13345), True, 'import tensorflow as tf\n'), ((13910, 13931), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (13929, 13931), True, 'import tensorflow as tf\n'), ((14518, 14539), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (14537, 14539), True, 'import tensorflow as tf\n'), ((15163, 15184), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (15182, 15184), True, 'import tensorflow as tf\n'), ((15667, 15688), 'tensorflow.keras.Sequential', 
'tf.keras.Sequential', ([], {}), '()\n', (15686, 15688), True, 'import tensorflow as tf\n'), ((16015, 16036), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (16034, 16036), True, 'import tensorflow as tf\n'), ((16291, 16312), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (16310, 16312), True, 'import tensorflow as tf\n'), ((16568, 16589), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (16587, 16589), True, 'import tensorflow as tf\n'), ((16841, 16862), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (16860, 16862), True, 'import tensorflow as tf\n'), ((17045, 17066), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (17064, 17066), True, 'import tensorflow as tf\n'), ((17246, 17267), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', ([], {}), '()\n', (17265, 17267), True, 'import tensorflow as tf\n'), ((681, 716), 'shutil.rmtree', 'shutil.rmtree', (['self.saved_model_dir'], {}), '(self.saved_model_dir)\n', (694, 716), False, 'import shutil\n'), ((1850, 1870), 'tensorflow.keras.backend.get_session', '_keras.get_session', ([], {}), '()\n', (1868, 1870), True, 'from tensorflow.keras import backend as _keras\n'), ((2123, 2149), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'tf_graph'}), '(graph=tf_graph)\n', (2133, 2149), True, 'import tensorflow as tf\n'), ((2730, 2819), 'tensorflow.train.write_graph', 'tf.train.write_graph', (['sess.graph', 'self.saved_model_dir', 'graph_def_file'], {'as_text': '(False)'}), '(sess.graph, self.saved_model_dir, graph_def_file,\n as_text=False)\n', (2750, 2819), True, 'import tensorflow as tf\n'), ((6300, 6331), 'test_utils.generate_data', 'generate_data', (['shape', 'data_mode'], {}), '(shape, data_mode)\n', (6313, 6331), False, 'from test_utils import generate_data, tf_transpose\n'), ((7166, 7227), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(16)'], {'input_shape': '(16,)', 'activation': 
'tf.nn.softmax'}), '(16, input_shape=(16,), activation=tf.nn.softmax)\n', (7178, 7227), False, 'from tensorflow.keras import layers\n'), ((7431, 7488), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(16)'], {'input_shape': '(16,)', 'activation': 'tf.nn.elu'}), '(16, input_shape=(16,), activation=tf.nn.elu)\n', (7443, 7488), False, 'from tensorflow.keras import layers\n'), ((7704, 7762), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(16)'], {'input_shape': '(16,)', 'activation': 'tf.nn.tanh'}), '(16, input_shape=(16,), activation=tf.nn.tanh)\n', (7716, 7762), False, 'from tensorflow.keras import layers\n'), ((8020, 8068), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['num_hidden'], {'input_dim': 'num_features'}), '(num_hidden, input_dim=num_features)\n', (8032, 8068), False, 'from tensorflow.keras import layers\n'), ((8088, 8117), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['tf.nn.relu'], {}), '(tf.nn.relu)\n', (8105, 8117), False, 'from tensorflow.keras import layers\n'), ((8137, 8176), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'input_dim': 'num_features'}), '(1, input_dim=num_features)\n', (8149, 8176), False, 'from tensorflow.keras import layers\n'), ((8519, 8626), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', ([], {'input_shape': 'input_shape', 'filters': 'num_kernels', 'kernel_size': '(kernel_height, kernel_width)'}), '(input_shape=input_shape, filters=num_kernels, kernel_size=(\n kernel_height, kernel_width))\n', (8532, 8626), False, 'from tensorflow.keras import layers\n'), ((8997, 9126), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', ([], {'input_shape': 'input_shape', 'dilation_rate': '(2, 2)', 'filters': 'num_kernels', 'kernel_size': '(kernel_height, kernel_width)'}), '(input_shape=input_shape, dilation_rate=(2, 2), filters=\n num_kernels, kernel_size=(kernel_height, kernel_width))\n', (9010, 9126), False, 'from tensorflow.keras import layers\n'), ((9461, 9572), 'tensorflow.keras.layers.Conv1D', 
'layers.Conv1D', (['nb_filters'], {'kernel_size': 'filter_length', 'padding': '"""same"""', 'input_shape': '(input_length, input_dim)'}), "(nb_filters, kernel_size=filter_length, padding='same',\n input_shape=(input_length, input_dim))\n", (9474, 9572), False, 'from tensorflow.keras import layers\n'), ((9909, 10021), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', (['nb_filters'], {'kernel_size': 'filter_length', 'padding': '"""valid"""', 'input_shape': '(input_length, input_dim)'}), "(nb_filters, kernel_size=filter_length, padding='valid',\n input_shape=(input_length, input_dim))\n", (9922, 10021), False, 'from tensorflow.keras import layers\n'), ((10408, 10524), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', (['num_kernels'], {'kernel_size': 'filter_length', 'padding': '"""valid"""', 'input_shape': 'input_shape', 'dilation_rate': '(3)'}), "(num_kernels, kernel_size=filter_length, padding='valid',\n input_shape=input_shape, dilation_rate=3)\n", (10421, 10524), False, 'from tensorflow.keras import layers\n'), ((10747, 10784), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {'input_shape': '(2, 2, 2)'}), '(input_shape=(2, 2, 2))\n', (10761, 10784), False, 'from tensorflow.keras import layers\n'), ((10981, 11054), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', (['(32)', '(3, 3)'], {'activation': 'tf.nn.relu', 'input_shape': 'input_shape'}), '(32, (3, 3), activation=tf.nn.relu, input_shape=input_shape)\n', (10994, 11054), False, 'from tensorflow.keras import layers\n'), ((11074, 11090), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (11088, 11090), False, 'from tensorflow.keras import layers\n'), ((11110, 11152), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(10)'], {'activation': 'tf.nn.softmax'}), '(10, activation=tf.nn.softmax)\n', (11122, 11152), False, 'from tensorflow.keras import layers\n'), ((11431, 11538), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', ([], {'input_shape': 'input_shape', 'filters': 
'num_kernels', 'kernel_size': '(kernel_height, kernel_width)'}), '(input_shape=input_shape, filters=num_kernels, kernel_size=(\n kernel_height, kernel_width))\n', (11444, 11538), False, 'from tensorflow.keras import layers\n'), ((11590, 11630), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {'epsilon': '(1e-05)'}), '(epsilon=1e-05)\n', (11615, 11630), False, 'from tensorflow.keras import layers\n'), ((11649, 11691), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(10)'], {'activation': 'tf.nn.softmax'}), '(10, activation=tf.nn.softmax)\n', (11661, 11691), False, 'from tensorflow.keras import layers\n'), ((12128, 12276), 'tensorflow.keras.layers.Conv2DTranspose', 'layers.Conv2DTranspose', ([], {'filters': 'num_kernels', 'kernel_size': '(kernel_height, kernel_width)', 'input_shape': 'input_shape', 'padding': '"""valid"""', 'strides': '(2, 2)'}), "(filters=num_kernels, kernel_size=(kernel_height,\n kernel_width), input_shape=input_shape, padding='valid', strides=(2, 2))\n", (12150, 12276), False, 'from tensorflow.keras import layers\n'), ((12793, 12940), 'tensorflow.keras.layers.Conv2DTranspose', 'layers.Conv2DTranspose', ([], {'filters': 'num_kernels', 'kernel_size': '(kernel_height, kernel_width)', 'input_shape': 'input_shape', 'padding': '"""same"""', 'strides': '(2, 2)'}), "(filters=num_kernels, kernel_size=(kernel_height,\n kernel_width), input_shape=input_shape, padding='same', strides=(2, 2))\n", (12815, 12940), False, 'from tensorflow.keras import layers\n'), ((13364, 13530), 'tensorflow.keras.layers.DepthwiseConv2D', 'layers.DepthwiseConv2D', ([], {'depth_multiplier': 'depth_multiplier', 'kernel_size': '(kernel_height, kernel_width)', 'input_shape': 'input_shape', 'padding': '"""same"""', 'strides': '(1, 1)'}), "(depth_multiplier=depth_multiplier, kernel_size=(\n kernel_height, kernel_width), input_shape=input_shape, padding='same',\n strides=(1, 1))\n", (13386, 13530), False, 'from tensorflow.keras import layers\n'), 
((13950, 14117), 'tensorflow.keras.layers.DepthwiseConv2D', 'layers.DepthwiseConv2D', ([], {'depth_multiplier': 'depth_multiplier', 'kernel_size': '(kernel_height, kernel_width)', 'input_shape': 'input_shape', 'padding': '"""valid"""', 'strides': '(1, 1)'}), "(depth_multiplier=depth_multiplier, kernel_size=(\n kernel_height, kernel_width), input_shape=input_shape, padding='valid',\n strides=(1, 1))\n", (13972, 14117), False, 'from tensorflow.keras import layers\n'), ((14558, 14746), 'tensorflow.keras.layers.SeparableConv2D', 'layers.SeparableConv2D', ([], {'filters': 'num_kernels', 'kernel_size': '(kernel_height, kernel_width)', 'padding': '"""valid"""', 'strides': '(1, 1)', 'depth_multiplier': 'depth_multiplier', 'input_shape': 'input_shape'}), "(filters=num_kernels, kernel_size=(kernel_height,\n kernel_width), padding='valid', strides=(1, 1), depth_multiplier=\n depth_multiplier, input_shape=input_shape)\n", (14580, 14746), False, 'from tensorflow.keras import layers\n'), ((15203, 15408), 'tensorflow.keras.layers.SeparableConv2D', 'layers.SeparableConv2D', ([], {'filters': 'num_kernels', 'kernel_size': '(kernel_height, kernel_width)', 'padding': '"""same"""', 'strides': '(2, 2)', 'activation': '"""relu"""', 'depth_multiplier': 'depth_multiplier', 'input_shape': 'input_shape'}), "(filters=num_kernels, kernel_size=(kernel_height,\n kernel_width), padding='same', strides=(2, 2), activation='relu',\n depth_multiplier=depth_multiplier, input_shape=input_shape)\n", (15225, 15408), False, 'from tensorflow.keras import layers\n'), ((15707, 15804), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', ([], {'input_shape': '(16, 16, 3)', 'pool_size': '(2, 2)', 'strides': 'None', 'padding': '"""valid"""'}), "(input_shape=(16, 16, 3), pool_size=(2, 2), strides=None,\n padding='valid')\n", (15726, 15804), False, 'from tensorflow.keras import layers\n'), ((16055, 16155), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', ([], {'input_shape': '(18, 18, 3)', 
'pool_size': '(3, 3)', 'strides': '(2, 2)', 'padding': '"""valid"""'}), "(input_shape=(18, 18, 3), pool_size=(3, 3), strides=(2, \n 2), padding='valid')\n", (16074, 16155), False, 'from tensorflow.keras import layers\n'), ((16331, 16431), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', ([], {'input_shape': '(16, 16, 3)', 'pool_size': '(3, 3)', 'strides': '(2, 2)', 'padding': '"""valid"""'}), "(input_shape=(16, 16, 3), pool_size=(3, 3), strides=(2, \n 2), padding='valid')\n", (16350, 16431), False, 'from tensorflow.keras import layers\n'), ((16608, 16707), 'tensorflow.keras.layers.MaxPooling2D', 'layers.MaxPooling2D', ([], {'input_shape': '(16, 16, 3)', 'pool_size': '(3, 3)', 'strides': '(2, 2)', 'padding': '"""same"""'}), "(input_shape=(16, 16, 3), pool_size=(3, 3), strides=(2, \n 2), padding='same')\n", (16627, 16707), False, 'from tensorflow.keras import layers\n'), ((16881, 16931), 'tensorflow.keras.layers.GlobalMaxPooling2D', 'layers.GlobalMaxPooling2D', ([], {'input_shape': '(16, 16, 3)'}), '(input_shape=(16, 16, 3))\n', (16906, 16931), False, 'from tensorflow.keras import layers\n'), ((17085, 17139), 'tensorflow.keras.layers.GlobalAveragePooling2D', 'layers.GlobalAveragePooling2D', ([], {'input_shape': '(16, 16, 3)'}), '(input_shape=(16, 16, 3))\n', (17114, 17139), False, 'from tensorflow.keras import layers\n'), ((17286, 17339), 'tensorflow.keras.layers.MaxPooling1D', 'layers.MaxPooling1D', ([], {'input_shape': '(16, 3)', 'pool_size': '(2)'}), '(input_shape=(16, 3), pool_size=2)\n', (17305, 17339), False, 'from tensorflow.keras import layers\n'), ((2000, 2016), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (2014, 2016), True, 'import tensorflow as tf\n'), ((2180, 2213), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2211, 2213), True, 'import tensorflow as tf\n'), ((2419, 2450), 'test_utils.generate_data', 'generate_data', (['shape', 'data_mode'], {}), '(shape, data_mode)\n', 
(2432, 2450), False, 'from test_utils import generate_data, tf_transpose\n'), ((3408, 3447), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['frozen_model_file', '"""wb"""'], {}), "(frozen_model_file, 'wb')\n", (3422, 3447), True, 'import tensorflow as tf\n'), ((4722, 4740), 'numpy.array', 'np.array', (['[tf_out]'], {}), '([tf_out])\n', (4730, 4740), True, 'import numpy as np\n'), ((7256, 7280), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (7270, 7280), True, 'import numpy as np\n'), ((7517, 7541), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (7531, 7541), True, 'import numpy as np\n'), ((7791, 7815), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (7805, 7815), True, 'import numpy as np\n'), ((8205, 8229), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (8219, 8229), True, 'import numpy as np\n'), ((8675, 8699), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (8689, 8699), True, 'import numpy as np\n'), ((9175, 9199), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (9189, 9199), True, 'import numpy as np\n'), ((9622, 9646), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (9636, 9646), True, 'import numpy as np\n'), ((10071, 10095), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (10085, 10095), True, 'import numpy as np\n'), ((10574, 10598), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (10588, 10598), True, 'import numpy as np\n'), ((11720, 11744), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (11734, 11744), True, 'import numpy as np\n'), ((12338, 12362), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (12352, 12362), True, 'import numpy as np\n'), ((13002, 13026), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (13016, 13026), True, 
'import numpy as np\n'), ((13587, 13611), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (13601, 13611), True, 'import numpy as np\n'), ((14174, 14198), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (14188, 14198), True, 'import numpy as np\n'), ((14803, 14827), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (14817, 14827), True, 'import numpy as np\n'), ((15466, 15490), 'numpy.random.rand', 'np.random.rand', (['*w.shape'], {}), '(*w.shape)\n', (15480, 15490), True, 'import numpy as np\n')] |
# Third party imports
from django.conf.urls import url, include
from rest_framework import routers
from django.contrib import admin
# Pepper imports
from pepper.facebook.api import UserViewSet, GroupViewSet
# Relative imports
from . import api_urls
# DRF router exposing the default browsable user/group endpoints.
router = routers.DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'groups', GroupViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
# NOTE: pattern order matters — Django resolves top-down, and the
# router is mounted at the empty prefix, so it is tried first.
urlpatterns = [
    # Default API, subjected to change in case of custom 'users' app
    url('', include(router.urls)),
    # Admin
    url('admin/', admin.site.urls),
    # Rest API
    url(r'^api/', include(api_urls)),
    # Session login/logout for the browsable API (namespaced 'facebook')
    url('api-auth/', include('rest_framework.urls', namespace='facebook')),
]
| [
"django.conf.urls.include",
"django.conf.urls.url",
"rest_framework.routers.DefaultRouter"
] | [((294, 317), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (315, 317), False, 'from rest_framework import routers\n'), ((644, 674), 'django.conf.urls.url', 'url', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (647, 674), False, 'from django.conf.urls import url, include\n'), ((604, 624), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (611, 624), False, 'from django.conf.urls import url, include\n'), ((710, 727), 'django.conf.urls.include', 'include', (['api_urls'], {}), '(api_urls)\n', (717, 727), False, 'from django.conf.urls import url, include\n'), ((752, 804), 'django.conf.urls.include', 'include', (['"""rest_framework.urls"""'], {'namespace': '"""facebook"""'}), "('rest_framework.urls', namespace='facebook')\n", (759, 804), False, 'from django.conf.urls import url, include\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Blend two text corpora into one Markov model and print random sentences."""
import markovify
import glob

# State size 4 is the sweet spot: 2-3 add more nonsense, while 5 would
# eliminate cross-corpora sentences entirely.
STATE_SIZE = 4
SENTENCES = 1000


def _load_corpus(pattern):
    """Build one markovify model per text file matching *pattern*."""
    corpus_models = []
    for path in glob.glob(pattern):
        print('Loading file:', path)
        with open(path) as handle:
            corpus_models.append(
                markovify.Text(handle.read(), state_size=STATE_SIZE))
    return corpus_models


models = _load_corpus("corpus-programming/*.txt") + _load_corpus("corpus-veda/*.txt")
model = markovify.combine(models)

# Keep sampling until SENTENCES valid sentences were produced;
# make_sentence() returns None when it fails to build one.
generated = 0
while generated < SENTENCES:
    sentence = model.make_sentence()
    if sentence is None:
        continue
    generated += 1
    print(sentence)
| [
"markovify.combine",
"glob.glob"
] | [((243, 280), 'glob.glob', 'glob.glob', (['"""corpus-programming/*.txt"""'], {}), "('corpus-programming/*.txt')\n", (252, 280), False, 'import glob\n'), ((437, 467), 'glob.glob', 'glob.glob', (['"""corpus-veda/*.txt"""'], {}), "('corpus-veda/*.txt')\n", (446, 467), False, 'import glob\n'), ((616, 641), 'markovify.combine', 'markovify.combine', (['models'], {}), '(models)\n', (633, 641), False, 'import markovify\n')] |
"""
Graph exploration module.
"""
import itertools
import time
from qas.wikidata import Wikidata, NoSPARQLResponse
MAX_PATH_LENGTH = 5
DISABLE_PARALLEL = True
RETRY_PARALLEL_SPARQL = False
class Path(object):
    """A concrete path through the Wikidata graph between two items.

    ``path`` alternates property ids and item ids (e.g. ``['P31', 'Q5',
    'P106', ...]``); full ``.../entity/statement/...`` URIs may appear in
    between and are not counted as hops.  ``config`` carries one direction
    flag per property: 0 = forward triple, 1 = reversed triple.
    ``item_from`` / ``item_to`` are the endpoint entities (may be None);
    # assumes each exposes .wikidata_item.item_id — TODO confirm upstream.
    """
    def __init__(self, path, config, item_from, item_to):
        # Nominal length: one hop per (property, item) pair.
        self.length = len(path) // 2 + 1
        # Statement nodes are artifacts of qualified claims — subtract them
        # so length reflects actual hops.
        for element in path:
            if element.startswith('http://www.wikidata.org/entity/statement/'):
                self.length -= 1
        self.path = path
        self.config = config
        self.item_from = item_from
        self.item_to = item_to
    def __str__(self):
        """Render as '@length [Qfrom] = P => Q ... [Qto]' with arrows per direction."""
        if self.item_from is not None:
            text_from = "[{}]".format(self.item_from.wikidata_item.item_id)
        else:
            text_from = "[__]"
        if self.item_to is not None:
            text_to = "[{}]".format(self.item_to.wikidata_item.item_id)
        else:
            text_to = "[__]"
        nodes = []
        for idx, node in enumerate(self.path):
            if idx % 2 == 0:  # even positions hold properties
                prop_direction = self.config[int(idx/2)]
                if prop_direction == 0:
                    nodes.append("= {} =>".format(node))
                else:
                    nodes.append("<= {} =".format(node))
            else:  # odd positions hold items
                nodes.append("{}".format(node))
        nodes.insert(0, text_from)
        nodes.append(text_to)
        return "@{} ".format(self.length) + " ".join(nodes)
    def is_symetric(self):
        """Return True if any node repeats in the path (degenerate loop)."""
        return len(set(self.path)) != len(self.path)
    def is_pp(self):
        """Return True if two consecutive path elements both start with 'P'.

        Such property-property adjacency marks statement-mediated pathes
        that are filtered out by ``Graph.filter_pathes``.
        """
        item_types = [item[0] for item in self.path]
        for idx in range(1, len(item_types)):
            if item_types[idx] == item_types[idx-1] == "P":
                return True
        return False
    def is_similar(self, other):
        """Return True if any position-aligned element equals *other*'s."""
        for item_a, item_b in zip(self.path, other.path):
            if item_a == item_b:
                return True
        return False
    def similatiy_to_others(self, others):
        """Count, per property position, how many *others* share that property."""
        # One counter per property slot (even positions of ``path``).
        similatiy = [0] * (len(self.path) // 2 + 1)
        for other in others:
            for idx, item_a, item_b in zip(range(len(self.path)),
                                           self.path,
                                           other.path):
                if idx % 2 == 0:
                    pos = idx // 2
                    if item_a == item_b:
                        similatiy[pos] += 1
        return similatiy
    @property
    def items(self):
        """Item ids ('Q...') on the path, excluding properties and statements."""
        return [element
                for element in self.path
                if element.startswith('Q')]
    def construct_sparql(self):
        """Build triples with variable endpoints and variable intermediates.

        Returns ``(from_var, to_var, triples)`` where the variables are
        named ``?itemN`` by position in the statement-free path.
        """
        # Drop statement URIs; keep (original index, node) pairs so the
        # property/item alternation (even index = property) survives.
        path_wo_statements = [(idx, element)
                              for (idx, element) in enumerate(self.path)
                              if element.startswith('Q') or
                              element.startswith('P')]
        triples = ""
        from_item = None
        to_item = None
        for real_idx, (idx, node) in enumerate(path_wo_statements):
            if idx % 2 == 0:  # property node
                prop_direction = self.config[int(idx/2)]
                item_before = "?item{}".format(real_idx)
                if from_item is None:
                    from_item = item_before
                item_after = "?item{}".format(real_idx+2)
                to_item = item_after
                template = "{} wdt:{} {} .\n"
                # Swap subject/object when the hop was traversed backwards.
                triples += template.format(
                    item_before if prop_direction == 0 else item_after,
                    node,
                    item_after if prop_direction == 0 else item_before,
                )
        return from_item, to_item, triples
    def substitutes(self, strict=False):
        """Query Wikidata for other (question, answer) pairs fitting this path.

        Returns ``(count, pairs)``; ``(None, [])`` when SPARQL fails.
        With ``strict=True`` the intermediate items are pinned to the
        concrete items of this path.
        """
        if strict:
            from_item, to_item, triples = self.construct_sparql_strict()
        else:
            from_item, to_item, triples = self.construct_sparql()
        template = """
        SELECT {} {}Label {} {}Label WHERE {{
        {}
        SERVICE wikibase:label {{ bd:serviceParam wikibase:language "en" }}
        }} LIMIT 500
        """
        query = template.format(from_item,
                                from_item,
                                to_item,
                                to_item,
                                triples)
        try:
            response = Wikidata.sparql(query)
        except NoSPARQLResponse:
            return None, []
        count = len(response['results']['bindings'])
        substitutes = []
        for path in response['results']['bindings']:
            # from_item/to_item are '?name' variables; strip the '?' to
            # address the SPARQL JSON binding keys.
            question = path[from_item[1:]+'Label']['value']
            answer = path[to_item[1:]+'Label']['value']
            substitutes.append((question, answer, ))
        return count, substitutes
    def apply_path(self, from_item):
        """Fix the start item and query Wikidata for the reachable answers.

        Returns ``(count, labels)``; ``(None, [])`` when SPARQL fails.
        """
        from_item = "wd:{}".format(from_item)
        _, to_item, triples = self.construct_sparql()
        template = """
        SELECT {} {}Label WHERE {{
        {}
        SERVICE wikibase:label {{ bd:serviceParam wikibase:language "en" }}
        }} LIMIT 5
        """
        query = template.format(to_item,
                                to_item,
                                triples)
        # ?item0 is always the start variable from construct_sparql;
        # substitute the concrete item into the generated triples.
        query = query.replace("?item0", from_item)
        try:
            response = Wikidata.sparql(query)
        except NoSPARQLResponse:
            return None, []
        count = len(response['results']['bindings'])
        answers = []
        for path in response['results']['bindings']:
            answer = path[to_item[1:]+'Label']['value']
            answers.append(answer)
        return count, answers
    def construct_sparql_strict(self):
        """Like ``construct_sparql`` but pins intermediates to concrete items.

        Only the endpoints stay variable (``?from`` / ``?to``); every other
        node is the literal ``wd:Q...`` from this path.
        """
        path_wo_statements = [(idx, element)
                              for (idx, element) in enumerate(self.path)
                              if element.startswith('Q') or
                              element.startswith('P')]
        triples = ""
        from_item = "?from"
        to_item = "?to"
        for real_idx, (idx, node) in enumerate(path_wo_statements):
            if idx % 2 == 0:  # property node
                prop_direction = self.config[int(idx/2)]
                # First element: use the ?from variable, otherwise the
                # concrete preceding item.
                if (real_idx - 1) == -1:
                    item_before = from_item
                else:
                    item_before = "wd:{}".format(
                        path_wo_statements[real_idx-1][1])
                # Last element: use the ?to variable, otherwise the
                # concrete following item.
                if (real_idx + 1) == len(path_wo_statements):
                    item_after = to_item
                else:
                    item_after = "wd:{}".format(
                        path_wo_statements[real_idx+1][1])
                template = "{} wdt:{} {} .\n"
                triples += template.format(
                    item_before if prop_direction == 0 else item_after,
                    node,
                    item_after if prop_direction == 0 else item_before,
                )
        return from_item, to_item, triples
    def pp_links(self):
        """Pretty-print wikidata.org URLs for every item/property on the path."""
        result = "-----------------\n"
        result += " Links to items: \n"
        result += "-----------------\n"
        for element in self.path:
            if element.startswith('Q'):
                result += "https://www.wikidata.org/wiki/{}".format(
                    element) + '\n'
            elif element.startswith('P'):
                result += "https://www.wikidata.org/wiki/Property:{}".format(
                    element) + '\n'
            else:
                # Statement URIs are already full URLs; print verbatim.
                result += element + '\n'
        # Drop the trailing newline.
        return result[:-1]
class Graph():
def __init__(self, labeled_enitites):
self.entities = {}
for label, entity in labeled_enitites:
if label not in self.entities:
self.entities[label] = []
# filter entities without assigned items (too filtered)
if len(entity.items):
self.entities[label].append(entity)
def construct_query(self, config, item_from, item_to):
length = len(config)
select = []
for idx in range(1, length+1):
select.append("?prop{}".format(idx))
if idx != len(config):
select.append("?item{}".format(idx+1))
select = " ".join(select)
# print(select)
statement = ""
for idx in range(1, length+1):
subject = "?item{}".format(idx)
predicate = "?prop{}".format(idx)
object_ = "?item{}".format(idx+1)
if idx == 1:
subject = "wd:{}".format(item_from.wikidata_item.item_id)
if idx == length:
object_ = "wd:{}".format(item_to.wikidata_item.item_id)
line = "{} {} {}.\n"
if config[idx-1] == 0:
statement += line.format(subject, predicate, object_)
else:
statement += line.format(object_, predicate, subject)
# print(statement)
filters = ""
for idx, element in enumerate(select.split(" ")):
startswith = 'http://www.wikidata.org/prop/' \
if (idx % 2) == 0 else \
'http://www.wikidata.org/entity/'
filter_template = 'FILTER ( strstarts(str({}), "{}") )\n'
filters += filter_template.format(element, startswith)
# TODO: Filters
query = 'SELECT {}\nWHERE {{\n{}\n{}\nSERVICE wikibase:label {{ bd:serviceParam wikibase:language "en"}}\n}}'
return query.format(select, statement, filters)
@staticmethod
def process_response(response):
if len(response['results']['bindings']) == 0:
return []
fields = response['head']['vars']
pathes = []
for result in response['results']['bindings']:
path = []
for field in fields:
item = result[field]['value']
# .split('/')[-1]
if item.startswith('http://www.wikidata.org/prop/') and \
not item.startswith('http://www.wikidata.org/prop/statement/'):
item = item.split('/')[-1]
if item.startswith('http://www.wikidata.org/entity/') and \
not item.startswith('http://www.wikidata.org/entity/statement/'):
item = item.split('/')[-1]
path.append(item)
pathes.append(path)
return pathes
@staticmethod
def filter_pathes(pathes):
pathes = [path
for path in pathes
if not path.is_symetric()]
if len([path
for path in pathes
if path.is_symetric()]):
print("FILTERED SYMMETRICAL:",
[path
for path in pathes
if path.is_symetric()])
pathes = [path
for path in pathes
if not path.is_pp()]
return pathes
@staticmethod
def extract_shared(solutions):
items = {}
for key, pathes in solutions.items():
items[key] = []
for path in pathes:
items[key] += path.items
items[key] = list(set(items[key]))
print(items)
score = {}
for key1, value1 in items.items():
for key2, value2 in items.items():
if key1 != key2:
for item1 in value1:
if item1 not in score:
score[item1] = 0
for item2 in value2:
if item1 == item2:
score[item1] += 1
print(score)
results = list(score.items())
if len(results) == 0:
return results
max_score = max([result[1] for result in results])
results = [result
for result in results
if result[1] == max_score]
return results
@staticmethod
def get_directions(length):
list(itertools.product(range(2), repeat=3))
def skip_direction(self, path_length, direction):
# if solution for direction is found
# skip this direction at length more than
# min length + 1 (to include deductive)
if frozenset(direction) in self.solutions:
pathes = self.solutions[frozenset(direction)]
min_length = min([path.length for path in pathes])
if path_length <= (min_length + 1):
print("Solution found, last level attempt.")
return False # do NOT skip
else:
print("Solution found. {} -> {}".format(
direction[0], direction[1]))
return True
def items_comb(self, direction):
# create sets of items
set_from = []
for entity in self.entities[direction[0]]:
set_from += entity.items
set_to = []
for entity in self.entities[direction[1]]:
set_to += entity.items
# for each possible direction between items
return list(itertools.product(set_from, set_to))
@staticmethod
def dir_comb(path_length):
return list(itertools.product(range(2), repeat=path_length))
@staticmethod
def pp_link_config(link_config):
result = "{ "
for direction in link_config:
if direction == 0:
result += "-> "
else:
result += "<- "
return result + "}"
def path_comb(self, direction, path_length):
return zip(self.items_comb(direction), self.dir_comb(path_length))
    def connect(self, *labels, interrupt="first"):
        """Search the knowledge graph for paths connecting the given labels.

        For every unordered pair of labels, and for increasing path lengths
        up to MAX_PATH_LENGTH, SPARQL queries are issued (optionally in
        parallel) for every item pair and link-direction configuration;
        surviving paths are accumulated in ``self.solutions`` keyed by
        ``frozenset(direction)`` and returned.

        NOTE(review): the ``interrupt`` parameter is never read in this
        body -- confirm whether it is still needed.
        """
        print("==== CONNECTION OVER GRAPH ====")
        # directions of search
        # for a basic example: [['question', 'answer']]
        directions = [list(pair)
                      for pair in itertools.combinations(labels, 2)]
        # dictionary for final solutions
        # frozenset is a key, path is a value
        self.solutions = {}
        timeout = None
        # for path length until maximum
        path_length_at_times = []
        for path_length in range(1, MAX_PATH_LENGTH):
            # save processing time measure
            path_length_at_times.append(time.time())
            if timeout is not None:
                # grow the timeout budget for the next (longer) level
                timeout = (timeout + 5.0) ** 2
            # optimization step, async SPARQL querying
            if not DISABLE_PARALLEL:
                sparql_queries = []
                for direction in directions:
                    if self.skip_direction(path_length, direction):
                        continue
                    for (item_from, item_to), link_config in \
                            self.path_comb(direction, path_length):
                        query = self.construct_query(link_config,
                                                     item_from,
                                                     item_to)
                        sparql_queries.append(query)
                print("Timeout for path length", path_length, ":", timeout)
                sparql_responses, timeout = Wikidata.sparql_parallel(
                    sparql_queries,
                    timeout=timeout)
                print("Elapsed at path length", path_length, ":", timeout)
            else:
                # print("parallel querying is disabled")
                sparql_responses, timeout = {}, None
            # for direction between labels (question -> answer)
            for direction in directions:
                print("Length: {}, Labels: {} -> {}:".format(
                    path_length, direction[0], direction[1]))
                if self.skip_direction(path_length, direction):
                    continue
                pathes_at_length = []
                for (item_from, item_to), link_config in \
                        self.path_comb(direction, path_length):
                    query = self.construct_query(link_config, item_from, item_to)
                    response = None
                    # use preloaded parallel results
                    if query in sparql_responses:
                        response = sparql_responses[query]
                    if response is None:
                        # parallel run gave nothing; optionally retry serially
                        if RETRY_PARALLEL_SPARQL or DISABLE_PARALLEL:
                            try:
                                response = Wikidata.sparql(query)
                            except NoSPARQLResponse:
                                print("RTRETIME @",
                                      self.pp_link_config(link_config))
                                continue
                        else:
                            print("PARNONE @",
                                  self.pp_link_config(link_config))
                            continue
                    else:
                        # NOTE(review): the query is re-issued serially even
                        # though a parallel response exists -- confirm intended.
                        try:
                            response = Wikidata.sparql(query)
                        except NoSPARQLResponse:
                            print("TIMEOUT @",
                                  self.pp_link_config(link_config))
                            continue
                    pathes = self.process_response(response)
                    pathes = [Path(path, link_config, item_from, item_to)
                              for path in pathes]
                    pathes = self.filter_pathes(pathes)
                    if len(pathes) == 0:
                        print("NO_CONN @",
                              self.pp_link_config(link_config))
                        continue
                    print("SUCCESS @",
                          self.pp_link_config(link_config))
                    if len(pathes) <= 3:
                        for path in pathes:
                            print(path)
                    else:
                        print("[ ... {} paths found ... ]".format(
                            len(pathes)))
                    pathes_at_length += pathes
                if len(pathes_at_length):
                    if frozenset(direction) in self.solutions:
                        self.solutions[frozenset(direction)] += pathes_at_length
                    else:
                        self.solutions[frozenset(direction)] = pathes_at_length
        # print processing time info
        path_length_at_times.append(time.time())
        print("-" * 20)
        for idx, timestamp in list(enumerate(path_length_at_times))[1:]:
            processing_time = timestamp - path_length_at_times[idx-1]
            print('TIME AT LENGTH {}: {:.4f}'.format(idx, processing_time, ))
        for direction, pathes in self.solutions.items():
            # print(direction)
            # NOTE(review): this shortest-path filtering rebinds the local
            # 'pathes' only -- the result is never written back into
            # self.solutions, so the loop has no effect.  Confirm intent.
            min_length = min([path.length for path in pathes])
            pathes = [path
                      for path in pathes
                      if path.length == min_length]
            # pathes = sorted(pathes, key=lambda x: x.length)
            # for path in pathes:
            #     print(path)
        # print(self.solutions)
        return self.solutions
# print("==== RESULTS ====")
# results = self.extract_shared(solutions)
# print(results)
# for query_length in range(6):
# for
# res = Wikidata.sparql(query)
# print(res)
@staticmethod
def evaluate_solutions(solutions):
k = 3
pathes = []
for _, solution_pathes in solutions.items():
pathes += solution_pathes
directions = []
for path in pathes:
directions.append((path.item_from.wikidata_item.item_id,
path.item_to.wikidata_item.item_id, ))
directions = list(set(directions)) # unique directions
evaluated_pathes = []
for direction in directions:
item_from, item_to = direction
pathes_for_direction = \
[path
for path in pathes
if path.item_from.wikidata_item.item_id == item_from and
path.item_to.wikidata_item.item_id == item_to]
# filter not shortest pathes
min_length = min([path.length for path in pathes_for_direction])
pathes_for_direction = [path
for path in pathes_for_direction
if path.length == min_length]
# calculate score
if len(pathes_for_direction) == 1:
score = 1.0 / float(k ** min_length)
evaluated_pathes.append((score, pathes_for_direction[0], ))
continue
else:
for idx1, path1 in enumerate(pathes_for_direction):
other_pathes = [
path2
for idx2, path2 in enumerate(pathes_for_direction)
if idx1 != idx2]
occurance_score = path1.similatiy_to_others(other_pathes)
avg_occurance = sum(occurance_score) / \
float(len(occurance_score))
score = 1.0 / float(k ** min_length) / (avg_occurance + 1)
evaluated_pathes.append((score, path1, ))
evaluated_pathes = sorted(evaluated_pathes, key=lambda x: x[0])
return evaluated_pathes
| [
"qas.wikidata.Wikidata.sparql",
"itertools.product",
"itertools.combinations",
"qas.wikidata.Wikidata.sparql_parallel",
"time.time"
] | [((4410, 4432), 'qas.wikidata.Wikidata.sparql', 'Wikidata.sparql', (['query'], {}), '(query)\n', (4425, 4432), False, 'from qas.wikidata import Wikidata, NoSPARQLResponse\n'), ((5383, 5405), 'qas.wikidata.Wikidata.sparql', 'Wikidata.sparql', (['query'], {}), '(query)\n', (5398, 5405), False, 'from qas.wikidata import Wikidata, NoSPARQLResponse\n'), ((13134, 13169), 'itertools.product', 'itertools.product', (['set_from', 'set_to'], {}), '(set_from, set_to)\n', (13151, 13169), False, 'import itertools\n'), ((18421, 18432), 'time.time', 'time.time', ([], {}), '()\n', (18430, 18432), False, 'import time\n'), ((13929, 13962), 'itertools.combinations', 'itertools.combinations', (['labels', '(2)'], {}), '(labels, 2)\n', (13951, 13962), False, 'import itertools\n'), ((14315, 14326), 'time.time', 'time.time', ([], {}), '()\n', (14324, 14326), False, 'import time\n'), ((15183, 15240), 'qas.wikidata.Wikidata.sparql_parallel', 'Wikidata.sparql_parallel', (['sparql_queries'], {'timeout': 'timeout'}), '(sparql_queries, timeout=timeout)\n', (15207, 15240), False, 'from qas.wikidata import Wikidata, NoSPARQLResponse\n'), ((17004, 17026), 'qas.wikidata.Wikidata.sparql', 'Wikidata.sparql', (['query'], {}), '(query)\n', (17019, 17026), False, 'from qas.wikidata import Wikidata, NoSPARQLResponse\n'), ((16455, 16477), 'qas.wikidata.Wikidata.sparql', 'Wikidata.sparql', (['query'], {}), '(query)\n', (16470, 16477), False, 'from qas.wikidata import Wikidata, NoSPARQLResponse\n')] |
from copy import deepcopy
import os
import uuid
import pyparsing
from lxml import etree
from resources.sbtabpy import modelSystem
OUTPUT_NAME = "test_model.xml"
######################
######################
##
## Utility Functions
##
######################
######################
def genID():
    """Return a fresh random UUID4 as a string (used for SBML metaids)."""
    fresh = uuid.uuid4()
    return str(fresh)
######################
######################
##
## Load tsv files
##
######################
######################
# Load all curated TSV tables, then collect every gene id referenced by at
# least one reaction's boolean gene association ("(g1 and g2) or g3" style).
compiler = modelSystem()
compiler.load_folder("resources","tsv")
active_gene_list = []
for key,val in compiler.tables.get("Reaction").data.items():
    # Tokenise the association; parentheses and the and/or keywords are
    # stripped so only bare gene identifiers remain.
    genes = val["!GeneAssociation"].split(" ")
    genes = [i.replace("(","").replace(")","") for i in genes]
    while "and" in genes:
        genes.remove("and")
    while "or" in genes:
        genes.remove("or")
    active_gene_list.extend(genes)
active_gene_list = set(active_gene_list)  # deduplicate across reactions
print(len(active_gene_list))
######################
######################
##
## Build Model
##
######################
######################
# Open the output file and declare every XML namespace used by the SBML
# Level 3 document (fbc + groups packages, plus RDF/vCard/Dublin Core for
# the annotation blocks).
output_model = open(OUTPUT_NAME,"wb")
xmlns = "http://www.sbml.org/sbml/level3/version1/core"
fbc="http://www.sbml.org/sbml/level3/version1/fbc/version2"
groups="http://www.sbml.org/sbml/level3/version1/groups/version1"
xhtml="http://www.w3.org/1999/xhtml"
rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
dc="http://purl.org/dc/elements/1.1/"
vCard="http://www.w3.org/2001/vcard-rdf/3.0#"
dcterms="http://purl.org/dc/terms/"
bqbiol="http://biomodels.net/biology-qualifiers/"
# None maps the default (unprefixed) namespace to the SBML core.
NS_MAP = {
    'fbc': fbc,
    'groups':groups,
    'xhtml':xhtml,
    'rdf':rdf,
    'dc':dc,
    'vCard':vCard,
    'dcterms':dcterms,
    'bqbiol':bqbiol,
    None: xmlns}
# Root <sbml> element; the fbc/groups packages are declared non-required.
sbml = etree.Element("sbml",metaid=genID(),attrib={"{%s}"%fbc+"required":"false","{%s}"%groups+"required":"false"},nsmap=NS_MAP)
other_attribs = {
    "level":"3",
    "version":"1",
    }
for key,val in other_attribs.items():
    sbml.set(key,val)
# <model> element plus a human-readable note and the curator annotation
# block (one RDF vCard entry per row of the Curator table).
model = etree.SubElement(sbml,"model",id="WormJamTestBuild",attrib={"{%s}"%fbc+"strict":"false"},metaid=genID(),name="WormJam Draft Model")
model_notes = etree.SubElement(model,"notes")
model_notes_desc = etree.SubElement(model_notes,"{%s}"%xhtml+"p")
model_notes_desc.text="Genome Scale Model of the organism Caenorhabditis elegans"
#
# curators
#
model_annotation = etree.SubElement(model,"annotation")
model_annotation_RDF = etree.SubElement(model_annotation,"{%s}"%rdf+"RDF")
model_annotation_RDF_description_DC_bag = etree.SubElement(etree.SubElement(etree.SubElement(model_annotation_RDF,"{%s}"%rdf+"Description",attrib={"{%s}"%rdf+"about":"#"+model.get("metaid")}),"{%s}"%dc+"creator"),"{%s}"%rdf+"Bag")
for key,val in compiler.tables.get("Curator").data.items():
    # One rdf:li per curator, carrying a vCard (name, email, organisation).
    rdf_li = etree.SubElement(model_annotation_RDF_description_DC_bag,"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"parseType":"Resource"})
    vCard_N = etree.SubElement(rdf_li,"{%s}"%vCard+"N",attrib={"{%s}"%rdf+"parseType":"Resource"})
    etree.SubElement(vCard_N,"{%s}"%vCard+"Family").text = val["!family-name"]
    etree.SubElement(vCard_N,"{%s}"%vCard+"Given").text = val["!given-name"]
    etree.SubElement(rdf_li,"{%s}"%vCard+"EMAIL").text = val["!email"]
    vCard_ORG = etree.SubElement(rdf_li,"{%s}"%vCard+"ORG",attrib={"{%s}"%rdf+"parseType":"Resource"})
    etree.SubElement(vCard_ORG,"{%s}"%vCard+"Orgname").text = val["!organization-name"]
#
# genes
# I should add a gene filter here probably to prevent export of EVERY gene in the model
#
# fbc:listOfGeneProducts -- only genes referenced by some reaction
# (active_gene_list) are exported, each annotated with its WormBase id
# and optional GO process terms.
model_listOfGeneProducts = etree.SubElement(model,"{%s}"%fbc+"listOfGeneProducts")
for key,val in compiler.tables.get("Gene").data.items():
    if key in active_gene_list:
        attribs = {
            "{%s}"%fbc+"id":"G_"+key,
            "{%s}"%fbc+"label":key,
            "{%s}"%fbc+"name":val["!Locus"],
            "metaid":genID()
        }
        fbc_gene_prod = etree.SubElement(model_listOfGeneProducts,"{%s}"%fbc+"geneProduct",attrib=attribs)
        annotation = etree.SubElement(fbc_gene_prod,"annotation")
        rdf_RDF = etree.SubElement(annotation,"{%s}"%rdf+"RDF")
        rdf_desc = etree.SubElement(rdf_RDF,"{%s}"%rdf+"Description",attrib={"{%s}"%rdf+"about":"#"+attribs["metaid"]})
        # bqbiol:is -> WormBase identifier for this gene.
        rdf_bag_and_bqbio_is = etree.SubElement(etree.SubElement(etree.SubElement(rdf_desc,"{%s}"%bqbiol+"is"),"{%s}"%rdf+"Bag"),"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"resource":"http://identifiers.org/wormbase/"+key})
        if val["!GO_process"] != "":
            # bqbiol:occursIn -> one rdf:li per semicolon-separated GO term.
            rdf_bqbiol_occurs_in_bag = etree.SubElement(etree.SubElement(rdf_desc,"{%s}"%bqbiol+"occursIn"),"{%s}"%rdf+"Bag")
            for i in val["!GO_process"].split(";"):
                etree.SubElement(rdf_bqbiol_occurs_in_bag,"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"resource":"http://identifiers.org/go/"+i})
#
# Pathways
#
# groups:listOfGroups -- one partonomy group per pathway, annotated with
# GO / KEGG / BioCyc / PW identifiers and listing its member reactions.
group_tree = etree.SubElement(model,"{%s}"%groups+"listOfGroups")
for key,val in compiler.tables.get("Pathway").data.items():
    attribs = {
        "{%s}"%groups+"id":"P_"+key.replace(" ","_"),
        "{%s}"%groups+"kind":"partonomy",
        "{%s}"%groups+"name":key,
        "metaid":genID()
    }
    groups_group = etree.SubElement(group_tree,"{%s}"%groups+"group",attrib=attribs)
    # Pair each identifier column with its identifiers.org prefix and
    # expand semicolon-separated values into full resource URLs.
    descriptors = [val["!Identifiers:GO_process"],val["!Identifiers:kegg:pathway"],val["!Identifiers:BioCyc"],val["!Identifiers:pw"]]
    links = ["http://identifiers.org/go/","http://identifiers.org/kegg:","http://identifiers.org/biocyc/","http://identifiers.org/pw/"]
    merge = zip(links,descriptors)
    new = []
    for i in merge:
        if i[1] != "":
            ids = i[1].replace(" ","").split(";")
            ids = [i[0] + j for j in ids]
            new += ids
    if new != []:
        annotation = etree.SubElement(groups_group,"annotation")
        rdf_desc = etree.SubElement(etree.SubElement(annotation,"{%s}"%rdf+"RDF"),"{%s}"%rdf+"Description",attrib={"{%s}"%rdf+"about":"#"+attribs["metaid"]})
        is_bag = etree.SubElement(etree.SubElement(rdf_desc,"{%s}"%bqbiol+"is"),"{%s}"%rdf+"Bag")
        for i in new:
            etree.SubElement(is_bag,"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"resource":i})
    # Every reaction whose !Pathway column matches this pathway name.
    listOfMembers = [rxn for rxn,info in compiler.tables.get("Reaction").data.items() if info["!Pathway"] == key]
    group_listOfMemebers = etree.SubElement(groups_group,"{%s}"%groups+"listOfMembers")
    for i in listOfMembers:
        etree.SubElement(group_listOfMemebers,"{%s}"%groups+"member",attrib={"{%s}"%groups+"id":"GM_"+i,"{%s}"%groups+"idRef":i})
#
# Compartments
#
# listOfCompartments -- one fixed-size 3D compartment per table row, with
# an optional free-text note and optional GO cellular-component annotation.
compartment_tree = etree.SubElement(model,"listOfCompartments")
for key,val in compiler.tables.get("Compartment").data.items():
    metaid = genID()
    compartment = etree.SubElement(compartment_tree,"compartment",attrib={"constant":"true","id":key,"metaid":metaid,"name":val["!Name"],"size":"1","spatialDimensions":"3"})
    if val["!Comment"] != "":
        etree.SubElement(etree.SubElement(compartment,"notes"),"{%s}"%xhtml+"p").text = val["!Comment"]
    if val["!Identifiers:go"] != "":
        annotation = etree.SubElement(compartment,"annotation")
        rdf_desc = etree.SubElement(etree.SubElement(annotation,"{%s}"%rdf+"RDF"),"{%s}"%rdf+"Description",attrib={"{%s}"%rdf+"about":"#"+metaid})
        is_bag = etree.SubElement(etree.SubElement(rdf_desc,"{%s}"%bqbiol+"is"),"{%s}"%rdf+"Bag")
        etree.SubElement(is_bag,"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"resource":"http://identifiers.org/go/"+val["!Identifiers:go"]})
#
# Species
#
# listOfSpecies -- one species per Compound row, with fbc charge/formula
# attributes, human-readable notes, and identifier annotations.
species_tree = etree.SubElement(model,"listOfSpecies")
for key,val in compiler.tables.get("Compound").data.items():
    attribs = {
        "boundaryCondition":"false",
        "compartment":val["!Location"],
        "constant":"false",
        "{%s}"%fbc+"charge":val["!Charge"],
        "{%s}"%fbc+"chemicalFormula":val["!Formula"],
        "hasOnlySubstanceUnits":"false",
        "id":key,
        "initialConcentration":"0",
        # NOTE(review): this sets the literal string "!Name" as the species
        # name; val["!Name"] was probably intended -- confirm.
        "name":"!Name"
    }
    if attribs["{%s}"%fbc+"charge"] == "":
        attribs["{%s}"%fbc+"charge"] = "0"  # empty charge defaults to 0
    metaid = genID()
    metabolite = etree.SubElement(species_tree,"species",metaid=metaid,attrib=attribs)
    notes_body = etree.SubElement(etree.SubElement(metabolite,"notes"),"{%s}"%xhtml+"body")
    for i in [key for key in list(val.keys()) if "!Identifier" not in key]:
        if val[i]!="":
            # NOTE(review): 'key=="!Charge"' tests the loop-external species
            # id (not i), and 'val[i] == "0"' is a comparison with no
            # effect (assignment '=' likely intended) -- both look like
            # dead code; confirm.
            if key=="!Charge" and val[i]=="":
                val[i] == "0"
            etree.SubElement(notes_body,"{%s}"%xhtml+"p").text=i.replace("!","").replace("Notes:","").upper() + ": " + val[i]
    if any([val[i] for i in ["!Identifiers:chebi","!Identifiers:pubmed","!Identifiers:doi","!Identifiers:eco"] if val[i] != ""]):
        annotation_tree = etree.SubElement(etree.SubElement(etree.SubElement(metabolite,"annotation"),"{%s}"%rdf+"RDF"),"{%s}"%rdf+"Description",attrib={"{%s}"%rdf+"about":"#"+metaid})
        next_level = etree.SubElement(etree.SubElement(annotation_tree,"{%s}"%bqbiol+"is"),"{%s}"%rdf+"Bag")
        annotation_links={
            "!Identifiers:chebi":"http://identifiers.org/",
            "!Identifiers:pubmed":"http://identifiers.org/pubmed/",
            "!Identifiers:doi":"http://identifiers.org/doi/",
            "!Identifiers:eco":"http://www.evidenceontology.org/term/"
        }
        for i in ["!Identifiers:chebi","!Identifiers:pubmed","!Identifiers:doi","!Identifiers:eco"]:
            if val[i]!="":
                # NOTE(review): pubmed ids are emitted with a
                # pubchem.compound URL prefix -- confirm intended.
                if i == "!Identifiers:pubmed":
                    etree.SubElement(next_level,"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"resource":"https://identifiers.org/pubchem.compound/"+val[i]})
                else:
                    etree.SubElement(next_level,"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"resource":annotation_links[i]+val[i]})
#
# Parameters
#
# Shared flux-bound parameters referenced by every reaction:
# -1000 (reversible lower), 0 (irreversible lower), 1000 (upper).
parameter_tree = etree.SubElement(model,"listOfParameters")
etree.SubElement(parameter_tree,"parameter",attrib={"constant":"true","id":"LOWER_BOUND","value":"-1000"})
etree.SubElement(parameter_tree,"parameter",attrib={"constant":"true","id":"ZERO_BOUND","value":"0"})
etree.SubElement(parameter_tree,"parameter",attrib={"constant":"true","id":"UPPER_BOUND","value":"1000"})
#
# Reactions
#
# GPR helper functions
def genHead(parent, booltype):
    """Create and return an fbc:or / fbc:and element under *parent*.

    "or" maps to SBO:0000174; any other value yields an "and"
    element with SBO:0000173.
    """
    if booltype == "or":
        tag, sbo = "or", "SBO:0000174"
    else:
        tag, sbo = "and", "SBO:0000173"
    return etree.SubElement(parent, "{%s}" % fbc + tag, attrib={"sboTerm": sbo})
def parse(parent,my_list):
    """Build an fbc:GeneProductAssociation subtree from a parsed GPR list.

    ``my_list`` is the nested list produced by pyparsing's nestedExpr over
    a "(g1 and g2) or g3" style gene association; the and/or operator
    tokens are removed in place and the remainder is emitted as nested
    fbc:and / fbc:or / fbc:geneProductRef elements under ``parent``.
    Returns the created GeneProductAssociation element, or None for an
    empty association.  Note: mutates ``my_list`` destructively.
    """
    if my_list == []: #handle empty gene associations
        result = None
        return None
    while type(my_list) == list and len(my_list) == 1: #whilst there is a single entry in the list, unpack it
        my_list = my_list[0]
    if type(my_list) == str: #Handling single genes
        result = ("single",my_list)
    else:
        if any(type(i) == list for i in my_list): #If there are lists (nested Gene associations)
            for index,item in enumerate(my_list):
                #unpack
                if type(item) == list and len(item) == 1:
                    my_list[index] = item[0]
            # The operator at position 1 is taken as this level's bool type
            # and every occurrence of it is stripped from the list.
            types = None
            op_type = my_list[1]
            types = op_type
            while op_type in my_list:
                my_list.remove(op_type)
            for index,item in enumerate(my_list): #start diving down levels
                if type(item)==list:
                    op_type=item[1]
                    while op_type in item:
                        item.remove(op_type)
                    for index2,item2 in enumerate(item):
                        if type(item2)==list:
                            op_type2=item2[1]
                            while op_type2 in item2:
                                item2.remove(op_type2)
                            item[index2]=(op_type2,item2)
                    my_list[index] = (op_type,item)
            result = (types,my_list)
        else:
            # Flat association: single operator joining plain gene ids.
            op_type = my_list[1]
            while op_type in my_list:
                my_list.remove(op_type)
            result = (op_type,my_list)
    #create the xml tree
    gpr = etree.SubElement(parent,"{%s}"%fbc+"GeneProductAssociation")
    #simple case
    if result[0] == 'single':
        etree.SubElement(gpr,"{%s}"%fbc+"geneProductRef",attrib={"{%s}"%fbc+"geneProduct":"G_"+result[1]})
    #No nesting bool
    elif all(type(i) != tuple for i in result[1]):
        branch = genHead(gpr,result[0])
        for i in result[1]:
            etree.SubElement(branch,"{%s}"%fbc+"geneProductRef",attrib={"{%s}"%fbc+"geneProduct":"G_"+i})
    #complex case
    else:
        branch = genHead(gpr,result[0])
        for i in result[1]: #level diving
            if type(i) == tuple:
                inner = genHead(branch,i[0])
                for j in i[1]:
                    if type(j) == tuple:
                        # NOTE(review): inner2 is attached to 'branch', not
                        # 'inner' -- confirm the nesting level is intended.
                        inner2 = genHead(branch,j[0])
                        for k in j[1]:
                            etree.SubElement(inner2,"{%s}"%fbc+"geneProductRef",attrib={"{%s}"%fbc+"geneProduct":"G_"+k})
                    else:
                        etree.SubElement(inner,"{%s}"%fbc+"geneProductRef",attrib={"{%s}"%fbc+"geneProduct":"G_"+j})
            else:
                etree.SubElement(branch,"{%s}"%fbc+"geneProductRef",attrib={"{%s}"%fbc+"geneProduct":"G_"+i[1]})
    return gpr
##reaction string handling
def react_proc(rxn):
    """Split a reaction formula into (reactants, products) stoichiometry dicts.

    ``rxn`` has the form ``"2 A + B <=> C"``.  Each side is split on ``+``
    and every term is either ``"<stoich> <species>"`` or a bare species id
    (stoichiometry defaults to ``"1"``).  Numeric stoichiometries are
    normalised to their float string form (``"2"`` -> ``"2.0"``);
    non-numeric values are left untouched.

    Returns a ``(reactants, products)`` tuple of ``{species: stoich}`` dicts.
    Raises ValueError if ``rxn`` does not contain exactly one ``"<=>"``.
    """
    reactant_str, product_str = rxn.split("<=>")

    def _terms(frag):
        # "2 A + B" -> [["2", "A"], ["B"]]
        pieces = [piece.strip() for piece in frag.split("+")]
        return [piece.split(" ") for piece in pieces]

    r = _terms(reactant_str)
    p = _terms(product_str)
    # Map species -> stoichiometry; bare species default to "1".
    reactants = {(i[1] if len(i) == 2 else i[0]): (i[0] if len(i) == 2 else "1") for i in r}
    products = {(i[1] if len(i) == 2 else i[0]): (i[0] if len(i) == 2 else "1") for i in p}
    for d in (reactants, products):
        for key, val in d.items():
            try:
                d[key] = str(float(val))
            except ValueError:  # was a bare 'except:'; only numeric parse failures are expected
                pass
    return (reactants, products)
#### Actually doing the reactions
reaction_tree = etree.SubElement(model,"listOfReactions")
# Identifier columns handled via annotations; the rest of these columns are
# excluded from the free-text notes block below.
ignore = ["!Identifiers:kegg.reaction","!Identifiers:rheadb_exact","!Identifiers:rheadb_fuzzy","!Identifiers:pubmed","!Identifiers:doi","!Identifiers:eco",
          "!Authors","!ReactionFormula","!SuperPathway","!Name","!IsReversible"]
for key,val in compiler.tables.get("Reaction").data.items():
    metaid = genID()
    attribs = {
        "fast":"false",
        "reversible":val["!IsReversible"].lower(),
        "metaid":metaid,
        "id":key,
        "name":val["!Name"],
        "{%s}"%fbc+"upperFluxBound":"UPPER_BOUND"
    }
    # Reversible reactions get the -1000 lower bound, irreversible get 0.
    if attribs["reversible"] == "true":
        attribs["{%s}"%fbc+"lowerFluxBound"] = "LOWER_BOUND"
    else:
        attribs["{%s}"%fbc+"lowerFluxBound"] = "ZERO_BOUND"
    reaction_field = etree.SubElement(reaction_tree,"reaction",attrib=attribs)
    # Free-text notes for every non-ignored, non-empty column.
    notes_body = etree.SubElement(etree.SubElement(reaction_field,"notes"),"{%s}"%xhtml+"body")
    for i in [key2 for key2 in list(val.keys()) if key2 not in ignore]:
        if val[i]!="":
            etree.SubElement(notes_body,"{%s}"%xhtml+"p").text=i.replace("!","").replace("Notes:","").replace("Pathway","Subsystem").upper() + ": " + val[i]
    annotation_links={
        "!Identifiers:kegg.reaction":"http://identifiers.org/kegg:",
        "!Identifiers:pubmed":"http://identifiers.org/pubmed/",
        "!Identifiers:doi":"http://identifiers.org/doi/",
        "!Identifiers:eco":"http://www.evidenceontology.org/term/",
        "!Identifiers:rheadb_exact":"http://identifiers.org//reaction?id="
    }
    if any([val[i] for i in annotation_links if val[i] != ""]):
        annotation_tree = etree.SubElement(etree.SubElement(etree.SubElement(reaction_field,"annotation"),"{%s}"%rdf+"RDF"),"{%s}"%rdf+"Description",attrib={"{%s}"%rdf+"about":"#"+metaid})
        next_level = etree.SubElement(etree.SubElement(annotation_tree,"{%s}"%bqbiol+"is"),"{%s}"%rdf+"Bag")
        for i in list(annotation_links.keys()):
            if val[i]!="":
                for j in val[i].replace(" ","").split(";"):
                    etree.SubElement(next_level,"{%s}"%rdf+"li",attrib={"{%s}"%rdf+"resource":annotation_links[i]+j})
    # Parse the boolean gene association with pyparsing and emit the
    # fbc:GeneProductAssociation subtree; failures are logged, not fatal.
    genes = "("+val["!GeneAssociation"]+")"
    parens = pyparsing.nestedExpr( '(', ')', content=pyparsing.Word(pyparsing.alphanums) | ' or ' | " and " )
    r = parens.parseString(genes)[0].asList()
    er = deepcopy(r)  # keep a copy: parse() mutates r in place
    try:
        parse(reaction_field,r)
    except Exception as e:
        print(key,er)
        print(e)
    # Stoichiometry: skip a side entirely if it contains an empty species id.
    reactants,products = react_proc(val["!ReactionFormula"])
    if "" not in reactants:
        listOfReactants = etree.SubElement(reaction_field,"listOfReactants")
        for key2,val2 in reactants.items():
            etree.SubElement(listOfReactants,"speciesReference",attrib={"constant":"true","species":key2,"stoichiometry":val2})
    if "" not in products:
        listOfProducts = etree.SubElement(reaction_field,"listOfProducts")
        for key2,val2 in products.items():
            etree.SubElement(listOfProducts,"speciesReference",attrib={"constant":"true","species":key2,"stoichiometry":val2})
######################
######################
##
## Output
##
######################
######################
# Serialise the finished SBML document and release the file handle.
output_model.write(etree.tostring(sbml,encoding="UTF-8",standalone=False,xml_declaration=True,pretty_print=True))
output_model.close()
#######################################################################################################################
## pretty print fragment
# with open(OUTPUT_NAME,"rb") as f:
# parser = etree.XMLParser(remove_blank_text=True)
# tree = etree.parse(f, parser)
# print(etree.tostring(root,encoding="UTF-8",standalone=False,xml_declaration=True,pretty_print=True).decode())
| [
"lxml.etree.SubElement",
"pyparsing.Word",
"uuid.uuid4",
"resources.sbtabpy.modelSystem",
"copy.deepcopy",
"lxml.etree.tostring"
] | [((488, 501), 'resources.sbtabpy.modelSystem', 'modelSystem', ([], {}), '()\n', (499, 501), False, 'from resources.sbtabpy import modelSystem\n'), ((2172, 2204), 'lxml.etree.SubElement', 'etree.SubElement', (['model', '"""notes"""'], {}), "(model, 'notes')\n", (2188, 2204), False, 'from lxml import etree\n'), ((2224, 2275), 'lxml.etree.SubElement', 'etree.SubElement', (['model_notes', "('{%s}' % xhtml + 'p')"], {}), "(model_notes, '{%s}' % xhtml + 'p')\n", (2240, 2275), False, 'from lxml import etree\n'), ((2396, 2433), 'lxml.etree.SubElement', 'etree.SubElement', (['model', '"""annotation"""'], {}), "(model, 'annotation')\n", (2412, 2433), False, 'from lxml import etree\n'), ((2457, 2513), 'lxml.etree.SubElement', 'etree.SubElement', (['model_annotation', "('{%s}' % rdf + 'RDF')"], {}), "(model_annotation, '{%s}' % rdf + 'RDF')\n", (2473, 2513), False, 'from lxml import etree\n'), ((3590, 3650), 'lxml.etree.SubElement', 'etree.SubElement', (['model', "('{%s}' % fbc + 'listOfGeneProducts')"], {}), "(model, '{%s}' % fbc + 'listOfGeneProducts')\n", (3606, 3650), False, 'from lxml import etree\n'), ((4889, 4946), 'lxml.etree.SubElement', 'etree.SubElement', (['model', "('{%s}' % groups + 'listOfGroups')"], {}), "(model, '{%s}' % groups + 'listOfGroups')\n", (4905, 4946), False, 'from lxml import etree\n'), ((6620, 6665), 'lxml.etree.SubElement', 'etree.SubElement', (['model', '"""listOfCompartments"""'], {}), "(model, 'listOfCompartments')\n", (6636, 6665), False, 'from lxml import etree\n'), ((7587, 7627), 'lxml.etree.SubElement', 'etree.SubElement', (['model', '"""listOfSpecies"""'], {}), "(model, 'listOfSpecies')\n", (7603, 7627), False, 'from lxml import etree\n'), ((9888, 9931), 'lxml.etree.SubElement', 'etree.SubElement', (['model', '"""listOfParameters"""'], {}), "(model, 'listOfParameters')\n", (9904, 9931), False, 'from lxml import etree\n'), ((9932, 10049), 'lxml.etree.SubElement', 'etree.SubElement', (['parameter_tree', '"""parameter"""'], {'attrib': 
"{'constant': 'true', 'id': 'LOWER_BOUND', 'value': '-1000'}"}), "(parameter_tree, 'parameter', attrib={'constant': 'true',\n 'id': 'LOWER_BOUND', 'value': '-1000'})\n", (9948, 10049), False, 'from lxml import etree\n'), ((10040, 10152), 'lxml.etree.SubElement', 'etree.SubElement', (['parameter_tree', '"""parameter"""'], {'attrib': "{'constant': 'true', 'id': 'ZERO_BOUND', 'value': '0'}"}), "(parameter_tree, 'parameter', attrib={'constant': 'true',\n 'id': 'ZERO_BOUND', 'value': '0'})\n", (10056, 10152), False, 'from lxml import etree\n'), ((10143, 10259), 'lxml.etree.SubElement', 'etree.SubElement', (['parameter_tree', '"""parameter"""'], {'attrib': "{'constant': 'true', 'id': 'UPPER_BOUND', 'value': '1000'}"}), "(parameter_tree, 'parameter', attrib={'constant': 'true',\n 'id': 'UPPER_BOUND', 'value': '1000'})\n", (10159, 10259), False, 'from lxml import etree\n'), ((14313, 14355), 'lxml.etree.SubElement', 'etree.SubElement', (['model', '"""listOfReactions"""'], {}), "(model, 'listOfReactions')\n", (14329, 14355), False, 'from lxml import etree\n'), ((2818, 2951), 'lxml.etree.SubElement', 'etree.SubElement', (['model_annotation_RDF_description_DC_bag', "('{%s}' % rdf + 'li')"], {'attrib': "{('{%s}' % rdf + 'parseType'): 'Resource'}"}), "(model_annotation_RDF_description_DC_bag, '{%s}' % rdf +\n 'li', attrib={('{%s}' % rdf + 'parseType'): 'Resource'})\n", (2834, 2951), False, 'from lxml import etree\n'), ((2950, 3051), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_li', "('{%s}' % vCard + 'N')"], {'attrib': "{('{%s}' % rdf + 'parseType'): 'Resource'}"}), "(rdf_li, '{%s}' % vCard + 'N', attrib={('{%s}' % rdf +\n 'parseType'): 'Resource'})\n", (2966, 3051), False, 'from lxml import etree\n'), ((3282, 3385), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_li', "('{%s}' % vCard + 'ORG')"], {'attrib': "{('{%s}' % rdf + 'parseType'): 'Resource'}"}), "(rdf_li, '{%s}' % vCard + 'ORG', attrib={('{%s}' % rdf +\n 'parseType'): 'Resource'})\n", (3298, 3385), False, 
'from lxml import etree\n'), ((5208, 5279), 'lxml.etree.SubElement', 'etree.SubElement', (['group_tree', "('{%s}' % groups + 'group')"], {'attrib': 'attribs'}), "(group_tree, '{%s}' % groups + 'group', attrib=attribs)\n", (5224, 5279), False, 'from lxml import etree\n'), ((6355, 6420), 'lxml.etree.SubElement', 'etree.SubElement', (['groups_group', "('{%s}' % groups + 'listOfMembers')"], {}), "(groups_group, '{%s}' % groups + 'listOfMembers')\n", (6371, 6420), False, 'from lxml import etree\n'), ((6773, 6949), 'lxml.etree.SubElement', 'etree.SubElement', (['compartment_tree', '"""compartment"""'], {'attrib': "{'constant': 'true', 'id': key, 'metaid': metaid, 'name': val['!Name'],\n 'size': '1', 'spatialDimensions': '3'}"}), "(compartment_tree, 'compartment', attrib={'constant':\n 'true', 'id': key, 'metaid': metaid, 'name': val['!Name'], 'size': '1',\n 'spatialDimensions': '3'})\n", (6789, 6949), False, 'from lxml import etree\n'), ((8173, 8245), 'lxml.etree.SubElement', 'etree.SubElement', (['species_tree', '"""species"""'], {'metaid': 'metaid', 'attrib': 'attribs'}), "(species_tree, 'species', metaid=metaid, attrib=attribs)\n", (8189, 8245), False, 'from lxml import etree\n'), ((12298, 12363), 'lxml.etree.SubElement', 'etree.SubElement', (['parent', "('{%s}' % fbc + 'GeneProductAssociation')"], {}), "(parent, '{%s}' % fbc + 'GeneProductAssociation')\n", (12314, 12363), False, 'from lxml import etree\n'), ((15382, 15441), 'lxml.etree.SubElement', 'etree.SubElement', (['reaction_tree', '"""reaction"""'], {'attrib': 'attribs'}), "(reaction_tree, 'reaction', attrib=attribs)\n", (15398, 15441), False, 'from lxml import etree\n'), ((17009, 17020), 'copy.deepcopy', 'deepcopy', (['r'], {}), '(r)\n', (17017, 17020), False, 'from copy import deepcopy\n'), ((17909, 18011), 'lxml.etree.tostring', 'etree.tostring', (['sbml'], {'encoding': '"""UTF-8"""', 'standalone': '(False)', 'xml_declaration': '(True)', 'pretty_print': '(True)'}), "(sbml, encoding='UTF-8', standalone=False, 
xml_declaration=\n True, pretty_print=True)\n", (17923, 18011), False, 'from lxml import etree\n'), ((332, 344), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (342, 344), False, 'import uuid\n'), ((3040, 3092), 'lxml.etree.SubElement', 'etree.SubElement', (['vCard_N', "('{%s}' % vCard + 'Family')"], {}), "(vCard_N, '{%s}' % vCard + 'Family')\n", (3056, 3092), False, 'from lxml import etree\n'), ((3120, 3171), 'lxml.etree.SubElement', 'etree.SubElement', (['vCard_N', "('{%s}' % vCard + 'Given')"], {}), "(vCard_N, '{%s}' % vCard + 'Given')\n", (3136, 3171), False, 'from lxml import etree\n'), ((3198, 3248), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_li', "('{%s}' % vCard + 'EMAIL')"], {}), "(rdf_li, '{%s}' % vCard + 'EMAIL')\n", (3214, 3248), False, 'from lxml import etree\n'), ((3374, 3429), 'lxml.etree.SubElement', 'etree.SubElement', (['vCard_ORG', "('{%s}' % vCard + 'Orgname')"], {}), "(vCard_ORG, '{%s}' % vCard + 'Orgname')\n", (3390, 3429), False, 'from lxml import etree\n'), ((3948, 4040), 'lxml.etree.SubElement', 'etree.SubElement', (['model_listOfGeneProducts', "('{%s}' % fbc + 'geneProduct')"], {'attrib': 'attribs'}), "(model_listOfGeneProducts, '{%s}' % fbc + 'geneProduct',\n attrib=attribs)\n", (3964, 4040), False, 'from lxml import etree\n'), ((4053, 4098), 'lxml.etree.SubElement', 'etree.SubElement', (['fbc_gene_prod', '"""annotation"""'], {}), "(fbc_gene_prod, 'annotation')\n", (4069, 4098), False, 'from lxml import etree\n'), ((4117, 4167), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', "('{%s}' % rdf + 'RDF')"], {}), "(annotation, '{%s}' % rdf + 'RDF')\n", (4133, 4167), False, 'from lxml import etree\n'), ((4183, 4302), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_RDF', "('{%s}' % rdf + 'Description')"], {'attrib': "{('{%s}' % rdf + 'about'): '#' + attribs['metaid']}"}), "(rdf_RDF, '{%s}' % rdf + 'Description', attrib={('{%s}' %\n rdf + 'about'): '#' + attribs['metaid']})\n", (4199, 4302), False, 'from lxml import 
etree\n'), ((5800, 5844), 'lxml.etree.SubElement', 'etree.SubElement', (['groups_group', '"""annotation"""'], {}), "(groups_group, 'annotation')\n", (5816, 5844), False, 'from lxml import etree\n'), ((6454, 6603), 'lxml.etree.SubElement', 'etree.SubElement', (['group_listOfMemebers', "('{%s}' % groups + 'member')"], {'attrib': "{('{%s}' % groups + 'id'): 'GM_' + i, ('{%s}' % groups + 'idRef'): i}"}), "(group_listOfMemebers, '{%s}' % groups + 'member', attrib={\n ('{%s}' % groups + 'id'): 'GM_' + i, ('{%s}' % groups + 'idRef'): i})\n", (6470, 6603), False, 'from lxml import etree\n'), ((7125, 7168), 'lxml.etree.SubElement', 'etree.SubElement', (['compartment', '"""annotation"""'], {}), "(compartment, 'annotation')\n", (7141, 7168), False, 'from lxml import etree\n'), ((7424, 7566), 'lxml.etree.SubElement', 'etree.SubElement', (['is_bag', "('{%s}' % rdf + 'li')"], {'attrib': "{('{%s}' % rdf + 'resource'): 'http://identifiers.org/go/' + val[\n '!Identifiers:go']}"}), "(is_bag, '{%s}' % rdf + 'li', attrib={('{%s}' % rdf +\n 'resource'): 'http://identifiers.org/go/' + val['!Identifiers:go']})\n", (7440, 7566), False, 'from lxml import etree\n'), ((8278, 8315), 'lxml.etree.SubElement', 'etree.SubElement', (['metabolite', '"""notes"""'], {}), "(metabolite, 'notes')\n", (8294, 8315), False, 'from lxml import etree\n'), ((10421, 10506), 'lxml.etree.SubElement', 'etree.SubElement', (['parent', "('{%s}' % fbc + 'or')"], {'attrib': "{'sboTerm': 'SBO:0000174'}"}), "(parent, '{%s}' % fbc + 'or', attrib={'sboTerm': 'SBO:0000174'}\n )\n", (10437, 10506), False, 'from lxml import etree\n'), ((10524, 10609), 'lxml.etree.SubElement', 'etree.SubElement', (['parent', "('{%s}' % fbc + 'and')"], {'attrib': "{'sboTerm': 'SBO:0000173'}"}), "(parent, '{%s}' % fbc + 'and', attrib={'sboTerm':\n 'SBO:0000173'})\n", (10540, 10609), False, 'from lxml import etree\n'), ((12417, 12534), 'lxml.etree.SubElement', 'etree.SubElement', (['gpr', "('{%s}' % fbc + 'geneProductRef')"], {'attrib': "{('{%s}' 
% fbc + 'geneProduct'): 'G_' + result[1]}"}), "(gpr, '{%s}' % fbc + 'geneProductRef', attrib={('{%s}' %\n fbc + 'geneProduct'): 'G_' + result[1]})\n", (12433, 12534), False, 'from lxml import etree\n'), ((15475, 15516), 'lxml.etree.SubElement', 'etree.SubElement', (['reaction_field', '"""notes"""'], {}), "(reaction_field, 'notes')\n", (15491, 15516), False, 'from lxml import etree\n'), ((17257, 17308), 'lxml.etree.SubElement', 'etree.SubElement', (['reaction_field', '"""listOfReactants"""'], {}), "(reaction_field, 'listOfReactants')\n", (17273, 17308), False, 'from lxml import etree\n'), ((17543, 17593), 'lxml.etree.SubElement', 'etree.SubElement', (['reaction_field', '"""listOfProducts"""'], {}), "(reaction_field, 'listOfProducts')\n", (17559, 17593), False, 'from lxml import etree\n'), ((5881, 5931), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', "('{%s}' % rdf + 'RDF')"], {}), "(annotation, '{%s}' % rdf + 'RDF')\n", (5897, 5931), False, 'from lxml import etree\n'), ((6038, 6088), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_desc', "('{%s}' % bqbiol + 'is')"], {}), "(rdf_desc, '{%s}' % bqbiol + 'is')\n", (6054, 6088), False, 'from lxml import etree\n'), ((6138, 6228), 'lxml.etree.SubElement', 'etree.SubElement', (['is_bag', "('{%s}' % rdf + 'li')"], {'attrib': "{('{%s}' % rdf + 'resource'): i}"}), "(is_bag, '{%s}' % rdf + 'li', attrib={('{%s}' % rdf +\n 'resource'): i})\n", (6154, 6228), False, 'from lxml import etree\n'), ((7205, 7255), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', "('{%s}' % rdf + 'RDF')"], {}), "(annotation, '{%s}' % rdf + 'RDF')\n", (7221, 7255), False, 'from lxml import etree\n'), ((7351, 7401), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_desc', "('{%s}' % bqbiol + 'is')"], {}), "(rdf_desc, '{%s}' % bqbiol + 'is')\n", (7367, 7401), False, 'from lxml import etree\n'), ((8998, 9055), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation_tree', "('{%s}' % bqbiol + 'is')"], {}), "(annotation_tree, 
'{%s}' % bqbiol + 'is')\n", (9014, 9055), False, 'from lxml import etree\n'), ((16458, 16515), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation_tree', "('{%s}' % bqbiol + 'is')"], {}), "(annotation_tree, '{%s}' % bqbiol + 'is')\n", (16474, 16515), False, 'from lxml import etree\n'), ((17366, 17492), 'lxml.etree.SubElement', 'etree.SubElement', (['listOfReactants', '"""speciesReference"""'], {'attrib': "{'constant': 'true', 'species': key2, 'stoichiometry': val2}"}), "(listOfReactants, 'speciesReference', attrib={'constant':\n 'true', 'species': key2, 'stoichiometry': val2})\n", (17382, 17492), False, 'from lxml import etree\n'), ((17650, 17775), 'lxml.etree.SubElement', 'etree.SubElement', (['listOfProducts', '"""speciesReference"""'], {'attrib': "{'constant': 'true', 'species': key2, 'stoichiometry': val2}"}), "(listOfProducts, 'speciesReference', attrib={'constant':\n 'true', 'species': key2, 'stoichiometry': val2})\n", (17666, 17775), False, 'from lxml import etree\n'), ((4350, 4400), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_desc', "('{%s}' % bqbiol + 'is')"], {}), "(rdf_desc, '{%s}' % bqbiol + 'is')\n", (4366, 4400), False, 'from lxml import etree\n'), ((4596, 4652), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_desc', "('{%s}' % bqbiol + 'occursIn')"], {}), "(rdf_desc, '{%s}' % bqbiol + 'occursIn')\n", (4612, 4652), False, 'from lxml import etree\n'), ((4736, 4876), 'lxml.etree.SubElement', 'etree.SubElement', (['rdf_bqbiol_occurs_in_bag', "('{%s}' % rdf + 'li')"], {'attrib': "{('{%s}' % rdf + 'resource'): 'http://identifiers.org/go/' + i}"}), "(rdf_bqbiol_occurs_in_bag, '{%s}' % rdf + 'li', attrib={(\n '{%s}' % rdf + 'resource'): 'http://identifiers.org/go/' + i})\n", (4752, 4876), False, 'from lxml import etree\n'), ((6986, 7024), 'lxml.etree.SubElement', 'etree.SubElement', (['compartment', '"""notes"""'], {}), "(compartment, 'notes')\n", (7002, 7024), False, 'from lxml import etree\n'), ((8528, 8578), 'lxml.etree.SubElement', 
'etree.SubElement', (['notes_body', "('{%s}' % xhtml + 'p')"], {}), "(notes_body, '{%s}' % xhtml + 'p')\n", (8544, 8578), False, 'from lxml import etree\n'), ((8834, 8876), 'lxml.etree.SubElement', 'etree.SubElement', (['metabolite', '"""annotation"""'], {}), "(metabolite, 'annotation')\n", (8850, 8876), False, 'from lxml import etree\n'), ((12673, 12785), 'lxml.etree.SubElement', 'etree.SubElement', (['branch', "('{%s}' % fbc + 'geneProductRef')"], {'attrib': "{('{%s}' % fbc + 'geneProduct'): 'G_' + i}"}), "(branch, '{%s}' % fbc + 'geneProductRef', attrib={('{%s}' %\n fbc + 'geneProduct'): 'G_' + i})\n", (12689, 12785), False, 'from lxml import etree\n'), ((15647, 15697), 'lxml.etree.SubElement', 'etree.SubElement', (['notes_body', "('{%s}' % xhtml + 'p')"], {}), "(notes_body, '{%s}' % xhtml + 'p')\n", (15663, 15697), False, 'from lxml import etree\n'), ((16290, 16336), 'lxml.etree.SubElement', 'etree.SubElement', (['reaction_field', '"""annotation"""'], {}), "(reaction_field, 'annotation')\n", (16306, 16336), False, 'from lxml import etree\n'), ((9572, 9717), 'lxml.etree.SubElement', 'etree.SubElement', (['next_level', "('{%s}' % rdf + 'li')"], {'attrib': "{('{%s}' % rdf + 'resource'): 'https://identifiers.org/pubchem.compound/' +\n val[i]}"}), "(next_level, '{%s}' % rdf + 'li', attrib={('{%s}' % rdf +\n 'resource'): 'https://identifiers.org/pubchem.compound/' + val[i]})\n", (9588, 9717), False, 'from lxml import etree\n'), ((9743, 9864), 'lxml.etree.SubElement', 'etree.SubElement', (['next_level', "('{%s}' % rdf + 'li')"], {'attrib': "{('{%s}' % rdf + 'resource'): annotation_links[i] + val[i]}"}), "(next_level, '{%s}' % rdf + 'li', attrib={('{%s}' % rdf +\n 'resource'): annotation_links[i] + val[i]})\n", (9759, 9864), False, 'from lxml import etree\n'), ((13436, 13551), 'lxml.etree.SubElement', 'etree.SubElement', (['branch', "('{%s}' % fbc + 'geneProductRef')"], {'attrib': "{('{%s}' % fbc + 'geneProduct'): 'G_' + i[1]}"}), "(branch, '{%s}' % fbc + 
'geneProductRef', attrib={('{%s}' %\n fbc + 'geneProduct'): 'G_' + i[1]})\n", (13452, 13551), False, 'from lxml import etree\n'), ((16690, 16806), 'lxml.etree.SubElement', 'etree.SubElement', (['next_level', "('{%s}' % rdf + 'li')"], {'attrib': "{('{%s}' % rdf + 'resource'): annotation_links[i] + j}"}), "(next_level, '{%s}' % rdf + 'li', attrib={('{%s}' % rdf +\n 'resource'): annotation_links[i] + j})\n", (16706, 16806), False, 'from lxml import etree\n'), ((16895, 16930), 'pyparsing.Word', 'pyparsing.Word', (['pyparsing.alphanums'], {}), '(pyparsing.alphanums)\n', (16909, 16930), False, 'import pyparsing\n'), ((13305, 13416), 'lxml.etree.SubElement', 'etree.SubElement', (['inner', "('{%s}' % fbc + 'geneProductRef')"], {'attrib': "{('{%s}' % fbc + 'geneProduct'): 'G_' + j}"}), "(inner, '{%s}' % fbc + 'geneProductRef', attrib={('{%s}' %\n fbc + 'geneProduct'): 'G_' + j})\n", (13321, 13416), False, 'from lxml import etree\n'), ((13159, 13271), 'lxml.etree.SubElement', 'etree.SubElement', (['inner2', "('{%s}' % fbc + 'geneProductRef')"], {'attrib': "{('{%s}' % fbc + 'geneProduct'): 'G_' + k}"}), "(inner2, '{%s}' % fbc + 'geneProductRef', attrib={('{%s}' %\n fbc + 'geneProduct'): 'G_' + k})\n", (13175, 13271), False, 'from lxml import etree\n')] |
import asyncio
import logging
import sys
import discord
import yaml
from discord.ext import commands
from twilio.rest import Client
# Load bot settings (prefix, channel ids, Twilio credentials, ...) from YAML.
config = yaml.safe_load(open("config.yml"))
bot = commands.Bot(command_prefix=config["prefix"], intents=discord.Intents.default())
# Log to stdout so container/platform log collectors pick everything up.
logging.basicConfig(
    format="%(levelname)s | %(asctime)s | %(name)s | %(message)s",
    stream=sys.stdout,
    level=logging.INFO,
)
def create_embed(message: discord.Message, status: str, colour: int) -> discord.Embed:
    """
    Creates an embed based on the specified params.

    :param message: discord.Message object to extract content from
    :param status: Status to use in title, e.g. 'pending, dispatched, timed out'
    :param colour: Hex code to use for embed colour
    :return: discord.Embed
    """
    embed = discord.Embed(
        title=f"Urgent Notification ({status})",
        description=f"Your alert has been set to {status.lower()}.",
        colour=colour,
    )
    # Fix: field name was misspelled as "Messsage" in the user-facing embed.
    embed.add_field(name="Message", value=message.clean_content, inline=False)
    # Only a pending alert is actionable, so only it gets usage instructions.
    if status.lower() == "pending":
        embed.add_field(
            name="Actions",
            value="Please react with \N{MOBILE PHONE} to dispatch a notification.",
            inline=False,
        )
    return embed
def make_call(message: discord.Message) -> None:
    """
    Place a Twilio voice call to every configured number, speaking the alert.

    :param message: Message whose content is read out after the initial greeting
    :return: None; each initiated call's SID is logged
    """
    twilio_client = Client(config["account_sid"], config["auth_token"])
    # The TwiML payload is identical for every recipient, so build it once.
    announcement = (
        f"<Response>"
        f"<Say>"
        f"{config['initial_message']} {message.clean_content}"
        f"</Say>"
        f"</Response>"
    )
    for recipient in config["numbers"]:
        logging.info(f"Calling {recipient} with message {message.clean_content}")
        placed = twilio_client.calls.create(
            twiml=announcement,
            to=recipient,
            from_=config["number_from"],
        )
        logging.info(f"Initiated call with SID {placed.sid}")
@bot.event
async def on_message(message: discord.Message) -> None:
    """Triggers a pending call when message is posted in alert channel"""
    # Ignore bot-authored messages (including our own embeds) to avoid loops.
    if message.author.bot:
        return
    channel = bot.get_channel(config["alert_channel"])
    if message.channel.id == channel.id:
        # Post a "pending" embed and seed it with the dispatch reaction.
        pending = await channel.send(embed=create_embed(message, "Pending", 0xFF0000))
        await pending.add_reaction("\N{MOBILE PHONE}")
        logging.info(f"Pending message has been created by {message.author}")
        def valid_reactions(user_reaction, member) -> bool:
            """Checks requirements for dispatching a call"""
            # Must be the phone emoji, on this pending message, added by a
            # human (filters out the bot's own seeded reaction above).
            return (
                user_reaction.message.id == pending.id
                and str(user_reaction.emoji) == "\N{MOBILE PHONE}"
                and not member.bot
            )
        try:
            # NOTE(review): wait_for("reaction_add") returns a (reaction, user)
            # tuple, so the final log line prints that tuple — confirm intended.
            author = await bot.wait_for(
                "reaction_add", check=valid_reactions, timeout=config["timeout"]
            )
        except asyncio.TimeoutError:
            # Nobody confirmed in time: mark as timed out, do not place a call.
            await pending.edit(embed=create_embed(message, "Timed out", 0x000000))
            await pending.remove_reaction("\N{MOBILE PHONE}", bot.user)
            return await pending.add_reaction("\N{CROSS MARK}")
        # A human reacted in time: mark dispatched and place the calls.
        await pending.edit(embed=create_embed(message, "Dispatched", 0x88FF00))
        make_call(message)
        logging.info(f"Pending message has been dispatched by {author}")
if __name__ == "__main__":
bot.run(config["token"])
| [
"logging.basicConfig",
"twilio.rest.Client",
"logging.info",
"discord.Intents.default"
] | [((265, 396), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s | %(asctime)s | %(name)s | %(message)s"""', 'stream': 'sys.stdout', 'level': 'logging.INFO'}), "(format=\n '%(levelname)s | %(asctime)s | %(name)s | %(message)s', stream=sys.\n stdout, level=logging.INFO)\n", (284, 396), False, 'import logging\n'), ((1506, 1557), 'twilio.rest.Client', 'Client', (["config['account_sid']", "config['auth_token']"], {}), "(config['account_sid'], config['auth_token'])\n", (1512, 1557), False, 'from twilio.rest import Client\n'), ((238, 263), 'discord.Intents.default', 'discord.Intents.default', ([], {}), '()\n', (261, 263), False, 'import discord\n'), ((1604, 1674), 'logging.info', 'logging.info', (['f"""Calling {number} with message {message.clean_content}"""'], {}), "(f'Calling {number} with message {message.clean_content}')\n", (1616, 1674), False, 'import logging\n'), ((1965, 2016), 'logging.info', 'logging.info', (['f"""Initiated call with SID {call.sid}"""'], {}), "(f'Initiated call with SID {call.sid}')\n", (1977, 2016), False, 'import logging\n'), ((2450, 2519), 'logging.info', 'logging.info', (['f"""Pending message has been created by {message.author}"""'], {}), "(f'Pending message has been created by {message.author}')\n", (2462, 2519), False, 'import logging\n'), ((3356, 3420), 'logging.info', 'logging.info', (['f"""Pending message has been dispatched by {author}"""'], {}), "(f'Pending message has been dispatched by {author}')\n", (3368, 3420), False, 'import logging\n')] |
from django.dispatch import receiver
from django.utils.text import slugify
from django.db.models.signals import post_delete, pre_save
from blog.models import *
def category_create(sender, instance, **kwargs):
    """pre_save hook: normalise the Category name to lowercase before saving."""
    instance.name = instance.name.lower()
pre_save.connect(category_create, sender=Category)
# Create Slug to each new Post before you save to DB.
def slug_create(sender, instance, *args, **kwargs):
    """pre_save hook: derive a slug from author name + title, first save only."""
    # An already-populated slug is left alone so URLs stay stable on edits.
    if instance.slug:
        return
    author = instance.author
    instance.slug = slugify(f"{author.first_name}-{author.last_name}-{instance.title}")
pre_save.connect(slug_create, sender=Post)
@receiver(post_delete, sender=Post)
def submission_delete(sender, instance, **kwargs):
    # Remove the post's image file from storage once the Post row is deleted;
    # delete(False) skips re-saving the model (the row is already gone).
    instance.image.delete(False)
| [
"django.dispatch.receiver",
"django.utils.text.slugify",
"django.db.models.signals.pre_save.connect"
] | [((256, 306), 'django.db.models.signals.pre_save.connect', 'pre_save.connect', (['category_create'], {'sender': 'Category'}), '(category_create, sender=Category)\n', (272, 306), False, 'from django.db.models.signals import post_delete, pre_save\n'), ((560, 602), 'django.db.models.signals.pre_save.connect', 'pre_save.connect', (['slug_create'], {'sender': 'Post'}), '(slug_create, sender=Post)\n', (576, 602), False, 'from django.db.models.signals import post_delete, pre_save\n'), ((606, 640), 'django.dispatch.receiver', 'receiver', (['post_delete'], {'sender': 'Post'}), '(post_delete, sender=Post)\n', (614, 640), False, 'from django.dispatch import receiver\n'), ((465, 561), 'django.utils.text.slugify', 'slugify', (["(instance.author.first_name + '-' + instance.author.last_name + '-' +\n instance.title)"], {}), "(instance.author.first_name + '-' + instance.author.last_name + '-' +\n instance.title)\n", (472, 561), False, 'from django.utils.text import slugify\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=no-member
#
# @Author: oesteban
# @Date: 2016-01-05 11:33:39
# @Email: <EMAIL>
# @Last modified by: oesteban
""" Encapsulates report generation functions """
from __future__ import print_function, division, absolute_import, unicode_literals
def individual_html(in_iqms, in_plots=None, exclude_index=0, wf_details=None):
    """
    Generate the per-image individual HTML report.

    :param str in_iqms: path to the JSON file of image-quality metrics (IQMs)
    :param list in_plots: paths of report snippets (SVG) to embed, if any
    :param int exclude_index: value passed through to the template's
        exclusion widget
    :param list wf_details: extra workflow notes to render; anatomical flags
        derived from the IQMs may be appended to it
    :return: absolute path of the generated HTML file
    """
    import os.path as op  # pylint: disable=W0404
    import datetime
    from json import load
    from mriqc import __version__ as ver
    from mriqc.utils.misc import BIDS_COMP
    from mriqc.reports.utils import iqms2html, anat_flags, read_report_snippet
    from mriqc.data import IndividualTemplate
    from mriqc import logging
    from io import open  # pylint: disable=W0622

    report_log = logging.getLogger('mriqc.report')
    report_log.setLevel(logging.INFO)
    with open(in_iqms) as jsonfile:
        iqms_dict = load(jsonfile)

    # Now, the in_iqms file should be correctly named
    fname = op.splitext(op.basename(in_iqms))[0]
    out_file = op.abspath(fname + '.html')

    if in_plots is None:
        in_plots = []
    if wf_details is None:
        wf_details = []

    # Extract and prune metadata
    metadata = iqms_dict.pop('metadata', None)
    mod = metadata.pop('modality', None)
    file_id = [metadata.pop(k, None)
               for k in list(BIDS_COMP.keys())]
    file_id = [comp for comp in file_id if comp is not None]

    pred_qa = None  # metadata.pop('mriqc_pred', None)

    # Deal with special IQMs
    if mod in ('T1w', 'T2w'):
        flags = anat_flags(iqms_dict)
        if flags:
            wf_details.append(flags)
    elif mod == 'bold':
        pass
    else:
        # Fix: the exception object was instantiated but never raised, so
        # unknown modalities silently fell through to report generation.
        raise RuntimeError('Unknown modality "%s"' % mod)

    config = {
        'modality': mod,
        'sub_id': '_'.join(file_id),
        'timestamp': datetime.datetime.now().strftime("%Y-%m-%d, %H:%M"),
        'version': ver,
        'imparams': iqms2html(iqms_dict, 'iqms-table'),
        'svg_files': [read_report_snippet(pfile) for pfile in in_plots],
        'exclude_index': exclude_index,
        'workflow_details': wf_details,
        'metadata': iqms2html(metadata, 'metadata-table'),
        'pred_qa': pred_qa
    }

    if config['metadata'] is None:
        config['workflow_details'].append(
            '<span class="warning">File has no metadata</span> '
            '<span>(sidecar JSON file missing or empty)</span>')

    tpl = IndividualTemplate()
    tpl.generate_conf(config, out_file)

    report_log.info('Generated individual log (%s)', out_file)
    return out_file
| [
"mriqc.data.IndividualTemplate",
"mriqc.reports.utils.anat_flags",
"mriqc.reports.utils.iqms2html",
"io.open",
"json.load",
"datetime.datetime.now",
"mriqc.reports.utils.read_report_snippet",
"os.path.basename",
"mriqc.logging.getLogger",
"os.path.abspath",
"mriqc.utils.misc.BIDS_COMP.keys"
] | [((903, 936), 'mriqc.logging.getLogger', 'logging.getLogger', (['"""mriqc.report"""'], {}), "('mriqc.report')\n", (920, 936), False, 'from mriqc import logging\n'), ((1166, 1193), 'os.path.abspath', 'op.abspath', (["(fname + '.html')"], {}), "(fname + '.html')\n", (1176, 1193), True, 'import os.path as op\n'), ((2565, 2585), 'mriqc.data.IndividualTemplate', 'IndividualTemplate', ([], {}), '()\n', (2583, 2585), False, 'from mriqc.data import IndividualTemplate\n'), ((985, 998), 'io.open', 'open', (['in_iqms'], {}), '(in_iqms)\n', (989, 998), False, 'from io import open\n'), ((1032, 1046), 'json.load', 'load', (['jsonfile'], {}), '(jsonfile)\n', (1036, 1046), False, 'from json import load\n'), ((1692, 1713), 'mriqc.reports.utils.anat_flags', 'anat_flags', (['iqms_dict'], {}), '(iqms_dict)\n', (1702, 1713), False, 'from mriqc.reports.utils import iqms2html, anat_flags, read_report_snippet\n'), ((2064, 2098), 'mriqc.reports.utils.iqms2html', 'iqms2html', (['iqms_dict', '"""iqms-table"""'], {}), "(iqms_dict, 'iqms-table')\n", (2073, 2098), False, 'from mriqc.reports.utils import iqms2html, anat_flags, read_report_snippet\n'), ((2273, 2310), 'mriqc.reports.utils.iqms2html', 'iqms2html', (['metadata', '"""metadata-table"""'], {}), "(metadata, 'metadata-table')\n", (2282, 2310), False, 'from mriqc.reports.utils import iqms2html, anat_flags, read_report_snippet\n'), ((1126, 1146), 'os.path.basename', 'op.basename', (['in_iqms'], {}), '(in_iqms)\n', (1137, 1146), True, 'import os.path as op\n'), ((2122, 2148), 'mriqc.reports.utils.read_report_snippet', 'read_report_snippet', (['pfile'], {}), '(pfile)\n', (2141, 2148), False, 'from mriqc.reports.utils import iqms2html, anat_flags, read_report_snippet\n'), ((1482, 1498), 'mriqc.utils.misc.BIDS_COMP.keys', 'BIDS_COMP.keys', ([], {}), '()\n', (1496, 1498), False, 'from mriqc.utils.misc import BIDS_COMP\n'), ((1967, 1990), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1988, 1990), False, 'import 
datetime\n')] |
import restic
import unittest
import shutil
class TestBackup(unittest.TestCase):
    """Integration test: back up a single file into a throwaway restic repo."""

    def test_backup_file(self):
        repo = restic.Repo.init('repos_test/test_repo', '12345678')
        try:
            repo.backup('setup.py')
            snapshots = repo.snapshots()
            self.assertEqual(len(snapshots), 1)
            self.assertTrue(snapshots[0].get_paths()[0].endswith('setup.py'))
        finally:
            # Fix: cleanup was duplicated in an except-and-reraise plus a
            # fall-through call; try/finally removes the repo exactly once,
            # whether the assertions pass or fail.
            shutil.rmtree('repos_test/test_repo')
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"shutil.rmtree",
"restic.Repo.init"
] | [((578, 593), 'unittest.main', 'unittest.main', ([], {}), '()\n', (591, 593), False, 'import unittest\n'), ((130, 182), 'restic.Repo.init', 'restic.Repo.init', (['"""repos_test/test_repo"""', '"""12345678"""'], {}), "('repos_test/test_repo', '12345678')\n", (146, 182), False, 'import restic\n'), ((508, 545), 'shutil.rmtree', 'shutil.rmtree', (['"""repos_test/test_repo"""'], {}), "('repos_test/test_repo')\n", (521, 545), False, 'import shutil\n'), ((442, 479), 'shutil.rmtree', 'shutil.rmtree', (['"""repos_test/test_repo"""'], {}), "('repos_test/test_repo')\n", (455, 479), False, 'import shutil\n')] |
# Copyright (c) 2019 <NAME> <<EMAIL>>
# ISC License <https://opensource.org/licenses/isc>
import typing
from textwrap import dedent
import pytest
import file_config
from hypothesis import given
from hypothesis.strategies import from_regex
@file_config.config
class A:
    # Top-level test config; ``bar`` nests config ``B`` and is serialized as
    # its own INI section (e.g. "[A:bar]") by the dump tests below.
    @file_config.config
    class B:
        # Nested config with a single string field.
        bar = file_config.var(str)
    foo = file_config.var(str)
    bar = file_config.var(B)
@given(from_regex(r"\A[a-zA-Z]\Z", fullmatch=True))
def test_ini_root(root):
    """The requested root name must appear as the leading INI section header."""
    config_obj = A(foo="test", bar=A.B(bar="test"))
    dumped = config_obj.dumps_ini(root=root)
    # dumped starts with "[<root>]" -- slice between the brackets.
    assert dumped[1 : len(root) + 1] == root
def test_ini_delimiter():
    """Nested sections honour a custom delimiter; a root containing the
    delimiter triggers a UserWarning."""
    config_obj = A(foo="test", bar=A.B(bar="test"))
    dumped = config_obj.dumps_ini(delimiter="-")
    assert dumped.split("\n")[3] == "[A:bar]"
    with pytest.warns(UserWarning):
        config_obj.dumps_ini(delimiter="-", root="test-root")
def test_ini_exceptions():
    """Dumping a list-of-dicts field to INI is unsupported and must raise."""
    @file_config.config
    class Alpha:
        foo = file_config.var(typing.List[dict])

    bad_config = Alpha(foo=[{"test": "test"}])
    with pytest.raises(ValueError):
        bad_config.dumps_ini()
| [
"hypothesis.strategies.from_regex",
"pytest.warns",
"pytest.raises",
"file_config.var"
] | [((355, 375), 'file_config.var', 'file_config.var', (['str'], {}), '(str)\n', (370, 375), False, 'import file_config\n'), ((386, 404), 'file_config.var', 'file_config.var', (['B'], {}), '(B)\n', (401, 404), False, 'import file_config\n'), ((414, 458), 'hypothesis.strategies.from_regex', 'from_regex', (['"""\\\\A[a-zA-Z]\\\\Z"""'], {'fullmatch': '(True)'}), "('\\\\A[a-zA-Z]\\\\Z', fullmatch=True)\n", (424, 458), False, 'from hypothesis.strategies import from_regex\n'), ((323, 343), 'file_config.var', 'file_config.var', (['str'], {}), '(str)\n', (338, 343), False, 'import file_config\n'), ((807, 832), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (819, 832), False, 'import pytest\n'), ((978, 1012), 'file_config.var', 'file_config.var', (['typing.List[dict]'], {}), '(typing.List[dict])\n', (993, 1012), False, 'import file_config\n'), ((1068, 1093), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1081, 1093), False, 'import pytest\n')] |
#!/usr/bin/env ipython
import unittest
from clisn import loads
from match import LISNPattern
from pprint import pprint
class PatternTest(unittest.TestCase):
    # Tests for LISNPattern. Each @case function's docstring is a *pattern
    # source* consumed by the matcher, not documentation -- never edit them.
    def test_very_basic_pattern(self):
        # pat_vb matches a literal name (A), any NAME captured as $a (B),
        # or any node at all (C); pat_node destructures c(<NAME>).
        @LISNPattern
        def pat_vb(case, default):
            @case
            def A(res):
                '''
                FooBar
                '''
                return True
            @case
            def B(res):
                '''
                NAME$a
                '''
                return res['a']
            @case
            def C(res):
                '''
                $a
                '''
                return res['a']
        @LISNPattern
        def pat_node(case, default):
            @case
            def D(res):
                '''
                c(NAME$x)
                '''
                return res['x']
        foobar_lisn = loads("FooBar")["exprs"][0]["param"]
        b_lisn = loads("b")["exprs"][0]["param"]
        c_lisn = loads("c(a)")["exprs"][0]["param"]
        # NOTE(review): the assertions below are a bare string literal and are
        # never executed -- presumably disabled on purpose; confirm before
        # re-enabling.
        '''
        self.assertTrue(pat_vb(foobar_lisn))
        self.assertEqual(pat_vb(b_lisn), 'b')
        self.assertIs(pat_vb(c_lisn), c_lisn)
        self.assertEqual(pat_node(c_lisn), 'a')
        '''
    def test_basic_node(self):
        # Exercise matching of operator nodes, defvar bodies and full call
        # argument lists (positional, keyword, star/double-star, amp forms).
        thunk_lisn = loads('thunk: a + 2')["exprs"][0]["param"]
        defvar_lisn = loads('defvar x: foo(1 + 2)')["exprs"][0]["param"]
        f_lisn = loads('f(parg_first, parg_second, \
                        label=karg, \
                        **dstar, &, &&damp)')["exprs"][0]["param"]
        @LISNPattern
        def pat_v(case, default):
            @case
            def a(res):
                '''
                thunk: $node
                '''
                return res['node']
            @case
            def b(res):
                '''
                defvar NAME$var_name:
                    $val_node
                '''
                return (res["var_name"], res["val_node"])
        @LISNPattern
        def pat_arg(case, default):
            @case
            def a(res):
                '''
                f>
                    parg_first
                    parg_second
                    keyword -> dict:
                        label -> karg
                    *__optional__:
                        star
                    **dstar
                    &
                    &&damp
                '''
                return True
        # NOTE(review): assertions disabled here as well (string literal).
        '''
        self.assertEqual(pat_v(thunk_lisn)["type"], "binop")
        var_name, val_node = pat_v(defvar_lisn)
        self.assertEqual(var_name, "x")
        self.assertEqual(val_node["head_expr"]["name"], "foo")
        self.assertTrue(pat_arg(f_lisn))
        '''
    def test_fun(self):
        # Pattern for a full function signature using kleene-star and
        # optional captures; this is a smoke test that only prints the match.
        fun_lisn = loads('''
def go(a, b, c, d=2, d=7, e=3, *f, **g, &h, &&i)
''')["exprs"][0]["param"]
        @LISNPattern
        def pat_fun(case, default):
            @case
            def case1(res):
                '''
                def NAME$funname>
                    __kleene_star__(pargs): NAME$argname
                    keyword -> dict(kargs)
                    *__optional__(star): NAME$argname
                    **__optional__(dstar): NAME$argname
                    &__optional__(amp): NAME$argname
                    &&__optional__(damp): NAME$argname
                --
                __kleene_star__(body): $expr
                '''
                return res
        pprint(pat_fun(fun_lisn))
    def test_lets(self):
        # Keyword arguments matched as an ordered sequence of definitions.
        lets_lisn = loads('''
lets>
    a -> 1
    a -> a + 1
    a -> a + 2
--
    a
''')["exprs"][0]["param"]
        @LISNPattern
        def pat_lets(case, default):
            @case
            def case1(res):
                '''
                lets>
                    keyword -> seq:
                        __kleene_star__(definition):
                            NAME$key -> $value
                --
                    __kleene_plus__(body): $expr
                '''
                return res
        # NOTE(review): the pprint call below is disabled (string literal).
        '''
        pprint(pat_lets(lets_lisn))
        '''
    def test_or(self):
        # Alternation inside a kleene-star: each child is either yin or yang.
        yy_lisn = loads('''
yinyang:
    yin
    yang
    yang
    yin
''')["exprs"][0]["param"]
        @LISNPattern
        def pat_yinyang(case, default):
            @case
            def case1(res):
                '''
                yinyang:
                    __kleene_star__(list):
                        __or__:
                            __group__(yin):
                                yin
                            __group__(yang):
                                yang
                '''
                return res
        pprint(pat_yinyang(yy_lisn))
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"clisn.loads"
] | [((4727, 4742), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4740, 4742), False, 'import unittest\n'), ((897, 912), 'clisn.loads', 'loads', (['"""FooBar"""'], {}), "('FooBar')\n", (902, 912), False, 'from clisn import loads\n'), ((951, 961), 'clisn.loads', 'loads', (['"""b"""'], {}), "('b')\n", (956, 961), False, 'from clisn import loads\n'), ((1000, 1013), 'clisn.loads', 'loads', (['"""c(a)"""'], {}), "('c(a)')\n", (1005, 1013), False, 'from clisn import loads\n'), ((1297, 1318), 'clisn.loads', 'loads', (['"""thunk: a + 2"""'], {}), "('thunk: a + 2')\n", (1302, 1318), False, 'from clisn import loads\n'), ((1362, 1391), 'clisn.loads', 'loads', (['"""defvar x: foo(1 + 2)"""'], {}), "('defvar x: foo(1 + 2)')\n", (1367, 1391), False, 'from clisn import loads\n'), ((1430, 1560), 'clisn.loads', 'loads', (['"""f(parg_first, parg_second, label=karg, **dstar, &, &&damp)"""'], {}), "(\n 'f(parg_first, parg_second, label=karg, **dstar, &, &&damp)'\n )\n", (1435, 1560), False, 'from clisn import loads\n'), ((2786, 2849), 'clisn.loads', 'loads', (['"""\ndef go(a, b, c, d=2, d=7, e=3, *f, **g, &h, &&i)\n"""'], {}), '("""\ndef go(a, b, c, d=2, d=7, e=3, *f, **g, &h, &&i)\n""")\n', (2791, 2849), False, 'from clisn import loads\n'), ((3547, 3617), 'clisn.loads', 'loads', (['"""\nlets>\n a -> 1\n a -> a + 1\n a -> a + 2\n--\n a\n"""'], {}), '("""\nlets>\n a -> 1\n a -> a + 1\n a -> a + 2\n--\n a\n""")\n', (3552, 3617), False, 'from clisn import loads\n'), ((4143, 4200), 'clisn.loads', 'loads', (['"""\nyinyang:\n yin\n yang\n yang\n yin\n"""'], {}), '("""\nyinyang:\n yin\n yang\n yang\n yin\n""")\n', (4148, 4200), False, 'from clisn import loads\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
大智慧数据的处理
"""
import io
import urllib
import urllib.error
import urllib.request

import numpy as np

from struct import *

from ..xio.h5 import write_dataframe_set_struct_keep_head
# On-disk HDF5 record layout for dividend ("除权除息") data: a timestamp, the
# raw corporate-action figures, and the derived price-adjustment factors.
dzh_h5_type = np.dtype([
    ('time', np.uint64),
    ('pre_day', np.float64),
    ('pre_close', np.float64),
    ('split', np.float64),
    ('purchase', np.float64),
    ('purchase_price', np.float64),
    ('dividend', np.float64),
    ('dr_pre_close', np.float64),
    ('dr_factor', np.float64),
    ('backward_factor', np.float64),
    ('forward_factor', np.float64),
])
def dividend_to_h5(input_path, data):
    """Persist dividend records to HDF5 under the 'Dividend' key using the
    module-level ``dzh_h5_type`` record layout."""
    write_dataframe_set_struct_keep_head(input_path, data, dzh_h5_type, 'Dividend')
class DzhFetcher(object):
    """Fetch 大智慧 data files from a rotation of mirror servers or a local file."""

    _IPS = ('192.168.127.12', '172.16.58.3')
    _PATH = None
    _FILE_PATH = None

    def __init__(self, filepath=None):
        """
        :param filepath: when given, data is read from this local file instead
            of downloading from the mirror servers
        """
        self.ips = list(self._IPS)
        self._fetched = False
        self._FILE_PATH = filepath

    def fetch_next_server(self):
        """Drop the current (failing) server and retry with the next one.

        :raises FileNotFoundError: when every server has been exhausted
        """
        # Fix: the original evaluated ``self.ips.pop`` without calling it, so
        # the failing server was never actually removed from the rotation.
        if self.ips:
            self.ips.pop()
        if not self.ips:
            raise FileNotFoundError
        return self.fetch()

    def fetch(self):
        """Populate ``self.f`` from the local file or the network."""
        if self._FILE_PATH is None:
            return self._fetch_url()
        else:
            return self._fetch_file()

    def _fetch_url(self):
        # Fix: the original called ``urllib.read()`` (invalid), referenced the
        # non-existent ``urllib.URLError`` and the un-imported ``io`` module,
        # and wrapped bytes in StringIO. Download properly and buffer as bytes.
        try:
            with urllib.request.urlopen(self.data_url()) as response:
                data = response.read()
            self.f = io.BytesIO(data)
            self._fetched = True
        except urllib.error.URLError:
            return self.fetch_next_server()

    def _fetch_file(self):
        try:
            self.f = open(self._FILE_PATH, 'rb')
            self._fetched = True
        except OSError as e:
            raise e

    def data_url(self):
        """Return the download URL for the current server, or None if none left."""
        assert self._PATH, "No file path."
        if len(self.ips) == 0:
            return None
        return "http://" + self.ips[-1] + self._PATH


class DzhDividend(DzhFetcher):
    """大智慧除权数据 (dividend/split records)."""
    _PATH = '/platform/download/PWR/full.PWR'

    def read(self):
        """Generator of 大智慧除权数据

        Example of yield data:
            symbol: 'SZ000001'
            dividends: [{ :date_ex_dividend => '1992-03-23',
                          :split => 0.500,
                          :purchase => 0.000,
                          :purchase_price => 0.000,
                          :dividend => 0.200 }... ]
        """
        if not self._fetched:
            self.fetch()
        # skip head
        self.f.seek(12, 0)
        try:
            while True:
                yield self._read_symbol()
        except EOFError:
            # Fix: ``raise StopIteration`` inside a generator is converted to
            # RuntimeError under PEP 479 (Python 3.7+); returning is the
            # correct way to finish the generator.
            return
        finally:
            self.f.close()

    def _read_symbol(self):
        """Read one symbol's dividend records.

        :raises EOFError: at end of file (terminates the ``read`` generator)
        """
        dividends = []
        rawsymbol = self.f.read(16)
        if rawsymbol == b'':
            raise EOFError
        symbol = unpack('16s', rawsymbol)[0].replace(b'\x00', b'')
        rawdate = self.f.read(4)
        dt = np.dtype([('time', np.int32),
                       ('split', np.float32),
                       ('purchase', np.float32),
                       ('purchase_price', np.float32),
                       ('dividend', np.float32)])
        # Records for a symbol are terminated by a 0xffffffff date marker.
        while (rawdate) != b"\xff" * 4:
            record = np.frombuffer(rawdate + self.f.read(16), dtype=dt)
            # Fix: store the scalar record (not a 1-element array) so that
            # np.fromiter below can assemble the structured array.
            dividends.append(record[0])
            rawdate = self.f.read(4)
            if rawdate == b'':
                break
        return (symbol, np.fromiter(dividends, dtype=dt))
def download_pwr(
        local_file=r"D:\dzh2\Download\PWR\full.PWR",
        url='http://192.168.127.12/platform/download/PWR/full.PWR',
        proxy=None):
    """
    Download the full dividend (PWR) file and write it to ``local_file``.

    :param local_file: destination path for the downloaded file
    :param url: source URL of the PWR file
    :param proxy: optional proxy mapping, e.g. ``{'http': 'host:port'}``;
        when given it is installed globally on ``urllib.request``
    """
    if proxy is not None:
        # create the object, assign it to a variable
        proxy_handler = urllib.request.ProxyHandler(proxy)  # {'http': '192.168.1.60:808'}
        # construct a new opener using your proxy settings
        opener = urllib.request.build_opener(proxy_handler)
        # install the opener on the module-level
        urllib.request.install_opener(opener)
    # NOTE: the dividend file may no longer be directly downloadable from this
    # URL -- this needs further handling.
    # Fix: close the HTTP response when done instead of leaking the socket.
    with urllib.request.urlopen(url) as response:
        data = response.read()
    with open(local_file, "wb") as code:
        code.write(data)
    print(u'下载除权除息信息完成')
| [
"numpy.fromiter",
"urllib.request.install_opener",
"urllib.request.ProxyHandler",
"urllib.request.build_opener",
"numpy.dtype",
"urllib.request.urlopen"
] | [((213, 545), 'numpy.dtype', 'np.dtype', (["[('time', np.uint64), ('pre_day', np.float64), ('pre_close', np.float64), (\n 'split', np.float64), ('purchase', np.float64), ('purchase_price', np.\n float64), ('dividend', np.float64), ('dr_pre_close', np.float64), (\n 'dr_factor', np.float64), ('backward_factor', np.float64), (\n 'forward_factor', np.float64)]"], {}), "([('time', np.uint64), ('pre_day', np.float64), ('pre_close', np.\n float64), ('split', np.float64), ('purchase', np.float64), (\n 'purchase_price', np.float64), ('dividend', np.float64), (\n 'dr_pre_close', np.float64), ('dr_factor', np.float64), (\n 'backward_factor', np.float64), ('forward_factor', np.float64)])\n", (221, 545), True, 'import numpy as np\n'), ((4011, 4038), 'urllib.request.urlopen', 'urllib.request.urlopen', (['url'], {}), '(url)\n', (4033, 4038), False, 'import urllib\n'), ((2910, 3052), 'numpy.dtype', 'np.dtype', (["[('time', np.int32), ('split', np.float32), ('purchase', np.float32), (\n 'purchase_price', np.float32), ('dividend', np.float32)]"], {}), "([('time', np.int32), ('split', np.float32), ('purchase', np.\n float32), ('purchase_price', np.float32), ('dividend', np.float32)])\n", (2918, 3052), True, 'import numpy as np\n'), ((3700, 3734), 'urllib.request.ProxyHandler', 'urllib.request.ProxyHandler', (['proxy'], {}), '(proxy)\n', (3727, 3734), False, 'import urllib\n'), ((3843, 3877), 'urllib.request.build_opener', 'urllib.request.build_opener', (['proxy'], {}), '(proxy)\n', (3870, 3877), False, 'import urllib\n'), ((3935, 3972), 'urllib.request.install_opener', 'urllib.request.install_opener', (['opener'], {}), '(opener)\n', (3964, 3972), False, 'import urllib\n'), ((3409, 3441), 'numpy.fromiter', 'np.fromiter', (['dividends'], {'dtype': 'dt'}), '(dividends, dtype=dt)\n', (3420, 3441), True, 'import numpy as np\n')] |
from collections import namedtuple
from itertools import chain
from os import makedirs, rename, scandir, listdir
from os.path import (join as p, exists, relpath, isdir, isfile,
expanduser, expandvars, realpath)
from struct import pack
import errno
import hashlib
import json
import logging
import re
import shutil
from rdflib import plugin
from rdflib.parser import Parser, create_input_source
from rdflib.term import URIRef
import six
from textwrap import dedent
import transaction
import yaml
from .. import OWMETA_PROFILE_DIR, connect
from ..context import (DEFAULT_CONTEXT_KEY, IMPORTS_CONTEXT_KEY,
CLASS_REGISTRY_CONTEXT_KEY, Context)
from ..mapper import Mapper
from ..context_common import CONTEXT_IMPORTS
from ..data import Data
from ..file_match import match_files
from ..file_lock import lock_file
from ..file_utils import hash_file
from ..graph_serialization import write_canonical_to_file
from ..rdf_utils import transitive_lookup, BatchAddGraph
from ..utils import FCN, aslist
from .archive import Unarchiver
from .common import (find_bundle_directory, fmt_bundle_directory, BUNDLE_MANIFEST_FILE_NAME,
BUNDLE_INDEXED_DB_NAME, validate_manifest, BUNDLE_MANIFEST_VERSION)
from .exceptions import (NotADescriptor, BundleNotFound, NoRemoteAvailable, NoBundleLoader,
NotABundlePath, MalformedBundle, NoAcceptableUploaders,
FetchTargetIsNotEmpty, TargetIsNotEmpty, UncoveredImports)
from .loaders import LOADER_CLASSES, UPLOADER_CLASSES, load_entry_point_loaders
from urllib.parse import quote as urlquote, unquote as urlunquote
# Module-level logger
L = logging.getLogger(__name__)
DEFAULT_BUNDLES_DIRECTORY = p(OWMETA_PROFILE_DIR, 'bundles')
'''
Default directory for the bundle cache
'''
DEFAULT_REMOTES_DIRECTORY = p(OWMETA_PROFILE_DIR, 'remotes')
'''
Default directory for descriptors of user-level remotes as opposed to project-specific
remotes
'''
class Remote(object):
    '''
    A place where bundles come from and go to
    '''
    def __init__(self, name, accessor_configs=()):
        '''
        Parameters
        ----------
        name : str
            The name of the remote
        accessor_configs : iterable of AccessorConfig
            Configs for how you access the remote
        '''
        self.name = name
        ''' Name of the remote '''
        self.accessor_configs = list(accessor_configs)
        '''
        Configs for how you access the remote.
        One might configure mirrors or replicas for a given bundle repository as multiple
        accessor configs
        '''
        self.file_name = None
        '''
        If read from a file, the remote should have this attribute set to its source
        file's path
        '''
    def add_config(self, accessor_config):
        '''
        Add the given accessor config to this remote
        Parameters
        ----------
        accessor_config : AccessorConfig
            The config to add
        Returns
        -------
        bool
            `True` if the accessor config was added (meaning there's no equivalent one
            already set for this remote). Otherwise, `False`.
        '''
        if accessor_config in self.accessor_configs:
            return False
        self.accessor_configs.append(accessor_config)
        return True
    def generate_loaders(self):
        '''
        Generate the bundle loaders for this remote.
        Loaders are generated from `accessor_configs` and `LOADER_CLASSES` according with
        which type of `.Loader` can load a type of accessor
        '''
        for ac in self.accessor_configs:
            for lc in LOADER_CLASSES:
                if lc.can_load_from(ac):
                    loader = lc(ac)
                    yield loader
    def generate_uploaders(self):
        '''
        Generate the bundle uploaders for this remote
        '''
        for ac in self.accessor_configs:
            for uc in UPLOADER_CLASSES:
                if uc.can_upload_to(ac):
                    loader = uc(ac)
                    yield loader
    def write(self, out):
        '''
        Serialize the `Remote` and write to `out`
        Parameters
        ----------
        out : :term:`file object`
            Target for writing the remote
        '''
        yaml.dump(self, out)
    @classmethod
    def read(cls, inp):
        '''
        Read a serialized `Remote`
        Parameters
        ----------
        inp : :term:`file object`
            File-like object containing the serialized `Remote`
        '''
        # SECURITY NOTE: `unsafe_load` can execute arbitrary code from the YAML
        # source. It is required here to reconstruct `Remote` objects written by
        # `write`, so remote files must only come from trusted locations.
        res = yaml.unsafe_load(inp)
        assert isinstance(res, cls)
        return res
    def __eq__(self, other):
        # Return NotImplemented for unrelated types instead of raising
        # AttributeError on a missing `name`/`accessor_configs`
        if not isinstance(other, Remote):
            return NotImplemented
        return (self.name == other.name and
                self.accessor_configs == other.accessor_configs)
    def __hash__(self):
        # `accessor_configs` is a list, which is unhashable -- hashing the
        # original `(self.name, self.accessor_configs)` tuple raised TypeError.
        # Convert to a tuple so `Remote` instances can be used in sets/dicts.
        return hash((self.name, tuple(self.accessor_configs)))
    def __str__(self):
        if self.accessor_configs:
            accessors = '\n' + '\n'.join(' ' + '\n '.join(str(acc).split('\n')) for acc in self.accessor_configs)
        else:
            accessors = ' <none>'
        return dedent('''\
        {name}
        Accessors:{accessors}''').format(name=self.name,
                accessors=accessors)
    def __repr__(self):
        return f'{FCN(type(self))}({repr(self.name)}, {repr(self.accessor_configs)})'
class DependencyDescriptor(namedtuple('_DependencyDescriptor',
        ('id', 'version', 'excludes'))):
    '''
    Describes a bundle dependency: the depended-on bundle's ID, an optional
    version, and any context IDs to exclude from that bundle
    '''
    __slots__ = ()

    def __new__(cls, id, version=None, excludes=()):
        # Funnel defaulted/keyword arguments into the positional namedtuple
        # constructor
        return super().__new__(cls, id, version, excludes)
class AccessorConfig(object):
    '''
    Configuration for accessing a `Remote`. `Loaders <Loader>` are added to a remote
    according to which accessors are available
    '''
    def __eq__(self, other):
        # Subclasses must define equality so duplicate configs can be detected
        raise NotImplementedError()

    def __hash__(self):
        # Subclasses must define hashing consistent with `__eq__`
        raise NotImplementedError()
class _DepList(list):
    '''A `list` that also answers to `add`, mirroring the `set` interface'''
    def add(self, dd):
        self.append(dd)
class URLConfig(AccessorConfig):
    '''
    Configuration for accessing a remote with just a URL.

    Note that URLConfigs should be pickle-able since they are written to a YAML file as
    part of the `.Remote` they're apart of.
    '''
    def __init__(self, url):
        self.url = url

    def __eq__(self, other):
        return isinstance(other, URLConfig) and self.url == other.url

    def __hash__(self):
        return hash(self.url)

    def __str__(self):
        return f'{FCN(type(self))}(url={self.url!r})'

    @classmethod
    def register(cls, scheme):
        # Make this config class the handler for URLs with the given scheme
        URL_CONFIG_MAP[scheme] = cls

    __repr__ = __str__
# Registry mapping URL scheme (e.g., 'https') to the URLConfig subclass that
# handles it; populated via `URLConfig.register`
URL_CONFIG_MAP = {}
'''
`URLConfigs <URLConfig>` by scheme. Can be populated by pkg_resources entry points
'''
class Descriptor(object):
    '''
    Descriptor for a bundle.
    The descriptor is sufficient to build a distributable bundle directory tree from a
    `~rdflib.graph.ConjunctiveGraph` and a set of files (see `Installer`).
    '''
    def __init__(self, ident, **kwargs):
        self.id = ident
        self._set(kwargs)
    @classmethod
    def make(cls, obj):
        '''
        Makes a descriptor from the given object.
        Parameters
        ----------
        obj : a `dict-like object <dict>`
            An object with parameters for the Descriptor. Typically a dict
        Returns
        -------
        Descriptor
            The created descriptor
        '''
        res = cls(ident=obj['id'])
        res._set(obj)
        return res
    @classmethod
    def load(cls, descriptor_source):
        '''
        Load a descriptor from a YAML record
        Parameters
        ----------
        descriptor_source : str or :term:`file object`
            The descriptor source. Handled by `yaml.safe_load
            <https://pyyaml.org/wiki/PyYAMLDocumentation#the-yaml-package>`_
        Raises
        ------
        .NotADescriptor
            Thrown when the object loaded from `descriptor_source` isn't a `dict`
        '''
        dat = yaml.safe_load(descriptor_source)
        if isinstance(dat, dict):
            return cls.make(dat)
        else:
            raise NotADescriptor()
    def _set(self, obj):
        # Populate descriptor attributes from the given dict, applying defaults
        # for anything missing
        self.name = obj.get('name', self.id)
        self.version = obj.get('version', 1)
        self.description = obj.get('description', None)
        self.patterns = set(make_pattern(x) for x in obj.get('patterns', ()))
        self.includes = set(make_include_func(x) for x in obj.get('includes', ()))
        # Contexts explicitly marked `empty: True` among the includes.
        # NOTE(review): `popitem` mutates each dict-valued include entry and
        # assumes it holds exactly one key -- confirm includes are single-entry
        # mappings and that this runs after `self.includes` is built
        self.empties = {uri for uri, options in (inc.popitem()
                    for inc in obj.get('includes', ())
                    if isinstance(inc, dict))
                if options.get('empty', False) is True}
        deps_set = set()
        deps = _DepList()
        for x in obj.get('dependencies', ()):
            # A dependency may be given as a bare ID string, a mapping of
            # DependencyDescriptor fields, or a positional sequence
            if isinstance(x, six.string_types):
                dd = DependencyDescriptor(x)
            elif isinstance(x, dict):
                dd = DependencyDescriptor(**x)
            else:
                dd = DependencyDescriptor(*x)
            # De-duplicate while preserving the order dependencies were listed in
            if dd not in deps_set:
                deps.append(dd)
                deps_set.add(dd)
        self.dependencies = deps
        self.files = FilesDescriptor.make(obj.get('files', None))
    def __str__(self):
        return (FCN(type(self)) + '(ident={},'
                'name={},version={},description={},'
                'patterns={},includes={},'
                'files={},dependencies={})').format(
                        repr(self.id),
                        repr(self.name),
                        repr(self.version),
                        repr(self.description),
                        repr(self.patterns),
                        repr(self.includes),
                        repr(self.files),
                        repr(self.dependencies))
class Bundle(object):
    '''
    Main entry point for using bundles
    Typical usage is something like this::
        >>> with Bundle('example/bundleId', version=42) as bnd:
        ...     for aDataObject in bnd(DataObject)().load():
        ...         # Do something with `aDataObject`
        ...         print(aDataObject)
        DataObject(<http://example.org/entities#aDataObject>)
    '''
    def __init__(self, ident, bundles_directory=DEFAULT_BUNDLES_DIRECTORY, version=None,
                 conf=None, remotes=None, remotes_directory=DEFAULT_REMOTES_DIRECTORY):
        '''
        .. note::
            Paths, `bundles_directory` and `remotes_directory`, will have symbolic links,
            environment variables, and "~" (for the current user's home directory)
            expanded when the `Bundle` is initialized. To reflect changes to symbolic
            links or home directories, the `bundles_directory` or `remotes_directory`
            attributes must be updated directly or a new instance must be created.
        Parameters
        ----------
        ident : str
            Bundle ID
        bundles_directory : str, optional
            Path to the bundles directory. Defaults to `.DEFAULT_BUNDLES_DIRECTORY`
        version : int, optional
            Bundle version to access. By default, the latest version will be used.
        conf : .Configuration or dict, optional
            Configuration to add to the one created for the bundle automatically. Values
            for the default imports context (`.IMPORTS_CONTEXT_KEY`), the default context
            (`.DEFAULT_CONTEXT_KEY`) and store (``'rdf.store'``, ``'rdf.source'``, and,
            ``'rdf.store_conf'``) will be ignored and overwritten.
        remotes : iterable of Remote or str, optional
            A subset of remotes and additional remotes to fetch from. See `Fetcher.fetch`
        remotes_directory : str, optional
            The directory to load `Remotes <Remote>` from in case a bundle is not in the
            bundle cache. Defaults to `.DEFAULT_REMOTES_DIRECTORY`
        '''
        if not ident or not isinstance(ident, str):
            raise ValueError('ident must be a non-empty string')
        self.ident = ident
        if not bundles_directory:
            bundles_directory = DEFAULT_BUNDLES_DIRECTORY
        self.bundles_directory = realpath(expandvars(expanduser(bundles_directory)))
        if not conf:
            conf = {}
        conf.update({'rdf.source': 'default'})
        self.version = version
        self.remotes = remotes
        # XXX: Look at how we bring in projects remotes directory
        if not remotes_directory:
            remotes_directory = DEFAULT_REMOTES_DIRECTORY
        self.remotes_directory = realpath(expandvars(expanduser(remotes_directory)))
        # Builds the RDFLib store configuration, fetching dependency bundles as
        # needed to resolve the full dependency graph
        self._store_config_builder = \
            BundleDependentStoreConfigBuilder(
                    bundles_directory=bundles_directory,
                    remotes_directory=remotes_directory,
                    remotes=remotes)
        self._bundle_dep_mgr = BundleDependencyManager(
                bundles_directory=self.bundles_directory,
                remotes=self.remotes,
                remotes_directory=self.remotes_directory,
                dependencies=self.dependencies)
        self._given_conf = conf
        self.conf = None
        self._contexts = None
        self.connection = None
        ''' The owmeta_core connection to the bundle's indexed database '''
        self._bundle_context = None
        self._loaded_dependencies = dict()
    @property
    def identifier(self):
        return self.ident
    def resolve(self):
        # Locate the bundle directory in the local cache, fetching from remotes
        # if it is not there yet
        try:
            bundle_directory = self._get_bundle_directory()
        except BundleNotFound:
            bundle_directory = self._fetch_bundle(self.ident, self.version)
        return bundle_directory
    @property
    def manifest_data(self):
        # Parsed contents of the bundle's manifest file; resolves (and possibly
        # fetches) the bundle on each access
        bundle_directory = self.resolve()
        with open(p(bundle_directory, BUNDLE_MANIFEST_FILE_NAME)) as mf:
            return json.load(mf)
    def _get_bundle_directory(self):
        # - look up the bundle in the bundle cache
        # - generate a config based on the current config load the config
        # - make a database from the graphs, if necessary (similar to `owm regendb`). If
        #   delete the existing database if it doesn't match the store config
        return find_bundle_directory(self.bundles_directory, self.ident, self.version)
    def initdb(self):
        '''
        Initialize the bundle's `conf` `~owmeta_core.data.Data` instance
        '''
        # Idempotent: does nothing if `conf` is already set up
        if self.conf is None:
            bundle_directory = self.resolve()
            self.conf = Data().copy(self._given_conf)
            with open(p(bundle_directory, BUNDLE_MANIFEST_FILE_NAME)) as mf:
                manifest_data = json.load(mf)
            self.conf[DEFAULT_CONTEXT_KEY] = manifest_data.get(DEFAULT_CONTEXT_KEY)
            self.conf[IMPORTS_CONTEXT_KEY] = manifest_data.get(IMPORTS_CONTEXT_KEY)
            self.conf[CLASS_REGISTRY_CONTEXT_KEY] = manifest_data.get(CLASS_REGISTRY_CONTEXT_KEY)
            indexed_db_path = p(bundle_directory, BUNDLE_INDEXED_DB_NAME)
            store_name, store_conf = self._store_config_builder.build(
                    indexed_db_path,
                    manifest_data.get('dependencies', ()))
            self.conf['rdf.store'] = store_name
            self.conf['rdf.store_conf'] = store_conf
            self.connection = connect(conf=self.conf)
    def _fetch_bundle(self, bundle_ident, version):
        # Fetch from user-level remotes (plus any passed-in remotes)
        remotes_list = list(retrieve_remotes(self.remotes_directory))
        f = Fetcher(self.bundles_directory, remotes_list)
        return f.fetch(bundle_ident, version, self.remotes)
    @property
    def contexts(self):
        '''
        `List <list>` of `str`. Context IDs in this bundle
        '''
        # Since bundles are meant to be immutable, we won't need to add
        if self._contexts is not None:
            return self._contexts
        bundle_directory = self.resolve()
        contexts = list()
        graphs_directory = p(bundle_directory, 'graphs')
        idx_fname = p(graphs_directory, 'index')
        if not exists(idx_fname):
            raise Exception('Cannot find an index at {}'.format(repr(idx_fname)))
        with open(idx_fname, 'rb') as index_file:
            for l in index_file:
                l = l.strip()
                if not l:
                    continue
                # Each index line is a NUL-separated record whose first field
                # is the context ID
                ctx, _ = l.split(b'\x00')
                contexts.append(ctx.decode('UTF-8'))
        self._contexts = frozenset(contexts)
        return self._contexts
    @property
    def rdf(self):
        self.initdb()
        return self.conf['rdf.graph']
    def __str__(self):
        return f'Bundle({self.ident}' + (')' if self.version is None else f', {self.version})')
    def __enter__(self):
        self.initdb()
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Close the database connection
        self.connection.disconnect()
        self.connection = None
        self.conf = None
    def dependencies(self):
        return self.manifest_data.get('dependencies', ())
    def load_dependencies_transitive(self):
        '''
        Load dependencies from this bundle transitively
        Yields
        ------
        Bundle
            A direct or indirect dependency of this bundle
        '''
        return self._bundle_dep_mgr.load_dependencies_transitive()
    def load_dependencies(self):
        '''
        Load direct dependencies of this bundle
        Yields
        ------
        Bundle
            A direct dependency of this bundle
        '''
        return self._bundle_dep_mgr._load_dependencies()
    def _lookup_context_bundle(self, context_id):
        # Return this Bundle when it (or, transitively, one of its
        # dependencies) owns the given context; None otherwise
        owner = self._bundle_dep_mgr.lookup_context_bundle(
                self.contexts,
                context_id)
        if owner is self._bundle_dep_mgr:
            return self
    def _load_dependency(self, dependencies_item):
        # Wrap config errors from the dependency manager with the bundle's
        # directory for a more useful error message
        try:
            return self._bundle_dep_mgr._load_dependency(dependencies_item)
        except BundleDependencyConfigIsMalformed as e:
            bundle_directory = self.resolve()
            raise MalformedBundle(bundle_directory, str(e)) from e
    def __call__(self, target):
        # Contextualize `target` (e.g., a DataObject class) with this bundle's
        # stored context so queries read from the bundle's graph
        if not target or not hasattr(target, 'contextualize'):
            return target
        self.initdb()
        if self._bundle_context is None:
            self._bundle_context = _BundleContext(
                    None, conf=self.conf, bundle=self).stored
        return target.contextualize(self._bundle_context)
class BundleDependencyManager(object):
    '''
    Finds the bundle in which a context is defined.

    For a given bundle graph, there is *one* Bundle that "owns" a given context.
    Although multiple bundles may provide that context, the one closest to the root of
    the graph which provides some statements in that context is called the owner. Note
    that this does not mean that bundles on which the owner depends do not also be
    queried; however, the exact behavior is up to the component that uses this
    component.
    '''
    def __init__(self, dependencies, **common_bundle_arguments):
        # Cache of Bundle objects keyed on (id, version)
        self._loaded_dependencies = {}
        self._common_bundle_arguments = common_bundle_arguments
        self.dependencies = dependencies

    def load_dependencies_transitive(self):
        '''
        Load dependencies from this bundle transitively (breadth-first)

        Yields
        ------
        Bundle
            A direct or indirect dependency of this bundle
        '''
        frontier = {None: self}
        visited = set()
        while frontier:
            next_frontier = {}
            for node in frontier.values():
                for dep_bundle in node.load_dependencies():
                    key = (dep_bundle.ident, dep_bundle.version)
                    if key in visited:
                        continue
                    visited.add(key)
                    next_frontier[key] = dep_bundle
                    yield dep_bundle
            frontier = next_frontier

    def lookup_context_bundle(self, contexts, context_id):
        '''
        Return the owner of `context_id`: this object when the context is among
        `contexts`, otherwise the first (depth-first) dependency providing it,
        or `None` when none does
        '''
        if context_id is None or str(context_id) in contexts:
            return self
        for dep in self.dependencies():
            if context_id in frozenset(dep.get('excludes', ())):
                continue
            candidate = self._load_dependency(dep)._lookup_context_bundle(context_id)
            if candidate:
                return candidate
        return None

    def _load_dependencies(self):
        '''Yield a `Bundle` for each direct dependency'''
        for dep in self.dependencies():
            yield self._load_dependency(dep)

    load_dependencies = _load_dependencies

    def _load_dependency(self, dependencies_item):
        # Validate the dependency entry and return a (cached) Bundle for it
        dep_id = dependencies_item.get('id')
        if not dep_id:
            raise BundleDependencyConfigIsMalformed('Dependency entry is missing an identifier')
        dep_version = dependencies_item.get('version')
        if not dep_version:
            raise BundleDependencyConfigIsMalformed(f'Dependency entry for {dep_id} is'
                    ' missing a version number')
        cached = self._loaded_dependencies.get((dep_id, dep_version))
        if not cached:
            cached = Bundle(dep_id, version=dep_version,
                    **self._common_bundle_arguments)
            self._loaded_dependencies[(dep_id, dep_version)] = cached
        return cached
class BundleDependencyConfigIsMalformed(Exception):
    '''
    Thrown when a bundle dependency entry is missing a required field (an
    identifier or a version number)
    '''
class BundleDependentStoreConfigBuilder(object):
    '''
    Builds an RDFLib store configuration that depends on bundles.
    The process of building the store configurationi requires traversing the graph of
    dependencies so that duplicate dependencies in the graph can be omitted. To support
    this process, this builder will fetch bundles as needed to resolve transitive
    dependencies
    '''
    def __init__(self, bundles_directory=None, remotes_directory=None, remotes=None,
                 read_only=True):
        if not bundles_directory:
            bundles_directory = DEFAULT_BUNDLES_DIRECTORY
        self.bundles_directory = realpath(expandvars(expanduser(bundles_directory)))
        if not remotes_directory:
            remotes_directory = DEFAULT_REMOTES_DIRECTORY
        self.remotes_directory = realpath(expandvars(expanduser(remotes_directory)))
        self.remotes = remotes
        self.read_only = read_only
    def build(self, indexed_db_path, dependencies, bundle_directory=None):
        '''
        Builds the store configuration
        Parameters
        ----------
        indexed_db_path : str
            Path to the indexed database of the store that depends on the listed
            dependenices
        dependencies : list of dict
            List of dependencies info at least including keys for 'id' and 'version'
        bundle_directory : str, optional
            Path to the bundle directory for the dependent store, if the dependent store
            is a bundle. Used for information in an exceptional path, but not otherwise
            used
        Returns
        -------
        str
            The type of the store. This is the name used to look up the RDFLib store plugin
            object
            The configuration for the store. This is the object that will be passed to
            `rdflib.store.Store.open` to configure the store.
        '''
        # 'agg' is an aggregating store: the dependent store overlaid on its
        # dependencies' stores
        return 'agg', self._construct_store_config(indexed_db_path, dependencies,
                                                   read_only=self.read_only)
    __call__ = build
    def _construct_store_config(self, indexed_db_path, dependencies,
                                current_path=None, paths=None, bundle_directory=None,
                                read_only=True):
        # Recursively assembles the aggregate store config: this store's
        # FileStorage followed by one entry per (transitive) dependency
        if paths is None:
            paths = set()
        if current_path is None:
            current_path = _BDTD()
        dependency_configs = self._gather_dependency_configs(dependencies, current_path, paths, bundle_directory)
        fs_store_config = dict(url=indexed_db_path, read_only=read_only)
        return [
            ('FileStorageZODB', fs_store_config)
        ] + dependency_configs
    @aslist
    def _gather_dependency_configs(self, dependencies, current_path, paths, bundle_directory=None):
        # Generator (collected into a list by @aslist) of store configs for
        # each dependency, skipping (path, dependency) pairs already seen
        for dd in dependencies:
            dep_path = current_path.merge_excludes(dd.get('excludes', ()))
            dep_ident = dd.get('id')
            dep_version = dd.get('version')
            if not dep_ident:
                if bundle_directory:
                    raise MalformedBundle(bundle_directory, 'bundle dependency descriptor is lacking an identifier')
                else:
                    raise ValueError('bundle dependency descriptor is lacking an identifier')
            if (dep_path, (dep_ident, dep_version)) in paths:
                return
            paths.add((dep_path, (dep_ident, dep_version)))
            # Try twice: first look in the cache; on a miss, fetch and retry
            # NOTE(review): the loop rebinds `bundle_directory`, shadowing the
            # parameter used for the MalformedBundle message above -- confirm
            # this shadowing is intended
            tries = 0
            while tries < 2:
                try:
                    bundle_directory = find_bundle_directory(self.bundles_directory, dep_ident, dep_version)
                    with open(p(bundle_directory, BUNDLE_MANIFEST_FILE_NAME)) as mf:
                        manifest_data = json.load(mf)
                    break
                except (BundleNotFound, FileNotFoundError):
                    bundle_directory = self._fetch_bundle(dep_ident, dep_version)
                    tries += 1
            # We don't want to include items in the configuration that aren't specified by
            # the dependency descriptor. Also, all of the optionals have defaults that
            # BundleDependencyStore handles itself, so we don't want to impose them here.
            addl_dep_confs = {k: v for k, v in dd.items()
                              if k in ('excludes',) and v}
            yield ('owmeta_core_bds', dict(type='agg',
                                           conf=self._construct_store_config(
                                               p(bundle_directory, BUNDLE_INDEXED_DB_NAME),
                                               manifest_data.get('dependencies', ()),
                                               dep_path, paths, bundle_directory),
                                           **addl_dep_confs))
    def _fetch_bundle(self, bundle_ident, version):
        # Fetch using user-level remotes plus any remotes given at construction
        remotes_list = list(retrieve_remotes(self.remotes_directory))
        f = Fetcher(self.bundles_directory, remotes_list)
        return f.fetch(bundle_ident, version, self.remotes)
class _BDTD(namedtuple('_BDTD', ('excludes',))):
    '''
    Bundle Dependency Traversal Data (BDTD)

    Holds data we use in traversing bundle dependencies. Looks a lot like a dependency
    descriptor, but without an ID and version
    '''
    __slots__ = ()

    def __new__(cls, *args, excludes=(), **kwargs):
        return super().__new__(cls, *args, excludes=excludes, **kwargs)

    def merge_excludes(self, excludes):
        '''Return a copy with any novel entries of `excludes` appended to ours'''
        novel = tuple(e for e in excludes if e not in self.excludes)
        return self._replace(excludes=self.excludes + novel)
class _BundleContext(Context):
    '''
    `Context` for a bundle.
    '''
    def __init__(self, *args, bundle, **kwargs):
        super().__init__(*args, **kwargs)
        self.bundle = bundle
        self._mapper = None

    @property
    def mapper(self):
        # Built lazily so constructing the context doesn't build a mapper
        mapper = self._mapper
        if mapper is None:
            mapper = _BundleMapper(bundle=self.bundle)
            self._mapper = mapper
        return mapper
class _BundleMapper(Mapper):
    # Mapper that resolves classes against this bundle's class registry and,
    # failing that, against the class registries of the bundle's dependencies
    def __init__(self, bundle):
        try:
            bundle_conf = bundle.conf
        except AttributeError:
            raise Exception('Bundle connection has not been established.'
                    ' Call `initdb` or use the bundle in a context manager')
        super().__init__(name=f'{bundle.ident}' +
                (f'@{bundle.version}' if bundle.version else ''),
                conf=bundle_conf)
        self.bundle = bundle
        # Cache of resolved classes keyed on (rdf_type, context id)
        self._resolved_classes = dict()
    def resolve_class(self, rdf_type, context):
        # Check the cache first
        prev_resolved_class = self._resolved_classes.get((rdf_type, context.identifier))
        if prev_resolved_class:
            return prev_resolved_class
        # Then try this bundle's own class registry (superclass behavior)
        own_resolved_class = super().resolve_class(rdf_type, context)
        if own_resolved_class:
            self._resolved_classes[(rdf_type, context.identifier)] = own_resolved_class
            return own_resolved_class
        # Finally, search transitively through dependencies that declare a
        # class registry context
        target_id = context.identifier
        target_bundle = self.bundle._lookup_context_bundle(target_id)
        deps = target_bundle.load_dependencies_transitive()
        for bnd in deps:
            crctx_id = bnd.manifest_data.get(CLASS_REGISTRY_CONTEXT_KEY, None)
            if not crctx_id:
                continue
            # Entering the bundle establishes its connection (and mapper)
            with bnd:
                resolved_class = bnd.connection.mapper.resolve_class(rdf_type, context)
                if resolved_class:
                    self._resolved_classes[(rdf_type, context.identifier)] = resolved_class
                    return resolved_class
        return None
class _RemoteHandlerMixin(object):
    '''
    Utility mixin for handling remotes

    The mixed-in class must have a `remotes` attribute which is a list of `Remote`
    '''
    def __init__(self, load_entry_points=True, **kwargs):
        '''
        Parameters
        ----------
        load_entry_points : bool, optional
            If `False`, then entry points will not be loaded
        '''
        super(_RemoteHandlerMixin, self).__init__(**kwargs)
        self.load_entry_points = load_entry_points

    def _get_remotes(self, remotes):
        '''
        Get remotes

        Parameters
        ----------
        remotes : iterable of Remote or str
            A subset of names of remotes to act on and additional remotes to act on

        Yields
        ------
        Remote
            The selected remotes

        Raises
        ------
        NoRemoteAvailable
            Thrown when no remote is selected
        '''
        if self.load_entry_points:
            load_entry_point_loaders()
        instance_remotes = []
        additional_remotes = []
        if remotes:
            configured_remotes = {r.name: r for r in self.remotes}
            for r in remotes:
                # `six.text_type` replaced with `str`: the module is
                # Python-3-only (f-strings, urllib.parse imports)
                if isinstance(r, str):
                    rem = configured_remotes.get(r)
                    if rem is None:
                        # Previously an unmatched name appended `None`, which
                        # was yielded and crashed downstream loader generation.
                        # Skip it with a warning instead.
                        L.warning('Remote name %s does not match any configured remote', r)
                    else:
                        instance_remotes.append(rem)
                elif isinstance(r, Remote):
                    additional_remotes.append(r)
        else:
            instance_remotes = self.remotes
        has_remote = False
        for rem in chain(additional_remotes, instance_remotes):
            has_remote = True
            yield rem
        if not has_remote:
            raise NoRemoteAvailable()
class Fetcher(_RemoteHandlerMixin):
    '''
    Fetches bundles from `Remotes <Remote>`
    A fetcher takes a list of remotes, a bundle ID, and, optionally, a version number and
    downloads the bundle to a local directory. `Deployer` is, functionally, the dual of
    this class.
    '''
    def __init__(self, bundles_root, remotes, **kwargs):
        '''
        Parameters
        ----------
        bundles_root : str
            The root directory of the bundle cache
        remotes : list of Remote or str
            List of pre-configured remotes used in calls to `fetch`
        '''
        super(Fetcher, self).__init__(**kwargs)
        self.bundles_root = bundles_root
        self.remotes = remotes
    def __call__(self, *args, **kwargs):
        '''
        Calls `fetch` with the given arguments
        '''
        return self.fetch(*args, **kwargs)
    def fetch(self, bundle_id, bundle_version=None, remotes=None, progress_reporter=None,
              triples_progress_reporter=None):
        '''
        Retrieve a bundle by name from a remote and put it in the local bundle cache.
        The first remote that can retrieve the bundle will be tried. Each remote will be
        tried in succession until one downloads the bundle.
        Parameters
        ----------
        bundle_id : str
            The id of the bundle to retrieve
        bundle_version : int
            The version of the bundle to retrieve. optional
        remotes : iterable of Remote or str
            A subset of remotes and additional remotes to fetch from. If an entry in the
            iterable is a string, then it will be looked for amongst the remotes passed in
            initially.
        progress_reporter : `tqdm.tqdm <https://tqdm.github.io/>`_-like object, optional
            Receives updates of progress in fetching and installing locally
        triples_progress_reporter : `tqdm.tqdm <https://tqdm.github.io/>`_-like object, optional
            Receives updates of progress for adding triples for an individual graph
        Returns
        -------
        str
            returns the directory where the bundle has been placed
        Raises
        ------
        .exceptions.NoBundleLoader
            Thrown when none of the loaders are able to download the bundle
        .FetchTargetIsNotEmpty
            Thrown when the requested bundle is already in the cache
        '''
        if remotes:
            remotes = list(remotes)
        given_bundle_version = bundle_version
        loaders = self._get_bundle_loaders(bundle_id, given_bundle_version, remotes)
        loaders_list = list(loaders)
        # When no version was requested, pick the latest available anywhere
        if bundle_version is None:
            bundle_version = self._find_latest_remote_bundle_versions(bundle_id, loaders_list)
        bdir = fmt_bundle_directory(self.bundles_root, bundle_id, bundle_version)
        self._assert_target_is_empty(bdir)
        for loader in loaders_list:
            try:
                loader.base_directory = bdir
                loader(bundle_id, bundle_version)
                with open(p(bdir, BUNDLE_MANIFEST_FILE_NAME)) as mf:
                    manifest_data = json.load(mf)
                # Recursively fetch any dependency not already in the cache
                for dd in manifest_data.get('dependencies', ()):
                    try:
                        find_bundle_directory(self.bundles_root, dd['id'], dd.get('version'))
                    except BundleNotFound:
                        self.fetch(dd['id'], dd.get('version'), remotes=remotes)
                dat = self._post_fetch_dest_conf(bdir)
                build_indexed_database(dat['rdf.graph'], bdir, progress_reporter,
                                       triples_progress_reporter)
                dat.close()
                return bdir
            except Exception:
                # Clean up the partial download and try the next loader
                L.warning('Failed to load bundle %s with %s', bundle_id, loader, exc_info=True)
                shutil.rmtree(bdir)
        else: # no break
            # Only reached when every loader failed (or there were none)
            raise NoBundleLoader(bundle_id, given_bundle_version)
    def _post_fetch_dest_conf(self, bundle_directory):
        # Build a Data config pointing at the fetched bundle's indexed DB so
        # the database file can be created/populated
        res = Data().copy({
            'rdf.source': 'default',
            'rdf.store': 'FileStorageZODB',
            'rdf.store_conf': p(bundle_directory, BUNDLE_INDEXED_DB_NAME)
        })
        res.init()
        if not exists(res['rdf.store_conf']):
            raise Exception('Could not create the database file at ' + res['rdf.store_conf'])
        return res
    def _find_latest_remote_bundle_versions(self, bundle_id, loaders_list):
        # Ask every loader for its known versions and take the overall maximum
        latest_bundle_version = 0
        for loader in loaders_list:
            versions = loader.bundle_versions(bundle_id)
            if not versions:
                L.warning('Loader %s does not have any versions of the bundle %s', loader, bundle_id)
                continue
            loader_latest_version = max(versions)
            if loader_latest_version > latest_bundle_version:
                latest_bundle_version = loader_latest_version
        if latest_bundle_version <= 0:
            raise BundleNotFound(bundle_id, 'No versions of the requested bundle found from any remotes')
        return latest_bundle_version
    def _assert_target_is_empty(self, bdir):
        # A missing directory counts as empty
        target_empty = True
        try:
            for _ in scandir(bdir):
                target_empty = False
                break
        except FileNotFoundError:
            return
        if not target_empty:
            raise FetchTargetIsNotEmpty(bdir)
    def _get_bundle_loaders(self, bundle_id, bundle_version, remotes):
        # Yield loaders, from the selected remotes, that claim they can load
        # the requested bundle
        for rem in self._get_remotes(remotes):
            for loader in rem.generate_loaders():
                if loader.can_load(bundle_id, bundle_version):
                    yield loader
class Deployer(_RemoteHandlerMixin):
    '''
    Deploys bundles to `Remotes <Remote>`.
    A deployer takes a bundle directory tree or bundle archive and uploads it to a remote.
    `Fetcher` is, functionally, the dual of this class.
    Deployer is responsible for selecting remotes and corresponding uploaders among a set
    of options. `Uploaders <Uploader>` are responsible for actually doing the upload.
    '''
    def __init__(self, remotes=(), **kwargs):
        super(Deployer, self).__init__(**kwargs)
        self.remotes = remotes
    def __call__(self, *args, **kwargs):
        return self.deploy(*args, **kwargs)
    def deploy(self, bundle_path, remotes=None):
        '''
        Deploy a bundle
        Parameters
        ----------
        bundle_path : str
            Path to a bundle directory tree or archive
        remotes : iterable of Remote or str
            A subset of remotes to deploy to and additional remotes to deploy to
        Raises
        ------
        .NoAcceptableUploaders
            Thrown when none of the selected uploaders could upload the bundle
        '''
        if not exists(bundle_path):
            raise NotABundlePath(bundle_path, 'the file does not exist')
        # Validate the manifest before attempting any upload
        manifest_data = self._extract_manifest_data_from_bundle_path(bundle_path)
        validate_manifest(bundle_path, manifest_data)
        uploaded = False
        for uploader in self._get_bundle_uploaders(bundle_path, remotes=remotes):
            uploader(bundle_path)
            uploaded = True
        if not uploaded:
            raise NoAcceptableUploaders(bundle_path)
    def _extract_manifest_data_from_bundle_path(self, bundle_path):
        # Dispatch on whether the bundle is a directory tree or an archive file
        if isdir(bundle_path):
            return self._get_directory_manifest_data(bundle_path)
        elif isfile(bundle_path):
            return self._get_archive_manifest_data(bundle_path)
        else:
            raise NotABundlePath(bundle_path, 'path does not point to a file or directory')
    def _get_bundle_uploaders(self, bundle_directory, remotes=None):
        # Yield uploaders, from the selected remotes, that claim they can
        # upload this bundle
        for rem in self._get_remotes(remotes):
            for uploader in rem.generate_uploaders():
                if uploader.can_upload(bundle_directory):
                    yield uploader
    def _get_directory_manifest_data(self, bundle_path):
        try:
            with open(p(bundle_path, BUNDLE_MANIFEST_FILE_NAME)) as mf:
                return json.load(mf)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT: # FileNotFound
                raise MalformedBundle(bundle_path, 'no bundle manifest found')
            if e.errno == errno.EISDIR: # IsADirectoryError
                raise MalformedBundle(bundle_path, 'manifest is not a regular file')
            raise
        except json.decoder.JSONDecodeError:
            raise MalformedBundle(bundle_path, 'manifest is malformed: expected a'
                    ' JSON file')
    def _get_archive_manifest_data(self, bundle_path):
        with Unarchiver().to_tarfile(bundle_path) as tf:
            try:
                # extractfile returns None for non-regular members (e.g., dirs)
                mf0 = tf.extractfile(BUNDLE_MANIFEST_FILE_NAME)
                if mf0 is None:
                    raise MalformedBundle(bundle_path, 'manifest is not a regular file')
                # Would like to pull the
                with mf0 as mf:
                    return json.load(mf)
            except KeyError:
                raise MalformedBundle(bundle_path, 'no bundle manifest found')
            except json.decoder.JSONDecodeError:
                raise MalformedBundle(bundle_path, 'manifest is malformed: expected a'
                        ' JSON file')
class Cache(object):
    '''
    Cache of bundles
    '''
    def __init__(self, bundles_directory):
        '''
        Parameters
        ----------
        bundles_directory : str
            The directory where bundles are stored
        '''
        self.bundles_directory = bundles_directory
    def list(self):
        '''
        Returns a generator of summary bundle info
        '''
        try:
            bundle_directories = scandir(self.bundles_directory)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT:
                return
            raise
        for bundle_directory in bundle_directories:
            if not bundle_directory.is_dir():
                continue
            # Ignore deletes out from under us
            try:
                version_directories = scandir(bundle_directory.path)
            except (OSError, IOError) as e:
                if e.errno == errno.ENOENT:
                    continue
                raise
            def keyfunc(x):
                # Numeric version directories sort by version; anything
                # non-numeric sorts last
                try:
                    return int(x.name)
                except ValueError:
                    return float('+inf')
            for version_directory in sorted(version_directories, key=keyfunc, reverse=True):
                if not version_directory.is_dir():
                    continue
                try:
                    manifest_fname = p(version_directory.path, BUNDLE_MANIFEST_FILE_NAME)
                    with open(manifest_fname) as mf:
                        try:
                            manifest_data = json.load(mf)
                            # The directory name is the URL-quoted bundle ID
                            bd_id = urlunquote(bundle_directory.name)
                            bd_version = int(version_directory.name)
                            # Skip entries whose manifest disagrees with the
                            # directory layout
                            if (bd_id != manifest_data.get('id') or
                                    bd_version != manifest_data.get('version')):
                                L.warning('Bundle manifest at %s does not match bundle'
                                          ' directory', manifest_fname)
                                continue
                            yield manifest_data
                        except json.decoder.JSONDecodeError:
                            L.warning("Bundle manifest at %s is malformed",
                                      manifest_fname)
                except (OSError, IOError) as e:
                    if e.errno != errno.ENOENT:
                        raise
def retrieve_remote_by_name(remotes_dir, name, **kwargs):
    '''
    Return the remote in `remotes_dir` with the given name, or `None` if there
    is no such remote. Remaining arguments are as for `retrieve_remotes`.
    '''
    return next((rem for rem in retrieve_remotes(remotes_dir, **kwargs)
                 if rem.name == name), None)
def retrieve_remotes(remotes_dir, load_entry_points=True):
    '''
    Retrieve remotes from a project directory or user remotes directory

    Parameters
    ----------
    remotes_dir : str
        path to the directory containing ``*.remote`` files
    load_entry_points : bool, optional
        if `True`, then the entry points for `~.loaders.Loader` and `~.loaders.Uploader`
        implementations that have been added as entry points are loaded first
    '''
    if not exists(remotes_dir):
        return
    if load_entry_points:
        load_entry_point_loaders()
    remote_entries = [entry for entry in listdir(remotes_dir)
                      if entry.endswith('.remote')]
    for entry in remote_entries:
        remote_fname = p(remotes_dir, entry)
        with open(remote_fname) as remote_file:
            try:
                remote = Remote.read(remote_file)
                remote.file_name = remote_fname
                yield remote
            except Exception:
                # Best-effort: a broken remote file is logged and skipped
                L.warning('Unable to read remote %s', entry, exc_info=True)
class Installer(object):
    '''
    Installs a bundle locally
    '''
    def __init__(self, source_directory, bundles_directory, graph,
                 imports_ctx=None, default_ctx=None, class_registry_ctx=None,
                 installer_id=None, remotes=(), remotes_directory=None):
        '''
        Parameters
        ----------
        source_directory : str
            Directory where files come from. All files for a bundle must be below this
            directory
        bundles_directory : str
            Directory where the bundles files go. Usually this is the bundle cache
            directory
        graph : rdflib.graph.ConjunctiveGraph
            The graph from which we source contexts for this bundle
        default_ctx : str, optional
            The ID of the default context -- the target of a query when not otherwise
            specified.
        imports_ctx : str, optional
            The ID of the imports context this installer should use. Imports relationships
            are selected from this graph according to the included contexts.
        class_registry_ctx : str, optional
            The ID of the class registry context this installer should use. Class registry
            entries are retrieved from this graph.
        installer_id : str, optional
            Name of this installer for purposes of mutual exclusion
        remotes : iterable of Remote, optional
            Remotes to be used for retrieving dependencies when needed during
            installation. If not provided, the remotes will be collected from
            `remotes_directory`
        remotes_directory : str, optional
            The directory to load `Remotes <Remote>` from in case a bundle is not in the
            bundle cache. Defaults to `.DEFAULT_REMOTES_DIRECTORY`
        '''
        # Hash constructors used for the context and file integrity records
        self.context_hash = hashlib.sha224
        self.file_hash = hashlib.sha224
        self.source_directory = source_directory
        self.bundles_directory = bundles_directory
        self.graph = graph
        self.installer_id = installer_id
        self.imports_ctx = imports_ctx
        self.default_ctx = default_ctx
        self.class_registry_ctx = class_registry_ctx
        self.remotes = list(remotes)
        self.remotes_directory = remotes_directory
    def install(self, descriptor, progress_reporter=None):
        '''
        Given a descriptor, install a bundle
        Parameters
        ----------
        descriptor : Descriptor
            The descriptor for the bundle
        progress_reporter : `tqdm.tqdm <https://tqdm.github.io/>`_-like object
            Used for reporting progress during installation. optional
        Returns
        -------
        str
            The directory where the bundle is installed
        Raises
        ------
        .TargetIsNotEmpty
            Thrown when the target directory for installation is not empty.
        '''
        # Create the staging directory in the base directory to reduce the chance of
        # moving across file systems
        try:
            staging_directory = fmt_bundle_directory(self.bundles_directory, descriptor.id,
                                                     descriptor.version)
            makedirs(staging_directory)
        except OSError:
            # Directory may already exist -- emptiness is checked below
            pass
        target_empty = True
        for _ in scandir(staging_directory):
            target_empty = False
            break
        if not target_empty:
            raise TargetIsNotEmpty(staging_directory)
        # Lock out concurrent installers working on the same target
        with lock_file(p(staging_directory, '.lock'), unique_key=self.installer_id):
            try:
                self._install(descriptor, staging_directory,
                              progress_reporter=progress_reporter)
                return staging_directory
            except Exception:
                self._cleanup_failed_install(staging_directory)
                raise
    def _cleanup_failed_install(self, staging_directory):
        # Remove partial artifacts so a later install attempt can start clean
        shutil.rmtree(p(staging_directory, 'graphs'))
        shutil.rmtree(p(staging_directory, 'files'))
    def _install(self, descriptor, staging_directory, progress_reporter=None):
        '''
        Do the actual work of installation: write files, graphs, generated
        contexts, the manifest, and the indexed database into the staging
        directory.
        '''
        graphs_directory, files_directory = self._set_up_directories(staging_directory)
        self._write_file_hashes(descriptor, files_directory)
        self._write_context_data(descriptor, graphs_directory)
        self._generate_bundle_class_registry_ctx(descriptor, graphs_directory)
        self._generate_bundle_imports_ctx(descriptor, graphs_directory)
        self._write_manifest(descriptor, staging_directory)
        self._initdb(staging_directory)
        self._build_indexed_database(staging_directory, progress_reporter)
    def _set_up_directories(self, staging_directory):
        '''
        Create the 'graphs' and 'files' directories if needed and return their
        paths as a 2-tuple.
        '''
        graphs_directory = p(staging_directory, 'graphs')
        files_directory = p(staging_directory, 'files')
        try:
            makedirs(graphs_directory)
            makedirs(files_directory)
        except OSError as e:
            # Already-existing directories are fine (e.g. from a prior attempt)
            if e.errno != errno.EEXIST:
                raise
        return graphs_directory, files_directory
    def _write_file_hashes(self, descriptor, files_directory):
        '''
        Copy the descriptor's selected files into the bundle and record a hash
        entry for each in the 'hashes' file.
        '''
        with open(p(files_directory, 'hashes'), 'wb') as hash_out:
            for fname in _select_files(descriptor, self.source_directory):
                hsh = self.file_hash()
                source_fname = p(self.source_directory, fname)
                hash_file(hsh, source_fname)
                self._write_hash_line(hash_out, fname.encode('UTF-8'), hsh)
                shutil.copy2(source_fname, p(files_directory, fname))
    def _write_context_data(self, descriptor, graphs_directory):
        '''
        Serialize the selected contexts and check that every transitively
        imported context is either included, covered by a dependency, or
        deliberately excluded; otherwise raise `UncoveredImports`.
        '''
        contexts = _select_contexts(descriptor, self.graph)
        imports_ctxg = None
        if self.imports_ctx:
            imports_ctxg = self.graph.get_context(self.imports_ctx)
        included_context_ids = set()
        for ctxid in self._write_graphs(graphs_directory, *contexts):
            included_context_ids.add(ctxid)
        # Compute imported contexts
        imported_contexts = set()
        for ctxid in included_context_ids:
            if imports_ctxg is not None:
                imported_contexts |= transitive_lookup(imports_ctxg,
                                                       ctxid,
                                                       CONTEXT_IMPORTS,
                                                       seen=imported_contexts)
        uncovered_contexts = imported_contexts - included_context_ids
        if self.class_registry_ctx:
            # The source class registry context is replaced by a generated one
            uncovered_contexts.discard(URIRef(self.class_registry_ctx))
        uncovered_contexts = self._cover_with_dependencies(uncovered_contexts, descriptor)
        if uncovered_contexts:
            raise UncoveredImports(uncovered_contexts)
    def _write_manifest(self, descriptor, staging_directory):
        '''
        Build the bundle manifest from the descriptor and the installer's
        configured contexts, and write it as JSON to the staging directory.
        '''
        manifest_data = {}
        if self.default_ctx:
            manifest_data[DEFAULT_CONTEXT_KEY] = self.default_ctx
        if self.imports_ctx:
            # If an imports context was specified, then we'll need to generate an
            # imports context with the appropriate imports. We don't use the source
            # imports context ID for the bundle's imports context because the bundle
            # imports that we actually need are a subset of the total set of imports
            manifest_data[IMPORTS_CONTEXT_KEY] = fmt_bundle_imports_ctx_id(descriptor.id,
                                                                           descriptor.version)
        if self.class_registry_ctx:
            manifest_data[CLASS_REGISTRY_CONTEXT_KEY] = fmt_bundle_class_registry_ctx_id(descriptor.id,
                                                                                         descriptor.version)
        manifest_data['id'] = descriptor.id
        manifest_data['version'] = descriptor.version
        manifest_data['manifest_version'] = BUNDLE_MANIFEST_VERSION
        mf_deps = []
        for dd in descriptor.dependencies:
            bnd = self._dd_to_bundle(dd)
            # Fetch the dependency if necessary and get the version of the latest from the
            # bundle manifest. Usually, the bundle will already be on the system since it
            # *should have* been used for testing.
            #
            # (It's probably possible to do something like just grabbing the bundle
            # manifest data in the case there is not a local copy of the bundle, but that
            # should be unusual enough that it's probably not justified considering the
            # overhead of having an alternative to fetching that bundle loaders might be
            # expected to support.)
            dd_version = bnd.manifest_data['version']
            mf_deps.append({'version': dd_version,
                            'id': dd.id,
                            'excludes': dd.excludes})
        manifest_data['dependencies'] = mf_deps
        self.manifest_data = manifest_data
        with open(p(staging_directory, BUNDLE_MANIFEST_FILE_NAME), 'w') as mf:
            json.dump(manifest_data, mf, separators=(',', ':'))
    def _generate_bundle_imports_ctx(self, descriptor, graphs_directory):
        '''
        Generate and write the bundle's imports context, containing the import
        relationships among the contexts actually included in the bundle.
        '''
        if not self.imports_ctx:
            return
        imports_ctxg = self.graph.get_context(self.imports_ctx)
        # select all of the imports for all of the contexts in the bundle and serialize
        contexts = []
        idx_fname = p(graphs_directory, 'index')
        with open(idx_fname) as index_file:
            for l in index_file:
                ctx, _ = l.strip().split('\x00')
                contexts.append(URIRef(ctx))
        for c in descriptor.empties:
            contexts.append(URIRef(c))
        ctxgraph = imports_ctxg.triples_choices((contexts, CONTEXT_IMPORTS, None))
        if self.class_registry_ctx:
            cr_ctxid = URIRef(fmt_bundle_class_registry_ctx_id(descriptor.id, descriptor.version))
            contexts.append(cr_ctxid)
            old_ctxgraph = ctxgraph
            def replace_cr_ctxid():
                # Rewrite references to the source class registry context to the
                # bundle's generated class registry context ID
                src_cr_ctxid = URIRef(self.class_registry_ctx)
                for t in old_ctxgraph:
                    if t[0] == src_cr_ctxid:
                        yield (cr_ctxid, t[1], t[2])
                    elif t[2] == src_cr_ctxid:
                        yield (t[0], t[1], cr_ctxid)
                    else:
                        yield t
            ctxgraph = replace_cr_ctxid()
        ctxid = fmt_bundle_imports_ctx_id(descriptor.id, descriptor.version)
        self._write_graph(graphs_directory, ctxid, ctxgraph)
    def _generate_bundle_class_registry_ctx(self, descriptor, graphs_directory):
        '''
        Copy the configured class registry context into the bundle under a
        bundle-specific generated context ID.
        '''
        if not self.class_registry_ctx:
            return
        ctx_id = fmt_bundle_class_registry_ctx_id(descriptor.id, descriptor.version)
        class_registry_ctxg = self.graph.get_context(self.class_registry_ctx)
        self._write_graph(graphs_directory, ctx_id, class_registry_ctxg)
    def _write_graph(self, graphs_directory, ctxid, ctxgraph):
        # Convenience wrapper: write a single graph, draining the generator
        for _ in self._write_graphs(graphs_directory, (ctxid, ctxgraph)):
            pass
    def _write_graphs(self, graphs_directory, *graphs_sequence):
        '''
        Write each (context ID, graph) pair to its own file, appending hash and
        index records for each; yields each context ID as it is written.
        '''
        with open(p(graphs_directory, 'hashes'), 'ab') as hash_out,\
                open(p(graphs_directory, 'index'), 'ab') as index_out:
            for ctxid, ctxgraph in graphs_sequence:
                ctxidb = ctxid.encode('UTF-8')
                gbname, hsh = self._write_graph_to_file(ctxgraph, graphs_directory)
                self._write_hash_line(hash_out, ctxidb, hsh)
                self._write_index_line(index_out, ctxidb, gbname)
                yield ctxid
            hash_out.flush()
            index_out.flush()
    def _write_graph_to_file(self, ctxgraph, graphs_directory):
        '''
        Serialize a graph canonically to a temp file, rename it after its
        content hash, and return (file name, hash object).
        '''
        hsh = self.context_hash()
        temp_fname = p(graphs_directory, 'graph.tmp')
        write_canonical_to_file(ctxgraph, temp_fname)
        hash_file(hsh, temp_fname)
        gbname = hsh.hexdigest() + '.nt'
        ctx_file_name = p(graphs_directory, gbname)
        rename(temp_fname, ctx_file_name)
        return gbname, hsh
    def _write_hash_line(self, hash_out, key, hsh):
        # Record format: key NUL digest-size digest LF
        hash_out.write(key + b'\x00' + pack('B', hsh.digest_size) + hsh.digest() + b'\n')
    def _write_index_line(self, index_out, ctxidb, gbname):
        # Record format: context-id NUL graph-file-name LF
        index_out.write(ctxidb + b'\x00' + gbname.encode('UTF-8') + b'\n')
    def _initdb(self, staging_directory):
        '''
        Create and initialize the bundle's indexed database file in the staging
        directory.
        '''
        self.conf = Data().copy({
            'rdf.source': 'default',
            'rdf.store': 'FileStorageZODB',
            'rdf.store_conf': p(staging_directory, BUNDLE_INDEXED_DB_NAME)
        })
        # Create the database file and initialize some needed data structures
        self.conf.init()
        if not exists(self.conf['rdf.store_conf']):
            raise Exception('Could not create the database file at ' + self.conf['rdf.store_conf'])
    def _build_indexed_database(self, staging_directory, progress=None):
        '''
        Load the bundle's serialized graphs into the indexed database, closing
        the database configuration afterwards regardless of success.
        '''
        try:
            dest = self.conf['rdf.graph']
            build_indexed_database(dest, staging_directory, progress)
        finally:
            self.conf.close()
    def _dd_to_bundle(self, dependency_descriptor):
        # Build a Bundle for a dependency using this installer's remotes config
        return Bundle(dependency_descriptor.id,
                      version=dependency_descriptor.version,
                      bundles_directory=self.bundles_directory,
                      remotes=self.remotes,
                      remotes_directory=self.remotes_directory)
    def _cover_with_dependencies(self, uncovered_contexts, descriptor):
        '''
        Remove from `uncovered_contexts` any context provided by a dependency
        or declared empty; return whatever remains uncovered.
        '''
        # XXX: Will also need to check for the contexts having a given ID being consistent
        # with each other across dependencies
        dependencies = descriptor.dependencies
        for d in dependencies:
            bnd = self._dd_to_bundle(d)
            for c in bnd.contexts:
                uncovered_contexts.discard(URIRef(c))
                if not uncovered_contexts:
                    break
        for c in descriptor.empties:
            uncovered_contexts.discard(URIRef(c))
            if not uncovered_contexts:
                break
        return uncovered_contexts
def fmt_bundle_imports_ctx_id(id, version):
    '''Format the ID of the generated imports context for the given bundle.'''
    return fmt_bundle_ctx_id('generated_imports_ctx', id, version)
def fmt_bundle_class_registry_ctx_id(id, version):
    '''Format the ID of the generated class registry context for the given bundle.'''
    return fmt_bundle_ctx_id('generated_class_registry_ctx', id, version)
def fmt_bundle_class_registry_ctx_list_id(id, version):
    '''Format the ID of the generated class registry list context for the given bundle.'''
    return fmt_bundle_ctx_id('generated_class_registry_ctx_list', id, version)
def fmt_bundle_ctx_id(kind, id, version):
    '''
    Format a bundle-specific generated context ID of the given kind.

    The bundle ID is URL-quoted; the version is used as-is.
    '''
    return ('http://data.openworm.org/bundle/%s?bundle_id=%s&bundle_version=%s'
            % (kind, urlquote(id), version))
class FilesDescriptor(object):
    '''
    Descriptor for files
    '''
    def __init__(self):
        # Glob/regex patterns and explicitly included file names
        self.patterns = set()
        self.includes = set()

    @classmethod
    def make(cls, obj):
        '''
        Build a FilesDescriptor from a mapping with optional 'patterns' and
        'includes' entries. Returns `None` for a falsy argument.
        '''
        if not obj:
            return None
        instance = cls()
        instance.patterns = set(obj.get('patterns', ()))
        instance.includes = set(obj.get('includes', ()))
        return instance
def make_pattern(s):
    '''
    Create a URI pattern matcher from a descriptor string.

    Strings prefixed with ``rgx:`` are treated as regular expressions; all
    other strings as shell-style globs.
    '''
    regex_prefix = 'rgx:'
    if s.startswith(regex_prefix):
        return RegexURIPattern(s[len(regex_prefix):])
    return GlobURIPattern(s)
def make_include_func(s):
    '''
    Build a context-inclusion predicate from a descriptor entry.

    A `str` entry is the URI of the context to include; a one-key `dict`
    entry uses its key as that URI.
    '''
    if isinstance(s, str):
        return URIIncludeFunc(s)
    if isinstance(s, dict):
        keys = list(s)
        if len(keys) > 1:
            # More than one key is ambiguous -- report the first extra one
            raise ValueError('Context "includes" entry must have one key--the URI of'
                             f' the context to include. Extra key is "{keys[1]}"')
        uri = keys[0] if keys else None
        return URIIncludeFunc(uri)
    raise ValueError('Context "includes" entry must be a str or a dict')
class URIIncludeFunc(object):
    '''Predicate matching a single, exact context URI.'''

    def __init__(self, include):
        # Normalize to a URIRef with surrounding whitespace removed
        self.include = URIRef(include.strip())

    def __hash__(self):
        return hash(self.include)

    def __call__(self, uri):
        return URIRef(uri.strip()) == self.include

    def __str__(self):
        return f'{FCN(type(self))}({self.include!r})'

    __repr__ = __str__
class URIPattern(object):
    '''Base class for URI matchers; matches nothing by itself.'''

    def __init__(self, pattern):
        self._pattern = pattern

    def __hash__(self):
        return hash(self._pattern)

    def __call__(self, uri):
        # The base pattern never matches anything
        return False

    def __str__(self):
        return f'{FCN(type(self))}({self._pattern})'
class RegexURIPattern(URIPattern):
    '''Matches context URIs against a regular expression.'''

    def __init__(self, pattern):
        super(RegexURIPattern, self).__init__(re.compile(pattern))

    def __call__(self, uri):
        # Cast the (possibly None) match result to a boolean
        return bool(self._pattern.match(str(uri)))
class GlobURIPattern(RegexURIPattern):
    '''
    Matches context URIs against a shell-style glob pattern.

    Supported metacharacters: ``*`` (any run of characters), ``?`` (exactly
    one character), and ``[!...]`` (negated character class).
    '''
    def __init__(self, pattern):
        '''
        Parameters
        ----------
        pattern : str
            The glob pattern to translate into a regular expression
        '''
        # Translate glob metacharacters to their regex equivalents. Glob `?`
        # matches exactly one character, so it maps to regex `.` (the previous
        # `.?` incorrectly allowed it to match zero characters as well).
        replacements = [
            ['*', '.*'],
            ['?', '.'],
            ['[!', '[^']
        ]

        for a, b in replacements:
            pattern = pattern.replace(a, b)
        # NOTE(review): other regex metacharacters in the pattern (e.g. `.`,
        # `+`) are not escaped and so retain their regex meaning -- confirm
        # whether that is intended before tightening this further.
        # RegexURIPattern.__init__ compiles the pattern, so pass the string
        # directly rather than double-compiling.
        super(GlobURIPattern, self).__init__(pattern)
def _select_files(descriptor, directory):
    '''
    Yield the directory-relative paths of files selected by the descriptor,
    checking that every explicitly included file exists.
    '''
    files_descriptor = descriptor.files
    if not files_descriptor:
        return
    for included in files_descriptor.includes:
        if not exists(p(directory, included)):
            raise Exception('Included file in bundle does not exist', included)
        yield included
    for pattern in files_descriptor.patterns:
        for matched in match_files(directory, p(directory, pattern)):
            yield relpath(matched, directory)
def _select_contexts(descriptor, graph):
for context in graph.contexts():
ctx = context.identifier
for inc in descriptor.includes:
if inc(ctx):
yield ctx, context
break
for pat in descriptor.patterns:
if pat(ctx):
yield ctx, context
break
def build_indexed_database(dest, bundle_directory, progress=None, trip_prog=None):
    '''
    Build the indexed database from a bundle directory

    Reads the graph index under ``<bundle_directory>/graphs`` and parses each
    listed N-Triples file into `dest` within a single transaction.

    Parameters
    ----------
    dest : rdflib.graph.Graph
        Graph to load the bundle's triples into
    bundle_directory : str
        Root of the bundle; graphs are read from its 'graphs' sub-directory
    progress : `tqdm.tqdm`-like object, optional
        Progress reporter for contexts loaded
    trip_prog : `tqdm.tqdm`-like object, optional
        Progress reporter for triples loaded
    '''
    idx_fname = p(bundle_directory, 'graphs', 'index')
    # This code was copied from OWM._load_all_graphs, but we don't have a specific
    # reason for projects and bundles to have the same format, so keeping the logic
    # separate
    triples_read = 0
    with open(idx_fname) as index_file:
        # First pass over the index just counts contexts for the progress total
        cnt = 0
        for l in index_file:
            cnt += 1
        index_file.seek(0)
        if progress is not None:
            progress.total = cnt
        with transaction.manager:
            bag = BatchAddGraph(dest, batchsize=10000)
            for l in index_file:
                # Index records are 'context-id NUL graph-file-name'
                ctx, fname = l.strip().split('\x00')
                parser = plugin.get('nt', Parser)()
                graph_fname = p(bundle_directory, 'graphs', fname)
                with open(graph_fname, 'rb') as f, bag.get_context(ctx) as g:
                    parser.parse(create_input_source(f), g)
                if progress is not None:
                    progress.update(1)
                if trip_prog is not None:
                    trip_prog.update(bag.count - triples_read)
                    # NOTE(review): reads `g.count` here but `bag.count` above --
                    # presumably they track the same counter; confirm, otherwise
                    # the triple progress delta is wrong
                    triples_read = g.count
            if progress is not None:
                progress.write('Finalizing writes to database...')
    if progress is not None:
        progress.write('Loaded {:,} triples'.format(triples_read))
| [
"logging.getLogger",
"itertools.chain",
"rdflib.parser.create_input_source",
"re.compile",
"os.path.exists",
"textwrap.dedent",
"os.listdir",
"os.path.isdir",
"os.path.expanduser",
"os.path.relpath",
"urllib.parse.unquote",
"collections.namedtuple",
"rdflib.term.URIRef",
"yaml.dump",
"os... | [((1648, 1675), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1665, 1675), False, 'import logging\n'), ((1705, 1737), 'os.path.join', 'p', (['OWMETA_PROFILE_DIR', '"""bundles"""'], {}), "(OWMETA_PROFILE_DIR, 'bundles')\n", (1706, 1737), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((1814, 1846), 'os.path.join', 'p', (['OWMETA_PROFILE_DIR', '"""remotes"""'], {}), "(OWMETA_PROFILE_DIR, 'remotes')\n", (1815, 1846), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((5395, 5461), 'collections.namedtuple', 'namedtuple', (['"""_DependencyDescriptor"""', "('id', 'version', 'excludes')"], {}), "('_DependencyDescriptor', ('id', 'version', 'excludes'))\n", (5405, 5461), False, 'from collections import namedtuple\n'), ((26324, 26358), 'collections.namedtuple', 'namedtuple', (['"""_BDTD"""', "('excludes',)"], {}), "('_BDTD', ('excludes',))\n", (26334, 26358), False, 'from collections import namedtuple\n'), ((42809, 42829), 'os.listdir', 'listdir', (['remotes_dir'], {}), '(remotes_dir)\n', (42816, 42829), False, 'from os import makedirs, rename, scandir, listdir\n'), ((60857, 60895), 'os.path.join', 'p', (['bundle_directory', '"""graphs"""', '"""index"""'], {}), "(bundle_directory, 'graphs', 'index')\n", (60858, 60895), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((4327, 4347), 'yaml.dump', 'yaml.dump', (['self', 'out'], {}), '(self, out)\n', (4336, 4347), False, 'import yaml\n'), ((4600, 4621), 'yaml.unsafe_load', 'yaml.unsafe_load', (['inp'], {}), '(inp)\n', (4616, 4621), False, 'import yaml\n'), ((8043, 8076), 'yaml.safe_load', 'yaml.safe_load', (['descriptor_source'], {}), '(descriptor_source)\n', (8057, 8076), False, 'import yaml\n'), ((15982, 16011), 'os.path.join', 'p', (['bundle_directory', '"""graphs"""'], {}), "(bundle_directory, 'graphs')\n", 
(15983, 16011), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((16032, 16060), 'os.path.join', 'p', (['graphs_directory', '"""index"""'], {}), "(graphs_directory, 'index')\n", (16033, 16060), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((30170, 30213), 'itertools.chain', 'chain', (['additional_remotes', 'instance_remotes'], {}), '(additional_remotes, instance_remotes)\n', (30175, 30213), False, 'from itertools import chain\n'), ((37737, 37755), 'os.path.isdir', 'isdir', (['bundle_path'], {}), '(bundle_path)\n', (37742, 37755), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((42697, 42716), 'os.path.exists', 'exists', (['remotes_dir'], {}), '(remotes_dir)\n', (42703, 42716), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((46514, 46540), 'os.scandir', 'scandir', (['staging_directory'], {}), '(staging_directory)\n', (46521, 46540), False, 'from os import makedirs, rename, scandir, listdir\n'), ((47924, 47954), 'os.path.join', 'p', (['staging_directory', '"""graphs"""'], {}), "(staging_directory, 'graphs')\n", (47925, 47954), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((47981, 48010), 'os.path.join', 'p', (['staging_directory', '"""files"""'], {}), "(staging_directory, 'files')\n", (47982, 48010), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((52435, 52463), 'os.path.join', 'p', (['graphs_directory', '"""index"""'], {}), "(graphs_directory, 'index')\n", (52436, 52463), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((54835, 54867), 'os.path.join', 'p', (['graphs_directory', '"""graph.tmp"""'], {}), "(graphs_directory, 'graph.tmp')\n", 
(54836, 54867), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((55022, 55049), 'os.path.join', 'p', (['graphs_directory', 'gbname'], {}), '(graphs_directory, gbname)\n', (55023, 55049), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((55058, 55091), 'os.rename', 'rename', (['temp_fname', 'ctx_file_name'], {}), '(temp_fname, ctx_file_name)\n', (55064, 55091), False, 'from os import makedirs, rename, scandir, listdir\n'), ((13901, 13914), 'json.load', 'json.load', (['mf'], {}), '(mf)\n', (13910, 13914), False, 'import json\n'), ((15013, 15056), 'os.path.join', 'p', (['bundle_directory', 'BUNDLE_INDEXED_DB_NAME'], {}), '(bundle_directory, BUNDLE_INDEXED_DB_NAME)\n', (15014, 15056), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((16076, 16093), 'os.path.exists', 'exists', (['idx_fname'], {}), '(idx_fname)\n', (16082, 16093), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((34607, 34636), 'os.path.exists', 'exists', (["res['rdf.store_conf']"], {}), "(res['rdf.store_conf'])\n", (34613, 34636), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((35575, 35588), 'os.scandir', 'scandir', (['bdir'], {}), '(bdir)\n', (35582, 35588), False, 'from os import makedirs, rename, scandir, listdir\n'), ((37177, 37196), 'os.path.exists', 'exists', (['bundle_path'], {}), '(bundle_path)\n', (37183, 37196), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((37836, 37855), 'os.path.isfile', 'isfile', (['bundle_path'], {}), '(bundle_path)\n', (37842, 37855), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((40113, 40144), 'os.scandir', 'scandir', 
(['self.bundles_directory'], {}), '(self.bundles_directory)\n', (40120, 40144), False, 'from os import makedirs, rename, scandir, listdir\n'), ((42885, 42902), 'os.path.join', 'p', (['remotes_dir', 'r'], {}), '(remotes_dir, r)\n', (42886, 42902), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((46399, 46426), 'os.makedirs', 'makedirs', (['staging_directory'], {}), '(staging_directory)\n', (46407, 46426), False, 'from os import makedirs, rename, scandir, listdir\n'), ((47139, 47169), 'os.path.join', 'p', (['staging_directory', '"""graphs"""'], {}), "(staging_directory, 'graphs')\n", (47140, 47169), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((47193, 47222), 'os.path.join', 'p', (['staging_directory', '"""files"""'], {}), "(staging_directory, 'files')\n", (47194, 47222), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((48037, 48063), 'os.makedirs', 'makedirs', (['graphs_directory'], {}), '(graphs_directory)\n', (48045, 48063), False, 'from os import makedirs, rename, scandir, listdir\n'), ((48076, 48101), 'os.makedirs', 'makedirs', (['files_directory'], {}), '(files_directory)\n', (48084, 48101), False, 'from os import makedirs, rename, scandir, listdir\n'), ((52062, 52113), 'json.dump', 'json.dump', (['manifest_data', 'mf'], {'separators': "(',', ':')"}), "(manifest_data, mf, separators=(',', ':'))\n", (52071, 52113), False, 'import json\n'), ((55760, 55795), 'os.path.exists', 'exists', (["self.conf['rdf.store_conf']"], {}), "(self.conf['rdf.store_conf'])\n", (55766, 55795), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((57605, 57617), 'urllib.parse.quote', 'urlquote', (['id'], {}), '(id)\n', (57613, 57617), True, 'from urllib.parse import quote as urlquote, unquote as urlunquote\n'), ((59437, 59456), 're.compile', 
're.compile', (['pattern'], {}), '(pattern)\n', (59447, 59456), False, 'import re\n'), ((59902, 59921), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (59912, 59921), False, 'import re\n'), ((60265, 60280), 'os.path.join', 'p', (['directory', 'f'], {}), '(directory, f)\n', (60266, 60280), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((5138, 5196), 'textwrap.dedent', 'dedent', (['""" {name}\n Accessors:{accessors}"""'], {}), '(""" {name}\n Accessors:{accessors}""")\n', (5144, 5196), False, 'from textwrap import dedent\n'), ((12221, 12250), 'os.path.expanduser', 'expanduser', (['bundles_directory'], {}), '(bundles_directory)\n', (12231, 12250), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((12617, 12646), 'os.path.expanduser', 'expanduser', (['remotes_directory'], {}), '(remotes_directory)\n', (12627, 12646), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((13827, 13873), 'os.path.join', 'p', (['bundle_directory', 'BUNDLE_MANIFEST_FILE_NAME'], {}), '(bundle_directory, BUNDLE_MANIFEST_FILE_NAME)\n', (13828, 13873), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((14691, 14704), 'json.load', 'json.load', (['mf'], {}), '(mf)\n', (14700, 14704), False, 'import json\n'), ((22064, 22093), 'os.path.expanduser', 'expanduser', (['bundles_directory'], {}), '(bundles_directory)\n', (22074, 22093), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((22242, 22271), 'os.path.expanduser', 'expanduser', (['remotes_directory'], {}), '(remotes_directory)\n', (22252, 22271), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((34518, 34561), 'os.path.join', 'p', (['bundle_directory', 'BUNDLE_INDEXED_DB_NAME'], 
{}), '(bundle_directory, BUNDLE_INDEXED_DB_NAME)\n', (34519, 34561), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((38457, 38470), 'json.load', 'json.load', (['mf'], {}), '(mf)\n', (38466, 38470), False, 'import json\n'), ((40493, 40523), 'os.scandir', 'scandir', (['bundle_directory.path'], {}), '(bundle_directory.path)\n', (40500, 40523), False, 'from os import makedirs, rename, scandir, listdir\n'), ((46700, 46729), 'os.path.join', 'p', (['staging_directory', '""".lock"""'], {}), "(staging_directory, '.lock')\n", (46701, 46729), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((48324, 48352), 'os.path.join', 'p', (['files_directory', '"""hashes"""'], {}), "(files_directory, 'hashes')\n", (48325, 48352), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((48518, 48549), 'os.path.join', 'p', (['self.source_directory', 'fname'], {}), '(self.source_directory, fname)\n', (48519, 48549), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((49727, 49758), 'rdflib.term.URIRef', 'URIRef', (['self.class_registry_ctx'], {}), '(self.class_registry_ctx)\n', (49733, 49758), False, 'from rdflib.term import URIRef\n'), ((51989, 52036), 'os.path.join', 'p', (['staging_directory', 'BUNDLE_MANIFEST_FILE_NAME'], {}), '(staging_directory, BUNDLE_MANIFEST_FILE_NAME)\n', (51990, 52036), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((52700, 52709), 'rdflib.term.URIRef', 'URIRef', (['c'], {}), '(c)\n', (52706, 52709), False, 'from rdflib.term import URIRef\n'), ((53071, 53102), 'rdflib.term.URIRef', 'URIRef', (['self.class_registry_ctx'], {}), '(self.class_registry_ctx)\n', (53077, 53102), False, 'from rdflib.term import URIRef\n'), ((54196, 54225), 'os.path.join', 'p', (['graphs_directory', 
'"""hashes"""'], {}), "(graphs_directory, 'hashes')\n", (54197, 54225), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((54268, 54296), 'os.path.join', 'p', (['graphs_directory', '"""index"""'], {}), "(graphs_directory, 'index')\n", (54269, 54296), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((55586, 55630), 'os.path.join', 'p', (['staging_directory', 'BUNDLE_INDEXED_DB_NAME'], {}), '(staging_directory, BUNDLE_INDEXED_DB_NAME)\n', (55587, 55630), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((57015, 57024), 'rdflib.term.URIRef', 'URIRef', (['c'], {}), '(c)\n', (57021, 57024), False, 'from rdflib.term import URIRef\n'), ((60083, 60098), 'os.path.join', 'p', (['directory', 'f'], {}), '(directory, f)\n', (60084, 60098), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((60301, 60326), 'os.path.relpath', 'relpath', (['match', 'directory'], {}), '(match, directory)\n', (60308, 60326), False, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((61555, 61591), 'os.path.join', 'p', (['bundle_directory', '"""graphs"""', 'fname'], {}), "(bundle_directory, 'graphs', fname)\n", (61556, 61591), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((14604, 14650), 'os.path.join', 'p', (['bundle_directory', 'BUNDLE_MANIFEST_FILE_NAME'], {}), '(bundle_directory, BUNDLE_MANIFEST_FILE_NAME)\n', (14605, 14650), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((33483, 33496), 'json.load', 'json.load', (['mf'], {}), '(mf)\n', (33492, 33496), False, 'import json\n'), ((34211, 34230), 'shutil.rmtree', 'shutil.rmtree', (['bdir'], {}), '(bdir)\n', (34224, 34230), False, 'import 
shutil\n'), ((38384, 38425), 'os.path.join', 'p', (['bundle_path', 'BUNDLE_MANIFEST_FILE_NAME'], {}), '(bundle_path, BUNDLE_MANIFEST_FILE_NAME)\n', (38385, 38425), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((39385, 39398), 'json.load', 'json.load', (['mf'], {}), '(mf)\n', (39394, 39398), False, 'import json\n'), ((41060, 41112), 'os.path.join', 'p', (['version_directory.path', 'BUNDLE_MANIFEST_FILE_NAME'], {}), '(version_directory.path, BUNDLE_MANIFEST_FILE_NAME)\n', (41061, 41112), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((48714, 48739), 'os.path.join', 'p', (['files_directory', 'fname'], {}), '(files_directory, fname)\n', (48715, 48739), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((52622, 52633), 'rdflib.term.URIRef', 'URIRef', (['ctx'], {}), '(ctx)\n', (52628, 52633), False, 'from rdflib.term import URIRef\n'), ((56859, 56868), 'rdflib.term.URIRef', 'URIRef', (['c'], {}), '(c)\n', (56865, 56868), False, 'from rdflib.term import URIRef\n'), ((61498, 61522), 'rdflib.plugin.get', 'plugin.get', (['"""nt"""', 'Parser'], {}), "('nt', Parser)\n", (61508, 61522), False, 'from rdflib import plugin\n'), ((25119, 25132), 'json.load', 'json.load', (['mf'], {}), '(mf)\n', (25128, 25132), False, 'import json\n'), ((33404, 33438), 'os.path.join', 'p', (['bdir', 'BUNDLE_MANIFEST_FILE_NAME'], {}), '(bdir, BUNDLE_MANIFEST_FILE_NAME)\n', (33405, 33438), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((55211, 55237), 'struct.pack', 'pack', (['"""B"""', 'hsh.digest_size'], {}), "('B', hsh.digest_size)\n", (55215, 55237), False, 'from struct import pack\n'), ((61703, 61725), 'rdflib.parser.create_input_source', 'create_input_source', (['f'], {}), '(f)\n', (61722, 61725), False, 'from rdflib.parser import Parser, 
create_input_source\n'), ((25024, 25070), 'os.path.join', 'p', (['bundle_directory', 'BUNDLE_MANIFEST_FILE_NAME'], {}), '(bundle_directory, BUNDLE_MANIFEST_FILE_NAME)\n', (25025, 25070), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n'), ((41239, 41252), 'json.load', 'json.load', (['mf'], {}), '(mf)\n', (41248, 41252), False, 'import json\n'), ((41289, 41322), 'urllib.parse.unquote', 'urlunquote', (['bundle_directory.name'], {}), '(bundle_directory.name)\n', (41299, 41322), True, 'from urllib.parse import quote as urlquote, unquote as urlunquote\n'), ((25850, 25893), 'os.path.join', 'p', (['bundle_directory', 'BUNDLE_INDEXED_DB_NAME'], {}), '(bundle_directory, BUNDLE_INDEXED_DB_NAME)\n', (25851, 25893), True, 'from os.path import join as p, exists, relpath, isdir, isfile, expanduser, expandvars, realpath\n')] |
"""
Simple class based thing to make loading the datasets nice and easy.
Author: <NAME>
Created: 13/12/2020
"""
import pandas as pd
from sqlalchemy import MetaData, create_engine
from src.config import RAW_DATA
class Data:
    """Thin API over the raw SQLite datasets.

    Resolves the dataset name to its backing SQLite file and table, then
    exposes :meth:`load_table` to read that table into a DataFrame.
    """

    # Maps the accepted dataset keyword to its (sqlite file, table name) pair.
    _SOURCES = {
        "generation_capacity": (
            "national_generation_capacity.sqlite",
            "national_generation_capacity_stacked",
        ),
        "time_series": (
            "time_series.sqlite",
            "time_series_60min_singleindex",
        ),
    }

    def __init__(self, dataset: str) -> None:
        """
        Args:
            dataset (str): One of 'generation_capacity' or 'time_series'
                (case-insensitive; surrounding whitespace is ignored).

        Raises:
            ValueError: If ``dataset`` does not name a known dataset.
        """
        self.dataset = dataset.lower().strip()
        if self.dataset not in self._SOURCES:
            raise ValueError(
                "Argument 'dataset' should be one of 'generation_capacity' or 'time_series'.\n"
                f"                Got {self.dataset}"
            )
        filename, self._tablename = self._SOURCES[self.dataset]
        self._fp = RAW_DATA.joinpath(filename)
        self._engine = create_engine(f"sqlite:///{str(self._fp)}")
        # Reflect schema/metadata for all tables up front; not strictly
        # required, but handy to have around.
        self._meta = MetaData(bind=self._engine)
        self._meta.reflect()
        self._table = self._meta.tables[self._tablename]

    def __repr__(self) -> str:
        return f"{self.__class__.__qualname__}(dataset={self.dataset!r})"

    def load_table(self) -> pd.DataFrame:
        """Read the entire backing SQL table into a pandas DataFrame.

        The generation-capacity database has a single table; the time-series
        database has three tables at different resolutions, of which the
        60-minute one is used here.

        Returns:
            pd.DataFrame: DataFrame containing the table data.
        """
        return pd.read_sql_table(table_name=self._tablename, con=self._engine)
class GenerationCapacity(Data):
    """API for the national generation capacity dataset."""

    # Maps the public ``level`` argument onto the indicator column used to
    # filter the stacked table.
    _LEVELS = {
        "total": "energy_source_level_0",
        "type": "energy_source_level_1",
        "fuel": "energy_source_level_2",
    }

    def __init__(self) -> None:
        super().__init__(dataset="generation_capacity")

    def load_cleaned(self, level: str = None) -> pd.DataFrame:
        """
        Loads the cleaned Generation Capacity dataset into a pandas dataframe.
        User can specify the energy_source_level using the 'level' argument.
        If no level is passed, the entire dataset will be loaded.
        If level is passed, the energy source level columns will be dropped on
        load as they are no longer needed.

        Args:
            level (str, optional): Level to load, one of ['total', 'type', 'fuel'].
                'total' refers to energy_source_level_0
                'type' is energy_source_level_1
                'fuel' is energy_source_level_2
                If None: loads entire dataset.
                Defaults to None.

        Raises:
            ValueError: If passed level not in ['total', 'type', 'fuel'].

        Returns:
            pd.DataFrame: Dataframe containing requested data.
        """
        # Validate *before* paying for the SQL load so a bad argument fails
        # fast instead of after the whole table has been read and cleaned.
        if level is not None and level not in self._LEVELS:
            raise ValueError(
                f"Passed level must be one of ['total', 'type', 'fuel']. Got {level}"
            )
        df = (
            (self.load_table())
            .assign(
                technology=lambda x: pd.Categorical(x["technology"]),
                country=lambda x: pd.Categorical(x["country"]),
            )
            .drop(
                columns=[
                    "ID",
                    "weblink",
                    "type",
                    "comment",
                    "capacity_definition",
                    "source",
                    "source_type",
                ]
            )
            .drop_duplicates(
                subset=["technology", "year", "country"],
                keep="first",
                ignore_index=True,
            )
            .replace(to_replace="Other or unspecified energy sources", value="Other")
            .replace(to_replace="Renewable energy sources", value="Renewables")
            .dropna()
        )
        if level is None:
            return df
        # Keep only rows flagged for the requested level; the level indicator
        # columns themselves are no longer needed after filtering.
        return df[df[self._LEVELS[level]] == 1].drop(
            columns=[
                "energy_source_level_0",
                "energy_source_level_1",
                "energy_source_level_2",
                "energy_source_level_3",
                "technology_level",
            ]
        )

    def load_top5(self) -> pd.DataFrame:
        """
        Loads the Generation Capacity for the top 5 highest capacity
        countries.

        Returns:
            pd.DataFrame: Top 5 countries generation capacity data.
        """
        df = self.load_cleaned()
        top5_countries = ["FR", "DE", "IT", "ES", "GB"]
        return df[df["country"].isin(top5_countries)]

    def load_uk(self) -> pd.DataFrame:
        """
        Loads the UK generation capacity data.

        Returns:
            pd.DataFrame: UK Data.
        """
        df = self.load_cleaned()
        # NOTE(review): filters on "UK" while load_top5 uses "GB" — confirm
        # which country code this dataset actually stores for the UK.
        return df[df["country"] == "UK"]
class TimeSeries(Data):
    """API for the hourly time-series dataset."""

    def __init__(self, dataset: str = "time_series") -> None:
        # Delegate straight to Data, which resolves the file and table name.
        super().__init__(dataset)
| [
"pandas.read_sql_table",
"sqlalchemy.MetaData",
"src.config.RAW_DATA.joinpath",
"pandas.Categorical"
] | [((1383, 1410), 'sqlalchemy.MetaData', 'MetaData', ([], {'bind': 'self._engine'}), '(bind=self._engine)\n', (1391, 1410), False, 'from sqlalchemy import MetaData, create_engine\n'), ((2188, 2251), 'pandas.read_sql_table', 'pd.read_sql_table', ([], {'table_name': 'self._tablename', 'con': 'self._engine'}), '(table_name=self._tablename, con=self._engine)\n', (2205, 2251), True, 'import pandas as pd\n'), ((680, 736), 'src.config.RAW_DATA.joinpath', 'RAW_DATA.joinpath', (['"""national_generation_capacity.sqlite"""'], {}), "('national_generation_capacity.sqlite')\n", (697, 736), False, 'from src.config import RAW_DATA\n'), ((874, 913), 'src.config.RAW_DATA.joinpath', 'RAW_DATA.joinpath', (['"""time_series.sqlite"""'], {}), "('time_series.sqlite')\n", (891, 913), False, 'from src.config import RAW_DATA\n'), ((3583, 3614), 'pandas.Categorical', 'pd.Categorical', (["x['technology']"], {}), "(x['technology'])\n", (3597, 3614), True, 'import pandas as pd\n'), ((3650, 3678), 'pandas.Categorical', 'pd.Categorical', (["x['country']"], {}), "(x['country'])\n", (3664, 3678), True, 'import pandas as pd\n')] |
import torch
from torch_geometric.data import InMemoryDataset,Data
from os.path import join, isfile
from os import listdir
import numpy as np
import os.path as osp
from utils.construct_graph import read_data
class AbideDataset(InMemoryDataset):
    """In-memory graph dataset for ABIDE, built from files under ``root/raw``."""

    def __init__(self, root, name, transform=None, pre_transform=None):
        self.root = root
        self.name = name
        super(AbideDataset, self).__init__(root, transform, pre_transform)
        # Load the processed tensors that process() cached on disk.
        self.data, self.slices = torch.load(self.processed_paths[0])

    @property
    def raw_file_names(self):
        """Sorted names of the plain files found under ``root/raw``."""
        raw_dir = osp.join(self.root, 'raw')
        return sorted(
            entry for entry in listdir(raw_dir)
            if osp.isfile(osp.join(raw_dir, entry))
        )

    @property
    def processed_file_names(self):
        return 'data.pt'

    def download(self):
        # Raw data is expected to already be present; nothing to fetch.
        return

    def process(self):
        # Build the full graph collection from the raw ABIDE files.
        self.data, self.slices = read_data(self.raw_dir, 'ABIDE')
        if self.pre_filter is not None:
            kept = [data
                    for data in (self.get(idx) for idx in range(len(self)))
                    if self.pre_filter(data)]
            self.data, self.slices = self.collate(kept)
        if self.pre_transform is not None:
            transformed = [self.pre_transform(self.get(idx))
                           for idx in range(len(self))]
            self.data, self.slices = self.collate(transformed)
        torch.save((self.data, self.slices), self.processed_paths[0])

    def __repr__(self):
        return f'{self.name}({len(self)})'
"os.listdir",
"torch.load",
"os.path.join",
"torch.save",
"utils.construct_graph.read_data"
] | [((476, 511), 'torch.load', 'torch.load', (['self.processed_paths[0]'], {}), '(self.processed_paths[0])\n', (486, 511), False, 'import torch\n'), ((576, 602), 'os.path.join', 'osp.join', (['self.root', '"""raw"""'], {}), "(self.root, 'raw')\n", (584, 602), True, 'import os.path as osp\n'), ((994, 1026), 'utils.construct_graph.read_data', 'read_data', (['self.raw_dir', '"""ABIDE"""'], {}), "(self.raw_dir, 'ABIDE')\n", (1003, 1026), False, 'from utils.construct_graph import read_data\n'), ((1530, 1591), 'torch.save', 'torch.save', (['(self.data, self.slices)', 'self.processed_paths[0]'], {}), '((self.data, self.slices), self.processed_paths[0])\n', (1540, 1591), False, 'import torch\n'), ((634, 651), 'os.listdir', 'listdir', (['data_dir'], {}), '(data_dir)\n', (641, 651), False, 'from os import listdir\n'), ((666, 687), 'os.path.join', 'osp.join', (['data_dir', 'f'], {}), '(data_dir, f)\n', (674, 687), True, 'import os.path as osp\n')] |
"""
Author: <NAME> (Vincent)
Description: Connect/create an SQLite3 database and check/create the necessary tables.
"""
import sqlite3
from sqlite3 import Error
import os
def make_dir(filename):
    """Return the absolute path of *filename* inside the local ``db`` folder.

    The ``db`` directory sits next to this module and is created on first
    use if it does not already exist.

    Args:
        filename: Bare file name (e.g. ``"pos.sqlite"``).

    Returns:
        str: Absolute path ``<module dir>/db/<filename>``.
    """
    current_dir = os.path.dirname(os.path.abspath(__file__))
    dest_dir = os.path.join(current_dir, "db")
    # exist_ok=True only suppresses the "already exists" case; the previous
    # blanket `except OSError: pass` also hid real failures such as
    # permission errors, which now propagate as they should.
    os.makedirs(dest_dir, exist_ok=True)
    return os.path.join(dest_dir, filename)
def create_tables(con):
    """Run the bundled ``schema.sql`` script against *con*.

    Note: the connection is committed and closed here, so *con* must not be
    used after this call.
    """
    with open(make_dir("schema.sql"), "r") as schema_file:
        cursor = con.cursor()
        cursor.executescript(schema_file.read())
    con.commit()
    con.close()
def create_connection():
    """Create (or open) the sqlite database and ensure its tables exist.

    NOTE(review): despite the name, nothing is returned — the connection is
    closed before the function ends, so callers receive None.
    """
    con = None
    try:
        # make_dir() ensures the db/ folder exists and returns the full path.
        con = sqlite3.connect(make_dir("pos.sqlite"))
        # NOTE(review): sqlite3.version is the version of the Python module,
        # not of the SQLite library (that would be sqlite3.sqlite_version).
        print(sqlite3.version)
        # create_tables() commits and closes the connection itself.
        create_tables(con)
    except Error as e:
        print(e)
    finally:
        if con:
            # Closing an already-closed sqlite3 connection is a no-op, so
            # this is safe even though create_tables() closed it above.
            con.close()
| [
"os.path.abspath",
"os.path.join",
"os.makedirs"
] | [((273, 304), 'os.path.join', 'os.path.join', (['current_dir', '"""db"""'], {}), "(current_dir, 'db')\n", (285, 304), False, 'import os\n'), ((388, 420), 'os.path.join', 'os.path.join', (['dest_dir', 'filename'], {}), '(dest_dir, filename)\n', (400, 420), False, 'import os\n'), ((231, 256), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (246, 256), False, 'import os\n'), ((322, 343), 'os.makedirs', 'os.makedirs', (['dest_dir'], {}), '(dest_dir)\n', (333, 343), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# !/usr/bin/env python
__author__ = 'maxwu'
from random import randint
from django.views.generic import TemplateView
from chartjs.views.lines import BaseLineChartView
class LineChartJSONView(BaseLineChartView):
    """Serves hard-coded sample data for a Chart.js line chart."""

    # One x-axis label per measurement date.
    _LABELS = ["4/20/2016",
               "4/23/2016",
               "4/25/2016",
               "5/2/2016",
               "5/9/2016",
               "5/16/2016"]

    # Three series; one list of y-values per dataset.
    _SERIES = [
        [377, 377, 377, 467, 478, 431],
        [136, 139, 139, 142, 163, 234],
        [389, 389, 388, 413, 425, 498],
    ]

    def get_labels(self):
        """Return the 6 x-axis labels."""
        return list(self._LABELS)

    def get_data(self):
        """Return 3 datasets to plot."""
        return [list(series) for series in self._SERIES]
# URL-ready view callables: the template-backed page and the JSON data feed
# consumed by Chart.js.
line_chart = TemplateView.as_view(template_name='line_chart.html')
line_chart_json = LineChartJSONView.as_view()
# This module is not meant to be run directly.
if __name__ == '__main__':
    pass
"django.views.generic.TemplateView.as_view"
] | [((927, 980), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""line_chart.html"""'}), "(template_name='line_chart.html')\n", (947, 980), False, 'from django.views.generic import TemplateView\n')] |
from src.commands.help import HelpCommand
from src.commands.version import VersionCommand
class CommandManager(object):
    """Registry that builds and exposes the available CLI commands."""

    def __init__(self):
        super(CommandManager, self).__init__()
        self.command_list = self._build_command_list()

    def _build_command_list(self):
        """Build the list of available commands.

        Returns:
            A dictionary whose key is the string naming the command and whose
            value is the command instance itself.
        """
        return {
            "help": HelpCommand(self),
            "version": VersionCommand()
        }

    def get_command_list(self):
        """Return the full name -> command mapping."""
        return self.command_list

    def get_command(self, name):
        """Return the command registered under *name*, or None if unknown."""
        # dict.get is the idiomatic form of the previous try/except KeyError
        # lookup; behaviour (None on a missing key) is identical.
        return self.command_list.get(name)
| [
"src.commands.version.VersionCommand",
"src.commands.help.HelpCommand"
] | [((554, 571), 'src.commands.help.HelpCommand', 'HelpCommand', (['self'], {}), '(self)\n', (565, 571), False, 'from src.commands.help import HelpCommand\n'), ((596, 612), 'src.commands.version.VersionCommand', 'VersionCommand', ([], {}), '()\n', (610, 612), False, 'from src.commands.version import VersionCommand\n')] |
import torch
import torch.nn as nn
from asteroid.engine.optimizers import make_optimizer
from torch.nn.modules.loss import _Loss
from asteroid.filterbanks import make_enc_dec
from asteroid.filterbanks.transforms import take_mag
from pystoi import stoi
from pb_bss_eval.evaluation.module_pesq import pesq
class Discriminator(nn.Module):
    """Discriminator (also mentioned as D), scoring (input, clean) pairs."""

    def __init__(self, encoder, decoder, negative_slope=0.3):
        super().__init__()
        self.encoder = encoder
        self.decoder = decoder
        # Conv stack: BatchNorm over the two stacked magnitude channels,
        # then four spectrally-normalised conv + LeakyReLU pairs.
        conv_layers = [nn.BatchNorm2d(2)]
        for in_ch, out_ch, kernel in ((2, 15, 5), (15, 25, 7),
                                      (25, 40, 9), (40, 50, 11)):
            conv_layers.append(
                nn.utils.spectral_norm(nn.Conv2d(in_ch, out_ch, kernel, 1)))
            conv_layers.append(nn.LeakyReLU(negative_slope))
        self.conv = nn.Sequential(*conv_layers)
        self.pool = nn.AdaptiveAvgPool2d(1)
        # Spectrally-normalised MLP head mapping 50 features to one score.
        self.linear = nn.Sequential(
            nn.utils.spectral_norm(nn.Linear(50, 50)),
            nn.LeakyReLU(negative_slope),
            nn.utils.spectral_norm(nn.Linear(50, 10)),
            nn.LeakyReLU(negative_slope),
            nn.utils.spectral_norm(nn.Linear(10, 1)),
        )

    def forward(self, x, z):
        """
        Forward pass of discriminator.

        Args:
            x: inputs
            z: clean
        """
        # Encode both signals to magnitude spectrograms with a channel dim.
        mag_x = take_mag(self.encoder(x)).unsqueeze(1)
        mag_z = take_mag(self.encoder(z)).unsqueeze(1)
        stacked = torch.cat((mag_x, mag_z), dim=1)
        features = self.pool(self.conv(stacked)).squeeze()
        return self.linear(features)
class DiscriminatorLoss(_Loss):
    """Generic least-squares loss for the discriminator.

    However, computation of some metrics can break the code (eg PESQ).
    For now, we recommend to use only STOI.
    """

    def __init__(self, metric, rate):
        super().__init__()
        self.metric = metric
        self.rate = rate

    def forward(self, noisy, clean, estimates, est_labels, labels):
        # Targets differ depending on whether the estimates come from the
        # generated data or not: ones vs. the perceptual metric score.
        if labels:
            target = torch.ones_like(est_labels)
        else:
            target = get_metric(self.metric, noisy, clean, estimates, self.rate)
        return torch.mean((est_labels - target) ** 2)
def get_metric(metric, noisy, clean, estimates, rate):
    """Compute a perceptual metric per batch item.

    Returns a tensor of per-item scores on the same device as *noisy*.
    """
    noisy_batch = noisy.cpu().squeeze(1).data.numpy()
    clean_batch = clean.cpu().squeeze(1).data.numpy()
    est_batch = estimates.cpu().squeeze(1).data.numpy()
    scorer = stoi if metric == 'stoi' else pesq
    scores = torch.zeros(noisy.size(0))
    for idx in range(noisy_batch.shape[0]):
        scores[idx] += scorer(clean_batch[idx], est_batch[idx], rate)
    if metric == 'pesq':
        # Shift/scale PESQ scores into [0, 1] (assumes raw range [-0.5, 4.5]).
        scores = (scores + 0.5) / 5.0
    return scores.to(noisy.device)
def make_discriminator_and_optimizer(conf):
    """Build the discriminator, its optimizer and its loss from a config dict.

    Args:
        conf: Dictionary containing the output of hierarchical argparse;
            must provide the keys 'filterbank', 'optim',
            'metric_to_opt' -> 'metric' and 'data' -> 'rate'.

    Returns:
        (model, optimizer, d_loss) tuple.

    The main goal of this function is to make reloading for resuming
    and evaluation very simple.
    """
    # Define building blocks for local model
    encoder, decoder = make_enc_dec(**conf['filterbank'])
    model = Discriminator(encoder, decoder)
    # Define optimizer of this model
    optimizer = make_optimizer(model.parameters(), **conf['optim'])
    # Loss compares D's scores against the configured perceptual metric,
    # evaluated at the dataset sample rate.
    d_loss = DiscriminatorLoss(conf['metric_to_opt']['metric'],
                              conf['data']['rate'])
    return model, optimizer, d_loss
| [
"torch.nn.BatchNorm2d",
"torch.ones_like",
"torch.nn.LeakyReLU",
"asteroid.filterbanks.transforms.take_mag",
"torch.nn.Conv2d",
"asteroid.filterbanks.make_enc_dec",
"torch.nn.AdaptiveAvgPool2d",
"torch.nn.Linear",
"torch.cat"
] | [((3727, 3761), 'asteroid.filterbanks.make_enc_dec', 'make_enc_dec', ([], {}), "(**conf['filterbank'])\n", (3739, 3761), False, 'from asteroid.filterbanks import make_enc_dec\n'), ((1034, 1057), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (1054, 1057), True, 'import torch.nn as nn\n'), ((1564, 1575), 'asteroid.filterbanks.transforms.take_mag', 'take_mag', (['x'], {}), '(x)\n', (1572, 1575), False, 'from asteroid.filterbanks.transforms import take_mag\n'), ((1661, 1672), 'asteroid.filterbanks.transforms.take_mag', 'take_mag', (['z'], {}), '(z)\n', (1669, 1672), False, 'from asteroid.filterbanks.transforms import take_mag\n'), ((1713, 1737), 'torch.cat', 'torch.cat', (['(x, z)'], {'dim': '(1)'}), '((x, z), dim=1)\n', (1722, 1737), False, 'import torch\n'), ((583, 600), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(2)'], {}), '(2)\n', (597, 600), True, 'import torch.nn as nn\n'), ((674, 702), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['negative_slope'], {}), '(negative_slope)\n', (686, 702), True, 'import torch.nn as nn\n'), ((777, 805), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['negative_slope'], {}), '(negative_slope)\n', (789, 805), True, 'import torch.nn as nn\n'), ((880, 908), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['negative_slope'], {}), '(negative_slope)\n', (892, 908), True, 'import torch.nn as nn\n'), ((984, 1012), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['negative_slope'], {}), '(negative_slope)\n', (996, 1012), True, 'import torch.nn as nn\n'), ((1162, 1190), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['negative_slope'], {}), '(negative_slope)\n', (1174, 1190), True, 'import torch.nn as nn\n'), ((1259, 1287), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['negative_slope'], {}), '(negative_slope)\n', (1271, 1287), True, 'import torch.nn as nn\n'), ((637, 659), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2)', '(15)', '(5)', '(1)'], {}), '(2, 15, 5, 1)\n', (646, 659), True, 'import torch.nn as nn\n'), ((739, 762), 'torch.nn.Conv2d', 'nn.Conv2d', 
(['(15)', '(25)', '(7)', '(1)'], {}), '(15, 25, 7, 1)\n', (748, 762), True, 'import torch.nn as nn\n'), ((842, 865), 'torch.nn.Conv2d', 'nn.Conv2d', (['(25)', '(40)', '(9)', '(1)'], {}), '(25, 40, 9, 1)\n', (851, 865), True, 'import torch.nn as nn\n'), ((945, 969), 'torch.nn.Conv2d', 'nn.Conv2d', (['(40)', '(50)', '(11)', '(1)'], {}), '(40, 50, 11, 1)\n', (954, 969), True, 'import torch.nn as nn\n'), ((1130, 1147), 'torch.nn.Linear', 'nn.Linear', (['(50)', '(50)'], {}), '(50, 50)\n', (1139, 1147), True, 'import torch.nn as nn\n'), ((1227, 1244), 'torch.nn.Linear', 'nn.Linear', (['(50)', '(10)'], {}), '(50, 10)\n', (1236, 1244), True, 'import torch.nn as nn\n'), ((1324, 1340), 'torch.nn.Linear', 'nn.Linear', (['(10)', '(1)'], {}), '(10, 1)\n', (1333, 1340), True, 'import torch.nn as nn\n'), ((2402, 2429), 'torch.ones_like', 'torch.ones_like', (['est_labels'], {}), '(est_labels)\n', (2417, 2429), False, 'import torch\n')] |
from collections import Counter
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.shortcuts import render, HttpResponseRedirect, redirect
from django.views.generic import ListView
from apps.corecode.models import AcademicSession, AcademicTerm,StudentClass
from apps.students.models import Student
from .models import Result
from .forms import CreateResults, EditResults,CreateResultCLass,GetResutlSubjectForm
# NOTE(review): stray no-op expression — `ListView` is referenced here but
# the statement has no effect; it can likely be deleted.
ListView
@login_required
def create_result(request):
    """Two-step flow for creating empty Result rows.

    Step 1 (GET / plain POST): pick students from the full list.
    Step 2 (POST containing 'finish'): pick subjects/session/term and create
    one Result per (student, subject) pair that does not exist yet.
    """
    students = Student.objects.all()
    if request.method == 'POST':
        # Step 2: the subject/session/term form was submitted.
        if 'finish' in request.POST:
            form = CreateResults(request.POST)
            if form.is_valid():
                subjects = form.cleaned_data['subjects']
                session = form.cleaned_data['session']
                term = form.cleaned_data['term']
                # Comma-separated student pks forwarded from step 1.
                students = request.POST['students']
                results = []
                for student in students.split(','):
                    # NOTE(review): one query per id (N+1); a single
                    # filter(pk__in=...) would batch this.
                    stu = Student.objects.get(pk=student)
                    if stu.current_class:
                        for subject in subjects:
                            # Skip pairs that already have a Result row.
                            check = Result.objects.filter(session=session, term=term,current_class=stu.current_class,subject=subject, student=stu).first()
                            if not check:
                                results.append(
                                    Result(
                                        session=session,
                                        term=term,
                                        current_class=stu.current_class,
                                        subject=subject,
                                        student=stu
                                    )
                                )
                Result.objects.bulk_create(results)
                return redirect('edit-results')
        # Step 1: students were just selected on the first page.
        id_list = request.POST.getlist('students')
        if id_list:
            form = CreateResults(initial={"session": request.current_session, "term":request.current_term})
            studentlist = ','.join(id_list)
            return render(request, 'result/create_result_page2.html', {"students": studentlist, "form": form, "count":len(id_list)})
        else:
            messages.warning(request, "You didnt select any student.")
    return render(request, 'result/create_result.html', {"students": students})
@login_required
def add_score(request):
    """Two-step flow for entering scores per class.

    Step 1: pick a class from the class list; step 2 renders an editable
    result formset for that class, which a POST containing 'finish' saves.
    """
    class_list = StudentClass.objects.all()
    if request.method == 'POST':
        if 'finish' in request.POST:
            # Final submit: persist the edited scores.
            form = EditResults(request.POST)
            if form.is_valid():
                form.save()
                messages.success(request, 'Results successfully updated')
                return redirect('view-results')
        else:
            if "current_class" in request.POST:
                # A single class was chosen: show its result formset.
                class_name = request.POST['current_class']
                results = Result.objects.filter(
                    session=request.current_session, term=request.current_term,current_class=class_name)
                form = EditResults(queryset=results)
                return render(request, 'result/edit_results2.html', {"formset": form})
        # Fallback: class(es) selected from the list page.
        class_id = request.POST.getlist('current_class')
        # (leftover debug print(class_id) removed)
        if class_id:
            form = CreateResultCLass(initial={"session": request.current_session, "term":request.current_term})
            class_select = ','.join(class_id)
            return render(request, 'result/edit_results2.html',
                          {"current_class": class_select, "form": form, "count": len(class_id)})
        else:
            messages.warning(request, "You didnt select any class.")
    return render(request, 'result/class_list.html', {"class_list": class_list})
def edit_results(request):
    """Show the editable result formset; persist it on a valid POST.

    GET renders a formset over the current session/term results; a valid
    POST saves the formset and redirects back to this view.
    """
    if request.method != 'POST':
        pending = Result.objects.filter(
            session=request.current_session, term=request.current_term)
        formset = EditResults(queryset=pending)
        return render(request, 'result/edit_results.html', {"formset": formset})
    formset = EditResults(request.POST)
    if not formset.is_valid():
        # Re-render with the bound formset so validation errors are shown.
        return render(request, 'result/edit_results.html', {"formset": formset})
    formset.save()
    messages.success(request, 'Results successfully updated')
    return redirect('edit-results')
@login_required
def all_results_view_class(request):
    """Render per-student averages for the current session/term.

    Builds, for every student, the list of their subject results plus the
    average test score, average exam score, and the rounded overall mean.
    """
    results = Result.objects.filter(
        session=request.current_session, term=request.current_term)
    bulk = {}
    # NOTE(review): O(n^2) — the inner loop rescans all results for every
    # result row, and each student's bulk entry is recomputed (to the same
    # value) once per row belonging to them.
    for result in results:
        test_total = 0
        exam_total = 0
        subjects = []
        for subject in results:
            if subject.student == result.student:
                subjects.append(subject)
                # Scores are coerced to float in place so the arithmetic
                # below works uniformly.
                subject.test_score = float(subject.test_score)
                subject.exam_score = float(subject.exam_score)
                test_total = (test_total + subject.test_score)
                exam_total = (exam_total + subject.exam_score)
        # Averages over the student's subjects (len(subjects) >= 1 here,
        # since the student has at least the current `result` row).
        test_total = test_total / len(subjects)
        exam_total = exam_total / len(subjects)
        bulk[result.student.id] = {
            "student": result.student,
            "subjects": subjects,
            "test_total": test_total,
            "exam_total": exam_total,
            "total_total": round((test_total + exam_total) / 2, 2)
        }
    context = {
        "results": bulk
    }
    return render(request, 'result/all_results.html', context)
def score_grade(score):
    """Map a 10-point score to its Vietnamese grade label.

    Bands: [8, 10] -> 'Giỏi' (excellent), [6.5, 8) -> 'Khá' (good),
    [5, 6.5) -> 'Trung Bình' (average), [0, 5) -> 'Không Đạt' (fail);
    anything outside [0, 10] -> 'Invalid Score'.
    """
    if 8 <= score <= 10:
        return 'Giỏi'
    if 6.5 <= score < 8:
        return 'Khá'
    if 5 <= score < 6.5:
        return 'Trung Bình'
    if 0 <= score < 5:
        return 'Không Đạt'
    return "Invalid Score"
@login_required
def all_results_view(request):
    """Render, per class, the share of students graded at least 'Trung Bình'.

    Aggregates every student's average score across the current
    session/term, grades it with score_grade(), and reports per class the
    count and percentage of students in the top three bands.
    """
    results = Result.objects.filter(
        session=request.current_session, term=request.current_term)
    bulk = {}

    def find_student(arr, target):
        # Linear search for `target` in column 0 of `arr`, skipping the
        # sentinel row at index 0; returns the row index or -1.
        for i in range(1, len(arr)):
            if arr[i][0] == target:
                return i
        return -1
    # NOTE(review): `grade`, `t` and `x` below are computed but never used.
    grade = []
    t = len(results)
    classlist = [] # class names seen so far
    grading_class = [["", 0, 0, 0, 0]] # sentinel row: [class name, good count, student count, unused, unused]
    std = [["example", 0, 0, "A", "class"]] # sentinel row: [student, average score, count, grading, class]
    for result in results:
        test_class = 0
        exam_class = 0
        total_average = 0
        total_total = 0
        class_member = []
        if result.current_class not in classlist:
            classlist.append(result.current_class)
            grading_class.append([classlist[-1], 0, 0, 0, 0])
        # NOTE(review): this inner pass runs once per result row, so each
        # student's running average in `std` is folded repeatedly; the
        # count-reset trick below keeps slot [1] a two-way running mean.
        for student in results:
            grade.append(result.current_class)
            if student.current_class == result.current_class:
                class_member.append(student.student)
                if find_student(std, student.student) == -1 or len(std) == 1:
                    std.append([student.student, 0, 0, "", student.current_class])
                exam_class += student.exam_score
                test_class += student.test_score
                total_total = (student.exam_score + student.test_score) / 2
                position_student_in_std = find_student(std, student.student)
                std[position_student_in_std][1] += total_total
                std[position_student_in_std][2] += 1
                if std[position_student_in_std][2] == 2:
                    std[position_student_in_std][2] = 1
                    std[position_student_in_std][1] /= 2
    # Grade every student and tally per-class counts; the three `if` blocks
    # all bump the same "good" counter, i.e. any band above 'Không Đạt'
    # counts towards the class pass total.
    for i in range(1, len(std)):
        std[i][3] = score_grade(std[i][1])
        for j in range(1, len(grading_class)):
            if std[i][-1] == grading_class[j][0]:
                grading_class[j][2] += 1
                if std[i][3] == "Giỏi":
                    grading_class[j][1] += 1
                if std[i][3] == "Khá":
                    grading_class[j][1] += 1
                if std[i][3] == "Trung Bình":
                    grading_class[j][1] += 1
    x = len(std)
    # Build the template context: one entry per class with its pass stats.
    for i in range(1, len(grading_class)):
        if grading_class[i][2] == 0:
            percent=0
        else:
            percent=int((grading_class[i][1]/(grading_class[i][2]))*100)
        bulk[grading_class[i][0]] = {
            "name_class": grading_class[i][0],
            "term": request.current_term,
            "percent": percent,
            "good": grading_class[i][1],
            "SL":grading_class[i][2]
        }
    context = {
        "results": bulk
    }
    return render(request, 'result/all_results_class.html', context)
def all_result_view_subject(request):
    """Render, per class, the pass rate for one chosen subject.

    A valid POST of GetResutlSubjectForm selects (subject, term, session);
    for each class with matching results, counts students whose total score
    is at least 5 and reports the count and percentage.
    """
    bulk = {}
    if request.method == 'POST':
        form = GetResutlSubjectForm(request.POST)
        if form.is_valid():
            subjects = form.cleaned_data['subjects']
            term=form.cleaned_data['term']
            session=form.cleaned_data['session']
            results=Result.objects.filter(term=term,session=session,subject=subjects)
            # Distinct class ids that have results for this subject.
            list_class=list(results.values_list('current_class', flat=True).distinct())
            name_class=""
            for id_class in list_class:
                # NOTE(review): leftover debug print.
                print(id_class)
                number_class=0
                good_member=0
                for result in results:
                    if result.current_class.id==id_class:
                        name_class=result.current_class
                        number_class+=1
                        score_student=(result.total_score())
                        # 5/10 is the pass threshold.
                        if score_student>=5:
                            good_member+=1
                # NOTE(review): leftover debug print.
                print(subjects)
                # number_class >= 1 here because id_class came from `results`.
                bulk[id_class] = {
                    "name_subject":subjects,
                    "name_class": name_class,
                    "term": request.current_term,
                    "percent": int(good_member/number_class*100),
                    "good": good_member,
                    "SL":number_class
                }
            # NOTE(review): leftover debug print.
            print(bulk)
            context = {
                "results": bulk,
                "form":form
            }
            return render(request, 'result/result_subject.html', context)
    # GET (or invalid POST): show a fresh form preset to the current period.
    form = GetResutlSubjectForm(initial={"session": request.current_session, "term": request.current_term})
    return render(request, 'result/result_subject.html', {"form": form})
| [
"django.shortcuts.render",
"apps.students.models.Student.objects.get",
"apps.students.models.Student.objects.all",
"django.contrib.messages.warning",
"apps.corecode.models.StudentClass.objects.all",
"django.shortcuts.redirect",
"django.contrib.messages.success"
] | [((535, 556), 'apps.students.models.Student.objects.all', 'Student.objects.all', ([], {}), '()\n', (554, 556), False, 'from apps.students.models import Student\n'), ((2096, 2164), 'django.shortcuts.render', 'render', (['request', '"""result/create_result.html"""', "{'students': students}"], {}), "(request, 'result/create_result.html', {'students': students})\n", (2102, 2164), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((2218, 2244), 'apps.corecode.models.StudentClass.objects.all', 'StudentClass.objects.all', ([], {}), '()\n', (2242, 2244), False, 'from apps.corecode.models import AcademicSession, AcademicTerm, StudentClass\n'), ((3333, 3402), 'django.shortcuts.render', 'render', (['request', '"""result/class_list.html"""', "{'class_list': class_list}"], {}), "(request, 'result/class_list.html', {'class_list': class_list})\n", (3339, 3402), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((3809, 3871), 'django.shortcuts.render', 'render', (['request', '"""result/edit_results.html"""', "{'formset': form}"], {}), "(request, 'result/edit_results.html', {'formset': form})\n", (3815, 3871), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((4804, 4855), 'django.shortcuts.render', 'render', (['request', '"""result/all_results.html"""', 'context'], {}), "(request, 'result/all_results.html', context)\n", (4810, 4855), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((7567, 7624), 'django.shortcuts.render', 'render', (['request', '"""result/all_results_class.html"""', 'context'], {}), "(request, 'result/all_results_class.html', context)\n", (7573, 7624), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((9023, 9084), 'django.shortcuts.render', 'render', (['request', '"""result/result_subject.html"""', "{'form': form}"], {}), "(request, 'result/result_subject.html', {'form': form})\n", (9029, 9084), False, 
'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((2028, 2086), 'django.contrib.messages.warning', 'messages.warning', (['request', '"""You didnt select any student."""'], {}), "(request, 'You didnt select any student.')\n", (2044, 2086), False, 'from django.contrib import messages\n'), ((3267, 3323), 'django.contrib.messages.warning', 'messages.warning', (['request', '"""You didnt select any class."""'], {}), "(request, 'You didnt select any class.')\n", (3283, 3323), False, 'from django.contrib import messages\n'), ((3549, 3606), 'django.contrib.messages.success', 'messages.success', (['request', '"""Results successfully updated"""'], {}), "(request, 'Results successfully updated')\n", (3565, 3606), False, 'from django.contrib import messages\n'), ((3620, 3644), 'django.shortcuts.redirect', 'redirect', (['"""edit-results"""'], {}), "('edit-results')\n", (3628, 3644), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((8853, 8907), 'django.shortcuts.render', 'render', (['request', '"""result/result_subject.html"""', 'context'], {}), "(request, 'result/result_subject.html', context)\n", (8859, 8907), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((1627, 1651), 'django.shortcuts.redirect', 'redirect', (['"""edit-results"""'], {}), "('edit-results')\n", (1635, 1651), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((2403, 2460), 'django.contrib.messages.success', 'messages.success', (['request', '"""Results successfully updated"""'], {}), "(request, 'Results successfully updated')\n", (2419, 2460), False, 'from django.contrib import messages\n'), ((2476, 2500), 'django.shortcuts.redirect', 'redirect', (['"""view-results"""'], {}), "('view-results')\n", (2484, 2500), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((2803, 2866), 'django.shortcuts.render', 'render', (['request', '"""result/edit_results2.html"""', 
"{'formset': form}"], {}), "(request, 'result/edit_results2.html', {'formset': form})\n", (2809, 2866), False, 'from django.shortcuts import render, HttpResponseRedirect, redirect\n'), ((987, 1018), 'apps.students.models.Student.objects.get', 'Student.objects.get', ([], {'pk': 'student'}), '(pk=student)\n', (1006, 1018), False, 'from apps.students.models import Student\n')] |
# coding: utf-8
"""
NamSor API v2
NamSor API v2 : enpoints to process personal names (gender, cultural origin or ethnicity) in all alphabets or languages. Use GET methods for small tests, but prefer POST methods for higher throughput (batch processing of up to 100 names at a time). Need something you can't find here? We have many more features coming soon. Let us know, we'll do our best to add it! # noqa: E501
OpenAPI spec version: 2.0.10
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class FirstLastNameUSRaceEthnicityOut(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'id': 'str',
'first_name': 'str',
'last_name': 'str',
'race_ethnicity_alt': 'str',
'race_ethnicity': 'str',
'score': 'float',
'race_ethnicities_top': 'list[str]',
'probability_calibrated': 'float',
'probability_alt_calibrated': 'float'
}
attribute_map = {
'id': 'id',
'first_name': 'firstName',
'last_name': 'lastName',
'race_ethnicity_alt': 'raceEthnicityAlt',
'race_ethnicity': 'raceEthnicity',
'score': 'score',
'race_ethnicities_top': 'raceEthnicitiesTop',
'probability_calibrated': 'probabilityCalibrated',
'probability_alt_calibrated': 'probabilityAltCalibrated'
}
def __init__(self, id=None, first_name=None, last_name=None, race_ethnicity_alt=None, race_ethnicity=None, score=None, race_ethnicities_top=None, probability_calibrated=None, probability_alt_calibrated=None): # noqa: E501
"""FirstLastNameUSRaceEthnicityOut - a model defined in OpenAPI""" # noqa: E501
self._id = None
self._first_name = None
self._last_name = None
self._race_ethnicity_alt = None
self._race_ethnicity = None
self._score = None
self._race_ethnicities_top = None
self._probability_calibrated = None
self._probability_alt_calibrated = None
self.discriminator = None
if id is not None:
self.id = id
if first_name is not None:
self.first_name = first_name
if last_name is not None:
self.last_name = last_name
if race_ethnicity_alt is not None:
self.race_ethnicity_alt = race_ethnicity_alt
if race_ethnicity is not None:
self.race_ethnicity = race_ethnicity
if score is not None:
self.score = score
if race_ethnicities_top is not None:
self.race_ethnicities_top = race_ethnicities_top
if probability_calibrated is not None:
self.probability_calibrated = probability_calibrated
if probability_alt_calibrated is not None:
self.probability_alt_calibrated = probability_alt_calibrated
@property
def id(self):
"""Gets the id of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:return: The id of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this FirstLastNameUSRaceEthnicityOut.
:param id: The id of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: str
"""
self._id = id
@property
def first_name(self):
"""Gets the first_name of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:return: The first_name of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: str
"""
return self._first_name
@first_name.setter
def first_name(self, first_name):
"""Sets the first_name of this FirstLastNameUSRaceEthnicityOut.
:param first_name: The first_name of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: str
"""
self._first_name = first_name
@property
def last_name(self):
"""Gets the last_name of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:return: The last_name of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: str
"""
return self._last_name
@last_name.setter
def last_name(self, last_name):
"""Sets the last_name of this FirstLastNameUSRaceEthnicityOut.
:param last_name: The last_name of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: str
"""
self._last_name = last_name
@property
def race_ethnicity_alt(self):
"""Gets the race_ethnicity_alt of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
Second most likely US 'race'/ethnicity # noqa: E501
:return: The race_ethnicity_alt of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: str
"""
return self._race_ethnicity_alt
@race_ethnicity_alt.setter
def race_ethnicity_alt(self, race_ethnicity_alt):
"""Sets the race_ethnicity_alt of this FirstLastNameUSRaceEthnicityOut.
Second most likely US 'race'/ethnicity # noqa: E501
:param race_ethnicity_alt: The race_ethnicity_alt of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: str
"""
allowed_values = ["W_NL", "HL", "A", "B_NL"] # noqa: E501
if race_ethnicity_alt not in allowed_values:
raise ValueError(
"Invalid value for `race_ethnicity_alt` ({0}), must be one of {1}" # noqa: E501
.format(race_ethnicity_alt, allowed_values)
)
self._race_ethnicity_alt = race_ethnicity_alt
@property
def race_ethnicity(self):
"""Gets the race_ethnicity of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
Most likely US 'race'/ethnicity # noqa: E501
:return: The race_ethnicity of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: str
"""
return self._race_ethnicity
@race_ethnicity.setter
def race_ethnicity(self, race_ethnicity):
"""Sets the race_ethnicity of this FirstLastNameUSRaceEthnicityOut.
Most likely US 'race'/ethnicity # noqa: E501
:param race_ethnicity: The race_ethnicity of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: str
"""
allowed_values = ["W_NL", "HL", "A", "B_NL"] # noqa: E501
if race_ethnicity not in allowed_values:
raise ValueError(
"Invalid value for `race_ethnicity` ({0}), must be one of {1}" # noqa: E501
.format(race_ethnicity, allowed_values)
)
self._race_ethnicity = race_ethnicity
@property
def score(self):
"""Gets the score of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
Compatibility to NamSor_v1 Origin score value # noqa: E501
:return: The score of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: float
"""
return self._score
@score.setter
def score(self, score):
"""Sets the score of this FirstLastNameUSRaceEthnicityOut.
Compatibility to NamSor_v1 Origin score value # noqa: E501
:param score: The score of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: float
"""
self._score = score
@property
def race_ethnicities_top(self):
"""Gets the race_ethnicities_top of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
List 'race'/ethnicities # noqa: E501
:return: The race_ethnicities_top of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: list[str]
"""
return self._race_ethnicities_top
@race_ethnicities_top.setter
def race_ethnicities_top(self, race_ethnicities_top):
"""Sets the race_ethnicities_top of this FirstLastNameUSRaceEthnicityOut.
List 'race'/ethnicities # noqa: E501
:param race_ethnicities_top: The race_ethnicities_top of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: list[str]
"""
self._race_ethnicities_top = race_ethnicities_top
@property
def probability_calibrated(self):
"""Gets the probability_calibrated of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:return: The probability_calibrated of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: float
"""
return self._probability_calibrated
@probability_calibrated.setter
def probability_calibrated(self, probability_calibrated):
"""Sets the probability_calibrated of this FirstLastNameUSRaceEthnicityOut.
:param probability_calibrated: The probability_calibrated of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: float
"""
self._probability_calibrated = probability_calibrated
@property
def probability_alt_calibrated(self):
"""Gets the probability_alt_calibrated of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:return: The probability_alt_calibrated of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:rtype: float
"""
return self._probability_alt_calibrated
@probability_alt_calibrated.setter
def probability_alt_calibrated(self, probability_alt_calibrated):
"""Sets the probability_alt_calibrated of this FirstLastNameUSRaceEthnicityOut.
:param probability_alt_calibrated: The probability_alt_calibrated of this FirstLastNameUSRaceEthnicityOut. # noqa: E501
:type: float
"""
self._probability_alt_calibrated = probability_alt_calibrated
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, FirstLastNameUSRaceEthnicityOut):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"six.iteritems"
] | [((10186, 10219), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (10199, 10219), False, 'import six\n')] |
"""
Res2Net for ImageNet-1K, implemented in Gluon.
Original paper: 'Res2Net: A New Multi-scale Backbone Architecture,' https://arxiv.org/abs/1904.01169.
"""
__all__ = ['Res2Net', 'res2net50_w14_s8', 'res2net50_w26_s8']
import os
from mxnet import cpu
from mxnet.gluon import nn, HybridBlock
from mxnet.gluon.contrib.nn import Identity
from .common import conv1x1, conv3x3, conv1x1_block
from .resnet import ResInitBlock
from .preresnet import PreResActivation
class HierarchicalConcurrent(nn.HybridSequential):
"""
A container for hierarchical concatenation of blocks with parameters.
Parameters:
----------
axis : int, default 1
The axis on which to concatenate the outputs.
multi_input : bool, default False
Whether input is multiple.
"""
def __init__(self,
axis=1,
multi_input=False,
**kwargs):
super(HierarchicalConcurrent, self).__init__(**kwargs)
self.axis = axis
self.multi_input = multi_input
def hybrid_forward(self, F, x):
out = []
y_prev = None
if self.multi_input:
xs = F.split(x, axis=self.axis, num_outputs=len(self._children.values()))
for i, block in enumerate(self._children.values()):
if self.multi_input:
y = block(xs[i])
else:
y = block(x)
if y_prev is not None:
y = y + y_prev
out.append(y)
y_prev = y
out = F.concat(*out, dim=self.axis)
return out
class Res2NetUnit(HybridBlock):
"""
Res2Net unit.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
strides : int or tuple/list of 2 int
Strides of the branch convolution layers.
width : int
Width of filters.
scale : int
Number of scale.
bn_use_global_stats : bool
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
"""
def __init__(self,
in_channels,
out_channels,
strides,
width,
scale,
bn_use_global_stats,
**kwargs):
super(Res2NetUnit, self).__init__(**kwargs)
self.scale = scale
downsample = (strides != 1)
self.resize_identity = (in_channels != out_channels) or downsample
mid_channels = width * scale
brn_channels = width
with self.name_scope():
self.reduce_conv = conv1x1_block(
in_channels=in_channels,
out_channels=mid_channels,
bn_use_global_stats=bn_use_global_stats)
self.branches = HierarchicalConcurrent(axis=1, multi_input=True, prefix="")
if downsample:
self.branches.add(conv1x1(
in_channels=brn_channels,
out_channels=brn_channels,
strides=strides))
else:
self.branches.add(Identity())
for i in range(scale - 1):
self.branches.add(conv3x3(
in_channels=brn_channels,
out_channels=brn_channels,
strides=strides))
self.preactiv = PreResActivation(in_channels=mid_channels)
self.merge_conv = conv1x1_block(
in_channels=mid_channels,
out_channels=out_channels,
bn_use_global_stats=bn_use_global_stats,
activation=None)
if self.resize_identity:
self.identity_conv = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
bn_use_global_stats=bn_use_global_stats,
activation=None)
self.activ = nn.Activation("relu")
def hybrid_forward(self, F, x):
if self.resize_identity:
identity = self.identity_conv(x)
else:
identity = x
y = self.reduce_conv(x)
y = self.branches(y)
y = self.preactiv(y)
y = self.merge_conv(y)
y = y + identity
y = self.activ(y)
return y
class Res2Net(HybridBlock):
"""
Res2Net model from 'Res2Net: A New Multi-scale Backbone Architecture,' https://arxiv.org/abs/1904.01169.
Parameters:
----------
channels : list of list of int
Number of output channels for each unit.
init_block_channels : int
Number of output channels for the initial unit.
width : int
Width of filters.
scale : int
Number of scale.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
Useful for fine-tuning.
in_channels : int, default 3
Number of input channels.
in_size : tuple of two ints, default (224, 224)
Spatial size of the expected input image.
classes : int, default 1000
Number of classification classes.
"""
def __init__(self,
channels,
init_block_channels,
width,
scale,
bn_use_global_stats=False,
in_channels=3,
in_size=(224, 224),
classes=1000,
**kwargs):
super(Res2Net, self).__init__(**kwargs)
self.in_size = in_size
self.classes = classes
with self.name_scope():
self.features = nn.HybridSequential(prefix="")
self.features.add(ResInitBlock(
in_channels=in_channels,
out_channels=init_block_channels,
bn_use_global_stats=bn_use_global_stats))
in_channels = init_block_channels
for i, channels_per_stage in enumerate(channels):
stage = nn.HybridSequential(prefix="stage{}_".format(i + 1))
with stage.name_scope():
for j, out_channels in enumerate(channels_per_stage):
strides = 2 if (j == 0) and (i != 0) else 1
stage.add(Res2NetUnit(
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
width=width,
scale=scale,
bn_use_global_stats=bn_use_global_stats))
in_channels = out_channels
self.features.add(stage)
self.features.add(nn.AvgPool2D(
pool_size=7,
strides=1))
self.output = nn.HybridSequential(prefix="")
self.output.add(nn.Flatten())
self.output.add(nn.Dense(
units=classes,
in_units=in_channels))
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
def get_res2net(blocks,
width,
scale,
model_name=None,
pretrained=False,
ctx=cpu(),
root=os.path.join("~", ".mxnet", "models"),
**kwargs):
"""
Create Res2Net model with specific parameters.
Parameters:
----------
blocks : int
Number of blocks.
width : int
Width of filters.
scale : int
Number of scale.
model_name : str or None, default None
Model name for loading pretrained model.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
bottleneck = True
if blocks == 50:
layers = [3, 4, 6, 3]
elif blocks == 101:
layers = [3, 4, 23, 3]
elif blocks == 152:
layers = [3, 8, 36, 3]
else:
raise ValueError("Unsupported Res2Net with number of blocks: {}".format(blocks))
assert (sum(layers) * 3 + 2 == blocks)
init_block_channels = 64
channels_per_layers = [64, 128, 256, 512]
if bottleneck:
bottleneck_factor = 4
channels_per_layers = [ci * bottleneck_factor for ci in channels_per_layers]
channels = [[ci] * li for (ci, li) in zip(channels_per_layers, layers)]
net = Res2Net(
channels=channels,
init_block_channels=init_block_channels,
width=width,
scale=scale,
**kwargs)
if pretrained:
if (model_name is None) or (not model_name):
raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
from .model_store import get_model_file
net.load_parameters(
filename=get_model_file(
model_name=model_name,
local_model_store_dir_path=root),
ctx=ctx)
return net
def res2net50_w14_s8(**kwargs):
"""
Res2Net-50 (14wx8s) model from 'Res2Net: A New Multi-scale Backbone Architecture,' https://arxiv.org/abs/1904.01169.
Parameters:
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_res2net(blocks=50, width=14, scale=8, model_name="res2net50_w14_s8", **kwargs)
def res2net50_w26_s8(**kwargs):
"""
Res2Net-50 (26wx8s) model from 'Res2Net: A New Multi-scale Backbone Architecture,' https://arxiv.org/abs/1904.01169.
Parameters:
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_res2net(blocks=50, width=26, scale=8, model_name="res2net50_w14_s8", **kwargs)
def _test():
import numpy as np
import mxnet as mx
pretrained = False
models = [
res2net50_w14_s8,
res2net50_w26_s8,
]
for model in models:
net = model(pretrained=pretrained)
ctx = mx.cpu()
if not pretrained:
net.initialize(ctx=ctx)
# net.hybridize()
net_params = net.collect_params()
weight_count = 0
for param in net_params.values():
if (param.shape is None) or (not param._differentiable):
continue
weight_count += np.prod(param.shape)
print("m={}, {}".format(model.__name__, weight_count))
assert (model != res2net50_w14_s8 or weight_count == 8231732)
assert (model != res2net50_w26_s8 or weight_count == 11432660)
x = mx.nd.zeros((1, 3, 224, 224), ctx=ctx)
y = net(x)
assert (y.shape == (1, 1000))
if __name__ == "__main__":
_test()
| [
"numpy.prod",
"mxnet.nd.zeros",
"mxnet.gluon.nn.Dense",
"mxnet.cpu",
"mxnet.gluon.contrib.nn.Identity",
"os.path.join",
"mxnet.gluon.nn.Flatten",
"mxnet.gluon.nn.AvgPool2D",
"mxnet.gluon.nn.HybridSequential",
"mxnet.gluon.nn.Activation"
] | [((7313, 7318), 'mxnet.cpu', 'cpu', ([], {}), '()\n', (7316, 7318), False, 'from mxnet import cpu\n'), ((7341, 7378), 'os.path.join', 'os.path.join', (['"""~"""', '""".mxnet"""', '"""models"""'], {}), "('~', '.mxnet', 'models')\n", (7353, 7378), False, 'import os\n'), ((10602, 10610), 'mxnet.cpu', 'mx.cpu', ([], {}), '()\n', (10608, 10610), True, 'import mxnet as mx\n'), ((11170, 11208), 'mxnet.nd.zeros', 'mx.nd.zeros', (['(1, 3, 224, 224)'], {'ctx': 'ctx'}), '((1, 3, 224, 224), ctx=ctx)\n', (11181, 11208), True, 'import mxnet as mx\n'), ((3995, 4016), 'mxnet.gluon.nn.Activation', 'nn.Activation', (['"""relu"""'], {}), "('relu')\n", (4008, 4016), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((5699, 5729), 'mxnet.gluon.nn.HybridSequential', 'nn.HybridSequential', ([], {'prefix': '""""""'}), "(prefix='')\n", (5718, 5729), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((6863, 6893), 'mxnet.gluon.nn.HybridSequential', 'nn.HybridSequential', ([], {'prefix': '""""""'}), "(prefix='')\n", (6882, 6893), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((10932, 10952), 'numpy.prod', 'np.prod', (['param.shape'], {}), '(param.shape)\n', (10939, 10952), True, 'import numpy as np\n'), ((6765, 6801), 'mxnet.gluon.nn.AvgPool2D', 'nn.AvgPool2D', ([], {'pool_size': '(7)', 'strides': '(1)'}), '(pool_size=7, strides=1)\n', (6777, 6801), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((6922, 6934), 'mxnet.gluon.nn.Flatten', 'nn.Flatten', ([], {}), '()\n', (6932, 6934), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((6964, 7009), 'mxnet.gluon.nn.Dense', 'nn.Dense', ([], {'units': 'classes', 'in_units': 'in_channels'}), '(units=classes, in_units=in_channels)\n', (6972, 7009), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((3138, 3148), 'mxnet.gluon.contrib.nn.Identity', 'Identity', ([], {}), '()\n', (3146, 3148), False, 'from mxnet.gluon.contrib.nn import Identity\n')] |
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.11.4
# kernelspec:
# display_name: Python 3
# name: python3
# ---
# +
from pathlib import Path
import geopandas as gpd
import pandas as pd
from shapely.geometry import box
# +
data_dir = Path("data")
FILEPATHS = {
"rail_links": data_dir / "external" / "Dublin_Rail_Links" / "Dublin_Rail_Links.shp",
"small_area_boundaries": data_dir
/ "external"
/ "dublin_small_area_boundaries_in_routing_keys.gpkg",
"rail_schedules": data_dir
/ "external"
/ "Transport Emissions - Combined Rail Schedules.xlsx",
}
# + [markdown]
# # Read input data
#
# - NTA Rail Line Statistics
# - 2016 Small Area Boundaries
# - Dublin Boundary
#
# Notes:
# - Convert spatial data to ITM or epsg=2157 so both in same Coordinate Reference System
# - **Don't forget to Mount Google Drive as otherwise this Notebook won't be able to access the data**
# +
rail_lines = gpd.read_file(FILEPATHS["rail_links"]).to_crs(epsg=2157)
# + id="BMPv0-FLWtVu"
small_area_boundaries = gpd.read_file(FILEPATHS["small_area_boundaries"])[
["small_area", "geometry"]
].to_crs(epsg=2157)
# +
dublin_bounding_box = (
gpd.GeoSeries(box(695000, 712500, 740000, 771000), crs=2157)
.rename("geometry")
.to_frame()
)
# +
total_journeys_linkid = pd.read_excel(FILEPATHS["rail_schedules"])
# +
def sjoin_center_inside(dfA, dfB):
"""Join where center of A intersects B"""
dfA_center = dfA.copy()
dfA_center.geometry = dfA.geometry.representative_point()
dfA_sjoined = gpd.sjoin(dfA_center, dfB, op="intersects")
return dfA_sjoined.assign(geometry=dfA.geometry)
dublin_small_area_boundaries = sjoin_center_inside(
small_area_boundaries, dublin_bounding_box
)
# + [markdown]
# # Get Total Journeys
# +
total_journeys = rail_lines.merge(total_journeys_linkid, on="linkID")
# +
total_journeys
# + [markdown]
# # Distribute Rail lines among Small Areas
# + [markdown]
# ## Measure line lengths in each Small Area
# +
total_journeys_per_small_area = gpd.overlay(
total_journeys, small_area_boundaries, "union"
)
# +
total_journeys_per_small_area["line_length_km"] = (
total_journeys_per_small_area.geometry.length * 10 ** -3
)
# +
# ## Link Small Areas to Number of Journeys for linkID
# +
total_journeys_per_small_area["DART_total"] = (
total_journeys_per_small_area["DART_northbound"]
+ total_journeys_per_small_area["DART_southbound"]
)
total_journeys_per_small_area["LUAS_total"] = (
total_journeys_per_small_area["LUAS_northbound"]
+ total_journeys_per_small_area["LUAS_southbound"]
)
total_journeys_per_small_area["Commuter_total"] = (
total_journeys_per_small_area["Commuter_northbound"]
+ total_journeys_per_small_area["Commuter_southbound"]
)
total_journeys_per_small_area["Intercity_total"] = (
total_journeys_per_small_area["Intercity_northbound"]
+ total_journeys_per_small_area["Intercity_southbound"]
)
# +
diesel_train_kgCO2_per_km = 8.057183256
dart_kgCO2_per_km = 3.793376027
luas_kgCO2_per_km = 1.825367372
diesel_train_kWh_per_km = 30.53119839
dart_kWh_per_km = 11.68991071
luas_kWh_per_km = 5.625169096
# +
total_journeys_per_small_area["DART_MWh"] = (
total_journeys_per_small_area["DART_total"]
* total_journeys_per_small_area["line_length_km"]
* dart_kWh_per_km
* 10 ** -3
)
total_journeys_per_small_area["DART_tCO2"] = (
total_journeys_per_small_area["DART_total"]
* total_journeys_per_small_area["line_length_km"]
* dart_kgCO2_per_km
* 10 ** -3
)
total_journeys_per_small_area["LUAS_MWh"] = (
total_journeys_per_small_area["LUAS_total"]
* total_journeys_per_small_area["line_length_km"]
* luas_kWh_per_km
* 10 ** -3
)
total_journeys_per_small_area["LUAS_tCO2"] = (
total_journeys_per_small_area["LUAS_total"]
* total_journeys_per_small_area["line_length_km"]
* luas_kgCO2_per_km
* 10 ** -3
)
total_journeys_per_small_area["Commuter_MWh"] = (
total_journeys_per_small_area["Commuter_total"]
* total_journeys_per_small_area["line_length_km"]
* diesel_train_kWh_per_km
* 10 ** -3
)
total_journeys_per_small_area["Commuter_tCO2"] = (
total_journeys_per_small_area["Commuter_total"]
* total_journeys_per_small_area["line_length_km"]
* diesel_train_kgCO2_per_km
* 10 ** -3
)
total_journeys_per_small_area["Intercity_MWh"] = (
total_journeys_per_small_area["Intercity_total"]
* total_journeys_per_small_area["line_length_km"]
* diesel_train_kWh_per_km
* 10 ** -3
)
total_journeys_per_small_area["Intercity_tCO2"] = (
total_journeys_per_small_area["Intercity_total"]
* total_journeys_per_small_area["line_length_km"]
* diesel_train_kgCO2_per_km
* 10 ** -3
)
# +
total_energy = total_journeys_per_small_area["DART_MWh"].sum()
total_energy
# +
total_journeys_per_small_area
# +
total_journeys_per_small_area.to_file(
data_dir / "rail_small_area_statistics.geojson", driver="GeoJSON"
)
# + [markdown]
# # Estimate All-of-Dublin Rail Energy
# +
total_journeys["line_length_km"] = total_journeys.geometry.length * 10 ** -3
# +
total_journeys["DART_total"] = (
total_journeys["DART_northbound"] + total_journeys["DART_southbound"]
)
total_journeys["LUAS_total"] = (
total_journeys["LUAS_northbound"] + total_journeys["LUAS_southbound"]
)
total_journeys["Commuter_total"] = (
total_journeys["Commuter_northbound"] + total_journeys["Commuter_southbound"]
)
total_journeys["Intercity_total"] = (
total_journeys["Intercity_northbound"] + total_journeys["Intercity_southbound"]
)
# +
total_journeys["DART_MWh"] = (
total_journeys["DART_total"]
* total_journeys["line_length_km"]
* dart_kWh_per_km
* 10 ** -3
)
total_journeys["DART_tCO2"] = (
total_journeys["DART_total"]
* total_journeys["line_length_km"]
* dart_kgCO2_per_km
* 10 ** -3
)
total_journeys["LUAS_MWh"] = (
total_journeys["LUAS_total"]
* total_journeys["line_length_km"]
* luas_kWh_per_km
* 10 ** -3
)
total_journeys["LUAS_tCO2"] = (
total_journeys["LUAS_total"]
* total_journeys["line_length_km"]
* luas_kgCO2_per_km
* 10 ** -3
)
total_journeys["Commuter_MWh"] = (
total_journeys["Commuter_total"]
* total_journeys["line_length_km"]
* diesel_train_kWh_per_km
* 10 ** -3
)
total_journeys["Commuter_tCO2"] = (
total_journeys["Commuter_total"]
* total_journeys["line_length_km"]
* diesel_train_kgCO2_per_km
* 10 ** -3
)
total_journeys["Intercity_MWh"] = (
total_journeys["Intercity_total"]
* total_journeys["line_length_km"]
* diesel_train_kWh_per_km
* 10 ** -3
)
total_journeys["Intercity_tCO2"] = (
total_journeys["Intercity_total"]
* total_journeys["line_length_km"]
* diesel_train_kgCO2_per_km
* 10 ** -3
)
# +
total_journeys
# +
for rail_mwh in ["DART_MWh", "LUAS_MWh", "Commuter_MWh", "Intercity_MWh"]:
print(total_journeys[rail_mwh].sum())
| [
"geopandas.sjoin",
"geopandas.read_file",
"pathlib.Path",
"shapely.geometry.box",
"geopandas.overlay",
"pandas.read_excel"
] | [((366, 378), 'pathlib.Path', 'Path', (['"""data"""'], {}), "('data')\n", (370, 378), False, 'from pathlib import Path\n'), ((1416, 1458), 'pandas.read_excel', 'pd.read_excel', (["FILEPATHS['rail_schedules']"], {}), "(FILEPATHS['rail_schedules'])\n", (1429, 1458), True, 'import pandas as pd\n'), ((2145, 2204), 'geopandas.overlay', 'gpd.overlay', (['total_journeys', 'small_area_boundaries', '"""union"""'], {}), "(total_journeys, small_area_boundaries, 'union')\n", (2156, 2204), True, 'import geopandas as gpd\n'), ((1653, 1696), 'geopandas.sjoin', 'gpd.sjoin', (['dfA_center', 'dfB'], {'op': '"""intersects"""'}), "(dfA_center, dfB, op='intersects')\n", (1662, 1696), True, 'import geopandas as gpd\n'), ((1045, 1083), 'geopandas.read_file', 'gpd.read_file', (["FILEPATHS['rail_links']"], {}), "(FILEPATHS['rail_links'])\n", (1058, 1083), True, 'import geopandas as gpd\n'), ((1149, 1198), 'geopandas.read_file', 'gpd.read_file', (["FILEPATHS['small_area_boundaries']"], {}), "(FILEPATHS['small_area_boundaries'])\n", (1162, 1198), True, 'import geopandas as gpd\n'), ((1298, 1333), 'shapely.geometry.box', 'box', (['(695000)', '(712500)', '(740000)', '(771000)'], {}), '(695000, 712500, 740000, 771000)\n', (1301, 1333), False, 'from shapely.geometry import box\n')] |
from unittest import TestCase, skipUnless, mock
from pya import *
import numpy as np
import time
class TestAserver(TestCase):
def setUp(self) -> None:
self.backend = DummyBackend()
self.sig = np.sin(2 * np.pi * 440 * np.linspace(0, 1, 44100))
self.asine = Asig(self.sig, sr=44100, label="test_sine")
def test_default_server(self):
Aserver.startup_default_server(backend=self.backend, bs=512, channels=4)
s = Aserver.default
self.assertEqual(s, Aserver.default)
self.asine.play()
time.sleep(0.5)
s.stop()
self.assertGreater(len(s.stream.samples_out), 0)
sample = s.stream.samples_out[0]
self.assertEqual(sample.shape[0], 512)
self.assertEqual(sample.shape[1], 4)
self.assertAlmostEqual(np.max(sample), 1, places=2)
Aserver.shutdown_default_server()
self.assertIsNone(s.stream)
def test_play_float(self):
s = Aserver(backend=self.backend)
s.boot()
self.asine.play(server=s)
time.sleep(0.5)
s.stop()
self.assertGreater(len(s.stream.samples_out), 0)
sample = s.stream.samples_out[0]
self.assertEqual(sample.shape[0], s.bs)
self.assertEqual(sample.shape[1], s.channels)
self.assertAlmostEqual(np.max(sample), 1, places=2)
s.quit()
def test_repr(self):
s = Aserver(backend=self.backend)
s.boot()
print(s)
s.quit()
def test_get_devices(self):
s = Aserver(backend=self.backend)
d_in, d_out = s.get_devices(verbose=True)
self.assertListEqual(d_in, d_out)
self.assertListEqual(d_in, self.backend.dummy_devices)
def test_boot_twice(self):
s = Aserver(backend=self.backend)
s.boot()
self.assertEqual(s.boot(), -1)
s.quit()
def test_quit_not_booted(self):
s = Aserver(backend=self.backend)
self.assertEqual(s.quit(), -1)
def test_incompatible_backend(self):
s = Aserver(backend=self.backend)
sig = np.sin(2 * np.pi * 440 * np.linspace(0, 1, 44100) * np.iinfo(np.int16).max).astype(np.int16)
asine = Asig(sig, sr=44100)
s.boot()
asine.play(server=s)
s.quit()
| [
"numpy.linspace",
"numpy.iinfo",
"time.sleep",
"numpy.max"
] | [((555, 570), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (565, 570), False, 'import time\n'), ((1049, 1064), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1059, 1064), False, 'import time\n'), ((809, 823), 'numpy.max', 'np.max', (['sample'], {}), '(sample)\n', (815, 823), True, 'import numpy as np\n'), ((1313, 1327), 'numpy.max', 'np.max', (['sample'], {}), '(sample)\n', (1319, 1327), True, 'import numpy as np\n'), ((240, 264), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(44100)'], {}), '(0, 1, 44100)\n', (251, 264), True, 'import numpy as np\n'), ((2096, 2120), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(44100)'], {}), '(0, 1, 44100)\n', (2107, 2120), True, 'import numpy as np\n'), ((2123, 2141), 'numpy.iinfo', 'np.iinfo', (['np.int16'], {}), '(np.int16)\n', (2131, 2141), True, 'import numpy as np\n')] |
import sys
from zeep import Client
from PythonUtils.text_input import TextInput
from PythonUtils.live_info.display_item import DisplayItem
from PythonUtils.user_input import UserInput
import json
from pathlib import Path
# WSDL location of the LDBWS rail information. The most up to date version is
# detailed here: http://lite.realtime.nationalrail.co.uk/openldbws/
LDBWS_WSDL = "https://lite.realtime.nationalrail.co.uk/OpenLDBWS/wsdl.aspx?ver=2017-10-01"
# Access token to be supplied in the SOAP header in all web service requests
# Token should either be stored under self.token, or in a file named rail_wsdl_token
class RailInfo(DisplayItem):
def __init__(self, expiry_duration, station_code):
DisplayItem.__init__(self, expiry_duration)
self.client = Client(LDBWS_WSDL)
access_token = json.load(open(Path('live_info') / 'rail_wsdl_token.json','r'))['Token']
self.token = {"AccessToken": {"TokenValue": access_token}}
self.station_code = station_code
self.duration = 120
def get_info(self):
dep_board = self.client.service.GetDepartureBoard(15,
self.station_code,
_soapheaders=self.token,
timeWindow=self.duration)
if dep_board:
services = dep_board.trainServices.service
return_string = ""
for service in services:
loc = service.destination.location
dest_name = loc[0].locationName
return_string += "-----------------------------\n"
return_string += service["std"] + " to " + dest_name + "\n"
if service.etd == "On time":
return_string += "This service is on time\n"
elif service.etd == "Delayed":
return_string += "This service is delayed\n"
details = self.client.service.GetServiceDetails(service.serviceID, _soapheaders=self.token)
else:
return_string += "Estimated arrival " + service.etd + "\n"
if service.isCancelled != None:
return_string += "This service is cancelled\n"
else:
return_string += f"There are no services to/from {self.station_code} in the next {self.duration}"
return_string += "-----------------------------\n"
return return_string
if __name__ == "__main__":
tui = TextInput("Which station would you like to see the departures of? "
"Please enter the 3 character station code.")
rc = tui.request_input()
if rc == UserInput.SUCCESS:
rail_client = RailInfo(tui.answer)
print(rail_client.live_departure_string())
| [
"zeep.Client",
"PythonUtils.text_input.TextInput",
"PythonUtils.live_info.display_item.DisplayItem.__init__",
"pathlib.Path"
] | [((2552, 2672), 'PythonUtils.text_input.TextInput', 'TextInput', (['"""Which station would you like to see the departures of? Please enter the 3 character station code."""'], {}), "(\n 'Which station would you like to see the departures of? Please enter the 3 character station code.'\n )\n", (2561, 2672), False, 'from PythonUtils.text_input import TextInput\n'), ((715, 758), 'PythonUtils.live_info.display_item.DisplayItem.__init__', 'DisplayItem.__init__', (['self', 'expiry_duration'], {}), '(self, expiry_duration)\n', (735, 758), False, 'from PythonUtils.live_info.display_item import DisplayItem\n'), ((781, 799), 'zeep.Client', 'Client', (['LDBWS_WSDL'], {}), '(LDBWS_WSDL)\n', (787, 799), False, 'from zeep import Client\n'), ((838, 855), 'pathlib.Path', 'Path', (['"""live_info"""'], {}), "('live_info')\n", (842, 855), False, 'from pathlib import Path\n')] |
# Importing modules: pygame drives all graphics/sound, numpy backs the board
# array, random powers the computer player's move choice.
import pygame
import numpy as np
import random
# Initializing the Pygame module — must run before any display/mixer call below.
pygame.init()
def console_screen():
    """Prompt on the console for the two player names and a music option.

    Starts background music for options '1'/'2', skips music for '3', and
    keeps re-prompting until a valid option is entered.

    Returns:
        tuple[str, str]: the names entered for player 1 and player 2
        (empty strings if input failed before they were read).
    """
    print('Note: Enter nicknames to name the players in the game')
    user = ''
    user2 = ''
    try:
        user = input("Enter the name of player 1(Enter 'Computer 1' if you don't want to be named): ")
        user2 = input("Enter the name of player 2(Enter 'Computer 2' if there is no other player): ")
        print('1 Minecraft Music Remix\n'
              '2 Minecraft Calm Music\n'
              '3 No Music')
        music = input('Pick an option for music: ')
        if not _start_music(music):
            raise ValueError
    except ValueError:  # Invalid music option: re-prompt until valid.
        # NOTE: the original loop condition `music != '1' or music != '2' or
        # music != '3'` was always true; an explicit loop-with-break is clearer.
        while True:
            print('Invalid input. Please enter a valid choice')
            print('1 Minecraft Music Remix\n'
                  '2 Minecraft Calm Music\n'
                  '3 No Music')
            music = input('Pick an option for music: ')
            if _start_music(music):
                break
    except IOError:  # A music file could not be opened
        print('Could not open file. File may not exist')
    except AttributeError:  # A pygame mixer attribute is missing
        print('No module found.')
    return user, user2


def _start_music(choice):
    """Start the music matching *choice*; return True when the choice is valid.

    '1' plays the remix at reduced volume, '2' plays the calm track,
    '3' deliberately plays nothing. Any other value returns False.
    """
    if choice == '1':
        pygame.mixer_music.load('MinecraftThemeSong.mp3')
        pygame.mixer.music.set_volume(.1)
        pygame.mixer_music.play(loops=100, start=0.0)
    elif choice == '2':
        pygame.mixer_music.load('MinecraftThemeSong2.mp3')
        pygame.mixer_music.play(loops=100, start=0.0)
    elif choice != '3':
        return False
    return True
# Setting up Pygame Display window (fixed 800x600 window)
display_width = 800
display_height = 600
# Defining colors as RGB tuples; the LIGHTER_* variants are the hover
# colors used by button() next to their base color.
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 220, 0)
LIGHTER_GREEN = (0, 255, 0)
DARK_GREEN = (0, 150, 0)
BLUE = (0, 100, 100)
LIGHTER_BLUE = (0, 128, 128)
ORANGE = (255, 150, 0)
LIGHTER_ORANGE = (255, 165, 0)
YELLOW = (235, 235, 0)
LIGHTER_YELLOW = (255, 255, 0)
# Game Initialization and Settings — console_screen() blocks on console
# input (names + music choice) before the window opens.
name1, name2 = console_screen()
gameDisplay = pygame.display.set_mode((display_width, display_height))
pygame.display.set_caption('OTHELLO GAME')
clock = pygame.time.Clock()
# Initial mouse snapshot; the per-frame state is re-read inside button()
# and draw_piece_in_display().
click = pygame.mouse.get_pressed()
mouse = pygame.mouse.get_pos()
# Images used in the game, loaded from the working directory.
OthelloImage = pygame.image.load('reversi.png')
DirectionsImage = pygame.image.load('directions2.png')
Othello_background_image = pygame.image.load('background_othello_image.png')
Wood_background = pygame.image.load('wood_background.png')
# Dimensions of the board (standard 8x8 Othello grid)
rows = 8
columns = 8
# Circle Radius: pieces are drawn 2 px smaller than half a 40 px square (18 px)
circle_radius = int((40 / 2) - 2)
# Displaying the Othello Image
def othello_image(x, y):
    """Blit the Othello title image onto the game window at (x, y)."""
    destination = (x, y)
    gameDisplay.blit(OthelloImage, destination)
# Displaying the Directions Image
def directions_image(x, y):
    """Blit the how-to-play instructions image onto the window at (x, y)."""
    destination = (x, y)
    gameDisplay.blit(DirectionsImage, destination)
# Displaying the Background Othello Image
def background_othello_image(x, y):
    """Blit the main-menu background image onto the window at (x, y)."""
    destination = (x, y)
    gameDisplay.blit(Othello_background_image, destination)
# Displaying the Wood Background Image
def wood_background_image(x, y):
    """Blit the wooden board background onto the window at (x, y)."""
    destination = (x, y)
    gameDisplay.blit(Wood_background, destination)
# Creating the board
def game_board():
    """Return a fresh board: a rows x columns numpy array of zeros."""
    return np.zeros((rows, columns))
def piece_placed(x, y, player, board):
    """Record a move at board[x][y] and return the (mutated) board.

    Player 0 writes a 1, player 1 writes a 2; any other player number
    leaves the board unchanged.
    """
    marker = {0: 1, 1: 2}.get(player)
    if marker is not None:
        board[x][y] = marker
    return board
# Reversing the order of array elements along the specified axis
def print_board(board):
    """Print the board flipped along axis 0 so row 0 appears at the bottom."""
    flipped = np.flip(board, 0)
    print(flipped)
# Module-level game state: the single shared 8x8 board that every game
# screen resets and mutates in place.
board = game_board()
# Function to create text objects
def text_objects(text, font, color):
    """Render *text* with *font* and *color*; return (surface, bounding rect)."""
    surface = font.render(text, True, color)
    rect = surface.get_rect()
    return surface, rect
# Displaying the first intro text
def message_display(text, color):
    """Draw the intro prompt, centred horizontally near the window bottom."""
    prompt_font = pygame.font.Font('freesansbold.ttf', 35)
    surface, rect = text_objects(text, prompt_font, color)
    rect.center = (display_width / 2, display_height / 1.2)
    gameDisplay.blit(surface, rect)
# Displaying the second intro text
def message_display2(text, color):
    """Draw the title banner, centred horizontally near the window top."""
    banner_font = pygame.font.Font('freesansbold.ttf', 45)
    surface, rect = text_objects(text, banner_font, color)
    rect.center = (display_width / 2, display_height / 4.5)
    gameDisplay.blit(surface, rect)
# Message display for the scoreboard and Othello title
def message_display3(text, color):
    """Draw the in-game title text at the fixed position (280, 540)."""
    title_font = pygame.font.Font('times new roman.ttf', 45)
    surface, rect = text_objects(text, title_font, color)
    rect.center = (280, 540)
    gameDisplay.blit(surface, rect)
# Displaying the Player win text
def winner_or_tie_text(text, color):
    """Draw the end-of-game result text, centred near the top of the window."""
    result_font = pygame.font.Font('times new roman.ttf', 70)
    surface, rect = text_objects(text, result_font, color)
    rect.center = (display_width / 2, display_height / 9)
    gameDisplay.blit(surface, rect)
# Displaying the return text
def return_text(text, color):
    """Draw the return-to-main-menu hint in the bottom-right corner."""
    hint_font = pygame.font.Font('freesansbold.ttf', 15)
    surface, rect = text_objects(text, hint_font, color)
    rect.center = (display_width / 1.2, display_height / 1.05)
    gameDisplay.blit(surface, rect)
# Button function
def button(message, x, y, width, height, inactive_color, active_color, action=None):
    """Draw a rectangular button and invoke *action* when it is clicked.

    The rectangle is drawn in *active_color* while the mouse hovers
    strictly inside it (and *action* runs on a held left button),
    otherwise in *inactive_color*. *message* is rendered in black,
    centred on the rectangle.
    """
    pressed = pygame.mouse.get_pressed()
    pointer = pygame.mouse.get_pos()
    hovering = x < pointer[0] < x + width and y < pointer[1] < y + height
    if hovering:
        pygame.draw.rect(gameDisplay, active_color, (x, y, width, height))
        if pressed[0] == 1 and action is not None:
            action()
    else:
        pygame.draw.rect(gameDisplay, inactive_color, (x, y, width, height))
    # Caption rendered last so it sits on top of the rectangle.
    label_font = pygame.font.Font('freesansbold.ttf', 20)
    surface, rect = text_objects(message, label_font, BLACK)
    rect.center = (x + width / 2, y + height / 2)
    gameDisplay.blit(surface, rect)
# Intro Screen
def game_intro():
    """Show the title screen and wait for the space bar to start the game."""
    gameDisplay.fill(WHITE)
    othello_image(0, 0)
    message_display('Press Space to Play', BLACK)
    message_display2('REVERSI (OTHELLO)', BLACK)
    pygame.display.update()
    waiting = True
    while waiting:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit_game()
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                # Hand control to the main menu, then leave the intro loop.
                second_display()
                waiting = False
# Second Screen
def second_display():
    """This function creates the second display after the intro.
    It displays a background image, the buttons of the main menu, and actions when clicked. """
    x = 0
    y = 0
    gameDisplay.fill(WHITE)
    background_othello_image(x, y)
    game_exit = False
    # NOTE(review): game_exit is never set True in this loop; the menu only
    # exits via quit_game() or by recursing into a mode screen through a
    # button action — confirm this is intended.
    while not game_exit:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit_game()
        # Buttons are redrawn every frame so hover highlighting and click
        # handling (both inside button()) stay responsive.
        button('Player VS Player', 200, 115, 400, 70, BLUE, LIGHTER_BLUE, player_player)
        button('Player VS Computer', 200, 200, 400, 70, ORANGE, LIGHTER_ORANGE, player_computer)
        button('Computer VS Computer', 200, 285, 400, 70, YELLOW, LIGHTER_YELLOW, computer_computer)
        button('How To Play', 200, 370, 400, 70, GREEN, LIGHTER_GREEN, how_to_play)
        pygame.display.update()
        clock.tick(60)  # cap the menu loop at 60 frames per second
def display_board():
    """Draw the empty Othello board and the scoreboard decorations.

    Renders the wooden background, a black frame, an 8x8 grid of green
    40 px squares on a 50 px pitch starting at (100, 100), the
    return-to-menu hint, the two score-marker circles and the title.
    The original hand-unrolled 64 identical button() calls; two loops
    produce the same grid.
    """
    wood_background_image(0, 0)
    # Black frame behind the grid.
    button('', 90, 90, 413, 413, BLACK, BLACK, None)
    # 8x8 grid, drawn column by column with the bottom row first — the
    # same order as the original unrolled calls.
    for grid_x in range(100, 500, 50):
        for grid_y in range(450, 50, -50):
            button('', grid_x, grid_y, 40, 40, DARK_GREEN, DARK_GREEN, None)
    return_text('Press the letter "m" for Main Menu', WHITE)
    # Drawing the score board circles next to which the counts are shown.
    pygame.draw.circle(gameDisplay, WHITE, (530, 170), circle_radius)
    pygame.draw.circle(gameDisplay, BLACK, (530, 120), circle_radius)
    message_display3('OTHELLO', WHITE)
    pygame.display.update()
# Player vs Player Screen
def player_player():
    """This function creates the player vs player screen.
    It allows the players to place the round pieces on the board when a square is clicked.
    Implements the rules of the game."""
    turn = 0  # 0 -> black pieces (value 1), 1 -> white pieces (value 2)
    display_board()
    reset_array(board)
    setting_up_board(board)
    player_score(board)
    pygame.display.update()
    game_exit = False
    while not game_exit:
        mouse = pygame.mouse.get_pos()
        # Draws a piece on the hovered square while the button is down.
        draw_piece_in_display(turn)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit_game()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_m:
                    second_display()
                    game_exit = True
            # Complete the move only when the release happened inside the
            # 390x390 px grid area.
            if event.type == pygame.MOUSEBUTTONUP and (100 < mouse[0] < 490 and 100 < mouse[1] < 490):
                # enforce_rules is defined elsewhere in the file; presumably
                # it flips captured pieces for the given player — TODO confirm.
                if turn == 0:
                    enforce_rules(board, 1)
                else:
                    enforce_rules(board, 2)
                if player_score(board):  # True once no empty square remains
                    game_exit = True
                turn += 1
                turn %= 2  # alternate between the two players
        pygame.display.update()
# Player vs Computer Screen
def player_computer():
    """This function creates the player vs computer screen.
    It allows the player and the computer to place the round pieces on the board when a square is clicked.
    Implements the rules of the game."""
    turn = 0  # selects the colour draw_piece_in_display renders
    display_board()
    reset_array(board)
    setting_up_board(board)
    game_exit = False
    while not game_exit:
        draw_piece_in_display(turn)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit_game()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_m:
                    second_display()
                    game_exit = True
            # Each mouse release plays one human move followed by one
            # computer move.
            if event.type == pygame.MOUSEBUTTONUP:
                enforce_rules(board, 1)  # Will change array
                computer_move(board, 1)  # Computer makes a valid move
                enforce_rules(board, 2)  # Will change the array for the computer
                if player_score(board):  # True once no empty square remains
                    game_exit = True
                # NOTE(review): turn alternates here even though the human
                # always plays via enforce_rules(board, 1), so the colour
                # drawn by draw_piece_in_display(turn) alternates — verify
                # this is intended.
                turn += 1
                turn %= 2
        pygame.display.update()
# Player vs Computer Screen
def computer_computer():
    """This function creates the computer vs computer screen.
    It allows the computer to have two different turns to place the round pieces on the board when a square is clicked.
    Implements the rules of the game."""
    display_board()
    reset_array(board)
    setting_up_board(board)
    game_exit = False
    while not game_exit:
        pygame.time.wait(500)  # pace the simulation: one move pair per 500 ms
        # draw_piece_in_display(turn)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit_game()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_m:
                    second_display()
                    game_exit = True
        # One full round per frame: a move for each side, each followed by
        # enforce_rules (defined elsewhere in the file; presumably flips
        # captured pieces — TODO confirm).
        computer_move(board, 0)  # Computer makes a valid move
        enforce_rules(board, 1)  # will change the array for the computer
        computer_move(board, 1)  # Computer makes a valid move
        enforce_rules(board, 2)  # will change the array for the computer
        if player_score(board):  # True once no empty square remains
            game_exit = True
        pygame.display.update()
# How to Play Screen
def how_to_play():
    """Show the instructions screen until "m" returns to the main menu."""
    gameDisplay.fill(WHITE)
    directions_image(0, 0)
    return_text('Press the letter "m" for Main Menu', BLACK)
    pygame.display.update()
    showing = True
    while showing:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit_game()
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_m:
                second_display()
                showing = False
# New definition
def setting_up_board(board):
    """Place and draw the four standard opening pieces; return the board.

    Black pieces (value 1) go on the (3,3)/(4,4) diagonal, white pieces
    (value 2) on (3,4)/(4,3), each rendered as a circle on the grid.
    """
    opening = (
        (3, 3, 1, BLACK, (270, 320)),
        (3, 4, 2, WHITE, (320, 320)),
        (4, 3, 2, WHITE, (270, 270)),
        (4, 4, 1, BLACK, (320, 270)),
    )
    for row, col, piece, piece_color, center in opening:
        board[row][col] = piece
        pygame.draw.circle(gameDisplay, piece_color, center, circle_radius)
    return board
def computer_move(board, move):
    """Place one random computer piece on an empty square of *board*.

    Chooses uniformly among the empty (== 0) squares and writes a 1 when
    move == 0 or a 2 when move == 1, mutating *board* in place.

    Returns:
        bool: False once a square was chosen (matching the original
        return value); True when the board has no empty square, in which
        case nothing is placed. The original rejection-sampling loop
        (random.randint until an empty cell was hit) would spin forever
        on a full board — callers ignore the return value, so the early
        exit is safe.
    """
    empty_cells = [(x, y)
                   for x in range(len(board))
                   for y in range(len(board[0]))
                   if board[x][y] == 0]
    if not empty_cells:
        return True  # no legal placement left
    x, y = random.choice(empty_cells)
    if move == 0:
        board[x][y] = 1
    elif move == 1:
        board[x][y] = 2
    return False
def reset_array(array):
    """Zero every element of *array* in place, recursing into nested lists.

    Non-list elements (including numpy rows, which broadcast the 0) are
    overwritten directly.
    """
    for index, element in enumerate(array):
        if isinstance(element, list):
            reset_array(element)
        else:
            array[index] = 0
def score(text, color, posx, posy):
    """Render a score value centred at (posx, posy) in 35 pt Times New Roman."""
    score_font = pygame.font.Font('times new roman.ttf', 35)
    surface, rect = text_objects(text, score_font, color)
    rect.center = (posx, posy)
    gameDisplay.blit(surface, rect)
# Function to keep track of the scores of each player
def player_score(board):
    """Redraw both players' piece counts and report whether the game ended.

    Counts the 1s (player 1) and 2s (player 2) on *board*, repainting the
    score boxes beside the marker circles as the counts grow. Once no
    empty square remains, shows the winner (or tie) banner.

    Returns:
        True when the board is full (game over), None otherwise.
    """
    count_p1 = 0
    count_p2 = 0
    empty_left = 64
    for row_idx in range(rows):
        for col_idx in range(columns):
            cell = board[row_idx][col_idx]
            if cell == 1:
                count_p1 += 1
                # Blank the box before printing the running total on top.
                button('', 568, 100, 40, 40, WHITE, WHITE, action=None)
                score(str(count_p1), BLACK, 590, 120)
                empty_left -= 1
            elif cell == 2:
                count_p2 += 1
                button('', 568, 150, 40, 40, WHITE, WHITE, action=None)
                score(str(count_p2), BLACK, 590, 170)
                empty_left -= 1
    if empty_left <= 0:
        if count_p1 > count_p2:
            player_1_win()
        elif count_p2 > count_p1:
            player_2_win()
        else:
            player_tie()
        return True
def player_1_win():
    """Display the winner banner with player 1's chosen name."""
    winner_or_tie_text(str(name1), WHITE)
def player_2_win():
    """Display the winner banner with player 2's chosen name."""
    winner_or_tie_text(str(name2), WHITE)
def player_tie():
    """Display the banner shown when both players finish with equal counts."""
    winner_or_tie_text("Tie!", WHITE)
def draw_piece_in_display(move):
"""This function draws the circles over the squares when clicked.
It takes the location of the click and draws a circle with specifications such as location, color, and size. """
mouse = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
# First Column
if click[0] == 1 and (100 + 40 > mouse[0] > 100 and 450 + 40 > mouse[1] > 450) and (board[0][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 470), circle_radius) # Surface, color, position x, radius
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 470), circle_radius)
piece_placed(0, 0, move, board)
elif click[0] == 1 and (100 + 40 > mouse[0] > 100 and 400 + 40 > mouse[1] > 400) and (board[1][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 420), circle_radius)
piece_placed(1, 0, move, board)
elif click[0] == 1 and (100 + 40 > mouse[0] > 100 and 350 + 40 > mouse[1] > 350) and (board[2][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 370), circle_radius)
piece_placed(2, 0, move, board)
elif click[0] == 1 and (100 + 40 > mouse[0] > 100 and 300 + 40 > mouse[1] > 300) and (board[3][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 320), circle_radius)
piece_placed(3, 0, move, board)
elif click[0] == 1 and (100 + 40 > mouse[0] > 100 and 250 + 40 > mouse[1] > 250) and (board[4][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 270), circle_radius)
piece_placed(4, 0, move, board)
elif click[0] == 1 and (100 + 40 > mouse[0] > 100 and 200 + 40 > mouse[1] > 200) and (board[5][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 220), circle_radius)
piece_placed(5, 0, move, board)
elif click[0] == 1 and (100 + 40 > mouse[0] > 100 and 150 + 40 > mouse[1] > 150) and (board[6][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 170), circle_radius)
piece_placed(6, 0, move, board)
elif click[0] == 1 and (100 + 40 > mouse[0] > 100 and 100 + 40 > mouse[1] > 100) and (board[7][0] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (120, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (120, 120), circle_radius)
piece_placed(7, 0, move, board)
# Second Column
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 450 + 40 > mouse[1] > 450) and (board[0][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 470), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 470), circle_radius)
piece_placed(0, 1, move, board)
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 400 + 40 > mouse[1] > 400) and (board[1][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 420), circle_radius)
piece_placed(1, 1, move, board)
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 350 + 40 > mouse[1] > 350) and (board[2][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 370), circle_radius)
piece_placed(2, 1, move, board)
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 300 + 40 > mouse[1] > 300) and (board[3][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 320), circle_radius)
piece_placed(3, 1, move, board)
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 250 + 40 > mouse[1] > 250) and (board[4][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 270), circle_radius)
piece_placed(4, 1, move, board)
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 200 + 40 > mouse[1] > 200) and (board[5][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 220), circle_radius)
piece_placed(5, 1, move, board)
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 150 + 40 > mouse[1] > 150) and (board[6][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 170), circle_radius)
piece_placed(6, 1, move, board)
elif click[0] == 1 and (150 + 40 > mouse[0] > 150 and 100 + 40 > mouse[1] > 100) and (board[7][1] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (170, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (170, 120), circle_radius)
piece_placed(7, 1, move, board)
# Third Column
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 450 + 40 > mouse[1] > 450) and (board[0][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 470), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 470), circle_radius)
piece_placed(0, 2, move, board)
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 400 + 40 > mouse[1] > 400) and (board[1][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 420), circle_radius)
piece_placed(1, 2, move, board)
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 350 + 40 > mouse[1] > 350) and (board[2][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 370), circle_radius)
piece_placed(2, 2, move, board)
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 300 + 40 > mouse[1] > 300) and (board[3][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 320), circle_radius)
piece_placed(3, 2, move, board)
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 250 + 40 > mouse[1] > 250) and (board[4][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 270), circle_radius)
piece_placed(4, 2, move, board)
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 200 + 40 > mouse[1] > 200) and (board[5][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 220), circle_radius)
piece_placed(5, 2, move, board)
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 150 + 40 > mouse[1] > 150) and (board[6][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 170), circle_radius)
piece_placed(6, 2, move, board)
elif click[0] == 1 and (200 + 40 > mouse[0] > 200 and 100 + 40 > mouse[1] > 100) and (board[7][2] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (220, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (220, 120), circle_radius)
piece_placed(7, 2, move, board)
# Fourth Column
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 450 + 40 > mouse[1] > 450) and (board[0][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 470), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 470), circle_radius)
piece_placed(0, 3, move, board)
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 400 + 40 > mouse[1] > 400) and (board[1][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 420), circle_radius)
piece_placed(1, 3, move, board)
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 350 + 40 > mouse[1] > 350) and (board[2][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 370), circle_radius)
piece_placed(2, 3, move, board)
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 300 + 40 > mouse[1] > 300) and (board[3][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 320), circle_radius)
piece_placed(3, 3, move, board)
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 250 + 40 > mouse[1] > 250) and (board[4][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 270), circle_radius)
piece_placed(4, 3, move, board)
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 200 + 40 > mouse[1] > 200) and (board[5][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 220), circle_radius)
piece_placed(5, 3, move, board)
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 150 + 40 > mouse[1] > 150) and (board[6][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 170), circle_radius)
piece_placed(6, 3, move, board)
elif click[0] == 1 and (250 + 40 > mouse[0] > 250 and 100 + 40 > mouse[1] > 100) and (board[7][3] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (270, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (270, 120), circle_radius)
piece_placed(7, 3, move, board)
# Fifth Column
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 450 + 40 > mouse[1] > 450) and (board[0][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 470), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 470), circle_radius)
piece_placed(0, 4, move, board)
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 400 + 40 > mouse[1] > 400) and (board[1][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 420), circle_radius)
piece_placed(1, 4, move, board)
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 350 + 40 > mouse[1] > 350) and (board[2][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 370), circle_radius)
piece_placed(2, 4, move, board)
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 300 + 40 > mouse[1] > 300) and (board[3][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 320), circle_radius)
piece_placed(3, 4, move, board)
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 250 + 40 > mouse[1] > 250) and (board[4][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 270), circle_radius)
piece_placed(4, 4, move, board)
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 200 + 40 > mouse[1] > 200) and (board[5][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 220), circle_radius)
piece_placed(5, 4, move, board)
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 150 + 40 > mouse[1] > 150) and (board[6][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 170), circle_radius)
piece_placed(6, 4, move, board)
elif click[0] == 1 and (300 + 40 > mouse[0] > 300 and 100 + 40 > mouse[1] > 100) and (board[7][4] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (320, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (320, 120), circle_radius)
piece_placed(7, 4, move, board)
# Sixth Column
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 450 + 40 > mouse[1] > 450) and (board[0][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 470), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 470), circle_radius)
piece_placed(0, 5, move, board)
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 400 + 40 > mouse[1] > 400) and (board[1][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 420), circle_radius)
piece_placed(1, 5, move, board)
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 350 + 40 > mouse[1] > 350) and (board[2][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 370), circle_radius)
piece_placed(2, 5, move, board)
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 300 + 40 > mouse[1] > 300) and (board[3][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 320), circle_radius)
piece_placed(3, 5, move, board)
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 250 + 40 > mouse[1] > 250) and (board[4][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 270), circle_radius)
piece_placed(4, 5, move, board)
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 200 + 40 > mouse[1] > 200) and (board[5][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 220), circle_radius)
piece_placed(5, 5, move, board)
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 150 + 40 > mouse[1] > 150) and (board[6][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 170), circle_radius)
piece_placed(6, 5, move, board)
elif click[0] == 1 and (350 + 40 > mouse[0] > 350 and 100 + 40 > mouse[1] > 100) and (board[7][5] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (370, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (370, 120), circle_radius)
piece_placed(7, 5, move, board)
# Seventh Column
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 450 + 40 > mouse[1] > 450) and (board[0][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 470), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 470), circle_radius)
piece_placed(0, 6, move, board)
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 400 + 40 > mouse[1] > 400) and (board[1][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 420), circle_radius)
piece_placed(1, 6, move, board)
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 350 + 40 > mouse[1] > 350) and (board[2][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 370), circle_radius)
piece_placed(2, 6, move, board)
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 300 + 40 > mouse[1] > 300) and (board[3][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 320), circle_radius)
piece_placed(3, 6, move, board)
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 250 + 40 > mouse[1] > 250) and (board[4][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 270), circle_radius)
piece_placed(4, 6, move, board)
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 200 + 40 > mouse[1] > 200) and (board[5][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 220), circle_radius)
piece_placed(5, 6, move, board)
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 150 + 40 > mouse[1] > 150) and (board[6][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 170), circle_radius)
piece_placed(6, 6, move, board)
elif click[0] == 1 and (400 + 40 > mouse[0] > 400 and 100 + 40 > mouse[1] > 100) and (board[7][6] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (420, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (420, 120), circle_radius)
piece_placed(7, 6, move, board)
# Eight Column
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 450 + 40 > mouse[1] > 450) and (board[0][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 470), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 470), circle_radius)
piece_placed(0, 7, move, board)
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 400 + 40 > mouse[1] > 400) and (board[1][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 420), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 420), circle_radius)
piece_placed(1, 7, move, board)
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 350 + 40 > mouse[1] > 350) and (board[2][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 370), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 370), circle_radius)
piece_placed(2, 7, move, board)
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 300 + 40 > mouse[1] > 300) and (board[3][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 320), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 320), circle_radius)
piece_placed(3, 7, move, board)
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 250 + 40 > mouse[1] > 250) and (board[4][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 270), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 270), circle_radius)
piece_placed(4, 7, move, board)
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 200 + 40 > mouse[1] > 200) and (board[5][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 220), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 220), circle_radius)
piece_placed(5, 7, move, board)
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 150 + 40 > mouse[1] > 150) and (board[6][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 170), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 170), circle_radius)
piece_placed(6, 7, move, board)
elif click[0] == 1 and (450 + 40 > mouse[0] > 450 and 100 + 40 > mouse[1] > 100) and (board[7][7] == 0):
if move == 0:
pygame.draw.circle(gameDisplay, BLACK, (470, 120), circle_radius)
else:
pygame.draw.circle(gameDisplay, WHITE, (470, 120), circle_radius)
piece_placed(7, 7, move, board)
pygame.display.update()
def draw_flipped_piece(board, move):
    """Repaint every piece belonging to the player who just moved.

    Flipping is rendered by drawing a circle of the mover's colour on top
    of the old circle for each cell the mover now owns.

    Args:
        board: 8x8 game matrix where 0 = empty, 1 = black, 2 = white.
        move:  the player who moved; 1 repaints black pieces, any other
               value repaints white pieces (matching the original if/else).
    """
    # Screen mapping used everywhere in this file when pieces are placed:
    # cell (row, column) is centred at x = 120 + 50*column, y = 470 - 50*row
    # (50 px grid pitch, row 0 at the bottom of the board).
    if move == 1:
        piece_value, colour = 1, BLACK
    else:
        piece_value, colour = 2, WHITE
    # Fixed 8x8 sweep, identical in coverage and order to the original's
    # 64 hand-written if-statements per colour.
    for row in range(8):
        for column in range(8):
            if board[row][column] == piece_value:
                pygame.draw.circle(
                    gameDisplay, colour,
                    (120 + 50 * column, 470 - 50 * row),
                    circle_radius)
    pygame.display.update()
# This is what changes the matrix
def enforce_rules(board, move):
    """Update the board matrix with the flips implied by the latest move.

    Scans every horizontal, vertical, positive-diagonal and
    negative-diagonal run of length 3..8.  When a run starts and ends with
    the mover's piece and its interior cells satisfy the original
    per-length conditions, the whole run is converted to ``move`` and the
    display is refreshed via draw_flipped_piece().

    Also fixes the original's glued token ``!= 0and`` on the length-8
    positive-diagonal check (invalid on modern CPython); the semantics are
    unchanged.

    Args:
        board: the rows x columns game matrix (0 empty, 1 black, 2 white).
        move:  piece value of the player who just moved (1 or 2).
    """
    # Interior-condition tables, keyed by scan direction (d_row, d_col)
    # and run length.  For interior offsets 1..length-2:
    #   True  -> the cell must already hold `move`
    #   False -> the cell only has to be non-empty
    # The two end cells (offsets 0 and length-1) must always equal `move`.
    # NOTE(review): the mixed True/False entries reproduce the original
    # condition chains verbatim (e.g. the vertical length-8 scan required
    # its 7th cell merely non-empty, and lengths 6-8 horizontally required
    # some interior cells to already be the mover's) — several look
    # inconsistent with standard Othello flipping; confirm against the
    # intended rules before changing them.
    interiors = {
        (0, 1): {   # horizontal runs, scanning columns left to right
            3: (False,), 4: (False,) * 2, 5: (False,) * 3,
            6: (False, False, False, True),
            7: (False, False, False, True, True),
            8: (False, False, False, True, True, True),
        },
        (1, 0): {   # vertical runs, scanning increasing row index
            3: (False,), 4: (False,) * 2, 5: (False,) * 3,
            6: (False, False, False, True),
            7: (False, False, False, True, True),
            8: (False, False, False, True, True, False),
        },
        # Both diagonal scans only required interiors to be non-empty.
        (1, 1): {n: (False,) * (n - 2) for n in range(3, 9)},
        (-1, 1): {n: (False,) * (n - 2) for n in range(3, 9)},
    }
    # Dict insertion order matches the original pass order:
    # horizontal, vertical, positive diagonal, negative diagonal;
    # within each direction, lengths are processed 3, 4, ..., 8.
    for (d_row, d_col), runs in interiors.items():
        for length in sorted(runs):
            checks = runs[length]
            span = length - 1
            # Row/column windows identical to the original range() bounds.
            if d_row > 0:
                row_candidates = range(rows - span)
            elif d_row < 0:
                row_candidates = range(span, rows)
            else:
                row_candidates = range(rows)
            col_limit = columns - span if d_col else columns
            for row in row_candidates:
                for column in range(col_limit):
                    # Both ends of the run must belong to the mover.
                    if board[row][column] != move:
                        continue
                    if board[row + span * d_row][column + span * d_col] != move:
                        continue
                    interior_ok = True
                    for offset, needs_move in enumerate(checks, start=1):
                        cell = board[row + offset * d_row][column + offset * d_col]
                        if needs_move:
                            if cell != move:
                                interior_ok = False
                                break
                        elif cell == 0:
                            interior_ok = False
                            break
                    if interior_ok:
                        # Convert the whole run (ends included, as the
                        # original redundantly re-assigned them too).
                        for offset in range(length):
                            board[row + offset * d_row][column + offset * d_col] = move
                        draw_flipped_piece(board, move)
# Ending the game function
def quit_game():
    """Shut down pygame and terminate the program.

    Uninitializes all pygame modules (releasing the display, mixer, and
    other resources) and then raises ``SystemExit`` to end the process.
    ``raise SystemExit`` is used instead of the ``quit()`` builtin: the
    Python documentation reserves ``quit()``/``exit()`` for interactive
    interpreter sessions, and they do not exist when the ``site`` module
    is disabled (e.g. ``python -S`` or embedded interpreters).

    Raises:
        SystemExit: always, to terminate the interpreter cleanly.
    """
    pygame.quit()  # release all initialized pygame modules
    raise SystemExit
# Entry point: launch the intro/menu screen, which drives the rest of the game.
game_intro()
| [
"pygame.mouse.get_pressed",
"pygame.init",
"pygame.quit",
"pygame.mixer.music.set_volume",
"pygame.mixer_music.load",
"pygame.font.Font",
"numpy.flip",
"pygame.display.set_mode",
"pygame.mixer_music.play",
"pygame.mouse.get_pos",
"pygame.draw.rect",
"pygame.image.load",
"pygame.display.updat... | [((107, 120), 'pygame.init', 'pygame.init', ([], {}), '()\n', (118, 120), False, 'import pygame\n'), ((2801, 2857), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(display_width, display_height)'], {}), '((display_width, display_height))\n', (2824, 2857), False, 'import pygame\n'), ((2859, 2901), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""OTHELLO GAME"""'], {}), "('OTHELLO GAME')\n", (2885, 2901), False, 'import pygame\n'), ((2911, 2930), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (2928, 2930), False, 'import pygame\n'), ((2940, 2966), 'pygame.mouse.get_pressed', 'pygame.mouse.get_pressed', ([], {}), '()\n', (2964, 2966), False, 'import pygame\n'), ((2976, 2998), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (2996, 2998), False, 'import pygame\n'), ((3044, 3076), 'pygame.image.load', 'pygame.image.load', (['"""reversi.png"""'], {}), "('reversi.png')\n", (3061, 3076), False, 'import pygame\n'), ((3096, 3132), 'pygame.image.load', 'pygame.image.load', (['"""directions2.png"""'], {}), "('directions2.png')\n", (3113, 3132), False, 'import pygame\n'), ((3161, 3210), 'pygame.image.load', 'pygame.image.load', (['"""background_othello_image.png"""'], {}), "('background_othello_image.png')\n", (3178, 3210), False, 'import pygame\n'), ((3230, 3270), 'pygame.image.load', 'pygame.image.load', (['"""wood_background.png"""'], {}), "('wood_background.png')\n", (3247, 3270), False, 'import pygame\n'), ((4650, 4675), 'numpy.zeros', 'np.zeros', (['(rows, columns)'], {}), '((rows, columns))\n', (4658, 4675), True, 'import numpy as np\n'), ((6041, 6081), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(35)'], {}), "('freesansbold.ttf', 35)\n", (6057, 6081), False, 'import pygame\n'), ((6590, 6630), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(45)'], {}), "('freesansbold.ttf', 45)\n", (6606, 6630), False, 'import pygame\n'), ((7083, 7126), 
'pygame.font.Font', 'pygame.font.Font', (['"""times new roman.ttf"""', '(45)'], {}), "('times new roman.ttf', 45)\n", (7099, 7126), False, 'import pygame\n'), ((7529, 7572), 'pygame.font.Font', 'pygame.font.Font', (['"""times new roman.ttf"""', '(70)'], {}), "('times new roman.ttf', 70)\n", (7545, 7572), False, 'import pygame\n'), ((8009, 8049), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(15)'], {}), "('freesansbold.ttf', 15)\n", (8025, 8049), False, 'import pygame\n'), ((8620, 8646), 'pygame.mouse.get_pressed', 'pygame.mouse.get_pressed', ([], {}), '()\n', (8644, 8646), False, 'import pygame\n'), ((8660, 8682), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (8680, 8682), False, 'import pygame\n'), ((9056, 9096), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(20)'], {}), "('freesansbold.ttf', 20)\n", (9072, 9096), False, 'import pygame\n'), ((9651, 9674), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (9672, 9674), False, 'import pygame\n'), ((15683, 15748), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(530, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (530, 170), circle_radius)\n', (15701, 15748), False, 'import pygame\n'), ((15754, 15819), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(530, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (530, 120), circle_radius)\n', (15772, 15819), False, 'import pygame\n'), ((15865, 15888), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (15886, 15888), False, 'import pygame\n'), ((16253, 16276), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (16274, 16276), False, 'import pygame\n'), ((19709, 19732), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (19730, 19732), False, 'import pygame\n'), ((20280, 20345), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 320)', 'circle_radius'], {}), 
'(gameDisplay, BLACK, (270, 320), circle_radius)\n', (20298, 20345), False, 'import pygame\n'), ((20372, 20437), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 320), circle_radius)\n', (20390, 20437), False, 'import pygame\n'), ((20464, 20529), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 270), circle_radius)\n', (20482, 20529), False, 'import pygame\n'), ((20556, 20621), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 270), circle_radius)\n', (20574, 20621), False, 'import pygame\n'), ((21760, 21803), 'pygame.font.Font', 'pygame.font.Font', (['"""times new roman.ttf"""', '(35)'], {}), "('times new roman.ttf', 35)\n", (21776, 21803), False, 'import pygame\n'), ((24247, 24269), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (24267, 24269), False, 'import pygame\n'), ((24283, 24309), 'pygame.mouse.get_pressed', 'pygame.mouse.get_pressed', ([], {}), '()\n', (24307, 24309), False, 'import pygame\n'), ((46739, 46762), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (46760, 46762), False, 'import pygame\n'), ((62667, 62690), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (62688, 62690), False, 'import pygame\n'), ((77490, 77503), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (77501, 77503), False, 'import pygame\n'), ((5352, 5369), 'numpy.flip', 'np.flip', (['board', '(0)'], {}), '(board, 0)\n', (5359, 5369), True, 'import numpy as np\n'), ((8776, 8842), 'pygame.draw.rect', 'pygame.draw.rect', (['gameDisplay', 'active_color', '(x, y, width, height)'], {}), '(gameDisplay, active_color, (x, y, width, height))\n', (8792, 8842), False, 'import pygame\n'), ((8931, 8999), 'pygame.draw.rect', 'pygame.draw.rect', (['gameDisplay', 'inactive_color', '(x, y, 
width, height)'], {}), '(gameDisplay, inactive_color, (x, y, width, height))\n', (8947, 8999), False, 'import pygame\n'), ((9738, 9756), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (9754, 9756), False, 'import pygame\n'), ((10390, 10408), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (10406, 10408), False, 'import pygame\n'), ((10866, 10889), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (10887, 10889), False, 'import pygame\n'), ((16343, 16365), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (16363, 16365), False, 'import pygame\n'), ((16425, 16443), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (16441, 16443), False, 'import pygame\n'), ((17568, 17586), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (17584, 17586), False, 'import pygame\n'), ((18685, 18706), 'pygame.time.wait', 'pygame.time.wait', (['(500)'], {}), '(500)\n', (18701, 18706), False, 'import pygame\n'), ((18768, 18786), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (18784, 18786), False, 'import pygame\n'), ((19377, 19400), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (19398, 19400), False, 'import pygame\n'), ((19804, 19822), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (19820, 19822), False, 'import pygame\n'), ((20886, 20906), 'random.randint', 'random.randint', (['(0)', '(7)'], {}), '(0, 7)\n', (20900, 20906), False, 'import random\n'), ((20920, 20940), 'random.randint', 'random.randint', (['(0)', '(7)'], {}), '(0, 7)\n', (20934, 20940), False, 'import random\n'), ((776, 825), 'pygame.mixer_music.load', 'pygame.mixer_music.load', (['"""MinecraftThemeSong.mp3"""'], {}), "('MinecraftThemeSong.mp3')\n", (799, 825), False, 'import pygame\n'), ((839, 873), 'pygame.mixer.music.set_volume', 'pygame.mixer.music.set_volume', (['(0.1)'], {}), '(0.1)\n', (868, 873), False, 'import pygame\n'), ((886, 931), 'pygame.mixer_music.play', 'pygame.mixer_music.play', ([], 
{'loops': '(100)', 'start': '(0.0)'}), '(loops=100, start=0.0)\n', (909, 931), False, 'import pygame\n'), ((17080, 17103), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (17101, 17103), False, 'import pygame\n'), ((18244, 18267), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (18265, 18267), False, 'import pygame\n'), ((24476, 24541), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 470), circle_radius)\n', (24494, 24541), False, 'import pygame\n'), ((24608, 24673), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 470), circle_radius)\n', (24626, 24673), False, 'import pygame\n'), ((47169, 47234), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 470), circle_radius)\n', (47187, 47234), False, 'import pygame\n'), ((47316, 47381), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 470), circle_radius)\n', (47334, 47381), False, 'import pygame\n'), ((47463, 47528), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 470), circle_radius)\n', (47481, 47528), False, 'import pygame\n'), ((47572, 47637), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 470), circle_radius)\n', (47590, 47637), False, 'import pygame\n'), ((47681, 47746), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 470), circle_radius)\n', (47699, 47746), False, 'import pygame\n'), ((47790, 47855), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 
470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 470), circle_radius)\n', (47808, 47855), False, 'import pygame\n'), ((47899, 47964), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 470), circle_radius)\n', (47917, 47964), False, 'import pygame\n'), ((48008, 48073), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 470), circle_radius)\n', (48026, 48073), False, 'import pygame\n'), ((48141, 48206), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 420), circle_radius)\n', (48159, 48206), False, 'import pygame\n'), ((48288, 48353), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 420), circle_radius)\n', (48306, 48353), False, 'import pygame\n'), ((48435, 48500), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 420), circle_radius)\n', (48453, 48500), False, 'import pygame\n'), ((48544, 48609), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 420), circle_radius)\n', (48562, 48609), False, 'import pygame\n'), ((48653, 48718), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 420), circle_radius)\n', (48671, 48718), False, 'import pygame\n'), ((48762, 48827), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 420), circle_radius)\n', (48780, 48827), False, 'import pygame\n'), ((48871, 48936), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 420)', 
'circle_radius'], {}), '(gameDisplay, BLACK, (420, 420), circle_radius)\n', (48889, 48936), False, 'import pygame\n'), ((48980, 49045), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 420), circle_radius)\n', (48998, 49045), False, 'import pygame\n'), ((49112, 49177), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 370), circle_radius)\n', (49130, 49177), False, 'import pygame\n'), ((49259, 49324), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 370), circle_radius)\n', (49277, 49324), False, 'import pygame\n'), ((49406, 49471), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 370), circle_radius)\n', (49424, 49471), False, 'import pygame\n'), ((49515, 49580), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 370), circle_radius)\n', (49533, 49580), False, 'import pygame\n'), ((49624, 49689), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 370), circle_radius)\n', (49642, 49689), False, 'import pygame\n'), ((49733, 49798), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 370), circle_radius)\n', (49751, 49798), False, 'import pygame\n'), ((49842, 49907), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 370), circle_radius)\n', (49860, 49907), False, 'import pygame\n'), ((49951, 50016), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 370)', 'circle_radius'], 
{}), '(gameDisplay, BLACK, (470, 370), circle_radius)\n', (49969, 50016), False, 'import pygame\n'), ((50084, 50149), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 320), circle_radius)\n', (50102, 50149), False, 'import pygame\n'), ((50231, 50296), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 320), circle_radius)\n', (50249, 50296), False, 'import pygame\n'), ((50378, 50443), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 320), circle_radius)\n', (50396, 50443), False, 'import pygame\n'), ((50487, 50552), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 320), circle_radius)\n', (50505, 50552), False, 'import pygame\n'), ((50596, 50661), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 320), circle_radius)\n', (50614, 50661), False, 'import pygame\n'), ((50705, 50770), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 320), circle_radius)\n', (50723, 50770), False, 'import pygame\n'), ((50814, 50879), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 320), circle_radius)\n', (50832, 50879), False, 'import pygame\n'), ((50923, 50988), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 320), circle_radius)\n', (50941, 50988), False, 'import pygame\n'), ((51055, 51120), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 270)', 'circle_radius'], {}), '(gameDisplay, 
BLACK, (120, 270), circle_radius)\n', (51073, 51120), False, 'import pygame\n'), ((51202, 51267), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 270), circle_radius)\n', (51220, 51267), False, 'import pygame\n'), ((51349, 51414), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 270), circle_radius)\n', (51367, 51414), False, 'import pygame\n'), ((51458, 51523), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 270), circle_radius)\n', (51476, 51523), False, 'import pygame\n'), ((51567, 51632), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 270), circle_radius)\n', (51585, 51632), False, 'import pygame\n'), ((51676, 51741), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 270), circle_radius)\n', (51694, 51741), False, 'import pygame\n'), ((51785, 51850), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 270), circle_radius)\n', (51803, 51850), False, 'import pygame\n'), ((51894, 51959), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 270), circle_radius)\n', (51912, 51959), False, 'import pygame\n'), ((52026, 52091), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 220), circle_radius)\n', (52044, 52091), False, 'import pygame\n'), ((52173, 52238), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 220), 
circle_radius)\n', (52191, 52238), False, 'import pygame\n'), ((52320, 52385), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 220), circle_radius)\n', (52338, 52385), False, 'import pygame\n'), ((52429, 52494), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 220), circle_radius)\n', (52447, 52494), False, 'import pygame\n'), ((52538, 52603), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 220), circle_radius)\n', (52556, 52603), False, 'import pygame\n'), ((52647, 52712), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 220), circle_radius)\n', (52665, 52712), False, 'import pygame\n'), ((52756, 52821), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 220), circle_radius)\n', (52774, 52821), False, 'import pygame\n'), ((52865, 52930), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 220), circle_radius)\n', (52883, 52930), False, 'import pygame\n'), ((52999, 53064), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 170), circle_radius)\n', (53017, 53064), False, 'import pygame\n'), ((53146, 53211), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 170), circle_radius)\n', (53164, 53211), False, 'import pygame\n'), ((53293, 53358), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 170), circle_radius)\n', 
(53311, 53358), False, 'import pygame\n'), ((53402, 53467), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 170), circle_radius)\n', (53420, 53467), False, 'import pygame\n'), ((53511, 53576), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 170), circle_radius)\n', (53529, 53576), False, 'import pygame\n'), ((53620, 53685), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 170), circle_radius)\n', (53638, 53685), False, 'import pygame\n'), ((53729, 53794), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 170), circle_radius)\n', (53747, 53794), False, 'import pygame\n'), ((53838, 53903), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 170), circle_radius)\n', (53856, 53903), False, 'import pygame\n'), ((53970, 54035), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 120), circle_radius)\n', (53988, 54035), False, 'import pygame\n'), ((54117, 54182), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 120), circle_radius)\n', (54135, 54182), False, 'import pygame\n'), ((54264, 54329), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 120), circle_radius)\n', (54282, 54329), False, 'import pygame\n'), ((54373, 54438), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 120), circle_radius)\n', (54391, 54438), 
False, 'import pygame\n'), ((54482, 54547), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 120), circle_radius)\n', (54500, 54547), False, 'import pygame\n'), ((54591, 54656), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 120), circle_radius)\n', (54609, 54656), False, 'import pygame\n'), ((54700, 54765), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 120), circle_radius)\n', (54718, 54765), False, 'import pygame\n'), ((54809, 54874), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 120), circle_radius)\n', (54827, 54874), False, 'import pygame\n'), ((54954, 55019), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 470), circle_radius)\n', (54972, 55019), False, 'import pygame\n'), ((55101, 55166), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 470), circle_radius)\n', (55119, 55166), False, 'import pygame\n'), ((55248, 55313), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 470), circle_radius)\n', (55266, 55313), False, 'import pygame\n'), ((55357, 55422), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 470), circle_radius)\n', (55375, 55422), False, 'import pygame\n'), ((55466, 55531), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 470), circle_radius)\n', (55484, 55531), False, 'import 
pygame\n'), ((55575, 55640), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 470), circle_radius)\n', (55593, 55640), False, 'import pygame\n'), ((55684, 55749), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 470), circle_radius)\n', (55702, 55749), False, 'import pygame\n'), ((55793, 55858), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 470), circle_radius)\n', (55811, 55858), False, 'import pygame\n'), ((55926, 55991), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 420), circle_radius)\n', (55944, 55991), False, 'import pygame\n'), ((56073, 56138), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 420), circle_radius)\n', (56091, 56138), False, 'import pygame\n'), ((56220, 56285), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 420), circle_radius)\n', (56238, 56285), False, 'import pygame\n'), ((56329, 56394), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 420), circle_radius)\n', (56347, 56394), False, 'import pygame\n'), ((56438, 56503), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 420), circle_radius)\n', (56456, 56503), False, 'import pygame\n'), ((56547, 56612), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 420), circle_radius)\n', (56565, 56612), False, 'import pygame\n'), 
((56656, 56721), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 420), circle_radius)\n', (56674, 56721), False, 'import pygame\n'), ((56765, 56830), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 420), circle_radius)\n', (56783, 56830), False, 'import pygame\n'), ((56897, 56962), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 370), circle_radius)\n', (56915, 56962), False, 'import pygame\n'), ((57044, 57109), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 370), circle_radius)\n', (57062, 57109), False, 'import pygame\n'), ((57191, 57256), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 370), circle_radius)\n', (57209, 57256), False, 'import pygame\n'), ((57300, 57365), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 370), circle_radius)\n', (57318, 57365), False, 'import pygame\n'), ((57409, 57474), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 370), circle_radius)\n', (57427, 57474), False, 'import pygame\n'), ((57518, 57583), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 370), circle_radius)\n', (57536, 57583), False, 'import pygame\n'), ((57627, 57692), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 370), circle_radius)\n', (57645, 57692), False, 'import pygame\n'), ((57736, 57801), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 370), circle_radius)\n', (57754, 57801), False, 'import pygame\n'), ((57869, 57934), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 320), circle_radius)\n', (57887, 57934), False, 'import pygame\n'), ((58016, 58081), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 320), circle_radius)\n', (58034, 58081), False, 'import pygame\n'), ((58163, 58228), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 320), circle_radius)\n', (58181, 58228), False, 'import pygame\n'), ((58272, 58337), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 320), circle_radius)\n', (58290, 58337), False, 'import pygame\n'), ((58381, 58446), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 320), circle_radius)\n', (58399, 58446), False, 'import pygame\n'), ((58490, 58555), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 320), circle_radius)\n', (58508, 58555), False, 'import pygame\n'), ((58599, 58664), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 320), circle_radius)\n', (58617, 58664), False, 'import pygame\n'), ((58708, 58773), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 320), circle_radius)\n', (58726, 58773), False, 'import pygame\n'), ((58840, 58905), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 270), circle_radius)\n', (58858, 58905), False, 'import pygame\n'), ((58987, 59052), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 270), circle_radius)\n', (59005, 59052), False, 'import pygame\n'), ((59134, 59199), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 270), circle_radius)\n', (59152, 59199), False, 'import pygame\n'), ((59243, 59308), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 270), circle_radius)\n', (59261, 59308), False, 'import pygame\n'), ((59352, 59417), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 270), circle_radius)\n', (59370, 59417), False, 'import pygame\n'), ((59461, 59526), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 270), circle_radius)\n', (59479, 59526), False, 'import pygame\n'), ((59570, 59635), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 270), circle_radius)\n', (59588, 59635), False, 'import pygame\n'), ((59679, 59744), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 270), circle_radius)\n', (59697, 59744), False, 'import pygame\n'), ((59811, 59876), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 220), circle_radius)\n', (59829, 59876), False, 'import pygame\n'), ((59958, 60023), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 220), circle_radius)\n', (59976, 60023), False, 'import pygame\n'), ((60105, 60170), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 220), circle_radius)\n', (60123, 60170), False, 'import pygame\n'), ((60214, 60279), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 220), circle_radius)\n', (60232, 60279), False, 'import pygame\n'), ((60323, 60388), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 220), circle_radius)\n', (60341, 60388), False, 'import pygame\n'), ((60432, 60497), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 220), circle_radius)\n', (60450, 60497), False, 'import pygame\n'), ((60541, 60606), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 220), circle_radius)\n', (60559, 60606), False, 'import pygame\n'), ((60650, 60715), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 220), circle_radius)\n', (60668, 60715), False, 'import pygame\n'), ((60784, 60849), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 170), circle_radius)\n', (60802, 60849), False, 'import pygame\n'), ((60931, 60996), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 170), circle_radius)\n', (60949, 60996), False, 'import pygame\n'), ((61078, 61143), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 170), circle_radius)\n', (61096, 61143), False, 'import pygame\n'), ((61187, 61252), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 170), circle_radius)\n', (61205, 61252), False, 'import pygame\n'), ((61296, 61361), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 170), circle_radius)\n', (61314, 61361), False, 'import pygame\n'), ((61405, 61470), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 170), circle_radius)\n', (61423, 61470), False, 'import pygame\n'), ((61514, 61579), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 170), circle_radius)\n', (61532, 61579), False, 'import pygame\n'), ((61623, 61688), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 170), circle_radius)\n', (61641, 61688), False, 'import pygame\n'), ((61755, 61820), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 120), circle_radius)\n', (61773, 61820), False, 'import pygame\n'), ((61902, 61967), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 120), circle_radius)\n', (61920, 61967), False, 'import pygame\n'), ((62049, 62114), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 120), circle_radius)\n', (62067, 62114), False, 'import pygame\n'), ((62158, 62223), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 120), circle_radius)\n', (62176, 62223), False, 'import pygame\n'), ((62267, 62332), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 120), circle_radius)\n', (62285, 62332), False, 'import pygame\n'), ((62376, 62441), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 120), circle_radius)\n', (62394, 62441), False, 'import pygame\n'), ((62485, 62550), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 120), circle_radius)\n', (62503, 62550), False, 'import pygame\n'), ((62594, 62659), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 120), circle_radius)\n', (62612, 62659), False, 'import pygame\n'), ((973, 1023), 'pygame.mixer_music.load', 'pygame.mixer_music.load', (['"""MinecraftThemeSong2.mp3"""'], {}), "('MinecraftThemeSong2.mp3')\n", (996, 1023), False, 'import pygame\n'), ((1037, 1082), 'pygame.mixer_music.play', 'pygame.mixer_music.play', ([], {'loops': '(100)', 'start': '(0.0)'}), '(loops=100, start=0.0)\n', (1060, 1082), False, 'import pygame\n'), ((24861, 24926), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 420), circle_radius)\n', (24879, 24926), False, 'import pygame\n'), ((24955, 25020), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 420), circle_radius)\n', (24973, 25020), False, 'import pygame\n'), ((1619, 1668), 'pygame.mixer_music.load', 'pygame.mixer_music.load', (['"""MinecraftThemeSong.mp3"""'], {}), 
"('MinecraftThemeSong.mp3')\n", (1642, 1668), False, 'import pygame\n'), ((1686, 1720), 'pygame.mixer.music.set_volume', 'pygame.mixer.music.set_volume', (['(0.1)'], {}), '(0.1)\n', (1715, 1720), False, 'import pygame\n'), ((1737, 1782), 'pygame.mixer_music.play', 'pygame.mixer_music.play', ([], {'loops': '(100)', 'start': '(0.0)'}), '(loops=100, start=0.0)\n', (1760, 1782), False, 'import pygame\n'), ((25208, 25273), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 370), circle_radius)\n', (25226, 25273), False, 'import pygame\n'), ((25302, 25367), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 370), circle_radius)\n', (25320, 25367), False, 'import pygame\n'), ((1855, 1905), 'pygame.mixer_music.load', 'pygame.mixer_music.load', (['"""MinecraftThemeSong2.mp3"""'], {}), "('MinecraftThemeSong2.mp3')\n", (1878, 1905), False, 'import pygame\n'), ((1923, 1968), 'pygame.mixer_music.play', 'pygame.mixer_music.play', ([], {'loops': '(100)', 'start': '(0.0)'}), '(loops=100, start=0.0)\n', (1946, 1968), False, 'import pygame\n'), ((25555, 25620), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 320), circle_radius)\n', (25573, 25620), False, 'import pygame\n'), ((25649, 25714), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 320), circle_radius)\n', (25667, 25714), False, 'import pygame\n'), ((25902, 25967), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 270), circle_radius)\n', (25920, 25967), False, 'import pygame\n'), ((25996, 26061), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 270)', 'circle_radius'], {}), 
'(gameDisplay, WHITE, (120, 270), circle_radius)\n', (26014, 26061), False, 'import pygame\n'), ((26249, 26314), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 220), circle_radius)\n', (26267, 26314), False, 'import pygame\n'), ((26343, 26408), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 220), circle_radius)\n', (26361, 26408), False, 'import pygame\n'), ((26596, 26661), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 170), circle_radius)\n', (26614, 26661), False, 'import pygame\n'), ((26690, 26755), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 170), circle_radius)\n', (26708, 26755), False, 'import pygame\n'), ((26943, 27008), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(120, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (120, 120), circle_radius)\n', (26961, 27008), False, 'import pygame\n'), ((27037, 27102), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(120, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (120, 120), circle_radius)\n', (27055, 27102), False, 'import pygame\n'), ((27313, 27378), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 470), circle_radius)\n', (27331, 27378), False, 'import pygame\n'), ((27407, 27472), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 470), circle_radius)\n', (27425, 27472), False, 'import pygame\n'), ((27660, 27725), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 420)', 'circle_radius'], {}), '(gameDisplay, 
BLACK, (170, 420), circle_radius)\n', (27678, 27725), False, 'import pygame\n'), ((27754, 27819), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 420), circle_radius)\n', (27772, 27819), False, 'import pygame\n'), ((28007, 28072), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 370), circle_radius)\n', (28025, 28072), False, 'import pygame\n'), ((28101, 28166), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 370), circle_radius)\n', (28119, 28166), False, 'import pygame\n'), ((28354, 28419), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 320), circle_radius)\n', (28372, 28419), False, 'import pygame\n'), ((28448, 28513), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 320), circle_radius)\n', (28466, 28513), False, 'import pygame\n'), ((28701, 28766), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 270), circle_radius)\n', (28719, 28766), False, 'import pygame\n'), ((28795, 28860), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 270), circle_radius)\n', (28813, 28860), False, 'import pygame\n'), ((29048, 29113), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 220), circle_radius)\n', (29066, 29113), False, 'import pygame\n'), ((29142, 29207), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 220), 
circle_radius)\n', (29160, 29207), False, 'import pygame\n'), ((29395, 29460), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 170), circle_radius)\n', (29413, 29460), False, 'import pygame\n'), ((29489, 29554), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 170), circle_radius)\n', (29507, 29554), False, 'import pygame\n'), ((29742, 29807), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(170, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (170, 120), circle_radius)\n', (29760, 29807), False, 'import pygame\n'), ((29836, 29901), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(170, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (170, 120), circle_radius)\n', (29854, 29901), False, 'import pygame\n'), ((30111, 30176), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 470), circle_radius)\n', (30129, 30176), False, 'import pygame\n'), ((30205, 30270), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 470), circle_radius)\n', (30223, 30270), False, 'import pygame\n'), ((30458, 30523), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 420), circle_radius)\n', (30476, 30523), False, 'import pygame\n'), ((30552, 30617), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 420), circle_radius)\n', (30570, 30617), False, 'import pygame\n'), ((30805, 30870), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 370), circle_radius)\n', 
(30823, 30870), False, 'import pygame\n'), ((30899, 30964), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 370), circle_radius)\n', (30917, 30964), False, 'import pygame\n'), ((31152, 31217), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 320), circle_radius)\n', (31170, 31217), False, 'import pygame\n'), ((31246, 31311), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 320), circle_radius)\n', (31264, 31311), False, 'import pygame\n'), ((31499, 31564), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 270), circle_radius)\n', (31517, 31564), False, 'import pygame\n'), ((31593, 31658), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 270), circle_radius)\n', (31611, 31658), False, 'import pygame\n'), ((31846, 31911), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 220), circle_radius)\n', (31864, 31911), False, 'import pygame\n'), ((31940, 32005), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 220), circle_radius)\n', (31958, 32005), False, 'import pygame\n'), ((32193, 32258), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 170), circle_radius)\n', (32211, 32258), False, 'import pygame\n'), ((32287, 32352), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 170), circle_radius)\n', (32305, 32352), 
False, 'import pygame\n'), ((32540, 32605), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(220, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (220, 120), circle_radius)\n', (32558, 32605), False, 'import pygame\n'), ((32634, 32699), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(220, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (220, 120), circle_radius)\n', (32652, 32699), False, 'import pygame\n'), ((32910, 32975), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 470), circle_radius)\n', (32928, 32975), False, 'import pygame\n'), ((33004, 33069), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 470), circle_radius)\n', (33022, 33069), False, 'import pygame\n'), ((33257, 33322), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 420), circle_radius)\n', (33275, 33322), False, 'import pygame\n'), ((33351, 33416), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 420), circle_radius)\n', (33369, 33416), False, 'import pygame\n'), ((33604, 33669), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 370), circle_radius)\n', (33622, 33669), False, 'import pygame\n'), ((33698, 33763), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 370), circle_radius)\n', (33716, 33763), False, 'import pygame\n'), ((33951, 34016), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 320), circle_radius)\n', (33969, 34016), False, 'import 
pygame\n'), ((34045, 34110), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 320), circle_radius)\n', (34063, 34110), False, 'import pygame\n'), ((34298, 34363), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 270), circle_radius)\n', (34316, 34363), False, 'import pygame\n'), ((34392, 34457), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 270), circle_radius)\n', (34410, 34457), False, 'import pygame\n'), ((34645, 34710), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 220), circle_radius)\n', (34663, 34710), False, 'import pygame\n'), ((34739, 34804), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 220), circle_radius)\n', (34757, 34804), False, 'import pygame\n'), ((34992, 35057), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 170), circle_radius)\n', (35010, 35057), False, 'import pygame\n'), ((35086, 35151), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 170), circle_radius)\n', (35104, 35151), False, 'import pygame\n'), ((35339, 35404), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(270, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (270, 120), circle_radius)\n', (35357, 35404), False, 'import pygame\n'), ((35433, 35498), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(270, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (270, 120), circle_radius)\n', (35451, 35498), False, 'import pygame\n'), 
((35708, 35773), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 470), circle_radius)\n', (35726, 35773), False, 'import pygame\n'), ((35802, 35867), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 470), circle_radius)\n', (35820, 35867), False, 'import pygame\n'), ((36055, 36120), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 420), circle_radius)\n', (36073, 36120), False, 'import pygame\n'), ((36149, 36214), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 420), circle_radius)\n', (36167, 36214), False, 'import pygame\n'), ((36402, 36467), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 370), circle_radius)\n', (36420, 36467), False, 'import pygame\n'), ((36496, 36561), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 370), circle_radius)\n', (36514, 36561), False, 'import pygame\n'), ((36749, 36814), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 320), circle_radius)\n', (36767, 36814), False, 'import pygame\n'), ((36843, 36908), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 320), circle_radius)\n', (36861, 36908), False, 'import pygame\n'), ((37096, 37161), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 270), circle_radius)\n', (37114, 37161), False, 'import pygame\n'), ((37190, 37255), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 270), circle_radius)\n', (37208, 37255), False, 'import pygame\n'), ((37443, 37508), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 220), circle_radius)\n', (37461, 37508), False, 'import pygame\n'), ((37537, 37602), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 220), circle_radius)\n', (37555, 37602), False, 'import pygame\n'), ((37790, 37855), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 170), circle_radius)\n', (37808, 37855), False, 'import pygame\n'), ((37884, 37949), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 170), circle_radius)\n', (37902, 37949), False, 'import pygame\n'), ((38137, 38202), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(320, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (320, 120), circle_radius)\n', (38155, 38202), False, 'import pygame\n'), ((38231, 38296), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(320, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (320, 120), circle_radius)\n', (38249, 38296), False, 'import pygame\n'), ((38506, 38571), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 470), circle_radius)\n', (38524, 38571), False, 'import pygame\n'), ((38600, 38665), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 470), circle_radius)\n', (38618, 38665), False, 'import pygame\n'), ((38853, 38918), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 420), circle_radius)\n', (38871, 38918), False, 'import pygame\n'), ((38947, 39012), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 420), circle_radius)\n', (38965, 39012), False, 'import pygame\n'), ((39200, 39265), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 370), circle_radius)\n', (39218, 39265), False, 'import pygame\n'), ((39294, 39359), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 370), circle_radius)\n', (39312, 39359), False, 'import pygame\n'), ((39547, 39612), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 320), circle_radius)\n', (39565, 39612), False, 'import pygame\n'), ((39641, 39706), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 320), circle_radius)\n', (39659, 39706), False, 'import pygame\n'), ((39894, 39959), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 270), circle_radius)\n', (39912, 39959), False, 'import pygame\n'), ((39988, 40053), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 270), circle_radius)\n', (40006, 40053), False, 'import pygame\n'), ((40241, 40306), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 220), circle_radius)\n', (40259, 40306), False, 'import pygame\n'), ((40335, 40400), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 220), circle_radius)\n', (40353, 40400), False, 'import pygame\n'), ((40588, 40653), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 170), circle_radius)\n', (40606, 40653), False, 'import pygame\n'), ((40682, 40747), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 170), circle_radius)\n', (40700, 40747), False, 'import pygame\n'), ((40935, 41000), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(370, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (370, 120), circle_radius)\n', (40953, 41000), False, 'import pygame\n'), ((41029, 41094), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(370, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (370, 120), circle_radius)\n', (41047, 41094), False, 'import pygame\n'), ((41306, 41371), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 470), circle_radius)\n', (41324, 41371), False, 'import pygame\n'), ((41400, 41465), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 470), circle_radius)\n', (41418, 41465), False, 'import pygame\n'), ((41653, 41718), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 420), circle_radius)\n', (41671, 41718), False, 'import pygame\n'), ((41747, 41812), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 420), circle_radius)\n', (41765, 41812), False, 'import pygame\n'), ((42000, 42065), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 370), circle_radius)\n', (42018, 42065), False, 'import pygame\n'), ((42094, 42159), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 370), circle_radius)\n', (42112, 42159), False, 'import pygame\n'), ((42347, 42412), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 320), circle_radius)\n', (42365, 42412), False, 'import pygame\n'), ((42441, 42506), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 320), circle_radius)\n', (42459, 42506), False, 'import pygame\n'), ((42694, 42759), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 270), circle_radius)\n', (42712, 42759), False, 'import pygame\n'), ((42788, 42853), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 270), circle_radius)\n', (42806, 42853), False, 'import pygame\n'), ((43041, 43106), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 220), circle_radius)\n', (43059, 43106), False, 'import pygame\n'), ((43135, 43200), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 220), circle_radius)\n', (43153, 43200), False, 'import pygame\n'), ((43388, 43453), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 170), circle_radius)\n', (43406, 43453), False, 'import pygame\n'), ((43482, 43547), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 170), circle_radius)\n', (43500, 43547), False, 'import pygame\n'), ((43735, 43800), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(420, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (420, 120), circle_radius)\n', (43753, 43800), False, 'import pygame\n'), ((43829, 43894), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(420, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (420, 120), circle_radius)\n', (43847, 43894), False, 'import pygame\n'), ((44104, 44169), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 470)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 470), circle_radius)\n', (44122, 44169), False, 'import pygame\n'), ((44198, 44263), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 470)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 470), circle_radius)\n', (44216, 44263), False, 'import pygame\n'), ((44451, 44516), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 420)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 420), circle_radius)\n', (44469, 44516), False, 'import pygame\n'), ((44545, 44610), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 420)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 420), circle_radius)\n', (44563, 44610), False, 'import pygame\n'), ((44798, 44863), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 370)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 370), circle_radius)\n', (44816, 44863), False, 'import pygame\n'), ((44892, 44957), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 370)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 370), circle_radius)\n', (44910, 44957), False, 'import pygame\n'), ((45145, 45210), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 320)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 320), circle_radius)\n', (45163, 45210), False, 'import pygame\n'), ((45239, 45304), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 320)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 320), circle_radius)\n', (45257, 45304), False, 'import pygame\n'), ((45492, 45557), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 270)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 270), circle_radius)\n', (45510, 45557), False, 'import pygame\n'), ((45586, 45651), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 270)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 270), circle_radius)\n', (45604, 45651), False, 'import pygame\n'), ((45839, 45904), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 220)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 220), circle_radius)\n', (45857, 45904), False, 'import pygame\n'), ((45933, 45998), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 220)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 220), circle_radius)\n', (45951, 45998), False, 'import pygame\n'), ((46186, 46251), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 170)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 170), circle_radius)\n', (46204, 46251), False, 'import pygame\n'), ((46280, 46345), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 170)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 170), circle_radius)\n', (46298, 46345), False, 'import pygame\n'), ((46533, 46598), 'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'BLACK', '(470, 120)', 'circle_radius'], {}), '(gameDisplay, BLACK, (470, 120), circle_radius)\n', (46551, 46598), False, 'import pygame\n'), ((46627, 46692), 
'pygame.draw.circle', 'pygame.draw.circle', (['gameDisplay', 'WHITE', '(470, 120)', 'circle_radius'], {}), '(gameDisplay, WHITE, (470, 120), circle_radius)\n', (46645, 46692), False, 'import pygame\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from IPython.core.magic import (
register_line_magic,
register_cell_magic,
register_line_cell_magic,
)
from IPython.display import HTML, Image
from pygments import highlight
from pygments.lexers import PythonLexer, get_lexer_by_name
from pygments.formatters import HtmlFormatter, ImageFormatter
@register_line_magic
def lmagic(line):
    """Line magic: echo *line* back wrapped in bold HTML."""
    return HTML('<b>{}</b>'.format(line))
@register_cell_magic
def cmagic(line, cell):
    """Cell magic: return the magic argument line and the cell body unchanged."""
    return (line, cell)
@register_cell_magic
def showcode(line, cell):
    """Render *cell* as syntax-highlighted HTML in a Jupyter notebook.

    The magic argument selects a Pygments lexer by name; with no
    argument the Python lexer is used.
    """
    lexer = get_lexer_by_name(line) if line else PythonLexer()
    formatter = HtmlFormatter(hl_lines="1", noclasses=True)
    return HTML(highlight(cell, lexer, formatter))
del showcode
@register_cell_magic
def imagecode(line, cell):
    """Render *cell* as a syntax-highlighted PNG image in a Jupyter notebook."""
    formatter = ImageFormatter(image_format='PNG')
    return Image(highlight(cell, PythonLexer(), formatter))
del imagecode
@register_line_cell_magic
def lcmagic(line, cell=None):
    """Magic usable both as %lcmagic (line form) and %%lcmagic (cell form)."""
    if cell is None:
        print("Called as line magic")
        return line
    print("Called as cell magic")
    return line, cell
# We delete these to avoid name conflicts for automagic to work
del lmagic, cmagic, lcmagic
| [
"pygments.formatters.ImageFormatter",
"pygments.formatters.HtmlFormatter",
"pygments.lexers.PythonLexer",
"pygments.lexers.get_lexer_by_name"
] | [((1019, 1032), 'pygments.lexers.PythonLexer', 'PythonLexer', ([], {}), '()\n', (1030, 1032), False, 'from pygments.lexers import PythonLexer, get_lexer_by_name\n'), ((680, 693), 'pygments.lexers.PythonLexer', 'PythonLexer', ([], {}), '()\n', (691, 693), False, 'from pygments.lexers import PythonLexer, get_lexer_by_name\n'), ((720, 743), 'pygments.lexers.get_lexer_by_name', 'get_lexer_by_name', (['line'], {}), '(line)\n', (737, 743), False, 'from pygments.lexers import PythonLexer, get_lexer_by_name\n'), ((830, 873), 'pygments.formatters.HtmlFormatter', 'HtmlFormatter', ([], {'hl_lines': '"""1"""', 'noclasses': '(True)'}), "(hl_lines='1', noclasses=True)\n", (843, 873), False, 'from pygments.formatters import HtmlFormatter, ImageFormatter\n'), ((1120, 1154), 'pygments.formatters.ImageFormatter', 'ImageFormatter', ([], {'image_format': '"""PNG"""'}), "(image_format='PNG')\n", (1134, 1154), False, 'from pygments.formatters import HtmlFormatter, ImageFormatter\n')] |
# -*- coding: utf-8 -*-
from flask import render_template, session, request
from app import app
from app.log import get_logger
from app.models.Login import login_required
from app.models.SQL_DB import User
logger = get_logger(__name__)
@app.route("/settings/profile", methods=["GET", "POST"])
@login_required
def settings_profile():
from app.models.Mailgun_Internal import mailgun_get_campaigns
username = session["username"]
campaigns = mailgun_get_campaigns()
user_data = User.query.filter_by(username=username).first()
return render_template(
"settings/index.html", user_data=user_data, campaigns=campaigns
)
@app.route("/settings/profile/profile_change", methods=["POST"])
@login_required
def settings_profile_change():
from app.models.Settings import edit_profile
try:
name_1 = request.form["name_1"]
name_2 = request.form["name_2"]
company = request.form["company"]
address = request.form["address"]
edit_profile(name_1, name_2, company, address)
return "Success. Profile Changed."
except Exception as e:
logger.exception(e)
return "Problem occurred. Contact system administrator"
@app.route("/settings/profile/pw_change", methods=["POST"])
@login_required
def settings_profile_pw_change():
from app.models.Settings import edit_password
from flask_bcrypt import generate_password_hash
try:
password = request.form["password"]
pw_hash = generate_password_hash(password).decode("utf8")
edit_password(pw_hash)
return "Success. Password Changed."
except Exception as e:
logger.exception(e)
return "Problem occurred. Contact system administrator"
@app.route("/settings/profile/mg_change", methods=["POST"])
@login_required
def settings_profile_mg_change():
from app.models.Settings import edit_mg_settings
try:
mg_domain = request.form["mg_domain"]
mg_api_private = request.form["mg_api_private"]
mg_sender = request.form["mg_sender"]
edit_mg_settings(mg_domain, mg_api_private, mg_sender)
return "Success. Settings Changed."
except Exception as e:
logger.exception(e)
return "Problem occurred. Contact system administrator"
@app.route("/settings/campaigns/add", methods=["POST"])
@login_required
def settings_campaigns_add():
from app.models.Mailgun_Internal import mailgun_add_campaigns
try:
campaign_name = request.form["campaign_name"]
response = mailgun_add_campaigns(campaign_name)
return response["message"]
except Exception as e:
logger.exception(e)
return "Problem occurred. Contact system administrator"
@app.route("/settings/campaigns/delete", methods=["POST"])
@login_required
def settings_campaigns_delete():
from app.models.Mailgun_Internal import mailgun_delete_campaigns
try:
campaign_name = request.form["campaign_name"]
mailgun_delete_campaigns(campaign_name)
return "Success. Campaign Deleted."
except Exception as e:
logger.exception(e)
return "Problem occurred. Contact system administrator"
| [
"flask.render_template",
"app.models.Mailgun_Internal.mailgun_get_campaigns",
"app.models.SQL_DB.User.query.filter_by",
"flask_bcrypt.generate_password_hash",
"app.models.Settings.edit_password",
"app.models.Settings.edit_profile",
"app.app.route",
"app.models.Mailgun_Internal.mailgun_delete_campaigns... | [((217, 237), 'app.log.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (227, 237), False, 'from app.log import get_logger\n'), ((241, 296), 'app.app.route', 'app.route', (['"""/settings/profile"""'], {'methods': "['GET', 'POST']"}), "('/settings/profile', methods=['GET', 'POST'])\n", (250, 296), False, 'from app import app\n'), ((652, 715), 'app.app.route', 'app.route', (['"""/settings/profile/profile_change"""'], {'methods': "['POST']"}), "('/settings/profile/profile_change', methods=['POST'])\n", (661, 715), False, 'from app import app\n'), ((1207, 1265), 'app.app.route', 'app.route', (['"""/settings/profile/pw_change"""'], {'methods': "['POST']"}), "('/settings/profile/pw_change', methods=['POST'])\n", (1216, 1265), False, 'from app import app\n'), ((1736, 1794), 'app.app.route', 'app.route', (['"""/settings/profile/mg_change"""'], {'methods': "['POST']"}), "('/settings/profile/mg_change', methods=['POST'])\n", (1745, 1794), False, 'from app import app\n'), ((2285, 2339), 'app.app.route', 'app.route', (['"""/settings/campaigns/add"""'], {'methods': "['POST']"}), "('/settings/campaigns/add', methods=['POST'])\n", (2294, 2339), False, 'from app import app\n'), ((2729, 2786), 'app.app.route', 'app.route', (['"""/settings/campaigns/delete"""'], {'methods': "['POST']"}), "('/settings/campaigns/delete', methods=['POST'])\n", (2738, 2786), False, 'from app import app\n'), ((455, 478), 'app.models.Mailgun_Internal.mailgun_get_campaigns', 'mailgun_get_campaigns', ([], {}), '()\n', (476, 478), False, 'from app.models.Mailgun_Internal import mailgun_get_campaigns\n'), ((554, 639), 'flask.render_template', 'render_template', (['"""settings/index.html"""'], {'user_data': 'user_data', 'campaigns': 'campaigns'}), "('settings/index.html', user_data=user_data, campaigns=campaigns\n )\n", (569, 639), False, 'from flask import render_template, session, request\n'), ((994, 1040), 
'app.models.Settings.edit_profile', 'edit_profile', (['name_1', 'name_2', 'company', 'address'], {}), '(name_1, name_2, company, address)\n', (1006, 1040), False, 'from app.models.Settings import edit_profile\n'), ((1546, 1568), 'app.models.Settings.edit_password', 'edit_password', (['pw_hash'], {}), '(pw_hash)\n', (1559, 1568), False, 'from app.models.Settings import edit_password\n'), ((2064, 2118), 'app.models.Settings.edit_mg_settings', 'edit_mg_settings', (['mg_domain', 'mg_api_private', 'mg_sender'], {}), '(mg_domain, mg_api_private, mg_sender)\n', (2080, 2118), False, 'from app.models.Settings import edit_mg_settings\n'), ((2535, 2571), 'app.models.Mailgun_Internal.mailgun_add_campaigns', 'mailgun_add_campaigns', (['campaign_name'], {}), '(campaign_name)\n', (2556, 2571), False, 'from app.models.Mailgun_Internal import mailgun_add_campaigns\n'), ((2977, 3016), 'app.models.Mailgun_Internal.mailgun_delete_campaigns', 'mailgun_delete_campaigns', (['campaign_name'], {}), '(campaign_name)\n', (3001, 3016), False, 'from app.models.Mailgun_Internal import mailgun_delete_campaigns\n'), ((495, 534), 'app.models.SQL_DB.User.query.filter_by', 'User.query.filter_by', ([], {'username': 'username'}), '(username=username)\n', (515, 534), False, 'from app.models.SQL_DB import User\n'), ((1490, 1522), 'flask_bcrypt.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (1512, 1522), False, 'from flask_bcrypt import generate_password_hash\n')] |
from django.conf.urls import include, url
from django.views.generic.base import RedirectView
# URL routing table: delegate everything under /event/ to the event
# app's urlconf and redirect the bare root path to the Django admin.
# NOTE(review): django.conf.urls.url() was deprecated and removed in
# Django 4.0 — migrate to django.urls.re_path/path when upgrading.
urlpatterns = [
    url(r'^event/', include('sedastrela_is.event.urls', namespace='event')),
    url(r'^$', RedirectView.as_view(url='/admin/')),
]
| [
"django.conf.urls.include",
"django.views.generic.base.RedirectView.as_view"
] | [((131, 185), 'django.conf.urls.include', 'include', (['"""sedastrela_is.event.urls"""'], {'namespace': '"""event"""'}), "('sedastrela_is.event.urls', namespace='event')\n", (138, 185), False, 'from django.conf.urls import include, url\n'), ((203, 238), 'django.views.generic.base.RedirectView.as_view', 'RedirectView.as_view', ([], {'url': '"""/admin/"""'}), "(url='/admin/')\n", (223, 238), False, 'from django.views.generic.base import RedirectView\n')] |
# test synthetic particles with x-displacement
#
# Flat analysis script: loads synthetic x-displacement test coordinates,
# filters them, bins the local z-uncertainty, and plots rmse_z / cm.
from os.path import join
import matplotlib.pyplot as plt
# imports
# NOTE(review): np, pd, correct, modify and details are imported but not
# used in this chunk — possibly needed by code outside this view.
import numpy as np
import pandas as pd
import filter
import analyze
from correction import correct
from utils import io, plotting, modify, details
# setup file paths
base_dir = '/Users/mackenzie/Desktop/gdpyt-characterization/publication data/iteration 5/experiment validation'
# NOTE(review): path_results is defined but never used in this chunk.
path_results = join(base_dir, 'results')
path_figs = join(base_dir, 'figs')
# synthetic
path_synthetic_xdisp = join(base_dir, 'test_coords/synthetic/static-xdisp')
save_id_synthetic_xdisp = 'synthetic_xdisp'
# setup I/O
sort_strings = ['test_id', '_coords_']
filetype = '.xlsx'
# NOTE(review): drop_columns / results_drop_columns are unused below
# (read_dataframes is called with drop_columns=None).
drop_columns = ['stack_id', 'z_true', 'x', 'y', 'max_sim', 'error']
results_drop_columns = ['frame', 'id', 'stack_id', 'z_true', 'z', 'x', 'y', 'x_true', 'y_true']
# setup - binning
column_to_bin_and_assess = 'z_true'
bins = 20
mean_bins = 1
h_synthetic = 1  # normalization height passed to the plotter (actual value == 100)
round_z_to_decimal = 5
z_range = [-65.001, 35.001]
min_cm = 0.5
save_figs_synthetic_xdisp = True
show_figs_synthetic_xdisp = True
# ---------------------------------
# 1. read .xlsx files to dictionary
dficts = io.read_dataframes(path_synthetic_xdisp, sort_strings, filetype, drop_columns=None)
# filter out the baseline image that isn't displaced
# (z_true == -15.0 is the undisplaced reference frame)
dficts = filter.dficts_filter(dficts, keys=['z_true'], values=[-15.0], operations=['notequalto'],
                             copy=True, only_keys=None, return_filtered=False)
# keep only measurements with |error| <= 10
dficts = filter.dficts_filter(dficts, keys=['error'], values=[[-10.0, 10.0]], operations=['between'],
                             copy=True, only_keys=None, return_filtered=False)
# ----------------------------------------------------------------------------------------
# Calculate uncertainty for SPCs
# 3. calculate local z-uncertainty
dfbicts = analyze.calculate_bin_local_rmse_z(dficts, column_to_bin_and_assess, bins, min_cm, z_range,
                                           round_z_to_decimal, dficts_ground_truth=None)
# plot setup
ylim_synthetic_xdisp = [-0.05, 5]
scale_fig_dim_legend_outside = [1.2, 1]
# 4. plot methods comparison local results
if save_figs_synthetic_xdisp or show_figs_synthetic_xdisp:
    # one legend entry per test id in the binned results
    label_dict = {key: {'label': key} for key in list(dfbicts.keys())}
    parameter = ['rmse_z', 'cm']
    fig, ax, ax2 = plotting.plot_dfbicts_local(dfbicts, parameters=parameter, h=h_synthetic, label_dict=label_dict,
                                            scale=scale_fig_dim_legend_outside)
    ax.set_ylim(ylim_synthetic_xdisp)
    ax.set_ylabel(r'$\sigma_{z}(z)\: (\mu m)$')
    ax2.set_ylabel(r'$c_{m}$')
    ax.legend(loc='upper left', bbox_to_anchor=(1.05, 0.1, 0, 1), title=r'$p_{ID,\: calib}$')
    plt.tight_layout()
    if save_figs_synthetic_xdisp:
        plt.savefig(join(path_figs, save_id_synthetic_xdisp + '_spcs_local_rmse_z_and_cm.png'))
    if show_figs_synthetic_xdisp:
        plt.show()
    plt.close(fig)
"utils.plotting.plot_dfbicts_local",
"os.path.join",
"matplotlib.pyplot.close",
"filter.dficts_filter",
"matplotlib.pyplot.tight_layout",
"utils.io.read_dataframes",
"analyze.calculate_bin_local_rmse_z",
"matplotlib.pyplot.show"
] | [((411, 436), 'os.path.join', 'join', (['base_dir', '"""results"""'], {}), "(base_dir, 'results')\n", (415, 436), False, 'from os.path import join\n'), ((449, 471), 'os.path.join', 'join', (['base_dir', '"""figs"""'], {}), "(base_dir, 'figs')\n", (453, 471), False, 'from os.path import join\n'), ((508, 560), 'os.path.join', 'join', (['base_dir', '"""test_coords/synthetic/static-xdisp"""'], {}), "(base_dir, 'test_coords/synthetic/static-xdisp')\n", (512, 560), False, 'from os.path import join\n'), ((1173, 1260), 'utils.io.read_dataframes', 'io.read_dataframes', (['path_synthetic_xdisp', 'sort_strings', 'filetype'], {'drop_columns': 'None'}), '(path_synthetic_xdisp, sort_strings, filetype,\n drop_columns=None)\n', (1191, 1260), False, 'from utils import io, plotting, modify, details\n'), ((1320, 1463), 'filter.dficts_filter', 'filter.dficts_filter', (['dficts'], {'keys': "['z_true']", 'values': '[-15.0]', 'operations': "['notequalto']", 'copy': '(True)', 'only_keys': 'None', 'return_filtered': '(False)'}), "(dficts, keys=['z_true'], values=[-15.0], operations=[\n 'notequalto'], copy=True, only_keys=None, return_filtered=False)\n", (1340, 1463), False, 'import filter\n'), ((1499, 1645), 'filter.dficts_filter', 'filter.dficts_filter', (['dficts'], {'keys': "['error']", 'values': '[[-10.0, 10.0]]', 'operations': "['between']", 'copy': '(True)', 'only_keys': 'None', 'return_filtered': '(False)'}), "(dficts, keys=['error'], values=[[-10.0, 10.0]],\n operations=['between'], copy=True, only_keys=None, return_filtered=False)\n", (1519, 1645), False, 'import filter\n'), ((1843, 1984), 'analyze.calculate_bin_local_rmse_z', 'analyze.calculate_bin_local_rmse_z', (['dficts', 'column_to_bin_and_assess', 'bins', 'min_cm', 'z_range', 'round_z_to_decimal'], {'dficts_ground_truth': 'None'}), '(dficts, column_to_bin_and_assess, bins,\n min_cm, z_range, round_z_to_decimal, dficts_ground_truth=None)\n', (1877, 1984), False, 'import analyze\n'), ((2340, 2476), 
'utils.plotting.plot_dfbicts_local', 'plotting.plot_dfbicts_local', (['dfbicts'], {'parameters': 'parameter', 'h': 'h_synthetic', 'label_dict': 'label_dict', 'scale': 'scale_fig_dim_legend_outside'}), '(dfbicts, parameters=parameter, h=h_synthetic,\n label_dict=label_dict, scale=scale_fig_dim_legend_outside)\n', (2367, 2476), False, 'from utils import io, plotting, modify, details\n'), ((2730, 2748), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2746, 2748), True, 'import matplotlib.pyplot as plt\n'), ((2936, 2950), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (2945, 2950), True, 'import matplotlib.pyplot as plt\n'), ((2921, 2931), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2929, 2931), True, 'import matplotlib.pyplot as plt\n'), ((2803, 2877), 'os.path.join', 'join', (['path_figs', "(save_id_synthetic_xdisp + '_spcs_local_rmse_z_and_cm.png')"], {}), "(path_figs, save_id_synthetic_xdisp + '_spcs_local_rmse_z_and_cm.png')\n", (2807, 2877), False, 'from os.path import join\n')] |
#!flask/bin/python
# Copyright 2015 vblazhnov
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'vblazhnov'
from db import DataBase
from functools import wraps
from flask import Flask, jsonify, abort, make_response, request
app = Flask(__name__)
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 body instead of Flask's default HTML page."""
    body = jsonify({'error': 'Not found'})
    return make_response(body, 404)
@app.errorhandler(400)
def incorrect_data(error):
    """Return a JSON 400 body for malformed request payloads."""
    body = jsonify({'error': 'Your data is incorrect'})
    return make_response(body, 400)
@app.errorhandler(409)
def conflict_data(error):
    """Return a JSON 409 body for conflicting data (e.g. duplicate login)."""
    body = jsonify({'error': 'Your data is conflict. Try another.'})
    return make_response(body, 409)
@app.errorhandler(403)
def forbidden(error):
    """Return the shared JSON 'Unauthorized access' body for 403 responses.

    Renamed from ``conflict_data``: the original reused the 409
    handler's function name, silently rebinding the module-level name
    (both handlers still registered, but ``conflict_data`` no longer
    referred to the 409 handler).
    """
    return unauthorized()
def requires_auth(f):
    """Decorator enforcing HTTP Basic auth on a view.

    On success the authenticated login is injected into the view's
    kwargs as ``authUserName``; otherwise the shared 403 response is
    returned without calling the view.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        creds = request.authorization
        if creds and check_auth(creds.username, creds.password):
            kwargs['authUserName'] = creds.username
            return f(*args, **kwargs)
        return unauthorized()
    return decorated
def check_auth(login, password):
    # True iff the login/password pair matches a stored user
    # (delegates entirely to the database layer).
    return DataBase.is_valid_pass(login, password)
def unauthorized():
    """Build the shared JSON 403 'Unauthorized access' response."""
    body = jsonify({'error': 'Unauthorized access'})
    return make_response(body, 403)
@app.route('/stats/api/register', methods=['POST'])
def sign_up():
    """Register a new user.

    Expects a JSON body with 'login' and 'password'. Returns 201 with
    the login and generated API key, 400 on a malformed body, 409 when
    the login is already taken.
    """
    payload = request.json
    # `'x' not in d` is the idiomatic (PEP 8) spelling of `not 'x' in d`.
    if not payload or 'login' not in payload or 'password' not in payload:
        abort(400)
    user = DataBase.add_user(payload['login'], payload['password'])
    if user is None:
        abort(409)
    return jsonify({'login': user[1], 'apiKey': user[3]}), 201
@app.route('/stats/api/me', methods=['GET'])
@requires_auth
def get_user_info(**args):
    """Return the authenticated user's login and API key."""
    record = DataBase.get_user_info(args['authUserName'])
    if record is None:
        abort(403)
    return jsonify({'login': record[1], 'apiKey': record[3]})
@app.route('/stats/api/events', methods=['POST'])
def add_event():
    """Record an event.

    Expects a JSON body with 'apiKey' and 'event'; the client's remote
    address is stored alongside. Returns 201 with the stored event,
    400 on a malformed body or unknown API key.
    """
    payload = request.json
    # `'x' not in d` is the idiomatic (PEP 8) spelling of `not 'x' in d`.
    if not payload or 'apiKey' not in payload or 'event' not in payload:
        abort(400)
    result = DataBase.add_event(payload['apiKey'], payload['event'], request.remote_addr)
    if result is None:
        abort(400)
    return jsonify({'event': result[2], 'date': result[3], 'ip': result[4]}), 201
@app.route('/stats/api/events', methods=['GET'])
@requires_auth
def get_events(**args):
    """List all events recorded by the authenticated user."""
    record = DataBase.get_user_info(args['authUserName'])
    if record is None:
        abort(403)
    return jsonify({'events': DataBase.get_users_events(record[0])})
@app.route('/stats/api/events/<string:name>', methods=['GET'])
@requires_auth
def get_event(name, **args):
    """Return detailed occurrences of one named event for the user."""
    record = DataBase.get_user_info(args['authUserName'])
    if record is None:
        abort(403)
    occurrences = DataBase.get_users_event(record[0], name)
    return jsonify({'event': name, 'events': occurrences})
if __name__ == '__main__':
    # Development server only; run behind a WSGI server in production.
    app.run(debug=True)
| [
"db.DataBase.add_user",
"flask.Flask",
"db.DataBase.get_user_info",
"db.DataBase.add_event",
"db.DataBase.get_users_event",
"functools.wraps",
"db.DataBase.get_users_events",
"flask.abort",
"db.DataBase.is_valid_pass",
"flask.jsonify"
] | [((739, 754), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (744, 754), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((1234, 1242), 'functools.wraps', 'wraps', (['f'], {}), '(f)\n', (1239, 1242), False, 'from functools import wraps\n'), ((1566, 1605), 'db.DataBase.is_valid_pass', 'DataBase.is_valid_pass', (['login', 'password'], {}), '(login, password)\n', (1588, 1605), False, 'from db import DataBase\n'), ((1942, 2008), 'db.DataBase.add_user', 'DataBase.add_user', (["request.json['login']", "request.json['password']"], {}), "(request.json['login'], request.json['password'])\n", (1959, 2008), False, 'from db import DataBase\n'), ((2270, 2314), 'db.DataBase.get_user_info', 'DataBase.get_user_info', (["args['authUserName']"], {}), "(args['authUserName'])\n", (2292, 2314), False, 'from db import DataBase\n'), ((2368, 2414), 'flask.jsonify', 'jsonify', (["{'login': user[1], 'apiKey': user[3]}"], {}), "({'login': user[1], 'apiKey': user[3]})\n", (2375, 2414), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((2643, 2734), 'db.DataBase.add_event', 'DataBase.add_event', (["request.json['apiKey']", "request.json['event']", 'request.remote_addr'], {}), "(request.json['apiKey'], request.json['event'], request.\n remote_addr)\n", (2661, 2734), False, 'from db import DataBase\n'), ((3001, 3045), 'db.DataBase.get_user_info', 'DataBase.get_user_info', (["args['authUserName']"], {}), "(args['authUserName'])\n", (3023, 3045), False, 'from db import DataBase\n'), ((3101, 3135), 'db.DataBase.get_users_events', 'DataBase.get_users_events', (['user[0]'], {}), '(user[0])\n', (3126, 3135), False, 'from db import DataBase\n'), ((3148, 3175), 'flask.jsonify', 'jsonify', (["{'events': result}"], {}), "({'events': result})\n", (3155, 3175), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((3357, 3401), 'db.DataBase.get_user_info', 'DataBase.get_user_info', (["args['authUserName']"], 
{}), "(args['authUserName'])\n", (3379, 3401), False, 'from db import DataBase\n'), ((3457, 3496), 'db.DataBase.get_users_event', 'DataBase.get_users_event', (['user[0]', 'name'], {}), '(user[0], name)\n', (3481, 3496), False, 'from db import DataBase\n'), ((3508, 3550), 'flask.jsonify', 'jsonify', (["{'event': name, 'events': result}"], {}), "({'event': name, 'events': result})\n", (3515, 3550), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((826, 857), 'flask.jsonify', 'jsonify', (["{'error': 'Not found'}"], {}), "({'error': 'Not found'})\n", (833, 857), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((940, 984), 'flask.jsonify', 'jsonify', (["{'error': 'Your data is incorrect'}"], {}), "({'error': 'Your data is incorrect'})\n", (947, 984), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((1066, 1123), 'flask.jsonify', 'jsonify', (["{'error': 'Your data is conflict. Try another.'}"], {}), "({'error': 'Your data is conflict. 
Try another.'})\n", (1073, 1123), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((1652, 1693), 'flask.jsonify', 'jsonify', (["{'error': 'Unauthorized access'}"], {}), "({'error': 'Unauthorized access'})\n", (1659, 1693), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((1919, 1929), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (1924, 1929), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((2039, 2049), 'flask.abort', 'abort', (['(409)'], {}), '(409)\n', (2044, 2049), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((2062, 2108), 'flask.jsonify', 'jsonify', (["{'login': user[1], 'apiKey': user[3]}"], {}), "({'login': user[1], 'apiKey': user[3]})\n", (2069, 2108), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((2345, 2355), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (2350, 2355), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((2618, 2628), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (2623, 2628), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((2762, 2772), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (2767, 2772), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((2785, 2850), 'flask.jsonify', 'jsonify', (["{'event': result[2], 'date': result[3], 'ip': result[4]}"], {}), "({'event': result[2], 'date': result[3], 'ip': result[4]})\n", (2792, 2850), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((3076, 3086), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (3081, 3086), False, 'from flask import Flask, jsonify, abort, make_response, request\n'), ((3432, 3442), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (3437, 3442), False, 'from flask import Flask, jsonify, abort, make_response, request\n')] |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError
class Project(models.Model):
    # Extends project.project with the sales-order linkage used when
    # billing project services (e.g. timesheets) through a sale order.
    _inherit = 'project.project'

    # SO line the project bills against; restricted to service lines of
    # the project's own sale order (see domain below).
    sale_line_id = fields.Many2one(
        'sale.order.line', 'Sales Order Item', copy=False,
        domain="[('is_service', '=', True), ('is_expense', '=', False), ('order_id', '=', sale_order_id), ('state', 'in', ['sale', 'done']), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        help="Sales order item to which the project is linked. Link the timesheet entry to the sales order item defined on the project. "
            "Only applies on tasks without sale order item defined, and if the employee is not in the 'Employee/Sales Order Item Mapping' of the project.")
    sale_order_id = fields.Many2one('sale.order', 'Sales Order',
        domain="[('order_line.product_id.type', '=', 'service'), ('partner_id', '=', partner_id), ('state', 'in', ['sale', 'done'])]",
        copy=False, help="Sales order to which the project is linked.")

    # DB-level guarantee: a sale order line may only be set together
    # with its sale order.
    _sql_constraints = [
        ('sale_order_required_if_sale_line', "CHECK((sale_line_id IS NOT NULL AND sale_order_id IS NOT NULL) OR (sale_line_id IS NULL))", 'The project should be linked to a sale order to select a sale order item.'),
    ]

    @api.model
    def _map_tasks_default_valeus(self, task, project):
        # Override: when duplicating/mapping tasks onto a new project,
        # never carry the source task's SO line over.
        # NOTE: the misspelled name ("valeus") must match the parent
        # method being overridden — do not rename here alone.
        defaults = super()._map_tasks_default_valeus(task, project)
        defaults['sale_line_id'] = False
        return defaults

    def action_view_so(self):
        # Open the linked sale order in a read-only form view.
        self.ensure_one()
        action_window = {
            "type": "ir.actions.act_window",
            "res_model": "sale.order",
            "name": "Sales Order",
            "views": [[False, "form"]],
            "context": {"create": False, "show_sale": True},
            "res_id": self.sale_order_id.id
        }
        return action_window
class ProjectTask(models.Model):
    # Extends project.task with sale-order linkage, invoicing status
    # and the actions used to view/create invoices from a task.
    _inherit = "project.task"

    sale_order_id = fields.Many2one('sale.order', 'Sales Order', help="Sales order to which the task is linked.")
    # Computed (but user-editable and stored) SO line; defaulted from
    # the parent task or the project in _compute_sale_line.
    sale_line_id = fields.Many2one(
        'sale.order.line', 'Sales Order Item', domain="[('company_id', '=', company_id), ('is_service', '=', True), ('order_partner_id', 'child_of', commercial_partner_id), ('is_expense', '=', False), ('state', 'in', ['sale', 'done']), ('order_id', '=?', project_sale_order_id)]",
        compute='_compute_sale_line', store=True, readonly=False, copy=False,
        help="Sales order item to which the project is linked. Link the timesheet entry to the sales order item defined on the project. "
            "Only applies on tasks without sale order item defined, and if the employee is not in the 'Employee/Sales Order Item Mapping' of the project.")
    project_sale_order_id = fields.Many2one('sale.order', string="Project's sale order", related='project_id.sale_order_id')
    invoice_count = fields.Integer("Number of invoices", related='sale_order_id.invoice_count')
    task_to_invoice = fields.Boolean("To invoice", compute='_compute_task_to_invoice', search='_search_task_to_invoice', groups='sales_team.group_sale_salesman_all_leads')

    @api.depends('project_id.sale_line_id.order_partner_id')
    def _compute_partner_id(self):
        # Fall back to the project's SO-line customer before the
        # standard computation runs.
        for task in self:
            if not task.partner_id:
                task.partner_id = task.project_id.sale_line_id.order_partner_id
        super()._compute_partner_id()

    @api.depends('commercial_partner_id', 'sale_line_id.order_partner_id.commercial_partner_id', 'parent_id.sale_line_id', 'project_id.sale_line_id')
    def _compute_sale_line(self):
        # Default the SO line from the parent task, then the project;
        # clear it when it no longer matches the task's customer.
        for task in self:
            if not task.sale_line_id:
                task.sale_line_id = task.parent_id.sale_line_id or task.project_id.sale_line_id
            # check sale_line_id and customer are coherent
            if task.sale_line_id.order_partner_id.commercial_partner_id != task.partner_id.commercial_partner_id:
                task.sale_line_id = False

    @api.constrains('sale_line_id')
    def _check_sale_line_type(self):
        # Only service, non-expense SO lines may be linked to a task.
        for task in self.sudo():
            if task.sale_line_id:
                if not task.sale_line_id.is_service or task.sale_line_id.is_expense:
                    raise ValidationError(_(
                        'You cannot link the order item %(order_id)s - %(product_id)s to this task because it is a re-invoiced expense.',
                        order_id=task.sale_line_id.order_id.name,
                        product_id=task.sale_line_id.product_id.display_name,
                    ))

    def unlink(self):
        # Refuse deletion while a task is still linked to an SO line.
        if any(task.sale_line_id for task in self):
            raise ValidationError(_('You have to unlink the task from the sale order item in order to delete it.'))
        return super().unlink()

    # ---------------------------------------------------
    # Actions
    # ---------------------------------------------------

    def _get_action_view_so_ids(self):
        # Hook: sale order ids shown by action_view_so (overridable).
        return self.sale_order_id.ids

    def action_view_so(self):
        # Open the linked sale order(s): form view for one, list+form
        # for several.
        self.ensure_one()
        so_ids = self._get_action_view_so_ids()
        action_window = {
            "type": "ir.actions.act_window",
            "res_model": "sale.order",
            "name": "Sales Order",
            "views": [[False, "tree"], [False, "form"]],
            "context": {"create": False, "show_sale": True},
            "domain": [["id", "in", so_ids]],
        }
        if len(so_ids) == 1:
            action_window["views"] = [[False, "form"]]
            action_window["res_id"] = so_ids[0]
        return action_window

    def rating_get_partner_id(self):
        # Rating recipient: task customer, else the SO customer, else
        # the standard fallback.
        partner = self.partner_id or self.sale_line_id.order_id.partner_id
        if partner:
            return partner
        return super().rating_get_partner_id()

    @api.depends('sale_order_id.invoice_status', 'sale_order_id.order_line')
    def _compute_task_to_invoice(self):
        # A task is "to invoice" while its SO is neither fully invoiced
        # nor marked as nothing-to-invoice.
        for task in self:
            if task.sale_order_id:
                task.task_to_invoice = bool(task.sale_order_id.invoice_status not in ('no', 'invoiced'))
            else:
                task.task_to_invoice = False

    @api.model
    def _search_task_to_invoice(self, operator, value):
        # Search implementation for the non-stored task_to_invoice
        # field: select tasks whose SO still has something to invoice.
        # NOTE(review): 'inselect' is an internal domain operator —
        # confirm it is still supported on the target Odoo version.
        query = """
            SELECT so.id
            FROM sale_order so
            WHERE so.invoice_status != 'invoiced'
            AND so.invoice_status != 'no'
        """
        operator_new = 'inselect'
        # Negate the subquery when (operator, value) express "not to
        # invoice" — i.e. exactly one of ('=' operator, truthy value).
        if(bool(operator == '=') ^ bool(value)):
            operator_new = 'not inselect'
        return [('sale_order_id', operator_new, (query, ()))]

    def action_create_invoice(self):
        # ensure the SO exists before invoicing, then confirm it
        so_to_confirm = self.filtered(
            lambda task: task.sale_order_id and task.sale_order_id.state in ['draft', 'sent']
        ).mapped('sale_order_id')
        so_to_confirm.action_confirm()

        # redirect create invoice wizard (of the Sales Order)
        action = self.env["ir.actions.actions"]._for_xml_id("sale.action_view_sale_advance_payment_inv")
        # The wizard context from XML is a literal dict string; extend
        # it with the selected orders and company.
        context = literal_eval(action.get('context', "{}"))
        context.update({
            'active_id': self.sale_order_id.id if len(self) == 1 else False,
            'active_ids': self.mapped('sale_order_id').ids,
            'default_company_id': self.company_id.id,
        })
        action['context'] = context
        return action
class ProjectTaskRecurrence(models.Model):
    # Propagate the sale order line onto tasks created by a recurrence.
    _inherit = 'project.task.recurrence'

    def _new_task_values(self, task):
        values = super(ProjectTaskRecurrence, self)._new_task_values(task)
        # NOTE(review): the *task* argument is deliberately replaced by
        # the recurrence's first task (with sudo) before reading the SO
        # line — confirm this shadowing is intended.
        task = self.sudo().task_ids[0]
        values['sale_line_id'] = self._get_sale_line_id(task)
        return values

    def _get_sale_line_id(self, task):
        # Hook: SO line id copied onto new recurring tasks (overridable).
        return task.sale_line_id.id
| [
"odoo._",
"odoo.api.constrains",
"odoo.fields.Many2one",
"odoo.fields.Integer",
"odoo.api.depends",
"odoo.fields.Boolean"
] | [((298, 856), 'odoo.fields.Many2one', 'fields.Many2one', (['"""sale.order.line"""', '"""Sales Order Item"""'], {'copy': '(False)', 'domain': '"""[(\'is_service\', \'=\', True), (\'is_expense\', \'=\', False), (\'order_id\', \'=\', sale_order_id), (\'state\', \'in\', [\'sale\', \'done\']), \'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]"""', 'help': '"""Sales order item to which the project is linked. Link the timesheet entry to the sales order item defined on the project. Only applies on tasks without sale order item defined, and if the employee is not in the \'Employee/Sales Order Item Mapping\' of the project."""'}), '(\'sale.order.line\', \'Sales Order Item\', copy=False, domain=\n "[(\'is_service\', \'=\', True), (\'is_expense\', \'=\', False), (\'order_id\', \'=\', sale_order_id), (\'state\', \'in\', [\'sale\', \'done\']), \'|\', (\'company_id\', \'=\', False), (\'company_id\', \'=\', company_id)]"\n , help=\n "Sales order item to which the project is linked. Link the timesheet entry to the sales order item defined on the project. 
Only applies on tasks without sale order item defined, and if the employee is not in the \'Employee/Sales Order Item Mapping\' of the project."\n )\n', (313, 856), False, 'from odoo import api, fields, models, _\n'), ((893, 1138), 'odoo.fields.Many2one', 'fields.Many2one', (['"""sale.order"""', '"""Sales Order"""'], {'domain': '"""[(\'order_line.product_id.type\', \'=\', \'service\'), (\'partner_id\', \'=\', partner_id), (\'state\', \'in\', [\'sale\', \'done\'])]"""', 'copy': '(False)', 'help': '"""Sales order to which the project is linked."""'}), '(\'sale.order\', \'Sales Order\', domain=\n "[(\'order_line.product_id.type\', \'=\', \'service\'), (\'partner_id\', \'=\', partner_id), (\'state\', \'in\', [\'sale\', \'done\'])]"\n , copy=False, help=\'Sales order to which the project is linked.\')\n', (908, 1138), False, 'from odoo import api, fields, models, _\n'), ((2070, 2168), 'odoo.fields.Many2one', 'fields.Many2one', (['"""sale.order"""', '"""Sales Order"""'], {'help': '"""Sales order to which the task is linked."""'}), "('sale.order', 'Sales Order', help=\n 'Sales order to which the task is linked.')\n", (2085, 2168), False, 'from odoo import api, fields, models, _\n'), ((2183, 2836), 'odoo.fields.Many2one', 'fields.Many2one', (['"""sale.order.line"""', '"""Sales Order Item"""'], {'domain': '"""[(\'company_id\', \'=\', company_id), (\'is_service\', \'=\', True), (\'order_partner_id\', \'child_of\', commercial_partner_id), (\'is_expense\', \'=\', False), (\'state\', \'in\', [\'sale\', \'done\']), (\'order_id\', \'=?\', project_sale_order_id)]"""', 'compute': '"""_compute_sale_line"""', 'store': '(True)', 'readonly': '(False)', 'copy': '(False)', 'help': '"""Sales order item to which the project is linked. Link the timesheet entry to the sales order item defined on the project. 
Only applies on tasks without sale order item defined, and if the employee is not in the \'Employee/Sales Order Item Mapping\' of the project."""'}), '(\'sale.order.line\', \'Sales Order Item\', domain=\n "[(\'company_id\', \'=\', company_id), (\'is_service\', \'=\', True), (\'order_partner_id\', \'child_of\', commercial_partner_id), (\'is_expense\', \'=\', False), (\'state\', \'in\', [\'sale\', \'done\']), (\'order_id\', \'=?\', project_sale_order_id)]"\n , compute=\'_compute_sale_line\', store=True, readonly=False, copy=False,\n help=\n "Sales order item to which the project is linked. Link the timesheet entry to the sales order item defined on the project. Only applies on tasks without sale order item defined, and if the employee is not in the \'Employee/Sales Order Item Mapping\' of the project."\n )\n', (2198, 2836), False, 'from odoo import api, fields, models, _\n'), ((2877, 2978), 'odoo.fields.Many2one', 'fields.Many2one', (['"""sale.order"""'], {'string': '"""Project\'s sale order"""', 'related': '"""project_id.sale_order_id"""'}), '(\'sale.order\', string="Project\'s sale order", related=\n \'project_id.sale_order_id\')\n', (2892, 2978), False, 'from odoo import api, fields, models, _\n'), ((2994, 3069), 'odoo.fields.Integer', 'fields.Integer', (['"""Number of invoices"""'], {'related': '"""sale_order_id.invoice_count"""'}), "('Number of invoices', related='sale_order_id.invoice_count')\n", (3008, 3069), False, 'from odoo import api, fields, models, _\n'), ((3092, 3251), 'odoo.fields.Boolean', 'fields.Boolean', (['"""To invoice"""'], {'compute': '"""_compute_task_to_invoice"""', 'search': '"""_search_task_to_invoice"""', 'groups': '"""sales_team.group_sale_salesman_all_leads"""'}), "('To invoice', compute='_compute_task_to_invoice', search=\n '_search_task_to_invoice', groups=\n 'sales_team.group_sale_salesman_all_leads')\n", (3106, 3251), False, 'from odoo import api, fields, models, _\n'), ((3248, 3303), 'odoo.api.depends', 'api.depends', 
(['"""project_id.sale_line_id.order_partner_id"""'], {}), "('project_id.sale_line_id.order_partner_id')\n", (3259, 3303), False, 'from odoo import api, fields, models, _\n'), ((3525, 3677), 'odoo.api.depends', 'api.depends', (['"""commercial_partner_id"""', '"""sale_line_id.order_partner_id.commercial_partner_id"""', '"""parent_id.sale_line_id"""', '"""project_id.sale_line_id"""'], {}), "('commercial_partner_id',\n 'sale_line_id.order_partner_id.commercial_partner_id',\n 'parent_id.sale_line_id', 'project_id.sale_line_id')\n", (3536, 3677), False, 'from odoo import api, fields, models, _\n'), ((4085, 4115), 'odoo.api.constrains', 'api.constrains', (['"""sale_line_id"""'], {}), "('sale_line_id')\n", (4099, 4115), False, 'from odoo import api, fields, models, _\n'), ((5886, 5957), 'odoo.api.depends', 'api.depends', (['"""sale_order_id.invoice_status"""', '"""sale_order_id.order_line"""'], {}), "('sale_order_id.invoice_status', 'sale_order_id.order_line')\n", (5897, 5957), False, 'from odoo import api, fields, models, _\n'), ((4764, 4849), 'odoo._', '_', (['"""You have to unlink the task from the sale order item in order to delete it."""'], {}), "('You have to unlink the task from the sale order item in order to delete it.'\n )\n", (4765, 4849), False, 'from odoo import api, fields, models, _\n'), ((4347, 4568), 'odoo._', '_', (['"""You cannot link the order item %(order_id)s - %(product_id)s to this task because it is a re-invoiced expense."""'], {'order_id': 'task.sale_line_id.order_id.name', 'product_id': 'task.sale_line_id.product_id.display_name'}), "('You cannot link the order item %(order_id)s - %(product_id)s to this task because it is a re-invoiced expense.'\n , order_id=task.sale_line_id.order_id.name, product_id=task.\n sale_line_id.product_id.display_name)\n", (4348, 4568), False, 'from odoo import api, fields, models, _\n')] |
from .AAA3A_utils.cogsutils import CogsUtils # isort:skip
from redbot.core import commands # isort:skip
from redbot.core.i18n import Translator, cog_i18n # isort:skip
from redbot.core.bot import Red # isort:skip
import discord # isort:skip
import typing # isort:skip
from typing import List, Optional, Tuple, Union
from redbot.core import Config
# Credits:
# Thanks to @epic guy on Discord for the basic syntax (command groups, commands) and also commands (await ctx.send, await ctx.author.send, await ctx.message.delete())!
# Thanks to TrustyJAID for the code (a bit modified to work here and to improve as needed) for the log messages sent! (https://github.com/TrustyJAID/Trusty-cogs/tree/master/extendedmodlog)
# Thanks to the developers of the cogs I added features to as it taught me how to make a cog! (Chessgame by WildStriker, Captcha by Kreusada, Speak by Epic guy and Rommer by Dav)
# Thanks to all the people who helped me with some commands in the #coding channel of the redbot support server!
_ = Translator("AntiNuke", __file__)
@cog_i18n(_)
class AntiNuke(commands.Cog):
    """A cog to remove all permissions from a person who deletes a channel!"""

    def __init__(self, bot):
        self.bot: Red = bot
        # Persistent per-guild settings and per-member counters.
        self.config: Config = Config.get_conf(
            self,
            identifier=947269490247,
            force_registration=True,
        )
        self.antinuke_guild = {
            "logschannel": None,  # The channel for logs.
            "enabled": False,  # Enable the possibility.
            "user_dm": True,  # Enable the user dm.
            "number_detected_member": 1,  # Number.
            "number_detected_bot": 1,  # Number.
        }
        self.antinuke_member = {
            "count": 0,  # The count of channel's deletes.
            "old_roles": [],  # Ids of the removed roles, to hand back if it wasn't a nuke.
        }
        self.config.register_guild(**self.antinuke_guild)
        self.config.register_member(**self.antinuke_member)
        self.cogsutils = CogsUtils(cog=self)
        self.cogsutils._setup()

    @commands.Cog.listener()
    async def on_guild_channel_delete(self, old_channel: discord.abc.GuildChannel):
        """Remove all permissions from a user if they delete a channel.
        """
        config = await self.config.guild(old_channel.guild).all()
        actual_state_enabled = config["enabled"]
        actual_state_user_dm = config["user_dm"]
        actual_number_detected_member = config["number_detected_member"]
        actual_number_detected_bot = config["number_detected_bot"]
        perp, reason = await self.get_audit_log_reason(
            old_channel.guild, old_channel, discord.AuditLogAction.channel_delete
        )
        # Resolve the stored channel id to a channel object ONCE here.
        # (Bug fix: the old code called get_channel() a second time on the
        # already-resolved channel object before sending, which returns None.)
        logschannel = self.bot.get_channel(config["logschannel"])
        if perp is None:
            return
        if perp == old_channel.guild.owner:
            return
        if perp == old_channel.guild.me:
            return
        if not actual_state_enabled:
            return
        actual_count = await self.config.member(perp).count()
        # Members and bots each have their own threshold; 0 disables it.
        if not perp.bot:
            actual_number_detected = actual_number_detected_member
        else:
            actual_number_detected = actual_number_detected_bot
        if actual_number_detected == 0:
            return
        actual_count += 1
        if actual_count < actual_number_detected:
            await self.config.member(perp).count.set(actual_count)
            return
        await self.config.member(perp).count.clear()
        old_roles = perp.roles.copy()
        old_roles.remove(old_channel.guild.default_role)
        # Only keep roles the bot can actually manage.
        old_roles = [
            r for r in old_roles if r.position < old_channel.guild.me.top_role.position and not r.managed
        ]
        rolelist_name = [r.name for r in old_roles]  # referenced by the DM text below via locals()
        rolelist_mention = [r.mention for r in old_roles]
        if actual_state_user_dm:
            await perp.send(_("All your roles have been taken away because you have deleted channel #{old_channel}.\nYour former roles: {rolelist_name}").format(**locals()))
        if old_channel.guild.me.guild_permissions.manage_roles:
            # Remove roles one by one so that a single failure does not stop the rest.
            for role in old_roles:
                try:
                    await perp.remove_roles(role, reason=_("All roles in {perp} ({perp.id}) roles have been removed as a result of the antinuke system being triggered on this server.").format(**locals()))
                except Exception:
                    pass
        # Bug fix: store role *ids* (JSON-serializable) instead of Role objects,
        # matching what `resetuser` reads back with ctx.guild.get_role().
        await self.config.member(perp).old_roles.set([r.id for r in old_roles])
        if logschannel:
            embed: discord.Embed = discord.Embed()
            embed.title = _("The user {perp.name}#{perp.discriminator} has deleted the channel #{old_channel.name}!").format(**locals())
            embed.description = _("To prevent him from doing anything else, I took away as many roles as my current permissions would allow.\nUser mention: {perp.mention} - User ID: {perp.id}").format(**locals())
            embed.color = discord.Colour.dark_teal()
            embed.set_author(name=perp, url=perp.display_avatar if self.cogsutils.is_dpy2 else perp.avatar_url, icon_url=perp.display_avatar if self.cogsutils.is_dpy2 else perp.avatar_url)
            embed.add_field(
                inline=False,
                name=_("Before I intervened, the user had the following roles:").format(**locals()),
                value=rolelist_mention)
            await logschannel.send(embed=embed)

    async def get_audit_log_reason(
        self,
        guild: discord.Guild,
        target: Union[discord.abc.GuildChannel, discord.Member, discord.Role],
        action: discord.AuditLogAction,
    ) -> Tuple[Optional[discord.abc.User], Optional[str]]:
        """Return (user, reason) for the most recent audit-log entry matching
        `action` on `target`, or (None, None) when nothing matches or the bot
        lacks the View Audit Log permission."""
        perp = None
        reason = None
        if guild.me.guild_permissions.view_audit_log:
            async for log in guild.audit_logs(limit=5, action=action):
                if log.target.id == target.id:
                    perp = log.user
                    if log.reason:
                        reason = log.reason
                    break
        return perp, reason

    @commands.guild_only()
    @commands.guildowner()
    @commands.group(name="setantinuke", aliases=["antinukeset"])
    async def configuration(self, ctx: commands.Context):
        """Configure AntiNuke for your server."""

    @configuration.command(aliases=["lchann", "lchannel", "logschan", "logchannel", "logsc"], usage="<text_channel_or_'none'>")
    async def logschannel(self, ctx: commands.Context, *, channel: typing.Optional[discord.TextChannel]=None):
        """Set a channel where events are registered.
        ``channel``: Text channel.
        You can also use "None" if you wish to remove the logging channel.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        if channel is None:
            await self.config.guild(ctx.guild).logschannel.clear()
            await ctx.send(_("Logging channel removed.").format(**locals()))
            return
        needperm = await self.check_permissions_in_channel(["embed_links", "read_messages", "read_message_history", "send_messages", "attach_files"], channel)
        if needperm:
            await ctx.send(_("The bot does not have at least one of the following permissions in this channel: `embed_links`, `read_messages`, `read_message_history`, `send_messages`, `attach_files`.").format(**locals()))
            return
        await self.config.guild(ctx.guild).logschannel.set(channel.id)
        await ctx.send(_("Logging channel registered: {channel.mention}.").format(**locals()))

    async def check_permissions_in_channel(self, permissions: List[str], channel: discord.TextChannel):
        """Function to checks if the permissions are available in a guild.
        This will return a list of the missing permissions.
        """
        return [
            permission
            for permission in permissions
            if not getattr(channel.permissions_for(channel.guild.me), permission)
        ]

    @configuration.command(name="enable", aliases=["activate"], usage="<true_or_false>")
    async def enable(self, ctx: commands.Context, state: bool):
        """Enable or disable AntiNuke.
        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_enabled = config["enabled"]
        if actual_state_enabled is state:
            await ctx.send(_("AntiNuke is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).enabled.set(state)
        await ctx.send(_("AntiNuke state registered: {state}.").format(**locals()))

    @configuration.command(name="userdm", aliases=["dm"], usage="<true_or_false>")
    async def userdm(self, ctx: commands.Context, state: bool):
        """Enable or disable User DM.
        Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.guild(ctx.guild).all()
        actual_state_user_dm = config["user_dm"]
        if actual_state_user_dm is state:
            await ctx.send(_("User DM is already set on {state}.").format(**locals()))
            return
        await self.config.guild(ctx.guild).user_dm.set(state)
        await ctx.send(_("User DM state registered: {state}.").format(**locals()))

    @configuration.command(name="nbmember", aliases=["membernb"], usage="<int>")
    async def nbmember(self, ctx: commands.Context, int: int):
        """Number Detected - Member
        Before action, how many deleted channels should be detected?
        `0' to disable this protection.
        """
        # NOTE: the parameter is named `int` (shadowing the builtin); kept
        # as-is so the public command signature/help text stays unchanged.
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        await self.config.guild(ctx.guild).number_detected_member.set(int)
        await ctx.send(_("Number Detected - Member registered: {int}.").format(**locals()))

    @configuration.command(name="nbbot", aliases=["botsnb"], usage="<int>")
    async def nbbot(self, ctx: commands.Context, int: int):
        """Number Detected - Bot
        Before action, how many deleted channels should be detected?
        `0' to disable this protection.
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        await self.config.guild(ctx.guild).number_detected_bot.set(int)
        await ctx.send(_("Number Detected - Bot registered: {int}.").format(**locals()))

    @configuration.command(name="resetuser", aliases=["userreset"], usage="<user> [give_roles]")
    async def resetuser(self, ctx: commands.Context, user: discord.Member, give_roles: bool = False):
        """Reset number detected for a user
        """
        if not ctx.author.id == ctx.guild.owner.id:
            await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
            return
        config = await self.config.member(user).all()
        if give_roles:
            old_roles = config["old_roles"]
            old_roles = [ctx.guild.get_role(r) for r in old_roles]
            # Drop roles that were deleted in the meantime (get_role -> None)
            # or that the bot can no longer manage.
            old_roles = [
                r for r in old_roles if r is not None and r.position < ctx.guild.me.top_role.position and not r.managed
            ]
            if not old_roles == []:
                await user.add_roles(*old_roles, reason=_("All former roles of {user} ({user.id}) have been restored at the request of the server owner.").format(**locals()))
                await ctx.send(_("Restored roles for {user.name} ({user.id}).").format(**locals()))
        await self.config.member(user).count.clear()
        await self.config.member(user).old_roles.clear()
        await ctx.send(_("Count removed for {user.name} ({user.id}).").format(**locals()))
"redbot.core.Config.get_conf",
"redbot.core.commands.Cog.listener",
"redbot.core.i18n.Translator",
"redbot.core.i18n.cog_i18n",
"redbot.core.commands.guild_only",
"discord.Colour.dark_teal",
"redbot.core.commands.group",
"discord.Embed",
"redbot.core.commands.guildowner"
] | [((1035, 1067), 'redbot.core.i18n.Translator', 'Translator', (['"""AntiNuke"""', '__file__'], {}), "('AntiNuke', __file__)\n", (1045, 1067), False, 'from redbot.core.i18n import Translator, cog_i18n\n'), ((1072, 1083), 'redbot.core.i18n.cog_i18n', 'cog_i18n', (['_'], {}), '(_)\n', (1080, 1083), False, 'from redbot.core.i18n import Translator, cog_i18n\n'), ((2116, 2139), 'redbot.core.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (2137, 2139), False, 'from redbot.core import commands\n'), ((6910, 6931), 'redbot.core.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (6929, 6931), False, 'from redbot.core import commands\n'), ((6938, 6959), 'redbot.core.commands.guildowner', 'commands.guildowner', ([], {}), '()\n', (6957, 6959), False, 'from redbot.core import commands\n'), ((6966, 7025), 'redbot.core.commands.group', 'commands.group', ([], {'name': '"""setantinuke"""', 'aliases': "['antinukeset']"}), "(name='setantinuke', aliases=['antinukeset'])\n", (6980, 7025), False, 'from redbot.core import commands\n'), ((1289, 1360), 'redbot.core.Config.get_conf', 'Config.get_conf', (['self'], {'identifier': '(947269490247)', 'force_registration': '(True)'}), '(self, identifier=947269490247, force_registration=True)\n', (1304, 1360), False, 'from redbot.core import Config\n'), ((5087, 5102), 'discord.Embed', 'discord.Embed', ([], {}), '()\n', (5100, 5102), False, 'import discord\n'), ((5506, 5532), 'discord.Colour.dark_teal', 'discord.Colour.dark_teal', ([], {}), '()\n', (5530, 5532), False, 'import discord\n')] |
import webbrowser
from subprocess import Popen
from typing import List
from gochan.config import BROWSER_PATH
def open_link(url: str):
    """Open a single URL, preferring the user-configured browser binary
    over the system default browser."""
    if BROWSER_PATH is not None:
        Popen([BROWSER_PATH, url])
    else:
        webbrowser.open(url)
def open_links(urls: List[str]):
    """Open every URL in *urls*: all at once via the configured browser
    binary, or one by one through the system default browser."""
    if BROWSER_PATH is not None:
        Popen([BROWSER_PATH, *urls])
    else:
        for target in urls:
            webbrowser.open(target)
| [
"subprocess.Popen",
"webbrowser.open"
] | [((175, 195), 'webbrowser.open', 'webbrowser.open', (['url'], {}), '(url)\n', (190, 195), False, 'import webbrowser\n'), ((214, 240), 'subprocess.Popen', 'Popen', (['[BROWSER_PATH, url]'], {}), '([BROWSER_PATH, url])\n', (219, 240), False, 'from subprocess import Popen\n'), ((381, 409), 'subprocess.Popen', 'Popen', (['[BROWSER_PATH, *urls]'], {}), '([BROWSER_PATH, *urls])\n', (386, 409), False, 'from subprocess import Popen\n'), ((342, 362), 'webbrowser.open', 'webbrowser.open', (['url'], {}), '(url)\n', (357, 362), False, 'import webbrowser\n')] |
"""Add domain's ip
Revision ID: 2710366d3d68
Revises: 43b883c483b4
Create Date: 2021-12-22 03:19:46.487228
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
# Restored from the migration header above: Alembic matches migrations by
# this exact id, so the redaction artifact '2<PASSWORD>' would break the
# revision chain (down_revision 43b883c483b4 points at the previous script).
revision = '2710366d3d68'
down_revision = '43b883c483b4'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add the non-nullable string column `ipv4` to the `domains` table.
    ipv4_column = sa.Column('ipv4', sa.String(), nullable=False)
    op.add_column('domains', ipv4_column)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): drop the `ipv4` column from `domains`.
    op.drop_column('domains', 'ipv4')
    # ### end Alembic commands ###
| [
"sqlalchemy.String",
"alembic.op.drop_column"
] | [((583, 616), 'alembic.op.drop_column', 'op.drop_column', (['"""domains"""', '"""ipv4"""'], {}), "('domains', 'ipv4')\n", (597, 616), False, 'from alembic import op\n'), ((429, 440), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (438, 440), True, 'import sqlalchemy as sa\n')] |
# Copyright (c) <NAME>, TU Delft.
# All rights reserved.
# See COPYRIGHT for details.
# added -fpermissive to CMAKE_CXX_FLAGS to workaround g++ 4.3 compile
# problem on the included CPT code. We were getting "changes meaning
# of" errors as documented in
# http://stupefydeveloper.blogspot.com/2008/11/c-name-lookup-changes-in-g-43.html
import config
from install_package import InstallPackage
import os
import shutil
import utils
BASENAME = "vtktudoss"  # checkout directory / package name
SVN_REPO = "http://vtktudoss.googlecode.com/svn/trunk/"  # upstream trunk URL
SVN_REL = 37  # pinned SVN revision checked out by VTKTUDOSS.get()
dependencies = ['VTK']  # presumably built/installed before this package by the framework — TODO confirm
class VTKTUDOSS(InstallPackage):
    """Checkout, configure, build and install the vtktudoss library (VTK
    add-ons from TU Delft) from its Google Code SVN repository.

    Each step is idempotent: it checks for an existing artifact and skips
    itself when the work was already done.
    """
    def __init__(self):
        # All paths derive from the shared top-level `config` module.
        self.source_dir = os.path.join(config.archive_dir, BASENAME)
        self.build_dir = os.path.join(config.build_dir, '%s-build' %
                                      (BASENAME,))
        self.inst_dir = os.path.join(config.inst_dir, BASENAME)
    def get(self):
        """Checkout the pinned SVN revision into the archive directory."""
        if os.path.exists(self.source_dir):
            utils.output("vtktudoss already checked out, skipping step.")
        else:
            os.chdir(config.archive_dir)
            # checkout trunk into directory vtktudoss
            ret = os.system("%s co %s %s -r%s" % (config.SVN,
                SVN_REPO, BASENAME, SVN_REL))
            if ret != 0:
                utils.error("Could not SVN checkout. Fix and try again.")
    def unpack(self):
        # no unpack step
        pass
    def configure(self):
        """Run CMake on the checkout; skipped when a CMake cache exists."""
        if os.path.exists(
            os.path.join(self.build_dir, 'CMakeFiles/cmake.check_cache')):
            utils.output("vtktudoss build already configured.")
            return
        if not os.path.exists(self.build_dir):
            os.mkdir(self.build_dir)
        # Assemble the CMake command line; the %s slots are filled from the
        # install prefix and the Python/VTK locations in `config`.
        cmake_params = "-DBUILD_SHARED_LIBS=ON " \
                       "-DBUILD_CONTRIB=ON " \
                       "-DBUILD_CONTRIB_STLIB=ON " \
                       "-DBUILD_TESTING=OFF " \
                       "-DCMAKE_BACKWARDS_COMPATIBILITY=2.6 " \
                       "-DCMAKE_BUILD_TYPE=RelWithDebInfo " \
                       "-DCMAKE_INSTALL_PREFIX=%s " \
                       "-DPYTHON_EXECUTABLE=%s " \
                       "-DPYTHON_LIBRARY=%s " \
                       "-DPYTHON_INCLUDE_PATH=%s " \
                       "-DVTK_DIR=%s" % \
                       (self.inst_dir,
                        config.PYTHON_EXECUTABLE,
                        config.PYTHON_LIBRARY,
                        config.PYTHON_INCLUDE_PATH,
                        config.VTK_DIR)
        # we only add this under posix as a work-around to compile the
        # STLib code under g++ (see the header comment about g++ 4.3)
        if os.name == 'posix':
            cmake_params = cmake_params + " -DCMAKE_CXX_FLAGS=-fpermissive "
        ret = utils.cmake_command(self.build_dir, self.source_dir,
                cmake_params)
        if ret != 0:
            utils.error("Could not configure vtktudoss. Fix and try again.")
    def build(self):
        """Compile vtktudoss; skipped when a build product already exists
        (the .so on posix, the .dll on Windows)."""
        posix_file = os.path.join(self.build_dir,
                                  'bin/libvtktudossGraphicsPython.so')
        nt_file = os.path.join(self.build_dir, 'bin',
                               config.BUILD_TARGET, 'vtktudossGraphicsPythonD.dll')
        if utils.file_exists(posix_file, nt_file):
            utils.output("vtktudoss already built. Skipping build step.")
        else:
            os.chdir(self.build_dir)
            ret = utils.make_command('VTKTUDOSS.sln')
            if ret != 0:
                utils.error("Could not build vtktudoss. Fix and try again.")
    def install(self):
        """Install vtktudoss and publish its paths on the `config` module."""
        config.VTKTUDOSS_PYTHON = os.path.join(
            self.inst_dir, 'lib')
        config.VTKTUDOSS_LIB = os.path.join(self.inst_dir, 'lib')
        # Presence of vtktudoss.py in the install lib dir marks a done install.
        test_file = os.path.join(config.VTKTUDOSS_LIB, 'vtktudoss.py')
        if os.path.exists(test_file):
            utils.output("vtktudoss already installed, skipping step.")
        else:
            os.chdir(self.build_dir)
            ret = utils.make_command('VTKTUDOSS.sln', install=True)
            if ret != 0:
                utils.error(
                    "Could not install vtktudoss. Fix and try again.")
    def clean_build(self):
        # nuke the build dir, the source dir is pristine and there is
        # no installation
        utils.output("Removing build dir.")
        if os.path.exists(self.build_dir):
            shutil.rmtree(self.build_dir)
    def get_installed_version(self):
        """Import the installed module and return its `version` attribute;
        only works after install() has completed."""
        import vtktudoss
        return vtktudoss.version
| [
"os.path.exists",
"utils.output",
"utils.error",
"utils.cmake_command",
"os.path.join",
"utils.file_exists",
"os.chdir",
"os.mkdir",
"shutil.rmtree",
"os.system",
"utils.make_command"
] | [((642, 684), 'os.path.join', 'os.path.join', (['config.archive_dir', 'BASENAME'], {}), '(config.archive_dir, BASENAME)\n', (654, 684), False, 'import os\n'), ((710, 766), 'os.path.join', 'os.path.join', (['config.build_dir', "('%s-build' % (BASENAME,))"], {}), "(config.build_dir, '%s-build' % (BASENAME,))\n", (722, 766), False, 'import os\n'), ((829, 868), 'os.path.join', 'os.path.join', (['config.inst_dir', 'BASENAME'], {}), '(config.inst_dir, BASENAME)\n', (841, 868), False, 'import os\n'), ((900, 931), 'os.path.exists', 'os.path.exists', (['self.source_dir'], {}), '(self.source_dir)\n', (914, 931), False, 'import os\n'), ((2728, 2794), 'utils.cmake_command', 'utils.cmake_command', (['self.build_dir', 'self.source_dir', 'cmake_params'], {}), '(self.build_dir, self.source_dir, cmake_params)\n', (2747, 2794), False, 'import utils\n'), ((2963, 3028), 'os.path.join', 'os.path.join', (['self.build_dir', '"""bin/libvtktudossGraphicsPython.so"""'], {}), "(self.build_dir, 'bin/libvtktudossGraphicsPython.so')\n", (2975, 3028), False, 'import os\n'), ((3064, 3156), 'os.path.join', 'os.path.join', (['self.build_dir', '"""bin"""', 'config.BUILD_TARGET', '"""vtktudossGraphicsPythonD.dll"""'], {}), "(self.build_dir, 'bin', config.BUILD_TARGET,\n 'vtktudossGraphicsPythonD.dll')\n", (3076, 3156), False, 'import os\n'), ((3181, 3219), 'utils.file_exists', 'utils.file_exists', (['posix_file', 'nt_file'], {}), '(posix_file, nt_file)\n', (3198, 3219), False, 'import utils\n'), ((3577, 3611), 'os.path.join', 'os.path.join', (['self.inst_dir', '"""lib"""'], {}), "(self.inst_dir, 'lib')\n", (3589, 3611), False, 'import os\n'), ((3657, 3691), 'os.path.join', 'os.path.join', (['self.inst_dir', '"""lib"""'], {}), "(self.inst_dir, 'lib')\n", (3669, 3691), False, 'import os\n'), ((3713, 3763), 'os.path.join', 'os.path.join', (['config.VTKTUDOSS_LIB', '"""vtktudoss.py"""'], {}), "(config.VTKTUDOSS_LIB, 'vtktudoss.py')\n", (3725, 3763), False, 'import os\n'), ((3775, 3800), 
'os.path.exists', 'os.path.exists', (['test_file'], {}), '(test_file)\n', (3789, 3800), False, 'import os\n'), ((4249, 4284), 'utils.output', 'utils.output', (['"""Removing build dir."""'], {}), "('Removing build dir.')\n", (4261, 4284), False, 'import utils\n'), ((4296, 4326), 'os.path.exists', 'os.path.exists', (['self.build_dir'], {}), '(self.build_dir)\n', (4310, 4326), False, 'import os\n'), ((945, 1006), 'utils.output', 'utils.output', (['"""vtktudoss already checked out, skipping step."""'], {}), "('vtktudoss already checked out, skipping step.')\n", (957, 1006), False, 'import utils\n'), ((1034, 1062), 'os.chdir', 'os.chdir', (['config.archive_dir'], {}), '(config.archive_dir)\n', (1042, 1062), False, 'import os\n'), ((1135, 1208), 'os.system', 'os.system', (["('%s co %s %s -r%s' % (config.SVN, SVN_REPO, BASENAME, SVN_REL))"], {}), "('%s co %s %s -r%s' % (config.SVN, SVN_REPO, BASENAME, SVN_REL))\n", (1144, 1208), False, 'import os\n'), ((1451, 1511), 'os.path.join', 'os.path.join', (['self.build_dir', '"""CMakeFiles/cmake.check_cache"""'], {}), "(self.build_dir, 'CMakeFiles/cmake.check_cache')\n", (1463, 1511), False, 'import os\n'), ((1526, 1577), 'utils.output', 'utils.output', (['"""vtktudoss build already configured."""'], {}), "('vtktudoss build already configured.')\n", (1538, 1577), False, 'import utils\n'), ((1621, 1651), 'os.path.exists', 'os.path.exists', (['self.build_dir'], {}), '(self.build_dir)\n', (1635, 1651), False, 'import os\n'), ((1665, 1689), 'os.mkdir', 'os.mkdir', (['self.build_dir'], {}), '(self.build_dir)\n', (1673, 1689), False, 'import os\n'), ((2845, 2910), 'utils.error', 'utils.error', (['"""Could not configure vtktudoss. Fix and try again."""'], {}), "('Could not configure vtktudoss. Fix and try again.')\n", (2856, 2910), False, 'import utils\n'), ((3237, 3299), 'utils.output', 'utils.output', (['"""vtktudoss already built. Skipping build step."""'], {}), "('vtktudoss already built. 
Skipping build step.')\n", (3249, 3299), False, 'import utils\n'), ((3327, 3351), 'os.chdir', 'os.chdir', (['self.build_dir'], {}), '(self.build_dir)\n', (3335, 3351), False, 'import os\n'), ((3370, 3405), 'utils.make_command', 'utils.make_command', (['"""VTKTUDOSS.sln"""'], {}), "('VTKTUDOSS.sln')\n", (3388, 3405), False, 'import utils\n'), ((3814, 3873), 'utils.output', 'utils.output', (['"""vtktudoss already installed, skipping step."""'], {}), "('vtktudoss already installed, skipping step.')\n", (3826, 3873), False, 'import utils\n'), ((3900, 3924), 'os.chdir', 'os.chdir', (['self.build_dir'], {}), '(self.build_dir)\n', (3908, 3924), False, 'import os\n'), ((3943, 3992), 'utils.make_command', 'utils.make_command', (['"""VTKTUDOSS.sln"""'], {'install': '(True)'}), "('VTKTUDOSS.sln', install=True)\n", (3961, 3992), False, 'import utils\n'), ((4340, 4369), 'shutil.rmtree', 'shutil.rmtree', (['self.build_dir'], {}), '(self.build_dir)\n', (4353, 4369), False, 'import shutil\n'), ((1266, 1324), 'utils.error', 'utils.error', (['"""Could not SVN checkout. Fix and try again."""'], {}), "('Could not SVN checkout. Fix and try again.')\n", (1277, 1324), False, 'import utils\n'), ((3448, 3509), 'utils.error', 'utils.error', (['"""Could not build vtktudoss. Fix and try again."""'], {}), "('Could not build vtktudoss. Fix and try again.')\n", (3459, 3509), False, 'import utils\n'), ((4035, 4098), 'utils.error', 'utils.error', (['"""Could not install vtktudoss. Fix and try again."""'], {}), "('Could not install vtktudoss. Fix and try again.')\n", (4046, 4098), False, 'import utils\n')] |
import gym
from gym import wrappers
from gym.envs.registration import register
from config import Params
# register ExpansionAi gym env
# Registering at import time lets gym.make('ExpansionAi-v0') resolve the
# environment; the entry_point "<module>:<class>" is imported lazily by gym.
register(
    id='ExpansionAi-v0',
    entry_point='expansionai_env:ExpansionAiEnv',
)
def create_expansionai_env(env_id, video=False, params=None):
    """Build a configured ExpansionAi gym environment.

    :param env_id: registered gym environment id (e.g. 'ExpansionAi-v0').
    :param video: when True, wrap the env in a Monitor recording to 'test'.
    :param params: optional Params instance; a fresh ``Params()`` is created
        when omitted.  (Bug fix: the old ``params=Params()`` default was
        evaluated once at import time and shared across every call — the
        classic mutable-default-argument pitfall.)
    :return: the (possibly Monitor-wrapped) gym environment.
    """
    if params is None:
        params = Params()
    # Inject constructor kwargs into the registered spec before gym.make()
    # instantiates the environment.
    gym.spec(env_id)._kwargs = {
        'armies': params.armies,
        'board_size': params.board_size,
        'offset_x': 0,
        'offset_y': 0
    }
    env = gym.make(env_id)
    if video:
        env = wrappers.Monitor(env, 'test', force=True, mode='training')
    return env
| [
"config.Params",
"gym.spec",
"gym.wrappers.Monitor",
"gym.envs.registration.register",
"gym.make"
] | [((138, 213), 'gym.envs.registration.register', 'register', ([], {'id': '"""ExpansionAi-v0"""', 'entry_point': '"""expansionai_env:ExpansionAiEnv"""'}), "(id='ExpansionAi-v0', entry_point='expansionai_env:ExpansionAiEnv')\n", (146, 213), False, 'from gym.envs.registration import register\n'), ((282, 290), 'config.Params', 'Params', ([], {}), '()\n', (288, 290), False, 'from config import Params\n'), ((461, 477), 'gym.make', 'gym.make', (['env_id'], {}), '(env_id)\n', (469, 477), False, 'import gym\n'), ((297, 313), 'gym.spec', 'gym.spec', (['env_id'], {}), '(env_id)\n', (305, 313), False, 'import gym\n'), ((506, 564), 'gym.wrappers.Monitor', 'wrappers.Monitor', (['env', '"""test"""'], {'force': '(True)', 'mode': '"""training"""'}), "(env, 'test', force=True, mode='training')\n", (522, 564), False, 'from gym import wrappers\n')] |
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\apartments\situations\neighbor_group_hangout.py
# Compiled at: 2018-11-17 02:17:10
# Size of source mod 2**32: 8707 bytes
import random
from distributor.shared_messages import IconInfoData
from event_testing.resolver import DoubleSimResolver
from event_testing.test_events import TestEvent
from event_testing.tests import TunableTestSet
from sims4.tuning.instances import lock_instance_tunables
from sims4.tuning.tunable import TunableRange
from sims4.tuning.tunable_base import GroupNames
from situations.base_situation import _RequestUserData
from situations.bouncer.bouncer_request import SelectableSimRequestFactory
from situations.bouncer.bouncer_types import BouncerExclusivityCategory, RequestSpawningOption, BouncerRequestPriority
from situations.situation_complex import SituationComplexCommon, SituationStateData, CommonSituationState, TunableSituationJobAndRoleState
from situations.situation_guest_list import SituationGuestList, SituationGuestInfo
from situations.situation_types import SituationCreationUIOption
from ui.ui_dialog_notification import TunableUiDialogNotificationSnippet
import services
class _NeighborHangoutState(CommonSituationState):
pass
class NeighborGroupHangoutSituation(SituationComplexCommon):
INSTANCE_TUNABLES = {'player_sim_job_and_default_role_state':TunableSituationJobAndRoleState(description='\n The Situation Job and role state to put player Sims in. \n '),
'neighbor_job_and_default_role_state':TunableSituationJobAndRoleState(description='\n The Situation Job and Role State for the neighbor.\n '),
'number_of_neighbors':TunableRange(description="\n The number of other neighbors to bring to the situation. If\n there aren't enough neighbors then none will be generated to\n bring.\n ",
tunable_type=int,
default=1,
minimum=1),
'_hangout_state':_NeighborHangoutState.TunableFactory(description='\n The state for the neighbor to come in and hang out with the player.\n ',
tuning_group=GroupNames.STATE),
'_arrival_notification':TunableUiDialogNotificationSnippet(description='\n Localized string to display as a notification when the first Sim\n arrives on the player lot.\n '),
'scheduling_tests':TunableTestSet(description="\n Tunable tests that run before scheduling this situation. If they\n pass for at least one Sim we find that matches the job filter, the\n situation is weighed and considered for scheduling. Otherwise it\n does not take up a slot in the situation manager because the zone\n director won't consider it. Participants: Actor = active sim,\n TargetSim = Sim from Job filter. Tests fail if TargetSim is None.\n ")}
def __init__(self, *args, **kwargs):
(super().__init__)(*args, **kwargs)
self._tns_popped = False
@classmethod
def _states(cls):
return (SituationStateData(1, _NeighborHangoutState, factory=(cls._hangout_state)),)
@classmethod
def situation_meets_starting_requirements(cls, **kwargs):
neighbor_results = cls.get_filter_results_for_job()
for neighbor_result in neighbor_results:
resolver = DoubleSimResolver(services.active_sim_info(), neighbor_result.sim_info)
if cls.scheduling_tests.run_tests(resolver):
return True
return False
@classmethod
def default_job(cls):
pass
@classmethod
def _get_tuned_job_and_default_role_state_tuples(cls):
return [(cls.neighbor_job_and_default_role_state.job, cls.neighbor_job_and_default_role_state.role_state),
(
cls.player_sim_job_and_default_role_state.job, cls.player_sim_job_and_default_role_state.role_state)]
@classmethod
def get_filter_results_for_job(cls):
active_sim_info = services.active_sim_info()
neighbor_results = services.sim_filter_service().submit_filter((cls.neighbor_job_and_default_role_state.job.filter), callback=None,
requesting_sim_info=active_sim_info,
allow_yielding=False,
blacklist_sim_ids={sim_info.sim_id for sim_info in services.active_household()},
gsi_source_fn=(cls.get_sim_filter_gsi_name))
return neighbor_results
@classmethod
def get_predefined_guest_list(cls):
neighbor_results = cls.get_filter_results_for_job()
if not neighbor_results:
return
elif len(neighbor_results) > cls.number_of_neighbors:
neighbors = random.sample(neighbor_results, cls.number_of_neighbors)
else:
neighbors = neighbor_results
active_sim_info = services.active_sim_info()
guest_list = SituationGuestList(invite_only=True, host_sim_id=(neighbor_results[0].sim_info.sim_id),
filter_requesting_sim_id=(active_sim_info.sim_id))
for neighbor in neighbors:
guest_list.add_guest_info(SituationGuestInfo((neighbor.sim_info.sim_id), (cls.neighbor_job_and_default_role_state.job),
(RequestSpawningOption.DONT_CARE),
(BouncerRequestPriority.EVENT_VIP),
expectation_preference=True))
return guest_list
def start_situation(self):
super().start_situation()
services.get_event_manager().register_single_event(self, TestEvent.SimActiveLotStatusChanged)
self._change_state(self._hangout_state())
def _issue_requests(self):
super()._issue_requests()
request = SelectableSimRequestFactory(self, callback_data=(_RequestUserData()),
job_type=(self.player_sim_job_and_default_role_state.job),
exclusivity=(self.exclusivity))
self.manager.bouncer.submit_request(request)
def handle_event(self, sim_info, event, resolver):
super().handle_event(sim_info, event, resolver)
if event == TestEvent.SimActiveLotStatusChanged:
if not self._tns_popped:
sim = sim_info.get_sim_instance()
if sim is not None:
if sim.is_on_active_lot():
if self.is_sim_in_situation(sim):
if self.sim_has_job(sim, self.neighbor_job_and_default_role_state.job):
active_sim = services.get_active_sim()
if active_sim is not None:
dialog = self._arrival_notification(active_sim)
dialog.show_dialog(icon_override=IconInfoData(obj_instance=sim), secondary_icon_override=IconInfoData(obj_instance=active_sim))
self._tns_popped = True
services.get_event_manager().unregister_single_event(self, TestEvent.SimActiveLotStatusChanged)
lock_instance_tunables(NeighborGroupHangoutSituation, exclusivity=(BouncerExclusivityCategory.NORMAL),
creation_ui_option=(SituationCreationUIOption.NOT_AVAILABLE)) | [
"random.sample",
"sims4.tuning.instances.lock_instance_tunables",
"situations.situation_complex.TunableSituationJobAndRoleState",
"situations.situation_guest_list.SituationGuestList",
"ui.ui_dialog_notification.TunableUiDialogNotificationSnippet",
"services.active_household",
"services.get_event_manager... | [((7150, 7320), 'sims4.tuning.instances.lock_instance_tunables', 'lock_instance_tunables', (['NeighborGroupHangoutSituation'], {'exclusivity': 'BouncerExclusivityCategory.NORMAL', 'creation_ui_option': 'SituationCreationUIOption.NOT_AVAILABLE'}), '(NeighborGroupHangoutSituation, exclusivity=\n BouncerExclusivityCategory.NORMAL, creation_ui_option=\n SituationCreationUIOption.NOT_AVAILABLE)\n', (7172, 7320), False, 'from sims4.tuning.instances import lock_instance_tunables\n'), ((1524, 1667), 'situations.situation_complex.TunableSituationJobAndRoleState', 'TunableSituationJobAndRoleState', ([], {'description': '"""\n The Situation Job and role state to put player Sims in. \n """'}), '(description=\n """\n The Situation Job and role state to put player Sims in. \n """\n )\n', (1555, 1667), False, 'from situations.situation_complex import SituationComplexCommon, SituationStateData, CommonSituationState, TunableSituationJobAndRoleState\n'), ((1701, 1838), 'situations.situation_complex.TunableSituationJobAndRoleState', 'TunableSituationJobAndRoleState', ([], {'description': '"""\n The Situation Job and Role State for the neighbor.\n """'}), '(description=\n """\n The Situation Job and Role State for the neighbor.\n """\n )\n', (1732, 1838), False, 'from situations.situation_complex import SituationComplexCommon, SituationStateData, CommonSituationState, TunableSituationJobAndRoleState\n'), ((1856, 2116), 'sims4.tuning.tunable.TunableRange', 'TunableRange', ([], {'description': '"""\n The number of other neighbors to bring to the situation. If\n there aren\'t enough neighbors then none will be generated to\n bring.\n """', 'tunable_type': 'int', 'default': '(1)', 'minimum': '(1)'}), '(description=\n """\n The number of other neighbors to bring to the situation. 
If\n there aren\'t enough neighbors then none will be generated to\n bring.\n """\n , tunable_type=int, default=1, minimum=1)\n', (1868, 2116), False, 'from sims4.tuning.tunable import TunableRange\n'), ((2369, 2562), 'ui.ui_dialog_notification.TunableUiDialogNotificationSnippet', 'TunableUiDialogNotificationSnippet', ([], {'description': '"""\n Localized string to display as a notification when the first Sim\n arrives on the player lot.\n """'}), '(description=\n """\n Localized string to display as a notification when the first Sim\n arrives on the player lot.\n """\n )\n', (2403, 2562), False, 'from ui.ui_dialog_notification import TunableUiDialogNotificationSnippet\n'), ((2578, 3098), 'event_testing.tests.TunableTestSet', 'TunableTestSet', ([], {'description': '"""\n Tunable tests that run before scheduling this situation. If they\n pass for at least one Sim we find that matches the job filter, the\n situation is weighed and considered for scheduling. Otherwise it\n does not take up a slot in the situation manager because the zone\n director won\'t consider it. Participants: Actor = active sim,\n TargetSim = Sim from Job filter. Tests fail if TargetSim is None.\n """'}), '(description=\n """\n Tunable tests that run before scheduling this situation. If they\n pass for at least one Sim we find that matches the job filter, the\n situation is weighed and considered for scheduling. Otherwise it\n does not take up a slot in the situation manager because the zone\n director won\'t consider it. Participants: Actor = active sim,\n TargetSim = Sim from Job filter. 
Tests fail if TargetSim is None.\n """\n )\n', (2592, 3098), False, 'from event_testing.tests import TunableTestSet\n'), ((4193, 4219), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (4217, 4219), False, 'import services\n'), ((5011, 5037), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (5035, 5037), False, 'import services\n'), ((5059, 5198), 'situations.situation_guest_list.SituationGuestList', 'SituationGuestList', ([], {'invite_only': '(True)', 'host_sim_id': 'neighbor_results[0].sim_info.sim_id', 'filter_requesting_sim_id': 'active_sim_info.sim_id'}), '(invite_only=True, host_sim_id=neighbor_results[0].\n sim_info.sim_id, filter_requesting_sim_id=active_sim_info.sim_id)\n', (5077, 5198), False, 'from situations.situation_guest_list import SituationGuestList, SituationGuestInfo\n'), ((3268, 3340), 'situations.situation_complex.SituationStateData', 'SituationStateData', (['(1)', '_NeighborHangoutState'], {'factory': 'cls._hangout_state'}), '(1, _NeighborHangoutState, factory=cls._hangout_state)\n', (3286, 3340), False, 'from situations.situation_complex import SituationComplexCommon, SituationStateData, CommonSituationState, TunableSituationJobAndRoleState\n'), ((3575, 3601), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (3599, 3601), False, 'import services\n'), ((4247, 4276), 'services.sim_filter_service', 'services.sim_filter_service', ([], {}), '()\n', (4274, 4276), False, 'import services\n'), ((4873, 4929), 'random.sample', 'random.sample', (['neighbor_results', 'cls.number_of_neighbors'], {}), '(neighbor_results, cls.number_of_neighbors)\n', (4886, 4929), False, 'import random\n'), ((5281, 5476), 'situations.situation_guest_list.SituationGuestInfo', 'SituationGuestInfo', (['neighbor.sim_info.sim_id', 'cls.neighbor_job_and_default_role_state.job', 'RequestSpawningOption.DONT_CARE', 'BouncerRequestPriority.EVENT_VIP'], {'expectation_preference': '(True)'}), 
'(neighbor.sim_info.sim_id, cls.\n neighbor_job_and_default_role_state.job, RequestSpawningOption.\n DONT_CARE, BouncerRequestPriority.EVENT_VIP, expectation_preference=True)\n', (5299, 5476), False, 'from situations.situation_guest_list import SituationGuestList, SituationGuestInfo\n'), ((5619, 5647), 'services.get_event_manager', 'services.get_event_manager', ([], {}), '()\n', (5645, 5647), False, 'import services\n'), ((5896, 5914), 'situations.base_situation._RequestUserData', '_RequestUserData', ([], {}), '()\n', (5912, 5914), False, 'from situations.base_situation import _RequestUserData\n'), ((4500, 4527), 'services.active_household', 'services.active_household', ([], {}), '()\n', (4525, 4527), False, 'import services\n'), ((6623, 6648), 'services.get_active_sim', 'services.get_active_sim', ([], {}), '()\n', (6646, 6648), False, 'import services\n'), ((6861, 6891), 'distributor.shared_messages.IconInfoData', 'IconInfoData', ([], {'obj_instance': 'sim'}), '(obj_instance=sim)\n', (6873, 6891), False, 'from distributor.shared_messages import IconInfoData\n'), ((6917, 6954), 'distributor.shared_messages.IconInfoData', 'IconInfoData', ([], {'obj_instance': 'active_sim'}), '(obj_instance=active_sim)\n', (6929, 6954), False, 'from distributor.shared_messages import IconInfoData\n'), ((7052, 7080), 'services.get_event_manager', 'services.get_event_manager', ([], {}), '()\n', (7078, 7080), False, 'import services\n')] |
import inspect
import os
import sys
import unittest
from unittest.mock import call, patch
from Whittler import File, Process, WhittlerProgram
from WhittlerTests import Random, UnitTester
testNames = [
'test_get_module_ReturnsWhittlerProgramModule',
'test_make_whittler_filepath_for_command_ReturnsHomeSlashDotWhittlerSlashProgramNameAndArgsDotTxt',
'test_open_whittler_file_with_editor_environment_variable_OpensFileWithEDITOR',
'test_prefix_command_with_powershell_dot_exe_if_command_is_get_help_ReturnsPowerShellPrefixedCommandIfGetHelp',
'test_open_or_create_whittler_file_WhittlerFileAlreadyExists_OpensItWithEDITOR',
'test_open_or_create_whittler_file_WhittlerFileDoesNotAlreadyExist_CreatesItAndOpensItWithEDITOR',
'test_run_CallsOpenOrCreateWhittlerFile_Returns0',
'test_split_lines_and_remove_leading_and_trailing_whitespace_and_carriage_returns',
'test_try_open_whittler_file_OpensWhittlerFileWithEDITORIfItExists',
'test_write_program_output_to_whittler_file_EmptyStdOutAndStdErr_PrintsErrorAndExits1',
'test_write_program_output_to_whittler_file_ProcessRaisesFileNotFoundError_PrintsErrorAndExits1',
'test_write_program_output_to_whittler_file_NonEmptyStdOutOrStdErr_WritesNonEmptyOneToWhittlerFileAndReturnsItsPath']
class WhittlerProgramTests(unittest.TestCase):
def setUp(self):
self.program = Random.string()
self.command = Random.string()
self.whittlerFilePath = Random.string()
def test_get_module_ReturnsWhittlerProgramModule(self):
#
whittlerProgram = WhittlerProgram.get_module()
#
self.assertTrue(inspect.ismodule(whittlerProgram))
self.assertEqual('Whittler.WhittlerProgram', whittlerProgram.__name__)
def test_make_whittler_filepath_for_command_ReturnsHomeSlashDotWhittlerSlashProgramNameAndArgsDotTxt(self):
@patch('os.path.expanduser', spec_set=True)
def testcase(commandArg, expectedExpandUserArgument, _1):
with self.subTest(f'{commandArg, expectedExpandUserArgument}'):
expandUserReturnValue = Random.string()
os.path.expanduser.return_value = expandUserReturnValue
#
whittlerFilePath = WhittlerProgram.make_whittler_filepath_for_command(commandArg)
#
os.path.expanduser.assert_called_once_with(expectedExpandUserArgument)
self.assertEqual(expandUserReturnValue, whittlerFilePath)
testcase('ls', '~/.config/Whittler/ls.txt')
testcase('systemd --help', '~/.config/Whittler/systemd --help.txt')
testcase('cmake --help-command set', '~/.config/Whittler/cmake --help-command set.txt')
testcase('substr /?', '~/.config/Whittler/substr SlashQuestionMark.txt')
@patch('os.environ.get', spec_set=True)
@patch('Whittler.Process.fail_fast_run', spec_set=True)
@patch('builtins.print', spec_set=True)
def test_open_whittler_file_with_editor_environment_variable_OpensFileWithEDITOR(self, printMock, _2, _3):
editorEnvironmentVariableValue = Random.string()
os.environ.get.return_value = editorEnvironmentVariableValue
whittlerFilePath = Random.string()
#
WhittlerProgram.open_whittler_file_with_editor_environment_variable(whittlerFilePath)
#
self.assertEqual(2, len(printMock.call_args_list))
printMock.assert_has_calls([
call('[Whittler] ', end=''),
call('[Whittler] ExitCode: 0')])
expectedEditorCommand = f'{editorEnvironmentVariableValue} "{whittlerFilePath}"'
Process.fail_fast_run.assert_called_once_with(expectedEditorCommand, True)
def test_prefix_command_with_powershell_dot_exe_if_command_is_get_help_ReturnsPowerShellPrefixedCommandIfGetHelp(self):
def testcase(command, expectedAdjustedCommand):
with self.subTest(f'{command, expectedAdjustedCommand}'):
#
adjustedCommand = WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help(command)
#
self.assertEqual(expectedAdjustedCommand, adjustedCommand)
testcase('get-help', "powershell.exe -Command 'get-help'")
testcase('Get-Help', "powershell.exe -Command 'Get-Help'")
testcase('Get-Help Get-Command -detailed', "powershell.exe -Command 'Get-Help Get-Command -detailed'")
testcase('whoami', "whoami")
testcase(' get-help', " get-help")
@patch('Whittler.WhittlerProgram.open_or_create_whittler_file', spec_set=True)
def test_run_CallsOpenOrCreateWhittlerFile_Returns0(self, _1):
command = Random.string()
#
exitCode = WhittlerProgram.run(command)
#
WhittlerProgram.open_or_create_whittler_file.assert_called_once_with(command)
self.assertEqual(0, exitCode)
def test_split_lines_and_remove_leading_and_trailing_whitespace_and_carriage_returns(self):
def testcase(text, expectedReturnValue):
with self.subTest(f'{text, expectedReturnValue}'):
returnValue = WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace(text)
self.assertEqual(expectedReturnValue, returnValue)
testcase('', [''])
testcase('abc', ['abc'])
testcase('\r', [''])
testcase('\n', ['', ''])
testcase('\r\n', ['', ''])
testcase(' \tA\t ', ['A'])
testcase('\t \t1\t\n2\t\r\n 3 45 ', ['1', '2', '3 45'])
testcase('\t Line1\r\nLine2\nLine3\r\n\r\nLine4 \t', ['Line1', 'Line2', 'Line3', '', 'Line4'])
@patch('Whittler.WhittlerProgram.try_open_whittler_file', spec_set=True)
@patch('Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help', spec_set=True)
@patch('Whittler.WhittlerProgram.write_program_output_to_whittler_file', spec_set=True)
@patch('Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable', spec_set=True)
def test_open_or_create_whittler_file_WhittlerFileAlreadyExists_OpensItWithEDITOR(self, _1, _2, _3, _4):
WhittlerProgram.try_open_whittler_file.return_value = True
powershellAdjustedCommand = Random.string()
WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.return_value = powershellAdjustedCommand
WhittlerProgram.write_program_output_to_whittler_file.return_value = self.whittlerFilePath
#
WhittlerProgram.open_or_create_whittler_file(self.command)
#
WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.assert_called_once_with(self.command)
WhittlerProgram.try_open_whittler_file.assert_called_once_with(powershellAdjustedCommand)
WhittlerProgram.write_program_output_to_whittler_file.assert_not_called()
WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_not_called()
@patch('Whittler.WhittlerProgram.try_open_whittler_file', spec_set=True)
@patch('Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help', spec_set=True)
@patch('Whittler.WhittlerProgram.write_program_output_to_whittler_file', spec_set=True)
@patch('Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable', spec_set=True)
def test_open_or_create_whittler_file_WhittlerFileDoesNotAlreadyExist_CreatesItAndOpensItWithEDITOR(self, _1, _2, _3, _4):
WhittlerProgram.try_open_whittler_file.return_value = False
powershellAdjustedCommand = Random.string()
WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.return_value = powershellAdjustedCommand
WhittlerProgram.write_program_output_to_whittler_file.return_value = self.whittlerFilePath
#
WhittlerProgram.open_or_create_whittler_file(self.command)
#
WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.assert_called_once_with(self.command)
WhittlerProgram.try_open_whittler_file.assert_called_once_with(powershellAdjustedCommand)
WhittlerProgram.write_program_output_to_whittler_file.assert_called_once_with(powershellAdjustedCommand)
WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_called_once_with(self.whittlerFilePath)
def test_try_open_whittler_file_OpensWhittlerFileWithEDITORIfItExists(self):
@patch('Whittler.WhittlerProgram.make_whittler_filepath_for_command', spec_set=True)
@patch('os.path.exists', spec_set=True)
@patch('Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable', spec_set=True)
def testcase(whittlerFilePathExists, expectedReturnValue, _1, _2, _3):
with self.subTest(f'{whittlerFilePathExists, expectedReturnValue}'):
makeWhittlerFilePathReturnValue = 'makeWhittlerFilePathReturnValue'
WhittlerProgram.make_whittler_filepath_for_command.return_value = makeWhittlerFilePathReturnValue
os.path.exists.return_value = whittlerFilePathExists
powershellAdjustedCommand = Random.string()
#
didOpenWhittlerFile = WhittlerProgram.try_open_whittler_file(powershellAdjustedCommand)
#
WhittlerProgram.make_whittler_filepath_for_command.assert_called_once_with(powershellAdjustedCommand)
os.path.exists.assert_called_once_with(makeWhittlerFilePathReturnValue)
if whittlerFilePathExists:
WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_called_once_with(makeWhittlerFilePathReturnValue)
else:
WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_not_called()
self.assertEqual(expectedReturnValue, didOpenWhittlerFile)
testcase(False, False)
testcase(True, True)
@patch('Whittler.Process.run', spec_set=True)
@patch('builtins.print', spec_set=True)
@patch('sys.exit', spec_set=True)
def test_write_program_output_to_whittler_file_EmptyStdOutAndStdErr_PrintsErrorAndExits1(self, _1, printMock, _3):
Process.run.return_value = ('', '')
#
WhittlerProgram.write_program_output_to_whittler_file(self.command)
#
Process.run.assert_called_once_with(self.command)
expectedErrorMessage = f"[Whittler] Command '{self.command}' returned empty standard output and empty standard error.\n" +\
'[Whittler] ExitCode: 1'
printMock.assert_called_once_with(expectedErrorMessage)
sys.exit.assert_called_once_with(1)
@patch('Whittler.Process.run', spec_set=True)
@patch('builtins.print', spec_set=True)
@patch('sys.exit', spec_set=True)
def test_write_program_output_to_whittler_file_ProcessRaisesFileNotFoundError_PrintsErrorAndExits1(self, _1, printMock, _3):
fileNotFoundError = FileNotFoundError(Random.string())
Process.run.side_effect = fileNotFoundError
#
WhittlerProgram.write_program_output_to_whittler_file(self.command)
#
expectedErrorMessage = f"[Whittler] Error: FileNotFoundError raised when running command '{self.command}': " + str(fileNotFoundError) + '\n' +\
'[Whittler] ExitCode: 1'
printMock.assert_called_once_with(expectedErrorMessage)
sys.exit.assert_called_once_with(1)
def test_write_program_output_to_whittler_file_NonEmptyStdOutOrStdErr_WritesNonEmptyOneToWhittlerFileAndReturnsItsPath(self):
@patch('Whittler.Process.run', spec_set=True)
@patch('Whittler.WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace', spec_set=True)
@patch('Whittler.WhittlerProgram.make_whittler_filepath_for_command', spec_set=True)
@patch('Whittler.File.write_lines', spec_set=True)
def testcase(stdout, stderr, trueExpectStdoutWrittenFalseExpectStderrWritten, _1, _2, _3, _4):
with self.subTest(f'{stdout}, {stderr}, {trueExpectStdoutWrittenFalseExpectStderrWritten}'):
Process.run.return_value = (stdout, stderr)
whittlerFilePath = Random.string()
WhittlerProgram.make_whittler_filepath_for_command.return_value = whittlerFilePath
stdoutOrStderrLinesWithLeadingWhitespaceRemoved = Random.string()
WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace.return_value = stdoutOrStderrLinesWithLeadingWhitespaceRemoved
#
returnedWhittlerFilePath = WhittlerProgram.write_program_output_to_whittler_file(self.command)
#
Process.run.assert_called_once_with(self.command)
WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace.assert_called_once_with(\
stdout if trueExpectStdoutWrittenFalseExpectStderrWritten else stderr)
WhittlerProgram.make_whittler_filepath_for_command.assert_called_once_with(self.command)
File.write_lines.assert_called_once_with(whittlerFilePath, stdoutOrStderrLinesWithLeadingWhitespaceRemoved)
self.assertEqual(whittlerFilePath, returnedWhittlerFilePath)
testcase(Random.string(), '', True)
testcase('', Random.string(), False)
if __name__ == '__main__': # pragma nocover
UnitTester.run_tests(WhittlerProgramTests, testNames)
| [
"Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_called_once_with",
"os.path.expanduser.assert_called_once_with",
"Whittler.WhittlerProgram.try_open_whittler_file.assert_called_once_with",
"Whittler.WhittlerProgram.write_program_output_to_whittler_file.assert_called_once_wi... | [((2732, 2770), 'unittest.mock.patch', 'patch', (['"""os.environ.get"""'], {'spec_set': '(True)'}), "('os.environ.get', spec_set=True)\n", (2737, 2770), False, 'from unittest.mock import call, patch\n'), ((2776, 2830), 'unittest.mock.patch', 'patch', (['"""Whittler.Process.fail_fast_run"""'], {'spec_set': '(True)'}), "('Whittler.Process.fail_fast_run', spec_set=True)\n", (2781, 2830), False, 'from unittest.mock import call, patch\n'), ((2836, 2874), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'spec_set': '(True)'}), "('builtins.print', spec_set=True)\n", (2841, 2874), False, 'from unittest.mock import call, patch\n'), ((4409, 4486), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.open_or_create_whittler_file"""'], {'spec_set': '(True)'}), "('Whittler.WhittlerProgram.open_or_create_whittler_file', spec_set=True)\n", (4414, 4486), False, 'from unittest.mock import call, patch\n'), ((5520, 5591), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.try_open_whittler_file"""'], {'spec_set': '(True)'}), "('Whittler.WhittlerProgram.try_open_whittler_file', spec_set=True)\n", (5525, 5591), False, 'from unittest.mock import call, patch\n'), ((5597, 5717), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help"""'], {'spec_set': '(True)'}), "(\n 'Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help'\n , spec_set=True)\n", (5602, 5717), False, 'from unittest.mock import call, patch\n'), ((5713, 5803), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.write_program_output_to_whittler_file"""'], {'spec_set': '(True)'}), "('Whittler.WhittlerProgram.write_program_output_to_whittler_file',\n spec_set=True)\n", (5718, 5803), False, 'from unittest.mock import call, patch\n'), ((5805, 5915), 'unittest.mock.patch', 'patch', 
(['"""Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable"""'], {'spec_set': '(True)'}), "(\n 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable'\n , spec_set=True)\n", (5810, 5915), False, 'from unittest.mock import call, patch\n'), ((6845, 6916), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.try_open_whittler_file"""'], {'spec_set': '(True)'}), "('Whittler.WhittlerProgram.try_open_whittler_file', spec_set=True)\n", (6850, 6916), False, 'from unittest.mock import call, patch\n'), ((6922, 7042), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help"""'], {'spec_set': '(True)'}), "(\n 'Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help'\n , spec_set=True)\n", (6927, 7042), False, 'from unittest.mock import call, patch\n'), ((7038, 7128), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.write_program_output_to_whittler_file"""'], {'spec_set': '(True)'}), "('Whittler.WhittlerProgram.write_program_output_to_whittler_file',\n spec_set=True)\n", (7043, 7128), False, 'from unittest.mock import call, patch\n'), ((7130, 7240), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable"""'], {'spec_set': '(True)'}), "(\n 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable'\n , spec_set=True)\n", (7135, 7240), False, 'from unittest.mock import call, patch\n'), ((9817, 9861), 'unittest.mock.patch', 'patch', (['"""Whittler.Process.run"""'], {'spec_set': '(True)'}), "('Whittler.Process.run', spec_set=True)\n", (9822, 9861), False, 'from unittest.mock import call, patch\n'), ((9867, 9905), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'spec_set': '(True)'}), "('builtins.print', spec_set=True)\n", (9872, 9905), False, 'from unittest.mock import call, patch\n'), ((9911, 9943), 'unittest.mock.patch', 
'patch', (['"""sys.exit"""'], {'spec_set': '(True)'}), "('sys.exit', spec_set=True)\n", (9916, 9943), False, 'from unittest.mock import call, patch\n'), ((10537, 10581), 'unittest.mock.patch', 'patch', (['"""Whittler.Process.run"""'], {'spec_set': '(True)'}), "('Whittler.Process.run', spec_set=True)\n", (10542, 10581), False, 'from unittest.mock import call, patch\n'), ((10587, 10625), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'spec_set': '(True)'}), "('builtins.print', spec_set=True)\n", (10592, 10625), False, 'from unittest.mock import call, patch\n'), ((10631, 10663), 'unittest.mock.patch', 'patch', (['"""sys.exit"""'], {'spec_set': '(True)'}), "('sys.exit', spec_set=True)\n", (10636, 10663), False, 'from unittest.mock import call, patch\n'), ((13235, 13288), 'WhittlerTests.UnitTester.run_tests', 'UnitTester.run_tests', (['WhittlerProgramTests', 'testNames'], {}), '(WhittlerProgramTests, testNames)\n', (13255, 13288), False, 'from WhittlerTests import Random, UnitTester\n'), ((1345, 1360), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (1358, 1360), False, 'from WhittlerTests import Random, UnitTester\n'), ((1383, 1398), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (1396, 1398), False, 'from WhittlerTests import Random, UnitTester\n'), ((1430, 1445), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (1443, 1445), False, 'from WhittlerTests import Random, UnitTester\n'), ((1542, 1570), 'Whittler.WhittlerProgram.get_module', 'WhittlerProgram.get_module', ([], {}), '()\n', (1568, 1570), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((1838, 1880), 'unittest.mock.patch', 'patch', (['"""os.path.expanduser"""'], {'spec_set': '(True)'}), "('os.path.expanduser', spec_set=True)\n", (1843, 1880), False, 'from unittest.mock import call, patch\n'), ((3026, 3041), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (3039, 3041), False, 'from WhittlerTests import Random, 
UnitTester\n'), ((3136, 3151), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (3149, 3151), False, 'from WhittlerTests import Random, UnitTester\n'), ((3168, 3258), 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable', 'WhittlerProgram.open_whittler_file_with_editor_environment_variable', (['whittlerFilePath'], {}), '(\n whittlerFilePath)\n', (3235, 3258), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((3534, 3608), 'Whittler.Process.fail_fast_run.assert_called_once_with', 'Process.fail_fast_run.assert_called_once_with', (['expectedEditorCommand', '(True)'], {}), '(expectedEditorCommand, True)\n', (3579, 3608), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((4571, 4586), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (4584, 4586), False, 'from WhittlerTests import Random, UnitTester\n'), ((4614, 4642), 'Whittler.WhittlerProgram.run', 'WhittlerProgram.run', (['command'], {}), '(command)\n', (4633, 4642), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((4659, 4736), 'Whittler.WhittlerProgram.open_or_create_whittler_file.assert_called_once_with', 'WhittlerProgram.open_or_create_whittler_file.assert_called_once_with', (['command'], {}), '(command)\n', (4727, 4736), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((6116, 6131), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (6129, 6131), False, 'from WhittlerTests import Random, UnitTester\n'), ((6372, 6430), 'Whittler.WhittlerProgram.open_or_create_whittler_file', 'WhittlerProgram.open_or_create_whittler_file', (['self.command'], {}), '(self.command)\n', (6416, 6430), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((6447, 6567), 'Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.assert_called_once_with', 'WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.assert_called_once_with', 
(['self.command'], {}), '(\n self.command)\n', (6548, 6567), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((6570, 6664), 'Whittler.WhittlerProgram.try_open_whittler_file.assert_called_once_with', 'WhittlerProgram.try_open_whittler_file.assert_called_once_with', (['powershellAdjustedCommand'], {}), '(\n powershellAdjustedCommand)\n', (6632, 6664), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((6667, 6740), 'Whittler.WhittlerProgram.write_program_output_to_whittler_file.assert_not_called', 'WhittlerProgram.write_program_output_to_whittler_file.assert_not_called', ([], {}), '()\n', (6738, 6740), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((6748, 6840), 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_not_called', 'WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_not_called', ([], {}), '(\n )\n', (6833, 6840), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((7460, 7475), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (7473, 7475), False, 'from WhittlerTests import Random, UnitTester\n'), ((7716, 7774), 'Whittler.WhittlerProgram.open_or_create_whittler_file', 'WhittlerProgram.open_or_create_whittler_file', (['self.command'], {}), '(self.command)\n', (7760, 7774), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((7791, 7911), 'Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.assert_called_once_with', 'WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help.assert_called_once_with', (['self.command'], {}), '(\n self.command)\n', (7892, 7911), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((7914, 8008), 'Whittler.WhittlerProgram.try_open_whittler_file.assert_called_once_with', 'WhittlerProgram.try_open_whittler_file.assert_called_once_with', (['powershellAdjustedCommand'], {}), '(\n powershellAdjustedCommand)\n', (7976, 
8008), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((8011, 8120), 'Whittler.WhittlerProgram.write_program_output_to_whittler_file.assert_called_once_with', 'WhittlerProgram.write_program_output_to_whittler_file.assert_called_once_with', (['powershellAdjustedCommand'], {}), '(\n powershellAdjustedCommand)\n', (8088, 8120), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((8123, 8242), 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_called_once_with', 'WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_called_once_with', (['self.whittlerFilePath'], {}), '(\n self.whittlerFilePath)\n', (8214, 8242), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((8331, 8418), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.make_whittler_filepath_for_command"""'], {'spec_set': '(True)'}), "('Whittler.WhittlerProgram.make_whittler_filepath_for_command',\n spec_set=True)\n", (8336, 8418), False, 'from unittest.mock import call, patch\n'), ((8423, 8461), 'unittest.mock.patch', 'patch', (['"""os.path.exists"""'], {'spec_set': '(True)'}), "('os.path.exists', spec_set=True)\n", (8428, 8461), False, 'from unittest.mock import call, patch\n'), ((8470, 8580), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable"""'], {'spec_set': '(True)'}), "(\n 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable'\n , spec_set=True)\n", (8475, 8580), False, 'from unittest.mock import call, patch\n'), ((10122, 10189), 'Whittler.WhittlerProgram.write_program_output_to_whittler_file', 'WhittlerProgram.write_program_output_to_whittler_file', (['self.command'], {}), '(self.command)\n', (10175, 10189), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((10206, 10255), 'Whittler.Process.run.assert_called_once_with', 'Process.run.assert_called_once_with', (['self.command'], {}), 
'(self.command)\n', (10241, 10255), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((10492, 10527), 'sys.exit.assert_called_once_with', 'sys.exit.assert_called_once_with', (['(1)'], {}), '(1)\n', (10524, 10527), False, 'import sys\n'), ((10922, 10989), 'Whittler.WhittlerProgram.write_program_output_to_whittler_file', 'WhittlerProgram.write_program_output_to_whittler_file', (['self.command'], {}), '(self.command)\n', (10975, 10989), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((11255, 11290), 'sys.exit.assert_called_once_with', 'sys.exit.assert_called_once_with', (['(1)'], {}), '(1)\n', (11287, 11290), False, 'import sys\n'), ((11433, 11477), 'unittest.mock.patch', 'patch', (['"""Whittler.Process.run"""'], {'spec_set': '(True)'}), "('Whittler.Process.run', spec_set=True)\n", (11438, 11477), False, 'from unittest.mock import call, patch\n'), ((11486, 11607), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace"""'], {'spec_set': '(True)'}), "(\n 'Whittler.WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace'\n , spec_set=True)\n", (11491, 11607), False, 'from unittest.mock import call, patch\n'), ((11606, 11693), 'unittest.mock.patch', 'patch', (['"""Whittler.WhittlerProgram.make_whittler_filepath_for_command"""'], {'spec_set': '(True)'}), "('Whittler.WhittlerProgram.make_whittler_filepath_for_command',\n spec_set=True)\n", (11611, 11693), False, 'from unittest.mock import call, patch\n'), ((11698, 11747), 'unittest.mock.patch', 'patch', (['"""Whittler.File.write_lines"""'], {'spec_set': '(True)'}), "('Whittler.File.write_lines', spec_set=True)\n", (11703, 11747), False, 'from unittest.mock import call, patch\n'), ((1603, 1636), 'inspect.ismodule', 'inspect.ismodule', (['whittlerProgram'], {}), '(whittlerProgram)\n', (1619, 1636), False, 'import inspect\n'), ((10838, 10853), 'WhittlerTests.Random.string', 'Random.string', ([], {}), 
'()\n', (10851, 10853), False, 'from WhittlerTests import Random, UnitTester\n'), ((13111, 13126), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (13124, 13126), False, 'from WhittlerTests import Random, UnitTester\n'), ((13158, 13173), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (13171, 13173), False, 'from WhittlerTests import Random, UnitTester\n'), ((2057, 2072), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (2070, 2072), False, 'from WhittlerTests import Random, UnitTester\n'), ((2189, 2251), 'Whittler.WhittlerProgram.make_whittler_filepath_for_command', 'WhittlerProgram.make_whittler_filepath_for_command', (['commandArg'], {}), '(commandArg)\n', (2239, 2251), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((2280, 2350), 'os.path.expanduser.assert_called_once_with', 'os.path.expanduser.assert_called_once_with', (['expectedExpandUserArgument'], {}), '(expectedExpandUserArgument)\n', (2322, 2350), False, 'import os\n'), ((3367, 3394), 'unittest.mock.call', 'call', (['"""[Whittler] """'], {'end': '""""""'}), "('[Whittler] ', end='')\n", (3371, 3394), False, 'from unittest.mock import call, patch\n'), ((3406, 3436), 'unittest.mock.call', 'call', (['"""[Whittler] ExitCode: 0"""'], {}), "('[Whittler] ExitCode: 0')\n", (3410, 3436), False, 'from unittest.mock import call, patch\n'), ((3906, 3997), 'Whittler.WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help', 'WhittlerProgram.prefix_command_with_powershell_dot_exe_if_command_is_get_help', (['command'], {}), '(\n command)\n', (3983, 3997), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((5010, 5099), 'Whittler.WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace', 'WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace', (['text'], {}), '(\n text)\n', (5088, 5099), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((9027, 9042), 
'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (9040, 9042), False, 'from WhittlerTests import Random, UnitTester\n'), ((9093, 9158), 'Whittler.WhittlerProgram.try_open_whittler_file', 'WhittlerProgram.try_open_whittler_file', (['powershellAdjustedCommand'], {}), '(powershellAdjustedCommand)\n', (9131, 9158), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((9187, 9293), 'Whittler.WhittlerProgram.make_whittler_filepath_for_command.assert_called_once_with', 'WhittlerProgram.make_whittler_filepath_for_command.assert_called_once_with', (['powershellAdjustedCommand'], {}), '(\n powershellAdjustedCommand)\n', (9261, 9293), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((9302, 9373), 'os.path.exists.assert_called_once_with', 'os.path.exists.assert_called_once_with', (['makeWhittlerFilePathReturnValue'], {}), '(makeWhittlerFilePathReturnValue)\n', (9340, 9373), False, 'import os\n'), ((12044, 12059), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (12057, 12059), False, 'from WhittlerTests import Random, UnitTester\n'), ((12221, 12236), 'WhittlerTests.Random.string', 'Random.string', ([], {}), '()\n', (12234, 12236), False, 'from WhittlerTests import Random, UnitTester\n'), ((12447, 12514), 'Whittler.WhittlerProgram.write_program_output_to_whittler_file', 'WhittlerProgram.write_program_output_to_whittler_file', (['self.command'], {}), '(self.command)\n', (12500, 12514), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((12543, 12592), 'Whittler.Process.run.assert_called_once_with', 'Process.run.assert_called_once_with', (['self.command'], {}), '(self.command)\n', (12578, 12592), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((12606, 12784), 'Whittler.WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace.assert_called_once_with', 'WhittlerProgram.split_lines_and_remove_carriage_returns_and_leading_whitespace.assert_called_once_with', 
(['(stdout if trueExpectStdoutWrittenFalseExpectStderrWritten else stderr)'], {}), '(\n stdout if trueExpectStdoutWrittenFalseExpectStderrWritten else stderr)\n', (12708, 12784), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((12811, 12904), 'Whittler.WhittlerProgram.make_whittler_filepath_for_command.assert_called_once_with', 'WhittlerProgram.make_whittler_filepath_for_command.assert_called_once_with', (['self.command'], {}), '(self\n .command)\n', (12885, 12904), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((12913, 13024), 'Whittler.File.write_lines.assert_called_once_with', 'File.write_lines.assert_called_once_with', (['whittlerFilePath', 'stdoutOrStderrLinesWithLeadingWhitespaceRemoved'], {}), '(whittlerFilePath,\n stdoutOrStderrLinesWithLeadingWhitespaceRemoved)\n', (12953, 13024), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((9430, 9559), 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_called_once_with', 'WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_called_once_with', (['makeWhittlerFilePathReturnValue'], {}), '(\n makeWhittlerFilePathReturnValue)\n', (9521, 9559), False, 'from Whittler import File, Process, WhittlerProgram\n'), ((9590, 9682), 'Whittler.WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_not_called', 'WhittlerProgram.open_whittler_file_with_editor_environment_variable.assert_not_called', ([], {}), '(\n )\n', (9675, 9682), False, 'from Whittler import File, Process, WhittlerProgram\n')] |
from django.db import models
from django.utils.translation import ugettext as _
from ckeditor_uploader.fields import RichTextUploadingField
class RangAward(models.Model):
title = models.CharField(verbose_name=_('Заголовок'), max_length=60)
volume = models.DecimalField(verbose_name=_('Объём'), max_digits=10, decimal_places=2)
max_lines = models.PositiveSmallIntegerField(verbose_name=_('Количество линий'))
lines_volume = models.DecimalField(verbose_name=_('Объём в линиях'), max_digits=10, decimal_places=2)
include_rang = models.ForeignKey('self', on_delete=models.CASCADE, verbose_name=_('Ранг партнёров'), null=True, blank=True)
include_rang_count = models.PositiveSmallIntegerField(verbose_name=_('Количество партнёров с указаным рангом'))
bonus = models.DecimalField(verbose_name=_('Бонус'), max_digits=10, decimal_places=2)
is_final = models.BooleanField(default=False)
is_start = models.BooleanField(default=False)
quick_days = models.PositiveSmallIntegerField(blank=True, null=True, verbose_name=_('К-во дней с момента регистрации'))
quick_bonus = models.DecimalField(blank=True, null=True, verbose_name=_('Бонус быстрого старта'), decimal_places=2, max_digits=10)
weight = models.PositiveSmallIntegerField(default=10, verbose_name=_('Вес'))
class Meta:
verbose_name = _('Ранговый бонус')
verbose_name_plural = _('Ранговые бонусы')
ordering = ('volume',)
def __str__(self):
return self.title
class RangAwardHistory(models.Model):
user = models.ForeignKey('user_profile.User', on_delete=models.CASCADE, verbose_name=_('Пользователь'))
text = RichTextUploadingField()
created = models.DateTimeField(auto_now_add=True, auto_now=False, verbose_name=_('Дата создания'))
class Meta:
verbose_name = _('История начисления рангового бонуса')
verbose_name_plural = _('Истории начисления рангового бонуса')
def __str__(self):
return self.user.unique_number
class MultiLevelBonus(models.Model):
rang = models.ForeignKey(RangAward, on_delete=models.CASCADE, verbose_name=_('Ранг'))
bonus_for_line_1 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 1'), default=0)
bonus_for_line_2 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 2'), default=0)
bonus_for_line_3 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 3'), default=0)
bonus_for_line_4 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 4'), default=0)
bonus_for_line_5 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 5'), default=0)
bonus_for_line_6 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 6'), default=0)
bonus_for_line_7 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 7'), default=0)
bonus_for_line_8 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 8'), default=0)
bonus_for_line_9 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 9'), default=0)
bonus_for_line_10 = models.DecimalField(
max_digits=3, decimal_places=1, verbose_name=_('Бонус за линию 10'), default=0)
class Meta:
verbose_name = _('Многоуровневый бонус')
verbose_name_plural = _('Многоуровневые бонусы')
def __str__(self):
return self.rang.title
class MultiLevelBonusHistory(models.Model):
package_history = models.ForeignKey('packages.PackageHistory', on_delete=models.CASCADE, verbose_name=_('Запись в истории'))
text = RichTextUploadingField()
created = models.DateTimeField(auto_now_add=True, auto_now=False, verbose_name=_('Дата создания'))
class Meta:
verbose_name = _('Отчёт по многоуровневому бонусу')
verbose_name_plural = _('Отчёты по многоуровневому бонусу')
def __str__(self):
return '{} > {}'.format(self.package_history.user.unique_number, self.package_history.package.title)
class PointAward(models.Model):
rang = models.ForeignKey(RangAward, on_delete=models.CASCADE, verbose_name=_('Ранг'))
bonus = models.DecimalField(verbose_name=_('Бонус'), max_digits=10, decimal_places=2)
max_money = 10000
class Meta:
verbose_name = _('Схема конвертации баллов в FBC')
verbose_name_plural = _('Схемы конвертации баллов в FBC')
def __str__(self):
return self.rang.title
| [
"django.utils.translation.ugettext",
"ckeditor_uploader.fields.RichTextUploadingField",
"django.db.models.BooleanField"
] | [((877, 911), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (896, 911), False, 'from django.db import models\n'), ((927, 961), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (946, 961), False, 'from django.db import models\n'), ((1653, 1677), 'ckeditor_uploader.fields.RichTextUploadingField', 'RichTextUploadingField', ([], {}), '()\n', (1675, 1677), False, 'from ckeditor_uploader.fields import RichTextUploadingField\n'), ((3801, 3825), 'ckeditor_uploader.fields.RichTextUploadingField', 'RichTextUploadingField', ([], {}), '()\n', (3823, 3825), False, 'from ckeditor_uploader.fields import RichTextUploadingField\n'), ((1342, 1361), 'django.utils.translation.ugettext', '_', (['"""Ранговый бонус"""'], {}), "('Ранговый бонус')\n", (1343, 1361), True, 'from django.utils.translation import ugettext as _\n'), ((1392, 1412), 'django.utils.translation.ugettext', '_', (['"""Ранговые бонусы"""'], {}), "('Ранговые бонусы')\n", (1393, 1412), True, 'from django.utils.translation import ugettext as _\n'), ((1821, 1861), 'django.utils.translation.ugettext', '_', (['"""История начисления рангового бонуса"""'], {}), "('История начисления рангового бонуса')\n", (1822, 1861), True, 'from django.utils.translation import ugettext as _\n'), ((1892, 1932), 'django.utils.translation.ugettext', '_', (['"""Истории начисления рангового бонуса"""'], {}), "('Истории начисления рангового бонуса')\n", (1893, 1932), True, 'from django.utils.translation import ugettext as _\n'), ((3477, 3502), 'django.utils.translation.ugettext', '_', (['"""Многоуровневый бонус"""'], {}), "('Многоуровневый бонус')\n", (3478, 3502), True, 'from django.utils.translation import ugettext as _\n'), ((3533, 3559), 'django.utils.translation.ugettext', '_', (['"""Многоуровневые бонусы"""'], {}), "('Многоуровневые бонусы')\n", (3534, 3559), True, 'from django.utils.translation import ugettext as _\n'), 
((3969, 4005), 'django.utils.translation.ugettext', '_', (['"""Отчёт по многоуровневому бонусу"""'], {}), "('Отчёт по многоуровневому бонусу')\n", (3970, 4005), True, 'from django.utils.translation import ugettext as _\n'), ((4036, 4073), 'django.utils.translation.ugettext', '_', (['"""Отчёты по многоуровневому бонусу"""'], {}), "('Отчёты по многоуровневому бонусу')\n", (4037, 4073), True, 'from django.utils.translation import ugettext as _\n'), ((4484, 4519), 'django.utils.translation.ugettext', '_', (['"""Схема конвертации баллов в FBC"""'], {}), "('Схема конвертации баллов в FBC')\n", (4485, 4519), True, 'from django.utils.translation import ugettext as _\n'), ((4550, 4585), 'django.utils.translation.ugettext', '_', (['"""Схемы конвертации баллов в FBC"""'], {}), "('Схемы конвертации баллов в FBC')\n", (4551, 4585), True, 'from django.utils.translation import ugettext as _\n'), ((215, 229), 'django.utils.translation.ugettext', '_', (['"""Заголовок"""'], {}), "('Заголовок')\n", (216, 229), True, 'from django.utils.translation import ugettext as _\n'), ((292, 302), 'django.utils.translation.ugettext', '_', (['"""Объём"""'], {}), "('Объём')\n", (293, 302), True, 'from django.utils.translation import ugettext as _\n'), ((399, 420), 'django.utils.translation.ugettext', '_', (['"""Количество линий"""'], {}), "('Количество линий')\n", (400, 420), True, 'from django.utils.translation import ugettext as _\n'), ((474, 493), 'django.utils.translation.ugettext', '_', (['"""Объём в линиях"""'], {}), "('Объём в линиях')\n", (475, 493), True, 'from django.utils.translation import ugettext as _\n'), ((612, 631), 'django.utils.translation.ugettext', '_', (['"""Ранг партнёров"""'], {}), "('Ранг партнёров')\n", (613, 631), True, 'from django.utils.translation import ugettext as _\n'), ((727, 770), 'django.utils.translation.ugettext', '_', (['"""Количество партнёров с указаным рангом"""'], {}), "('Количество партнёров с указаным рангом')\n", (728, 770), True, 'from 
django.utils.translation import ugettext as _\n'), ((817, 827), 'django.utils.translation.ugettext', '_', (['"""Бонус"""'], {}), "('Бонус')\n", (818, 827), True, 'from django.utils.translation import ugettext as _\n'), ((1048, 1084), 'django.utils.translation.ugettext', '_', (['"""К-во дней с момента регистрации"""'], {}), "('К-во дней с момента регистрации')\n", (1049, 1084), True, 'from django.utils.translation import ugettext as _\n'), ((1160, 1186), 'django.utils.translation.ugettext', '_', (['"""Бонус быстрого старта"""'], {}), "('Бонус быстрого старта')\n", (1161, 1186), True, 'from django.utils.translation import ugettext as _\n'), ((1292, 1300), 'django.utils.translation.ugettext', '_', (['"""Вес"""'], {}), "('Вес')\n", (1293, 1300), True, 'from django.utils.translation import ugettext as _\n'), ((1623, 1640), 'django.utils.translation.ugettext', '_', (['"""Пользователь"""'], {}), "('Пользователь')\n", (1624, 1640), True, 'from django.utils.translation import ugettext as _\n'), ((1761, 1779), 'django.utils.translation.ugettext', '_', (['"""Дата создания"""'], {}), "('Дата создания')\n", (1762, 1779), True, 'from django.utils.translation import ugettext as _\n'), ((2114, 2123), 'django.utils.translation.ugettext', '_', (['"""Ранг"""'], {}), "('Ранг')\n", (2115, 2123), True, 'from django.utils.translation import ugettext as _\n'), ((2222, 2243), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 1"""'], {}), "('Бонус за линию 1')\n", (2223, 2243), True, 'from django.utils.translation import ugettext as _\n'), ((2353, 2374), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 2"""'], {}), "('Бонус за линию 2')\n", (2354, 2374), True, 'from django.utils.translation import ugettext as _\n'), ((2484, 2505), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 3"""'], {}), "('Бонус за линию 3')\n", (2485, 2505), True, 'from django.utils.translation import ugettext as _\n'), ((2615, 2636), 'django.utils.translation.ugettext', '_', 
(['"""Бонус за линию 4"""'], {}), "('Бонус за линию 4')\n", (2616, 2636), True, 'from django.utils.translation import ugettext as _\n'), ((2746, 2767), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 5"""'], {}), "('Бонус за линию 5')\n", (2747, 2767), True, 'from django.utils.translation import ugettext as _\n'), ((2877, 2898), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 6"""'], {}), "('Бонус за линию 6')\n", (2878, 2898), True, 'from django.utils.translation import ugettext as _\n'), ((3008, 3029), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 7"""'], {}), "('Бонус за линию 7')\n", (3009, 3029), True, 'from django.utils.translation import ugettext as _\n'), ((3139, 3160), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 8"""'], {}), "('Бонус за линию 8')\n", (3140, 3160), True, 'from django.utils.translation import ugettext as _\n'), ((3270, 3291), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 9"""'], {}), "('Бонус за линию 9')\n", (3271, 3291), True, 'from django.utils.translation import ugettext as _\n'), ((3402, 3424), 'django.utils.translation.ugettext', '_', (['"""Бонус за линию 10"""'], {}), "('Бонус за линию 10')\n", (3403, 3424), True, 'from django.utils.translation import ugettext as _\n'), ((3767, 3788), 'django.utils.translation.ugettext', '_', (['"""Запись в истории"""'], {}), "('Запись в истории')\n", (3768, 3788), True, 'from django.utils.translation import ugettext as _\n'), ((3909, 3927), 'django.utils.translation.ugettext', '_', (['"""Дата создания"""'], {}), "('Дата создания')\n", (3910, 3927), True, 'from django.utils.translation import ugettext as _\n'), ((4320, 4329), 'django.utils.translation.ugettext', '_', (['"""Ранг"""'], {}), "('Ранг')\n", (4321, 4329), True, 'from django.utils.translation import ugettext as _\n'), ((4376, 4386), 'django.utils.translation.ugettext', '_', (['"""Бонус"""'], {}), "('Бонус')\n", (4377, 4386), True, 'from 
django.utils.translation import ugettext as _\n')] |
# --------------------------------
# Name: CEBatchFBXExport.py
# Purpose: Batch export of CE layers to game engine importable FBXs.
# Current Owner: <NAME>
# Last Modified: 7/12/2017
# Copyright: (c) Co-Adaptive
# CityEngine Vs: 2017
# Python Version: 2.7
# License
# Copyright 2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --------------------------------
# Import Modules
from scripting import *
import os
# get a CityEngine instance
ce = CE()
outputFolder = "/BatchExportFBX"
generateBoolean = False
deleteBoolean = False
fileType="BINARY" #Or "TEXT"
CollectTextures=True
CreateShapeGroups=True
IncludeMaterials=True
ExportGeometry= "MODEL_GEOMETRY_FALLBACK"#["MODEL_GEOMETRY_FALLBACK", "MODEL_GEOMETRY", "SHAPE_GEOMETRY"]
def assure_dir(Base_Dir_Path, Base_Name=""):
"""Worker function will ensure that a directory exists. If it does not it, will create one. If an
optional basename is passed it will create a folder with the base name joined if it does not exist."""
if os.path.exists(Base_Dir_Path):
if Base_Name:
new_folder = os.path.join(Base_Dir_Path, Base_Name)
if os.path.exists(new_folder):
return new_folder
else:
os.makedirs(new_folder)
return new_folder
else:
return Base_Dir_Path
else:
os.makedirs(Base_Dir_Path)
return Base_Dir_Path
# Turn off User Interface updates, or script might take forever.
@noUIupdate
def main():
# This function will export in batch web scenes to the input outputFolder
layers = ce.getObjectsFrom(ce.scene, ce.isLayer)
print("There are " + str(len(layers)) + " layers in the current scene.")
counter = 0
for layer in layers:
try:
ce.setSelection(layer)
OID = ce.getOID(layer)
layerName = ce.getName(layer)
if layerName == "Panorama" or layerName == "Scene Light":
continue # skip
if generateBoolean:
# generate models on selected shapes (assumes size of file is too big)
ce.generateModels(ce.selection())
ce.waitForUIIdle()
print ("Setting export settings for layer named: " + str(layerName))
exportSettings = FBXExportModelSettings()
assure_dir(ce.toFSPath("models"),str(outputFolder).strip(r"/"))
exportSettings.setOutputPath(ce.toFSPath("models") + str(outputFolder))
exportSettings.setBaseName(layerName)
exportSettings.setFileType(fileType)
exportSettings.setCollectTextures(CollectTextures)
exportSettings.setCreateShapeGroups(CreateShapeGroups)
exportSettings.setIncludeMaterials(IncludeMaterials)
exportSettings.setExportGeometry(ExportGeometry)
ce.export(ce.selection()[0], exportSettings)
print ("Exported layer named: " + str(layerName) + "to models/BatchExportFBX")
counter += 1
if deleteBoolean:
ce.delete(ce.selection())
pass
print("Exported FBX for layer named:" + str(layerName))
# Change this to an absolute path that points to your KML files.
except Exception as e:
print("Could not execute on counter " + str(counter))
print("Error:", e.args[0])
counter += 1
pass
# Call
if __name__ == '__main__':
print("Batch Layer script started.")
main()
print("Script completed.")
| [
"os.path.exists",
"os.path.join",
"os.makedirs"
] | [((1509, 1538), 'os.path.exists', 'os.path.exists', (['Base_Dir_Path'], {}), '(Base_Dir_Path)\n', (1523, 1538), False, 'import os\n'), ((1860, 1886), 'os.makedirs', 'os.makedirs', (['Base_Dir_Path'], {}), '(Base_Dir_Path)\n', (1871, 1886), False, 'import os\n'), ((1587, 1625), 'os.path.join', 'os.path.join', (['Base_Dir_Path', 'Base_Name'], {}), '(Base_Dir_Path, Base_Name)\n', (1599, 1625), False, 'import os\n'), ((1641, 1667), 'os.path.exists', 'os.path.exists', (['new_folder'], {}), '(new_folder)\n', (1655, 1667), False, 'import os\n'), ((1737, 1760), 'os.makedirs', 'os.makedirs', (['new_folder'], {}), '(new_folder)\n', (1748, 1760), False, 'import os\n')] |
import tkinter as tk
import random
class Controller(object):
"""
A class to control the movement of the snake in the game
"""
def __init__(self, screen):
"""
Binds the arrow keys to the game canvas.
Parameters:
screen (Canvas): The canvas for the Snake game.
"""
self._screen = screen
screen.bind("<Left>", self.left_move)
screen.bind("<Right>", self.right_move)
screen.bind("<Up>", self.up_move)
screen.bind("<Down>", self.down_move)
screen.focus_set()
def left_move(self, e):
"""Changes the direction of the snake head to 'left' when the left key is pressed."""
if self._screen.get_direction() == 'right':
pass
else:
self._screen.set_direction('left')
self._screen.delete(self._screen.get_snake_id())
self._screen.change_x_axis(-10)
self._screen.create_snake_head()
def right_move(self, e):
"""Changes the direction of the snake head to 'right' when the right key is pressed."""
if self._screen.get_direction() == 'left':
pass
else:
self._screen.set_direction('right')
self._screen.delete(self._screen.get_snake_id())
self._screen.change_x_axis(10)
self._screen.create_snake_head()
def up_move(self, e):
"""Changes the direction of the snake head to 'up' when the up key is pressed."""
if self._screen.get_direction() == 'down':
pass
else:
self._screen.set_direction('up')
self._screen.delete(self._screen.get_snake_id())
self._screen.change_y_axis(-10)
self._screen.create_snake_head()
def down_move(self, e):
"""Changes the direction of the snake head to 'down' when the down key is pressed."""
if self._screen.get_direction() == 'up':
pass
else:
self._screen.set_direction('down')
self._screen.delete(self._screen.get_snake_id())
self._screen.change_y_axis(10)
self._screen.create_snake_head()
class Screen(tk.Canvas):
    """Canvas widget that draws the snake, its tail and the snack.

    Also owns the game state: movement direction, score (tail length)
    and whether the game is still running.
    """

    def __init__(self, master):
        """Build a 500x300 white playing field inside the root window.

        Parameters:
            master (tk.Tk): The root window for the Snake game.
        """
        super().__init__(master)
        self._master = master
        self._width = 500
        self._height = 300
        self.config(bg='white', width=self._width, height=self._height)
        # _x and _y hold twice the snake-head centre position; see
        # create_snake_head, which divides them by two before drawing.
        self._x = self._width
        self._y = self._height
        self._game_status = True
        self._direction = 'right'
        self.create_snake_head()
        self.create_snack()
        self._tail_number = 0
        self._tail_list = []
        self._tail = self.create_line([(0, 0), (0, 0)])

    def get_snake_id(self):
        """Return the canvas id of the snake head.

        Returns:
            snake (int): The id of the snake head oval.
        """
        return self._snake

    def get_direction(self):
        """Return the snake's current movement direction.

        Returns:
            direction (str): One of 'up', 'down', 'left', 'right'.
        """
        return self._direction

    def get_tail_number(self):
        """Return the current tail length (doubles as the score).

        Returns:
            tail_number (int): The current length of the tail.
        """
        return self._tail_number

    def get_game_status(self):
        """Return True while the game is running, False once it is over.

        Returns:
            game_status (bool): The current status of the game.
        """
        return self._game_status

    def set_direction(self, direction):
        """Change the snake's movement direction.

        Parameter:
            direction (str): The new direction of the snake.
        """
        self._direction = direction

    def change_x_axis(self, change):
        """Shift the stored x coordinate by ``change``.

        Parameter:
            change (int): Amount added to the x-axis value.
        """
        self._x += change

    def change_y_axis(self, change):
        """Shift the stored y coordinate by ``change``.

        Parameter:
            change (int): Amount added to the y-axis value.
        """
        self._y += change

    def _corner_inside(self, outer, inner):
        """Return True when a corner of ``inner`` lies inside ``outer``.

        Mirrors the original corner test: the second corner is only
        examined when the first corner's x coordinate misses.
        """
        ox1, oy1, ox2, oy2 = outer
        ix1, iy1, ix2, iy2 = inner
        if ox1 <= ix1 <= ox2:
            return oy1 <= iy1 <= oy2
        if ox1 <= ix2 <= ox2:
            return oy1 <= iy2 <= oy2
        return False

    def check_collision(self):
        """Handle head/snack and head/tail collisions.

        Eating the snack grows the tail by 10 and spawns a new snack;
        touching any tail segment ends the game.
        """
        head = self.coords(self._snake)
        # Collision between the snake head and the snack.
        if self._corner_inside(head, self.coords(self._snack)):
            self.delete(self._snack)
            self._tail_number += 10
            self.create_snack()
        # Collision between the snake head and the tail segments.
        for segment in self._tail_list:
            if self._corner_inside(head, self.coords(segment)):
                self._game_status = False

    def create_snack(self):
        """Place a new 5x5 red snack at a random spot on the canvas."""
        sx = random.randint(0, self._width - 5)
        sy = random.randint(0, self._height - 5)
        self._snack = self.create_oval(sx, sy, sx + 5, sy + 5, fill='red', outline='red')

    def create_snake_head(self):
        """Draw the 10x10 snake head, wrapping around the playing field.

        The head is drawn at (_x / 2, _y / 2); once the head would leave
        the canvas, _x or _y is reset to the opposite edge.
        """
        x, y = self._x / 2, self._y / 2
        if self._x + 10 > self._width * 2:
            self._x = 0
        elif self._x < 0:
            self._x = self._width * 2
        if self._y + 10 > self._height * 2:
            self._y = 0
        elif self._y < 0:
            self._y = self._height * 2
        self._snake = self.create_oval(x, y, x + 10, y + 10, fill='black')

    def create_tail(self):
        """Append a tail segment behind the head and trim the tail to the score."""
        x1, y1, x2, y2 = self.coords(self._snake)
        cx = (x1 + x2) / 2
        cy = (y1 + y2) / 2
        seg = 10
        # Keep the previous segment before drawing the next one.
        self._tail_list += [self._tail, ]
        offsets = {
            'right': (-seg, 0),
            'left': (seg, 0),
            'up': (0, seg),
        }
        dx, dy = offsets.get(self._direction, (0, -seg))  # fallback: 'down'
        self._tail = self.create_line([(cx + dx, cy + dy), (cx, cy)])
        # Drop the oldest segment once the tail is longer than the score.
        if len(self._tail_list) > self._tail_number:
            self.delete(self._tail_list.pop(0))
class SnakeGame(object):
    """
    A game of Snake Xenzia: wires together the canvas, the controller
    and the score label, and drives the animation loop.
    """
    def __init__(self, master):
        """
        Construct the main game window.

        Parameters:
            master (tk.Tk): The root window for the Snake game.
        """
        self._master = master
        self._master.title("Snake Game")
        self._canvas = Screen(master)
        self._controls = Controller(self._canvas)
        self._canvas.pack(side=tk.BOTTOM)
        self._score = tk.Label(master, bg='black', fg='white')
        self._score.pack(fill='x')
        self._speed = 50
        self._master.after(self._speed, self.animation)

    def animation(self):
        """
        Advance the game by one frame: move the snake in its current
        direction, handle collisions, grow the tail, update the score,
        then schedule the next frame (faster as the score grows).
        """
        if self._canvas.get_direction() == 'right':
            self._controls.right_move('')
        elif self._canvas.get_direction() == 'left':
            self._controls.left_move('')
        elif self._canvas.get_direction() == 'up':
            self._controls.up_move('')
        else:
            self._controls.down_move('')
        self._canvas.check_collision()
        if not self._canvas.get_game_status():
            # BUGFIX: previously the loop kept scheduling frames after the
            # game ended, so the hidden canvas continued animating forever.
            # Stop the loop once the game is over.
            self.game_end()
            return
        self._canvas.create_tail()
        self.update_score()
        speed = self._speed - (self._canvas.get_tail_number() // 10)
        self._master.after(speed, self.animation)

    def update_score(self):
        """
        Update the game score on the label widget of the main window.
        """
        self._score.config(text=f'Score: {self._canvas.get_tail_number()}')

    def game_end(self):
        """
        Hide the game canvas and enlarge the score label to show the
        final score.
        """
        self._canvas.pack_forget()
        self._score.config(font='Courier, 30')
        self._score.pack(ipadx=200, ipady=200)
if __name__ == '__main__':
    # Build a fixed-size window and hand control to the Tk event loop.
    window = tk.Tk()
    app = SnakeGame(window)
    window.resizable(False, False)
    window.mainloop()
| [
"tkinter.Tk",
"random.randint",
"tkinter.Label"
] | [((9641, 9648), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (9646, 9648), True, 'import tkinter as tk\n'), ((5884, 5918), 'random.randint', 'random.randint', (['(0)', '(self._width - 5)'], {}), '(0, self._width - 5)\n', (5898, 5918), False, 'import random\n'), ((5936, 5971), 'random.randint', 'random.randint', (['(0)', '(self._height - 5)'], {}), '(0, self._height - 5)\n', (5950, 5971), False, 'import random\n'), ((8170, 8210), 'tkinter.Label', 'tk.Label', (['master'], {'bg': '"""black"""', 'fg': '"""white"""'}), "(master, bg='black', fg='white')\n", (8178, 8210), True, 'import tkinter as tk\n')] |
# coding: utf-8
from __future__ import with_statement, print_function, absolute_import
import torch
from torch import nn
import torch.nn.functional as F
import numpy as np
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """Build a bias-free 3x3 convolution whose padding tracks the dilation."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=dilation,
        groups=groups,
        dilation=dilation,
        bias=False,
    )
def conv1x1(in_planes, out_planes, stride=1):
    """Build a bias-free 1x1 (pointwise) convolution."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
class Bottleneck(nn.Module):
    """Residual bottleneck block: 1x1 reduce -> 3x3 -> 1x1 expand, plus shortcut.

    The middle width is ``planes * base_width / 64 * groups``; the optional
    ``downsample`` module projects the shortcut when shape or channels change.
    """

    def __init__(self, inplanes, planes, outplanes, stride=1, downsample=None, groups=1,
                 base_width=64, dilation=1, norm_layer=None):
        super(Bottleneck, self).__init__()
        norm_layer = nn.BatchNorm2d if norm_layer is None else norm_layer
        width = int(planes * (base_width / 64.)) * groups
        # Both conv2 and the downsample branch shrink the input when stride != 1.
        self.conv1 = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)
        self.conv2 = conv3x3(width, width, stride, groups, dilation)
        self.bn2 = norm_layer(width)
        self.conv3 = conv1x1(width, outplanes)
        self.bn3 = norm_layer(outplanes)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Shortcut path: identity, or a projection when downsample is set.
        shortcut = x if self.downsample is None else self.downsample(x)
        h = self.relu(self.bn1(self.conv1(x)))
        h = self.relu(self.bn2(self.conv2(h)))
        h = self.bn3(self.conv3(h))
        return self.relu(h + shortcut)
class ResNet(nn.Module):
    """Thin ResNet feature extractor for single-channel spectrogram input.

    ``out_dim`` exposes the channel count of the last stage (``planes[-1]``).
    """

    def __init__(self, planes, block=Bottleneck, layers=None, zero_init_residual=False,
                 groups=1, width_per_group=64, replace_stride_with_dilation=None, norm_layer=None):
        """
        Args:
            planes: list of five channel counts; ``planes[i]`` / ``planes[i+1]``
                are the bottleneck / output widths of stage i.
            block: residual block class (default: Bottleneck).
            layers: blocks per stage; defaults to [2, 3, 3, 3].
            zero_init_residual: zero the last BN weight of each residual
                branch so every block starts as an identity mapping.
            groups, width_per_group: grouped-convolution configuration.
            replace_stride_with_dilation: three flags, each trading the
                matching stage's stride for dilation.
            norm_layer: normalization layer class (default: nn.BatchNorm2d).
        """
        super(ResNet, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer
        if layers is None:
            # Default kept out of the signature to avoid a shared mutable default.
            layers = [2, 3, 3, 3]
        self.out_dim = planes[-1]
        self.inplanes = 64
        self.dilation = 1
        if replace_stride_with_dilation is None:
            # Each element indicates if we should replace the 2x2 stride
            # with a dilated convolution instead.
            replace_stride_with_dilation = [False, False, False]
        if len(replace_stride_with_dilation) != 3:
            raise ValueError("replace_stride_with_dilation should be None "
                             "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
        self.groups = groups
        self.base_width = width_per_group
        self.conv1 = nn.Conv2d(
            1, self.inplanes, kernel_size=7, stride=1, padding=3, bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool1 = nn.MaxPool2d(kernel_size=2, stride=2, padding=0)
        self.layer1 = self._make_layer(block, planes[0], planes[1], layers[0])
        self.layer2 = self._make_layer(block, planes[1], planes[2], layers[1], stride=2,
                                       dilate=replace_stride_with_dilation[0])
        self.layer3 = self._make_layer(block, planes[2], planes[3], layers[2], stride=2,
                                       dilate=replace_stride_with_dilation[1])
        self.layer4 = self._make_layer(block, planes[3], planes[4], layers[3], stride=2,
                                       dilate=replace_stride_with_dilation[2])
        self.maxpool2 = nn.MaxPool2d(kernel_size=(1, 3), stride=2, padding=0)
        # NOTE(review): 512 is hard-coded here, so planes[4] must be 512.
        self.fc = nn.Conv2d(
            512, 512, kernel_size=(1, 2), stride=1, padding=0, bias=True)
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(
                    m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-initialize the last BN in each residual branch so the branch
        # starts at zero and each block behaves like an identity; improves
        # results by ~0.2-0.3% per https://arxiv.org/abs/1706.02677
        if zero_init_residual:
            for m in self.modules():
                # BUGFIX: the original also tested isinstance(m, BasicBlock),
                # but BasicBlock is not defined in this file, so enabling
                # zero_init_residual always raised a NameError.
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)

    def _make_layer(self, block, planes, outplanes, blocks, stride=1, dilate=False):
        """Stack ``blocks`` residual blocks; optionally trade stride for dilation."""
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != outplanes:
            # Project the shortcut whenever shape or channel count changes.
            downsample = nn.Sequential(
                conv1x1(self.inplanes, outplanes, stride),
                norm_layer(outplanes),
            )
        layers = []
        layers.append(block(self.inplanes, planes, outplanes, stride, downsample,
                            self.groups, self.base_width, previous_dilation, norm_layer))
        self.inplanes = outplanes
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, outplanes, groups=self.groups,
                                base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer))
        return nn.Sequential(*layers)

    def forward(self, x):
        # x.shape = (batch, channel, time, frequency)
        # in_x  = (batch, 1, 250 (2.5 sec), 257 (fft points))
        # out_x = (batch, last_layer.outplanes, time/32, 1)
        if len(x.shape) <= 3:
            # Accept (batch, time, freq) input by adding a channel axis.
            x = x.unsqueeze(1)
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool1(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.maxpool2(x)
        x = self.fc(x)
        x = self.relu(x)
        return x
class NetVLAD(nn.Module):
    """NetVLAD pooling layer.

    Soft-assigns each local descriptor to learned cluster centroids and
    accumulates the residuals, producing a fixed-size, L2-normalized vector.
    """

    def __init__(self, num_clusters=8, dim=512, alpha=1.0,
                 normalize_input=True):
        """
        Args:
            num_clusters : int
                The number of clusters.
            dim : int
                Dimension of descriptors.
            alpha : float
                Initialization sharpness; larger values give harder assignments.
            normalize_input : bool
                If true, descriptor-wise L2 normalization is applied to input.
        """
        super(NetVLAD, self).__init__()
        self.num_clusters = num_clusters
        self.dim = dim
        self.alpha = alpha
        self.normalize_input = normalize_input
        self.conv = nn.Conv2d(dim, num_clusters, kernel_size=(1, 1), bias=True)
        self.centroids = nn.Parameter(torch.rand(num_clusters, dim))
        self._init_params()

    def _init_params(self):
        # Tie the soft-assignment conv to the centroids, as in the paper.
        scaled = 2.0 * self.alpha * self.centroids
        self.conv.weight = nn.Parameter(scaled.unsqueeze(-1).unsqueeze(-1))
        self.conv.bias = nn.Parameter(-self.alpha * self.centroids.norm(dim=1))

    def forward(self, x):
        batch, channels = x.shape[:2]
        if self.normalize_input:
            x = F.normalize(x, p=2, dim=1)  # across descriptor dim
        # Soft assignment of every spatial location to each cluster.
        assign = self.conv(x).view(batch, self.num_clusters, -1)
        assign = F.softmax(assign, dim=1)
        flat = x.view(batch, channels, -1)
        # Residuals of every descriptor to every centroid, weighted by assignment.
        expanded = flat.expand(self.num_clusters, -1, -1, -1).permute(1, 0, 2, 3)
        centers = self.centroids.expand(flat.size(-1), -1, -1).permute(1, 2, 0).unsqueeze(0)
        residual = (expanded - centers) * assign.unsqueeze(2)
        vlad = residual.sum(dim=-1)
        vlad = F.normalize(vlad, p=2, dim=2)  # intra-normalization
        vlad = vlad.view(x.size(0), -1)  # flatten
        return F.normalize(vlad, p=2, dim=1)  # final L2 normalization
class ThinResNet(nn.Module):
    """Thin-ResNet trunk + NetVLAD pooling for speaker embeddings.

    forward() returns ``(speaker_vector, y_pred)`` where ``y_pred`` are
    class scores over ``speaker_num`` speakers.
    """

    def __init__(self, speaker_num, time_dim, loss_fn, spkr_dim, resnet_config, netvlad_config):
        """
        Args:
            speaker_num: number of training speakers (output classes).
            time_dim: fixed number of time frames cropped from each input.
            loss_fn: 'softmax' or 'amsoftmax'.
            spkr_dim: dimensionality of the speaker embedding.
            resnet_config / netvlad_config: kwargs for ResNet / NetVLAD.
        """
        super(ThinResNet, self).__init__()
        self.resnet = ResNet(**resnet_config)
        self.netvlad = NetVLAD(**netvlad_config)
        self.time_dim = time_dim
        # vlad_dim = (time_dim + 31) // 32 * self.resnet.out_dim
        vlad_dim = time_dim // 32 * self.resnet.out_dim
        self.fc = nn.Linear(vlad_dim, spkr_dim)
        self.prediction_layer = nn.Linear(spkr_dim, speaker_num, bias=False)
        self.loss_fn = loss_fn

    def forward(self, x, hidden_len):
        """Crop a random ``time_dim``-frame window per sample and embed it.

        Args:
            x: (batch, time, freq) feature batch -- TODO confirm layout.
            hidden_len: per-sample valid lengths bounding the random crop.
        """
        # Clone so the per-sample random crops below do not write through a
        # view into the caller's tensor (the original mutated its input).
        x_cut = x[:, :self.time_dim, :].clone()
        for i, cut_end in enumerate(hidden_len):
            rand_end = cut_end - self.time_dim
            rand_end = rand_end if rand_end > 0 else 1
            # BUGFIX: np.random.random_integers was deprecated and is removed
            # in NumPy 2.0; randint's exclusive upper bound reproduces the old
            # inclusive [0, rand_end] range.
            cut_start = np.random.randint(0, rand_end + 1)
            x_cut[i] = x[i, cut_start:cut_start + self.time_dim]
        extracted_feature = self.resnet(x_cut)
        vlad = self.netvlad(extracted_feature)
        speaker_vector = self.fc(vlad)
        if self.loss_fn == 'softmax':
            y_pred = self.prediction_layer(speaker_vector)
            y_pred = F.softmax(y_pred, dim=1)
        elif self.loss_fn == 'amsoftmax':
            # AM-softmax expects a unit-norm embedding.
            speaker_vector = F.normalize(speaker_vector, p=2, dim=1)
            y_pred = self.prediction_layer(speaker_vector)
        else:
            raise NotImplementedError
        return speaker_vector, y_pred
| [
"torch.nn.ReLU",
"torch.nn.init.constant_",
"torch.nn.Sequential",
"numpy.random.random_integers",
"torch.nn.init.kaiming_normal_",
"torch.nn.Conv2d",
"torch.nn.functional.normalize",
"torch.nn.MaxPool2d",
"torch.nn.Linear",
"torch.nn.functional.softmax",
"torch.rand"
] | [((293, 424), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'out_planes'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': 'dilation', 'groups': 'groups', 'bias': '(False)', 'dilation': 'dilation'}), '(in_planes, out_planes, kernel_size=3, stride=stride, padding=\n dilation, groups=groups, bias=False, dilation=dilation)\n', (302, 424), False, 'from torch import nn\n'), ((526, 600), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'out_planes'], {'kernel_size': '(1)', 'stride': 'stride', 'bias': '(False)'}), '(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)\n', (535, 600), False, 'from torch import nn\n'), ((1343, 1364), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1350, 1364), False, 'from torch import nn\n'), ((2894, 2969), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1)', 'self.inplanes'], {'kernel_size': '(7)', 'stride': '(1)', 'padding': '(3)', 'bias': '(False)'}), '(1, self.inplanes, kernel_size=7, stride=1, padding=3, bias=False)\n', (2903, 2969), False, 'from torch import nn\n'), ((3048, 3069), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3055, 3069), False, 'from torch import nn\n'), ((3094, 3142), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)', 'padding': '(0)'}), '(kernel_size=2, stride=2, padding=0)\n', (3106, 3142), False, 'from torch import nn\n'), ((3750, 3803), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(1, 3)', 'stride': '(2)', 'padding': '(0)'}), '(kernel_size=(1, 3), stride=2, padding=0)\n', (3762, 3803), False, 'from torch import nn\n'), ((3822, 3893), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)'], {'kernel_size': '(1, 2)', 'stride': '(1)', 'padding': '(0)', 'bias': '(True)'}), '(512, 512, kernel_size=(1, 2), stride=1, padding=0, bias=True)\n', (3831, 3893), False, 'from torch import nn\n'), ((5729, 5751), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (5742, 5751), False, 'from 
torch import nn\n'), ((7077, 7136), 'torch.nn.Conv2d', 'nn.Conv2d', (['dim', 'num_clusters'], {'kernel_size': '(1, 1)', 'bias': '(True)'}), '(dim, num_clusters, kernel_size=(1, 1), bias=True)\n', (7086, 7136), False, 'from torch import nn\n'), ((7764, 7793), 'torch.nn.functional.softmax', 'F.softmax', (['soft_assign'], {'dim': '(1)'}), '(soft_assign, dim=1)\n', (7773, 7793), True, 'import torch.nn.functional as F\n'), ((8195, 8224), 'torch.nn.functional.normalize', 'F.normalize', (['vlad'], {'p': '(2)', 'dim': '(2)'}), '(vlad, p=2, dim=2)\n', (8206, 8224), True, 'import torch.nn.functional as F\n'), ((8314, 8343), 'torch.nn.functional.normalize', 'F.normalize', (['vlad'], {'p': '(2)', 'dim': '(1)'}), '(vlad, p=2, dim=1)\n', (8325, 8343), True, 'import torch.nn.functional as F\n'), ((8818, 8847), 'torch.nn.Linear', 'nn.Linear', (['vlad_dim', 'spkr_dim'], {}), '(vlad_dim, spkr_dim)\n', (8827, 8847), False, 'from torch import nn\n'), ((8880, 8924), 'torch.nn.Linear', 'nn.Linear', (['spkr_dim', 'speaker_num'], {'bias': '(False)'}), '(spkr_dim, speaker_num, bias=False)\n', (8889, 8924), False, 'from torch import nn\n'), ((7175, 7204), 'torch.rand', 'torch.rand', (['num_clusters', 'dim'], {}), '(num_clusters, dim)\n', (7185, 7204), False, 'import torch\n'), ((7597, 7623), 'torch.nn.functional.normalize', 'F.normalize', (['x'], {'p': '(2)', 'dim': '(1)'}), '(x, p=2, dim=1)\n', (7608, 7623), True, 'import torch.nn.functional as F\n'), ((9267, 9305), 'numpy.random.random_integers', 'np.random.random_integers', (['(0)', 'rand_end'], {}), '(0, rand_end)\n', (9292, 9305), True, 'import numpy as np\n'), ((9620, 9644), 'torch.nn.functional.softmax', 'F.softmax', (['y_pred'], {'dim': '(1)'}), '(y_pred, dim=1)\n', (9629, 9644), True, 'import torch.nn.functional as F\n'), ((3998, 4068), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""', 'nonlinearity': '"""relu"""'}), "(m.weight, mode='fan_out', nonlinearity='relu')\n", (4021, 
4068), False, 'from torch import nn\n'), ((9716, 9755), 'torch.nn.functional.normalize', 'F.normalize', (['speaker_vector'], {'p': '(2)', 'dim': '(1)'}), '(speaker_vector, p=2, dim=1)\n', (9727, 9755), True, 'import torch.nn.functional as F\n'), ((4170, 4200), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight', '(1)'], {}), '(m.weight, 1)\n', (4187, 4200), False, 'from torch import nn\n'), ((4217, 4245), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (4234, 4245), False, 'from torch import nn\n'), ((4643, 4677), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bn3.weight', '(0)'], {}), '(m.bn3.weight, 0)\n', (4660, 4677), False, 'from torch import nn\n'), ((4746, 4780), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bn2.weight', '(0)'], {}), '(m.bn2.weight, 0)\n', (4763, 4780), False, 'from torch import nn\n')] |
# https://discordapp.com/developers/docs/resources/channel#embed-limits
import discord
from ..lists import (
weapons_english_internal,
abilities_short_to_emoji,
top_500_emoji,
weapons_to_emoji,
)
# Cheers to Lean
class LohiEmbed:
    """Accumulates fields and renders them into one or more discord.Embed objects.

    Respects Discord's embed limits (256 title chars, 2048 description
    chars, 25 fields per embed, 1024 chars per field value, 6000 chars
    total per embed), splitting into continuation embeds as needed.
    """

    MAX_FIELDS = 25        # Discord's per-embed field limit
    MAX_TOTAL_CHARS = 6000  # Discord's total character budget per embed

    def __init__(
        self,
        title="\uFEFF",
        description="\uFEFF",
        color=0x60DD8E,
        url=None,
        footer="\uFEFF",
    ):
        self.title = title
        self.description = description
        self.color = color
        self.url = url
        self.fields = []
        self.footer = footer

    def add_field(self, name="\uFEFF", value="\uFEFF", inline=True):
        """Queue one (name, value, inline) field for rendering."""
        self.fields.append((name, value, inline))

    def add_weapon_build_fields(self, builds):
        """Queue one field per gear build, rendering abilities as emoji rows."""
        for build in builds:
            title = build["title"]
            discord_tag = f"{build['discord_user']['username']}#{build['discord_user']['discriminator']}"
            if title:
                title = f"{title} by {discord_tag}"
            else:
                title = f"{discord_tag}"
            ability_arrs = [build["headgear"], build["clothing"], build["shoes"]]
            ability_str = ""
            for arr in ability_arrs:
                for count, ability in enumerate(arr):
                    ability_str += abilities_short_to_emoji[ability]
                    if count == 0:
                        # Separate the main ability from the subs.
                        ability_str += "|"
                ability_str += "\n"
            self.add_field(title, ability_str, False)
        if len(self.fields) == 0:
            self.add_field(value="No builds found!")

    def get_embeds(self):
        """Render the queued fields into a list of discord.Embed objects."""
        # Reserve 8 characters of the title budget for the " (cont.)" suffix.
        title = self.title[: 256 - 8]
        desc = self.description[:2048]

        def new_embed(embed_title):
            e = discord.Embed(
                title=embed_title, description=desc, color=self.color, url=self.url
            )
            e.set_footer(text=self.footer)
            return e

        returned_embeds = []
        embed = new_embed(title)
        used_up = len(title) + len(desc)
        field_count = 0
        for name, value, inline in self.fields:
            name = name[:256]
            value = value[:1024]
            additional = len(name) + len(value)
            # BUGFIX: the old check used the *global* field index (i % 25 == 24),
            # which gave the first embed only 24 fields and went out of sync
            # after a size-based split. Count fields per embed instead.
            if field_count >= self.MAX_FIELDS or used_up + additional > self.MAX_TOTAL_CHARS:
                returned_embeds.append(embed)
                cont_title = title + " (cont.)"
                embed = new_embed(cont_title)
                used_up = len(cont_title) + len(desc)
                field_count = 0
            embed.add_field(name=name, value=value, inline=inline)
            used_up += additional
            field_count += 1
        returned_embeds.append(embed)
        return returned_embeds
| [
"discord.Embed"
] | [((1739, 1815), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'desc', 'color': 'self.color', 'url': 'self.url'}), '(title=title, description=desc, color=self.color, url=self.url)\n', (1752, 1815), False, 'import discord\n'), ((2291, 2367), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'desc', 'color': 'self.color', 'url': 'self.url'}), '(title=title, description=desc, color=self.color, url=self.url)\n', (2304, 2367), False, 'import discord\n')] |
import argparse
import asyncio
import json
import sys
import time
from google.protobuf import json_format
from google.protobuf.message import DecodeError
from highlevel.adapter.socket.isotp import ISOTPSocketAdapter, ISOTPAddress
from proto.gen.python.outech_pb2 import BusMessage
# Timestamp of the previously handled frame; main()'s callback uses it to
# report the inter-arrival time of CAN messages.
t_last = time.time()
async def main():
    """Parse CLI arguments, open an ISO-TP socket and dump decoded bus messages.

    Each received frame is parsed as a protobuf BusMessage and printed as
    JSON together with the time elapsed since the previous frame.
    """
    global t_last
    parser = argparse.ArgumentParser(description='Read packets on the CAN bus.')
    parser.add_argument('device', metavar='device', type=str,
                        help='CAN device to use. Can be virtual or physical.')
    parser.add_argument('id_rx', metavar='id_rx', type=int,
                        help='CAN address to accept when receiving. Should be in [0, 1023]')
    parser.add_argument('id_tx', metavar='id_tx', type=int,
                        help='CAN address to transmit to. Should be in [0, 1023]')
    args = parser.parse_args()
    t_last = time.time()
    # Sanity checks on the CLI input; RX and TX addresses must differ.
    assert args.id_rx != args.id_tx
    assert len(args.device) > 0
    print("Addresses:", args.id_rx, args.id_tx)
    isotp = ISOTPSocketAdapter(
        address=ISOTPAddress(args.device, args.id_rx, args.id_tx),
        adapter_name="receiver"
    )
    # NOTE(review): the first parameter shadows the builtin `bytes`; renaming
    # would be safer but the adapter's callback signature should be confirmed.
    async def callback(bytes, name):
        global t_last
        t = time.time()
        bus_message = BusMessage()
        try:
            bus_message.ParseFromString(bytes)
            printable_data = json_format.MessageToDict(bus_message,
                                                      including_default_value_fields=True)
            json_data = json.dumps(printable_data)
            # Print inter-arrival time in ms, adapter name and decoded payload.
            sys.stdout.write(f'{(t - t_last) * 1000:10.3f} ms:"{name}" ' + json_data + '\n')
            sys.stdout.flush()
        except DecodeError:
            print("Protobuf couldn't decode this:", bytes)
        t_last = t
    await isotp.init()
    isotp.register_callback(callback)
    try:
        # NOTE(review): trailing comma makes this a one-element tuple;
        # harmless, but presumably unintended.
        await isotp.run(),
    except KeyboardInterrupt:
        print("KeyboardInterrupt")
if __name__ == '__main__':
    # Run the async entry point until completion or interruption.
    asyncio.run(main())
| [
"sys.stdout.flush",
"proto.gen.python.outech_pb2.BusMessage",
"argparse.ArgumentParser",
"json.dumps",
"google.protobuf.json_format.MessageToDict",
"highlevel.adapter.socket.isotp.ISOTPAddress",
"time.time",
"sys.stdout.write"
] | [((293, 304), 'time.time', 'time.time', ([], {}), '()\n', (302, 304), False, 'import time\n'), ((356, 423), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Read packets on the CAN bus."""'}), "(description='Read packets on the CAN bus.')\n", (379, 423), False, 'import argparse\n'), ((908, 919), 'time.time', 'time.time', ([], {}), '()\n', (917, 919), False, 'import time\n'), ((1246, 1257), 'time.time', 'time.time', ([], {}), '()\n', (1255, 1257), False, 'import time\n'), ((1280, 1292), 'proto.gen.python.outech_pb2.BusMessage', 'BusMessage', ([], {}), '()\n', (1290, 1292), False, 'from proto.gen.python.outech_pb2 import BusMessage\n'), ((1085, 1134), 'highlevel.adapter.socket.isotp.ISOTPAddress', 'ISOTPAddress', (['args.device', 'args.id_rx', 'args.id_tx'], {}), '(args.device, args.id_rx, args.id_tx)\n', (1097, 1134), False, 'from highlevel.adapter.socket.isotp import ISOTPSocketAdapter, ISOTPAddress\n'), ((1382, 1457), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['bus_message'], {'including_default_value_fields': '(True)'}), '(bus_message, including_default_value_fields=True)\n', (1407, 1457), False, 'from google.protobuf import json_format\n'), ((1537, 1563), 'json.dumps', 'json.dumps', (['printable_data'], {}), '(printable_data)\n', (1547, 1563), False, 'import json\n'), ((1576, 1661), 'sys.stdout.write', 'sys.stdout.write', (['(f\'{(t - t_last) * 1000:10.3f} ms:"{name}" \' + json_data + \'\\n\')'], {}), '(f\'{(t - t_last) * 1000:10.3f} ms:"{name}" \' + json_data + \'\\n\'\n )\n', (1592, 1661), False, 'import sys\n'), ((1669, 1687), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1685, 1687), False, 'import sys\n')] |
import torch
from torch import nn
from torch.nn import functional as F
from lib.utils import bounding_box_batch, get_member
from models.pose_discriminator import MIDisc, MIDiscConv1
from lib.utils import toggle_grad
from torch.optim import Adam
from collections import namedtuple
# Feature maps returned by the custom VGG network, keyed by layer name.
VGGOutput = namedtuple(
    "VGGOutput",
    ("input", "relu1_2", "relu2_2", "relu3_2", "relu4_2", "relu5_2"),
)
def weight_decay(weights: list):
    """Return the summed squared L2 norm of every tensor in ``weights``.

    Computes ``sum_w <w, w>`` over the flattened tensors, i.e. a standard
    L2 weight-decay penalty.

    :param weights: list of tensors of arbitrary shape.
    :return: scalar tensor with the total squared norm; ``0.0`` for an
        empty list (the previous implementation raised on empty input).
    """
    if not weights:
        return torch.tensor(0.0)
    # torch.sum(w * w) equals torch.dot(w.reshape(-1), w.reshape(-1)).
    squared_norms = [torch.sum(w * w) for w in weights]
    return torch.sum(torch.stack(squared_norms, dim=-1))
def latent_kl(prior_mean, posterior_mean):
    """KL divergence between two unit-variance Gaussians given their means.

    Reduces to 0.5 * ||prior_mean - posterior_mean||^2 per sample, averaged
    over the batch. Inputs are 4-D (summed over dims 1-3).

    :param prior_mean: prior mean tensor.
    :param posterior_mean: posterior mean tensor, same shape.
    :return: scalar tensor.
    """
    per_element = 0.5 * (prior_mean - posterior_mean) ** 2
    per_sample = per_element.sum(dim=[1, 2, 3])
    return per_sample.mean()
def aggregate_kl_loss(prior_means, posterior_means):
    """Sum ``latent_kl`` over matching entries of two dicts of means.

    Pairing follows the dicts' iteration order -- presumably both are
    ordered consistently by the caller.
    """
    pairs = zip(prior_means.values(), posterior_means.values())
    terms = [latent_kl(p, q).unsqueeze(dim=-1) for p, q in pairs]
    return torch.sum(torch.cat(terms, dim=-1))
def compute_kl_loss(prior_means, posterior_means):
    """Sum ``latent_kl`` over paired sequences of prior/posterior means."""
    terms = [
        latent_kl(p, q).unsqueeze(dim=-1)
        for p, q in zip(prior_means, posterior_means)
    ]
    return torch.sum(torch.cat(terms, dim=-1))
def compute_kl_with_prior(means, logstds):
    """Mean KL to the standard-normal prior over a list of (mean, logstd) pairs.

    Each tensor is flattened to (batch, -1) before calling ``kl_loss``.
    """
    terms = []
    for m, l in zip(means, logstds):
        flat_mean = m.reshape(m.size(0), -1)
        flat_logstd = l.reshape(l.size(0), -1)
        terms.append(kl_loss(flat_mean, flat_logstd).unsqueeze(dim=-1))
    return torch.mean(torch.cat(terms, dim=-1))
def vgg_loss(custom_vgg, target, pred, weights=None):
    """Perceptual (VGG-feature) L1 loss between ``target`` and ``pred``.

    Both batches are passed through ``custom_vgg``; the mean absolute
    difference of each feature map is scaled by the matching entry of
    the network's ``loss_weights`` (read via ``get_member``).

    :param custom_vgg: feature extractor returning a dict whose keys match
        VGGOutput's fields; must expose ``loss_weights``.
    :param target: ground-truth image batch.
    :param pred: predicted image batch.
    :param weights: optional per-pixel weight map; when given, it is applied
        to the raw-input ("input") term only.
    :return: dict mapping VGG layer names to (1,)-shaped loss tensors.
    """
    target_feats = custom_vgg(target)
    pred_feats = custom_vgg(pred)
    target_feats = VGGOutput(**target_feats)
    pred_feats = VGGOutput(**pred_feats)
    names = list(pred_feats._asdict().keys())
    if weights is None:
        losses = {}
        for i, (tf, pf) in enumerate(zip(target_feats, pred_feats)):
            loss = get_member(custom_vgg, "loss_weights")[i] * torch.mean(
                torch.abs(tf - pf)
            ).unsqueeze(dim=-1)
            losses.update({names[i]: loss})
    else:
        # The first entry ("input") is weighted per pixel; the remaining
        # feature maps use the unweighted L1 term as above.
        losses = {
            names[0]: get_member(custom_vgg, "loss_weights")[0]
            * torch.mean(weights * torch.abs(target_feats[0] - pred_feats[0]))
            .unsqueeze(dim=-1)
            .to(torch.float)
        }
        for i, (tf, pf) in enumerate(zip(target_feats[1:], pred_feats[1:])):
            loss = get_member(custom_vgg, "loss_weights")[i + 1] * torch.mean(
                torch.abs(tf - pf)
            ).unsqueeze(dim=-1)
            losses.update({names[i + 1]: loss})
    return losses
def zoom_loss(target, pred, kps, img_sizes, custom_vgg, spatial_size):
    """Crop the prediction to the keypoint bounding boxes, then take the VGG loss."""
    cropped_pred = bounding_box_batch(kps, pred, img_sizes, spatial_size)
    return vgg_loss(custom_vgg, target, cropped_pred)
class GANLoss(nn.Module):
    """
    Adversarial loss; ``loss_type`` selects the underlying criterion:
    'vanilla' -> BCE with logits, 'mse' -> least-squares (LSGAN).
    """

    def __init__(self, loss_type: str = "mse"):
        super().__init__()
        factories = {"vanilla": nn.BCEWithLogitsLoss, "mse": nn.MSELoss}
        if loss_type not in factories:
            raise ValueError(
                f'The loss type for GANLoss must be either "vanilla" or "mse", but is actually {loss_type}.'
            )
        self.loss = factories[loss_type]()
        self.loss_type = loss_type

    def forward(self, pred, target):
        """Apply the selected criterion to (pred, target)."""
        return self.loss(pred, target)
class TripletLoss(nn.Module):
    """Margin-based triplet loss on squared Euclidean distances."""

    def __init__(self, margin=0.2):
        super(TripletLoss, self).__init__()
        self.margin = margin

    def forward(self, anchor, positive, negative, size_average=True):
        """Return mean (or sum) of relu(d(a,p) - d(a,n) + margin) over the batch."""
        d_pos = torch.sum((anchor - positive) ** 2, dim=1)
        d_neg = torch.sum((anchor - negative) ** 2, dim=1)
        hinge = F.relu(d_pos - d_neg + self.margin)
        if size_average:
            return hinge.mean()
        return hinge.sum()
class SequentialDiscLoss(nn.Module):
    """Discriminator loss for sequence discriminators.

    Supported ``loss_type`` values:
      * ``"bce"``   -- BCE with logits.
      * ``"mse"``   -- sigmoid followed by MSE (LSGAN-style).
      * ``"hinge"`` -- hinge loss; ``mode`` selects the real/fake/generator term.
    """

    def __init__(self, loss_type: str = "bce"):
        super().__init__()
        self.loss_type = loss_type
        if loss_type == "bce":
            self.loss = nn.BCEWithLogitsLoss()
        elif loss_type == "mse":
            # BUGFIX: the previous version wrapped Sigmoid and MSELoss in an
            # nn.Sequential, but Sequential.forward accepts only a single
            # input, so calling self.loss(pred, target) raised a TypeError.
            # Apply the sigmoid to the prediction, then compare with MSE.
            self._activation = nn.Sigmoid()
            self.loss = nn.MSELoss()
        else:
            self.loss = None
        assert self.loss_type in ["bce", "mse", "hinge"]

    def forward(self, pred, target, mode="real"):
        """Compute the configured loss; ``mode`` is used by hinge only."""
        if self.loss_type == "bce":
            return self.loss(pred, target)
        if self.loss_type == "mse":
            return self.loss(self._activation(pred), target)
        if self.loss_type == "hinge":
            assert mode in ["real", "fake", "gen"]
            if mode == "real":
                # Discriminator term for real samples.
                return torch.mean(torch.nn.ReLU()(1.0 - pred))
            if mode == "fake":
                # Discriminator term for fake samples.
                return torch.mean(torch.nn.ReLU()(1.0 + pred))
            # Generator term.
            return -torch.mean(pred)
        raise ValueError("Invalid loss type.")
class MILoss:
    """Mutual-information adversarial loss.

    Trains a small discriminator (MIDisc or MIDiscConv1) to distinguish
    joint from marginal latent samples; the generator is trained against
    the negated objective (train_gen).
    """

    def __init__(self, input_dim, device, **kwargs):
        """
        Args:
            input_dim: dimensionality of the discriminator input.
            device: torch device for the discriminator.
            **kwargs: configuration dict; reads n_layer_c / n_layer_midisc,
                dim_hidden_c / nf_hidden_midisc, conv_midisc, lr_init,
                weight_decay, tau and gamma.
        """
        n_layer = (
            kwargs["n_layer_c"]
            if "n_layer_midisc" not in kwargs
            else kwargs["n_layer_midisc"]
        )
        nf_hidden = (
            kwargs["dim_hidden_c"]
            if "nf_hidden_midisc" not in kwargs
            else kwargs["nf_hidden_midisc"]
        )
        # BUGFIX: kwargs is a plain dict, so hasattr(kwargs, "conv_midisc")
        # was always False and kwargs.conv_midisc / kwargs.lr_init raised
        # AttributeError; use dict lookups instead.
        if kwargs.get("conv_midisc"):
            self.disc = MIDiscConv1(n_layer, input_dim, nf_hidden)
            print("Using convolutional mi disc.")
        else:
            self.disc = MIDisc(n_layer, input_dim, nf_hidden)
            print("Using linear mi disc.")
        self.disc.to(device)
        self.loss = nn.BCEWithLogitsLoss(reduction="mean")
        self.disc_opt = Adam(
            params=[{"params": self.disc.parameters(), "name": "mi_disc"}],
            lr=kwargs["lr_init"],
            weight_decay=kwargs["weight_decay"],
        )
        self.scheduler = torch.optim.lr_scheduler.MultiStepLR(
            self.disc_opt, milestones=kwargs["tau"], gamma=kwargs["gamma"]
        )
        self.sigm = nn.Sigmoid()

    def train_disc(self, zb_joint, zb_marg, seq_len=0):
        """Run one optimizer step for the MI discriminator.

        Returns a dict of scalar logging values.
        NOTE(review): with the default seq_len=0 the joint loss is divided
        by zero (yielding inf); callers appear to be expected to pass the
        true sequence length -- confirm.
        """
        # Enable gradients for the discriminator parameters.
        toggle_grad(self.disc, True)
        self.disc.train()
        self.disc_opt.zero_grad()
        disc_joint = self.disc(zb_joint).squeeze()
        joint_p = torch.mean(self.sigm(disc_joint))
        out_dict = {"mi_true_p": joint_p.item()}
        disc_marg = self.disc(zb_marg).squeeze()
        marg_p = torch.mean(self.sigm(disc_marg))
        out_dict.update({"mi_fake_p": marg_p.item()})
        loss_joint = (
            self.loss(disc_joint, torch.ones_like(disc_joint)) / seq_len
        )
        loss_marg = self.loss(disc_marg, torch.zeros_like(disc_marg))
        out_dict.update({"mi_disc_loss_joint": loss_joint.item()})
        out_dict.update({"mi_disc_loss_marg": loss_marg.item()})
        loss = loss_joint + loss_marg
        out_dict.update({"mi_disc_loss": loss.item()})
        loss.backward(retain_graph=True)
        self.disc_opt.step()
        return out_dict

    def train_gen(self, zb_joint, zb_marg):
        """Generator-side objective: the negated discriminator loss."""
        # Freeze the discriminator while the generator is updated.
        toggle_grad(self.disc, False)
        self.disc.eval()
        zb_joint.requires_grad_(True)
        disc_joint = self.disc(zb_joint).squeeze()
        zb_marg.requires_grad_(True)
        disc_marg = self.disc(zb_marg).squeeze()
        loss_joint = self.loss(disc_joint, torch.ones_like(disc_joint))
        loss_marg = self.loss(disc_marg, torch.zeros_like(disc_marg))
        return -(loss_joint + loss_marg)

    def load(self, ckpt):
        """Restore discriminator and optimizer state from a checkpoint dict."""
        if ckpt is not None:
            self.disc.load_state_dict(ckpt["mi_disc"])
            self.disc_opt.load_state_dict(ckpt["mi_optimizer"])

    def get_save_dict(self):
        """Return the objects to persist.

        NOTE(review): this returns the module/optimizer objects themselves,
        while load() expects state_dicts -- the two are inconsistent; confirm
        how checkpoints are actually written.
        """
        return {"mi_disc": self.disc, "mi_optimizer": self.disc_opt}
def kl_loss(mu, logstd):
    """KL divergence of N(mu, exp(logstd)^2) from the standard normal.

    Summed over the feature dimension (``mu.shape[1]``) and averaged over
    the batch; callers flatten higher-rank tensors to (batch, k) first.
    """
    k = mu.shape[1]
    variance = torch.exp(logstd) ** 2
    per_sample = torch.sum(-logstd + 0.5 * (variance + mu ** 2), dim=-1) - 0.5 * k
    return per_sample.mean()
class FlowLoss(nn.Module):
    """Negative log-likelihood loss for a normalizing flow.

    Logs scalar floats (``.item()``); see FlowLossUncond for the variant
    that logs raw tensors.
    """

    def __init__(self,):
        super().__init__()

    def forward(self, sample, logdet):
        """Return (loss, log_dict) given latent samples and per-sample log-dets."""
        nll_term = torch.mean(nll(sample))
        assert len(logdet.shape) == 1
        logdet_term = -torch.mean(logdet)
        total = nll_term + logdet_term
        # NLL of a fresh standard-normal draw of the same shape, for monitoring.
        reference = torch.mean(nll(torch.randn_like(sample)))
        log = {
            "flow_loss": total.item(),
            "reference_nll_loss": reference.item(),
            "nlogdet_loss": logdet_term.item(),
            "nll_loss": nll_term.item(),
        }
        return total, log
class FlowLossUncond(nn.Module):
    """NLL loss for an unconditional normalizing flow.

    Unlike FlowLoss, the log dict contains raw tensors instead of floats.
    """

    def __init__(self):
        super().__init__()

    def forward(self, sample, logdet):
        """Return (loss, log_dict) given latent samples and per-sample log-dets."""
        nll_term = torch.mean(nll(sample))
        assert len(logdet.shape) == 1
        logdet_term = -torch.mean(logdet)
        total = nll_term + logdet_term
        # NLL of a fresh standard-normal draw of the same shape, for monitoring.
        reference = torch.mean(nll(torch.randn_like(sample)))
        log = {
            "flow_loss": total, "reference_nll_loss": reference,
            "nlogdet_loss": logdet_term, "nll_loss": nll_term,
        }
        return total, log
def nll(sample):
    """Per-sample negative log-likelihood under a standard normal, up to a constant.

    Expects a 4-D tensor; sums 0.5 * x^2 over the non-batch dimensions.
    """
    squared = sample ** 2
    return 0.5 * squared.sum(dim=[1, 2, 3])
| [
"torch.nn.ReLU",
"torch.optim.lr_scheduler.MultiStepLR",
"torch.nn.Sequential",
"lib.utils.bounding_box_batch",
"torch.exp",
"torch.pow",
"models.pose_discriminator.MIDiscConv1",
"torch.nn.MSELoss",
"torch.sum",
"torch.nn.Sigmoid",
"torch.mean",
"lib.utils.get_member",
"torch.zeros_like",
... | [((294, 387), 'collections.namedtuple', 'namedtuple', (['"""VGGOutput"""', "['input', 'relu1_2', 'relu2_2', 'relu3_2', 'relu4_2', 'relu5_2']"], {}), "('VGGOutput', ['input', 'relu1_2', 'relu2_2', 'relu3_2',\n 'relu4_2', 'relu5_2'])\n", (304, 387), False, 'from collections import namedtuple\n'), ((609, 635), 'torch.stack', 'torch.stack', (['tests'], {'dim': '(-1)'}), '(tests, dim=-1)\n', (620, 635), False, 'import torch\n'), ((647, 670), 'torch.sum', 'torch.sum', (['weight_norms'], {}), '(weight_norms)\n', (656, 670), False, 'import torch\n'), ((862, 890), 'torch.sum', 'torch.sum', (['kl'], {'dim': '[1, 2, 3]'}), '(kl, dim=[1, 2, 3])\n', (871, 890), False, 'import torch\n'), ((900, 914), 'torch.mean', 'torch.mean', (['kl'], {}), '(kl)\n', (910, 914), False, 'import torch\n'), ((3176, 3230), 'lib.utils.bounding_box_batch', 'bounding_box_batch', (['kps', 'pred', 'img_sizes', 'spatial_size'], {}), '(kps, pred, img_sizes, spatial_size)\n', (3194, 3230), False, 'from lib.utils import bounding_box_batch, get_member\n'), ((8614, 8631), 'torch.exp', 'torch.exp', (['logstd'], {}), '(logstd)\n', (8623, 8631), False, 'import torch\n'), ((811, 852), 'torch.pow', 'torch.pow', (['(prior_mean - posterior_mean)', '(2)'], {}), '(prior_mean - posterior_mean, 2)\n', (820, 852), False, 'import torch\n'), ((4309, 4368), 'torch.nn.functional.relu', 'F.relu', (['(distance_positive - distance_negative + self.margin)'], {}), '(distance_positive - distance_negative + self.margin)\n', (4315, 4368), True, 'from torch.nn import functional as F\n'), ((6311, 6349), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {'reduction': '"""mean"""'}), "(reduction='mean')\n", (6331, 6349), False, 'from torch import nn\n'), ((6571, 6676), 'torch.optim.lr_scheduler.MultiStepLR', 'torch.optim.lr_scheduler.MultiStepLR', (['self.disc_opt'], {'milestones': "kwargs['tau']", 'gamma': "kwargs['gamma']"}), "(self.disc_opt, milestones=kwargs['tau'\n ], gamma=kwargs['gamma'])\n", (6607, 6676), False, 
'import torch\n'), ((6714, 6726), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (6724, 6726), False, 'from torch import nn\n'), ((6819, 6847), 'lib.utils.toggle_grad', 'toggle_grad', (['self.disc', '(True)'], {}), '(self.disc, True)\n', (6830, 6847), False, 'from lib.utils import toggle_grad\n'), ((7796, 7825), 'lib.utils.toggle_grad', 'toggle_grad', (['self.disc', '(False)'], {}), '(self.disc, False)\n', (7807, 7825), False, 'from lib.utils import toggle_grad\n'), ((8641, 8696), 'torch.sum', 'torch.sum', (['(-logstd + 0.5 * (std ** 2 + mu ** 2))'], {'dim': '(-1)'}), '(-logstd + 0.5 * (std ** 2 + mu ** 2), dim=-1)\n', (8650, 8696), False, 'import torch\n'), ((3558, 3580), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {}), '()\n', (3578, 3580), False, 'from torch import nn\n'), ((4636, 4658), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {}), '()\n', (4656, 4658), False, 'from torch import nn\n'), ((6049, 6091), 'models.pose_discriminator.MIDiscConv1', 'MIDiscConv1', (['n_layer', 'input_dim', 'nf_hidden'], {}), '(n_layer, input_dim, nf_hidden)\n', (6060, 6091), False, 'from models.pose_discriminator import MIDisc, MIDiscConv1\n'), ((6180, 6217), 'models.pose_discriminator.MIDisc', 'MIDisc', (['n_layer', 'input_dim', 'nf_hidden'], {}), '(n_layer, input_dim, nf_hidden)\n', (6186, 6217), False, 'from models.pose_discriminator import MIDisc, MIDiscConv1\n'), ((7364, 7391), 'torch.zeros_like', 'torch.zeros_like', (['disc_marg'], {}), '(disc_marg)\n', (7380, 7391), False, 'import torch\n'), ((8070, 8097), 'torch.ones_like', 'torch.ones_like', (['disc_joint'], {}), '(disc_joint)\n', (8085, 8097), False, 'import torch\n'), ((8140, 8167), 'torch.zeros_like', 'torch.zeros_like', (['disc_marg'], {}), '(disc_marg)\n', (8156, 8167), False, 'import torch\n'), ((8990, 9008), 'torch.mean', 'torch.mean', (['logdet'], {}), '(logdet)\n', (9000, 9008), False, 'import torch\n'), ((9589, 9607), 'torch.mean', 'torch.mean', (['logdet'], {}), '(logdet)\n', 
(9599, 9607), False, 'import torch\n'), ((9975, 9995), 'torch.pow', 'torch.pow', (['sample', '(2)'], {}), '(sample, 2)\n', (9984, 9995), False, 'import torch\n'), ((3638, 3650), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (3648, 3650), False, 'from torch import nn\n'), ((4771, 4798), 'torch.nn.Sequential', 'nn.Sequential', (['*loss_layers'], {}), '(*loss_layers)\n', (4784, 4798), False, 'from torch import nn\n'), ((7274, 7301), 'torch.ones_like', 'torch.ones_like', (['disc_joint'], {}), '(disc_joint)\n', (7289, 7301), False, 'import torch\n'), ((9092, 9116), 'torch.randn_like', 'torch.randn_like', (['sample'], {}), '(sample)\n', (9108, 9116), False, 'import torch\n'), ((9691, 9715), 'torch.randn_like', 'torch.randn_like', (['sample'], {}), '(sample)\n', (9707, 9715), False, 'import torch\n'), ((2381, 2419), 'lib.utils.get_member', 'get_member', (['custom_vgg', '"""loss_weights"""'], {}), "(custom_vgg, 'loss_weights')\n", (2391, 2419), False, 'from lib.utils import bounding_box_batch, get_member\n'), ((2600, 2638), 'lib.utils.get_member', 'get_member', (['custom_vgg', '"""loss_weights"""'], {}), "(custom_vgg, 'loss_weights')\n", (2610, 2638), False, 'from lib.utils import bounding_box_batch, get_member\n'), ((2888, 2926), 'lib.utils.get_member', 'get_member', (['custom_vgg', '"""loss_weights"""'], {}), "(custom_vgg, 'loss_weights')\n", (2898, 2926), False, 'from lib.utils import bounding_box_batch, get_member\n'), ((4719, 4731), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (4729, 4731), False, 'from torch import nn\n'), ((4733, 4745), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (4743, 4745), False, 'from torch import nn\n'), ((2453, 2471), 'torch.abs', 'torch.abs', (['(tf - pf)'], {}), '(tf - pf)\n', (2462, 2471), False, 'import torch\n'), ((2964, 2982), 'torch.abs', 'torch.abs', (['(tf - pf)'], {}), '(tf - pf)\n', (2973, 2982), False, 'import torch\n'), ((5245, 5260), 'torch.nn.ReLU', 'torch.nn.ReLU', ([], {}), '()\n', (5258, 5260), False, 
'import torch\n'), ((5499, 5515), 'torch.mean', 'torch.mean', (['pred'], {}), '(pred)\n', (5509, 5515), False, 'import torch\n'), ((5391, 5406), 'torch.nn.ReLU', 'torch.nn.ReLU', ([], {}), '()\n', (5404, 5406), False, 'import torch\n'), ((2677, 2719), 'torch.abs', 'torch.abs', (['(target_feats[0] - pred_feats[0])'], {}), '(target_feats[0] - pred_feats[0])\n', (2686, 2719), False, 'import torch\n')] |
"""Implementation of circuit for ML
"""
from numpy import pi, random, zeros_like, zeros, log2
class circuitML():
    """Abstract Quantum ML circuit interface.

    Provides a unified interface to run multiple parametric circuits with
    different input and model parameters, agnostic of the backend,
    implemented in the subclasses.

    Parameters
    ----------
    make_circuit : callable of signature self.make_circuit
        Function to generate the circuit corresponding to input `x` and
        `params`.
    nbqbits : int
        Number of qubits.
    nbparams : int
        Number of parameters.
    cbuilder : circuitBuilder
        Circuit builder class to be used. It must correspond to the subclass
        implementation.

    Attributes
    ----------
    nbqbits : int
        Number of qubits.
    nbparams : int
        Number of parameters.
    """
    def __init__(self, make_circuit, nbqbits, nbparams, cbuilder):
        self.nbqbits = nbqbits
        self.nbparams = nbparams
        self.__set_builder__(cbuilder)
        self.make_circuit = make_circuit

    def __set_builder__(self, cbuilder):
        # Validate the builder before storing it, so an incompatible
        # builder fails at construction time rather than at run time.
        self.__verify_builder__(cbuilder)
        self._circuitBuilder = cbuilder

    def __verify_builder__(self, cbuilder):
        # Subclasses must check that `cbuilder` matches their backend.
        raise NotImplementedError

    def run(self, X, params, nbshots=None, job_size=None):
        """Run the circuit with input `X` and parameters `params`.

        Parameters
        ----------
        X : array-like
            Input matrix of shape *(nb_samples, nb_features)*.
        params : vector-like
            Parameter vector.
        nbshots : int, optional
            Number of shots for the circuit run, by default ``None``. If
            ``None``, uses the backend default.
        job_size : int, optional
            Maximum job size, to split the circuit runs, by default ``None``.
            If ``None``, put all *nb_samples* in the same job.

        Returns
        -------
        array
            Bitstring counts as an array of shape *(nb_samples, 2**nbqbits)*
        """
        raise NotImplementedError

    def random_params(self, seed=None):
        """Generate a valid vector of random parameters.

        Parameters
        ----------
        seed : int, optional
            random seed, by default ``None``

        Returns
        -------
        vector
            Vector of random parameters.
        """
        # BUG FIX: the original used ``if seed:``, which silently skipped
        # seeding when seed == 0; compare against None instead.
        if seed is not None:
            random.seed(seed)
        return random.randn(self.nbparams)

    def make_circuit(self, bdr, x, params):
        """Generate the circuit corresponding to input `x` and `params`.

        NOTE: This function is to be provided by the user, with the present
        signature.

        Parameters
        ----------
        bdr : circuitBuilder
            A circuit builder.
        x : vector-like
            Input sample
        params : vector-like
            Parameter vector.

        Returns
        -------
        circuitBuilder
            Instructed builder
        """
        raise NotImplementedError

    def __eq__(self, other):
        # Two circuitML objects are considered equal when they wrap the
        # very same circuit function (identity, not structural equality).
        return self.make_circuit is other.make_circuit

    def __repr__(self):
        return "<circuitML>"

    def __str__(self):
        return self.__repr__()

    def grad(self, X, params, v=None, eps=None, nbshots=None, job_size=None):
        """Compute the gradient of the circuit w.r.t. parameters *params* on
        input *X*.

        Uses finite differences of the circuit runs.

        Parameters
        ----------
        X : array-like
            Input matrix of shape *(nb_samples, nb_features)*.
        params : vector-like
            Parameter vector of length *nb_params*.
        v : array-like
            Vector or matrix to right multiply the Jacobian with.
        eps : float, optional
            Epsilon for finite differences. By default uses ``1e-8`` if
            `nbshots` is not provided, else uses :math:`\\pi /
            \\sqrt{\\text{nbshots}}`
        nbshots : int, optional
            Number of shots for the circuit run, by default ``None``. If
            ``None``, uses the backend default.
        job_size : int, optional
            Maximum job size, to split the circuit runs, by default ``None``.
            If ``None``, put all *nb_samples* in the same job.

        Returns
        -------
        array
            Jacobian matix as an array of shape *(nb_params, 2**nbqbits)* if
            `v` is None, else Jacobian-vector product: ``J(circuit) @ v``
        """
        dim_out = 2**self.nbqbits
        if v is not None:
            # Right-multiplying by v collapses the output dimension.
            dim_out = v.shape[0] if len(v.shape) > 1 else 1
        if eps is None:
            if nbshots is None:
                eps = 1e-8
            else:
                # BUG FIX: the original computed this shot-noise-aware
                # default but never assigned it, leaving eps = None and
                # crashing on ``eps * nbshots`` below.
                eps = max(
                    log2(self.nbqbits) * 2 * pi / 3 * min(.5, 1/nbshots**.25),
                    1e-8)
        # Normalization: raw counts are divided by nbshots when sampling.
        num = eps if nbshots is None else eps * nbshots
        out = zeros((self.nbparams, dim_out))
        run_out = self.run(X, params, nbshots, job_size) / num
        for i in range(len(params)):
            # Forward finite difference along the i-th parameter axis.
            d = zeros_like(params)
            d[i] = eps
            pd = self.run(X, params + d, nbshots, job_size) / num - run_out
            out[i] = pd if v is None else pd @ v
        return out
| [
"numpy.zeros_like",
"numpy.zeros",
"numpy.random.seed",
"numpy.log2",
"numpy.random.randn"
] | [((2458, 2485), 'numpy.random.randn', 'random.randn', (['self.nbparams'], {}), '(self.nbparams)\n', (2470, 2485), False, 'from numpy import pi, random, zeros_like, zeros, log2\n'), ((4925, 4956), 'numpy.zeros', 'zeros', (['(self.nbparams, dim_out)'], {}), '((self.nbparams, dim_out))\n', (4930, 4956), False, 'from numpy import pi, random, zeros_like, zeros, log2\n'), ((2425, 2442), 'numpy.random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (2436, 2442), False, 'from numpy import pi, random, zeros_like, zeros, log2\n'), ((5073, 5091), 'numpy.zeros_like', 'zeros_like', (['params'], {}), '(params)\n', (5083, 5091), False, 'from numpy import pi, random, zeros_like, zeros, log2\n'), ((4794, 4812), 'numpy.log2', 'log2', (['self.nbqbits'], {}), '(self.nbqbits)\n', (4798, 4812), False, 'from numpy import pi, random, zeros_like, zeros, log2\n')] |
'''
mb_24x256_512.py
Very Simple MicroPython module/driver for Microchip 24x256 and 24x512 I2C EEPROM
Author: <EMAIL>
Version: 0.1, 2021-06-04
**NOTE(1): There is no guarantee that this software will work in the way you expect (or at all).
**Use at your own risk.
**NOTE(2): This driver is intended to be as simple as possible to use. As a result it
**does byte writes instead of page writes. That means that each time you write a byte,
**the entire page is re-written in the EEPROM. This can/will wear the EEPROM significantly
**faster than doing a page write. Other options are sequential writes or saving data in RAM
**and writing them 128 bytes at a time but they are not part of this driver.
**NOTE(3): Thanks to KJRC on the Adafruit forums for testing and providing feedback and ideas
Prerequisites:
- RP2040 silicon (tested with Raspberry Pi Pico), should work with other MCUs with HW or SW I2C
- MicroPython v1.15
- 24x256/512 connected to hardware I2C pins, should also work with SW I2C
Usage:
- Set up I2C (software or hardware)
- Create constructor:
thisMemoryChipDeviceName = mb_24x256_512.mb_24x256_512(i2c, i2c_address, EEPROM_DEVICE)
where i2c_address is a base-10 value that corresponds to the 7-bit i2c address of the EEPROM, and
where EEPROM_DEVICE is either "24x256" or "24x512"
- To write a single byte to an address:
thisMemoryChipDeviceName.write_byte(address, value)
- To read a single byte from an address:
thisMemoryChipDeviceName.read_byte(address), value is returned as an int of range 0-255.
For more information, consult the Raspberry Pi Pico Micropython SDK documentation at:
https://datasheets.raspberrypi.org/pico/raspberry-pi-pico-python-sdk.pdf
and the MicroPython documentation at:
https://micropython.org
and the Microchip 24x256/512 datasheets at:
https://www.microchip.com
'''
from machine import Pin, I2C
import utime
class mb_24x256_512:
    """Driver for Microchip 24x256/512 I2C EEPROM devices.

    Supports single-byte reads and writes with 16-bit in-device addressing.
    Address range is validated against the selected device capacity.
    """

    def __init__(self, i2c, i2c_address, EEPROM_device):
        """Store the I2C bus handle and resolve the device capacity.

        :param i2c: configured machine.I2C (hardware or software) instance
        :param i2c_address: sequence whose first element is the 7-bit I2C
            address of the EEPROM
        :param EEPROM_device: either ``"24x256"`` or ``"24x512"``
        :raises ValueError: if ``EEPROM_device`` is not a supported name
        """
        self.i2c = i2c
        # NOTE(review): only the first element is used, so callers are
        # expected to pass a sequence (e.g. the list from i2c.scan()) --
        # confirm against the calling code.
        self.i2c_address = i2c_address[0]
        # BUG FIX: the original had unreachable ``return()`` statements
        # after each ``raise``; they are dead code and have been removed.
        if EEPROM_device == "24x256":
            self._MAX_ADDRESS = 32767
        elif EEPROM_device == "24x512":
            self._MAX_ADDRESS = 65535
        else:
            raise ValueError("Please choose a device from the list")

    def write_byte(self, address, data):
        """Write a single byte to the EEPROM.

        Note: this rewrites the whole page in the EEPROM, which wears the
        device faster than page writes (see module notes).

        :param int address: target address, 0..self._MAX_ADDRESS
        :param int data: byte value 0-255
        :raises ValueError: if address or data is out of range
        """
        if address > self._MAX_ADDRESS or address < 0:
            raise ValueError("Address is outside of device address range")
        if data > 255 or data < 0:
            raise ValueError("You can only pass an 8-bit data value 0-255 to this function")
        self.i2c.writeto_mem(self.i2c_address, address, bytes([data]), addrsize=16)
        utime.sleep_ms(10)  # EEPROM needs time to write and will not respond if not ready

    def read_byte(self, address):
        """Read a single byte from the EEPROM.

        :param int address: source address, 0..self._MAX_ADDRESS
        :returns: the byte read, as an int in range 0-255
        :raises ValueError: if address is out of range
        """
        if address > self._MAX_ADDRESS or address < 0:
            raise ValueError("Address is outside of device address range")
        # BUG FIX: removed the dead ``self.value_read = bytearray(1)``
        # pre-assignment that was overwritten immediately.  The attribute
        # itself is kept so callers may still inspect ``self.value_read``.
        self.value_read = self.i2c.readfrom_mem(self.i2c_address, address,
                                                1, addrsize=16)
        self.value_read = int.from_bytes(self.value_read, "big")
        return self.value_read
| [
"utime.sleep_ms"
] | [((2949, 2967), 'utime.sleep_ms', 'utime.sleep_ms', (['(10)'], {}), '(10)\n', (2963, 2967), False, 'import utime\n')] |
import os
from permedcoe.core.constants import SEPARATOR
DO_NOT_PARSE = (".pyc", ".def", ".sif")
PARSING_KEY = "NEW_NAME"
PARSING_PATH = "/PATH/TO/"

def adapt_name(name, path):
    """ Recursively substitute the template placeholders under *path*.

    In every parseable file below *path*, replaces PARSING_KEY with
    *name* and PARSING_PATH with ``path + "/"``.

    Args:
        name (str): Name to personalize the files.
        path (str): Root directory to scan.
    """
    template_path = path + "/"
    for current_dir, _subdirs, file_names in os.walk(path):
        for fname in file_names:
            if fname.endswith(DO_NOT_PARSE):
                continue  # binary/template artifacts: leave untouched
            target = os.path.join(current_dir, fname)
            with open(target) as handle:
                contents = handle.read()
            contents = contents.replace(PARSING_KEY, name)
            contents = contents.replace(PARSING_PATH, template_path)
            with open(target, "w") as handle:
                handle.write(contents)
def rename_folder(name, path):
    """ Rename the template building-block source folder to *name*.

    Moves ``<path>/src/bb`` to ``<path>/src/<name>``.

    Args:
        name (str): New folder name.
        path (str): Artifact root containing ``src/bb``.
    """
    os.rename(os.path.join(path, "src", "bb"),
              os.path.join(path, "src", name))
def show_todo(path):
    """ Print every pending TODO message found below *path*.

    Walks the tree, skipping non-parseable files, and delegates the
    per-file reporting to __show_work__.

    Args:
        path (str): Artifact path.
    """
    print(SEPARATOR)
    print("To be completed:")
    print()
    for current_dir, _subdirs, file_names in os.walk(path):
        for fname in file_names:
            if not fname.endswith(DO_NOT_PARSE):
                __show_work__(os.path.join(current_dir, fname))
    print(SEPARATOR)
def __show_work__(file_path):
    """ Print the TODO messages contained in the given file.

    Each TODO line is reported as ``- <basename>:(<0-based line>): <msg>``,
    where the message is the text after the first ``#``.

    Args:
        file_path (str): File to be analyzed.
    """
    with open(file_path) as f:
        lines = f.readlines()
    for position, line in enumerate(lines):
        if "TODO" in line:
            # BUG FIX: the original ``_, message = line.split("#")``
            # raised ValueError whenever a TODO line contained zero or
            # more than one "#"; partition always yields three parts.
            _, _sep, message = line.partition("#")
            print("- %s:(%s):\t%s" % (str(os.path.basename(file_path)),
                                      str(position),
                                      str(message).strip()))
| [
"os.rename",
"os.path.join",
"os.path.basename",
"os.walk"
] | [((464, 477), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (471, 477), False, 'import os\n'), ((1107, 1138), 'os.path.join', 'os.path.join', (['path', '"""src"""', '"""bb"""'], {}), "(path, 'src', 'bb')\n", (1119, 1138), False, 'import os\n'), ((1157, 1188), 'os.path.join', 'os.path.join', (['path', '"""src"""', 'name'], {}), "(path, 'src', name)\n", (1169, 1188), False, 'import os\n'), ((1193, 1223), 'os.rename', 'os.rename', (['source', 'destination'], {}), '(source, destination)\n', (1202, 1223), False, 'import os\n'), ((1450, 1463), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1457, 1463), False, 'import os\n'), ((592, 631), 'os.path.join', 'os.path.join', (['directory_name', 'file_name'], {}), '(directory_name, file_name)\n', (604, 631), False, 'import os\n'), ((1578, 1617), 'os.path.join', 'os.path.join', (['directory_name', 'file_name'], {}), '(directory_name, file_name)\n', (1590, 1617), False, 'import os\n'), ((2044, 2071), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (2060, 2071), False, 'import os\n')] |
import collections
import datetime
import json
import logging
import time
# Python 2 compatibility
try:
from logging.handlers import QueueHandler
except ImportError:
from logutils.queue import QueueHandler
# Python 2/3 hack for stringify, below
try:
unicode
except NameError:
unicode = str
# ANSI SGR foreground color codes used for colorized tty log output.
nocolor = 0
red = 31
green = 32
yellow = 33
blue = 34
gray = 37
# Wall-clock time at module import; secs_since() measures elapsed
# seconds from this reference.
starttime = time.time()
def secs_since(starttime):
    """Return the whole seconds elapsed since `starttime`, zero-padded.

    :param starttime: reference time to measure from
    :type starttime: int number of seconds since the epoch
    :returns: elapsed whole seconds, left-padded with zeros to width 4
    :rtype: str
    """
    elapsed = int(time.time() - starttime)
    return str(elapsed).rjust(4, "0")
def strtime():
    """Return the current local time as an RFC3339-formatted string.

    The string is ``YYYY-MM-DDTHH:MM:SS`` followed by the signed UTC
    offset, e.g. ``2021-06-04T12:00:00+02:00``.

    :returns: current time in RFC3339 format
    :rtype: str
    """
    now = time.time()
    stamp = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(now))
    # Derive the local UTC offset from the difference between local and
    # UTC interpretations of the same timestamp.
    local_dt = datetime.datetime.fromtimestamp(now)
    utc_dt = datetime.datetime.utcfromtimestamp(now)
    offset_secs = (local_dt - utc_dt).total_seconds()
    hours = int(offset_secs // 3600)
    mins = abs(int(offset_secs % 3600 // 60))
    return '{0}{1:0=+3}:{2:0>2}'.format(stamp, hours, mins)
def levelcolor(level):
    """Return the terminal color number appropriate for the logging level.

    :param int level: logging level in integer form
    :returns: the SGR parameter number for foreground text color
    :rtype: int
    """
    # Unknown levels (e.g. INFO) fall through to blue, like the original
    # if/elif chain's else branch.
    colors = {
        logging.DEBUG: green,
        logging.WARNING: yellow,
        logging.ERROR: red,
        logging.CRITICAL: red,
    }
    return colors.get(level, blue)
class DictLogFilter(object):
    """A logging 'filter' that adds arbitrary data to messages.

    Depending on the output format type in self.output the filter converts
    a dict-type log msg to the appropriate format. The formats are intended
    to mimic the Sirupsen/logrus formats and are: 'json', 'text', and
    'tty'. If self.output is None or any other string, 'json' formatting
    is used.

    'json' renders a valid json object with the current time and the log
    level merged into the msg dict.

    'text' renders a 'key="val" key="val"' string with the current time
    and log level merged into the msg dict.

    'tty' renders a colorized line: the log level (truncated to four
    characters), the seconds since program start, the 'msg' value, and
    then the remaining dict entries as 'key=val' pairs.
    """

    def __init__(self, output=None):
        """Create a DictLogFilter, remembering the requested output format.

        :param output: the output format
        :type output: None or str from ['json', 'text', or 'tty']
        :returns: a new DictLogFilter object
        :rtype: DictLogFilter
        """
        self.output = output

    def filter(self, record):
        """Format the log record in place if record.msg is a dict.

        Dispatches to the '*_filter' method matching self.output; json
        formatting is the default for unrecognized values.

        :param record: a log record instance
        :type record: logging.LogRecord
        :returns: always True to indicate the record should be handled
        :rtype: bool
        """
        if not isinstance(record.msg, dict):
            return True
        dispatch = {'text': self.text_filter, 'tty': self.tty_filter}
        return dispatch.get(self.output, self.json_filter)(record)

    def json_filter(self, record):
        """Format the log record in json style.

        :param record: a log record instance
        :type record: logging.LogRecord
        :returns: always True to indicate the record should be handled
        :rtype: bool
        """
        # Merge time and level into the payload before serializing.
        record.msg['time'] = strtime()
        record.msg['level'] = record.levelname.lower()
        # stringify ensures every key/value survives json.dumps.
        record.msg = json.dumps(stringify(record.msg))
        return True

    def tty_filter(self, record):
        """Format the log record in tty style.

        :param record: a log record instance
        :type record: logging.LogRecord
        :returns: always True to indicate the record should be handled
        :rtype: bool
        """
        record.msg = stringify(record.msg)
        color = levelcolor(record.levelno)
        # Colorized header: LEVL[secs] msg, padded/truncated to 80 cols.
        header = '\x1b[{0}m{1}\x1b[0m[{2}] {3}'.format(color,
                                                       record.levelname[:4],
                                                       secs_since(starttime),
                                                       record.msg.get('msg', ''))
        header = '{0:<80}'.format(header)
        # Remaining entries become colorized k=v pairs after the header.
        extras = [' \x1b[{0}m{1}\x1b[0m={2}'.format(color, k, v)
                  for k, v in record.msg.items() if k != 'msg']
        record.msg = header + ''.join(extras)
        return True

    def text_filter(self, record):
        """Format the log record in text style.

        :param record: a log record instance
        :type record: logging.LogRecord
        :returns: always True to indicate the record should be handled
        :rtype: bool
        """
        record.msg['time'] = strtime()
        record.msg['level'] = record.levelname.lower()
        payload = stringify(record.msg)
        # logfmt-compatible output: space-separated k="v" pairs.
        record.msg = " ".join('{0}="{1}"'.format(k, v)
                              for k, v in payload.items())
        return True
class DictQueueHandler(QueueHandler):
    """A logging QueueHandler that does *not* convert dict msgs to strings.

    The stock QueueHandler calls self.format inside self.prepare to make
    records picklable, which would flatten dict msgs into strings and
    defeat DictLogFilter (see
    https://hg.python.org/cpython/file/3.5/Lib/logging/handlers.py#l1289).

    This handler instead makes records picklable while preserving dict
    structure: mappings and sequences in msg/args are recursively
    stringified, and exc_info is rendered to exc_text with
    self.formatter.formatException before being cleared.
    """  # noqa

    # Shared formatter, used only to render exception tracebacks.
    formatter = logging.Formatter()

    def prepare(self, record):
        """Prepare the log record for pickling without flattening dicts.

        Recursively stringify record.msg and record.args (keeping mapping
        and sequence structure), render record.exc_info into
        record.exc_text, and clear record.exc_info.

        :param record: the log record to prepare
        :type record: logging.LogRecord
        :returns: the prepared log record
        :rtype: logging.LogRecord
        """
        for attr in ('msg', 'args'):
            setattr(record, attr, stringify(getattr(record, attr)))
        exc = record.exc_info
        if exc:
            record.exc_text = self.formatter.formatException(exc)
            record.exc_info = None
        return record
def stringify(obj):
    """Recursively str() an object, leaving mappings and sequences.

    Strings pass through unchanged; mappings come back as dicts with
    stringified keys and values; other sequences come back as lists of
    stringified items; everything else is str()'d.

    :param obj: object to convert
    :returns: an equivalent structure built only from str, dict and list
    """
    if isinstance(obj, str):
        return obj
    # Python 2 ``unicode`` -> str.  Guarded with NameError so the function
    # also works where the module-level ``unicode`` alias is absent; on
    # Python 3 the alias is str, making this branch a no-op after the
    # isinstance(obj, str) check above.
    try:
        if isinstance(obj, unicode):
            return str(obj)
    except NameError:
        pass
    # BUG FIX: ``collections.Mapping``/``Sequence`` were removed in
    # Python 3.10; import from ``collections.abc`` with a Python 2
    # fallback to stay backward compatible.
    try:
        from collections.abc import Mapping, Sequence
    except ImportError:  # pragma: no cover - Python 2
        from collections import Mapping, Sequence
    if isinstance(obj, Mapping):
        return {str(k): stringify(v) for k, v in obj.items()}
    if isinstance(obj, Sequence):
        return [stringify(i) for i in obj]
    return str(obj)
| [
"datetime.datetime.utcfromtimestamp",
"datetime.datetime.fromtimestamp",
"logging.Formatter",
"json.dumps",
"time.localtime",
"time.time"
] | [((387, 398), 'time.time', 'time.time', ([], {}), '()\n', (396, 398), False, 'import time\n'), ((953, 964), 'time.time', 'time.time', ([], {}), '()\n', (962, 964), False, 'import time\n'), ((7285, 7304), 'logging.Formatter', 'logging.Formatter', ([], {}), '()\n', (7302, 7304), False, 'import logging\n'), ((1016, 1041), 'time.localtime', 'time.localtime', (['curr_time'], {}), '(curr_time)\n', (1030, 1041), False, 'import time\n'), ((4453, 4475), 'json.dumps', 'json.dumps', (['record.msg'], {}), '(record.msg)\n', (4463, 4475), False, 'import json\n'), ((755, 766), 'time.time', 'time.time', ([], {}), '()\n', (764, 766), False, 'import time\n'), ((1063, 1105), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['curr_time'], {}), '(curr_time)\n', (1094, 1105), False, 'import datetime\n'), ((1127, 1172), 'datetime.datetime.utcfromtimestamp', 'datetime.datetime.utcfromtimestamp', (['curr_time'], {}), '(curr_time)\n', (1161, 1172), False, 'import datetime\n')] |