index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
73,609 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/accountInfos/apps.py | from django.apps import AppConfig
class AccountinfosConfig(AppConfig):
    """Django app configuration for the accountInfos app."""
    name = 'accountInfos'
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,610 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/accountInfos/migrations/0004_auto_20200410_1533.py | # Generated by Django 3.0.3 on 2020-04-10 15:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: change AccountInfo.createTime to auto_now (do not edit)."""

    dependencies = [
        ('accountInfos', '0003_auto_20200410_1147'),
    ]

    operations = [
        migrations.AlterField(
            model_name='accountinfo',
            name='createTime',
            field=models.DateTimeField(auto_now=True),
        ),
    ]
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,611 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/islands/migrations/0005_island_rule.py | # Generated by Django 3.0.3 on 2020-04-12 11:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the optional free-text `rule` field to Island (do not edit)."""

    dependencies = [
        ('islands', '0004_auto_20200410_1548'),
    ]

    operations = [
        migrations.AddField(
            model_name='island',
            name='rule',
            field=models.TextField(blank=True, null=True),
        ),
    ]
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,612 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/islands/schema.py | import graphene
import datetime
from graphene_django import DjangoObjectType
from .models import Island
from accountInfos.models import AccountInfo
from accountInfos.schema import AccountInfoType
from django.db.models import Q
class IslandType(DjangoObjectType):
    """GraphQL type auto-derived from the Island model (exposes all model fields)."""
    class Meta:
        model = Island
class CreateIsland(graphene.Mutation):
    """GraphQL mutation: create an Island owned by the current user's AccountInfo.

    Raises if the request is unauthenticated or no AccountInfo exists for the user.
    """
    id = graphene.Int()
    accountInfo = graphene.Field(AccountInfoType)
    islandPassCode = graphene.String()
    location = graphene.String()
    hashTagDescription = graphene.String()
    rule = graphene.String()
    reportCount = graphene.Int()
    createTime = graphene.DateTime()
    close = graphene.Boolean()

    class Arguments:
        islandPassCode = graphene.String()
        location = graphene.String()
        hashTagDescription = graphene.String()
        rule = graphene.String()

    def mutate(self, info, islandPassCode, location, hashTagDescription, rule):
        user = info.context.user or None
        if user is None:
            raise Exception('You must be logged first!')
        if user.is_anonymous:
            raise Exception('You must be logged first!')
        # Bug fix: `user__id__contains` substring-matches the id (user 1 would
        # also match accounts of users 11, 21, ...); use an exact lookup.
        accountInfo = AccountInfo.objects.filter(user__id=user.id).first()
        if accountInfo is None:
            raise Exception('CreateIsland Fail -> cannot find accountInfo')
        island = Island(
            accountInfo=accountInfo,
            islandPassCode=islandPassCode,
            location=location,
            hashTagDescription=hashTagDescription,
            rule=rule,
            # NOTE: createTime is auto_now per the islands migrations, so this
            # value is overwritten on save; kept for backward compatibility.
            createTime=datetime.datetime.now(),
        )
        island.save()
        return CreateIsland(
            id=island.id,
            accountInfo=island.accountInfo,
            islandPassCode=island.islandPassCode,
            location=island.location,
            hashTagDescription=island.hashTagDescription,
            rule=rule,
            reportCount=island.reportCount,
            createTime=island.createTime,
            close=island.close,
        )
class ChangeIsland(graphene.Mutation):
    """GraphQL mutation: let an island's owner update its details or close it.

    Raises if unauthenticated, if the island has no owner, or if the caller
    is not the owner.
    """
    id = graphene.Int()
    accountInfo = graphene.Field(AccountInfoType)
    islandPassCode = graphene.String()
    location = graphene.String()
    hashTagDescription = graphene.String()
    reportCount = graphene.Int()
    createTime = graphene.DateTime()
    close = graphene.Boolean()

    class Arguments:
        id = graphene.Int()
        islandPassCode = graphene.String()
        location = graphene.String()
        hashTagDescription = graphene.String()
        close = graphene.Boolean()

    def mutate(self, info, id, islandPassCode, location, hashTagDescription, close):
        user = info.context.user or None
        if user is None:
            raise Exception('You must be logged first!')
        if user.is_anonymous:
            raise Exception('You must be logged first!')
        island = Island.objects.get(id=id)
        accountInfo = island.accountInfo
        if accountInfo is None:
            # Bug fix: message said 'CreateIsland Fail' (copy-paste from CreateIsland).
            raise Exception('ChangeIsland Fail -> cannot find accountInfo')
        if user.id != accountInfo.user.id:
            raise Exception('You are not the correct user!')
        island.islandPassCode = islandPassCode
        island.location = location
        island.hashTagDescription = hashTagDescription
        island.close = close
        island.save()
        return ChangeIsland(
            id=island.id,
            accountInfo=island.accountInfo,
            islandPassCode=island.islandPassCode,
            location=island.location,
            hashTagDescription=island.hashTagDescription,
            reportCount=island.reportCount,
            createTime=island.createTime,
            close=island.close,
        )
class DeleteIsland(graphene.Mutation):
    """GraphQL mutation: delete an island owned by the current user.

    Returns the deleted island's former field values.
    """
    id = graphene.Int()
    accountInfo = graphene.Field(AccountInfoType)
    islandPassCode = graphene.String()
    location = graphene.String()
    hashTagDescription = graphene.String()
    reportCount = graphene.Int()
    createTime = graphene.DateTime()
    close = graphene.Boolean()

    class Arguments:
        id = graphene.Int()

    def mutate(self, info, id):
        user = info.context.user or None
        if user is None:
            raise Exception('You must be logged first!')
        if user.is_anonymous:
            raise Exception('You must be logged first!')
        island = Island.objects.get(id=id)
        # Security fix: mirror ChangeIsland's ownership check — previously any
        # logged-in user could delete any island.
        accountInfo = island.accountInfo
        if accountInfo is not None and user.id != accountInfo.user.id:
            raise Exception('You are not the correct user!')
        # Bug fix: Django sets the instance pk/id to None after delete(), so the
        # original returned id=None; capture the id before deleting.
        deleted_id = island.id
        island.delete()
        return DeleteIsland(
            id=deleted_id,
            accountInfo=island.accountInfo,
            islandPassCode=island.islandPassCode,
            location=island.location,
            hashTagDescription=island.hashTagDescription,
            reportCount=island.reportCount,
            createTime=island.createTime,
            close=island.close,
        )
class Mutation(graphene.ObjectType):
    """Root mutations for the islands app: create, change and delete islands."""
    create_island = CreateIsland.Field()
    change_island = ChangeIsland.Field()
    delete_island = DeleteIsland.Field()
class Query(graphene.ObjectType):
    """Root query exposing `islands`, filterable by free text and open/closed state."""
    islands = graphene.List(IslandType, search=graphene.String(), close=graphene.Boolean())

    def resolve_islands(self, info, search=None, close=None, **kwargs):
        """Return islands matching `search` (location or hashtag, icontains)
        restricted to the given `close` state (defaults to open islands).
        """
        if close is None:
            close = False
        criteria = Q(close=close)
        if search is not None:
            # Match the search text against location or hashtag description.
            criteria &= (Q(location__icontains=search) |
                         Q(hashTagDescription__icontains=search))
        # Simplification: the original had two mutually-exclusive branches that
        # both filtered, plus an unreachable `return Island.objects.all()`.
        return Island.objects.filter(criteria)
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,613 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/islandReservations/models.py | import datetime
from django.db import models
from django.conf import settings
class IslandReservation(models.Model):
    """A user's reservation on an island (links AccountInfo to Island)."""
    # Deleting the island or the account cascades to its reservations.
    island = models.ForeignKey('islands.Island', related_name='reservation', on_delete=models.CASCADE)
    accountInfo = models.ForeignKey('accountInfos.AccountInfo', related_name='reservedIslands', on_delete=models.CASCADE)
    # auto_now: refreshed on every save, not only on creation.
    createTime = models.DateTimeField(auto_now=True, null=True)
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,614 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/islands/migrations/0002_auto_20200410_1147.py | # Generated by Django 3.0.3 on 2020-04-10 11:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alter Island.createTime to TimeField and make
    islandPassCode blank-able (do not edit)."""

    dependencies = [
        ('islands', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='island',
            name='createTime',
            field=models.TimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name='island',
            name='islandPassCode',
            field=models.TextField(blank=True),
        ),
    ]
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,615 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/buys/models.py |
from django.db import models
from django.conf import settings
class Buy(models.Model):
    """A buy request posted by a user: wants `numberOfItem` of `itemName` at `unitPrice`."""
    # Deleting the account cascades to its posted buys.
    accountInfo = models.ForeignKey('accountInfos.AccountInfo', related_name='postedBuys', on_delete=models.CASCADE)
    islandPassCode = models.TextField(blank=True, null=True)
    itemName = models.TextField(blank=True, null=True)
    numberOfItem = models.IntegerField(default=1, null=True)
    unitPrice = models.IntegerField(default=0, null=True)
    reportCount = models.IntegerField(default=0, null=True)
    # auto_now: refreshed on every save, not only on creation.
    createTime = models.DateTimeField(auto_now=True, null=True)
    close = models.BooleanField(default=False, null=True)
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,616 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/islandReservations/apps.py | from django.apps import AppConfig
class IslandreservationsConfig(AppConfig):
    """Django app configuration for the islandReservations app."""
    name = 'islandReservations'
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,617 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/sells/schema.py | import graphene
from graphene_django import DjangoObjectType
from .models import Sell
class SellType(DjangoObjectType):
    """GraphQL type auto-derived from the Sell model (exposes all model fields)."""
    class Meta:
        model = Sell
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,618 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/accountInfos/migrations/0005_auto_20200410_1545.py | # Generated by Django 3.0.3 on 2020-04-10 15:45
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make AccountInfo.createTime nullable (do not edit)."""

    dependencies = [
        ('accountInfos', '0004_auto_20200410_1533'),
    ]

    operations = [
        migrations.AlterField(
            model_name='accountinfo',
            name='createTime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
    ]
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,619 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/accountInfos/models.py |
from django.db import models
from django.conf import settings
class AccountInfo(models.Model):
    """Per-user profile data (Nintendo Switch ID) linked to the auth user."""
    # NOTE(review): related_name 'acountInfos' is misspelled ('account' -> 'acount');
    # fixing it would require touching every reverse-accessor, so it is left as-is.
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='acountInfos', on_delete=models.CASCADE)
    switchID = models.TextField(blank=True, null=True)
    # auto_now: refreshed on every save, not only on creation.
    createTime = models.DateTimeField(auto_now=True, null=True)
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,620 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/islands/migrations/0004_auto_20200410_1548.py | # Generated by Django 3.0.3 on 2020-04-10 15:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make most Island fields nullable and createTime a
    DateTimeField (do not edit)."""

    dependencies = [
        ('islands', '0003_auto_20200410_1533'),
    ]

    operations = [
        migrations.AlterField(
            model_name='island',
            name='close',
            field=models.BooleanField(default=False, null=True),
        ),
        migrations.AlterField(
            model_name='island',
            name='createTime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='island',
            name='hashTagDescription',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='island',
            name='islandPassCode',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='island',
            name='location',
            field=models.CharField(max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='island',
            name='reportCount',
            field=models.IntegerField(default=0, null=True),
        ),
    ]
| {"/animalcrossing_server/buys/schema.py": ["/animalcrossing_server/buys/models.py"], "/animalcrossing_server/islandReservations/schema.py": ["/animalcrossing_server/islandReservations/models.py"], "/animalcrossing_server/accountInfos/schema.py": ["/animalcrossing_server/accountInfos/models.py"], "/animalcrossing_server/islands/schema.py": ["/animalcrossing_server/islands/models.py"]} |
73,621 | paco-ambilab/animalcrossing_server | refs/heads/master | /animalcrossing_server/animalcrossing_server/schema.py | import graphene
import accountInfos.schema
import islands.schema
import buys.schema
import islandReservations.schema
class Query(accountInfos.schema.Query, islands.schema.Query, buys.schema.Query, graphene.ObjectType):
    """Project-level root query: mixes in each app's Query."""
    pass


class Mutation(accountInfos.schema.Mutation, islands.schema.Mutation, buys.schema.Mutation, islandReservations.schema.Mutation, graphene.ObjectType):
    """Project-level root mutation: mixes in each app's Mutation."""
    pass


# NOTE(review): islandReservations contributes a Mutation but no Query above — confirm intentional.
schema = graphene.Schema(query=Query, mutation=Mutation)
73,622 | Koubae/CSV-Reader | refs/heads/main | /01-simple/ssn_readers/vehicle_reader.py | import csv
from itertools import islice
from collections import namedtuple
from datetime import datetime
vehicles = r'ssn_readers\csv_dir\vehicles.csv'
def csv_extractor(file_name):
    """Yield the rows of *file_name* parsed as comma-separated, double-quoted CSV."""
    with open(file_name, 'r') as csv_file:
        yield from csv.reader(csv_file, delimiter=',', quotechar='"')
def vehicles_info_header():
    """Yield a namedtuple class whose fields are the vehicles CSV header row."""
    reader = csv_extractor(vehicles)
    header_rows = list(islice(reader, 1))
    yield namedtuple('PersonalInfo', *header_rows)
def vehicles_info_extractor():
    """Yield each data row of the vehicles CSV, skipping the header line."""
    yield from islice(csv_extractor(vehicles), 1, None)
def data_parser(row):
    """Coerce each string in *row* in place, then yield the row.

    Four-digit-year strings become datetimes ('1991' -> datetime(1991, 1, 1));
    digit strings (dashes treated as separators) become ints; anything else
    is left untouched.
    """
    for position, raw in enumerate(row):
        try:
            row[position] = datetime.strptime(raw, '%Y')
        except ValueError:
            try:
                # '-' -> '_' lets int() accept dash-separated digit groups.
                row[position] = int(raw.replace('-', '_'))
            except ValueError:
                pass
    yield row
def vehi_gen_row():
    """Yield one PersonalInfo namedtuple per data row of the vehicles CSV.

    Combines vehicles_info_header() (tuple class), vehicles_info_extractor()
    (raw rows) and data_parser() (field coercion).
    """
    header = next(vehicles_info_header())
    rows = vehicles_info_extractor()
    converted = data_parser(next(rows))
    while True:
        try:
            yield header(*next(converted))
            converted = data_parser(next(rows))
        except StopIteration as err:
            # Bug fix: was print(StopIteration) — printed the exception class
            # instead of the caught error, unlike the sibling readers.
            print(err)
            break
def vehi_output_values():
    """Yield each vehicles CSV row as a list with values coerced to Python types."""
    raw_rows = vehicles_info_extractor()
    parsed = data_parser(next(raw_rows))
    while True:
        try:
            yield next(parsed)
            parsed = data_parser(next(raw_rows))
        except StopIteration as err:
            print(err)
            break
73,623 | Koubae/CSV-Reader | refs/heads/main | /01-simple/ssn_readers/update_status_reader.py | import csv
from itertools import islice
from collections import namedtuple
from datetime import datetime
update_status = r'ssn_readers\csv_dir/update_status.csv'
def csv_extractor(file_name):
    """Yield the rows of *file_name* parsed as comma-separated, double-quoted CSV."""
    with open(file_name, 'r') as csv_file:
        yield from csv.reader(csv_file, delimiter=',', quotechar='"')
def update_status_header():
    """Yield a namedtuple class whose fields are the update_status CSV header row."""
    reader = csv_extractor(update_status)
    header_rows = list(islice(reader, 1))
    yield namedtuple('PersonalInfo', *header_rows)
def update_status_extractor():
    """Yield each data row of the update_status CSV, skipping the header line."""
    yield from islice(csv_extractor(update_status), 1, None)
def clean_date(date):
    """Replace the 'T' separator of an ISO-ish timestamp with a space.

    '2017-06-10T11:20:41Z' -> '2017-06-10 11:20:41Z'
    """
    return date.replace('T', ' ')
def data_parser(row):
    """Coerce each string in *row* in place, then yield the row.

    Timestamps like '2017-06-10T11:20:41Z' (normalised via clean_date) become
    datetime.date values; digit strings (dashes treated as separators) become
    ints; anything else is left untouched.
    """
    for position, raw in enumerate(row):
        try:
            row[position] = datetime.strptime(clean_date(raw), '%Y-%m-%d %H:%M:%S%z').date()
        except ValueError:
            try:
                # '-' -> '_' lets int() accept dash-separated digit groups.
                row[position] = int(raw.replace('-', '_'))
            except ValueError:
                pass
    yield row
def up_gen_row():
    """Yield one PersonalInfo namedtuple per data row of the update_status CSV.

    Combines update_status_header() (tuple class), update_status_extractor()
    (raw rows) and data_parser() (field coercion).
    """
    header = next(update_status_header())
    rows = update_status_extractor()
    converted = data_parser(next(rows))
    while True:
        try:
            yield header(*next(converted))
            converted = data_parser(next(rows))
        except StopIteration as err:
            # Bug fix: was print(StopIteration) — printed the exception class
            # instead of the caught error, unlike the sibling readers.
            print(err)
            break
def up_output_values():
    """Yield each update_status CSV row as a list with values coerced to Python types."""
    raw_rows = update_status_extractor()
    parsed = data_parser(next(raw_rows))
    while True:
        try:
            yield next(parsed)
            parsed = data_parser(next(raw_rows))
        except StopIteration as err:
            print(err)
            break
73,624 | Koubae/CSV-Reader | refs/heads/main | /03-simple-generators/main.py |
import csv
from itertools import islice
from collections import namedtuple
f_names = 'cars.csv', 'personal_info.csv'
def get_dialect(f_name):
    """Sniff and return the CSV dialect from the first 1000 characters of *f_name*."""
    with open(f_name) as sample_file:
        sample = sample_file.read(1000)
    return csv.Sniffer().sniff(sample)
class FileParser:
    """Context-managed CSV reader that yields one namedtuple per data row.

    The header row (lower-cased) defines the tuple fields; iteration stops
    once the context exits and the file is closed.
    """

    def __init__(self, f_name):
        self.f_name = f_name

    def __enter__(self):
        self._f = open(self.f_name, 'r')
        self._reader = csv.reader(self._f, get_dialect(self.f_name))
        lowered = (name.lower() for name in next(self._reader))
        self._nt = namedtuple('Data', lowered)
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self._f.close()
        return False

    def __iter__(self):
        return self

    def __next__(self):
        if self._f.closed:
            # File closed by __exit__ — nothing left to iterate.
            raise StopIteration
        return self._nt(*next(self._reader))
# Demo: print the first three rows of each sample CSV file.
with FileParser('cars.csv') as data:
    for row in islice(data, 3):
        print(row)

with FileParser('personal_info.csv') as data:
    for row in islice(data, 3):
        print(row)
| {"/01-simple/ssn_readers/__init__.py": ["/01-simple/ssn_readers/personal_info_reader.py", "/01-simple/ssn_readers/vehicle_reader.py", "/01-simple/ssn_readers/update_status_reader.py", "/01-simple/ssn_readers/employment_reader.py"]} |
73,625 | Koubae/CSV-Reader | refs/heads/main | /01-simple/tests.py | from csv_reader import *
def test_csv_read():
    """Smoke-test the csv_reader pipeline end to end.

    NOTE(review): the header assertions below are commented out because the
    module-level h_* generators are one-shot and get exhausted elsewhere.
    """
    # 4 Lazy iterators, they extracts data from CSV file
    assert list(person)
    assert list(vehicle)
    assert list(update)
    assert list(employment)
    # Lazy Iterators, getting Header from 4 CSV, got exhausted
    # assert next(h_person)._fields
    # assert next(h_vehicle)._fields
    # assert next(h_update)._fields
    # assert next(h_employment)._fields
    # headers = combined_headers()
    # print(headers)
    # ['ssn', 'first_name', 'last_name', 'gender', 'language', 'employer',
    # 'department', 'employee_id', 'vehicle_make', 'vehicle_model',
    # 'model_year', 'last_updated', 'created']
    # employees_all = employee_extractor()
    # assert list(employees_all)
    employee = generate_employee()
    print(list(employee))


if __name__ == '__main__':
    test_csv_read()
73,626 | Koubae/CSV-Reader | refs/heads/main | /01-simple/csv_reader.py | import ssn_readers
from collections import namedtuple
from itertools import islice, chain, starmap, zip_longest, takewhile
from datetime import datetime
#
# __all__ = [personal_info, employment, update_status, vehicles,
# per_gen_row, vehi_gen_row, up_gen_row,employment_gen_row]
# personal_info_header, vehicles_info_header, update_status_header, employment_header
# Row generators (one namedtuple per CSV data row), one per source file.
person = ssn_readers.per_gen_row()
vehicle = ssn_readers.vehi_gen_row()
update = ssn_readers.up_gen_row()
employment = ssn_readers.employment_gen_row()

# Header generators. These are one-shot iterators: consuming them (e.g. via
# combined_headers) exhausts them for the rest of the process.
h_person = ssn_readers.personal_info_header()
h_vehicle = ssn_readers.vehicles_info_header()
h_update = ssn_readers.update_status_header()
h_employment = ssn_readers.employment_header()

# Accumulators filled as a side effect of iterating split_data_dates().
current_record_array = list()
state_record_array = list()
def combined_headers():
    """Return the union of all four CSV header field names, first-seen order kept.

    Consumes the one-shot module-level h_* header generators.
    """
    field_groups = [nt._fields for nt in chain(h_person, h_employment, h_vehicle, h_update)]
    flattened = chain.from_iterable(field_groups)
    return list(dict.fromkeys(flattened, None).keys())
def combined_headers_forloop():
    """Same result as combined_headers(), written with explicit loops.

    Consumes the one-shot module-level h_* header generators.
    """
    seen = list()
    field_groups = [nt._fields for nt in chain(h_person, h_employment, h_vehicle, h_update)]
    for group in field_groups:
        for name in group:
            if name not in seen:
                seen.append(name)
    return seen
def employee_header(headers):
    """Yield an `Employee` namedtuple class with the given field names."""
    yield namedtuple('Employee', headers)
def employee_extractor():
    """Merge one row from each of the four CSV readers into a de-duplicated list.

    Stops (printing the StopIteration) as soon as any source is exhausted.
    """
    personal = ssn_readers.info_output_values()
    jobs = ssn_readers.employment_output_values()
    cars = ssn_readers.vehi_output_values()
    updates = ssn_readers.up_output_values()
    while True:
        try:
            merged = chain.from_iterable(
                [next(personal), next(jobs), next(cars), next(updates)])
            yield list(dict.fromkeys(merged, None))
        except StopIteration as err:
            print(err)
            break
def generate_employee():
    """Yield an Employee namedtuple for every merged row across the four CSVs."""
    Employee = next(employee_header(combined_headers()))
    merged_rows = employee_extractor()
    while True:
        try:
            yield Employee(*next(merged_rows))
        except StopIteration as err:
            print(err)
            break
def state_record(employee_record):
    """Write each record in *employee_record* to state_record.txt, one per line.

    NOTE(review): the original docstring described a lazy iterator filtering on
    last_updated < 3/1/2017; the code simply dumps whatever it is given.
    """
    with open('state_record.txt', 'w+') as out:
        for record in employee_record:
            out.write(f'{record}\n')
def current_record(employee_record):
    """Write each record in *employee_record* to current_record.txt, one per line.

    NOTE(review): the filtering on last_updated >= 3/1/2017 described here is
    done by the caller (split_data_dates); this function just dumps its input.
    """
    # Bug fix: removed stray debug print('here').
    with open('current_record.txt', 'w+') as out:
        for record in employee_record:
            out.write(str(record) + '\n')
# TODO create lazy iterator that extracts data from current_record_array & state_record_array
def split_data_dates(range_=None):
    """Split employee rows on last_updated and write the two record files.

    Appends each Employee to the module-level state_record_array
    (last_updated < sentinel) or current_record_array (otherwise), then yields
    the results of writing current_record.txt and state_record.txt.

    NOTE(review): the generator body only runs when iterated — calling
    split_data_dates() without consuming it writes nothing.

    :param range_: int, maximum number of rows to consume (None/0 -> all rows)
    :return: yields the return values of current_record() / state_record()
    """
    # '%d/%m/%Y' parses '3/1/2017' day-first, i.e. 3 January 2017; the
    # docstring's "3/1/2017" could also read month-first — confirm cutoff.
    sentinel_date = datetime.strptime('3/1/2017', '%d/%m/%Y').date()
    employee = generate_employee()
    global current_record_array
    global state_record_array
    if not range_:
        # Consume every row.
        for data in employee:
            if data.last_updated < sentinel_date:
                state_record_array.append(data)
            else:
                current_record_array.append(data)
    else:
        # Consume at most range_ rows.
        for index in range(range_):
            try:
                row = next(employee)
                if row.last_updated < sentinel_date:
                    state_record_array.append(row)
                else:
                    current_record_array.append(row)
            except StopIteration as err:
                print(err)
                pass
    yield current_record(current_record_array)
    yield state_record(state_record_array)
def max_gender_per_carmake():
    """Return the most common vehicle make per gender across all employees.

    Result maps 'Male'/'Female' to {make: count}. Behavior of the original is
    preserved exactly: male ties keep only the last tied make seen, while
    female ties are accumulated into nested tuples.
    """
    male_counts = dict()
    female_counts = dict()
    for emp in generate_employee():
        make = emp.vehicle_make
        gender = emp.gender.lower()
        if gender == 'male':
            male_counts[make] = male_counts.get(make, 0) + 1 if male_counts.get(make) else 1
        elif gender == 'female':
            female_counts[make] = female_counts.get(make, 0) + 1 if female_counts.get(make) else 1
    top_male = max(male_counts.values())
    top_female = max(female_counts.values())
    result = dict()
    for make, count in male_counts.items():
        if count == top_male:
            result['Male'] = {make: count}
    for make, count in female_counts.items():
        if count == top_female:
            if result.get('Female'):
                previous = result.get('Female')
                result['Female'] = ({make: count}, previous)
            else:
                result['Female'] = {make: count}
    return result
| {"/01-simple/ssn_readers/__init__.py": ["/01-simple/ssn_readers/personal_info_reader.py", "/01-simple/ssn_readers/vehicle_reader.py", "/01-simple/ssn_readers/update_status_reader.py", "/01-simple/ssn_readers/employment_reader.py"]} |
73,627 | Koubae/CSV-Reader | refs/heads/main | /01-simple/ssn_readers/personal_info_reader.py | import csv
from itertools import islice
from collections import namedtuple
from datetime import datetime
personal_info = r'ssn_readers\csv_dir\personal_info.csv'
def csv_extractor(file_name):
    """Yield the rows of *file_name* parsed as comma-separated, double-quoted CSV."""
    with open(file_name, 'r') as csv_file:
        yield from csv.reader(csv_file, delimiter=',', quotechar='"')
def personal_info_header():
    """Yield a namedtuple class whose fields are the personal_info CSV header row."""
    reader = csv_extractor(personal_info)
    header_rows = list(islice(reader, 1))
    yield namedtuple('PersonalInfo', *header_rows)
def personal_info_extractor():
    """Yield each data row of the personal_info CSV, skipping the header line."""
    yield from islice(csv_extractor(personal_info), 1, None)
def data_parser(row):
    """Coerce each string in *row* in place, then yield the row.

    ddmmYYYY strings become dates ('15011991' -> date(1991, 1, 15)); digit
    strings (dashes treated as separators, e.g. ssn '123-45-678') become ints;
    anything else is left untouched.
    """
    for position, raw in enumerate(row):
        try:
            row[position] = datetime.strptime(raw, '%d%m%Y').date()
        except ValueError:
            try:
                # '-' -> '_' lets int() accept dash-separated digit groups.
                row[position] = int(raw.replace('-', '_'))
            except ValueError:
                pass
    yield row
def per_gen_row():
    """Yield one PersonalInfo namedtuple per data row of the personal_info CSV.

    Combines personal_info_header() (tuple class), personal_info_extractor()
    (raw rows) and data_parser() (field coercion).
    """
    tuple_cls = next(personal_info_header())
    raw_rows = personal_info_extractor()
    parsed = data_parser(next(raw_rows))
    while True:
        try:
            yield tuple_cls(*next(parsed))
            parsed = data_parser(next(raw_rows))
        except StopIteration as err:
            print(err)
            break
def info_output_values():
    """Yield each personal_info CSV row as a list with values coerced to Python types."""
    raw_rows = personal_info_extractor()
    parsed = data_parser(next(raw_rows))
    while True:
        try:
            yield next(parsed)
            parsed = data_parser(next(raw_rows))
        except StopIteration as err:
            print(err)
            break
| {"/01-simple/ssn_readers/__init__.py": ["/01-simple/ssn_readers/personal_info_reader.py", "/01-simple/ssn_readers/vehicle_reader.py", "/01-simple/ssn_readers/update_status_reader.py", "/01-simple/ssn_readers/employment_reader.py"]} |
73,628 | Koubae/CSV-Reader | refs/heads/main | /01-simple/ssn_readers/__init__.py | from .personal_info_reader import per_gen_row, personal_info_header, info_output_values
from .vehicle_reader import vehi_gen_row, vehicles_info_header, vehi_output_values
from .update_status_reader import up_gen_row, update_status_header, up_output_values
from .employment_reader import employment_gen_row, employment_header, employment_output_values
csv_files = {
'personal_info': 'csv_dir/personal_info.csv',
'employment': 'csv_dir/employment.csv',
'update_status': 'csv_dir/update_status.csv',
'vehicles': 'csv_dir/vehicles.csv'
}
personal_info = csv_files['personal_info']
employment = csv_files['employment']
update_status = csv_files['update_status']
vehicles = csv_files['vehicles']
__all__ = [personal_info, employment, update_status, vehicles,
per_gen_row, vehi_gen_row, up_gen_row,employment_gen_row,
personal_info_header, vehicles_info_header, update_status_header, employment_header,
info_output_values, vehi_output_values, up_output_values, employment_output_values]
| {"/01-simple/ssn_readers/__init__.py": ["/01-simple/ssn_readers/personal_info_reader.py", "/01-simple/ssn_readers/vehicle_reader.py", "/01-simple/ssn_readers/update_status_reader.py", "/01-simple/ssn_readers/employment_reader.py"]} |
73,629 | Koubae/CSV-Reader | refs/heads/main | /01-simple/ssn_readers/employment_reader.py | import csv
from itertools import islice
from collections import namedtuple
from datetime import datetime
# Path to the employment CSV (raw string: Windows-style path separators).
employment = r'ssn_readers\csv_dir\employment.csv'
def csv_extractor(file_name):  # General CSV extractor
    """Yield each row of *file_name* as a list of string fields."""
    with open(file_name, 'r') as source:
        reader = csv.reader(source, delimiter=',', quotechar='"')
        for record in reader:
            yield record
def sort_columns(iterable):
    """Return the items of *iterable* as a new list in reversed order."""
    return list(iterable)[::-1]
def employment_header():
    """
    Build the namedtuple class describing the employment CSV header.

    :return: yields a namedtuple class whose fields are the CSV columns.
    """
    # Employer has the ssn at the end; sorting the field names by length
    # moves it to the front (matches the original behaviour).
    rows = csv_extractor(employment)
    first_rows = [header for header in islice(rows, 1)]
    fields = sorted(first_rows[0], key=len)
    PersonalInfo = namedtuple('PersonalInfo', fields)
    yield PersonalInfo
def employment_extractor():
    """
    Extract all data rows of the employment CSV, skipping the header.

    :return: yields each CSV row as a list of string fields.
    """
    yield from islice(csv_extractor(employment), 1, None)
def data_parser(row):
    """
    Convert each field of *row* (in place) to a Python data type.

    Fields that parse as '%d%m%Y' dates become datetime.date; otherwise
    fields whose dashes can be treated as digit separators become int
    (e.g. an SSN '123-45-6789' -> 123456789); anything else is unchanged.

    :param row: list of string fields
    :return: yields the converted list exactly once
    """
    for position, field in enumerate(row):
        try:
            row[position] = datetime.strptime(field, '%d%m%Y').date()
            continue  # date parsed; do not attempt the int conversion
        except ValueError:
            pass
        try:
            # '-' -> '_' lets int() accept dash-separated digit groups
            # (PEP 515 underscores in numeric literals).
            row[position] = int(field.replace('-', '_'))
        except ValueError:
            pass  # leave the field as its original string
    yield row
def employment_gen_row():
    """
    Create a namedtuple from each data row in the employment CSV.

    Uses employment_header() for the row class, employment_extractor() for
    the raw rows, and data_parser() for type conversion.

    :return: yields a PersonalInfo namedtuple for each row.
    """
    header = next(employment_header())
    # A for-loop replaces the next()-in-while pattern: it cannot trigger
    # PEP 479's RuntimeError on an empty file, and it removes the leftover
    # debug statement that printed the StopIteration *class* (not the
    # caught exception instance) on normal exhaustion.
    for raw_row in employment_extractor():
        yield header(*next(data_parser(raw_row)))
def employment_output_values():
    """
    Yield every data row of the employment CSV with fields converted to
    Python data types and columns reversed (see sort_columns).

    :return: Yields a list for each CSV row with converted data.
    """
    # for-loop iteration avoids PEP 479 RuntimeError on an empty file and
    # drops the leftover debug print on normal exhaustion.
    for raw_row in employment_extractor():
        yield sort_columns(next(data_parser(raw_row)))
| {"/01-simple/ssn_readers/__init__.py": ["/01-simple/ssn_readers/personal_info_reader.py", "/01-simple/ssn_readers/vehicle_reader.py", "/01-simple/ssn_readers/update_status_reader.py", "/01-simple/ssn_readers/employment_reader.py"]} |
73,630 | Koubae/CSV-Reader | refs/heads/main | /03-simple-generators/context_manager.py | import csv
from collections import namedtuple
from contextlib import contextmanager
from itertools import islice
def get_dialect(f_name):
    """Sniff and return the CSV dialect from the first 1000 chars of *f_name*."""
    with open(f_name) as sample_file:
        sample = sample_file.read(1000)
    return csv.Sniffer().sniff(sample)
@contextmanager
def parsed_data(f_name):
    """
    Context manager that opens *f_name*, parses it as CSV and yields a lazy
    iterator of namedtuple rows.  The file is closed on exit.
    """
    f = open(f_name, 'r')
    try:
        reader = csv.reader(f, get_dialect(f_name))
        headers = map(lambda x: x.lower(), next(reader))
        nt = namedtuple('Data', headers)
        # The original called parsed_data_iter(), which is not defined at
        # this point in the file (NameError on use); a generator expression
        # provides the same lazy row-to-namedtuple iteration.
        yield (nt(*row) for row in reader)
    finally:
        f.close()
# Demo: parse personal_info.csv and walk the first five rows.
# NOTE(review): assumes 'personal_info.csv' exists in the working directory.
with parsed_data('personal_info.csv') as data:
    for row in islice(data, 5):
        #print(row)
        x = row  # keep a reference to the last row seen; printing disabled
@contextmanager
def parsed_data(f_name):
    """
    Context manager yielding a lazy iterator of namedtuple rows parsed from
    the CSV file *f_name* (dialect sniffed from the first 1000 characters).
    The file is closed on exit.
    """
    def sniff(path):
        # Dialect detection reads a sample from a separate file handle.
        with open(path) as sample:
            return csv.Sniffer().sniff(sample.read(1000))

    def as_tuples(reader, row_type):
        # Lazily wrap each raw CSV row in the namedtuple type.
        for fields in reader:
            yield row_type(*fields)

    handle = open(f_name, 'r')
    try:
        reader = csv.reader(handle, sniff(f_name))
        header = [name.lower() for name in next(reader)]
        row_type = namedtuple('Data', header)
        yield as_tuples(reader, row_type)
    finally:
        handle.close()
# Demo: show the first five parsed rows of each file, with a separator
# printed after each file.  The inner loop's print had been commented out,
# leaving the loop without a body (broken block); it is restored here.
# NOTE(review): assumes 'cars.csv' and 'personal_info.csv' exist.
f_names = 'cars.csv', 'personal_info.csv'
for f_name in f_names:
    with parsed_data(f_name) as data:
        for row in islice(data, 5):
            print(row)
    print('-------------------')
@contextmanager
def parsed_data(f_name):
    """
    Context manager yielding a lazy iterator of namedtuple rows parsed from
    the CSV file *f_name*.  A single file handle is used: the dialect is
    sniffed from the first 1000 characters, then the handle is rewound and
    parsed.  The file is closed on exit.
    """
    source = open(f_name, 'r')
    try:
        dialect = csv.Sniffer().sniff(source.read(1000))
        source.seek(0)  # rewind after sniffing so the header row is re-read
        reader = csv.reader(source, dialect)
        field_names = [name.lower() for name in next(reader)]
        Data = namedtuple('Data', field_names)
        yield (Data(*fields) for fields in reader)
    finally:
        source.close()
# Demo: print the first five parsed rows of each file followed by a
# separator line.
# NOTE(review): assumes 'cars.csv' and 'personal_info.csv' exist in the
# working directory.
f_names = 'cars.csv', 'personal_info.csv'
for f_name in f_names:
    with parsed_data(f_name) as data:
        for row in islice(data, 5):
            print(row)
        print('-------------------')
73,636 | cea56/chromium | refs/heads/master | /third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import web_idl
from . import name_style
from .code_node import CodeNode
from .code_node import SymbolDefinitionNode
from .code_node import SymbolNode
from .code_node import SymbolScopeNode
from .code_node import TextNode
from .code_node import UnlikelyExitNode
from .codegen_format import format_template as _format
def blink_class_name(idl_definition):
    """
    Returns the class name of Blink implementation.
    """
    # [ImplementedAs] overrides the class name derived from the IDL
    # identifier.  The original used a bare `except:` around the attribute
    # access; an explicit None check makes the fallback intentional and
    # stops unrelated exceptions from being swallowed.
    ext_attr = idl_definition.extended_attributes.get("ImplementedAs")
    if ext_attr is not None:
        class_name = ext_attr.value
    else:
        class_name = idl_definition.identifier

    if isinstance(idl_definition,
                  (web_idl.CallbackFunction, web_idl.CallbackInterface)):
        # Callback classes get a "V8" prefix.
        return name_style.class_("v8", class_name)
    else:
        return name_style.class_(class_name)
def blink_type_info(idl_type):
    """
    Returns the types of Blink implementation corresponding to the given IDL
    type.  The returned object has the following attributes.

      member_t: The type of a member variable.  E.g. T => Member<T>
      ref_t: The type of a local variable that references to an
          already-existing value.  E.g. String => String&
      value_t: The type of a variable that behaves as a value.  E.g.
          String => String
      is_nullable: True if the Blink implementation type can represent IDL
          null value by itself.
    """
    assert isinstance(idl_type, web_idl.IdlType)

    class TypeInfo(object):
        # Small record describing how the Blink type is spelled in each of
        # the three usage positions (member / reference / value).
        def __init__(self,
                     typename,
                     member_fmt="{}",
                     ref_fmt="{}",
                     value_fmt="{}",
                     is_nullable=False):
            self.member_t = member_fmt.format(typename)
            self.ref_t = ref_fmt.format(typename)
            self.value_t = value_fmt.format(typename)
            # Whether Blink impl type can represent IDL null or not.
            self.is_nullable = is_nullable

    real_type = idl_type.unwrap(typedef=True)

    if real_type.is_boolean or real_type.is_numeric:
        # Primitive IDL types map 1:1 onto fixed-width C++ types.
        cxx_type = {
            "boolean": "bool",
            "byte": "int8_t",
            "octet": "uint8_t",
            "short": "int16_t",
            "unsigned short": "uint16_t",
            "long": "int32_t",
            "unsigned long": "uint32_t",
            "long long": "int64_t",
            "unsigned long long": "uint64_t",
            "float": "float",
            "unrestricted float": "float",
            "double": "double",
            "unrestricted double": "double",
        }
        return TypeInfo(cxx_type[real_type.keyword_typename])

    if real_type.is_string:
        # String is marked nullable: it can represent IDL null by itself.
        return TypeInfo("String", ref_fmt="{}&", is_nullable=True)

    if real_type.is_symbol:
        assert False, "Blink does not support/accept IDL symbol type."

    if real_type.is_any or real_type.is_object:
        return TypeInfo("ScriptValue", ref_fmt="{}&", is_nullable=True)

    if real_type.is_void:
        assert False, "Blink does not support/accept IDL void type."

    if real_type.type_definition_object is not None:
        # User-defined types (interfaces etc.): held via Member<T> as a
        # member and passed around as raw pointers.
        type_def_obj = real_type.type_definition_object
        blink_impl_type = (
            type_def_obj.code_generator_info.receiver_implemented_as
            or name_style.class_(type_def_obj.identifier))
        return TypeInfo(
            blink_impl_type,
            member_fmt="Member<{}>",
            ref_fmt="{}*",
            value_fmt="{}*",
            is_nullable=True)

    if (real_type.is_sequence or real_type.is_frozen_array
            or real_type.is_variadic):
        element_type = blink_type_info(real_type.element_type)
        return TypeInfo(
            "VectorOf<{}>".format(element_type.value_t), ref_fmt="{}&")

    if real_type.is_record:
        key_type = blink_type_info(real_type.key_type)
        value_type = blink_type_info(real_type.value_type)
        return TypeInfo(
            "VectorOfPairs<{}, {}>".format(key_type.value_t,
                                           value_type.value_t),
            ref_fmt="{}&")

    if real_type.is_promise:
        return TypeInfo("ScriptPromise", ref_fmt="{}&")

    if real_type.is_union:
        # Placeholder: union support is not implemented yet.
        return TypeInfo("ToBeImplementedUnion")

    if real_type.is_nullable:
        inner_type = blink_type_info(real_type.inner_type)
        if inner_type.is_nullable:
            # The inner Blink type already has a null state; reuse it
            # instead of wrapping it in base::Optional.
            return inner_type
        return TypeInfo(
            "base::Optional<{}>".format(inner_type.value_t), ref_fmt="{}&")
    # NOTE(review): any IDL type not matched above falls through and returns
    # None implicitly -- confirm all reachable types are covered.
def native_value_tag(idl_type):
    """Returns the tag type of NativeValueTraits."""
    assert isinstance(idl_type, web_idl.IdlType)

    real_type = idl_type.unwrap(typedef=True)

    if (real_type.is_boolean or real_type.is_numeric or real_type.is_string
            or real_type.is_any or real_type.is_object):
        # Tag names follow the IDL type name, e.g. "Boolean" => IDLBoolean.
        return "IDL{}".format(real_type.type_name)

    if real_type.is_symbol:
        assert False, "Blink does not support/accept IDL symbol type."

    if real_type.is_void:
        assert False, "Blink does not support/accept IDL void type."

    if real_type.type_definition_object is not None:
        # User-defined types use the Blink value type itself as the tag.
        return blink_type_info(real_type).value_t

    if real_type.is_sequence:
        return "IDLSequence<{}>".format(
            native_value_tag(real_type.element_type))

    if real_type.is_record:
        return "IDLRecord<{}, {}>".format(
            native_value_tag(real_type.key_type),
            native_value_tag(real_type.value_type))

    if real_type.is_promise:
        return "IDLPromise"

    if real_type.is_union:
        return blink_type_info(real_type).value_t

    if real_type.is_nullable:
        return "IDLNullable<{}>".format(native_value_tag(real_type.inner_type))
    # NOTE(review): frozen arrays (handled in blink_type_info) and any other
    # unmatched types fall through and return None implicitly -- confirm
    # this is intentional.
def make_v8_to_blink_value(blink_var_name,
                           v8_value_expr,
                           idl_type,
                           default_value=None):
    """
    Returns a SymbolNode whose definition converts a v8::Value to a Blink
    value via NativeValueTraits, bailing out of the callback when the
    conversion throws.
    """
    assert isinstance(blink_var_name, str)
    assert isinstance(v8_value_expr, str)
    assert isinstance(idl_type, web_idl.IdlType)
    assert (default_value is None
            or isinstance(default_value, web_idl.LiteralConstant))

    native_value_args = ", ".join(
        ["${isolate}", v8_value_expr, "${exception_state}"])
    text = _format(
        "const auto& ${{{_1}}} = NativeValueTraits<{_2}>::NativeValue({_3});",
        _1=blink_var_name,
        _2=native_value_tag(idl_type),
        _3=native_value_args)

    def create_definition(symbol_node):
        # Conversion failure throws into ${exception_state}; exit early.
        exit_on_exception = UnlikelyExitNode(
            cond=TextNode("${exception_state}.HadException()"),
            body=SymbolScopeNode([TextNode("return;")]))
        return SymbolDefinitionNode(symbol_node,
                                    [TextNode(text), exit_on_exception])

    return SymbolNode(blink_var_name, definition_constructor=create_definition)
def make_v8_to_blink_value_variadic(blink_var_name, v8_array,
                                    v8_array_start_index, idl_type):
    """
    Returns a SymbolNode whose definition converts an array of v8::Value
    (variadic arguments) into a Blink value, bailing out of the callback
    when the conversion throws.
    """
    assert isinstance(blink_var_name, str)
    assert isinstance(v8_array, str)
    assert isinstance(v8_array_start_index, (int, long))
    assert isinstance(idl_type, web_idl.IdlType)

    text = _format(
        "const auto& ${{{_1}}} = ToImplArguments<{_2}>({_3});",
        _1=blink_var_name,
        _2=native_value_tag(idl_type.element_type),
        _3=", ".join(
            [v8_array, str(v8_array_start_index), "${exception_state}"]))

    def create_definition(symbol_node):
        # Conversion failure throws into ${exception_state}; exit early.
        exit_on_exception = UnlikelyExitNode(
            cond=TextNode("${exception_state}.HadException()"),
            body=SymbolScopeNode([TextNode("return;")]))
        return SymbolDefinitionNode(symbol_node,
                                    [TextNode(text), exit_on_exception])

    return SymbolNode(blink_var_name, definition_constructor=create_definition)
| {"/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py", "/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py"], "/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py"]} |
73,637 | cea56/chromium | refs/heads/master | /third_party/blink/renderer/bindings/scripts/bind_gen/interface.py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import itertools
import os.path

import web_idl

from . import name_style
from .blink_v8_bridge import blink_class_name
from .blink_v8_bridge import blink_type_info
from .blink_v8_bridge import make_v8_to_blink_value
from .blink_v8_bridge import make_v8_to_blink_value_variadic
from .code_node import SequenceNode
from .code_node import SymbolDefinitionNode
from .code_node import SymbolNode
from .code_node import SymbolScopeNode
from .code_node import TextNode
from .code_node import UnlikelyExitNode
from .code_node_cxx import CxxBreakableBlockNode
from .code_node_cxx import CxxFuncDefNode
from .code_node_cxx import CxxLikelyIfNode
from .code_node_cxx import CxxUnlikelyIfNode
from .codegen_accumulator import CodeGenAccumulator
from .codegen_context import CodeGenContext
from .codegen_expr import expr_from_exposure
from .codegen_expr import expr_or
from .codegen_format import format_template as _format
from .codegen_utils import collect_include_headers
from .codegen_utils import enclose_with_namespace
from .codegen_utils import make_copyright_header
from .codegen_utils import make_header_include_directives
from .codegen_utils import write_code_node_to_file
from .mako_renderer import MakoRenderer
def bind_blink_api_arguments(code_node, cg_context):
    """
    Registers a code symbol (argN_<name>) for each IDL argument so that the
    v8-to-Blink conversion is lazily emitted where first used.  Attribute
    getters need no arguments; setters use a single "arg1_value".
    """
    assert isinstance(code_node, SymbolScopeNode)
    assert isinstance(cg_context, CodeGenContext)

    if cg_context.attribute_get:
        return

    if cg_context.attribute_set:
        code_node.register_code_symbol(
            make_v8_to_blink_value("arg1_value", "${info}[0]",
                                   cg_context.attribute.idl_type))
        return

    for arg_number, argument in enumerate(cg_context.function_like.arguments,
                                          1):
        blink_arg_name = name_style.arg_f("arg{}_{}", arg_number,
                                          argument.identifier)
        if argument.is_variadic:
            symbol = make_v8_to_blink_value_variadic(
                blink_arg_name, "${info}", arg_number - 1, argument.idl_type)
        else:
            v8_value = "${{info}}[{}]".format(argument.index)
            symbol = make_v8_to_blink_value(blink_arg_name, v8_value,
                                            argument.idl_type,
                                            argument.default_value)
        code_node.register_code_symbol(symbol)
def bind_callback_local_vars(code_node, cg_context):
    """
    Registers symbol nodes for the local variables commonly used in callback
    bodies (isolate, contexts, script states, exception state, receiver,
    etc.) so their definitions are lazily emitted on first use.
    """
    assert isinstance(code_node, SymbolScopeNode)
    assert isinstance(cg_context, CodeGenContext)

    S = SymbolNode
    T = TextNode

    local_vars = []
    template_vars = {}

    local_vars.extend([
        S("class_like_name", ("const char* const ${class_like_name} = "
                              "\"${class_like.identifier}\";")),
        S("current_context", ("v8::Local<v8::Context> ${current_context} = "
                              "${isolate}->GetCurrentContext();")),
        S("current_execution_context",
          ("ExecutionContext* ${current_execution_context} = "
           "ExecutionContext::From(${current_script_state});")),
        S("current_script_state", ("ScriptState* ${current_script_state} = "
                                   "ScriptState::From(${current_context});")),
        S("execution_context", ("ExecutionContext* ${execution_context} = "
                                "ExecutionContext::From(${script_state});")),
        S("isolate", "v8::Isolate* ${isolate} = ${info}.GetIsolate();"),
        S("per_context_data", ("V8PerContextData* ${per_context_data} = "
                               "${script_state}->PerContextData();")),
        S("per_isolate_data", ("V8PerIsolateData* ${per_isolate_data} = "
                               "V8PerIsolateData::From(${isolate});")),
        S("property_name",
          "const char* const ${property_name} = \"${property.identifier}\";"),
        S("v8_receiver",
          "v8::Local<v8::Object> ${v8_receiver} = ${info}.This();"),
        S("receiver_context", ("v8::Local<v8::Context> ${receiver_context} = "
                               "${v8_receiver}->CreationContext();")),
        S("receiver_script_state",
          ("ScriptState* ${receiver_script_state} = "
           "ScriptState::From(${receiver_context});")),
    ])

    # Static members operate in the current (caller's) context; instance
    # members operate in the receiver's creation context.
    is_receiver_context = (cg_context.member_like
                           and not cg_context.member_like.is_static)

    # creation_context
    pattern = "const v8::Local<v8::Context>& ${creation_context} = {_1};"
    _1 = "${receiver_context}" if is_receiver_context else "${current_context}"
    local_vars.append(S("creation_context", _format(pattern, _1=_1)))

    # creation_context_object
    text = ("${v8_receiver}"
            if is_receiver_context else "${current_context}->Global()")
    template_vars["creation_context_object"] = T(text)

    # script_state
    pattern = "ScriptState* ${script_state} = {_1};"
    _1 = ("${receiver_script_state}"
          if is_receiver_context else "${current_script_state}")
    local_vars.append(S("script_state", _format(pattern, _1=_1)))

    # exception_state_context_type
    pattern = (
        "const ExceptionState::ContextType ${exception_state_context_type} = "
        "{_1};")
    if cg_context.attribute_get:
        _1 = "ExceptionState::kGetterContext"
    elif cg_context.attribute_set:
        _1 = "ExceptionState::kSetterContext"
    elif cg_context.constructor:
        _1 = "ExceptionState::kConstructionContext"
    else:
        _1 = "ExceptionState::kExecutionContext"
    local_vars.append(
        S("exception_state_context_type", _format(pattern, _1=_1)))

    # exception_state
    # Promise-returning functions additionally convert thrown exceptions
    # into rejected promises via ExceptionToRejectPromiseScope.
    pattern = "ExceptionState ${exception_state}({_1});{_2}"
    _1 = [
        "${isolate}", "${exception_state_context_type}", "${class_like_name}",
        "${property_name}"
    ]
    _2 = ""
    if cg_context.return_type and cg_context.return_type.unwrap().is_promise:
        _2 = ("\n"
              "ExceptionToRejectPromiseScope reject_promise_scope"
              "(${info}, ${exception_state});")
    local_vars.append(
        S("exception_state", _format(pattern, _1=", ".join(_1), _2=_2)))

    # blink_receiver
    if cg_context.class_like.identifier == "Window":
        # TODO(yukishiino): Window interface should be
        # [ImplementedAs=LocalDOMWindow] instead of [ImplementedAs=DOMWindow],
        # and [CrossOrigin] properties should be implemented specifically with
        # DOMWindow class.  Then, we'll have less hacks.
        if "CrossOrigin" in cg_context.member_like.extended_attributes:
            text = ("DOMWindow* ${blink_receiver} = "
                    "${v8_class}::ToBlinkUnsafe(${v8_receiver});")
        else:
            text = ("LocalDOMWindow* ${blink_receiver} = To<LocalDOMWindow>("
                    "${v8_class}::ToBlinkUnsafe(${v8_receiver}));")
    else:
        pattern = ("{_1}* ${blink_receiver} = "
                   "${v8_class}::ToBlinkUnsafe(${v8_receiver});")
        _1 = blink_class_name(cg_context.class_like)
        text = _format(pattern, _1=_1)
    local_vars.append(S("blink_receiver", text))

    code_node.register_code_symbols(local_vars)
    code_node.add_template_vars(template_vars)
def _make_blink_api_call(cg_context, num_of_args=None):
    """
    Returns the text of a call expression into the Blink implementation.
    |num_of_args| truncates the number of IDL arguments passed (used when
    dispatching on omitted optional arguments); None passes all of them.
    """
    assert isinstance(cg_context, CodeGenContext)
    assert num_of_args is None or isinstance(num_of_args, (int, long))

    arguments = []
    # [CallWith] / [SetterCallWith] prepend implementation-requested
    # arguments before the IDL arguments.
    ext_attrs = cg_context.member_like.extended_attributes
    values = ext_attrs.values_of("CallWith") + (
        ext_attrs.values_of("SetterCallWith") if cg_context.attribute_set else
        ())
    if "Isolate" in values:
        arguments.append("${isolate}")
    if "ScriptState" in values:
        arguments.append("${script_state}")
    if "ExecutionContext" in values:
        arguments.append("${execution_context}")

    if cg_context.attribute_get:
        pass
    elif cg_context.attribute_set:
        arguments.append("${arg1_value}")
    else:
        for index, argument in enumerate(cg_context.function_like.arguments):
            if num_of_args is not None and index == num_of_args:
                break
            name = name_style.arg_f("arg{}_{}", index + 1, argument.identifier)
            arguments.append(_format("${{{}}}", name))

    if cg_context.is_return_by_argument:
        arguments.append("${return_value}")
    if cg_context.may_throw_exception:
        arguments.append("${exception_state}")

    code_generator_info = cg_context.member_like.code_generator_info
    func_name = (code_generator_info.property_implemented_as
                 or name_style.api_func(cg_context.member_like.identifier))
    if cg_context.attribute_set:
        func_name = name_style.api_func("set", func_name)

    # Static members and members defined in partials/mixins are invoked as
    # Class::Func(...); non-static ones of those get the receiver prepended
    # as the first argument.
    is_partial_or_mixin = (code_generator_info.defined_in_partial
                           or code_generator_info.defined_in_mixin)
    if cg_context.member_like.is_static or is_partial_or_mixin:
        class_like = cg_context.member_like.owner_mixin or cg_context.class_like
        class_name = (code_generator_info.receiver_implemented_as
                      or name_style.class_(class_like.identifier))
        func_designator = "{}::{}".format(class_name, func_name)
        if not cg_context.member_like.is_static:
            arguments.insert(0, "*${blink_receiver}")
    else:
        func_designator = _format("${blink_receiver}->{}", func_name)

    return _format("{_1}({_2})", _1=func_designator, _2=", ".join(arguments))
def bind_return_value(code_node, cg_context):
    """
    Registers the ${return_value} symbol whose lazy definition calls the
    Blink API -- dispatching among truncated call variants when trailing
    optional arguments without defaults may be omitted -- and bails out if
    an exception was thrown.
    """
    assert isinstance(code_node, SymbolScopeNode)
    assert isinstance(cg_context, CodeGenContext)

    T = TextNode

    def create_definition(symbol_node):
        api_calls = []  # Pairs of (num_of_args, api_call_text)
        arguments = (cg_context.function_like.arguments
                     if cg_context.function_like else [])
        # One truncated call per optional-without-default argument, plus
        # the full-arity call as the fallback (num_of_args=None).
        for index, arg in enumerate(arguments):
            if arg.is_optional and not arg.default_value:
                api_calls.append((index, _make_blink_api_call(
                    cg_context, index)))
        api_calls.append((None, _make_blink_api_call(cg_context)))

        nodes = []
        is_return_type_void = cg_context.return_type.unwrap().is_void
        if not is_return_type_void:
            return_type = blink_type_info(cg_context.return_type).value_t
        if len(api_calls) == 1:
            _, api_call = api_calls[0]
            if is_return_type_void:
                nodes.append(T(_format("{};", api_call)))
            elif cg_context.is_return_by_argument:
                nodes.append(T(_format("{} ${return_value};", return_type)))
                nodes.append(T(_format("{};", api_call)))
            else:
                nodes.append(
                    T(_format("const auto& ${return_value} = {};", api_call)))
        else:
            # Multiple variants: select the first whose next argument is
            # undefined; "break" exits the breakable block after the call.
            branches = SequenceNode()
            for index, api_call in api_calls:
                if is_return_type_void or cg_context.is_return_by_argument:
                    assignment = "{};".format(api_call)
                else:
                    assignment = _format("${return_value} = {};", api_call)
                if index is not None:
                    branches.append(
                        CxxLikelyIfNode(
                            cond=_format("${info}[{}]->IsUndefined()", index),
                            body=[
                                T(assignment),
                                T("break;"),
                            ]))
                else:
                    branches.append(T(assignment))

            if not is_return_type_void:
                nodes.append(T(_format("{} ${return_value};", return_type)))
            nodes.append(CxxBreakableBlockNode(branches))

        if cg_context.may_throw_exception:
            nodes.append(
                CxxUnlikelyIfNode(
                    cond="${exception_state}.HadException()",
                    body=T("return;")))

        return SymbolDefinitionNode(symbol_node, nodes)

    code_node.register_code_symbol(
        SymbolNode("return_value", definition_constructor=create_definition))
def bind_v8_set_return_value(code_node, cg_context):
    """
    Binds the ${v8_set_return_value} template variable: the statement that
    hands ${return_value} back to V8, specialized per return type and world.
    """
    assert isinstance(code_node, SymbolScopeNode)
    assert isinstance(cg_context, CodeGenContext)

    return_type = cg_context.return_type.unwrap(nullable=True, typedef=True)

    if return_type.is_void:
        # Render a SymbolNode |return_value| discarding the content text, and
        # let a symbol definition be added.
        text = "<% str(return_value) %>"
    else:
        func_name = "V8SetReturnValue"
        call_args = ["${info}", "${return_value}"]
        if (return_type.is_interface
                and cg_context.for_world == cg_context.MAIN_WORLD):
            func_name = "V8SetReturnValueForMainWorld"
        elif return_type.is_interface:
            call_args.append("${creation_context_object}")
        text = _format("{_1}({_2});", _1=func_name, _2=", ".join(call_args))

    code_node.add_template_var("v8_set_return_value", TextNode(text))
# Binder functions applied to every callback code node (attribute accessors
# and operations) before rendering.
_callback_common_binders = (
    bind_blink_api_arguments,
    bind_callback_local_vars,
    bind_return_value,
    bind_v8_set_return_value,
)
def make_check_receiver(cg_context):
    """
    Returns a code node validating the receiver object, or None when no
    check is emitted.  [LenientThis] getters silently return; Promise-
    returning functions turn the failure into a rejected promise via the
    thrown TypeError.
    """
    assert isinstance(cg_context, CodeGenContext)

    T = TextNode

    if (cg_context.attribute
            and "LenientThis" in cg_context.attribute.extended_attributes):
        return SequenceNode([
            T("// [LenientThis]"),
            CxxUnlikelyIfNode(
                cond="!${v8_class}::HasInstance(${v8_receiver}, ${isolate})",
                body=T("return;")),
        ])

    if cg_context.return_type.unwrap().is_promise:
        return SequenceNode([
            T("// Promise returning function: "
              "Convert a TypeError to a reject promise."),
            CxxUnlikelyIfNode(
                cond="!${v8_class}::HasInstance(${v8_receiver}, ${isolate})",
                body=[
                    T("${exception_state}.ThrowTypeError("
                      "\"Illegal invocation\");"),
                    T("return;"),
                ])
        ])

    return None
def make_check_security_of_return_value(cg_context):
    """
    Returns a code node implementing [CheckSecurity=ReturnValue]: when the
    calling window may not access ${return_value}, a use counter is
    recorded and null is returned instead.  Returns None when the extended
    attribute is absent.
    """
    assert isinstance(cg_context, CodeGenContext)

    T = TextNode

    check_security = cg_context.member_like.extended_attributes.value_of(
        "CheckSecurity")
    if check_security != "ReturnValue":
        return None

    web_feature = _format(
        "WebFeature::{}",
        name_style.constant("CrossOrigin", cg_context.class_like.identifier,
                            cg_context.member_like.identifier))
    use_counter = _format(
        "UseCounter::Count(${current_execution_context}, {});", web_feature)
    cond = T("!BindingSecurity::ShouldAllowAccessTo("
             "ToLocalDOMWindow(${current_context}), ${return_value}, "
             "BindingSecurity::ErrorReportOption::kDoNotReport)")
    body = SymbolScopeNode([
        T(use_counter),
        T("V8SetReturnValueNull(${info});\n"
          "return;"),
    ])
    # NOTE(review): UnlikelyExitNode does not appear in this module's import
    # list -- confirm it is imported from .code_node, otherwise this raises
    # NameError at code-generation time.
    return SequenceNode([
        T("// [CheckSecurity=ReturnValue]"),
        UnlikelyExitNode(cond=cond, body=body),
    ])
def make_log_activity(cg_context):
    """
    Returns a code node reporting this property access to the V8 DOM
    activity logger per [LogActivity] / [LogAllWorlds], or None when
    logging does not apply to this world or accessor kind.
    """
    assert isinstance(cg_context, CodeGenContext)

    ext_attrs = cg_context.member_like.extended_attributes
    if "LogActivity" not in ext_attrs:
        return None
    target = ext_attrs.value_of("LogActivity")
    if target:
        assert target in ("GetterOnly", "SetterOnly")
        if ((target == "GetterOnly" and not cg_context.attribute_get)
                or (target == "SetterOnly" and not cg_context.attribute_set)):
            return None
    if (cg_context.for_world == cg_context.MAIN_WORLD
            and "LogAllWorlds" not in ext_attrs):
        return None

    pattern = "{_1}${per_context_data} && ${per_context_data}->ActivityLogger()"
    _1 = ""
    if (cg_context.attribute and "PerWorldBindings" not in ext_attrs
            and "LogAllWorlds" not in ext_attrs):
        # Restrict logging to isolated worlds for plain attributes.
        _1 = "${script_state}->World().IsIsolatedWorld() && "
    cond = _format(pattern, _1=_1)

    # Pick the logger method and extra argument by accessor kind.
    pattern = "${per_context_data}->ActivityLogger()->{_1}(\"{_2}.{_3}\"{_4});"
    _2 = cg_context.class_like.identifier
    _3 = cg_context.property_.identifier
    if cg_context.attribute_get:
        _1 = "LogGetter"
        _4 = ""
    if cg_context.attribute_set:
        _1 = "LogSetter"
        _4 = ", ${info}[0]"
    if cg_context.operation_group:
        _1 = "LogMethod"
        _4 = ", ${info}"
    body = _format(pattern, _1=_1, _2=_2, _3=_3, _4=_4)

    pattern = ("// [LogActivity], [LogAllWorlds]\n" "if ({_1}) {{ {_2} }}")
    node = TextNode(_format(pattern, _1=cond, _2=body))
    node.accumulate(
        CodeGenAccumulator.require_include_headers([
            "third_party/blink/renderer/"
            "platform/bindings/v8_dom_activity_logger.h",
        ]))
    return node
def _make_overloaded_function_name(function_like):
    """Returns the generated function name for one member of an overload set."""
    overload_number = function_like.overload_index + 1
    if isinstance(function_like, web_idl.Constructor):
        return name_style.func("constructor", "overload", overload_number)
    return name_style.func(function_like.identifier, "op", "overload",
                           overload_number)
def _make_overload_dispatcher_per_arg_size(items):
    """
    https://heycam.github.io/webidl/#dfn-overload-resolution-algorithm

    Args:
        items: Partial list of an "effective overload set" with the same
            type list size.

    Returns:
        A pair of a resulting CodeNode and a boolean flag that is True if
        there exists a case that overload resolution will fail, i.e. a
        bailout that throws a TypeError is necessary.
    """
    # Variables shared with nested functions
    if len(items) > 1:
        arg_index = web_idl.OverloadGroup.distinguishing_argument_index(items)
    else:
        arg_index = None
    func_like = None
    dispatcher_nodes = SequenceNode()

    # True if there exists a case that overload resolution will fail.
    can_fail = True

    def find_test(item, test):
        # |test| is a callable that takes (t, u) where:
        #   t = the idl_type (in the original form)
        #   u = the unwrapped version of t
        # A union matches when any of its flattened member types matches.
        idl_type = item.type_list[arg_index]
        t = idl_type
        u = idl_type.unwrap()
        return test(t, u) or (u.is_union and any(
            [test(m, m.unwrap()) for m in u.flattened_member_types]))

    def find(test):
        # Returns the function-like of the first overload whose
        # distinguishing argument satisfies |test|, or None.
        for item in items:
            if find_test(item, test):
                return item.function_like
        return None

    def find_all_interfaces():
        # Collects every (function_like, interface type) pair reachable
        # through the distinguishing argument (including union members).
        result = []  # [(func_like, idl_type), ...]
        for item in items:
            idl_type = item.type_list[arg_index].unwrap()
            if idl_type.is_interface:
                result.append((item.function_like, idl_type))
            if idl_type.is_union:
                for member_type in idl_type.flattened_member_types:
                    if member_type.unwrap().is_interface:
                        result.append((item.function_like,
                                       member_type.unwrap()))
        return result

    def make_node(pattern):
        value = _format("${info}[{}]", arg_index)
        func_name = _make_overloaded_function_name(func_like)
        return TextNode(_format(pattern, value=value, func_name=func_name))

    def dispatch_if(expr):
        # Emits "if (expr) return overload(...);" wrapped in the overload's
        # exposure condition when needed; returns True only when the
        # dispatch is unconditional (resolution cannot fall through).
        if expr is True:
            pattern = "return {func_name}(${info});"
        else:
            pattern = ("if (" + expr + ") {{\n"
                       " return {func_name}(${info});\n"
                       "}}")
        node = make_node(pattern)
        conditional = expr_from_exposure(func_like.exposure)
        if not conditional.is_always_true:
            node = CxxUnlikelyIfNode(cond=conditional, body=node)
        dispatcher_nodes.append(node)
        return expr is True and conditional.is_always_true

    if len(items) == 1:
        # A single overload never needs type-based dispatch.
        func_like = items[0].function_like
        can_fail = False
        return make_node("return {func_name}(${info});"), can_fail

    # 12.2. If V is undefined, ...
    func_like = find(lambda t, u: t.is_optional)
    if func_like:
        dispatch_if("{value}->IsUndefined()")

    # 12.3. if V is null or undefined, ...
    func_like = find(
        lambda t, u: t.does_include_nullable_type or u.is_dictionary)
    if func_like:
        dispatch_if("{value}->IsNullOrUndefined()")

    # 12.4. if V is a platform object, ...
    def inheritance_length(func_and_type):
        return len(func_and_type[1].type_definition_object.
                   inclusive_inherited_interfaces)

    # Attempt to match from most derived to least derived.
    for func_like, idl_type in sorted(
            find_all_interfaces(), key=inheritance_length, reverse=True):
        cgc = CodeGenContext(
            interface=idl_type.unwrap().type_definition_object)
        dispatch_if(
            _format("{}::HasInstance(${isolate}, {value})", cgc.v8_class))

    is_typedef_name = lambda t, name: t.is_typedef and t.identifier == name

    func_like_a = find(
        lambda t, u: is_typedef_name(t.unwrap(typedef=False),
                                     "ArrayBufferView"))
    func_like_b = find(
        lambda t, u: is_typedef_name(t.unwrap(typedef=False), "BufferSource"))
    if func_like_a or func_like_b:
        # V8 specific optimization: ArrayBufferView
        if func_like_a:
            func_like = func_like_a
            dispatch_if("{value}->IsArrayBufferView()")
        if func_like_b:
            func_like = func_like_b
            dispatch_if("{value}->IsArrayBufferView() || "
                        "{value}->IsArrayBuffer() || "
                        "{value}->IsSharedArrayBuffer()")
    else:
        # 12.5. if Type(V) is Object, V has an [[ArrayBufferData]] internal
        # slot, ...
        func_like = find(lambda t, u: u.is_array_buffer)
        if func_like:
            dispatch_if("{value}->IsArrayBuffer() || "
                        "{value}->IsSharedArrayBuffer()")

    # 12.6. if Type(V) is Object, V has a [[DataView]] internal slot, ...
    func_like = find(lambda t, u: u.is_data_view)
    if func_like:
        dispatch_if("{value}->IsDataView()")

    # 12.7. if Type(V) is Object, V has a [[TypedArrayName]] internal slot,
    # ...
    func_like = find(lambda t, u: u.is_typed_array_type)
    if func_like:
        dispatch_if("{value}->IsTypedArray()")

    # 12.8. if IsCallable(V) is true, ...
    func_like = find(lambda t, u: u.is_callback_function)
    if func_like:
        dispatch_if("{value}->IsFunction()")

    # 12.9. if Type(V) is Object and ... @@iterator ...
    func_like = find(lambda t, u: u.is_sequence or u.is_frozen_array)
    if func_like:
        dispatch_if("{value}->IsArray() || "  # Excessive optimization
                    "bindings::IsEsIterableObject"
                    "(${isolate}, {value}, ${exception_state})")
        # IsEsIterableObject may throw; bail out before further dispatch.
        dispatcher_nodes.append(
            TextNode("if (${exception_state}.HadException()) {\n"
                     " return;\n"
                     "}"))

    # 12.10. if Type(V) is Object and ...
    def is_es_object_type(t, u):
        return (u.is_callback_interface or u.is_dictionary or u.is_record
                or u.is_object)

    func_like = find(is_es_object_type)
    if func_like:
        dispatch_if("{value}->IsObject()")

    # 12.11. if Type(V) is Boolean and ...
    func_like = find(lambda t, u: u.is_boolean)
    if func_like:
        dispatch_if("{value}->IsBoolean()")

    # 12.12. if Type(V) is Number and ...
    func_like = find(lambda t, u: u.is_numeric)
    if func_like:
        dispatch_if("{value}->IsNumber()")

    # 12.13. if there is an entry in S that has ... a string type ...
    # 12.14. if there is an entry in S that has ... a numeric type ...
    # 12.15. if there is an entry in S that has ... boolean ...
    # 12.16. if there is an entry in S that has any ...
    func_likes = [
        find(lambda t, u: u.is_string),
        find(lambda t, u: u.is_numeric),
        find(lambda t, u: u.is_boolean),
        find(lambda t, u: u.is_any),
    ]
    for func_like in func_likes:
        if func_like:
            if dispatch_if(True):
                # Unconditional catch-all dispatch: resolution cannot fail.
                can_fail = False
                break

    return dispatcher_nodes, can_fail
def make_overload_dispatcher(cg_context):
    """Returns a code node implementing the WebIDL overload resolution
    algorithm for the overload group held by |cg_context|.
    """
    # https://heycam.github.io/webidl/#dfn-overload-resolution-algorithm
    assert isinstance(cg_context, CodeGenContext)

    T = TextNode

    overload_group = cg_context.property_
    items = overload_group.effective_overload_set()
    args_size = lambda item: len(item.type_list)
    # Larger arities are tried first; groupby requires the input to be
    # sorted by the same key it groups on.
    items_grouped_by_arg_size = itertools.groupby(
        sorted(items, key=args_size, reverse=True), key=args_size)

    branches = SequenceNode()
    did_use_break = False
    for arg_size, items in items_grouped_by_arg_size:
        items = list(items)

        node, can_fail = _make_overload_dispatcher_per_arg_size(items)

        if arg_size > 0:
            # Guard each arity bucket by the actual argument count.  When
            # dispatch within the bucket can fail, `break` falls through to
            # the TypeError at the end of the surrounding breakable block.
            node = CxxLikelyIfNode(
                cond=_format("${info}.Length() >= {}", arg_size),
                body=[node, T("break;") if can_fail else None])
            did_use_break = did_use_break or can_fail

        # Only emit an exposure check when it is not trivially true.
        conditional = expr_or(
            map(lambda item: expr_from_exposure(item.function_like.exposure),
                items))
        if not conditional.is_always_true:
            node = CxxUnlikelyIfNode(cond=conditional, body=node)

        branches.append(node)

    if did_use_break:
        branches = CxxBreakableBlockNode(branches)

    # NOTE(review): `arg_size` and `conditional` here are the values leaked
    # from the *last* loop iteration (the smallest-arity group); the early
    # return applies when that group unconditionally handles zero arguments.
    if not did_use_break and arg_size == 0 and conditional.is_always_true:
        return branches

    return SequenceNode([
        branches,
        T(""),
        T("${exception_state}.ThrowTypeError"
          "(\"Overload resolution failed.\");\n"
          "return;"),
    ])
def make_report_deprecate_as(cg_context):
    """Returns a node reporting [DeprecateAs] usage to the Deprecation
    counter, or None when the member has no [DeprecateAs] attribute.
    """
    assert isinstance(cg_context, CodeGenContext)

    deprecate_as = cg_context.member_like.extended_attributes.value_of(
        "DeprecateAs")
    if not deprecate_as:
        return None

    text = _format(
        ("// [DeprecateAs]\n"
         "Deprecation::CountDeprecation("
         "${execution_context}, WebFeature::k{_1});"),
        _1=deprecate_as)
    node = TextNode(text)
    node.accumulate(
        CodeGenAccumulator.require_include_headers([
            "third_party/blink/renderer/core/frame/deprecation.h",
        ]))
    return node
def make_report_measure_as(cg_context):
    """Returns a node reporting [Measure]/[MeasureAs] (and, when present,
    [HighEntropy]) usage, or None when the member is not measured.
    """
    assert isinstance(cg_context, CodeGenContext)

    ext_attrs = cg_context.member_like.extended_attributes
    if not ("Measure" in ext_attrs or "MeasureAs" in ext_attrs):
        # [HighEntropy] is only meaningful on measured members.
        assert "HighEntropy" not in ext_attrs, "{}: {}".format(
            cg_context.idl_location_and_name,
            "[HighEntropy] must be specified with either [Measure] or "
            "[MeasureAs].")
        return None

    # Suffix encodes which kind of binding triggered the counter.
    suffix = ""
    if cg_context.attribute_get:
        suffix = "_AttributeGetter"
    elif cg_context.attribute_set:
        suffix = "_AttributeSetter"
    elif cg_context.constructor:
        suffix = "_Constructor"
    elif cg_context.operation:
        suffix = "_Method"

    # [MeasureAs=Name] names the WebFeature directly; plain [Measure]
    # derives the name from class and member identifiers.
    name = cg_context.member_like.extended_attributes.value_of("MeasureAs")
    if name:
        name = "k{}".format(name)
    elif cg_context.constructor:
        name = "kV8{}{}".format(cg_context.class_like.identifier, suffix)
    else:
        name = "kV8{}_{}{}".format(
            cg_context.class_like.identifier,
            name_style.raw.upper_camel_case(cg_context.member_like.identifier),
            suffix)

    node = SequenceNode()

    pattern = ("// [Measure], [MeasureAs]\n"
               "UseCounter::Count(${execution_context}, WebFeature::{_1});")
    _1 = name
    node.append(TextNode(_format(pattern, _1=_1)))
    node.accumulate(
        CodeGenAccumulator.require_include_headers([
            "third_party/blink/renderer/core/frame/web_feature.h",
            "third_party/blink/renderer/platform/instrumentation/use_counter.h",
        ]))

    # [HighEntropy] additionally records the access for fingerprinting
    # analysis -- but not for attribute setters.
    if "HighEntropy" not in ext_attrs or cg_context.attribute_set:
        return node

    pattern = (
        "// [HighEntropy]\n"
        "Dactyloscoper::Record(${execution_context}, WebFeature::{_1});")
    _1 = name
    node.append(TextNode(_format(pattern, _1=_1)))
    node.accumulate(
        CodeGenAccumulator.require_include_headers([
            "third_party/blink/renderer/core/frame/dactyloscoper.h",
        ]))
    return node
def make_return_value_cache_return_early(cg_context):
    """Returns a node that returns early with a previously cached attribute
    value ([CachedAttribute] / [SaveSameObject]), or None (implicitly) when
    the member uses neither extended attribute.
    """
    assert isinstance(cg_context, CodeGenContext)

    # [CachedAttribute=Predicate]: the cached value is reused only while
    # impl->Predicate() is false.
    pred = cg_context.member_like.extended_attributes.value_of(
        "CachedAttribute")
    if pred:
        return TextNode("""\
// [CachedAttribute]
static const V8PrivateProperty::SymbolKey kPrivatePropertyCachedAttribute;
auto v8_private_cached_attribute =
V8PrivateProperty::GetSymbol(${isolate}, kPrivatePropertyCachedAttribute);
if (!impl->""" + pred + """()) {
v8::Local<v8::Value> v8_value;
if (v8_private_cached_attribute.GetOrUndefined(${v8_receiver})
.ToLocal(&v8_value) && !v8_value->IsUndefined()) {
V8SetReturnValue(${info}, v8_value);
return;
}
}""")

    # [SaveSameObject]: the first computed value is cached unconditionally.
    if "SaveSameObject" in cg_context.member_like.extended_attributes:
        return TextNode("""\
// [SaveSameObject]
static const V8PrivateProperty::SymbolKey kPrivatePropertySaveSameObject;
auto v8_private_save_same_object =
V8PrivateProperty::GetSymbol(${isolate}, kPrivatePropertySaveSameObject);
{
v8::Local<v8::Value> v8_value;
if (v8_private_save_same_object.GetOrUndefined(${v8_receiver})
.ToLocal(&v8_value) && !v8_value->IsUndefined()) {
V8SetReturnValue(${info}, v8_value);
return;
}
}""")
def make_return_value_cache_update_value(cg_context):
    """Returns a node storing ${info}'s return value into the private
    property set up by make_return_value_cache_return_early, or None when
    the member caches nothing.
    """
    assert isinstance(cg_context, CodeGenContext)

    ext_attrs = cg_context.member_like.extended_attributes
    if "CachedAttribute" in ext_attrs:
        return TextNode("// [CachedAttribute]\n"
                        "v8_private_cached_attribute.Set"
                        "(${v8_receiver}, ${info}.GetReturnValue().Get());")
    if "SaveSameObject" in ext_attrs:
        return TextNode("// [SaveSameObject]\n"
                        "v8_private_save_same_object.Set"
                        "(${v8_receiver}, ${info}.GetReturnValue().Get());")
    return None
def make_runtime_call_timer_scope(cg_context):
    """Returns a node emitting a RUNTIME_CALL_TIMER_SCOPE macro for this
    callback, named from [RuntimeCallStatsCounter] when present.
    """
    assert isinstance(cg_context, CodeGenContext)

    if cg_context.attribute_get:
        suffix = "_Getter"
    elif cg_context.attribute_set:
        suffix = "_Setter"
    else:
        suffix = ""

    counter = cg_context.member_like.extended_attributes.value_of(
        "RuntimeCallStatsCounter")
    if counter:
        # Named counter: refer to the generated enum entry.
        macro_arg = "k{}{}".format(counter, suffix)
    else:
        # Anonymous counter: a string literal built from class and member.
        macro_arg = "\"Blink_{}_{}{}\"".format(
            blink_class_name(cg_context.class_like),
            cg_context.member_like.identifier, suffix)

    node = TextNode(
        _format("RUNTIME_CALL_TIMER_SCOPE{_1}(${isolate}, {_2});",
                _1="_DISABLED_BY_DEFAULT", _2=macro_arg))
    node.accumulate(
        CodeGenAccumulator.require_include_headers([
            "third_party/blink/renderer/platform/bindings/runtime_call_stats.h",
        ]))
    return node
def make_attribute_get_callback_def(cg_context, function_name):
    """Defines the v8::FunctionCallback implementing an IDL attribute
    getter, including measurement, security, and caching boilerplate.
    """
    assert isinstance(cg_context, CodeGenContext)
    assert isinstance(function_name, str)

    T = TextNode

    cg_context = cg_context.make_copy(attribute_get=True)

    func_def = CxxFuncDefNode(
        name=function_name,
        arg_decls=["const v8::FunctionCallbackInfo<v8::Value>& info"],
        return_type="void")

    body = func_def.body
    body.add_template_var("info", "info")
    body.add_template_vars(cg_context.template_bindings())

    for bind in _callback_common_binders:
        bind(body, cg_context)

    # Helpers below may return None; the sequence node skips such entries.
    body.extend([
        make_runtime_call_timer_scope(cg_context),
        make_report_deprecate_as(cg_context),
        make_report_measure_as(cg_context),
        make_log_activity(cg_context),
        T(""),
        make_check_receiver(cg_context),
        # May return early with a cached value ([CachedAttribute] /
        # [SaveSameObject]).
        make_return_value_cache_return_early(cg_context),
        T(""),
        make_check_security_of_return_value(cg_context),
        T("${v8_set_return_value}"),
        make_return_value_cache_update_value(cg_context),
    ])

    return func_def
def make_attribute_set_callback_def(cg_context, function_name):
    """Attribute setter callbacks are not implemented yet; callers treat a
    None return as "no callback to emit".
    """
    assert isinstance(cg_context, CodeGenContext)
    assert isinstance(function_name, str)
    return None
def make_operation_function_def(cg_context, function_name):
    """Defines the v8::FunctionCallback implementing a single
    (non-overloaded) IDL operation or constructor.
    """
    assert isinstance(cg_context, CodeGenContext)
    assert isinstance(function_name, str)

    T = TextNode

    func_def = CxxFuncDefNode(
        name=function_name,
        arg_decls=["const v8::FunctionCallbackInfo<v8::Value>& info"],
        return_type="void")

    body = func_def.body
    body.add_template_var("info", "info")
    body.add_template_vars(cg_context.template_bindings())

    for bind in _callback_common_binders:
        bind(body, cg_context)

    # Helpers below may return None; the sequence node skips such entries.
    body.extend([
        make_runtime_call_timer_scope(cg_context),
        make_report_deprecate_as(cg_context),
        make_report_measure_as(cg_context),
        make_log_activity(cg_context),
        T(""),
        make_check_receiver(cg_context),
        T(""),
        T("${v8_set_return_value}"),
    ])

    return func_def
def make_overload_dispatcher_function_def(cg_context, function_name):
    """Defines the callback that performs overload resolution and forwards
    to the per-overload implementation functions.
    """
    assert isinstance(cg_context, CodeGenContext)
    assert isinstance(function_name, str)

    dispatcher_def = CxxFuncDefNode(
        name=function_name,
        arg_decls=["const v8::FunctionCallbackInfo<v8::Value>& info"],
        return_type="void")

    scope = dispatcher_def.body
    scope.add_template_var("info", "info")
    scope.add_template_vars(cg_context.template_bindings())
    bind_callback_local_vars(scope, cg_context)
    scope.append(make_overload_dispatcher(cg_context))

    return dispatcher_def
def make_operation_callback_def(cg_context, function_name):
    """Defines the callback(s) for an operation or constructor group:
    either a single function, or one function per overload plus a
    dispatcher.
    """
    assert isinstance(cg_context, CodeGenContext)
    assert isinstance(function_name, str)

    operation_group = cg_context.constructor_group or cg_context.operation_group

    # Single operation: no overload resolution is required.
    if len(operation_group) == 1:
        return make_operation_function_def(
            cg_context.make_copy(operation=operation_group[0]), function_name)

    # Overloaded: emit one implementation per overload, then the dispatcher.
    nodes = SequenceNode()
    for operation in operation_group:
        nodes.append(
            make_operation_function_def(
                cg_context.make_copy(operation=operation),
                _make_overloaded_function_name(operation)))
    nodes.append(
        make_overload_dispatcher_function_def(cg_context, function_name))
    return nodes
def bind_template_installer_local_vars(code_node, cg_context):
    """Registers lazily-materialized symbols (local variables) commonly used
    by interface-template installation code.
    """
    assert isinstance(code_node, SymbolScopeNode)
    assert isinstance(cg_context, CodeGenContext)

    S = SymbolNode

    local_vars = []

    local_vars.extend([
        S("instance_template",
          ("v8::Local<v8::ObjectTemplate> ${instance_template} = "
           "${interface_template}->InstanceTemplate();")),
        S("prototype_template",
          ("v8::Local<v8::ObjectTemplate> ${prototype_template} = "
           "${interface_template}->PrototypeTemplate();")),
        S("signature",
          ("v8::Local<v8::Signature> ${signature} = "
           "v8::Signature::New(${isolate}, ${interface_template});")),
        S("wrapper_type_info",
          ("const WrapperTypeInfo* const ${wrapper_type_info} = "
           "${v8_class}::GetWrapperTypeInfo();")),
    ])

    # The parent interface template is looked up only when the interface
    # actually inherits; otherwise the local stays default-constructed.
    pattern = (
        "v8::Local<v8::FunctionTemplate> ${parent_interface_template}{_1};")
    _1 = (" = ${wrapper_type_info}->parent_class->dom_template_function"
          "(${isolate}, ${world})")
    if not cg_context.class_like.inherited:
        _1 = ""
    local_vars.append(S("parent_interface_template", _format(pattern, _1=_1)))

    code_node.register_code_symbols(local_vars)
def make_install_interface_template_def(cg_context):
    """Defines InstallInterfaceTemplate(), which initializes the
    v8::FunctionTemplate for this interface (parent template, constructor
    call handler, etc.).
    """
    assert isinstance(cg_context, CodeGenContext)

    T = TextNode

    func_def = CxxFuncDefNode(
        name="InstallInterfaceTemplate",
        arg_decls=[
            "v8::Isolate* isolate",
            "const DOMWrapperWorld& world",
            "v8::Local<v8::FunctionTemplate> interface_template",
        ],
        return_type="void")

    body = func_def.body
    body.add_template_var("isolate", "isolate")
    body.add_template_var("world", "world")
    body.add_template_var("interface_template", "interface_template")
    body.add_template_vars(cg_context.template_bindings())

    binders = [
        bind_template_installer_local_vars,
    ]
    for bind in binders:
        bind(body, cg_context)

    body.extend([
        T("V8DOMConfiguration::InitializeDOMInterfaceTemplate("
          "${isolate}, ${interface_template}, "
          "${wrapper_type_info}->interface_name, ${parent_interface_template}, "
          "kV8DefaultWrapperInternalFieldCount);"),
    ])

    # Interfaces with constructors get a call handler plus the `length`
    # property of the constructor function.
    if cg_context.class_like.constructor_groups:
        body.extend([
            T("${interface_template}->SetCallHandler(ConstructorCallback);"),
            T("${interface_template}->SetLength("
              "${class_like.constructor_groups[0]"
              ".min_num_of_required_arguments});"),
        ])

    return func_def
def generate_interfaces(web_idl_database, output_dirs):
    """Generates V8 binding code for interfaces.

    NOTE(review): currently prototype scaffolding -- it hard-codes the
    "TestNamespace" interface and a single output file; confirm before
    relying on it for real interfaces.
    """
    filename = "v8_example_interface.cc"
    filepath = os.path.join(output_dirs['core'], filename)

    interface = web_idl_database.find("TestNamespace")

    cg_context = CodeGenContext(interface=interface)

    # Root of the generated file; the accumulator collects required
    # #include headers as code nodes are appended.
    root_node = SymbolScopeNode(separator_last="\n")
    root_node.set_accumulator(CodeGenAccumulator())
    root_node.set_renderer(MakoRenderer())

    root_node.accumulator.add_include_headers(
        collect_include_headers(interface))

    code_node = SequenceNode()

    # Attribute getter/setter callbacks.
    for attribute in interface.attributes:
        func_name = name_style.func(attribute.identifier,
                                    "AttributeGetCallback")
        code_node.append(
            make_attribute_get_callback_def(
                cg_context.make_copy(attribute=attribute), func_name))
        func_name = name_style.func(attribute.identifier,
                                    "AttributeSetCallback")
        code_node.append(
            make_attribute_set_callback_def(
                cg_context.make_copy(attribute=attribute), func_name))

    # Constructor callbacks.
    for constructor_group in interface.constructor_groups:
        func_name = name_style.func("ConstructorCallback")
        code_node.append(
            make_operation_callback_def(
                cg_context.make_copy(constructor_group=constructor_group),
                func_name))

    # Operation (method) callbacks.
    for operation_group in interface.operation_groups:
        func_name = name_style.func(operation_group.identifier,
                                    "OperationCallback")
        code_node.append(
            make_operation_callback_def(
                cg_context.make_copy(operation_group=operation_group),
                func_name))

    code_node.append(make_install_interface_template_def(cg_context))

    root_node.extend([
        make_copyright_header(),
        TextNode(""),
        make_header_include_directives(root_node.accumulator),
        TextNode(""),
        enclose_with_namespace(code_node, name_style.namespace("blink")),
    ])

    write_code_node_to_file(root_node, filepath)
| {"/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py", "/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py"], "/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py"]} |
73,638 | cea56/chromium | refs/heads/master | /third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import posixpath
import web_idl
from . import name_style
from .blink_v8_bridge import blink_class_name
class PathManager(object):
    """
    Provides a variety of paths such as Blink headers and output files.

    About output files, there are two cases.
    - cross-components case:
      APIs are generated in 'core' and implementations are generated in
      'modules'.
    - single component case:
      Everything is generated in a single component.
    """

    _REQUIRE_INIT_MESSAGE = ("PathManager.init must be called in advance.")
    # Guards against use before (or double calls of) init().
    _is_initialized = False

    @classmethod
    def init(cls, output_dirs):
        """
        Args:
            output_dirs: Pairs of component and output directory.
        """
        assert not cls._is_initialized
        assert isinstance(output_dirs, dict)
        cls._output_dirs = output_dirs
        # e.g. "/third_party/blink/renderer/" (trailing separator included).
        cls._blink_path_prefix = posixpath.sep + posixpath.join(
            "third_party", "blink", "renderer", "")
        cls._is_initialized = True

    @classmethod
    def relpath_to_project_root(cls, path):
        # Strips everything before the Blink prefix, yielding a path
        # relative to the project root (no leading separator).
        index = path.find(cls._blink_path_prefix)
        if index < 0:
            # Already relative: it must start with the prefix sans the
            # leading separator.
            assert path.startswith(cls._blink_path_prefix[1:])
            return path
        return path[index + 1:]

    def __init__(self, idl_definition):
        assert self._is_initialized, self._REQUIRE_INIT_MESSAGE

        idl_path = idl_definition.debug_info.location.filepath
        self._idl_basepath, _ = posixpath.splitext(idl_path)
        self._idl_dir, self._idl_basename = posixpath.split(self._idl_basepath)

        components = sorted(idl_definition.components)

        if len(components) == 1:
            # Single component: API and implementation share one directory.
            component = components[0]
            self._is_cross_components = False
            self._api_component = component
            self._impl_component = component
        elif len(components) == 2:
            # Cross-components: API in 'core', implementation in 'modules'.
            assert components[0] == "core"
            assert components[1] == "modules"
            self._is_cross_components = True
            self._api_component = "core"
            self._impl_component = "modules"
        else:
            assert False

        self._api_dir = self._output_dirs[self._api_component]
        self._impl_dir = self._output_dirs[self._impl_component]
        self._out_basename = name_style.file("v8", idl_definition.identifier)

        # Callback functions/interfaces have no hand-written Blink class
        # next to the IDL file; their "Blink" file is the generated one.
        if isinstance(idl_definition,
                      (web_idl.CallbackFunction, web_idl.CallbackInterface)):
            self._blink_dir = self._api_dir
        else:
            self._blink_dir = self._idl_dir
        self._blink_basename = name_style.file(
            blink_class_name(idl_definition))

    @property
    def idl_dir(self):
        return self._idl_dir

    def blink_path(self, filename=None, ext=None):
        """
        Returns a path to a Blink implementation file relative to the project
        root directory, e.g. "third_party/blink/renderer/..."
        """
        return self.relpath_to_project_root(
            self._join(
                dirpath=self._blink_dir,
                filename=(filename or self._blink_basename),
                ext=ext))

    @property
    def is_cross_components(self):
        return self._is_cross_components

    @property
    def api_component(self):
        return self._api_component

    @property
    def api_dir(self):
        return self._api_dir

    def api_path(self, filename=None, ext=None):
        # Output path of the generated API file (defaults to v8_<name>).
        return self._join(
            dirpath=self.api_dir,
            filename=(filename or self._out_basename),
            ext=ext)

    @property
    def impl_component(self):
        return self._impl_component

    @property
    def impl_dir(self):
        return self._impl_dir

    def impl_path(self, filename=None, ext=None):
        # Output path of the generated implementation file.
        return self._join(
            dirpath=self.impl_dir,
            filename=(filename or self._out_basename),
            ext=ext)

    @staticmethod
    def _join(dirpath, filename, ext=None):
        # Appends the extension (if given) before joining with the directory.
        if ext is not None:
            filename = posixpath.extsep.join([filename, ext])
        return posixpath.join(dirpath, filename)
| {"/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py", "/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py"], "/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py"]} |
73,639 | cea56/chromium | refs/heads/master | /third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module provides C++ language specific implementations of
code_node.CodeNode.
"""
from .code_node import CodeNode
from .code_node import CompositeNode
from .code_node import Likeliness
from .code_node import ListNode
from .code_node import SymbolScopeNode
from .code_node import TextNode
from .codegen_expr import CodeGenExpr
class CxxBlockNode(CompositeNode):
    """Represents a plain C++ compound statement: `{ ... }`."""

    def __init__(self, body):
        CompositeNode.__init__(
            self,
            ("{{\n"
             " {body}\n"
             "}}"),
            body=_to_symbol_scope_node(body, Likeliness.ALWAYS))
class CxxIfNode(CompositeNode):
    """Represents a C++ `if` statement with a single body block."""

    def __init__(self, cond, body, likeliness):
        CompositeNode.__init__(
            self,
            ("if ({cond}) {{\n"
             " {body}\n"
             "}}"),
            cond=_to_conditional_node(cond),
            body=_to_symbol_scope_node(body, likeliness))
class CxxIfElseNode(CompositeNode):
    """Represents a C++ `if`/`else` statement."""

    def __init__(self, cond, then, then_likeliness, else_, else_likeliness):
        template_format = ("if ({cond}) {{\n"
                           " {then}\n"
                           "}} else {{\n"
                           " {else_}\n"
                           "}}")
        CompositeNode.__init__(
            self,
            template_format,
            # Bug fix: `cond` was passed positionally and unconverted.
            # CompositeNode takes its template parameters by keyword, and
            # the condition must be coerced into a code node exactly as
            # CxxIfNode does.
            cond=_to_conditional_node(cond),
            then=_to_symbol_scope_node(then, then_likeliness),
            else_=_to_symbol_scope_node(else_, else_likeliness))
class CxxLikelyIfNode(CxxIfNode):
    # Convenience subclass: an `if` whose body is expected to be taken.
    def __init__(self, cond, body):
        CxxIfNode.__init__(self, cond, body, Likeliness.LIKELY)
class CxxUnlikelyIfNode(CxxIfNode):
    # Convenience subclass: an `if` whose body is rarely taken.
    def __init__(self, cond, body):
        CxxIfNode.__init__(self, cond, body, Likeliness.UNLIKELY)
class CxxBreakableBlockNode(CompositeNode):
    """A `do { ... } while (false);` block, which lets generated code use
    `break` as a structured early exit.
    """

    def __init__(self, body, likeliness=Likeliness.LIKELY):
        template_format = ("do {{ // Dummy loop for use of 'break'.\n"
                           " {body}\n"
                           "}} while (false);")
        CompositeNode.__init__(
            self,
            template_format,
            body=_to_symbol_scope_node(body, likeliness))
class CxxFuncDeclNode(CompositeNode):
    """Represents a C++ function declaration (a prototype ending in ';')."""

    def __init__(self,
                 name,
                 arg_decls,
                 return_type,
                 const=False,
                 override=False,
                 default=False,
                 delete=False):
        """
        Args:
            name: Function name node, which may include nested-name-specifier
                (i.e. 'namespace_name::' and/or 'class_name::').
            arg_decls: List of argument declarations.
            return_type: Return type.
            const: True makes this a const function.
            override: True makes this an overriding function.
            default: True makes this have the default implementation.
            delete: True makes this function be deleted.
        """
        assert isinstance(const, bool)
        assert isinstance(override, bool)
        assert isinstance(default, bool)
        assert isinstance(delete, bool)
        # A function cannot be both '= default' and '= delete'.
        assert not (default and delete)

        template_format = ("{return_type} {name}({arg_decls})"
                           "{const}"
                           "{override}"
                           "{default_or_delete}"
                           ";")

        # The bool flags are rebound to their textual C++ representations.
        const = " const" if const else ""
        override = " override" if override else ""

        if default:
            default_or_delete = " = default"
        elif delete:
            default_or_delete = " = delete"
        else:
            default_or_delete = ""

        CompositeNode.__init__(
            self,
            template_format,
            name=name,
            arg_decls=ListNode(
                _to_node_list(arg_decls, TextNode), separator=", "),
            return_type=return_type,
            const=const,
            override=override,
            default_or_delete=default_or_delete)
class CxxFuncDefNode(CompositeNode):
    """Represents a C++ function definition; the body is exposed as a
    SymbolScopeNode via the `body` property.
    """

    def __init__(self,
                 name,
                 arg_decls,
                 return_type,
                 const=False,
                 override=False,
                 member_initializer_list=None):
        """
        Args:
            name: Function name node, which may include nested-name-specifier
                (i.e. 'namespace_name::' and/or 'class_name::').
            arg_decls: List of argument declarations.
            return_type: Return type.
            const: True makes this a const function.
            override: True makes this an overriding function.
            member_initializer_list: List of member initializers.
        """
        assert isinstance(const, bool)
        assert isinstance(override, bool)

        template_format = ("{return_type} {name}({arg_decls})"
                           "{const}"
                           "{override}"
                           "{member_initializer_list} {{\n"
                           "  {body}\n"
                           "}}")

        # The bool flags are rebound to their textual C++ representations.
        const = " const" if const else ""
        override = " override" if override else ""

        if member_initializer_list is None:
            member_initializer_list = ""
        else:
            # " : a(x), b(y)" -- only emitted when initializers are given.
            initializers = ListNode(
                _to_node_list(member_initializer_list, TextNode),
                separator=", ")
            member_initializer_list = ListNode([TextNode(" : "), initializers],
                                               separator="")

        self._body_node = SymbolScopeNode()

        CompositeNode.__init__(
            self,
            template_format,
            name=name,
            arg_decls=ListNode(
                _to_node_list(arg_decls, TextNode), separator=", "),
            return_type=return_type,
            const=const,
            override=override,
            member_initializer_list=member_initializer_list,
            body=self._body_node)

    @property
    def body(self):
        # The function body scope; callers append statements to this.
        return self._body_node
def _to_conditional_node(cond):
    """Coerces *cond* (a CodeNode, CodeGenExpr, or str) into a code node."""
    if isinstance(cond, CodeNode):
        return cond
    if isinstance(cond, CodeGenExpr):
        return TextNode(cond.to_text())
    if isinstance(cond, str):
        return TextNode(cond)
    assert False
def _to_node_list(iterable, constructor):
    """Wraps each non-CodeNode element of *iterable* via *constructor*.

    Returns a list rather than a lazy `map` object so the result can be
    iterated more than once under Python 3 (where `map` is one-shot).
    """
    return [x if isinstance(x, CodeNode) else constructor(x)
            for x in iterable]
def _to_symbol_scope_node(node, likeliness):
    """Coerces *node* (a CodeNode, or a list/tuple of CodeNodes) into a
    SymbolScopeNode tagged with *likeliness*.
    """
    if isinstance(node, SymbolScopeNode):
        pass
    elif isinstance(node, CodeNode):
        node = SymbolScopeNode([node])
    elif isinstance(node, (list, tuple)):
        node = SymbolScopeNode(node)
    else:
        assert False
    node.set_likeliness(likeliness)
    return node
| {"/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py", "/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py"], "/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py": ["/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py"]} |
73,640 | isabelmgao/LSA-Election-2016-Tweets | refs/heads/master | /stream_twitter.py | import numpy as np
import pandas as pd
import tweepy
import matplotlib.pyplot as plt
import pymongo
import ipywidgets as wgt
from IPython.display import display
from sklearn.feature_extraction.text import CountVectorizer
import re
from datetime import datetime
# %matplotlib inline
# Twitter API credentials -- the "xxx" placeholders must be replaced with
# real keys before running.
api_key = "xxx"
api_secret = "xxx"
access_token = "xxx"
access_token_secret = "xxx"

auth = tweepy.OAuthHandler(api_key, api_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

# Tweets are stored in the local MongoDB database "tweets",
# collection "StreamingAPITweets".
col = pymongo.MongoClient()["tweets"]["StreamingAPITweets"]
# NOTE(review): bare expression -- the count is computed and discarded
# (likely a leftover from an interactive notebook cell).
col.count()
class MyStreamListener(tweepy.StreamListener):
    """Tweepy stream listener that stores incoming tweets in MongoDB and
    disconnects after `max_tweets` statuses.
    """

    counter = 0  # class-level default; shadowed per-instance in __init__

    def __init__(self, max_tweets=1000, *args, **kwargs):
        # Bug fix: the original never called the base-class initializer, so
        # attributes tweepy.StreamListener sets up (e.g. self.api) were
        # missing.
        super(MyStreamListener, self).__init__(*args, **kwargs)
        self.max_tweets = max_tweets
        self.counter = 0

    def on_connect(self):
        # Reset state every time the stream (re)connects.
        self.counter = 0
        self.start_time = datetime.now()

    def on_status(self, status):
        # Increment counter
        self.counter += 1
        # Bug fix: `print status` was Python 2 statement syntax while the
        # rest of the file uses the print() function.
        print(status)
        # Store tweet to MongoDB
        col.insert_one(status._json)
        if self.counter >= self.max_tweets:
            # NOTE(review): relies on the module-level `myStream` global
            # defined after this class; returning False from on_status is
            # the tweepy-idiomatic way to disconnect -- confirm and simplify.
            myStream.disconnect()
            print("Finished")
myStreamListener = MyStreamListener(max_tweets=10)
myStream = tweepy.Stream(auth=api.auth, listener=myStreamListener)

# Election-related keywords/hashtags to track on the streaming API.
keywords = ["Trump",
            "Clinton",
            "imwithher",
            "election2016",
            "MyVote2016",
            "ElectionNight",
            "CrookedHillary",
            "nastywomen",
            "DonaldTrump",
            "MAGA",
            "MakeAmericaGreatAgain",
            ]

try:
    myStream.filter(track=keywords)
except Exception as exc:
    # Bug fix: the original `except:` clause contained only commented-out
    # lines, which is a SyntaxError (an indented block is required).  The
    # bare except is also narrowed to Exception and the failure is made
    # visible instead of being silently swallowed.
    print("Streaming error: %s" % exc)
73,641 | isabelmgao/LSA-Election-2016-Tweets | refs/heads/master | /LSA.py | import nltk
from nltk.corpus import stopwords
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
#thank you Mike Bernico for the tutorial
def lsa(tweet_list):
    """Prints the top terms of the first 10 LSA "concepts" of *tweet_list*.

    Pipeline: TF-IDF over unigrams..trigrams with English stop words
    removed, followed by truncated SVD; for each SVD component the ten
    highest-weighted terms are printed.  (Python 2 print syntax.)
    """
    stopset = set(stopwords.words('english'))
    stopset = set(stopwords.words('english')) if False else stopset  # NOTE(review): no-op guard removed? keep original below
    vectorizer = TfidfVectorizer(stop_words = stopset, use_idf=True, ngram_range = (1,3))
    X = vectorizer.fit_transform(tweet_list)
    # NOTE(review): the local name `lsa` shadows this function's own name.
    lsa = TruncatedSVD(n_components = 10, n_iter = 100)
    lsa.fit(X)
    terms = vectorizer.get_feature_names()
    # sys.exit()
    for i, comp in enumerate(lsa.components_):
        termsIncomp = zip(terms, comp)
        # Top 10 terms by component weight, descending.
        sortedTerms = sorted(termsIncomp, key=lambda x:x[1], reverse=True) [:10]
        print "Concept %d:" %i
        for term in sortedTerms:
            print term[0]
        print " "
73,642 | isabelmgao/LSA-Election-2016-Tweets | refs/heads/master | /main_lsa.py | import pymongo
import json
from pymongo import MongoClient
import sys
from pprint import pprint
import json
from bson import ObjectId
import string
import unicodedata
import LSA
import cPickle as pick
# with open("data_pickled_1.pickle",'r')as f:
# tweets = pick.load(f)
# with open("data_pickled_2.pickle",'r')as f:
# tweets = pick.load(f)
# with open("data_pickled_3.pickle",'r')as f:
# tweets = pick.load(f)
# with open("data_pickled_4.pickle",'r')as f:
# tweets = pick.load(f)
# Load the pre-pickled tweets for the fifth time interval (the
# commented-out blocks above load intervals 1-4 instead).
with open("data_pickled_5.pickle",'r')as f:
    tweets = pick.load(f)

# Print the top LSA concepts for this interval.
LSA.lsa(tweets)
73,643 | isabelmgao/LSA-Election-2016-Tweets | refs/heads/master | /sentiment_analysis/Clinton week 1.py | #Sentiment Analysis of keyword "Clinton" from 2016-11-6 to 2016-10-12
import tweepy
from textblob import TextBlob
import getoldtweets
import numpy as np
import operator
# Twitter API credentials -- replace the placeholder strings before running.
consumer_key= 'Consumer key from twitter'
consumer_secret= 'Consumer secret from twitter'
access_token='Access token from twitter'
access_token_secret='Access token secret from twitter'

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

# Search keyword for this script.
topic_name = "Clinton"
#To find data for week 1
since_date = "2016-11-6"
until_date = "2016-11-12"
def get_label(analysis, threshold = 0):
    """Classifies a TextBlob sentiment result as 'Positive' or 'Negative'.

    A polarity strictly above *threshold* counts as positive; everything
    else (including exactly the threshold) is negative.
    """
    return 'Positive' if analysis.sentiment[0] > threshold else 'Negative'
all_polarities = dict()

# NOTE(review): the original looped over an undefined name `tweets`, mixed
# up `topic_name`/`topic`, and referenced PatternTagger/PatternAnalyzer
# without importing them (NameError on every path).  Reconstructed to
# iterate the configured topic(s) using TextBlob's default analyzer --
# confirm this matches the authors' intent.
topics = [topic_name]
for topic in topics:
    this_topic_polarities = []
    this_topic_tweets = api.search(
        q=topic, count=10000, since=since_date, until=until_date)
    with open('%s_tweets.csv' % topic, 'wb') as this_topic_file:
        this_topic_file.write('tweet,sentiment_label\n')
        for tweet in this_topic_tweets:
            analysis = TextBlob(tweet.text)
            this_topic_polarities.append(analysis.sentiment[0])
            this_topic_file.write(
                '%s,%s\n' % (tweet.text.encode('utf8'), get_label(analysis)))
    # Mean polarity per topic over all fetched tweets.
    all_polarities[topic] = np.mean(this_topic_polarities)
73,644 | isabelmgao/LSA-Election-2016-Tweets | refs/heads/master | /process_partisan.py | import pymongo
import json
from pymongo import MongoClient
import sys
from pprint import pprint
import json
from bson import ObjectId
import string
import unicodedata
import LSA
import cPickle as pick
# with open("data_pickled_1.pickle",'r')as f:
# tweets = pick.load(f)
# with open("data_pickled_2.pickle",'r')as f:
# tweets = pick.load(f)
# with open("data_pickled_3.pickle",'r')as f:
# tweets = pick.load(f)
# with open("data_pickled_4.pickle",'r')as f:
# tweets = pick.load(f)
# Load the tweets captured during the fifth (final) time interval.
with open("data_pickled_5.pickle", 'r') as f:
    tweets = pick.load(f)

# LSA.lsa(tweets)

# Keep only tweets containing a pro-Trump hashtag (tweet text has already
# been lower-cased by the preprocessing pipeline).
t1 = "CrookedHillary"
t2 = "MAGA"
t3 = "MakeAmericaGreatAgain"
trump_tags = (t1.lower(), t2.lower(), t3.lower())
trump_list = [tweet for tweet in tweets
              if any(tag in tweet for tag in trump_tags)]

LSA.lsa(trump_list)

# Clinton-side variant, kept for reference:
# clinton_list = []
# c1 = "imwithher"
# c2 = "nastywomen"
# for each in tweets:
#     if c1.lower() in each or c2.lower() in each:
#         clinton_list.append(each)
# LSA.lsa(clinton_list)
73,645 | isabelmgao/LSA-Election-2016-Tweets | refs/heads/master | /main_final.py | import pymongo
import json
from pymongo import MongoClient
import sys
from pprint import pprint
import json
from bson import ObjectId
import string
import unicodedata
import LSA
import cPickle as pick
collection = pymongo.MongoClient()["tweets"]["StreamingAPITweets"]
objects = collection.find()

# (timestamp_ms, normalized_text) pairs for every well-formed tweet.
data_twitter = []
for each in objects:
    try:
        # Normalize: drop newlines, transliterate to ASCII, strip
        # punctuation, lower-case.
        strip_newline = each['text'].replace('\n', ' ')
        convert_string = unicodedata.normalize('NFKD', strip_newline).encode('ascii','ignore')
        remove_punc = convert_string.translate(None, string.punctuation)
        data_twitter.append((int(each['timestamp_ms']), remove_punc.lower().strip()))
    except Exception:
        # Skip malformed records (e.g. delete notices without 'text').
        pass

data_sorted = sorted(data_twitter, key=lambda x: x[0])

# Bug fix: the original called json.dump(tweet_list, outfile) with
# `outfile` undefined (its `with open(...)` line was commented out) --
# removed.  The sorted data is persisted with pickle, as the rest of the
# pipeline expects.
with open("data_pickled.pickle", 'w') as f:
    pick.dump(data_sorted, f)

# Partition tweets into five election-night intervals by epoch-millisecond
# boundaries (presumably chosen around key moments -- confirm with authors).
# Bug fixes vs. the original: it iterated an undefined `data_persist`
# (should be `data_sorted`) and called .pop(i) while iterating by index,
# which skips elements and eventually raises IndexError.
int1 = []
int2 = []
int3 = []
int4 = []
int5 = []
for timestamp, text in data_sorted:
    if timestamp < 1478662847222:
        continue  # before the first interval; the original dropped these too
    if timestamp <= 1478670255733:
        int1.append(text)
    elif timestamp <= 1478697167163:
        int2.append(text)
    elif timestamp <= 1478709600000:
        int3.append(text)
    elif timestamp <= 1478711940000:
        int4.append(text)
    else:
        int5.append(text)

with open("data_pickled_1.pickle", 'w') as f:
    pick.dump(int1, f)
with open("data_pickled_2.pickle", 'w') as f:
    pick.dump(int2, f)
with open("data_pickled_3.pickle", 'w') as f:
    pick.dump(int3, f)
with open("data_pickled_4.pickle", 'w') as f:
    pick.dump(int4, f)
with open("data_pickled_5.pickle", 'w') as f:
    pick.dump(int5, f)
73,655 | makdenis/labs | refs/heads/master | /lab5_bd/lab6/ex1.py | import MySQLdb
# Открываем соединение
# Open the database connection
db = MySQLdb.connect(
    host="localhost",
    user="root",
    passwd="1",
    db="zakaz"
)
# Force UTF-8 on the connection so Cyrillic data round-trips correctly.
db.set_character_set('utf8')
# Get a cursor for working with the database
c = db.cursor()
c.execute('SET NAMES utf8;')
c.execute('SET CHARACTER SET utf8;')
c.execute('SET character_set_connection=utf8;')
# Perform the insert (parameterized -- values are escaped by the driver)
c.execute("INSERT INTO zakaz (name, usluga) VALUES (%s, %s);", ('Книга', 'Описание книги'))
# Commit the changes
db.commit()
# Run the select
c.execute("SELECT * FROM zakaz;")
# Fetch all returned rows #
entries = c.fetchall()
# And print them
for e in entries:
    print(e)
# NOTE(review): the commented-out call below contains a Cyrillic 'с' in
# 'сlose'; use c.close() if re-enabling it.
#c.сlose() # Close the cursor
db.close() # Close the connection
73,656 | makdenis/labs | refs/heads/master | /lab6_djangoforms/lab5/urls.py | """lab5 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from labApp.views import *
# URL routing table: maps request paths to view callables / class-based views.
urlpatterns = [
    url(r'^$', home),
    url(r'^prodact/$', ProdactsView.as_view(), name='prodacts_url'),
    url(r'^customers/$', CustomerView.as_view(), name='customers_url'),
    # Hand-validated registration/login endpoints (the *_form variants)...
    url(r'^registration_form/$', registration_form, name='registration_form'),
    # ...and their django.forms counterparts.
    url(r'^registration/$', registration, name='registration'),
    url(r'^authorization_form/$', authorization_form, name='authorization_form'),
    url(r'^authorization/$', authorization, name='authorization'),
    url(r'^logout$', logout_view, name='logout'),
    url(r'^success_authorization_form$', success_authorization_form, name='success_authorization_form'),
    url(r'^success_authorization$', success_authorization, name='success_authorization'),
    url(r'^admin/', include(admin.site.urls))
]
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,657 | makdenis/labs | refs/heads/master | /lab5_bd/django_project/labApp/models.py | from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
class Customer(models.Model):
    """Customer profile extending the built-in auth User one-to-one."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    customer_name = models.CharField(max_length=30)
    email = models.EmailField(max_length=75)
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    birthday = models.DateField()
    sex = models.CharField(max_length=1)  # single-char code -- presumably 'M'/'F'; confirm with callers
class zakaz(models.Model):
    """An order ('zakaz'); ``Usluga`` holds a free-text service description."""
    name = models.CharField(max_length=30)
    Usluga = models.CharField(max_length=255, null=True)
    #price = models.FloatField(max_length=10)
    objects = models.Manager()  # explicit default manager
class Usluga(models.Model):
    """Join record: which Customer placed which zakaz, and on what date."""
    user = models.ForeignKey(Customer, on_delete=models.CASCADE)
    user_zakaz = models.ForeignKey(zakaz, on_delete=models.CASCADE)
    date = models.DateField()
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,658 | makdenis/labs | refs/heads/master | /lab5_bd/django_project/labApp/apps.py | from django.apps import AppConfig
class Lab5AppConfig(AppConfig):
    """Django app configuration for the 'labApp' application."""
    name = 'labApp'
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,659 | makdenis/labs | refs/heads/master | /django/Test/views.py | from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import View
# Create your views here.
def var(request):
    """Render var.html with a single demo template variable."""
    context = {'myvariable': 'kek'}
    return render(request, 'var.html', context)
# class ExampleClassBased():
# def get(self, request):
# return HttpResponse('response from class based')
class OrdersView(View):
    """Demo class-based view that lists three hard-coded orders."""

    def get(self, request):
        titles = ('Первый заказ', 'Второй заказ', 'Третий заказ')
        orders = [
            {'title': title, 'id': number}
            for number, title in enumerate(titles, start=1)
        ]
        return render(request, 'ex.html', {'orders': orders})
class OrderView(View):
    """Demo view rendering a single order identified by its id."""

    def get(self, request, id):
        context = {'order': {'id': id}}
        return render(request, 'order.html', context)
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,660 | makdenis/labs | refs/heads/master | /lab1/arr_algs.py | l = [2, 3, 5, 1, 0, 5]
# Print the smallest element of the module-level list `l`.
print(min(l))
print(sum(l) / len(l)) | {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,661 | makdenis/labs | refs/heads/master | /lab6_djangoforms/labApp/models.py | from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
class Customer(models.Model):
    """Customer profile linked one-to-one to the auth User.

    FIX: ``__unicode__`` is only honoured by Python 2 / very old Django;
    this project uses ``on_delete=`` keyword ForeignKeys (Django 2+/Py3),
    where the admin calls ``__str__``. Alias it so objects display their
    customer name instead of "Customer object".
    """
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    customer_name = models.CharField(max_length=30)
    email = models.EmailField(max_length=75)
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)

    def __unicode__(self):
        return self.customer_name

    # Python 3 / modern Django equivalent of __unicode__.
    __str__ = __unicode__
class zakaz(models.Model):
    """A product/order item ('zakaz') with a free-text service field.

    FIX: ``__unicode__`` is ignored on Python 3 / modern Django; alias
    ``__str__`` so the admin shows the item name.
    """
    name = models.CharField(max_length=30)
    Usluga = models.CharField(max_length=255, null=True)
    # NOTE(review): max_length is not a valid FloatField option and is
    # ignored by Django; consider DecimalField(max_digits=..., decimal_places=...).
    price = models.FloatField(max_length=10)

    def __unicode__(self):
        return self.name

    # Python 3 / modern Django equivalent of __unicode__.
    __str__ = __unicode__
class Usluga(models.Model):
    """Join record: which Customer ordered which zakaz, and on what date."""
    user = models.ForeignKey(Customer, on_delete=models.CASCADE)
    user_zakaz = models.ForeignKey(zakaz, on_delete=models.CASCADE)
    date = models.DateField()
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,662 | makdenis/labs | refs/heads/master | /lab3/librip/iterators.py | import itertools
# Итератор для удаления дубликатов
# Iterator adapter that removes duplicates.
class Unique(object):
    """Yield each item of ``items`` only once, preserving first-seen order.

    Parameters
    ----------
    items : iterable
        Any iterable (list, tuple, generator, ...).
    ignore_case : bool, via **kwargs (default False)
        When True, strings differing only in case count as duplicates;
        the first spelling encountered is the one yielded.

    Fixes over the original:
    * ``iter(items)`` is applied unconditionally -- ``iter()`` is a no-op on
      iterators, so the ``isinstance(items, list)`` check (which silently
      made tuples yield nothing) is unnecessary.
    * the broad ``except Exception`` that converted *any* error from the
      source iterator into StopIteration (masking real bugs) is removed.
    * seen keys are kept in a set for O(1) membership, with a list
      fallback (``self.dupl``) for unhashable items.
    """

    def __init__(self, items, **kwargs):
        self.items = iter(items)
        self.ignore_case = kwargs.get('ignore_case', False)
        self.seen = set()   # hashable keys already yielded
        self.dupl = []      # fallback store for unhashable keys

    def __next__(self):
        for cur in self.items:
            # Case-fold only strings, and only when requested.
            if isinstance(cur, str) and self.ignore_case:
                key = cur.upper()
            else:
                key = cur
            try:
                if key in self.seen:
                    continue
                self.seen.add(key)
            except TypeError:
                # Unhashable (e.g. a list): linear-scan fallback.
                if key in self.dupl:
                    continue
                self.dupl.append(key)
            return cur
        raise StopIteration

    def __iter__(self):
        return self
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,663 | makdenis/labs | refs/heads/master | /lab6_djangoforms/labApp/admin.py | from django.contrib import admin
from .models import *
# Register your models here.
@admin.register(Customer)
class CustomerAdmin(admin.ModelAdmin):
    """Admin changelist for customers: login, full name and order count."""
    #fields = ('first_name', 'last_name')
    list_display = ('username','full_name','count_of_orders',)
    list_filter = ('first_name',)
    search_fields = ['last_name', 'first_name']
    def full_name(self, obj):
        # "Lastname Firstname" column.
        return "{} {}".format(obj.last_name, obj.first_name)
    def username(self, obj):
        # Login of the linked auth User.
        return "{}".format(obj.user.username)
    def count_of_orders(self, obj):
        # Number of Usluga rows referencing this customer.
        ord = Usluga.objects.filter(user=obj)
        return len(ord)
@admin.register(zakaz)
class ProdactAdmin(admin.ModelAdmin):
    """Admin registration for zakaz; custom columns are currently disabled."""
    empty_value_display = '-empty-'
    # list_display = ('prodact','price','description',)
    # list_filter = ('price',)
    # search_fields = ['name']
    #
    # def prodact(self, obj):
    #     return "{}".format(obj.prodact_name)
    #
    # def price(self, obj):
    #     return "{}".format(obj.price)
    #
    # def description(self, obj):
    #     return "{}".format(obj.description)
@admin.register(Usluga)
class OrderAdmin(admin.ModelAdmin):
    """Admin registration for Usluga; custom columns are currently disabled."""
    empty_value_display = '-empty-'
    # fields = ('username', 'prodact', 'date')
    # list_display = ('username', 'prodact', 'date')
    # list_filter = ('user_zakaz',)
    # search_fields = ['user_zakaz']
    #
    # def username(self, obj):
    #     return "{}".format(obj.user.user)
    #
    # def prodact(self, obj):
    #     return obj.prodact.prodact_name
    #
    # def date(self, obj):
    #     return "{}".format(obj.order_date)
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,664 | makdenis/labs | refs/heads/master | /lab2/main.py | import friends
import get_id
import datetime
import matplotlib.pyplot as plt
# Read a VK screen name from stdin and resolve it to a numeric user id.
name=input()
today = datetime.datetime.today()
id =get_id.get_id()
id=id.response_handler(id._get_data(name))
# Fetch the user's friends list via the VK API wrapper.
t=friends.friends()
t=t.response_handler(t._get_data(id))
a=[]
for i in t:
    # Friends who hide their birth date have no 'bdate' field at all.
    if ('bdate' not in i):
        continue
    # Only full dates ("D.M.YYYY") carry a year; short "D.M" ones are skipped.
    if (len(i['bdate']) > 5):
        # print(i)
        d = datetime.datetime.strptime(i['bdate'], "%d.%m.%Y")
        # t=datetime.timedelta(d.day)
        # NOTE(review): age is derived by slicing the first two characters of
        # the timedelta's string repr -- fragile for ages < 10 or > 99;
        # consider computing the year difference directly.
        y = int((str((today - d) / 365)[0:2]))
        a.append(y)
plt.hist(
    a, # friend ages collected above
    40 # number of histogram bins
)
plt.show() | {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,665 | makdenis/labs | refs/heads/master | /lab5_bd/django_project/labApp/views.py | from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import View
from django.views.generic import ListView
from .models import *
def home(request):
    """Render the home page with a simple header variable."""
    return render(request, 'home.html', context={'header': 'Home'})
class CustomerView(ListView):
    """Generic list of Customer rows rendered by customer_list.html."""
    model = Customer
    template_name = 'customer_list.html'
class ZakazView(ListView):
    """Generic list of zakaz rows; template iterates 'zakaz_list'."""
    model = zakaz
    template_name = 'zakazs.html'
    context_object_name = 'zakaz_list'
class OrderView(ListView):
    """Generic list of Usluga rows rendered by order_list.html."""
    model = Usluga
    template_name = 'order_list.html'
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,666 | makdenis/labs | refs/heads/master | /lab3/ex_5.py | from time import sleep
from librip.ctxmngrs import timer
# Demo: time a 2.5 s sleep with the custom `timer` context manager from librip.
with timer():
    sleep(2.5)
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,667 | makdenis/labs | refs/heads/master | /lab1/dict_algs.py | Ivan ={
"name": "ivan",
"age": 34,
"children":
[{ "name": "vasja", "age": 12, },
{ "name": "petja", "age": 10, }],
}
# Second employee record, same structure as the `Ivan` dict defined above.
darja = {
"name": "darja",
"age": 41,
"children": [{ "name": "kirill", "age": 21, },
{ "name": "pavel", "age": 15,}]
}
# Print an employee's name once for every child older than 18.
emps = [Ivan, darja]
for i in emps:
    for j in range(len(i["children"])):
        if i["children"][j]["age"] > 18:
print(i["name"]) | {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,668 | makdenis/labs | refs/heads/master | /lab1/str_algs.py | str = "Hello, world"
# Reverse the module-level string `str` (NOTE: that name shadows the builtin).
str2 = str[::-1]
print(str2) | {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,669 | makdenis/labs | refs/heads/master | /lab3/librip/decorators.py |
def print2(func, res):
    """Pretty-print ``func``'s name followed by its result ``res``.

    Lists print one element per line; dicts print ``key=value`` lines;
    anything else is printed directly. Returns None.

    Fix: the original used list comprehensions purely for their side
    effects (``[print(i) for i in res]``), allocating throwaway lists;
    plain for-loops are the idiomatic, allocation-free equivalent.
    """
    print(func.__name__)
    kind = type(res).__name__
    if kind == 'list':
        for item in res:
            print(item)
    elif kind == 'dict':
        for k, v in res.items():
            print('{0}={1}'.format(k, v))
    else:
        print(res)
def print_result(func):
    """Decorator: after each call, print the function's name and result via
    ``print2``; the wrapped function's return value is passed through.

    Fix: the original wrapper did not preserve ``__name__``/``__doc__`` of
    the wrapped function, so decorated functions all reported 'decfunc'.
    ``functools.wraps`` (imported locally to keep the fix self-contained)
    restores the metadata.
    """
    from functools import wraps

    @wraps(func)
    def decfunc(*args, **kwargs):
        res = func(*args, **kwargs)
        print2(func, res)
        return res
    return decfunc
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,670 | makdenis/labs | refs/heads/master | /lab6_djangoforms/labApp/views.py | from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.views.generic import View
from django.views.generic import ListView
from .models import *
from django import forms
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
def home(request):
    """Render the landing page; the header text is a lorem-ipsum placeholder."""
    header_text = '"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."'
    return render(request, 'home.html', context={'header': header_text})
class CustomerView(ListView):
    """Generic list of Customer rows rendered by customer_list.html."""
    model = Customer
    template_name = 'customer_list.html'
class ProdactsView(ListView):
    """Generic list of zakaz rows; template iterates 'zakaz_list'."""
    model = zakaz
    template_name = 'zakazs.html'
    context_object_name = 'zakaz_list'
class OrderView(ListView):
    """Generic list of Usluga rows rendered by order_list.html."""
    model = Usluga
    template_name = 'order_list.html'
# представления для веб-форм
def registration_form(request):
    """Hand-rolled registration view.

    Validates the POSTed fields manually; on success creates a User plus a
    linked Customer and redirects to the login page, otherwise re-renders
    the form with per-field error messages.

    BUG FIX: the password branch previously re-tested ``username`` instead
    of ``password``, so a missing password fell through to ``len(None)``
    and raised TypeError. (A stray debug ``print`` was also removed.)
    """
    errors = {}
    request.encoding = 'utf-8'
    if request.method == 'POST':
        username = request.POST.get('username')
        if not username:
            errors['uname']='Введите логин'
        elif len(username) < 5:
            errors['uname']='Длина логина должна быть более 5 символов'
        if User.objects.filter(username=username).exists():
            errors['uname']='Такой логин уже занят'
        password = request.POST.get('password')
        if not password:  # was `if not username:` -- the bug
            errors['psw']='Введите пароль'
        elif len(password) < 6:
            errors['psw']='Длина пароля должна быть более 6 символов'
        password2 = request.POST.get('password2')
        if password != password2:
            errors['psw2']='Пароли не совпадают'
        email = request.POST.get('email')
        if not email:
            errors['email'] = 'Введите email'
        last_name = request.POST.get('last_name')
        if not last_name:
            errors['lname'] = 'Введите фамилию'
        first_name = request.POST.get('first_name')
        if not first_name:
            errors['fname'] = 'Введите имя'
        if not errors:
            # Create the auth user, then the customer profile linked to it.
            user = User.objects.create_user(username, email, password)
            cust = Customer()
            cust.user = user
            #cust.customer_name = username
            cust.email = email
            cust.last_name = last_name
            cust.first_name = first_name
            cust.save()
            return HttpResponseRedirect('/authorization')
        else:
            # Re-render with the submitted values so the user need not retype.
            context = {'errors': errors, 'username': username, 'email': email, 'last_name': last_name,
                       'first_name': first_name}
            return render(request, 'registration_form.html', context)
    return render(request, 'registration_form.html', {'errors': errors })
# форма регистрации
# Sign-up form; field declaration order controls rendering order.
class RegistrationForm(forms.Form):
    """Django form backing the /registration view."""
    username = forms.CharField(min_length=5,label='Логин')
    password = forms.CharField(min_length=8,widget=forms.PasswordInput, label='Пароль')
    password2 = forms.CharField(min_length=8, widget=forms.PasswordInput, label='Повторите ввод')
    email = forms.EmailField(label='Email')
    last_name = forms.CharField(label='Фамилия')
    first_name = forms.CharField(label='Имя')
class AuthorizationForm(forms.Form):
    """Login form backing the /authorization view."""
    username = forms.CharField(label='Логин')
    password = forms.CharField(widget=forms.PasswordInput, label='Пароль')
# регистрация
def registration(request):
    """Django-forms registration view.

    Validates RegistrationForm, adds cross-field errors (password mismatch,
    duplicate login) and, when everything passes, creates the auth User and
    a linked Customer, then redirects to /authorization. On failure the
    bound form (with its errors) is re-rendered.

    BUG FIX: the original indexed ``form.cleaned_data['password']`` etc.
    unconditionally; when basic validation failed those keys are absent and
    the view crashed with KeyError. ``.get()`` makes the extra checks safe.
    (Debug ``print`` removed.)
    """
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        is_val = form.is_valid()
        data = form.cleaned_data  # may be partial when the form is invalid
        if data.get('password') != data.get('password2'):
            is_val = False
            form.add_error('password2', ['Пароли должны совпадать'])
        if data.get('username') and User.objects.filter(username=data['username']).exists():
            form.add_error('username', ['Такой логин уже занят'])
            is_val = False
        if is_val:
            user = User.objects.create_user(data['username'], data['email'], data['password'])
            cust = Customer()
            cust.user = user
            cust.first_name = data['first_name']
            cust.last_name = data['last_name']
            cust.save()
            return HttpResponseRedirect('/authorization')
    else:
        form = RegistrationForm()
    return render(request, 'registration.html', {'form': form})
# авторизация вручную
def authorization_form(request):
    """Hand-rolled login view.

    Validates the POSTed credentials, authenticates and logs the user in
    (redirecting to the success page); otherwise re-renders the form with
    error messages.

    Improvements: the auth backend is only queried once the fields pass the
    basic checks (the original called ``authenticate`` even with missing
    values), and the debug ``print`` calls were removed. The resulting
    ``errors`` dict for every input is unchanged.
    """
    errors = {}
    if request.method == 'POST':
        username = request.POST.get('username')
        if not username:
            errors['uname']='Введите логин'
        elif len(username) < 5:
            errors['uname']='Длина логина должна быть не меньше 5 символов'
        password = request.POST.get('password')
        if not password:
            errors['psw']='Введите пароль'
        elif len(password) < 8:
            errors['psw']='Длина пароля должна быть не меньше 8 символов'
        user = None
        if not errors:
            user = authenticate(request, username=username, password=password)
            if user is None:
                errors['login'] = 'Логин или пароль введены неверно'
        if not errors:
            login(request, user)
            return HttpResponseRedirect('/success_authorization_form')
        # Fall through and re-render with the collected errors.
        return render(request, 'authorization_form.html', {'errors': errors})
    return render(request, 'authorization_form.html', {'errors':errors})
# авторизация django
def authorization(request):
    """Django-forms login view.

    BUG FIX: the original read ``form.cleaned_data`` *before* calling
    ``form.is_valid()``; ``cleaned_data`` does not exist until validation
    runs, so every POST raised AttributeError. Validate first, then read.
    (Debug ``print`` calls removed.)
    """
    if request.method == 'POST':
        form = AuthorizationForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            user = authenticate(request, username=data['username'], password=data['password'])
            if user is not None:
                login(request, user)
                return HttpResponseRedirect('/success_authorization')
            else:
                form.add_error('username', ['Неверный логин или пароль'])
    else:
        form = AuthorizationForm()
    return render(request, 'authorization.html', {'form': form})
# успешная авторизация вручную
def success_authorization_form(request):
    """Redirect home when logged in, otherwise back to the login page."""
    target = '/' if request.user.is_authenticated else '/authorization'
    return HttpResponseRedirect(target)
# успешная авторизация django
# Django-auth variant: @login_required bounces anonymous users to /authorization.
@login_required(login_url='/authorization')
def success_authorization(request):
    # Authenticated users are simply sent to the home page.
    return HttpResponseRedirect('/')
def logout_view(request):
    # End the current session and return to the home page.
    logout(request)
    return HttpResponseRedirect('/')
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,671 | makdenis/labs | refs/heads/master | /lab2/get_id.py | import baseclass
import requests
class get_id(baseclass.BaseClient):
    """VK API client that resolves a screen name to a numeric user id."""
    BASE_URL = 'https://api.vk.com/method/'
    method = "users.get"
    def _get_data(self, name):
        #method="users.get"
        # NOTE(review): no access_token / API-version parameters are sent --
        # this relies on the legacy unauthenticated VK API; confirm it still works.
        response = requests.get(get_id.BASE_URL+get_id.method+'?user_ids=' + name).json()
        if('error' in response):
            print('error in user id or smth else, try again')
            raise SystemExit
        return response
    def response_handler(self,response):
        # Extract the numeric id ('uid' is the old-API field name).
        id=response["response"][0]["uid"]
return id | {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,672 | makdenis/labs | refs/heads/master | /lab5_bd/django_project/labApp/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-01 13:09
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for labApp -- do not edit by hand."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('username', models.CharField(max_length=30)),
                ('email', models.EmailField(max_length=75)),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=30)),
                ('birthday', models.DateField()),
                ('sex', models.CharField(max_length=1)),
                ('user_id', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order_date', models.DateField()),
            ],
        ),
        migrations.CreateModel(
            name='Prodact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('prodact_name', models.CharField(max_length=30)),
                ('description', models.CharField(max_length=255, null=True)),
                ('price', models.FloatField(max_length=10)),
                ('owner', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='order',
            name='prodact',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='labApp.Prodact'),
        ),
        migrations.AddField(
            model_name='order',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='labApp.Customer'),
        ),
    ]
| {"/lab6_djangoforms/labApp/admin.py": ["/lab6_djangoforms/labApp/models.py"], "/lab5_bd/django_project/labApp/views.py": ["/lab5_bd/django_project/labApp/models.py"], "/lab6_djangoforms/labApp/views.py": ["/lab6_djangoforms/labApp/models.py"]} |
73,676 | FriesW/TeamJellyBean | refs/heads/master | /Code/Viewer/GlobalServer.py | from .ViewServer import ViewServer
# Module-level singleton ViewServer; the functions below are thin
# module-level conveniences that delegate straight to it.
__server = ViewServer()
def new_view(name, *args, **kwargs):
    # Create/register a named image view on the shared server.
    return __server.new_view(name, *args, **kwargs)
def new_int(name, *args, **kwargs):
    # Create/register a named integer parameter.
    return __server.new_int(name, *args, **kwargs)
def new_float(name, *args, **kwargs):
    # Create/register a named float parameter.
    return __server.new_float(name, *args, **kwargs)
def new_string(name, *args, **kwargs):
    # Create/register a named string parameter.
    return __server.new_string(name, *args, **kwargs)
def new_event(name, *args, **kwargs):
    # Create/register a named event trigger.
    return __server.new_event(name, *args, **kwargs)
def new_bool(name, *args, **kwargs):
return __server.new_bool(name, *args, **kwargs) | {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,677 | FriesW/TeamJellyBean | refs/heads/master | /Code/Util.py | import cv2
import numpy as np
import Viewer.GlobalServer as GS
import os
import re
import time
def p_dist(x1, y1, x2, y2):
    """Euclidean distance between points (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return (dx * dx + dy * dy) ** 0.5
def crop(img, x, y, w, h):
return img[y:y+h, x:x+w]
def save(path, img):
    """Write ``img`` into directory ``path`` as the next numbered PNG.

    Scans existing files matching NNNNN.png, picks max+1 (zero-padded to
    five digits) so previous frames are never overwritten, and saves via
    cv2.imwrite. Creates ``path`` if it does not exist.

    Fixes: raw string for the regex (``'\\d'`` in a plain string is a
    DeprecationWarning/SyntaxWarning on modern Python) and the
    walk-then-break idiom replaced with ``next(os.walk(...))`` which keeps
    the files-only semantics in one line.
    """
    if not os.path.exists(path):
        os.makedirs(path)
    # Non-recursive listing of regular files only.
    files = next(os.walk(path))[2]
    max_num = 0
    for f in files:
        s = re.findall(r'\d+\.png$', f)
        if s:
            max_num = max(max_num, int(s[0][:-4]))
    out_path = os.path.join(path, str(max_num + 1).zfill(5) + '.png')
    print('Saving image:', out_path)
    cv2.imwrite(out_path, img)
class Timer:
    """Publishes the elapsed milliseconds between cycle() calls as a
    read-only integer parameter on the global view server (GS)."""
    def __init__(self, name='Timer', hidden=False):
        self.__l = GS.new_int('ms '+name)
        self.__l.set_hidden(hidden)
        self.__l.set_editable(False)
        # Timestamp of the previous cycle (or of construction).
        self.__lt = time.time()
    def cycle(self):
        # Report time since the last cycle() in whole milliseconds.
        tt = time.time()
        self.__l.set(int( (tt - self.__lt) * 1000 ))
        self.__lt = tt
class Exposure:
    """Publishes the overall mean and per-channel means of an image as
    read-only integer parameters on the global view server (GS).
    Channel order follows OpenCV's BGR convention (see measure())."""
    def __init__(self, name='Exposure', hidden=False):
        self.__t = GS.new_int(name+' Lum')
        self.__t.set_hidden(hidden)
        self.__t.set_editable(False)
        self.__r = GS.new_int(name+' R')
        self.__r.set_hidden(hidden)
        self.__r.set_editable(False)
        self.__g = GS.new_int(name+' G')
        self.__g.set_hidden(hidden)
        self.__g.set_editable(False)
        self.__b = GS.new_int(name+' B')
        self.__b.set_hidden(hidden)
        self.__b.set_editable(False)
    def measure(self, img):
        # Overall mean across all pixels and channels.
        self.__t.set( np.average(img) )
        #row_avg = np.average(img, 1)
        #rgb_avg = np.average(row_avg, 0)
        #avg = list(rgb_avg)
        # Per-channel means; index 0 is blue because cv2 images are BGR.
        avg = np.mean(img, axis=(0,1))
        self.__b.set(avg[0])
        self.__g.set(avg[1])
        self.__r.set(avg[2])
class Crop:
    """Interactive crop rectangle: x/y/width/height are exposed as editable
    parameters on the global view server, and the cropped result is shown
    in its own view."""
    def __init__(self, name='Crop', x=0, y=0, w=100, h=100, hidden=False, editable=True):
        self.__name = name
        self.__x = GS.new_int(name+': crop X', min=0, initial=x)
        self.__x.set_hidden(hidden)
        self.__x.set_editable(editable)
        self.__w = GS.new_int(name+': crop width', min=1, initial=w)
        self.__w.set_hidden(hidden)
        self.__w.set_editable(editable)
        self.__y = GS.new_int(name+': crop Y', min=0, initial=y)
        self.__y.set_hidden(hidden)
        self.__y.set_editable(editable)
        self.__h = GS.new_int(name+': crop height', min=1, initial=h)
        self.__h.set_hidden(hidden)
        self.__h.set_editable(editable)
        self.__view = GS.new_view(name+': crop')
        self.__view.set_hidden(hidden)
    def crop(self, img):
        # Read the current (possibly user-edited) rectangle, crop, display.
        x = self.__x.get()
        y = self.__y.get()
        w = self.__w.get()
        h = self.__h.get()
        out = crop(img, x, y, w, h)
        self.__view.update(out)
        return out
class FindTray:
    """Locates the bean tray in a camera frame and returns it cut out and
    perspective-corrected to a fixed-size upright image.

    Pipeline: grayscale (+optional blur) -> binary threshold -> morphological
    close -> largest external contour -> 4-point polygon approximation ->
    perspective warp. Intermediate stages are published to GS views for
    tuning. find() returns (True, warped_img) or (False, None).
    """
    def __init__(self, name='FindTray', hidden=False, editable=True):
        #Changing these numbers may break BeanSlicer
        self.__hw_proportion = 5.33/6.62
        self.__out_w = int(1500)
        self.__out_h = int( self.__out_w * self.__hw_proportion )
        self.__v_process = GS.new_view(name+': pre-processed')
        self.__v_process.set_hidden(hidden)
        self.__v_thres = GS.new_view(name+': threshold')
        self.__v_thres.set_hidden(hidden)
        self.__v_morph = GS.new_view(name+': threshold morphology')
        self.__v_morph.set_hidden(hidden)
        self.__v_contours = GS.new_view(name+': contours')
        self.__v_contours.set_hidden(hidden)
        self.__v_warp = GS.new_view(name+': cutout tray')
        self.__blur = GS.new_int(name+': blur', min=-1, max=100, initial=-1, step=2)
        self.__blur.set_hidden(hidden)
        self.__blur.set_editable(editable)
        self.__t_lvl = GS.new_int(name+': threshold', min=0, max=255, initial=70)
        self.__t_lvl.set_hidden(hidden)
        self.__t_lvl.set_editable(editable)
        self.__c_approx = GS.new_float(name+': contour approximation level', min=0, initial=10)
        self.__c_approx.set_hidden(hidden)
        self.__c_approx.set_editable(editable)
    def find(self, img):
        orig = img.copy()
        #Preprocess
        img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # blur == -1 means "disabled"; otherwise it is the (odd) kernel size.
        if self.__blur.get() != -1:
            img = cv2.GaussianBlur(img, (self.__blur.get(),self.__blur.get()), 0)
        self.__v_process.update(img)
        #Threshold image
        rv, img = cv2.threshold(img, self.__t_lvl.get(), 255, cv2.THRESH_BINARY)
        self.__v_thres.update(img)
        #Fill gaps
        img = cv2.morphologyEx(img, cv2.MORPH_CLOSE,
            cv2.getStructuringElement(cv2.MORPH_RECT, (11,11)))
        self.__v_morph.update(img)
        #Find contours / edges
        img, contours, hierarchy = \
            cv2.findContours(img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        contour_img = orig.copy()
        cv2.drawContours(contour_img, contours, -1, (0, 255, 0), 2)
        #Failure if no contours
        if len(contours) == 0:
            return (False, None)
        #Find largest area contour
        candidate_contour = contours[0]
        for c in contours[1:]:
            if cv2.contourArea(candidate_contour) < cv2.contourArea(c):
                candidate_contour = c
        #Approximate contour
        approx = cv2.approxPolyDP(candidate_contour, self.__c_approx.get(), True)
        #Draw it
        cv2.drawContours(contour_img, [approx], -1, (255, 0, 0), 3)
        for p in approx:
            cv2.circle(contour_img, (p[0][0], p[0][1]), 8, (0, 0, 255), -1)
        #Show all contours
        self.__v_contours.update(contour_img)
        #Build new edge array, sorta hackish
        corners = []
        for p in approx:
            corners.append([float(p[0][0]), float(p[0][1])])
        #Failure if not 4 points (the tray must approximate a quadrilateral)
        if len(corners) != 4:
            return (False, None)
        #Find minimal rotation: corner closest to the image origin
        min_i = 0
        min_d = 100000
        for i in range(len(corners)):
            p = corners[i]
            d = p_dist(p[0], p[1], 0, 0)
            if d < min_d:
                min_i = i
                min_d = d
        #Rotate the corner list so orientation is stable across frames
        offset = 3
        slice = (min_i + offset) % len(corners)
        corners = corners[slice:] + corners[:slice]
        #Convert edge array
        corners = np.asarray(corners, np.float32)
        #Make output: warp the quadrilateral onto the fixed-size rectangle
        dest_size = np.array([ [0,0],[self.__out_w,0],[self.__out_w,self.__out_h],[0,self.__out_h] ],np.float32)
        transform = cv2.getPerspectiveTransform(corners, dest_size)
        img = cv2.warpPerspective(orig, transform, (self.__out_w,self.__out_h))
        img = cv2.flip(img, 1)
        self.__v_warp.update(img)
        return (True, img)
class BeanSlicer:
    """Given a rectified tray image (from FindTray), detects which of the
    known tray positions hold a bean and returns a fixed-size crop around
    each occupied position.

    slice() returns a list of ((x, y), crop_img) tuples; calibrate()
    re-learns the grid of tray-cell centers from a reference image.
    Intermediate stages are published to GS views for tuning.
    """
    def __init__(self, name='BeanSlicer', hidden=False, editable=True):
        self.__name = name
        self.__canny = GS.new_view(name+': canny')
        self.__canny.set_hidden(hidden)
        self.__morph = GS.new_view(name+': morphology')
        self.__morph.set_hidden(hidden)
        self.__pass_1 = GS.new_view(name+': pass one, contour')
        self.__pass_1.set_hidden(hidden)
        self.__pass_2 = GS.new_view(name+': pass two, contour')
        self.__pass_2.set_hidden(hidden)
        self.__fin = GS.new_view(name+': discovered')
        self.__fin.set_hidden(False)
        self.__res = GS.new_view(name+': result')
        self.__res.set_hidden(False)
        self.__blur = GS.new_int(name+': blur', initial=45, min=-1, max=100, step=2)
        self.__blur.set_hidden(hidden)
        self.__blur.set_editable(editable)
        self.__canny_l = GS.new_int(name+': canny low', initial=0, min=0, max=255)
        self.__canny_l.set_hidden(hidden)
        self.__canny_l.set_editable(editable)
        self.__canny_h = GS.new_int(name+': canny high', initial=35, min=0, max=255)
        self.__canny_h.set_hidden(hidden)
        self.__canny_h.set_editable(editable)
        self.__morph_size = GS.new_int(name+': morph amount', initial=15, min=1, step=2)
        self.__morph_size.set_hidden(hidden)
        self.__morph_size.set_editable(editable)
        self.__pass_1_width = GS.new_int(name+': pass one width', initial=40, min=1)
        self.__pass_1_width.set_hidden(hidden)
        self.__pass_1_width.set_editable(editable)
        self.__cutoff_distance = GS.new_float(name+': center cutoff distance', initial=20, min=0)
        self.__cutoff_distance.set_hidden(hidden)
        self.__cutoff_distance.set_editable(editable)
        self.__cutoff_area = GS.new_float(name+': cutoff area', initial=450, min=0)
        self.__cutoff_area.set_hidden(hidden)
        self.__cutoff_area.set_editable(editable)
        self.__bean_w = GS.new_int(name+': slice width', initial=150, min=0)
        self.__bean_w.set_hidden(hidden)
        self.__bean_w.set_editable(False)
        self.__bean_h = GS.new_int(name+': slice height' ,initial=110, min=0)
        self.__bean_h.set_hidden(hidden)
        self.__bean_h.set_editable(False)
        #Dependent on input image size!
        self.__CENTERS = [(145,1101),(488,1098),(317,1099),(659,1097),(1001,1097),(829,1096),(1343,1094),(1173,1094),(230,979),(914,977),(573,978),(1086,975),(744,977),(401,978),(1255,974),(143,860),(486,858),(658,858),(313,858),(1171,856),(1000,856),(1341,855),(828,857),(571,739),(400,739),(228,739),(1084,736),(912,737),(742,738),(1254,735),(314,620),(143,621),(827,617),(998,615),(657,618),(1169,615),(485,618),(1339,614),(227,500),(740,498),(910,496),(570,498),(398,498),(1082,496),(1254,495),(315,381),(485,379),(142,380),(995,377),(825,378),(655,378),(1337,374),(1167,376),(226,261),(738,259),(399,260),(568,258),(1251,255),(1080,255),(909,256),(141,140),(311,140),(823,138),(653,138),(482,138),(1336,134),(1166,136),(994,136)]
    def calibrate(self, img):
        # Re-learn the cell-center grid from a reference tray image.
        self.slice(img, True)
    def slice(self, img, calibrate = False):
        orig = img.copy()
        # Edge detection: optional blur, then Canny, then morphological close.
        if self.__blur.get() != -1:
            img = cv2.GaussianBlur(img, (self.__blur.get(),self.__blur.get()), 0)
        img = cv2.Canny(img, self.__canny_l.get(), self.__canny_h.get())
        self.__canny.update(img)
        kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (self.__morph_size.get(),self.__morph_size.get()))
        img = cv2.morphologyEx(img, cv2.MORPH_CLOSE, kernel)
        self.__morph.update(img)
        #Pass 1: redraw contours very thick so nearby fragments merge
        img, contours, hierarchy = cv2.findContours(img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
        img.fill(0)
        cv2.drawContours(img, contours, -1, (255, 255, 255), self.__pass_1_width.get())
        self.__pass_1.update(img)
        #Pass 2: re-extract the merged outlines
        img, contours, hierarchy = cv2.findContours(img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
        img.fill(0)
        cv2.drawContours(img, contours, -1, (255, 255, 255), 6)
        self.__pass_2.update(img)
        #Discard bad contours (no center, i.e. zero area moment)
        remove = []
        for i in range(len(contours)):
            m = cv2.moments(contours[i])
            if m['m00'] == 0.0:
                remove.append(i)
        contours = np.delete(contours, remove, 0)
        img = orig.copy()
        empty_spots = []
        calibrate_centers = []
        #Show all contours
        cv2.drawContours(img, contours, -1, (0, 255, 0), 3)
        #Build: classify each contour against the known cell centers
        for c in contours:
            m = cv2.moments(c)
            cX = int(m["m10"] / m["m00"])
            cY = int(m["m01"] / m["m00"])
            color = (0, 0, 255)
            #Test distances
            d_cutoff = self.__cutoff_distance.get()
            a_cutoff = self.__cutoff_area.get() * 10
            for ref in self.__CENTERS:
                #Is on a center?
                if p_dist(cX, cY, ref[0], ref[1]) < d_cutoff:
                    #Is it a dot? (small area = empty cell marker, not a bean)
                    if cv2.contourArea(c) < a_cutoff:
                        #cv2.drawContours(img, [c], -1, (255, 0, 0), 3)
                        cv2.fillPoly(img, [c], (255,0,0))
                        empty_spots.append(ref)
                        calibrate_centers.append((cX, cY))
                    else:
                        cv2.drawContours(img, [c], -1, (255, 255, 255), 3)
            #Draw
            cv2.circle(img, (cX, cY), 5, (0, 0, 255), -1)
        #Draw calibration grid for reference
        for ref in self.__CENTERS:
            cv2.circle(img, ref, 5, (75, 75, 75), -1)
        self.__fin.update(img)
        if calibrate:
            # Adopt the measured dot centers as the new grid.
            self.__CENTERS = calibrate_centers
        img = orig.copy()
        cropped = [] #[((x,y), IMG), ...]
        #Get bean locations: every known center that was not marked empty
        beans = self.__CENTERS[:]
        beans = [p for p in beans if p not in empty_spots]
        w = self.__bean_w.get()
        h = self.__bean_h.get()
        for b in beans:
            x = b[0] - w//2
            y = b[1] - h//2
            cropped.append( ((x,y),crop(orig, x, y, w, h)) )
            cv2.rectangle(img, (x, y), (x+w, y+h), (50, 50, 50), 2)
        self.__res.update(img)
        return cropped
| {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,678 | FriesW/TeamJellyBean | refs/heads/master | /Code/UtilTF.py | import numpy as np
import tensorflow as tf
import cv2
class Classifier:
    """Image classifier backed by a frozen TF1 retrained Inception graph.

    Loads the graph and label list once at construction; each classify()
    call opens a fresh session over the shared graph.
    """

    def __init__(self):
        self.__graph = self.__load_graph('../trained_model/retrained_graph.pb')
        self.__labels = self.__load_labels('../trained_model/retrained_labels.txt')

    def __load_graph(self, model_file):
        """Deserialize a frozen GraphDef file into a new tf.Graph."""
        graph_def = tf.GraphDef()
        with open(model_file, "rb") as fh:
            graph_def.ParseFromString(fh.read())
        graph = tf.Graph()
        with graph.as_default():
            tf.import_graph_def(graph_def)
        return graph

    def __load_labels(self, label_file):
        """Read one label per line, stripped of trailing whitespace."""
        return [line.rstrip() for line in tf.gfile.GFile(label_file).readlines()]

    def classify(self, np_img):
        """Return the best label for *np_img* formatted as 'label(NN%)'.

        Also prints the five highest-scoring candidates to stdout.
        """
        # The retrained Inception graph expects a 299x299 input batch.
        resized = cv2.resize(np_img, dsize=(299,299), interpolation = cv2.INTER_CUBIC)
        batch = np.expand_dims(np.asarray(resized), axis=0)
        in_op = self.__graph.get_operation_by_name('import/Mul')
        out_op = self.__graph.get_operation_by_name('import/final_result')
        with tf.Session(graph = self.__graph) as sess:
            scores = sess.run(out_op.outputs[0], {
                in_op.outputs[0]: batch
            })
        scores = np.squeeze(scores)
        # Indices of the five best scores, best first.
        top_k = scores.argsort()[-5:][::-1]
        print("Considering:")
        for idx in top_k:
            print(self.__labels[idx], scores[idx])
        best = top_k[0]
        o = self.__labels[best] + '(' + str(int(scores[best]*100)) + '%)'
        return o
#print('%s (score = %.5f' % (readable, score)) | {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,679 | FriesW/TeamJellyBean | refs/heads/master | /Code/Viewer/ParameterType.py | from .Parameter import Parameter
import re
class Integer(Parameter):
    """Integer parameter with optional [min, max] clamping and step snapping."""

    def __init__(self, name, listener, initial = 0, min = None, max = None, step = 1):
        # A missing or zero step degenerates to 1 so snapping is well defined.
        if step in (None, 0):
            step = 1
        # Align the upper bound onto the step grid anchored at the lower bound.
        anchor = min if min is not None else 0
        if max is not None:
            max = max - ((max - anchor) % step)
        self.__min = min
        self.__max = max
        self.__step = step
        super(Integer, self).__init__(name, listener, initial)

    def _get_input_type(self):
        return 'number'

    def _validator(self, input):
        """Coerce to int, snap onto the step grid, then clamp into range."""
        input = int(float(input))
        offset = self.__min if self.__min is not None else 0
        if self.__step and (input - offset) % self.__step != 0:
            leftover = abs(input - offset) % self.__step
            # Snap toward the direction of travel: up when the candidate is
            # above the current value, otherwise down.
            if input > self.get():
                input = input + self.__step - leftover
            else:
                input = input - leftover
        if self.__max is not None:
            input = min(input, self.__max)
        if self.__min is not None:
            input = max(input, self.__min)
        return (True, input)
class Float(Parameter):
    """Floating point parameter, optionally clamped to [min, max]."""

    def __init__(self, name, listener, initial = 0, min = None, max = None):
        self.__min = min
        self.__max = max
        super(Float, self).__init__(name, listener, initial)

    def _get_input_type(self):
        return 'number'

    def _validator(self, input):
        """Coerce to float and clamp into the configured range."""
        value = float(input)
        if self.__max is not None:
            value = min(value, self.__max)
        if self.__min is not None:
            value = max(value, self.__min)
        return (True, value)
class String(Parameter):
    """Text parameter with optional scrub and acceptance regex patterns."""

    def __init__(self, name, listener, initial = '', pattern_remove = None, pattern_accept = None):
        self.__remove = pattern_remove
        self.__accept = pattern_accept
        super(String, self).__init__(name, listener, initial)

    def _get_input_type(self):
        return 'text'

    def _validator(self, input):
        """Strip disallowed characters, then require a full-string match."""
        text = str(input)
        if self.__remove:
            text = re.sub(self.__remove, '', text)
        ok = bool(re.fullmatch(self.__accept, text)) if self.__accept else True
        return (ok, text)
class Event(Parameter):
    """One-shot event counter parameter.

    The stored value is a pending-event count: each remote button press
    increments it, and each local ``get()`` consumes (decrements) one
    event.  ``peek()`` reads the count without consuming.
    """
    def __init__(self, name, listener):
        super(Event, self).__init__(name, listener, 0)
    def _get_input_type(self):
        return 'button'
    def _validator(self, input):
        # Non-numeric input leaves the counter unchanged.
        try:
            input = int(input)
        except:
            input = self.peek()
        return (True, input)
    def peek(self):
        # Read the pending count without consuming an event.
        return super(Event, self).get()
    def _remote_get(self):
        # The UI sees the raw count; reading from the UI must not consume.
        return self.peek()
    def get(self):
        # Consume one pending event (never going below zero) and return the
        # count as it was before consuming.
        val = self.peek()
        self.set(max(val - 1, 0))
        return val
    def _remote_set(self, val):
        # A remote press increments the pending count; the sent value is ignored.
        self.set( self.peek() + 1 )
class Boolean(Parameter):
    """Checkbox-backed boolean parameter."""

    def __init__(self, name, listener, initial = False):
        super(Boolean, self).__init__(name, listener, initial)

    def _get_input_type(self):
        return 'checkbox'

    def _validator(self, input):
        # Any truthy input becomes True; validation always succeeds.
        return (True, bool(input))
| {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,680 | FriesW/TeamJellyBean | refs/heads/master | /Code/Viewer/View.py | from .BridgeElement import BridgeElement
import threading
import numpy as np
import cv2
import base64
class View(BridgeElement):
    """A remote image display: pushes OpenCV images to the web client.

    Images are stored on update() and lazily encoded (base64 jpg/png,
    optionally rescaled) only when the client requests a render.  The
    encoded string is cached until a new image arrives.
    """

    def __init__(self, name, listener, encoding = 'jpg', default_scale = 0.5, interpolation = cv2.INTER_LINEAR):
        super(View, self).__init__(name, listener)
        self.__encoding = encoding #'png', 'jpg'
        self.__scale = default_scale
        self.__interpolation = interpolation
        # Guards __image/__update_render: update() may run on a different
        # thread than the websocket render path.
        self.__lock = threading.Lock()
        self.__image = None
        self.__update_render = True
        self.__render_str = None
        self.__reset('! Unset View !')

    def announce(self):
        super(View, self).announce()

    def _get_type(self):
        return 'view'

    def __reset(self, text = '! Reset View !'):
        """Replace the current image with a red placeholder message."""
        image = np.zeros((72,455,3), np.uint8)
        cv2.putText(image, text, (5, 55), cv2.FONT_HERSHEY_SIMPLEX, 2, (0,0,255), 3)
        self.update(image, 1.0)

    def reset(self):
        self.update = self.__reset() if False else self.__reset()

    def update(self, image, scale = None):
        """Store a copy of *image* as the new frame and notify the client.

        NOTE(review): the ``scale`` argument is currently ignored (only the
        constructor's default scale is ever used) -- confirm whether
        per-update scaling was intended before implementing it.
        """
        self.__lock.acquire()
        self.__image = image.copy()
        self.__update_render = True
        self.__lock.release()
        if not self.is_frozen():
            self._notify_listener({'image_event':'update'})

    def notify(self, data):
        # Client asked for the current frame: encode (possibly cached) and send.
        if 'request' in data:
            self._notify_listener({'image':{
                'encoding': self.__encoding,
                'data': self.__render()
            }})

    def set_frozen(self, status):
        super(View, self).set_frozen(status)
        # On unfreeze, flush any frame that arrived while frozen.
        if not status and self.__update_render:
            self._notify_listener({'image_event':'update'})

    def __render(self):
        #render if not initialized, or if update ready and not frozen
        if self.__render_str is None or (not self.is_frozen() and self.__update_render):
            self.__lock.acquire()
            tmp_image = self.__image.copy()
            self.__update_render = False
            self.__lock.release()
            scale = self.__scale
            if scale != 1.0:
                tmp_image = cv2.resize(tmp_image, None,
                        fx = self.__scale, fy = self.__scale,
                        interpolation = self.__interpolation)
            self.__render_str = base64.b64encode(
                    cv2.imencode('.'+self.__encoding, tmp_image)[1].tostring()
                ).decode('utf-8')
        return self.__render_str

    def set_default_scale(self, new_scale):
        self.__scale = new_scale

    #cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_AREA
    def _set_interpolation(self, new_inter):
        # BUG FIX: previously this assigned to a brand-new public attribute
        # ``self.interpolation``, leaving the private ``__interpolation``
        # actually used by __render() untouched -- the setter had no effect.
        self.__interpolation = new_inter
| {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,681 | FriesW/TeamJellyBean | refs/heads/master | /Code/Viewer/ViewServer.py | from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket
from .View import View
from . import ParameterType as PT
import threading
import _thread
import json
class ViewServer:
    """Hosts bridge elements (Views/Parameters) over a websocket for one client.

    NOTE(review): ``self.__lock`` is created but never acquired; ``_bridges``
    and ``_client`` are touched from both the caller's thread and the
    websocket service thread without synchronization -- confirm intent.
    """
    def __init__(self, port = 8000):
        self.__lock = threading.Lock()
        # id -> bridge element; every element is re-announced on (re)connect.
        self._bridges = {}
        # The single currently-connected websocket client (or None).
        self._client = None
        self.__server = SimpleWebSocketServer('', port, self.__make_handler__())
        # Serve websocket traffic on a background thread.
        _thread.start_new_thread(self.__run__, ())
    def __run__(self):
        self.__server.serveforever()
    def __nb(self, name, Type, *args, **kwargs):
        # Create a bridge element wired to this server's sender and register it.
        name = str(name)
        nb = Type(name, self.__send_message__, *args, **kwargs)
        self._bridges[nb.get_id()] = nb
        return nb
    def new_view(self, name, *args, **kwargs):
        return self.__nb(name, View, *args, **kwargs)
    def new_int(self, name, *args, **kwargs):
        return self.__nb(name, PT.Integer, *args, **kwargs)
    def new_float(self, name, *args, **kwargs):
        return self.__nb(name, PT.Float, *args, **kwargs)
    def new_string(self, name, *args, **kwargs):
        return self.__nb(name, PT.String, *args, **kwargs)
    def new_event(self, name, *args, **kwargs):
        return self.__nb(name, PT.Event, *args, **kwargs)
    def new_bool(self, name, *args, **kwargs):
        return self.__nb(name, PT.Boolean, *args, **kwargs)
    def __send_message__(self, message):
        # Messages are silently dropped when no client is connected.
        if self._client != None:
            self._client.sendMessage(json.dumps(message))
    def __make_handler__(parent):
        # Build a WebSocket handler class closed over this server instance
        # ('parent' here is the ViewServer's own 'self').
        class Handler(WebSocket):
            def handleConnected(self):
                print("Connected")
                # Only one client at a time: kick any existing connection.
                if parent._client != None:
                    parent._client.close()
                parent._client = self
                # Replay every element so the fresh client can build its UI.
                for b in parent._bridges:
                    parent._bridges[b].announce()
            def handleClose(self):
                if parent._client == self:
                    parent._client = None
                print("Disconnect")
            def handleMessage(self):
                # Route each {id: payload} entry to its bridge element.
                data = json.loads(self.data)
                for obj_id, values in data.items():
                    if obj_id in parent._bridges:
                        parent._bridges[obj_id].notify(values)
        return Handler
| {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,682 | FriesW/TeamJellyBean | refs/heads/master | /Code/Viewer/BridgeElement.py | from abc import ABC, abstractmethod
import uuid
class BridgeElement(ABC):
    """Base class for anything mirrored to the web UI over the bridge.

    Each element receives a unique id and reports its state to a listener
    callable as ``{id: payload}`` messages.
    """

    @abstractmethod
    def __init__(self, name, listener, hidden = False):
        self.__name = name
        self.__id = 'obj_' + uuid.uuid4().hex
        self.__listener = listener
        self.__is_frozen = False
        self.__is_hidden = hidden
        self.announce()

    @abstractmethod
    def announce(self):
        """(Re)introduce this element to the client: type, name, visibility."""
        self.__listener({self.get_id() : self._get_type()})
        self._notify_listener({
            'name': self.get_name(),
            'hidden': self.is_hidden(),
        })

    @abstractmethod
    def _get_type(self):
        """Short type tag ('view', 'parameter', ...) understood by the client."""

    @abstractmethod
    def notify(self, data):
        """Handle a message sent by the client for this element."""

    @abstractmethod
    def set_frozen(self, status):
        self.__is_frozen = status

    def get_id(self):
        return self.__id

    def get_name(self):
        return self.__name

    def _notify_listener(self, data):
        # All outgoing traffic is namespaced under this element's id.
        self.__listener({self.get_id() : data})

    def is_frozen(self):
        return self.__is_frozen

    def set_hidden(self, status):
        self.__is_hidden = status
        self._notify_listener({'hidden': self.is_hidden()})

    def is_hidden(self):
        return self.__is_hidden
73,683 | FriesW/TeamJellyBean | refs/heads/master | /Notes/Video_Processing/opencv/process.py | #Old notes, probably broken
import cv2
def show(pic, name = 'showing...'):
    """Display *pic* in a window titled *name*, blocking until a key press."""
    cv2.imshow(name, pic)
    cv2.waitKey(0)           # block for any key
    cv2.destroyAllWindows()  # then tear the window down
# Interactive walkthrough of the contour pipeline: grab one 1080p frame,
# then show each processing stage in turn (press a key to advance).
cam = cv2.VideoCapture(0)
cam.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
cam.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
for i in range(4): #First few are bad...
    rv, orig_img = cam.read()
cam.release()
#show(orig_img, "original")
#gray_img = cv2.cvtColor(orig_img, cv2.COLOR_BGR2GRAY)
gray_img = orig_img
#show(gray_img, "gray")
# Stage 1: blur -> Canny edges -> morphological close.
blur_img = cv2.GaussianBlur(gray_img, (25,25), 0)
show(blur_img, "blur")
edge_img = cv2.Canny(blur_img, 30, 50)
show(edge_img, "canny edge")
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (11,11))
close_img = cv2.morphologyEx(edge_img, cv2.MORPH_CLOSE, kernel)
show(close_img, "morphology close")
# NOTE(review): 3-value findContours is the OpenCV 3.x API; 4.x returns 2.
cont_img, contours, hierarchy = cv2.findContours(close_img.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
act_contour_img = orig_img.copy()
cv2.drawContours(act_contour_img, contours, -1, (0, 255, 0), 3)
show(act_contour_img, "actual contours")
#https://docs.opencv.org/3.4.0/dd/d49/tutorial_py_contour_features.html
# Mark each contour's centroid (skip degenerate contours with m00 == 0).
for c in contours:
    m = cv2.moments(c)
    if m["m00"] != 0.0:
        cX = int(m["m10"] / m["m00"])
        cY = int(m["m01"] / m["m00"])
        cv2.circle(act_contour_img, (cX, cY), 5, (255, 0, 255), -1)
show(act_contour_img, "actual contours w/ centers")
#approx_contour_img = orig_img.copy()
#for c in contours:
#    peri = cv2.arcLength(c, True)
#    approx = cv2.approxPolyDP(c, 0.02 * peri, True)
#    cv2.drawContours(approx_contour_img, [approx], -1, (0, 255, 0), 3)
#show(approx_contour_img, "approximate contours")
# NOTE(review): mid-file import; would normally live at the top of the file.
import numpy as np
# Stage 2: re-run the pipeline on a blank canvas of just the contours.
blank_img = np.zeros((1080,1920,3), np.uint8)
cv2.drawContours(blank_img, contours, -1, (0, 255, 0), 7)
show(blank_img, "just actual contours")
#edge_img = cv2.Canny(blank_img, 100, 255)
thres, edge_img = cv2.threshold( cv2.cvtColor(blank_img, cv2.COLOR_BGR2GRAY), 127, 255, cv2.THRESH_BINARY)
show(edge_img, "edges of actual contours")
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (7,7))
close_img = cv2.morphologyEx(edge_img, cv2.MORPH_CLOSE, kernel)
show(close_img, "morphology close")
cont_img, contours, hierarchy = cv2.findContours(close_img.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
act_contour_img = orig_img.copy()
cv2.drawContours(act_contour_img, contours, -1, (0, 255, 0), 3)
show(act_contour_img, "actual contours of contours")
#https://docs.opencv.org/3.4.0/dd/d49/tutorial_py_contour_features.html
for c in contours:
    m = cv2.moments(c)
    if m["m00"] != 0.0:
        cX = int(m["m10"] / m["m00"])
        cY = int(m["m01"] / m["m00"])
        cv2.circle(act_contour_img, (cX, cY), 3, (255, 0, 255), -1)
show(act_contour_img, "centers")
| {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,684 | FriesW/TeamJellyBean | refs/heads/master | /Notes/Video_Processing/opencv/freerun.py | import traceback
import cv2
def freerun(cam):
    """Continuously preview *cam* at half size until interrupted.

    Stops (and closes the window) on the first failed frame read or on
    any key press.
    """
    while True:
        ok, frame = cam.read()
        if not ok:
            print("Camera read error")
            break
        preview = cv2.resize(frame, (0,0), fx=0.5, fy=0.5)
        cv2.imshow('Freerun', preview)
        # waitKey(1) returns -1 when no key was pressed during the wait.
        if cv2.waitKey(1) != -1:
            break
    cv2.destroyAllWindows()
# Open the default camera at 1080p, run the live preview, then wait for
# Enter so the console stays visible when the script is double-clicked.
try:
    cam = cv2.VideoCapture(0)
    cam.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
    cam.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
    freerun(cam)
    cam.release()
except Exception as e:
    # NOTE(review): `e` is unused; the traceback is printed instead.
    traceback.print_exc()
input("Done")
73,685 | FriesW/TeamJellyBean | refs/heads/master | /Code/classifier.py | import cv2
import numpy as np
import Viewer.GlobalServer as GS
import Util
import UtilTF
# Main classifier loop: capture frames, find the tray, slice out beans,
# classify each bean crop, and publish annotated images to the web UI.
cam = cv2.VideoCapture(0)
if not cam.set(cv2.CAP_PROP_FRAME_WIDTH, 1920):
    raise Exception('Camera error: can\'t set width.')
if not cam.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080):
    raise Exception('Camera error: can\'t set height.')
# Web UI controls and display surfaces.
v_orig = GS.new_view('Image')
e_cycle = GS.new_event('Single')
b_freerun = GS.new_bool('Freerun')
rate = Util.Timer(' per frame')
expo_1 = Util.Exposure('Exposure general', hidden=True)
expo_2 = Util.Exposure('Exposure tray')
init_crop = Util.Crop('Initial', 150, 85, 1620, 910, True, False)
tray_finder = Util.FindTray('Tray', True)
bean_slicer = Util.BeanSlicer('Beans', True)
v_out = GS.new_view('Classified')
classify = UtilTF.Classifier()
calibrate_btn = GS.new_event('Calibrate bean slicer')
calibrate_btn.set_hidden(True)
while True:
    rate.cycle()
    # Block until freerun is enabled or a single-shot event arrives.
    while not b_freerun.get() and not e_cycle.get():
        e_cycle.await_remote(0.2)
    if b_freerun.get():
        e_cycle.set(0)
    # Retry until a frame is successfully read.
    rv = False
    while not rv:
        rv, img = cam.read()
    expo_1.measure(img)
    img = init_crop.crop(img)
    v_orig.update(img)
    success, img = tray_finder.find(img)
    if success:
        expo_2.measure(img)
        if calibrate_btn.get():
            bean_slicer.calibrate(img)
        else:
            sliced = bean_slicer.slice(img)
            # Annotate each bean crop location with its predicted label.
            for i in sliced:
                label = classify.classify(i[1])
                p = list(i[0])
                p[1] += 30
                cv2.putText(img, label, tuple(p), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 2, cv2.LINE_AA)
    v_out.update(img)
| {"/Code/Viewer/GlobalServer.py": ["/Code/Viewer/ViewServer.py"], "/Code/Viewer/ParameterType.py": ["/Code/Viewer/Parameter.py"], "/Code/Viewer/View.py": ["/Code/Viewer/BridgeElement.py"], "/Code/Viewer/ViewServer.py": ["/Code/Viewer/View.py"], "/Code/Viewer/Parameter.py": ["/Code/Viewer/BridgeElement.py"]} |
73,686 | FriesW/TeamJellyBean | refs/heads/master | /Code/Viewer/Parameter.py | from abc import ABC, abstractmethod
from .BridgeElement import BridgeElement
import threading
class Parameter(BridgeElement, ABC):
    """Base class for a remotely-editable value (int, float, string, ...).

    Subclasses supply ``_get_input_type`` (HTML input type) and
    ``_validator`` (coerce/validate incoming values).  Every accepted
    change is mirrored to the client via the listener.
    """
    @abstractmethod
    def __init__(self, name, listener, initial, editable = True):
        # __val must exist before super().__init__ runs: BridgeElement's
        # constructor calls announce(), which reads the current value.
        self.__val = initial
        self.__editable = editable
        self.__await = threading.Event()
        super(Parameter, self).__init__(name, listener)
        # Run the initial value through the subclass validator.
        self.set(initial)
    def announce(self):
        super(Parameter, self).announce()
        self._notify_listener({
            'input_type' : self._get_input_type(),
            'input_value' : self._remote_get(),
            'editable' : self.is_editable()
        })
    @abstractmethod
    def _get_input_type(self):
        """HTML input type used by the client ('number', 'text', ...)."""
        pass
    def _get_type(self):
        return 'parameter'
    def notify(self, data):
        """Handle a client message: apply the new value when editable."""
        if 'input_value' in data:
            if self.is_editable():
                self._remote_set(data['input_value'])
            else:
                # Re-announce so the client reverts to the real value.
                self.announce()
            # Wake anyone blocked in await_remote().
            self.__await.set()
    def await_remote(self, timeout = None):
        """Block until the client touches this parameter; True if it did."""
        rv = self.__await.wait(timeout)
        if rv:
            self.__await.clear()
        return rv
    def set_editable(self, status):
        self.__editable = status
        self._notify_listener({ 'editable' : self.is_editable() })
    def is_editable(self):
        return self.__editable
    @abstractmethod
    def _validator(self, input):
        """Return (accepted, coerced_value) for a candidate value."""
        return (False, None)
    def _remote_get(self):
        return self.get()
    def get(self):
        return self.__val
    def _remote_set(self, new_val):
        return self.set(new_val)
    def set(self, new_val):
        """Validate and store *new_val*; return True when accepted.

        A validator that raises is treated as a rejection.
        """
        accept = False
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception.
        try:
            accept, new_val = self._validator(new_val)
        except Exception:
            pass
        if accept:
            self.__val = new_val
            self._notify_listener({ 'input_value' : self._remote_get() })
        return accept
    #Don't have time for this functionality
    def set_frozen(self, status):
        pass
73,698 | CorgiCash/corgi-core | refs/heads/master | /hathor/transaction/resources/tips_histogram.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from twisted.web import resource
from hathor.api_util import parse_get_arguments, set_cors
from hathor.cli.openapi_files.register import register_resource
ARGS = ['begin', 'end']
@register_resource
class TipsHistogramResource(resource.Resource):
    """ Implements a web server API to return the tips in a timestamp interval.
        Returns a list of timestamps and numbers of tips.

    You must run with option `--status <PORT>`.
    """
    isLeaf = True

    def __init__(self, manager):
        self.manager = manager

    @staticmethod
    def _error(message):
        """Serialize a failure response carrying *message*."""
        return json.dumps({
            'success': False,
            'message': message
        }).encode('utf-8')

    def render_GET(self, request):
        """ Get request to /tips-histogram/ that return the number of tips between two timestamp
            We expect two GET parameters: 'begin' and 'end'

            'begin': int that indicates the beginning of the interval
            'end': int that indicates the end of the interval

            :rtype: string (json)
        """
        request.setHeader(b'content-type', b'application/json; charset=utf-8')
        set_cors(request, 'GET')

        parsed = parse_get_arguments(request.args, ARGS)
        if not parsed['success']:
            return self._error('Missing parameter: {}'.format(parsed['missing']))
        args = parsed['args']

        # Parse both interval bounds; each must convert cleanly to int.
        # (Previously this was two copy-pasted try/except blocks.)
        bounds = {}
        for name in ('begin', 'end'):
            try:
                bounds[name] = int(args[name])
            except ValueError:
                return self._error('Invalid parameter, cannot convert to int: {}'.format(name))

        # Count the tips at every timestamp in the inclusive interval.
        v = []
        for timestamp in range(bounds['begin'], bounds['end'] + 1):
            tx_tips = self.manager.tx_storage.get_tx_tips(timestamp)
            v.append((timestamp, len(tx_tips)))

        return json.dumps({'success': True, 'tips': v}).encode('utf-8')
# OpenAPI specification for the /tips-histogram endpoint, consumed by the
# project's documentation generator.
# NOTE(review): OpenAPI schema types are normally 'integer', not 'int' --
# confirm against the project's spec tooling before changing.
TipsHistogramResource.openapi = {
    '/tips-histogram': {
        'x-visibility': 'private',
        'get': {
            'tags': ['transaction'],
            'operationId': 'tips_histogram',
            'summary': 'Histogram of tips',
            # FIX: added the missing space between the concatenated string
            # fragments ("quantity)for each" -> "quantity) for each").
            'description': ('Returns a list of tuples (timestamp, quantity) '
                            'for each timestamp in the requested interval'),
            'parameters': [
                {
                    'name': 'begin',
                    'in': 'query',
                    # FIX: typo 'Beggining' -> 'Beginning'.
                    'description': 'Beginning of the timestamp interval',
                    'required': True,
                    'schema': {
                        'type': 'int'
                    }
                },
                {
                    'name': 'end',
                    'in': 'query',
                    'description': 'End of the timestamp interval',
                    'required': True,
                    'schema': {
                        'type': 'int'
                    }
                }
            ],
            'responses': {
                '200': {
                    'description': 'Success',
                    'content': {
                        'application/json': {
                            'examples': {
                                'success': {
                                    'summary': 'Success',
                                    'value': [
                                        [
                                            1547163020,
                                            1
                                        ],
                                        [
                                            1547163021,
                                            4
                                        ],
                                        [
                                            1547163022,
                                            2
                                        ]
                                    ]
                                },
                                'error': {
                                    'summary': 'Invalid parameter',
                                    'value': {
                                        'success': False,
                                        'message': 'Missing parameter: begin'
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
73,699 | CorgiCash/corgi-core | refs/heads/master | /hathor/manager.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import random
import sys
import time
from enum import Enum
from typing import Any, Iterator, List, NamedTuple, Optional, Union, cast
from structlog import get_logger
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IReactorCore
from twisted.python.threadpool import ThreadPool
import hathor.util
from hathor import daa
from hathor.conf import HathorSettings
from hathor.consensus import ConsensusAlgorithm
from hathor.exception import InvalidNewTransaction
from hathor.indexes import TokensIndex, WalletIndex
from hathor.mining import BlockTemplate, BlockTemplates
from hathor.p2p.peer_discovery import PeerDiscovery
from hathor.p2p.peer_id import PeerId
from hathor.p2p.protocol import HathorProtocol
from hathor.profiler import get_cpu_profiler
from hathor.pubsub import HathorEvents, PubSubManager
from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights
from hathor.transaction.exceptions import TxValidationError
from hathor.transaction.storage import TransactionStorage
from hathor.wallet import BaseWallet
settings = HathorSettings()
logger = get_logger()
cpu = get_cpu_profiler()
class HathorManager:
""" HathorManager manages the node with the help of other specialized classes.
Its primary objective is to handle DAG-related matters, ensuring that the DAG is always valid and connected.
"""
    class NodeState(Enum):
        """Lifecycle states of a HathorManager node."""
        # This node is still initializing
        INITIALIZING = 'INITIALIZING'
        # This node is ready to establish new connections, sync, and exchange transactions.
        READY = 'READY'
    def __init__(self, reactor: IReactorCore, peer_id: Optional[PeerId] = None, network: Optional[str] = None,
                 hostname: Optional[str] = None, pubsub: Optional[PubSubManager] = None,
                 wallet: Optional[BaseWallet] = None, tx_storage: Optional[TransactionStorage] = None,
                 peer_storage: Optional[Any] = None, default_port: int = 40403, wallet_index: bool = False,
                 stratum_port: Optional[int] = None, ssl: bool = True,
                 capabilities: Optional[List[str]] = None) -> None:
        """
        :param reactor: Twisted reactor which handles the mainloop and the events.
        :param peer_id: Id of this node. If not given, a new one is created.
        :param network: Name of the network this node participates. Usually it is either testnet or mainnet.
        :type network: string

        :param hostname: The hostname of this node. It is used to generate its entrypoints.
        :type hostname: string

        :param pubsub: If not given, a new one is created.
        :type pubsub: :py:class:`hathor.pubsub.PubSubManager`

        :param wallet: Optional wallet; when given, it is wired to this manager's pubsub and reactor.

        :param tx_storage: If not given, a :py:class:`TransactionMemoryStorage` one is created.
        :type tx_storage: :py:class:`hathor.transaction.storage.transaction_storage.TransactionStorage`

        :param peer_storage: If not given, a new one is created.
        :type peer_storage: :py:class:`hathor.p2p.peer_storage.PeerStorage`

        :param default_port: Network default port. It is used when only ip addresses are discovered.
        :type default_port: int

        :param wallet_index: If should add a wallet index in the storage
        :type wallet_index: bool

        :param stratum_port: Stratum server port. Stratum server will only be created if it is not None.
        :type stratum_port: Optional[int]

        :param ssl: Whether p2p factories use SSL.

        :param capabilities: Peer capability list; defaults to [CAPABILITY_WHITELIST].
        """
        # Imported here (not at module top) to avoid import cycles.
        from hathor.metrics import Metrics
        from hathor.p2p.factory import HathorClientFactory, HathorServerFactory
        from hathor.p2p.manager import ConnectionsManager
        from hathor.transaction.storage.memory_storage import TransactionMemoryStorage

        self.log = logger.new()

        self.reactor = reactor
        if hasattr(self.reactor, 'addSystemEventTrigger'):
            # Ensure a clean shutdown when the reactor stops.
            self.reactor.addSystemEventTrigger('after', 'shutdown', self.stop)

        self.state: Optional[HathorManager.NodeState] = None
        self.profiler: Optional[Any] = None

        # Hostname, used to be accessed by other peers.
        self.hostname = hostname
        # Remote address, which can be different from local address.
        self.remote_address = None

        self.my_peer = peer_id or PeerId()
        self.network = network or 'testnet'

        self.is_started: bool = False

        self.cpu = cpu

        # XXX Should we use a singleton or a new PeerStorage? [msbrogli 2018-08-29]
        self.pubsub = pubsub or PubSubManager(self.reactor)
        self.tx_storage = tx_storage or TransactionMemoryStorage()
        self.tx_storage.pubsub = self.pubsub
        if wallet_index and self.tx_storage.with_index:
            self.tx_storage.wallet_index = WalletIndex(self.pubsub)
            self.tx_storage.tokens_index = TokensIndex()

        self.metrics = Metrics(
            pubsub=self.pubsub,
            avg_time_between_blocks=settings.AVG_TIME_BETWEEN_BLOCKS,
            tx_storage=self.tx_storage,
            reactor=self.reactor,
        )

        self.consensus_algorithm = ConsensusAlgorithm()

        self.peer_discoveries: List[PeerDiscovery] = []

        self.ssl = ssl
        self.server_factory = HathorServerFactory(self.network, self.my_peer, node=self, use_ssl=ssl)
        self.client_factory = HathorClientFactory(self.network, self.my_peer, node=self, use_ssl=ssl)
        self.connections = ConnectionsManager(self.reactor, self.my_peer, self.server_factory, self.client_factory,
                                              self.pubsub, self, ssl)

        self.wallet = wallet
        if self.wallet:
            self.wallet.pubsub = self.pubsub
            self.wallet.reactor = self.reactor

        if stratum_port:
            # XXX: only import if needed
            from hathor.stratum import StratumFactory
            self.stratum_factory: Optional[StratumFactory] = StratumFactory(manager=self, port=stratum_port)
        else:
            self.stratum_factory = None
        # Set stratum factory for metrics object
        self.metrics.stratum_factory = self.stratum_factory

        self._allow_mining_without_peers = False

        # Thread pool used to resolve pow when sending tokens
        self.pow_thread_pool = ThreadPool(minthreads=0, maxthreads=settings.MAX_POW_THREADS, name='Pow thread pool')

        # List of addresses to listen for new connections (eg: [tcp:8000])
        self.listen_addresses: List[str] = []

        # Full verification execute all validations for transactions and blocks when initializing the node
        # Can be activated on the command line with --full-verification
        self._full_verification = False

        # List of whitelisted peers
        self.peers_whitelist: List[str] = []

        # List of capabilities of the peer
        if capabilities is not None:
            self.capabilities = capabilities
        else:
            self.capabilities = [settings.CAPABILITY_WHITELIST]
    def start(self) -> None:
        """ A factory must be started only once. And it is usually automatically started.

        Raises Exception if already started; may call sys.exit() when the
        storage is detected to be unreliable from a previous unclean run.
        """
        if self.is_started:
            raise Exception('HathorManager is already started')
        self.is_started = True

        self.log.info('start manager', network=self.network)
        # If it's a full verification, we save on the storage that we are starting it
        # this is required because if we stop the initilization in the middle, the metadata
        # saved on the storage is not reliable anymore, only if we finish it
        if self._full_verification:
            self.tx_storage.start_full_verification()
        else:
            # If it's a fast initialization and the last time a full initialization stopped in the middle
            # we can't allow the full node to continue, so we need to remove the storage and do a full sync
            # or execute an initialization with full verification
            if self.tx_storage.is_running_full_verification():
                self.log.error(
                    'Error initializing node. The last time you started your node you did a full verification '
                    'that was stopped in the middle. The storage is not reliable anymore and, because of that, '
                    'you must initialize with a full verification again or remove your storage and do a full sync.'
                )
                sys.exit()

            # If self.tx_storage.is_running_manager() is True, the last time the node was running it had a sudden crash
            # because of that, we must run a full verification because some storage data might be wrong.
            # The metadata is the only piece of the storage that may be wrong, not the blocks and transactions.
            if self.tx_storage.is_running_manager():
                self.log.error(
                    'Error initializing node. The last time you executed your full node it wasn\'t stopped correctly. '
                    'The storage is not reliable anymore and, because of that, so you must run a full verification '
                    'or remove your storage and do a full sync.'
                )
                sys.exit()

        self.state = self.NodeState.INITIALIZING
        self.pubsub.publish(HathorEvents.MANAGER_ON_START)
        self.connections.start()
        self.pow_thread_pool.start()

        # Disable get transaction lock when initializing components
        self.tx_storage.disable_lock()
        # Initialize manager's components.
        self._initialize_components()
        if self._full_verification:
            # Before calling self._initialize_components() I start 'full verification' mode and after that I need to
            # finish it. It's just to know if the full node has stopped a full initialization in the middle
            self.tx_storage.finish_full_verification()
        self.tx_storage.enable_lock()

        # Metric starts to capture data
        self.metrics.start()

        for description in self.listen_addresses:
            self.listen(description)

        self.do_discovery()

        self.start_time = time.time()

        if self.wallet:
            self.wallet.start()

        if self.stratum_factory:
            self.stratum_factory.start()

        # Start running
        self.tx_storage.start_running_manager()
    def stop(self) -> Deferred:
        """Stop the manager and its components (connections, PoW pool, metrics, wallet, stratum).

        :return: a DeferredList that fires when the asynchronous shutdown steps
                 (currently only stratum's) have completed
        :raises Exception: if the manager is not started
        """
        if not self.is_started:
            raise Exception('HathorManager is already stopped')
        self.is_started = False

        waits = []

        self.log.info('stop manager')
        # Clear the "running" marker so the next start does not force a full verification.
        self.tx_storage.stop_running_manager()
        self.connections.stop()
        self.pubsub.publish(HathorEvents.MANAGER_ON_STOP)
        if self.pow_thread_pool.started:
            self.pow_thread_pool.stop()

        # Metric stops to capture data
        self.metrics.stop()

        if self.wallet:
            self.wallet.stop()

        if self.stratum_factory:
            wait_stratum = self.stratum_factory.stop()
            if wait_stratum:
                waits.append(wait_stratum)

        return defer.DeferredList(waits)
def do_discovery(self) -> None:
"""
Do a discovery and connect on all discovery strategies.
"""
for peer_discovery in self.peer_discoveries:
peer_discovery.discover_and_connect(self.connections.connect_to)
def start_profiler(self) -> None:
"""
Start profiler. It can be activated from a web resource, as well.
"""
if not self.profiler:
import cProfile
self.profiler = cProfile.Profile()
self.profiler.enable()
def stop_profiler(self, save_to: Optional[str] = None) -> None:
"""
Stop the profile and optionally save the results for future analysis.
:param save_to: path where the results will be saved
:type save_to: str
"""
assert self.profiler is not None
self.profiler.disable()
if save_to:
self.profiler.dump_stats(save_to)
def _initialize_components(self) -> None:
"""You are not supposed to run this method manually. You should run `doStart()` to initialize the
manager.
This method runs through all transactions, verifying them and updating our wallet.
"""
self.log.info('initialize')
if self.wallet:
self.wallet._manually_initialize()
t0 = time.time()
t1 = t0
cnt = 0
cnt2 = 0
t2 = t0
h = 0
block_count = 0
tx_count = 0
if self.tx_storage.get_count_tx_blocks() > 3 and not self.tx_storage.is_db_clean():
# If has more than 3 txs on storage (the genesis txs that are always on storage by default)
# and the db is not clean (the db has old data before we cleaned the voided txs/blocks)
# then we can't move forward and ask the user to remove the old db
self.log.error(
'Error initializing the node. You can\'t use an old database right now. '
'Please remove your database or start your full node again with an empty data folder.'
)
sys.exit()
# If has reached this line, the db is clean, so we add this attribute to it
self.tx_storage.set_db_clean()
# self.start_profiler()
self.log.debug('load blocks and transactions')
for tx in self.tx_storage._topological_sort():
assert tx.hash is not None
tx_meta = tx.get_metadata()
t2 = time.time()
dt = hathor.util.LogDuration(t2 - t1)
dcnt = cnt - cnt2
tx_rate = '?' if dt == 0 else dcnt / dt
h = max(h, tx_meta.height)
if dt > 30:
ts_date = datetime.datetime.fromtimestamp(self.tx_storage.latest_timestamp)
if h == 0:
self.log.debug('start loading transactions...')
else:
self.log.info('load transactions...', tx_rate=tx_rate, tx_new=dcnt, dt=dt,
total=cnt, latest_ts=ts_date, height=h)
t1 = t2
cnt2 = cnt
cnt += 1
# It's safe to skip block weight verification during initialization because
# we trust the difficulty stored in metadata
skip_block_weight_verification = True
if block_count % settings.VERIFY_WEIGHT_EVERY_N_BLOCKS == 0:
skip_block_weight_verification = False
try:
assert self.on_new_tx(
tx,
quiet=True,
fails_silently=False,
skip_block_weight_verification=skip_block_weight_verification
)
except (InvalidNewTransaction, TxValidationError):
self.log.error('unexpected error when initializing', tx=tx, exc_info=True)
raise
if tx.is_block:
block_count += 1
if time.time() - t2 > 1:
dt = hathor.util.LogDuration(time.time() - t2)
self.log.warn('tx took too long to load', tx=tx.hash_hex, dt=dt)
self.log.debug('done loading transactions')
# self.stop_profiler(save_to='profiles/initializing.prof')
self.state = self.NodeState.READY
tdt = hathor.util.LogDuration(t2 - t0)
tx_rate = '?' if tdt == 0 else cnt / tdt
self.log.info('ready', tx_count=cnt, tx_rate=tx_rate, total_dt=tdt, height=h, blocks=block_count, txs=tx_count)
    def add_listen_address(self, addr: str) -> None:
        """Register an extra endpoint description for the manager to listen on at start."""
        self.listen_addresses.append(addr)
    def add_peer_discovery(self, peer_discovery: PeerDiscovery) -> None:
        """Register a peer-discovery strategy to be run by `do_discovery`."""
        self.peer_discoveries.append(peer_discovery)
    def get_new_tx_parents(self, timestamp: Optional[float] = None) -> List[bytes]:
        """Select which transactions will be confirmed by a new transaction.

        :return: The hashes of the parents for a new transaction.
        :rtype: List[bytes(hash)]
        """
        timestamp = timestamp or self.reactor.seconds()
        # Tips are queried at timestamp - 1 so a tx created "now" is not its own candidate parent.
        ret = list(self.tx_storage.get_tx_tips(timestamp - 1))
        random.shuffle(ret)
        ret = ret[:2]
        if len(ret) == 1:
            # If there is only one tip, let's randomly choose one of its parents.
            parents = list(self.tx_storage.get_tx_tips(ret[0].begin - 1))
            ret.append(random.choice(parents))
        assert len(ret) == 2, 'timestamp={} tips={}'.format(
            timestamp, [x.hex() for x in self.tx_storage.get_tx_tips(timestamp - 1)])
        return [x.data for x in ret]
def generate_parent_txs(self, timestamp: float) -> 'ParentTxs':
"""Select which transactions will be confirmed by a new block.
This method tries to return a stable result, such that for a given timestamp and storage state it will always
return the same.
"""
can_include_intervals = sorted(self.tx_storage.get_tx_tips(timestamp - 1))
assert can_include_intervals, 'tips cannot be empty'
max_timestamp = max(int(i.begin) for i in can_include_intervals)
must_include: List[bytes] = []
assert len(can_include_intervals) > 0, f'invalid timestamp "{timestamp}", no tips found"'
if len(can_include_intervals) < 2:
# If there is only one tip, let's randomly choose one of its parents.
must_include_interval = can_include_intervals[0]
must_include = [must_include_interval.data]
can_include_intervals = sorted(self.tx_storage.get_tx_tips(must_include_interval.begin - 1))
can_include = [i.data for i in can_include_intervals]
return ParentTxs(max_timestamp, can_include, must_include)
    def allow_mining_without_peers(self) -> None:
        """Allow mining without being synced to at least one peer.

        It should be used only for debugging purposes.
        """
        self._allow_mining_without_peers = True
def can_start_mining(self) -> bool:
""" Return whether we can start mining.
"""
if self._allow_mining_without_peers:
return True
return self.connections.has_synced_peer()
    def get_block_templates(self, parent_block_hash: Optional[bytes] = None,
                            timestamp: Optional[int] = None) -> BlockTemplates:
        """ Cached version of `make_block_templates`, cache is invalidated when latest_timestamp changes.

        NOTE: the caching scheme is currently disabled (see FIXME below); every call
        rebuilds the templates. When `parent_block_hash` is given, a single-template
        collection for that parent is returned; otherwise one per best tip.
        """
        if parent_block_hash is not None:
            return BlockTemplates([self.make_block_template(parent_block_hash, timestamp)], storage=self.tx_storage)
        return BlockTemplates(self.make_block_templates(timestamp), storage=self.tx_storage)
        # FIXME: the following caching scheme breaks tests:
        # cached_timestamp: Optional[int]
        # cached_block_template: BlockTemplates
        # cached_timestamp, cached_block_template = getattr(self, '_block_templates_cache', (None, None))
        # if cached_timestamp == self.tx_storage.latest_timestamp:
        #     return cached_block_template
        # block_templates = BlockTemplates(self.make_block_templates(), storage=self.tx_storage)
        # setattr(self, '_block_templates_cache', (self.tx_storage.latest_timestamp, block_templates))
        # return block_templates
def make_block_templates(self, timestamp: Optional[int] = None) -> Iterator[BlockTemplate]:
""" Makes block templates for all possible best tips as of the latest timestamp.
Each block template has all the necessary info to build a block to be mined without requiring further
information from the blockchain state. Which is ideal for use by external mining servers.
"""
for parent_block_hash in self.tx_storage.get_best_block_tips():
yield self.make_block_template(parent_block_hash, timestamp)
def make_block_template(self, parent_block_hash: bytes, timestamp: Optional[int] = None) -> BlockTemplate:
""" Makes a block template using the given parent block.
"""
parent_block = self.tx_storage.get_transaction(parent_block_hash)
assert isinstance(parent_block, Block)
parent_txs = self.generate_parent_txs(parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS)
if timestamp is None:
current_timestamp = int(max(self.tx_storage.latest_timestamp, self.reactor.seconds()))
else:
current_timestamp = timestamp
return self._make_block_template(parent_block, parent_txs, current_timestamp)
def make_custom_block_template(self, parent_block_hash: bytes, parent_tx_hashes: List[bytes],
timestamp: Optional[int] = None) -> BlockTemplate:
""" Makes a block template using the given parent block and txs.
"""
parent_block = self.tx_storage.get_transaction(parent_block_hash)
assert isinstance(parent_block, Block)
# gather the actual txs to query their timestamps
parent_tx_list: List[Transaction] = []
for tx_hash in parent_tx_hashes:
tx = self.tx_storage.get_transaction(tx_hash)
assert isinstance(tx, Transaction)
parent_tx_list.append(tx)
max_timestamp = max(tx.timestamp for tx in parent_tx_list)
parent_txs = ParentTxs(max_timestamp, parent_tx_hashes, [])
if timestamp is None:
current_timestamp = int(max(self.tx_storage.latest_timestamp, self.reactor.seconds()))
else:
current_timestamp = timestamp
return self._make_block_template(parent_block, parent_txs, current_timestamp)
    def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', current_timestamp: int,
                             with_weight_decay: bool = False) -> BlockTemplate:
        """ Further implementation of making block template, used by make_block_template and make_custom_block_template

        Computes the valid timestamp window, the required weight, and the parent set,
        then packs everything into a BlockTemplate.
        """
        assert parent_block.hash is not None
        # the absolute minimum would be the previous timestamp + 1
        timestamp_abs_min = parent_block.timestamp + 1
        # and absolute maximum limited by max time between blocks
        if not parent_block.is_genesis:
            timestamp_abs_max = parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1
        else:
            timestamp_abs_max = 0xffffffff
        assert timestamp_abs_max > timestamp_abs_min
        # actual minimum depends on the timestamps of the parent txs
        # it has to be at least the max timestamp of parents + 1
        timestamp_min = max(timestamp_abs_min, parent_txs.max_timestamp + 1)
        assert timestamp_min <= timestamp_abs_max
        # when we have weight decay, the max timestamp will be when the next decay happens
        if with_weight_decay and settings.WEIGHT_DECAY_ENABLED:
            # we either have passed the first decay or not, the range will vary depending on that
            if timestamp_min > timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE:
                timestamp_max_decay = timestamp_min + settings.WEIGHT_DECAY_WINDOW_SIZE
            else:
                timestamp_max_decay = timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE
            timestamp_max = min(timestamp_abs_max, timestamp_max_decay)
        else:
            timestamp_max = timestamp_abs_max
        # clamp the requested timestamp into the valid [timestamp_min, timestamp_max] window
        timestamp = min(max(current_timestamp, timestamp_min), timestamp_max)
        weight = daa.calculate_next_weight(parent_block, timestamp)
        parent_block_metadata = parent_block.get_metadata()
        height = parent_block_metadata.height + 1
        parents = [parent_block.hash] + parent_txs.must_include
        parents_any = parent_txs.can_include
        # simplify representation when you only have one to choose from
        if len(parents) + len(parents_any) == 3:
            parents.extend(sorted(parents_any))
            parents_any = []
        assert len(parents) + len(parents_any) >= 3, 'There should be enough parents to choose from'
        assert 1 <= len(parents) <= 3, 'Impossible number of parents'
        if __debug__ and len(parents) == 3:
            assert len(parents_any) == 0, 'Extra parents to choose from that cannot be chosen'
        return BlockTemplate(
            versions={TxVersion.REGULAR_BLOCK.value, TxVersion.MERGE_MINED_BLOCK.value},
            reward=daa.get_tokens_issued_per_block(height),
            weight=weight,
            timestamp_now=current_timestamp,
            timestamp_min=timestamp_min,
            timestamp_max=timestamp_max,
            parents=parents,
            parents_any=parents_any,
            height=height,
            score=sum_weights(parent_block_metadata.score, weight),
        )
def generate_mining_block(self, timestamp: Optional[int] = None,
parent_block_hash: Optional[bytes] = None,
data: bytes = b'', address: Optional[bytes] = None,
merge_mined: bool = False) -> Union[Block, MergeMinedBlock]:
""" Generates a block ready to be mined. The block includes new issued tokens,
parents, and the weight.
:return: A block ready to be mined
:rtype: :py:class:`hathor.transaction.Block`
"""
if address is None:
if self.wallet is None:
raise ValueError('No wallet available and no mining address given')
address = self.wallet.get_unused_address_bytes(mark_as_used=False)
assert address is not None
block = self.get_block_templates(parent_block_hash, timestamp).generate_mining_block(
merge_mined=merge_mined,
address=address or None, # XXX: because we allow b'' for explicit empty output script
data=data,
)
return block
    def get_tokens_issued_per_block(self, height: int) -> int:
        """Return the number of tokens issued (aka reward) per block of a given height."""
        # Delegates to the difficulty-adjustment module, which owns the emission schedule.
        return daa.get_tokens_issued_per_block(height)
    def validate_new_tx(self, tx: BaseTransaction, skip_block_weight_verification: bool = False) -> bool:
        """ Process incoming transaction during initialization.
        These transactions came only from storage.

        Returns True when the tx/block is valid; otherwise raises
        InvalidNewTransaction or TxValidationError.
        """
        assert tx.hash is not None

        if self.state == self.NodeState.INITIALIZING:
            # During initialization, genesis txs coming from storage are accepted as-is.
            if tx.is_genesis:
                return True

        else:
            # Outside initialization a genesis tx must never arrive again.
            if tx.is_genesis:
                raise InvalidNewTransaction('Genesis? {}'.format(tx.hash_hex))

            now = self.reactor.seconds()
            if tx.timestamp - now > settings.MAX_FUTURE_TIMESTAMP_ALLOWED:
                raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={}, now={})'.format(
                    tx.hash_hex, tx.timestamp, now))

        # Verify transaction and raises an TxValidationError if tx is not valid.
        tx.verify()

        if tx.is_block:
            tx = cast(Block, tx)
            assert tx.hash is not None  # XXX: it appears that after casting this assert "casting" is lost
            if not skip_block_weight_verification:
                # Validate minimum block difficulty
                block_weight = daa.calculate_block_difficulty(tx)
                if tx.weight < block_weight - settings.WEIGHT_TOL:
                    raise InvalidNewTransaction(
                        'Invalid new block {}: weight ({}) is smaller than the minimum weight ({})'.format(
                            tx.hash.hex(), tx.weight, block_weight
                        )
                    )

            # The reward must match exactly what the emission schedule allows at this height.
            parent_block = tx.get_block_parent()
            tokens_issued_per_block = daa.get_tokens_issued_per_block(parent_block.get_metadata().height + 1)
            if tx.sum_outputs != tokens_issued_per_block:
                raise InvalidNewTransaction(
                    'Invalid number of issued tokens tag=invalid_issued_tokens'
                    ' tx.hash={tx.hash_hex} issued={tx.sum_outputs} allowed={allowed}'.format(
                        tx=tx,
                        allowed=tokens_issued_per_block,
                    )
                )
        else:
            assert tx.hash is not None  # XXX: it appears that after casting this assert "casting" is lost
            assert isinstance(tx, Transaction)
            # Validate minimum tx difficulty
            min_tx_weight = daa.minimum_tx_weight(tx)
            if tx.weight < min_tx_weight - settings.WEIGHT_TOL:
                raise InvalidNewTransaction(
                    'Invalid new tx {}: weight ({}) is smaller than the minimum weight ({})'.format(
                        tx.hash_hex, tx.weight, min_tx_weight
                    )
                )

        return True
def submit_block(self, blk: Block, fails_silently: bool = True) -> bool:
"""Used by submit block from all mining APIs.
"""
tips = self.tx_storage.get_best_block_tips()
parent_hash = blk.get_block_parent_hash()
if parent_hash not in tips:
return False
return self.propagate_tx(blk, fails_silently=fails_silently)
def propagate_tx(self, tx: BaseTransaction, fails_silently: bool = True) -> bool:
"""Push a new transaction to the network. It is used by both the wallet and the mining modules.
:return: True if the transaction was accepted
:rtype: bool
"""
if tx.storage:
assert tx.storage == self.tx_storage, 'Invalid tx storage'
else:
tx.storage = self.tx_storage
return self.on_new_tx(tx, fails_silently=fails_silently)
    @cpu.profiler('on_new_tx')
    def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = None,
                  quiet: bool = False, fails_silently: bool = True, propagate_to_peers: bool = True,
                  skip_block_weight_verification: bool = False) -> bool:
        """This method is called when any transaction arrive.

        If `fails_silently` is False, it may raise either InvalidNewTransaction or TxValidationError.

        :return: True if the transaction was accepted
        :rtype: bool
        """
        assert tx.hash is not None
        if self.state != self.NodeState.INITIALIZING:
            # Reject duplicates up-front (during initialization txs come from storage, so they exist by design).
            if self.tx_storage.transaction_exists(tx.hash):
                if not fails_silently:
                    raise InvalidNewTransaction('Transaction already exists {}'.format(tx.hash_hex))
                self.log.debug('on_new_tx(): Transaction already exists', tx=tx.hash_hex)
                return False

        if self.state != self.NodeState.INITIALIZING or self._full_verification:
            try:
                assert self.validate_new_tx(tx, skip_block_weight_verification=skip_block_weight_verification) is True
            except (InvalidNewTransaction, TxValidationError):
                # Discard invalid Transaction/block.
                self.log.debug('tx/block discarded', tx=tx, exc_info=True)
                if not fails_silently:
                    raise
                return False

        if self.state != self.NodeState.INITIALIZING:
            self.tx_storage.save_transaction(tx)
        else:
            # During initialization the tx already lives in storage; only caches/indexes are updated.
            self.tx_storage._add_to_cache(tx)
            if self._full_verification:
                tx.reset_metadata()
            else:
                # When doing a fast init, we don't update the consensus, so we must trust the data on the metadata
                # For transactions, we don't store them on the tips index if they are voided
                # We have to execute _add_to_cache before because _del_from_cache does not remove from all indexes
                metadata = tx.get_metadata()
                if not tx.is_block and metadata.voided_by:
                    self.tx_storage._del_from_cache(tx)

        if self.state != self.NodeState.INITIALIZING or self._full_verification:
            try:
                tx.update_initial_metadata()
                self.consensus_algorithm.update(tx)
            except Exception:
                # Consensus update failed: roll back the save so storage stays consistent, then re-raise.
                self.log.exception('unexpected error when processing tx', tx=tx)
                self.tx_storage.remove_transaction(tx)
                raise

        if not quiet:
            ts_date = datetime.datetime.fromtimestamp(tx.timestamp)
            now = datetime.datetime.fromtimestamp(self.reactor.seconds())
            if tx.is_block:
                self.log.info('new block', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now(now))
            else:
                self.log.info('new tx', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now(now))

        if propagate_to_peers:
            # Propagate to our peers.
            self.connections.send_tx_to_peers(tx)

        if self.wallet:
            # TODO Remove it and use pubsub instead.
            self.wallet.on_new_tx(tx)

        # Publish to pubsub manager the new tx accepted
        self.pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=tx)

        return True
def listen(self, description: str, use_ssl: Optional[bool] = None) -> None:
endpoint = self.connections.listen(description, use_ssl)
if self.hostname:
proto, _, _ = description.partition(':')
address = '{}://{}:{}'.format(proto, self.hostname, endpoint._port)
self.my_peer.entrypoints.append(address)
def add_peer_to_whitelist(self, peer_id):
if not settings.ENABLE_PEER_WHITELIST:
return
if peer_id in self.peers_whitelist:
self.log.info('peer already in whitelist', peer_id=peer_id)
else:
self.peers_whitelist.append(peer_id)
def remove_peer_from_whitelist_and_disconnect(self, peer_id: str) -> None:
if not settings.ENABLE_PEER_WHITELIST:
return
if peer_id in self.peers_whitelist:
self.peers_whitelist.remove(peer_id)
# disconnect from node
self.connections.drop_connection_by_peer_id(peer_id)
class ParentTxs(NamedTuple):
    """ Tuple where the `must_include` hash, when present (at most 1), must be included in a pair, and a list of hashes
    where any of them can be included. This is done in order to make sure that when there is only one tx tip, it is
    included.
    """
    # Highest timestamp among the candidate parent txs; a new block's timestamp must exceed it.
    max_timestamp: int
    # Hashes from which any may be chosen as parents.
    can_include: List[bytes]
    # Hashes that must all be included as parents (at most one in practice).
    must_include: List[bytes]
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,700 | CorgiCash/corgi-core | refs/heads/master | /hathor/protos/__init__.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from hathor.protos.transaction_pb2 import (
BaseTransaction,
BitcoinAuxPow,
Block,
Metadata,
TokenCreationTransaction,
Transaction,
TxInput,
TxOutput,
)
from hathor.protos.transaction_storage_pb2 import (
ANY_ORDER,
ANY_TYPE,
ASC_ORDER,
BLOCK_TYPE,
FOR_CACHING,
LEFT_RIGHT_ORDER_CHILDREN,
LEFT_RIGHT_ORDER_SPENT,
NO_FILTER,
ONLY_NEWER,
ONLY_OLDER,
TOPOLOGICAL_ORDER,
TRANSACTION_TYPE,
AddValueRequest,
CountRequest,
CountResponse,
Empty,
ExistsRequest,
ExistsResponse,
FirstTimestampRequest,
FirstTimestampResponse,
GetRequest,
GetResponse,
GetValueRequest,
GetValueResponse,
Interval,
LatestTimestampRequest,
LatestTimestampResponse,
ListItemResponse,
ListNewestRequest,
ListRequest,
ListTipsRequest,
MarkAsRequest,
MarkAsResponse,
RemoveRequest,
RemoveResponse,
RemoveValueRequest,
SaveRequest,
SaveResponse,
SortedTxsRequest,
)
try:
from hathor.protos.transaction_storage_pb2_grpc import (
TransactionStorageServicer,
TransactionStorageStub,
add_TransactionStorageServicer_to_server,
)
except ImportError:
pass
# Names exported by `from hathor.protos import *`.
# FIX: removed 'VOIDED', which is not imported above and made every star-import
# raise AttributeError; also dropped the duplicate 'ONLY_NEWER'/'ONLY_OLDER' entries.
__all__ = [
    'BaseTransaction',
    'Transaction',
    'Block',
    'TxInput',
    'TxOutput',
    'BitcoinAuxPow',
    'Metadata',
    'ExistsRequest',
    'ExistsResponse',
    'GetRequest',
    'GetResponse',
    'SaveRequest',
    'SaveResponse',
    'RemoveRequest',
    'RemoveResponse',
    'CountRequest',
    'CountResponse',
    'LatestTimestampRequest',
    'LatestTimestampResponse',
    'AddValueRequest',
    'GetValueRequest',
    'GetValueResponse',
    'RemoveValueRequest',
    'Empty',
    'FirstTimestampRequest',
    'FirstTimestampResponse',
    'MarkAsRequest',
    'MarkAsResponse',
    'ListRequest',
    'ListTipsRequest',
    'ListNewestRequest',
    'ListItemResponse',
    'Interval',
    'SortedTxsRequest',
    'TokenCreationTransaction',
    'TransactionStorageStub',
    'TransactionStorageServicer',
    'ANY_TYPE',
    'TRANSACTION_TYPE',
    'BLOCK_TYPE',
    'NO_FILTER',
    'ONLY_NEWER',
    'ONLY_OLDER',
    'ANY_ORDER',
    'ASC_ORDER',
    'TOPOLOGICAL_ORDER',
    'FOR_CACHING',
    'LEFT_RIGHT_ORDER_CHILDREN',
    'LEFT_RIGHT_ORDER_SPENT',
    'add_TransactionStorageServicer_to_server',
]
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,701 | CorgiCash/corgi-core | refs/heads/master | /hathor/transaction/resources/tips.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from twisted.web import resource
from hathor.api_util import set_cors
from hathor.cli.openapi_files.register import register_resource
@register_resource
class TipsResource(resource.Resource):
    """Web API resource listing the current transaction tips.

    GET returns a JSON object with the hex hashes of the tx tips, optionally
    evaluated at a given timestamp. You must run with option `--status <PORT>`.
    """
    isLeaf = True

    def __init__(self, manager):
        # The manager gives access to tx_storage, where the tips index lives.
        self.manager = manager

    def render_GET(self, request):
        """Handle GET /tips/, returning a JSON list of tip hashes.

        'timestamp' is an optional query parameter forwarded to get_tx_tips.

        :rtype: string (json)
        """
        request.setHeader(b'content-type', b'application/json; charset=utf-8')
        set_cors(request, 'GET')

        raw_args = request.args
        timestamp = None
        if b'timestamp' in raw_args:
            try:
                timestamp = int(raw_args[b'timestamp'][0])
            except ValueError:
                error = {
                    'success': False,
                    'message': 'Invalid timestamp parameter, expecting an integer'
                }
                return json.dumps(error).encode('utf-8')

        tips = self.manager.tx_storage.get_tx_tips(timestamp)
        payload = {'success': True, 'tips': [interval.data.hex() for interval in tips]}
        return json.dumps(payload).encode('utf-8')
# OpenAPI specification for the /tips endpoint, picked up by the docs generator
# via the @register_resource decorator on TipsResource.
TipsResource.openapi = {
    '/tips': {
        'x-visibility': 'private',
        'get': {
            'tags': ['transaction'],
            'operationId': 'tips',
            'summary': 'Tips',
            'description': 'Returns a list of tips hashes in hexadecimal',
            'parameters': [
                {
                    'name': 'timestamp',
                    'in': 'query',
                    'description': 'Timestamp to search for the tips',
                    'required': False,
                    'schema': {
                        'type': 'int'
                    }
                }
            ],
            'responses': {
                '200': {
                    'description': 'Success',
                    'content': {
                        'application/json': {
                            'examples': {
                                'success': {
                                    'summary': 'Success',
                                    'value': {
                                        'success': True,
                                        'tips': [
                                            '00002b3be4e3876e67b5e090d76dcd71cde1a30ca1e54e38d65717ba131cd22f',
                                            '0002bb171de3490828028ec5eef3325956acb6bcffa6a50466bb9a81d38363c2'
                                        ]
                                    }
                                },
                                'error': {
                                    'summary': 'Invalid timestamp parameter',
                                    'value': {
                                        'success': False,
                                        'message': 'Invalid timestamp parameter, expecting an integer'
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,702 | CorgiCash/corgi-core | refs/heads/master | /hathor/transaction/storage/remote_storage.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from math import inf
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Iterator, List, Optional, Set, Tuple, Union
import grpc
from grpc._server import _Context
from intervaltree import Interval
from structlog import get_logger
from twisted.internet.defer import Deferred, inlineCallbacks
from hathor import protos
from hathor.exception import HathorError
from hathor.indexes import TransactionIndexElement, TransactionsIndex
from hathor.transaction import Block
from hathor.transaction.storage.exceptions import TransactionDoesNotExist
from hathor.transaction.storage.transaction_storage import AllTipsCache, TransactionStorage
if TYPE_CHECKING:
from hathor.transaction import BaseTransaction # noqa: F401
logger = get_logger()
class RemoteCommunicationError(HathorError):
    """Raised when a grpc call to the remote storage fails for a reason other than NOT_FOUND."""
    pass
def convert_grpc_exceptions(func: Callable) -> Callable:
    """Decorator that converts grpc exceptions into hathor exceptions.

    NOT_FOUND becomes TransactionDoesNotExist; any other grpc error is wrapped
    in RemoteCommunicationError.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except grpc.RpcError as e:
            if e.code() != grpc.StatusCode.NOT_FOUND:
                raise RemoteCommunicationError from e
            raise TransactionDoesNotExist
    return wrapper
def convert_grpc_exceptions_generator(func: Callable) -> Callable:
    """Decorator that converts grpc exceptions into hathor exceptions (generator variant).

    NOT_FOUND becomes TransactionDoesNotExist; any other grpc error is wrapped
    in RemoteCommunicationError.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            yield from func(*args, **kwargs)
        except grpc.RpcError as e:
            if e.code() != grpc.StatusCode.NOT_FOUND:
                raise RemoteCommunicationError from e
            raise TransactionDoesNotExist
    return wrapper
def convert_hathor_exceptions(func: Callable) -> Callable:
    """Decorator that annotates better details and codes on the grpc context for known exceptions.

    TransactionDoesNotExist is reported as grpc NOT_FOUND before being re-raised.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(self: Any, request: Any, context: _Context) -> Any:
        try:
            return func(self, request, context)
        except TransactionDoesNotExist:
            context.set_code(grpc.StatusCode.NOT_FOUND)
            context.set_details('Transaction does not exist.')
            raise
    return wrapper
def convert_hathor_exceptions_generator(func: Callable) -> Callable:
    """Decorator that annotates better details and codes on the grpc context for known exceptions
    (generator variant).

    TransactionDoesNotExist is reported as grpc NOT_FOUND before being re-raised.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(self: Any, request: Any, context: _Context) -> Iterator:
        try:
            yield from func(self, request, context)
        except TransactionDoesNotExist:
            context.set_code(grpc.StatusCode.NOT_FOUND)
            context.set_details('Transaction does not exist.')
            raise
    return wrapper
class TransactionRemoteStorage(TransactionStorage):
"""Connects to a Storage API Server at given port and exposes standard storage interface.
"""
    def __init__(self, with_index=None):
        """Create a disconnected remote storage; call `connect_to` before using it."""
        super().__init__()
        self._channel = None
        # NOTE(review): with_index is stored but not used in this chunk — confirm its consumer.
        self.with_index = with_index

        # Set initial value for _best_block_tips cache.
        self._best_block_tips = []
    def connect_to(self, port: int) -> None:
        """(Re)connect to a storage grpc server on localhost at the given port.

        Any previous channel is closed first; the genesis txs are then saved or
        verified and the best-block-tips cache is seeded.
        """
        if self._channel:
            self._channel.close()
        self._channel = grpc.insecure_channel('127.0.0.1:{}'.format(port))
        self._stub = protos.TransactionStorageStub(self._channel)

        # Initialize genesis.
        self._save_or_verify_genesis()

        # Set initial value for _best_block_tips cache.
        self._best_block_tips = [x.hash for x in self.get_all_genesis() if x.is_block]
    def _check_connection(self) -> None:
        """raise error if not connected"""
        # Imported locally to avoid a circular import: subprocess_storage also imports this module.
        from .subprocess_storage import SubprocessNotAliveError
        if not self._channel:
            raise SubprocessNotAliveError('subprocess not started')
    # TransactionStorageSync interface implementation:

    @convert_grpc_exceptions
    def remove_transaction(self, tx: 'BaseTransaction') -> None:
        """Remove the given transaction from the remote storage and the local weakref cache."""
        self._check_connection()
        tx_proto = tx.to_proto()
        request = protos.RemoveRequest(transaction=tx_proto)
        result = self._stub.Remove(request)  # noqa: F841
        assert result.removed
        self._remove_from_weakref(tx)
    @convert_grpc_exceptions
    def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None:
        """Save the given transaction remotely; with only_metadata=True only its metadata is persisted."""
        self._check_connection()
        tx_proto = tx.to_proto()
        request = protos.SaveRequest(transaction=tx_proto, only_metadata=only_metadata)
        result = self._stub.Save(request)  # noqa: F841
        assert result.saved
        self._save_to_weakref(tx)
@convert_grpc_exceptions
def transaction_exists(self, hash_bytes: bytes) -> bool:
self._check_connection()
request = protos.ExistsRequest(hash=hash_bytes)
result = self._stub.Exists(request)
return result.exists
    @convert_grpc_exceptions
    def _get_transaction(self, hash_bytes: bytes) -> 'BaseTransaction':
        """Fetch a transaction by hash, preferring the local weakref cache over a remote call."""
        tx = self.get_transaction_from_weakref(hash_bytes)
        if tx is not None:
            return tx

        # Imported locally; the hathor.transaction package depends on storage modules.
        from hathor.transaction import tx_or_block_from_proto
        self._check_connection()
        request = protos.GetRequest(hash=hash_bytes)
        result = self._stub.Get(request)
        tx = tx_or_block_from_proto(result.transaction, storage=self)
        self._save_to_weakref(tx)
        return tx
    @convert_grpc_exceptions_generator
    def get_all_transactions(self) -> Iterator['BaseTransaction']:
        """Iterate over every transaction stored remotely."""
        yield from self._call_list_request_generators()
@convert_grpc_exceptions
def get_count_tx_blocks(self) -> int:
self._check_connection()
request = protos.CountRequest(tx_type=protos.ANY_TYPE)
result = self._stub.Count(request)
return result.count
    # TransactionStorageAsync interface implementation:

    @convert_grpc_exceptions
    def save_transaction_deferred(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None:
        """Async save — not implemented for the remote storage."""
        # self._check_connection()
        raise NotImplementedError
@convert_grpc_exceptions
def remove_transaction_deferred(self, tx: 'BaseTransaction') -> None:
    """Async variant of remove_transaction; not implemented for this storage."""
    # self._check_connection()
    raise NotImplementedError
@inlineCallbacks
@convert_grpc_exceptions_generator
def transaction_exists_deferred(self, hash_bytes: bytes) -> Generator[None, protos.ExistsResponse, bool]:
    """Async variant of transaction_exists.

    Bridges the gRPC future into a Twisted Deferred via Deferred.fromFuture,
    so callers get a Deferred that fires with the boolean result.
    """
    self._check_connection()
    request = protos.ExistsRequest(hash=hash_bytes)
    result = yield Deferred.fromFuture(self._stub.Exists.future(request))
    return result.exists
@convert_grpc_exceptions
def get_transaction_deferred(self, hash_bytes: bytes) -> Deferred:
    """Async variant of get_transaction; not implemented for this storage."""
    # self._check_connection()
    raise NotImplementedError
@convert_grpc_exceptions
def get_all_transactions_deferred(self) -> Deferred:
    """Async variant of get_all_transactions; not implemented for this storage."""
    # self._check_connection()
    raise NotImplementedError
@convert_grpc_exceptions
def get_count_tx_blocks_deferred(self) -> Deferred:
    """Async variant of get_count_tx_blocks; not implemented for this storage."""
    # self._check_connection()
    raise NotImplementedError
# TransactionStorage interface implementation:
@property
def latest_timestamp(self) -> int:
    """Timestamp of the newest tx/block known to the remote storage."""
    timestamp = self._latest_timestamp()
    return timestamp
@convert_grpc_exceptions
def _latest_timestamp(self) -> int:
    """Fetch the latest timestamp through the LatestTimestamp RPC."""
    self._check_connection()
    response = self._stub.LatestTimestamp(protos.LatestTimestampRequest())
    return response.timestamp
@property
def first_timestamp(self) -> Optional[int]:
    """Timestamp of the oldest tx/block, fetched once and then cached.

    Returns None until the remote storage reports a truthy first timestamp:
    only truthy values are stored in the `_cached_first_timestamp` slot.
    """
    if not hasattr(self, '_cached_first_timestamp'):
        timestamp = self._first_timestamp()
        if timestamp:
            setattr(self, '_cached_first_timestamp', timestamp)
    return getattr(self, '_cached_first_timestamp', None)
@convert_grpc_exceptions
def _first_timestamp(self) -> int:
    """Fetch the first timestamp through the FirstTimestamp RPC."""
    self._check_connection()
    response = self._stub.FirstTimestamp(protos.FirstTimestampRequest())
    return response.timestamp
def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> List[bytes]:
    """Delegate to the base-class implementation unchanged."""
    return super().get_best_block_tips(timestamp, skip_cache=skip_cache)
@convert_grpc_exceptions
def get_all_tips(self, timestamp: Optional[Union[int, float]] = None) -> Set[Interval]:
    """Return the tips (as intervals) of blocks and txs combined at `timestamp`.

    Answers for the latest timestamp are memoized in `self._all_tips_cache`
    together with their merkle tree, so repeated calls avoid the RPC.
    """
    self._check_connection()
    if isinstance(timestamp, float) and timestamp != inf:
        self.log.warn('timestamp given in float will be truncated, use int instead')
        timestamp = int(timestamp)
    # Serve from the cache when it is at least as new as the requested timestamp.
    if self._all_tips_cache is not None and timestamp is not None and timestamp >= self._all_tips_cache.timestamp:
        return self._all_tips_cache.tips
    request = protos.ListTipsRequest(tx_type=protos.ANY_TYPE, timestamp=timestamp)
    result = self._stub.ListTips(request)
    tips = set()
    for interval_proto in result:
        tips.add(Interval(interval_proto.begin, interval_proto.end, interval_proto.data))
    # Only answers covering the latest timestamp are safe to cache.
    if timestamp is not None and timestamp >= self.latest_timestamp:
        merkle_tree, hashes = self.calculate_merkle_tree(tips)
        self._all_tips_cache = AllTipsCache(self.latest_timestamp, tips, merkle_tree, hashes)
    return tips
@convert_grpc_exceptions
def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
    """Return the block tips (as intervals) at the given timestamp."""
    self._check_connection()
    if isinstance(timestamp, float) and timestamp != inf:
        self.log.warn('timestamp given in float will be truncated, use int instead')
        timestamp = int(timestamp)
    tips_req = protos.ListTipsRequest(tx_type=protos.BLOCK_TYPE, timestamp=timestamp)
    response = self._stub.ListTips(tips_req)
    return {Interval(item.begin, item.end, item.data) for item in response}
@convert_grpc_exceptions
def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
    """Return the transaction tips (as intervals) at the given timestamp."""
    self._check_connection()
    if isinstance(timestamp, float) and timestamp != inf:
        self.log.warn('timestamp given in float will be truncated, use int instead')
        timestamp = int(timestamp)
    tips_req = protos.ListTipsRequest(tx_type=protos.TRANSACTION_TYPE, timestamp=timestamp)
    response = self._stub.ListTips(tips_req)
    return {Interval(item.begin, item.end, item.data) for item in response}
@convert_grpc_exceptions
def get_newest_blocks(self, count: int) -> Tuple[List['Block'], bool]:
    """Return up to `count` newest blocks plus a flag telling whether more exist.

    The ListNewest RPC streams `transaction` items followed by a final
    `has_more` item; iteration stops as soon as the flag arrives.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    request = protos.ListNewestRequest(tx_type=protos.BLOCK_TYPE, count=count)
    result = self._stub.ListNewest(request)
    tx_list: List['Block'] = []
    has_more = None
    for list_item in result:
        if list_item.HasField('transaction'):
            tx_proto = list_item.transaction
            blk = tx_or_block_from_proto(tx_proto, storage=self)
            assert isinstance(blk, Block)
            tx_list.append(blk)
        elif list_item.HasField('has_more'):
            has_more = list_item.has_more
            # assuming there are no more items after `has_more`, break soon
            break
        else:
            raise ValueError('unexpected list_item_oneof')
    assert isinstance(has_more, bool)
    return tx_list, has_more
@convert_grpc_exceptions
def get_newest_txs(self, count: int) -> Tuple[List['BaseTransaction'], bool]:
    """Return up to `count` newest transactions plus a flag telling whether more exist.

    The ListNewest RPC streams `transaction` items followed by a final
    `has_more` item; iteration stops as soon as the flag arrives.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    request = protos.ListNewestRequest(tx_type=protos.TRANSACTION_TYPE, count=count)
    result = self._stub.ListNewest(request)
    tx_list = []
    has_more = None
    for list_item in result:
        if list_item.HasField('transaction'):
            tx_proto = list_item.transaction
            tx_list.append(tx_or_block_from_proto(tx_proto, storage=self))
        elif list_item.HasField('has_more'):
            has_more = list_item.has_more
            # assuming there are no more items after `has_more`, break soon
            break
        else:
            raise ValueError('unexpected list_item_oneof')
    assert has_more is not None
    return tx_list, has_more
@convert_grpc_exceptions
def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes,
                           count: int) -> Tuple[List['BaseTransaction'], bool]:
    """Return up to `count` blocks older than the (`timestamp`, `hash_bytes`) cursor.

    Also returns a flag telling whether more results exist past this page.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    if isinstance(timestamp, float):
        self.log.warn('timestamp given in float will be truncated, use int instead')
        timestamp = int(timestamp)
    request = protos.ListRequest(
        tx_type=protos.BLOCK_TYPE,
        time_filter=protos.ONLY_OLDER,
        timestamp=timestamp,
        hash=hash_bytes,
        max_count=count,
    )
    result = self._stub.List(request)
    tx_list = []
    has_more = None
    for list_item in result:
        if list_item.HasField('transaction'):
            tx_proto = list_item.transaction
            tx_list.append(tx_or_block_from_proto(tx_proto, storage=self))
        elif list_item.HasField('has_more'):
            has_more = list_item.has_more
            # assuming there are no more items after `has_more`, break soon
            break
        else:
            raise ValueError('unexpected list_item_oneof')
    assert has_more is not None
    return tx_list, has_more
@convert_grpc_exceptions
def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes,
                           count: int) -> Tuple[List['BaseTransaction'], bool]:
    """Return up to `count` blocks newer than the (`timestamp`, `hash_bytes`) cursor.

    Also returns a flag telling whether more results exist past this page.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    if isinstance(timestamp, float):
        self.log.warn('timestamp given in float will be truncated, use int instead')
        timestamp = int(timestamp)
    request = protos.ListRequest(
        tx_type=protos.BLOCK_TYPE,
        time_filter=protos.ONLY_NEWER,
        timestamp=timestamp,
        hash=hash_bytes,
        max_count=count,
    )
    result = self._stub.List(request)
    tx_list = []
    has_more = None
    for list_item in result:
        if list_item.HasField('transaction'):
            tx_proto = list_item.transaction
            tx_list.append(tx_or_block_from_proto(tx_proto, storage=self))
        elif list_item.HasField('has_more'):
            has_more = list_item.has_more
            # assuming there are no more items after `has_more`, break soon
            break
        else:
            raise ValueError('unexpected list_item_oneof')
    assert has_more is not None
    return tx_list, has_more
@convert_grpc_exceptions
def get_older_txs_after(self, timestamp: int, hash_bytes: bytes,
                        count: int) -> Tuple[List['BaseTransaction'], bool]:
    """Return up to `count` transactions older than the (`timestamp`, `hash_bytes`) cursor.

    Also returns a flag telling whether more results exist past this page.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    if isinstance(timestamp, float):
        self.log.warn('timestamp given in float will be truncated, use int instead')
        timestamp = int(timestamp)
    request = protos.ListRequest(
        tx_type=protos.TRANSACTION_TYPE,
        time_filter=protos.ONLY_OLDER,
        timestamp=timestamp,
        hash=hash_bytes,
        max_count=count,
    )
    result = self._stub.List(request)
    tx_list = []
    has_more = None
    for list_item in result:
        if list_item.HasField('transaction'):
            tx_proto = list_item.transaction
            tx_list.append(tx_or_block_from_proto(tx_proto, storage=self))
        elif list_item.HasField('has_more'):
            has_more = list_item.has_more
            # assuming there are no more items after `has_more`, break soon
            break
        else:
            raise ValueError('unexpected list_item_oneof')
    assert has_more is not None
    return tx_list, has_more
@convert_grpc_exceptions
def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes,
                        count: int) -> Tuple[List['BaseTransaction'], bool]:
    """Return up to `count` transactions newer than the (`timestamp`, `hash_bytes`) cursor.

    Also returns a flag telling whether more results exist past this page.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    if isinstance(timestamp, float):
        self.log.warn('timestamp given in float will be truncated, use int instead')
        timestamp = int(timestamp)
    request = protos.ListRequest(
        tx_type=protos.TRANSACTION_TYPE,
        time_filter=protos.ONLY_NEWER,
        timestamp=timestamp,
        hash=hash_bytes,
        max_count=count,
    )
    result = self._stub.List(request)
    tx_list = []
    has_more = None
    for list_item in result:
        if list_item.HasField('transaction'):
            tx_proto = list_item.transaction
            tx_list.append(tx_or_block_from_proto(tx_proto, storage=self))
        elif list_item.HasField('has_more'):
            has_more = list_item.has_more
            # assuming there are no more items after `has_more`, break soon
            break
        else:
            raise ValueError('unexpected list_item_oneof')
    assert has_more is not None
    return tx_list, has_more
def _manually_initialize(self) -> None:
pass
@convert_grpc_exceptions_generator
def _call_list_request_generators(self, kwargs: Optional[Dict[str, Any]] = None) -> Iterator['BaseTransaction']:
    """ Execute a call for the ListRequest and yield the blocks or txs

    :param kwargs: Parameters to be sent to ListRequest
    :type kwargs: Dict[str,]
    """
    from hathor.transaction import tx_or_block_from_proto

    def get_tx(tx):
        # Prefer the instance already alive in the weakref map (keeps object
        # identity); otherwise register the freshly deserialized one.
        tx2 = self.get_transaction_from_weakref(tx.hash)
        if tx2:
            tx = tx2
        else:
            self._save_to_weakref(tx)
        return tx

    self._check_connection()
    if kwargs:
        request = protos.ListRequest(**kwargs)
    else:
        request = protos.ListRequest()
    result = self._stub.List(request)
    for list_item in result:
        # A non-`transaction` item (e.g. has_more) terminates the stream here.
        if not list_item.HasField('transaction'):
            break
        tx_proto = list_item.transaction
        tx = tx_or_block_from_proto(tx_proto, storage=self)
        assert tx.hash is not None
        # Deduplicate under the per-hash lock when one is available.
        lock = self._get_lock(tx.hash)
        if lock:
            with lock:
                tx = get_tx(tx)
        else:
            tx = get_tx(tx)
        yield tx
@convert_grpc_exceptions_generator
def _topological_sort(self):
    """Yield every tx/block in the topological order provided by the remote side."""
    for tx in self._call_list_request_generators({'order_by': protos.TOPOLOGICAL_ORDER}):
        yield tx
@convert_grpc_exceptions
def _add_to_cache(self, tx):
    """Mark `tx` for caching on the remote storage (MarkAs FOR_CACHING)."""
    self._check_connection()
    mark_req = protos.MarkAsRequest(transaction=tx.to_proto(), mark_type=protos.FOR_CACHING, relax_assert=False)
    self._stub.MarkAs(mark_req)
@convert_grpc_exceptions
def _del_from_cache(self, tx: 'BaseTransaction', *, relax_assert: bool = False) -> None:
    """Remove the caching mark of `tx` on the remote storage (MarkAs with remove_mark)."""
    self._check_connection()
    unmark_req = protos.MarkAsRequest(transaction=tx.to_proto(), mark_type=protos.FOR_CACHING,
                                      remove_mark=True, relax_assert=relax_assert)
    self._stub.MarkAs(unmark_req)
@convert_grpc_exceptions
def get_block_count(self) -> int:
    """Return the number of blocks stored remotely."""
    self._check_connection()
    count_resp = self._stub.Count(protos.CountRequest(tx_type=protos.BLOCK_TYPE))
    return count_resp.count
@convert_grpc_exceptions
def get_tx_count(self) -> int:
    """Return the number of transactions stored remotely."""
    self._check_connection()
    count_resp = self._stub.Count(protos.CountRequest(tx_type=protos.TRANSACTION_TYPE))
    return count_resp.count
def get_genesis(self, hash_bytes: bytes) -> Optional['BaseTransaction']:
assert self._genesis_cache is not None
return self._genesis_cache.get(hash_bytes, None)
def get_all_genesis(self) -> Set['BaseTransaction']:
assert self._genesis_cache is not None
return set(self._genesis_cache.values())
@convert_grpc_exceptions
def get_transactions_before(self, hash_bytes, num_blocks=100):  # pragma: no cover
    """Return up to `num_blocks` transactions listed before `hash_bytes`.

    Uses the List RPC with `filter_before=True`; the stream ends at the first
    non-`transaction` item.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    request = protos.ListRequest(
        tx_type=protos.TRANSACTION_TYPE,
        hash=hash_bytes,
        max_count=num_blocks,
        filter_before=True,
    )
    result = self._stub.List(request)
    tx_list = []
    for list_item in result:
        if not list_item.HasField('transaction'):
            break
        tx_proto = list_item.transaction
        tx_list.append(tx_or_block_from_proto(tx_proto, storage=self))
    return tx_list
@convert_grpc_exceptions
def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> List[Block]:
    """Return up to `num_blocks` blocks listed before `hash_bytes`.

    Uses the List RPC with `filter_before=True`; the stream ends at the first
    non-`transaction` item. Every returned item is asserted to be a Block.
    """
    from hathor.transaction import tx_or_block_from_proto
    self._check_connection()
    request = protos.ListRequest(
        tx_type=protos.BLOCK_TYPE,
        hash=hash_bytes,
        max_count=num_blocks,
        filter_before=True,
    )
    result = self._stub.List(request)
    tx_list: List[Block] = []
    for list_item in result:
        if not list_item.HasField('transaction'):
            break
        tx_proto = list_item.transaction
        block = tx_or_block_from_proto(tx_proto, storage=self)
        assert isinstance(block, Block)
        tx_list.append(block)
    return tx_list
@convert_grpc_exceptions
def get_all_sorted_txs(self, timestamp: int, count: int, offset: int) -> TransactionsIndex:
    """Return a TransactionsIndex page of (timestamp, hash) entries via the SortedTxs RPC."""
    self._check_connection()
    request = protos.SortedTxsRequest(
        timestamp=timestamp,
        count=count,
        offset=offset
    )
    result = self._stub.SortedTxs(request)
    tx_list = []
    for tx_proto in result:
        tx_list.append(TransactionIndexElement(tx_proto.timestamp, tx_proto.hash))
    all_sorted = TransactionsIndex()
    all_sorted.update(tx_list)
    return all_sorted
@convert_grpc_exceptions
def add_value(self, key: str, value: str) -> None:
    """Store `value` under `key` in the remote storage's key/value area."""
    self._check_connection()
    self._stub.AddValue(protos.AddValueRequest(key=key, value=value))
@convert_grpc_exceptions
def remove_value(self, key: str) -> None:
    """Remove `key` from the remote storage's key/value area."""
    self._check_connection()
    self._stub.RemoveValue(protos.RemoveValueRequest(key=key))
@convert_grpc_exceptions
def get_value(self, key: str) -> Optional[str]:
    """Return the value stored under `key`, or None when unset or empty."""
    self._check_connection()
    response = self._stub.GetValue(protos.GetValueRequest(key=key))
    return response.value or None
class TransactionStorageServicer(protos.TransactionStorageServicer):
    """gRPC servicer exposing a local TransactionStorage over the wire.

    Each RPC translates a protobuf request into calls on `self.storage` and
    serializes the result back into a protobuf response.
    """

    def __init__(self, tx_storage):
        self.log = logger.new()
        self.storage = tx_storage
        # We must always disable weakref because it will run remotely, which means
        # each call will create a new instance of the block/transaction during the
        # deserialization process.
        self.storage._disable_weakref()

    @convert_hathor_exceptions
    def Exists(self, request: protos.ExistsRequest, context: _Context) -> protos.ExistsResponse:
        """Answer whether a tx/block with the requested hash is stored."""
        hash_bytes = request.hash
        exists = self.storage.transaction_exists(hash_bytes)
        return protos.ExistsResponse(exists=exists)

    @convert_hathor_exceptions
    def Get(self, request: protos.GetRequest, context: _Context) -> protos.GetResponse:
        """Fetch a tx/block by hash, optionally stripping its metadata."""
        hash_bytes = request.hash
        exclude_metadata = request.exclude_metadata
        tx = self.storage.get_transaction(hash_bytes)
        if exclude_metadata:
            del tx._metadata
        else:
            tx.get_metadata()
        return protos.GetResponse(transaction=tx.to_proto())

    @convert_hathor_exceptions
    def Save(self, request: protos.SaveRequest, context: _Context) -> protos.SaveResponse:
        """Persist the tx/block carried by the request (optionally metadata only)."""
        from hathor.transaction import tx_or_block_from_proto
        tx_proto = request.transaction
        only_metadata = request.only_metadata
        result = protos.SaveResponse(saved=False)
        tx = tx_or_block_from_proto(tx_proto, storage=self.storage)
        self.storage.save_transaction(tx, only_metadata=only_metadata)
        result.saved = True
        return result

    @convert_hathor_exceptions
    def Remove(self, request: protos.RemoveRequest, context: _Context) -> protos.RemoveResponse:
        """Remove the tx/block carried by the request from the storage."""
        from hathor.transaction import tx_or_block_from_proto
        tx_proto = request.transaction
        result = protos.RemoveResponse(removed=False)
        tx = tx_or_block_from_proto(tx_proto, storage=self.storage)
        self.storage.remove_transaction(tx)
        result.removed = True
        return result

    @convert_hathor_exceptions
    def Count(self, request: protos.CountRequest, context: _Context) -> protos.CountResponse:
        """Count stored blocks, transactions, or both, depending on `tx_type`."""
        tx_type = request.tx_type
        if tx_type == protos.ANY_TYPE:
            count = self.storage.get_count_tx_blocks()
        elif tx_type == protos.TRANSACTION_TYPE:
            count = self.storage.get_tx_count()
        elif tx_type == protos.BLOCK_TYPE:
            count = self.storage.get_block_count()
        else:
            raise ValueError('invalid tx_type %s' % (tx_type,))
        return protos.CountResponse(count=count)

    @convert_hathor_exceptions
    def LatestTimestamp(self, request: protos.LatestTimestampRequest,
                        context: _Context) -> protos.LatestTimestampResponse:
        """Return the storage's latest known timestamp."""
        return protos.LatestTimestampResponse(timestamp=self.storage.latest_timestamp)

    @convert_hathor_exceptions
    def FirstTimestamp(self, request: protos.FirstTimestampRequest,
                       context: _Context) -> protos.FirstTimestampResponse:
        """Return the storage's first known timestamp."""
        return protos.FirstTimestampResponse(timestamp=self.storage.first_timestamp)

    @convert_hathor_exceptions
    def MarkAs(self, request, context):
        """Add or remove a caching mark on the tx carried by the request."""
        from hathor.transaction import tx_or_block_from_proto
        tx = tx_or_block_from_proto(request.transaction, storage=self.storage)
        if request.mark_type == protos.FOR_CACHING:
            if request.remove_mark:
                self.storage._del_from_cache(tx, relax_assert=request.relax_assert)
            else:
                self.storage._add_to_cache(tx)
        else:
            raise ValueError('invalid mark_type')
        # TODO: correct value for `marked`
        return protos.MarkAsResponse(marked=True)

    @convert_hathor_exceptions_generator
    def List(self, request: protos.ListRequest, context: _Context) -> Iterator[protos.ListItemResponse]:
        """Stream the txs/blocks selected by the request's filters.

        Dispatches on `filter_before`, `time_filter`, `tx_type` and `order_by`;
        after the items, a final `has_more` item is emitted when the query
        produced one.
        """
        exclude_metadata = request.exclude_metadata
        has_more = None
        hash_bytes = request.hash
        count = request.max_count
        timestamp = request.timestamp
        # TODO: more exceptions for unsupported cases
        if request.filter_before:
            if request.tx_type == protos.ANY_TYPE:
                raise NotImplementedError
            elif request.tx_type == protos.TRANSACTION_TYPE:
                tx_iter = self.storage.get_transactions_before(hash_bytes, count)
            elif request.tx_type == protos.BLOCK_TYPE:
                tx_iter = self.storage.get_blocks_before(hash_bytes, count)
            else:
                raise ValueError('invalid tx_type %s' % (request.tx_type,))
        elif request.time_filter == protos.ONLY_NEWER:
            if request.tx_type == protos.ANY_TYPE:
                raise NotImplementedError
            elif request.tx_type == protos.TRANSACTION_TYPE:
                tx_iter, has_more = self.storage.get_newer_txs_after(timestamp, hash_bytes, count)
            elif request.tx_type == protos.BLOCK_TYPE:
                tx_iter, has_more = self.storage.get_newer_blocks_after(timestamp, hash_bytes, count)
            else:
                raise ValueError('invalid tx_type %s' % (request.tx_type,))
        elif request.time_filter == protos.ONLY_OLDER:
            if request.tx_type == protos.ANY_TYPE:
                raise NotImplementedError
            elif request.tx_type == protos.TRANSACTION_TYPE:
                tx_iter, has_more = self.storage.get_older_txs_after(timestamp, hash_bytes, count)
            elif request.tx_type == protos.BLOCK_TYPE:
                tx_iter, has_more = self.storage.get_older_blocks_after(timestamp, hash_bytes, count)
            else:
                raise ValueError('invalid tx_type %s' % (request.tx_type,))
        elif request.time_filter == protos.NO_FILTER:
            if request.order_by == protos.ANY_ORDER:
                tx_iter = self.storage.get_all_transactions()
            elif request.order_by == protos.TOPOLOGICAL_ORDER:
                tx_iter = self.storage._topological_sort()
            else:
                raise ValueError('invalid order_by')
        else:
            raise ValueError('invalid request')
        for tx in tx_iter:
            if exclude_metadata:
                del tx._metadata
            else:
                tx.get_metadata()
            yield protos.ListItemResponse(transaction=tx.to_proto())
        if has_more is not None:
            yield protos.ListItemResponse(has_more=has_more)

    @convert_hathor_exceptions_generator
    def ListTips(self, request: protos.ListTipsRequest, context: _Context) -> Iterator[protos.Interval]:
        """Stream the tip intervals of blocks, txs, or both at a timestamp."""
        # XXX: using HasField (and oneof) to differentiate None from 0, which is very important in this context
        timestamp = None
        if request.HasField('timestamp'):
            timestamp = request.timestamp
        if request.tx_type == protos.ANY_TYPE:
            tx_intervals = self.storage.get_all_tips(timestamp)
        elif request.tx_type == protos.TRANSACTION_TYPE:
            tx_intervals = self.storage.get_tx_tips(timestamp)
        elif request.tx_type == protos.BLOCK_TYPE:
            tx_intervals = self.storage.get_block_tips(timestamp)
        else:
            raise ValueError('invalid tx_type %s' % (request.tx_type,))
        for interval in tx_intervals:
            yield protos.Interval(begin=interval.begin, end=interval.end, data=interval.data)

    @convert_hathor_exceptions_generator
    def ListNewest(self, request: protos.ListNewestRequest, context: _Context) -> Iterator[protos.ListItemResponse]:
        """Stream the newest txs or blocks followed by a `has_more` item."""
        has_more = False
        if request.tx_type == protos.ANY_TYPE:
            raise NotImplementedError
        elif request.tx_type == protos.TRANSACTION_TYPE:
            tx_list, has_more = self.storage.get_newest_txs(request.count)
        elif request.tx_type == protos.BLOCK_TYPE:
            tx_list, has_more = self.storage.get_newest_blocks(request.count)
        else:
            raise ValueError('invalid tx_type %s' % (request.tx_type,))
        for tx in tx_list:
            yield protos.ListItemResponse(transaction=tx.to_proto())
        yield protos.ListItemResponse(has_more=has_more)

    @convert_hathor_exceptions_generator
    def SortedTxs(self, request: protos.SortedTxsRequest, context: _Context) -> Iterator[protos.Transaction]:
        """Stream (timestamp, hash) pairs from the sorted-transactions index page."""
        timestamp = request.timestamp
        offset = request.offset
        count = request.count
        txs_index = self.storage.get_all_sorted_txs(timestamp, count, offset)
        for tx_element in txs_index[:]:
            yield protos.Transaction(timestamp=tx_element.timestamp, hash=tx_element.hash)

    @convert_hathor_exceptions
    def AddValue(self, request: protos.AddValueRequest, context: _Context) -> protos.Empty:
        """Store a key/value pair in the storage's key/value area."""
        key = request.key
        value = request.value
        self.storage.add_value(key, value)
        return protos.Empty()

    @convert_hathor_exceptions
    def RemoveValue(self, request: protos.RemoveValueRequest, context: _Context) -> protos.Empty:
        """Remove a key from the storage's key/value area."""
        key = request.key
        self.storage.remove_value(key)
        return protos.Empty()

    @convert_hathor_exceptions
    def GetValue(self, request: protos.GetValueRequest, context: _Context) -> protos.GetValueResponse:
        """Return the value stored under a key; the response is empty when unset."""
        key = request.key
        value = self.storage.get_value(key)
        if value:
            return protos.GetValueResponse(value=value)
        else:
            return protos.GetValueResponse()
def create_transaction_storage_server(server: grpc.Server, tx_storage: TransactionStorage,
                                      port: Optional[int] = None) -> Tuple[protos.TransactionStorageServicer, int]:
    """Create a GRPC servicer for the given storage, returns a (servicer, port) tuple.

    :param server: a GRPC server
    :type server: :py:class:`grpc.Server`

    :param tx_storage: an instance of TransactionStorage
    :type tx_storage: :py:class:`hathor.transaction.storage.TransactionStorage`

    :param port: optional listen port, if None a random port will be chosen (and returned)
    :type port: :py:class:`typing.Optional[int]`

    :rtype :py:class:`typing.Tuple[hathor.protos.TransactionStorageServicer, int]`
    """
    servicer = TransactionStorageServicer(tx_storage)
    protos.add_TransactionStorageServicer_to_server(servicer, server)
    # Fix: the `port` parameter was previously ignored (always bound to port 0).
    # Bind to the requested port, or port 0 so the OS picks a free one; grpc
    # returns the port actually bound.
    port = server.add_insecure_port('127.0.0.1:%d' % (port or 0,))
    assert port is not None
    return servicer, port
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,703 | CorgiCash/corgi-core | refs/heads/master | /hathor/transaction/transaction_metadata.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set
from hathor import protos
from hathor.util import practically_equal
if TYPE_CHECKING:
from weakref import ReferenceType # noqa: F401
from hathor.transaction import BaseTransaction # noqa: F401
class TransactionMetadata:
hash: Optional[bytes]
spent_outputs: Dict[int, List[bytes]]
# XXX: the following Optional[] types use None to replace empty set/list to reduce memory use
conflict_with: Optional[List[bytes]]
voided_by: Optional[Set[bytes]]
received_by: List[int]
children: List[bytes]
twins: List[bytes]
accumulated_weight: float
score: float
first_block: Optional[bytes]
height: int
# It must be a weakref.
_tx_ref: Optional['ReferenceType[BaseTransaction]']
# Used to detect changes in voided_by.
_last_voided_by_hash: Optional[int]
_last_spent_by_hash: Optional[int]
def __init__(self, spent_outputs: Optional[Dict[int, List[bytes]]] = None, hash: Optional[bytes] = None,
accumulated_weight: float = 0, score: float = 0, height: int = 0) -> None:
# Hash of the transaction.
self.hash = hash
self._tx_ref = None
# Tx outputs that have been spent.
# The key is the output index, while the value is a set of the transactions which spend the output.
self.spent_outputs = spent_outputs or defaultdict(list)
self._last_spent_by_hash = None
# FIXME: conflict_with -> conflicts_with (as in "this transaction conflicts with these ones")
# Hash of the transactions that conflicts with this transaction.
self.conflict_with = None
# Hash of the transactions that void this transaction.
#
# When a transaction has a conflict and is voided because of this conflict, its own hash is added to
# voided_by. The logic is that the transaction is voiding itself.
#
# When a block is voided, its own hash is added to voided_by.
self.voided_by = None
self._last_voided_by_hash = None
# List of peers which have sent this transaction.
# Store only the peers' id.
self.received_by = []
# List of transactions which have this transaction as parent.
# Store only the transactions' hash.
self.children = []
# Hash of the transactions that are twin to this transaction.
# Twin transactions have the same inputs and outputs
self.twins = []
# Accumulated weight
self.accumulated_weight = accumulated_weight
# Score
self.score = score
# First valid block that verifies this transaction
# If two blocks verify the same parent block and have the same score, both are valid.
self.first_block = None
# Height
self.height = height
def get_tx(self) -> 'BaseTransaction':
assert self._tx_ref is not None
tx = self._tx_ref()
assert tx is not None
return tx
def get_output_spent_by(self, index: int) -> Optional[bytes]:
tx = self.get_tx()
assert tx.storage is not None
spent_set = self.spent_outputs[index]
spent_by = None
for h in spent_set:
tx2 = tx.storage.get_transaction(h)
tx2_meta = tx2.get_metadata()
if not bool(tx2_meta.voided_by):
# There may be only one spent_by.
assert spent_by is None
spent_by = tx2.hash
return spent_by
def has_spent_by_changed_since_last_call(self) -> bool:
"""Check whether `self.get_output_spent_by(...)` has been changed since the last call to this same method.
Notice that it will always return True when the transaction is first loaded into memory.
>>> meta = TransactionMetadata()
>>> b1 = meta.has_spent_by_changed_since_last_call()
>>> b2 = meta.has_spent_by_changed_since_last_call()
>>> assert b1 != b2
"""
cur_hash = hash(tuple((index, self.get_output_spent_by(index)) for index in self.spent_outputs.keys()))
if self._last_spent_by_hash != cur_hash:
self._last_spent_by_hash = cur_hash
return True
return False
def has_voided_by_changed_since_last_call(self) -> bool:
"""Check whether `self.voided_by` has been changed since the last call to this same method.
Notice that it will always return True when the transaction is first loaded into memory.
>>> meta = TransactionMetadata()
>>> meta.voided_by = {b'pretend_this_is_a_tx_hash'}
>>> b1 = meta.has_voided_by_changed_since_last_call()
>>> b2 = meta.has_voided_by_changed_since_last_call()
>>> assert b1 != b2
"""
cur_hash = hash(frozenset(self.voided_by)) if self.voided_by else None
if self._last_voided_by_hash != cur_hash:
self._last_voided_by_hash = cur_hash
return True
return False
def __eq__(self, other: Any) -> bool:
"""Override the default Equals behavior"""
if not isinstance(other, TransactionMetadata):
return False
for field in ['hash', 'conflict_with', 'voided_by', 'received_by',
'children', 'accumulated_weight', 'twins', 'score',
'first_block']:
if (getattr(self, field) or None) != (getattr(other, field) or None):
return False
# Compare self.spent_outputs separately because it is a defaultdict.
# We need to do this because a simple access to a key may have side effects.
# For example:
# >>> a = defaultdict(list)
# >>> b = defaultdict(list)
# >>> a == b
# True
# >>> a[0]
# []
# >>> a == b
# False
if not practically_equal(self.spent_outputs, other.spent_outputs):
return False
return True
def to_json(self) -> Dict[str, Any]:
data: Dict[str, Any] = {}
data['hash'] = self.hash and self.hash.hex()
data['spent_outputs'] = []
for idx, hashes in self.spent_outputs.items():
data['spent_outputs'].append([idx, [h_bytes.hex() for h_bytes in hashes]])
data['received_by'] = list(self.received_by)
data['children'] = [x.hex() for x in self.children]
data['conflict_with'] = [x.hex() for x in self.conflict_with] if self.conflict_with else []
data['voided_by'] = [x.hex() for x in self.voided_by] if self.voided_by else []
data['twins'] = [x.hex() for x in self.twins]
data['accumulated_weight'] = self.accumulated_weight
data['score'] = self.score
data['height'] = self.height
if self.first_block is not None:
data['first_block'] = self.first_block.hex()
else:
data['first_block'] = None
return data
@classmethod
def create_from_json(cls, data: Dict[str, Any]) -> 'TransactionMetadata':
meta = cls()
meta.hash = bytes.fromhex(data['hash']) if data['hash'] else None
for idx, hashes in data['spent_outputs']:
for h_hex in hashes:
meta.spent_outputs[idx].append(bytes.fromhex(h_hex))
meta.received_by = list(data['received_by'])
meta.children = [bytes.fromhex(h) for h in data['children']]
if 'conflict_with' in data:
meta.conflict_with = [bytes.fromhex(h) for h in data['conflict_with']] if data['conflict_with'] else None
else:
meta.conflict_with = None
if 'voided_by' in data:
meta.voided_by = set(bytes.fromhex(h) for h in data['voided_by']) if data['voided_by'] else None
else:
meta.voided_by = None
if 'twins' in data:
meta.twins = [bytes.fromhex(h) for h in data['twins']]
else:
meta.twins = []
meta.accumulated_weight = data['accumulated_weight']
meta.score = data.get('score', 0)
meta.height = data.get('height', 0) # XXX: should we calculate the height if it's not defined?
first_block_raw = data.get('first_block', None)
if first_block_raw:
meta.first_block = bytes.fromhex(first_block_raw)
return meta
# XXX(jansegre): I did not put the transaction hash in the protobuf object to keep it less redundant. Is this OK?
@classmethod
def create_from_proto(cls, hash_bytes: bytes, metadata_proto: protos.Metadata) -> 'TransactionMetadata':
""" Create a TransactionMetadata from a protobuf Metadata object.
:param hash_bytes: hash of the transaction in bytes
:type hash_bytes: bytes
:param metadata_proto: Protobuf transaction object
:type metadata_proto: :py:class:`hathor.protos.Metadata`
:return: A transaction metadata
:rtype: TransactionMetadata
"""
metadata = cls(hash=hash_bytes)
for i, hashes in metadata_proto.spent_outputs.items():
metadata.spent_outputs[i] = list(hashes.hashes)
metadata.conflict_with = list(metadata_proto.conflicts_with.hashes) or None
metadata.voided_by = set(metadata_proto.voided_by.hashes) or None
metadata.twins = list(metadata_proto.twins.hashes)
metadata.received_by = list(metadata_proto.received_by)
metadata.children = list(metadata_proto.children.hashes)
metadata.accumulated_weight = metadata_proto.accumulated_weight
metadata.score = metadata_proto.score
metadata.first_block = metadata_proto.first_block or None
metadata.height = metadata_proto.height
return metadata
def to_proto(self) -> protos.Metadata:
    """ Serialize this metadata into a protobuf Metadata message.

    :return: Protobuf object
    :rtype: :py:class:`hathor.protos.Metadata`
    """
    from hathor import protos
    Hashes = protos.Metadata.Hashes
    spent = {index: Hashes(hashes=hashes) for index, hashes in self.spent_outputs.items()}
    return protos.Metadata(
        spent_outputs=spent,
        conflicts_with=Hashes(hashes=self.conflict_with),
        voided_by=Hashes(hashes=self.voided_by),
        twins=Hashes(hashes=self.twins),
        received_by=self.received_by,
        children=Hashes(hashes=self.children),
        accumulated_weight=self.accumulated_weight,
        score=self.score,
        first_block=self.first_block,
        height=self.height,
    )
def clone(self) -> 'TransactionMetadata':
    """Return an exact, independent copy of this metadata (no shared memory).

    :return: TransactionMetadata
    :rtype: :py:class:`hathor.transaction.TransactionMetadata`
    """
    # XXX: round-tripping through JSON keeps this trivially correct; pickle or a
    # field-by-field copy would be alternatives if this ever shows up in profiles.
    as_json = self.to_json()
    return self.create_from_json(as_json)
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,704 | CorgiCash/corgi-core | refs/heads/master | /hathor/transaction/storage/subprocess_storage.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent import futures
from multiprocessing import Process, Queue
import grpc
from hathor.exception import HathorError
from hathor.transaction.storage.remote_storage import TransactionRemoteStorage, create_transaction_storage_server
class SubprocessNotAliveError(HathorError):
    """Raised when the storage subprocess is expected to be running but is not alive."""
class TransactionSubprocessStorage(TransactionRemoteStorage, Process):
    """Subprocess storage to be used 'on top' of other storages.

    Wraps a given store constructor and spawns it on a subprocess.

    The parent process side is a gRPC client (via TransactionRemoteStorage); the
    child process (see `run`) hosts the wrapped storage behind a gRPC server.
    """

    def __init__(self, store_constructor, with_index=None):
        """
        :param store_constructor: a callable that returns an instance of TransactionStorage
        :type store_constructor: :py:class:`typing.Callable[..., hathor.transaction.storage.TransactionStorage]`

        :param with_index: forwarded to TransactionRemoteStorage
        """
        Process.__init__(self)
        TransactionRemoteStorage.__init__(self, with_index=with_index)
        self._store_constructor = store_constructor
        # this queue is used by the subprocess to inform which port was selected
        self._port_q: 'Queue[int]' = Queue(1)
        # this queue is used to inform the subprocess it can end
        self._exit_q: 'Queue[int]' = Queue(1)

    def _check_connection(self):
        """raise error if subprocess is not alive"""
        super()._check_connection()
        if not self.is_alive():
            raise SubprocessNotAliveError('subprocess is dead')

    def stop(self):
        """Signal the subprocess to shut down and close the client channel."""
        self._exit_q.put(None)
        if self._channel:
            self._channel.close()

    def start(self):
        """Spawn the subprocess, then connect to the port it reports back."""
        super().start()
        # Blocks until `run` (in the child) publishes the server port.
        port = self._port_q.get()
        self.connect_to(port)

    def terminate(self):
        """Close the client side before forcefully terminating the subprocess."""
        self.close()
        super().terminate()

    def run(self):
        """internal method for Process interface, do not run directly"""
        # TODO: some tuning with benchmarks
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        tx_storage = self._store_constructor()
        tx_storage._manually_initialize()
        _servicer, port = create_transaction_storage_server(server, tx_storage)
        self._port_q.put(port)
        server.start()
        self._exit_q.get()
        # the above all blocks until _exit_q.put(None) or _exit_q closes
        server.stop(0)
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,705 | CorgiCash/corgi-core | refs/heads/master | /tests/tx/test_cache_storage.py | import collections
from twisted.internet.defer import inlineCallbacks
from hathor.daa import TestMode, _set_test_mode
from hathor.transaction import Block, Transaction, TransactionMetadata, TxOutput
from hathor.transaction.scripts import P2PKH
from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage
from tests import unittest
from tests.utils import BURN_ADDRESS, MIN_TIMESTAMP, add_new_blocks, add_new_transactions
# Cache capacity used throughout these tests; the eviction/capacity assertions
# below are all written in terms of this constant.
CACHE_SIZE = 5
class BasicTransaction(unittest.TestCase):
    """Exercise TransactionCacheStorage: caching, eviction, dirty-set flushing
    and the deferred (async) API, on top of a memory storage."""

    def setUp(self):
        super().setUp()
        store = TransactionMemoryStorage()
        # Fix: use CACHE_SIZE instead of a duplicated literal `5`, so the capacity
        # under test always matches the constant the assertions are written against.
        self.cache_storage = TransactionCacheStorage(store, self.clock, capacity=CACHE_SIZE)
        self.cache_storage._manually_initialize()
        self.cache_storage.start()

        self.genesis = self.cache_storage.get_all_genesis()
        self.genesis_blocks = [tx for tx in self.genesis if tx.is_block]
        self.genesis_txs = [tx for tx in self.genesis if not tx.is_block]

        # Save genesis metadata
        self.cache_storage.save_transaction_deferred(self.genesis_txs[0], only_metadata=True)

        self.manager = self.create_peer('testnet', tx_storage=self.cache_storage, unlock_wallet=True)

    def tearDown(self):
        super().tearDown()

    def _get_new_tx(self, nonce):
        """Build a minimal transaction (distinct hash per nonce) with empty metadata."""
        tx = Transaction(nonce=nonce, storage=self.cache_storage)
        tx.update_hash()
        meta = TransactionMetadata(hash=tx.hash)
        tx._metadata = meta
        return tx

    def test_write_read(self):
        txs = [self._get_new_tx(nonce) for nonce in range(2 * CACHE_SIZE)]

        for tx in txs:
            self.cache_storage.save_transaction(tx)

        txs2 = [self.cache_storage.get_transaction(tx.hash) for tx in txs]
        self.assertEqual(txs, txs2)

    def test_dirty_set(self):
        txs = [self._get_new_tx(nonce) for nonce in range(CACHE_SIZE)]

        for tx in txs:
            self.cache_storage.save_transaction(tx)

        for tx in txs:
            self.assertIn(tx.hash, self.cache_storage.dirty_txs)

        # should flush to disk and empty dirty set
        self.cache_storage._flush_to_storage(self.cache_storage.dirty_txs.copy())
        self.assertEqual(0, len(self.cache_storage.dirty_txs))

    def test_capacity(self):
        # cache should not grow over its capacity
        txs = [self._get_new_tx(nonce) for nonce in range(2 * CACHE_SIZE)]

        for tx in txs:
            self.cache_storage.save_transaction(tx)

        self.assertEqual(CACHE_SIZE, len(self.cache_storage.cache))

    def test_read_adds_to_cache(self):
        # make sure reading also adds to cache, not only writes
        txs = [self._get_new_tx(nonce) for nonce in range(2 * CACHE_SIZE)]

        for tx in txs:
            self.cache_storage.save_transaction(tx)

        # by now, tx[0] will already have left the cache
        self.assertNotIn(txs[0].hash, self.cache_storage.cache)

        # read tx
        self.cache_storage.get_transaction(txs[0].hash)

        # now it should be in cache
        self.assertIn(txs[0].hash, self.cache_storage.cache)

    def test_read_moves_to_end(self):
        # when we read a tx from cache, it should be moved to the end of cache so it's evicted later
        txs = [self._get_new_tx(nonce) for nonce in range(2 * CACHE_SIZE)]

        for i in range(CACHE_SIZE):
            self.cache_storage.save_transaction(txs[i])

        # first tx added would be the first to leave cache if we add one more tx
        # let's read it from cache so it goes to the end
        self.cache_storage.get_transaction(txs[0].hash)

        # add a new tx to cache, so it will evict a tx
        self.cache_storage.save_transaction(txs[-1])

        # first tx should be in cache
        self.assertIn(txs[0].hash, self.cache_storage.cache)

    def test_cache_eviction(self):
        # tests we're evicting the oldest tx from cache
        txs = [self._get_new_tx(nonce) for nonce in range(2 * CACHE_SIZE)]

        for i in range(CACHE_SIZE):
            self.cache_storage.save_transaction(txs[i])

        # next save should evict first tx
        self.cache_storage.save_transaction(txs[CACHE_SIZE])
        self.assertNotIn(txs[0].hash, self.cache_storage.cache)
        self.assertIn(txs[CACHE_SIZE].hash, self.cache_storage.cache)
        self.assertEqual(CACHE_SIZE, len(self.cache_storage.cache))

    def test_flush_thread(self):
        txs = [self._get_new_tx(nonce) for nonce in range(CACHE_SIZE)]

        for tx in txs:
            self.cache_storage.save_transaction(tx)

        for tx in txs:
            self.assertIn(tx.hash, self.cache_storage.dirty_txs)

        # Flush deferred is not None
        self.assertIsNotNone(self.cache_storage.flush_deferred)
        last_flush_deferred = self.cache_storage.flush_deferred
        self.cache_storage._start_flush_thread()
        self.assertEqual(last_flush_deferred, self.cache_storage.flush_deferred)

        # We flush the cache and flush_deferred becomes None
        self.cache_storage._cb_flush_thread(self.cache_storage.dirty_txs.copy())
        self.assertIsNone(self.cache_storage.flush_deferred)
        # After the interval it becomes not None again
        self.clock.advance(10)
        self.assertIsNotNone(self.cache_storage.flush_deferred)

        # If an err occurs, it will become None again and then not None after the interval
        self.cache_storage._err_flush_thread('')
        self.assertIsNone(self.cache_storage.flush_deferred)
        self.clock.advance(5)
        self.assertIsNotNone(self.cache_storage.flush_deferred)

        # Remove element from cache to test a part of the code
        del self.cache_storage.cache[next(iter(self.cache_storage.dirty_txs))]
        self.cache_storage._flush_to_storage(self.cache_storage.dirty_txs.copy())

    def test_deferred_methods(self):
        # Drive the inlineCallbacks generator to completion synchronously.
        for _ in self._test_deferred_methods():
            pass

    @inlineCallbacks
    def _test_deferred_methods(self):
        # Testing without cloning
        self.cache_storage._clone_if_needed = False

        block_parents = [block.hash for block in self.genesis_blocks] + [tx.hash for tx in self.genesis_txs]
        output = TxOutput(200, P2PKH.create_output_script(BURN_ADDRESS))
        obj = Block(timestamp=MIN_TIMESTAMP, weight=12, outputs=[output], parents=block_parents, nonce=100781,
                    storage=self.cache_storage)
        obj.resolve()
        obj.verify()

        self.cache_storage.save_transaction_deferred(obj)

        loaded_obj1 = yield self.cache_storage.get_transaction_deferred(obj.hash)

        metadata_obj1_def = yield self.cache_storage.get_metadata_deferred(obj.hash)
        metadata_obj1 = obj.get_metadata()
        self.assertEqual(metadata_obj1_def, metadata_obj1)

        # Unknown hash must resolve to None, not raise.
        metadata_error = yield self.cache_storage.get_metadata_deferred(
            bytes.fromhex('0001569c85fffa5782c3979e7d68dce1d8d84772505a53ddd76d636585f3977e'))
        self.assertIsNone(metadata_error)

        self.cache_storage._flush_to_storage(self.cache_storage.dirty_txs.copy())
        self.cache_storage.cache = collections.OrderedDict()
        loaded_obj2 = yield self.cache_storage.get_transaction_deferred(obj.hash)

        self.assertEqual(loaded_obj1, loaded_obj2)

        self.assertTrue((yield self.cache_storage.transaction_exists_deferred(obj.hash)))
        self.assertFalse((yield self.cache_storage.transaction_exists_deferred(
            '0001569c85fffa5782c3979e7d68dce1d8d84772505a53ddd76d636585f3977e')))

        self.assertFalse(
            self.cache_storage.transaction_exists('0001569c85fffa5782c3979e7d68dce1d8d84772505a53ddd76d636585f3977e'))

        self.assertEqual(obj, loaded_obj1)
        self.assertEqual(obj.is_block, loaded_obj1.is_block)

        count = yield self.cache_storage.get_count_tx_blocks_deferred()
        self.assertEqual(count, 4)

        all_transactions = yield self.cache_storage.get_all_transactions_deferred()
        total = 0
        for tx in all_transactions:
            total += 1
        self.assertEqual(total, 4)

    def test_topological_sort_dfs(self):
        _set_test_mode(TestMode.TEST_ALL_WEIGHT)
        add_new_blocks(self.manager, 11, advance_clock=1)
        tx = add_new_transactions(self.manager, 1, advance_clock=1)[0]

        total = 0
        for tx in self.cache_storage._topological_sort_dfs(root=tx, visited=dict()):
            total += 1
        self.assertEqual(total, 5)
# Allow running this test module directly, outside the test runner.
if __name__ == '__main__':
    unittest.main()
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,706 | CorgiCash/corgi-core | refs/heads/master | /hathor/transaction/storage/transaction_storage.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
from abc import ABC, abstractmethod, abstractproperty
from collections import deque
from threading import Lock
from typing import Any, Dict, Generator, Iterator, List, NamedTuple, Optional, Set, Tuple, cast
from weakref import WeakValueDictionary
from intervaltree.interval import Interval
from structlog import get_logger
from twisted.internet.defer import Deferred, inlineCallbacks, succeed
from hathor.conf import HathorSettings
from hathor.indexes import IndexesManager, TokensIndex, TransactionsIndex, WalletIndex
from hathor.pubsub import HathorEvents, PubSubManager
from hathor.transaction.block import Block
from hathor.transaction.storage.exceptions import TransactionDoesNotExist, TransactionIsNotABlock
from hathor.transaction.transaction import BaseTransaction
from hathor.transaction.transaction_metadata import TransactionMetadata
from hathor.util import skip_warning
# Load the node configuration once at import time.
settings = HathorSettings()
class AllTipsCache(NamedTuple):
    """Precomputed tips + merkle tree for a given timestamp.

    Used by get_merkle_tree/get_all_tips to avoid recomputing the merkle tree
    of all tips during sync (see TransactionStorage._all_tips_cache).
    """
    # Timestamp this snapshot was computed for (always the latest_timestamp).
    timestamp: int
    # The tip intervals at that timestamp.
    tips: Set[Interval]
    # sha256 digest over the sorted tip hashes.
    merkle_tree: bytes
    # The sorted tip hashes that were folded into merkle_tree.
    hashes: List[bytes]
class TransactionStorage(ABC):
"""Legacy sync interface, please copy @deprecated decorator when implementing methods."""
pubsub: Optional[PubSubManager]
with_index: bool # noqa: E701
wallet_index: Optional[WalletIndex]
tokens_index: Optional[TokensIndex]
block_index: Optional[IndexesManager]
tx_index: Optional[IndexesManager]
all_index: Optional[IndexesManager]
log = get_logger()
def __init__(self):
# Weakref is used to guarantee that there is only one instance of each transaction in memory.
self._tx_weakref: WeakValueDictionary[bytes, BaseTransaction] = WeakValueDictionary()
self._tx_weakref_disabled: bool = False
# This lock is needed everytime a storage is getting a tx from the weakref and,
# in the case the tx is not there, it creates a new object to save there.
# We were having some concurrent access and two different objects were being saved
# in the weakref, what is an error (https://github.com/HathorNetwork/hathor-core/issues/70)
# With this lock we guarantee there isn't going to be any problem with concurrent access
self._weakref_lock_per_hash: WeakValueDictionary[bytes, Lock] = WeakValueDictionary()
# This is a global lock used to prevent concurrent access when getting the tx lock in the dict above
self._weakref_lock: Lock = Lock()
# Cache for the best block tips
# This cache is updated in the consensus algorithm.
self._best_block_tips = None
# If should create lock when getting a transaction
self._should_lock = False
# Provide local logger
self.log = self.log.new()
# Cache for the latest timestamp of all tips with merkle tree precalculated to be used on the sync algorithm
# This cache is invalidated every time a new tx or block is added to the cache and
# self._all_tips_cache.timestamp is always self.latest_timestamp
self._all_tips_cache: Optional[AllTipsCache] = None
# Initialize cache for genesis transactions.
self._genesis_cache: Dict[bytes, BaseTransaction] = {}
# Key storage attribute to save if the full node is running a full verification
self._running_full_verification_attribute: str = 'running_full_verification'
# Key storage attribute to save if the manager is running
self._manager_running_attribute: str = 'manager_running'
# Key storage attribute to save if the node has clean db
self._clean_db_attribute: str = 'clean_db'
def _save_or_verify_genesis(self) -> None:
"""Save all genesis in the storage."""
for tx in self._get_genesis_from_settings():
try:
assert tx.hash is not None
tx2 = self.get_transaction(tx.hash)
assert tx == tx2
except TransactionDoesNotExist:
self.save_transaction(tx)
tx2 = tx
assert tx2.hash is not None
self._genesis_cache[tx2.hash] = tx2
def _get_genesis_from_settings(self) -> List[BaseTransaction]:
"""Return all genesis from settings."""
from hathor.transaction.genesis import _get_genesis_transactions_unsafe
return _get_genesis_transactions_unsafe(self)
def _save_to_weakref(self, tx: BaseTransaction) -> None:
""" Save transaction to weakref.
"""
if self._tx_weakref_disabled:
return
assert tx.hash is not None
tx2 = self._tx_weakref.get(tx.hash, None)
if tx2 is None:
self._tx_weakref[tx.hash] = tx
else:
assert tx is tx2, 'There are two instances of the same transaction in memory ({})'.format(tx.hash_hex)
def _remove_from_weakref(self, tx: BaseTransaction) -> None:
"""Remove transaction from weakref.
"""
if self._tx_weakref_disabled:
return
assert tx.hash is not None
self._tx_weakref.pop(tx.hash, None)
def get_transaction_from_weakref(self, hash_bytes: bytes) -> Optional[BaseTransaction]:
""" Get a transaction from weakref if it exists. Otherwise, returns None.
"""
if self._tx_weakref_disabled:
return None
return self._tx_weakref.get(hash_bytes, None)
def _enable_weakref(self) -> None:
""" Weakref should never be disabled unless you know exactly what you are doing.
"""
self._tx_weakref_disabled = False
def _disable_weakref(self) -> None:
""" Weakref should never be disabled unless you know exactly what you are doing.
"""
self._tx_weakref_disabled = True
@abstractmethod
def save_transaction(self: 'TransactionStorage', tx: BaseTransaction, *, only_metadata: bool = False) -> None:
# XXX: although this method is abstract (because a subclass must implement it) the implementer
# should call the base implementation for correctly interacting with the index
"""Saves the tx.
:param tx: Transaction to save
:param only_metadata: Don't save the transaction, only the metadata of this transaction
"""
meta = tx.get_metadata()
if self.pubsub:
if not meta.voided_by:
self.pubsub.publish(HathorEvents.STORAGE_TX_WINNER, tx=tx)
else:
self.pubsub.publish(HathorEvents.STORAGE_TX_VOIDED, tx=tx)
if self.with_index and not only_metadata:
self._add_to_cache(tx)
@abstractmethod
def remove_transaction(self, tx: BaseTransaction) -> None:
"""Remove the tx.
:param tx: Trasaction to be removed
"""
if self.with_index:
assert self.all_index is not None
self._del_from_cache(tx, relax_assert=True)
# TODO Move it to self._del_from_cache. We cannot simply do it because
# this method is used by the consensus algorithm which does not
# expect to have it removed from self.all_index.
self.all_index.del_tx(tx, relax_assert=True)
if self.wallet_index:
self.wallet_index.remove_tx(tx)
@abstractmethod
def transaction_exists(self, hash_bytes: bytes) -> bool:
"""Returns `True` if transaction with hash `hash_bytes` exists.
:param hash_bytes: Hash in bytes that will be checked.
"""
raise NotImplementedError
@abstractmethod
def _get_transaction(self, hash_bytes: bytes) -> BaseTransaction:
"""Returns the transaction with hash `hash_bytes`.
:param hash_bytes: Hash in bytes that will be checked.
"""
raise NotImplementedError
def disable_lock(self) -> None:
""" Turn off lock
"""
self._should_lock = False
def enable_lock(self) -> None:
""" Turn on lock
"""
self._should_lock = True
def _get_lock(self, hash_bytes: bytes) -> Optional[Lock]:
""" Get lock for tx hash in the weakref dictionary
"""
if not self._should_lock:
return None
with self._weakref_lock:
lock = self._weakref_lock_per_hash.get(hash_bytes, None)
if lock is None:
lock = Lock()
self._weakref_lock_per_hash[hash_bytes] = lock
return lock
def get_transaction(self, hash_bytes: bytes) -> BaseTransaction:
"""Acquire the lock and get the transaction with hash `hash_bytes`.
:param hash_bytes: Hash in bytes that will be checked.
"""
if self._should_lock:
lock = self._get_lock(hash_bytes)
assert lock is not None
with lock:
tx = self._get_transaction(hash_bytes)
else:
tx = self._get_transaction(hash_bytes)
return tx
def get_metadata(self, hash_bytes: bytes) -> Optional[TransactionMetadata]:
"""Returns the transaction metadata with hash `hash_bytes`.
:param hash_bytes: Hash in bytes that will be checked.
:rtype :py:class:`hathor.transaction.TransactionMetadata`
"""
try:
tx = self.get_transaction(hash_bytes)
return tx.get_metadata(use_storage=False)
except TransactionDoesNotExist:
return None
@abstractmethod
def get_all_transactions(self) -> Iterator[BaseTransaction]:
# TODO: verify the following claim:
"""Return all transactions that are not blocks.
:rtype :py:class:`typing.Iterable[hathor.transaction.BaseTransaction]`
"""
raise NotImplementedError
@abstractmethod
def get_count_tx_blocks(self) -> int:
# TODO: verify the following claim:
"""Return the number of transactions/blocks stored.
:rtype int
"""
raise NotImplementedError
"""Async interface, all methods mirrorred from TransactionStorageSync, but suffixed with `_deferred`."""
@abstractmethod
def save_transaction_deferred(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None:
"""Saves the tx.
:param tx: Transaction to save
:type tx: :py:class:`hathor.transaction.BaseTransaction`
:param only_metadata: Don't save the transaction, only the metadata of this transaction
:type only_metadata: bool
:rtype :py:class:`twisted.internet.defer.Deferred[None]`
"""
if self.with_index:
self._add_to_cache(tx)
return succeed(None)
@abstractmethod
def remove_transaction_deferred(self, tx: BaseTransaction) -> None:
"""Remove the tx.
:param tx: Transaction to be removed
:rtype :py:class:`twisted.internet.defer.Deferred[None]`
"""
if self.with_index:
self._del_from_cache(tx)
return succeed(None)
@abstractmethod
def transaction_exists_deferred(self, hash_bytes: bytes) -> bool:
"""Returns `True` if transaction with hash `hash_bytes` exists.
:param hash_bytes: Hash in bytes that will be checked.
:type hash_bytes: bytes
:rtype :py:class:`twisted.internet.defer.Deferred[bool]`
"""
raise NotImplementedError
@abstractmethod
def get_transaction_deferred(self, hash_bytes: bytes) -> BaseTransaction:
"""Returns the transaction with hash `hash_bytes`.
:param hash_bytes: Hash in bytes that will be checked.
:type hash_bytes: bytes
:rtype :py:class:`twisted.internet.defer.Deferred[hathor.transaction.BaseTransaction]`
"""
raise NotImplementedError
@inlineCallbacks
def get_metadata_deferred(self, hash_bytes: bytes) -> Generator[Any, Any, Optional[TransactionMetadata]]:
"""Returns the transaction metadata with hash `hash_bytes`.
:param hash_bytes: Hash in bytes that will be checked.
:type hash_bytes: bytes
:rtype :py:class:`twisted.internet.defer.Deferred[hathor.transaction.TransactionMetadata]`
"""
try:
tx = yield self.get_transaction_deferred(hash_bytes)
return tx.get_metadata(use_storage=False)
except TransactionDoesNotExist:
return None
@abstractmethod
def get_all_transactions_deferred(self) -> Iterator[BaseTransaction]:
# TODO: find an `async generator` type
# TODO: verify the following claim:
"""Return all transactions that are not blocks.
:rtype :py:class:`twisted.internet.defer.Deferred[typing.Iterable[hathor.transaction.BaseTransaction]]`
"""
raise NotImplementedError
@abstractmethod
def get_count_tx_blocks_deferred(self) -> int:
# TODO: verify the following claim:
"""Return the number of transactions/blocks stored.
:rtype :py:class:`twisted.internet.defer.Deferred[int]`
"""
raise NotImplementedError
@abstractproperty
def latest_timestamp(self) -> int:
raise NotImplementedError
@abstractproperty
def first_timestamp(self) -> int:
raise NotImplementedError
@abstractmethod
def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> List[bytes]:
""" Return a list of blocks that are heads in a best chain. It must be used when mining.
When more than one block is returned, it means that there are multiple best chains and
you can choose any of them.
"""
if timestamp is None and not skip_cache and self._best_block_tips is not None:
return self._best_block_tips
best_score = 0.0
best_tip_blocks = [] # List[bytes(hash)]
tip_blocks = [x.data for x in self.get_block_tips(timestamp)]
for block_hash in tip_blocks:
meta = self.get_metadata(block_hash)
assert meta is not None
if meta.voided_by and meta.voided_by != set([block_hash]):
# If anyone but the block itself is voiding this block, then it must be skipped.
continue
if abs(meta.score - best_score) < 1e-10:
best_tip_blocks.append(block_hash)
elif meta.score > best_score:
best_score = meta.score
best_tip_blocks = [block_hash]
return best_tip_blocks
def get_weight_best_block(self) -> float:
heads = [self.get_transaction(h) for h in self.get_best_block_tips()]
highest_weight = 0.0
for head in heads:
if head.weight > highest_weight:
highest_weight = head.weight
return highest_weight
def get_height_best_block(self) -> int:
""" Iterate over best block tips and get the highest height
"""
heads = [self.get_transaction(h) for h in self.get_best_block_tips()]
highest_height = 0
for head in heads:
head_height = head.get_metadata().height
if head_height > highest_height:
highest_height = head_height
return highest_height
def get_merkle_tree(self, timestamp: int) -> Tuple[bytes, List[bytes]]:
""" Generate a hash to check whether the DAG is the same at that timestamp.
:rtype: Tuple[bytes(hash), List[bytes(hash)]]
"""
if self._all_tips_cache is not None and timestamp >= self._all_tips_cache.timestamp:
return self._all_tips_cache.merkle_tree, self._all_tips_cache.hashes
intervals = self.get_all_tips(timestamp)
if timestamp >= self.latest_timestamp:
# get_all_tips will add to cache in that case
assert self._all_tips_cache is not None
return self._all_tips_cache.merkle_tree, self._all_tips_cache.hashes
return self.calculate_merkle_tree(intervals)
def calculate_merkle_tree(self, intervals: Set[Interval]) -> Tuple[bytes, List[bytes]]:
""" Generate a hash of the transactions at the intervals
:rtype: Tuple[bytes(hash), List[bytes(hash)]]
"""
hashes = [x.data for x in intervals]
hashes.sort()
merkle = hashlib.sha256()
for h in hashes:
merkle.update(h)
return merkle.digest(), hashes
@abstractmethod
def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
raise NotImplementedError
@abstractmethod
def get_all_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
raise NotImplementedError
@abstractmethod
def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
raise NotImplementedError
@abstractmethod
def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]:
""" Get blocks from the newest to the oldest
:param count: Number of blocks to be returned
:return: List of blocks and a boolean indicating if has more blocks
"""
raise NotImplementedError
@abstractmethod
def get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]:
""" Get transactions from the newest to the oldest
:param count: Number of transactions to be returned
:return: List of transactions and a boolean indicating if has more txs
"""
raise NotImplementedError
@abstractmethod
def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes,
count: int) -> Tuple[List[Block], bool]:
""" Get blocks from the timestamp/hash_bytes reference to the oldest
:param timestamp: Timestamp reference to start the search
:param hash_bytes: Hash reference to start the search
:param count: Number of blocks to be returned
:return: List of blocks and a boolean indicating if has more blocks
"""
raise NotImplementedError
@abstractmethod
def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes,
count: int) -> Tuple[List[BaseTransaction], bool]:
""" Get blocks from the timestamp/hash_bytes reference to the newest
:param timestamp: Timestamp reference to start the search
:param hash_bytes: Hash reference to start the search
:param count: Number of blocks to be returned
:return: List of blocks and a boolean indicating if has more blocks
"""
raise NotImplementedError
@abstractmethod
def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]:
""" Get transactions from the timestamp/hash_bytes reference to the oldest
:param timestamp: Timestamp reference to start the search
:param hash_bytes: Hash reference to start the search
:param count: Number of transactions to be returned
:return: List of transactions and a boolean indicating if has more txs
"""
raise NotImplementedError
@abstractmethod
def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]:
""" Get transactions from the timestamp/hash_bytes reference to the newest
:param timestamp: Timestamp reference to start the search
:param hash_bytes: Hash reference to start the search
:param count: Number of transactions to be returned
:return: List of transactions and a boolean indicating if has more txs
"""
raise NotImplementedError
@abstractmethod
def _manually_initialize(self) -> None:
# XXX: maybe refactor, this is actually part of the public interface
"""Caches must be initialized. This function should not be called, because
usually the HathorManager will handle all this initialization.
"""
pass
@abstractmethod
def _topological_sort(self) -> Iterator[BaseTransaction]:
"""Return an iterable of the transactions in topological ordering, i.e., from
genesis to the most recent transactions. The order is important because the
transactions are always valid---their parents and inputs exist.
:return: An iterable with the sorted transactions
"""
raise NotImplementedError
@abstractmethod
def _add_to_cache(self, tx: BaseTransaction) -> None:
raise NotImplementedError
@abstractmethod
def _del_from_cache(self, tx: BaseTransaction, *, relax_assert: bool = False) -> None:
raise NotImplementedError
@abstractmethod
def get_block_count(self) -> int:
raise NotImplementedError
@abstractmethod
def get_tx_count(self) -> int:
raise NotImplementedError
@abstractmethod
def get_genesis(self, hash_bytes: bytes) -> Optional[BaseTransaction]:
"""Returning hardcoded genesis block and transactions."""
raise NotImplementedError
@abstractmethod
def get_all_genesis(self) -> Set[BaseTransaction]:
raise NotImplementedError
@abstractmethod
def get_transactions_before(self, hash_bytes: bytes, num_blocks: int = 100) -> List[BaseTransaction]:
"""Run a BFS starting from the giving `hash_bytes`.
:param hash_bytes: Starting point of the BFS, either a block or a transaction.
:param num_blocks: Number of blocks to be return.
:return: List of transactions
"""
raise NotImplementedError
@abstractmethod
def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> List[Block]:
"""Run a BFS starting from the giving `hash_bytes`.
:param hash_bytes: Starting point of the BFS.
:param num_blocks: Number of blocks to be return.
:return: List of transactions
"""
raise NotImplementedError
@abstractmethod
def get_all_sorted_txs(self, timestamp: int, count: int, offset: int) -> TransactionsIndex:
""" Returns ordered blocks and txs in a TransactionIndex
"""
raise NotImplementedError
def add_value(self, key: str, value: str) -> None:
""" Save value on storage
Need to be a string to support all storages, including rocksdb, that needs bytes
"""
raise NotImplementedError
def remove_value(self, key: str) -> None:
    """ Remove a key (and its value) from storage.
    """
    raise NotImplementedError
def get_value(self, key: str) -> Optional[str]:
    """ Get a value from storage.

    Presumably returns None when the key is absent (callers compare the result
    against '1', so a missing key reads as falsy) — TODO confirm in subclasses.
    """
    raise NotImplementedError
def start_full_verification(self) -> None:
    """Persist a marker saying that a full verification has started."""
    flag_key = self._running_full_verification_attribute
    self.add_value(flag_key, '1')
def finish_full_verification(self) -> None:
    """Clear the persisted marker saying a full verification was in progress."""
    flag_key = self._running_full_verification_attribute
    self.remove_value(flag_key)
def is_running_full_verification(self) -> bool:
    """Tell whether a full verification is in progress, or was interrupted mid-run."""
    flag = self.get_value(self._running_full_verification_attribute)
    return flag == '1'
def start_running_manager(self) -> None:
    """Persist a marker saying the manager is running."""
    flag_key = self._manager_running_attribute
    self.add_value(flag_key, '1')
def stop_running_manager(self) -> None:
    """Clear the persisted marker saying the manager is running."""
    flag_key = self._manager_running_attribute
    self.remove_value(flag_key)
def is_running_manager(self) -> bool:
    """Tell whether the manager is running, or was running when a sudden crash stopped the full node."""
    flag = self.get_value(self._manager_running_attribute)
    return flag == '1'
def set_db_clean(self) -> None:
    """Persist a marker saying the db holds clean data (without voided blocks/txs)."""
    flag_key = self._clean_db_attribute
    self.add_value(flag_key, '1')
def is_db_clean(self) -> bool:
    """Tell whether the node has a clean db (without voided blocks/txs)."""
    flag = self.get_value(self._clean_db_attribute)
    return flag == '1'
class TransactionStorageAsyncFromSync(TransactionStorage):
    """Implement async interface from sync interface, for legacy implementations."""

    def _defer_sync(self, method, *args, **kwargs) -> Deferred:
        # Run the sync method immediately (through skip_warning, as the sync
        # entry points do) and wrap its result in an already-fired Deferred.
        return succeed(skip_warning(method)(*args, **kwargs))

    def save_transaction_deferred(self, tx: BaseTransaction, *, only_metadata: bool = False) -> Deferred:
        return self._defer_sync(self.save_transaction, tx, only_metadata=only_metadata)

    def remove_transaction_deferred(self, tx: BaseTransaction) -> Deferred:
        return self._defer_sync(self.remove_transaction, tx)

    def transaction_exists_deferred(self, hash_bytes: bytes) -> Deferred:
        return self._defer_sync(self.transaction_exists, hash_bytes)

    def get_transaction_deferred(self, hash_bytes: bytes) -> Deferred:
        return self._defer_sync(self.get_transaction, hash_bytes)

    def get_all_transactions_deferred(self) -> Deferred:
        return self._defer_sync(self.get_all_transactions)

    def get_count_tx_blocks_deferred(self) -> Deferred:
        return self._defer_sync(self.get_count_tx_blocks)
class BaseTransactionStorage(TransactionStorage):
    """Partial TransactionStorage implementation that maintains in-memory indexes.

    Concrete subclasses provide the persistence (`_save_transaction` etc.); this
    class keeps the block/tx/all tips indexes, the cached block/tx counters, and
    the first/latest timestamp bounds, and implements the index-backed queries.
    """

    def __init__(self, with_index: bool = True, pubsub: Optional[Any] = None) -> None:
        """
        :param with_index: whether to build and maintain the in-memory indexes
        :param pubsub: optional pubsub used to publish tx voided and winner events
        """
        super().__init__()

        # Pubsub is used to publish tx voided and winner but it's optional
        self.pubsub = pubsub

        # Initialize index if needed.
        self.with_index = with_index
        if with_index:
            self._reset_cache()

        # Either save or verify all genesis.
        self._save_or_verify_genesis()

    @property
    def latest_timestamp(self) -> int:
        """Timestamp of the most recent tx/block seen by the indexes."""
        return self._latest_timestamp

    @property
    def first_timestamp(self) -> int:
        """Timestamp of the oldest tx/block seen by the indexes (0 means unset)."""
        return self._first_timestamp

    @abstractmethod
    def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None:
        """Persist `tx` (or only its metadata). Implemented by subclasses."""
        raise NotImplementedError

    def _reset_cache(self) -> None:
        """Reset all caches. This function should not be called unless you know what you are doing."""
        assert self.with_index, 'Cannot reset cache because it has not been enabled.'

        # Counters kept in sync by _add_to_cache/_del_from_cache.
        self._cache_block_count = 0
        self._cache_tx_count = 0

        self.block_index = IndexesManager()
        self.tx_index = IndexesManager()
        self.all_index = IndexesManager()
        self.wallet_index = None
        self.tokens_index = None

        # Seed the timestamp bounds from the genesis txs; 0 is the "unset" sentinel.
        genesis = self.get_all_genesis()
        if genesis:
            self._latest_timestamp = max(x.timestamp for x in genesis)
            self._first_timestamp = min(x.timestamp for x in genesis)
        else:
            self._latest_timestamp = 0
            self._first_timestamp = 0

    def remove_cache(self) -> None:
        """Remove all caches in case we don't need it."""
        self.with_index = False
        self.block_index = None
        self.tx_index = None
        self.all_index = None

    def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> List[bytes]:
        # Explicit delegation to the base implementation (kept for clarity).
        return super().get_best_block_tips(timestamp, skip_cache=skip_cache)

    def get_weight_best_block(self) -> float:
        # Explicit delegation to the base implementation (kept for clarity).
        return super().get_weight_best_block()

    def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
        """Return the block tips at `timestamp` (defaults to the latest timestamp)."""
        if not self.with_index:
            raise NotImplementedError
        assert self.block_index is not None
        if timestamp is None:
            timestamp = self.latest_timestamp
        return self.block_index.tips_index[timestamp]

    def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
        """Return the tx tips at `timestamp`, asserting that none of them is voided."""
        if not self.with_index:
            raise NotImplementedError
        assert self.tx_index is not None
        if timestamp is None:
            timestamp = self.latest_timestamp
        tips = self.tx_index.tips_index[timestamp]

        # This `for` is for assert only. How to skip it when running with `-O` parameter?
        for interval in tips:
            meta = self.get_metadata(interval.data)
            assert meta is not None
            assert not meta.voided_by

        return tips

    def get_all_tips(self, timestamp: Optional[float] = None) -> Set[Interval]:
        """Return all tips at `timestamp`, memoizing the latest-timestamp answer."""
        if not self.with_index:
            raise NotImplementedError
        assert self.all_index is not None
        if timestamp is None:
            timestamp = self.latest_timestamp

        # Serve "present or future" queries from the memoized cache when available.
        if self._all_tips_cache is not None and timestamp >= self._all_tips_cache.timestamp:
            assert self._all_tips_cache.timestamp == self.latest_timestamp
            return self._all_tips_cache.tips

        tips = self.all_index.tips_index[timestamp]
        if timestamp >= self.latest_timestamp:
            merkle_tree, hashes = self.calculate_merkle_tree(tips)
            self._all_tips_cache = AllTipsCache(self.latest_timestamp, tips, merkle_tree, hashes)

        return tips

    def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]:
        """Return up to `count` newest blocks and whether more are available."""
        if not self.with_index:
            raise NotImplementedError
        assert self.block_index is not None
        block_hashes, has_more = self.block_index.get_newest(count)
        blocks = [cast(Block, self.get_transaction(block_hash)) for block_hash in block_hashes]
        return blocks, has_more

    def get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]:
        """Return up to `count` newest txs and whether more are available."""
        if not self.with_index:
            raise NotImplementedError
        assert self.tx_index is not None
        tx_hashes, has_more = self.tx_index.get_newest(count)
        txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes]
        return txs, has_more

    def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[Block], bool]:
        """Paginate blocks older than (`timestamp`, `hash_bytes`)."""
        if not self.with_index:
            raise NotImplementedError
        assert self.block_index is not None
        block_hashes, has_more = self.block_index.get_older(timestamp, hash_bytes, count)
        blocks = [cast(Block, self.get_transaction(block_hash)) for block_hash in block_hashes]
        return blocks, has_more

    def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes,
                               count: int) -> Tuple[List[BaseTransaction], bool]:
        """Paginate blocks newer than (`timestamp`, `hash_bytes`)."""
        if not self.with_index:
            raise NotImplementedError
        assert self.block_index is not None
        block_hashes, has_more = self.block_index.get_newer(timestamp, hash_bytes, count)
        blocks = [self.get_transaction(block_hash) for block_hash in block_hashes]
        return blocks, has_more

    def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]:
        """Paginate txs older than (`timestamp`, `hash_bytes`)."""
        if not self.with_index:
            raise NotImplementedError
        assert self.tx_index is not None
        tx_hashes, has_more = self.tx_index.get_older(timestamp, hash_bytes, count)
        txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes]
        return txs, has_more

    def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]:
        """Paginate txs newer than (`timestamp`, `hash_bytes`)."""
        if not self.with_index:
            raise NotImplementedError
        assert self.tx_index is not None
        tx_hashes, has_more = self.tx_index.get_newer(timestamp, hash_bytes, count)
        txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes]
        return txs, has_more

    def _manually_initialize(self) -> None:
        """Rebuild all in-memory indexes from the stored transactions."""
        self._reset_cache()

        # We need to construct a topological sort, then iterate from
        # genesis to tips.
        for tx in self._topological_sort():
            self._add_to_cache(tx)

    def _topological_sort(self) -> Iterator[BaseTransaction]:
        """Yield all transactions in topological order (parents/inputs before children)."""
        # TODO We must optimize this algorithm to remove the `visited` set.
        #      It will consume too much memory when the number of transactions is big.
        #      A solution would be to store the ordering in disk, probably indexing by tx's height.
        #      Sorting the vertices by the lengths of their longest incoming paths produces a topological
        #      ordering (Dekel, Nassimi & Sahni 1981). See: https://epubs.siam.org/doi/10.1137/0210049
        #      See also: https://gitlab.com/HathorNetwork/hathor-python/merge_requests/31
        visited: Dict[bytes, int] = dict()  # Dict[bytes, int]

        # First pass through blocks only, then a second pass for everything else.
        for tx in self.get_all_transactions():
            if not tx.is_block:
                continue
            yield from self._topological_sort_dfs(tx, visited)
        for tx in self.get_all_transactions():
            yield from self._topological_sort_dfs(tx, visited)

    def _topological_sort_dfs(self, root: BaseTransaction, visited: Dict[bytes, int]) -> Iterator[BaseTransaction]:
        """Iterative post-order DFS yielding `root`'s ancestry in topological order.

        `visited` values: 0 = visit in progress, 1 = visited.
        """
        if root.hash in visited:
            return

        stack = [root]
        while stack:
            tx = stack[-1]
            assert tx.hash is not None
            if tx.hash in visited:
                if visited[tx.hash] == 0:
                    visited[tx.hash] = 1  # 1 = Visited
                    yield tx
                assert tx == stack.pop()
                continue

            visited[tx.hash] = 0  # 0 = Visit in progress

            # The parents are reversed to go first through the blocks and only then
            # go through the transactions. It works because blocks must have the
            # previous block as the first parent. For transactions, the order does not
            # matter.
            for parent_hash in tx.parents[::-1]:
                if parent_hash not in visited:
                    parent = self.get_transaction(parent_hash)
                    stack.append(parent)

            for txin in tx.inputs:
                if txin.tx_id not in visited:
                    txinput = self.get_transaction(txin.tx_id)
                    stack.append(txinput)

    def _add_to_cache(self, tx: BaseTransaction) -> None:
        """Add `tx` to every enabled index and update counters and timestamp bounds."""
        if not self.with_index:
            raise NotImplementedError
        assert self.all_index is not None
        assert self.block_index is not None
        assert self.tx_index is not None

        self._latest_timestamp = max(self.latest_timestamp, tx.timestamp)
        # 0 is the "unset" sentinel for _first_timestamp (no genesis loaded);
        # replace it instead of taking the min, otherwise it would stay 0 forever.
        # (A redundant duplicated `min(...)` statement that followed this if/else
        # was removed; it recomputed the same value and had no effect.)
        if self._first_timestamp == 0:
            self._first_timestamp = tx.timestamp
        else:
            self._first_timestamp = min(self.first_timestamp, tx.timestamp)

        self._all_tips_cache = None
        self.all_index.add_tx(tx)
        if self.wallet_index:
            self.wallet_index.add_tx(tx)
        if self.tokens_index:
            self.tokens_index.add_tx(tx)

        # add_tx returns whether the tx was actually added, so the counters
        # are only bumped for new entries.
        if tx.is_block:
            if self.block_index.add_tx(tx):
                self._cache_block_count += 1
        else:
            if self.tx_index.add_tx(tx):
                self._cache_tx_count += 1

    def _del_from_cache(self, tx: BaseTransaction, *, relax_assert: bool = False) -> None:
        """Remove `tx` from the indexes and decrement the matching counter.

        NOTE(review): unlike _add_to_cache, the counter is decremented without
        checking del_tx's result — confirm double-deletes cannot happen upstream.
        """
        if not self.with_index:
            raise NotImplementedError
        assert self.block_index is not None
        assert self.tx_index is not None
        if self.tokens_index:
            self.tokens_index.del_tx(tx)
        if tx.is_block:
            self._cache_block_count -= 1
            self.block_index.del_tx(tx, relax_assert=relax_assert)
        else:
            self._cache_tx_count -= 1
            self.tx_index.del_tx(tx, relax_assert=relax_assert)

    def get_block_count(self) -> int:
        """Return the cached number of blocks."""
        if not self.with_index:
            raise NotImplementedError
        return self._cache_block_count

    def get_tx_count(self) -> int:
        """Return the cached number of non-block transactions."""
        if not self.with_index:
            raise NotImplementedError
        return self._cache_tx_count

    def get_genesis(self, hash_bytes: bytes) -> Optional[BaseTransaction]:
        """Return the genesis tx with hash `hash_bytes`, or None if not a genesis."""
        assert self._genesis_cache is not None
        return self._genesis_cache.get(hash_bytes, None)

    def get_all_genesis(self) -> Set[BaseTransaction]:
        """Return the set of all genesis blocks/transactions."""
        assert self._genesis_cache is not None
        return set(self._genesis_cache.values())

    def get_transactions_before(self, hash_bytes: bytes,
                                num_blocks: int = 100) -> List[BaseTransaction]:  # pragma: no cover
        """Return up to `num_blocks` non-block ancestors of `hash_bytes`, topologically ordered."""
        ref_tx = self.get_transaction(hash_bytes)
        visited: Dict[bytes, int] = dict()  # Dict[bytes, int]
        result = [x for x in self._topological_sort_dfs(ref_tx, visited) if not x.is_block]
        result = result[-num_blocks:]
        return result

    def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> List[Block]:
        """BFS through parents of block `hash_bytes`, collecting up to `num_blocks` blocks.

        :raises TransactionIsNotABlock: when `hash_bytes` is not a block
        """
        ref_tx = self.get_transaction(hash_bytes)
        if not ref_tx.is_block:
            raise TransactionIsNotABlock
        result = []  # List[Block]
        pending_visits = deque(ref_tx.parents)  # List[bytes]
        used = set(pending_visits)  # Set[bytes]
        while pending_visits:
            tx_hash = pending_visits.popleft()
            tx = self.get_transaction(tx_hash)
            if not tx.is_block:
                continue
            assert isinstance(tx, Block)
            result.append(tx)
            if len(result) >= num_blocks:
                break
            for parent_hash in tx.parents:
                if parent_hash not in used:
                    used.add(parent_hash)
                    pending_visits.append(parent_hash)
        return result

    def get_all_sorted_txs(self, timestamp: int, count: int, offset: int) -> TransactionsIndex:
        """ Returns ordered blocks and txs in a TransactionsIndex, starting at
        `timestamp` and covering the next `offset + count` entries.
        """
        assert self.all_index is not None

        idx = self.all_index.txs_index.find_first_at_timestamp(timestamp)
        txs = self.all_index.txs_index[idx:idx+offset+count]

        # merge sorted txs and blocks
        all_sorted = TransactionsIndex()
        all_sorted.update(txs)
        return all_sorted
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,707 | CorgiCash/corgi-core | refs/heads/master | /hathor/cli/openapi_files/register.py | # Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
from twisted.web.resource import Resource
_registered_resources = []


def register_resource(resource_class: Resource) -> Resource:
    """ Register a resource class to be added in the openapi docs page.

    Usable as a plain class decorator: the class is returned unchanged.
    """
    # Appending mutates the module-level list in place, so no `global` is needed.
    _registered_resources.append(resource_class)
    return resource_class
def get_registered_resources() -> List[Resource]:
    """ Returns a list with all the resources registered for the docs
    """
    # These imports exist only for their side effects: importing each module runs
    # its module-level code, which presumably applies @register_resource to the
    # resources defined there — TODO confirm against those modules.
    # NOTE(review): `__all__` is imported (and re-bound) several times; the bound
    # names are never used, as the `noqa: 401` markers indicate.
    from hathor.p2p.resources import __all__  # noqa: 401
    from hathor.resources import ProfilerResource  # noqa: 401
    from hathor.stratum.resources import MiningStatsResource  # noqa: 401
    from hathor.transaction.resources import __all__  # noqa: 401
    from hathor.version_resource import VersionResource  # noqa: 401
    from hathor.wallet.resources.nano_contracts import __all__  # noqa: 401
    from hathor.wallet.resources.thin_wallet import __all__  # noqa: 401
    from hathor.websocket import WebsocketStatsResource  # noqa: 401
    global _registered_resources
    return _registered_resources
| {"/hathor/transaction/resources/tips_histogram.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/resources/tips.py": ["/hathor/cli/openapi_files/register.py"], "/hathor/transaction/storage/remote_storage.py": ["/hathor/transaction/storage/transaction_storage.py", "/hathor/transaction/storage/subprocess_storage.py"], "/hathor/transaction/storage/subprocess_storage.py": ["/hathor/transaction/storage/remote_storage.py"], "/hathor/transaction/storage/transaction_storage.py": ["/hathor/transaction/transaction_metadata.py"]} |
73,709 | versace-hub/versace1-rep | refs/heads/master | /avb3_KL.py | import matplotlib.pyplot as plt
import numpy as np
import mdtraj
import keras
from deep_boltzmann.networks.training import MLTrainer, FlexibleTrainer
from deep_boltzmann.networks.invertible import invnet, EnergyInvNet
from deep_boltzmann.models.openmm import OpenMMEnergy
import mdtraj as md
from simtk import openmm, unit
import sys, os, shutil
import time
import tensorflow as tf
from avb3 import *
# Stage bookkeeping used to restart training from a given stage.
# NOTE: if training fails with a GPU out-of-memory "Resource exhausted" error,
#       lower the batch size and increase the number of stages.
# If training was interrupted, set the interrupted stage number below
# (saveconfig['stage']) to restart from that stage.
restart = True
weight_file_name = "avb3_ML_stage5_saved.pkl" # when restart=False: name of the weight file saved by the ML training
inter_model = False # whether to save an intermediate model after each stage (set False if saving takes too long)
saveconfig = {}
saveconfig['stage'] = 2
# Per-stage training schedule lists (epochs, cutoff energies, KL loss weights, ...)
layer_types = 'R'*12 # layer types to set
nl_layers = 4 # total number of hidden layers + 1
nl_hidden=[512, 254, 512]
batch_size = 1000
clipnorm = 1.0
lrs = [0.00001, 0.00005, 0.0001, 0.0001, 0.0001]
epochs_KL = [20, 30, 30, 30, 30]
high_energies = [1e11, 1e10, 1e10, 1e9, 1e8, 1e7, 1e6, 1e5, 1e5, 1e5, 1e5, 1e4, 1e4, 1e4, 1e3, 1e3, 1e2, 1e1, 0.]
max_energies = [1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20, 1e20]
w_KLs = [1e-13, 1e-12, 1e-12, 1e-5, 1e-4, 1e-5, 1e-5, 1e-5, 5e-5, 1e-4, 5e-4, 5e-4, 5e-3, 5e-3, 5e-2, 5e-2]
w_MLs = [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]
w_RCs = [20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20]
w_L2_angles = [1e-3, 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]
rc_min=0
rc_max=53
# Data paths
# put "avb3_head.pdb" in the data directory
pdb_dir = '../data/'
sim_x = np.load(pdb_dir+'sim_x.npy')
def setup_AVB3(multi_gpu=False):
    """ Set up the Integrin-avb3-head energy model.

    Returns
    -------
    top [MDTraj Topology object] : Topology object for AVB3
    system [OpenMM System object] : System object for AVB3
    avb3_omm_energy [Energy model] : Energy model for AVB3
    """
    INTEGRATOR_ARGS = (300*unit.kelvin, 1.0/unit.picoseconds, 2.0*unit.femtoseconds)
    from simtk.openmm import app
    # Load the pdb structure and create the pdb object
    pdb = app.PDBFile(pdb_dir + 'avb3_head.pdb')
    # Load OpenMM built-in force-field files and create a ForceField object
    # (implicit solvent [GB-obc] model)
    forcefield = openmm.app.ForceField('amber99sbildn.xml', 'amber99_obc.xml')
    # Uncomment the following line to use the CHARMM force field instead
    # forcefield = openmm.app.ForceField('charmm36.xml')
    # Combine the pdb and ForceField objects, adding simulation conditions,
    # to create the System object
    system = forcefield.createSystem(pdb.topology, removeCMMotion=False,
                                     nonbondedMethod=app.CutoffNonPeriodic, nonbondedCutoff=1.0*unit.nanometers,
                                     constraints=None, rigidWater=True)
    # Define the equation-of-motion integrator
    integrator = openmm.LangevinIntegrator(300*unit.kelvin, 1.0/unit.picoseconds, 2.0*unit.femtoseconds)
    # Bundle the pdb, system and integrator into a Simulation object
    simulation = openmm.app.Simulation(pdb.topology, system, integrator)
    # Create the energy-evaluation model through the OpenMM API
    avb3_omm_energy = OpenMMEnergy(openmm_system=system,
                                   openmm_integrator=openmm.LangevinIntegrator,
                                   length_scale=unit.nanometers,
                                   n_atoms=md.Topology().from_openmm(simulation.topology).n_atoms,
                                   openmm_integrator_args=INTEGRATOR_ARGS,
                                   multi_gpu=multi_gpu)
    # Convert the OpenMM topology into an MDTraj topology through MDTraj's OpenMM API
    mdtraj_topology = md.Topology().from_openmm(pdb.topology)
    return mdtraj_topology, system, avb3_omm_energy
# Define the ITGAVB3 model
print('Integrin AVB3 set up\n')
sys.stdout.flush()
top, system, mm_avb3 = setup_AVB3()
print('Data loaded\n')
sys.stdout.flush()
# Number of frames in the trajectory data
nframes = sim_x.shape[0]
# Dimension of each frame (3 x number of atoms)
dim = sim_x.shape[1]
# Per-atom mass array
weights = np.array([system.getParticleMass(i).value_in_unit(unit.dalton) for i in range(system.getNumParticles())])
CartIndices, ZIndices = get_indices(top, cartesian_CYS=True)
if not restart:
    # Load the ML-pretrained model
    print('Loading ML pretrained weights...'.format(saveconfig['stage']))
    sys.stdout.flush()
    s_time = time.time()
    bg = EnergyInvNet.load("./"+ weight_file_name, mm_avb3)
    load_time = time.time() - s_time
    print('Weights loaded.')
    print('Loading Time: {0}'.format(load_time) + "[sec]")
    sys.stdout.flush()
    # Set up reaction-coordinate function 1 (betaA/hybrid hinge angle)
    ini_conf = sim_x[0]
    betaA_atoms, betaA_index = getDomainIndices(top, 'chainid 1 and not (resid 437 to 489 or resid 735 to 816)', atomwise=True)
    hybrid_atoms, hybrid_index = getDomainIndices(top, 'chainid 1 and (resid 437 to 489 or resid 735 to 816)', atomwise=True)
    weights_betaA = weights[betaA_atoms]
    weights_hybrid = weights[hybrid_atoms]
    hinge = HingeAngleRC(ini_conf=ini_conf,
                         ref_index=betaA_index, mob_index=hybrid_index,
                         ref_weights=weights_betaA, mob_weights=weights_hybrid)
    # Set up reaction-coordinate function 2 (LEU CB-CB distance; currently disabled)
    #_, Leu134CB = getDomainIndices(top, 'index 7799', atomwise=True)
    #_, Leu333CB = getDomainIndices(top, 'index 10863', atomwise=True)
    #LEUdist = distRC(Leu134CB, Leu333CB)
    # Merge the two reaction-coordinate functions (currently disabled)
    #MRCfunc = MergeRC(hinge, LEUdist)
else:
    print('RESTART FROM STAGE:{0}'.format(saveconfig['stage']))
    sys.stdout.flush()
    s_time = time.time()
    # When restarting: load the weights saved at the end of the stage just
    # before the interrupted one.
    print('Loading Stage{0} weights...'.format(saveconfig['stage']-1))
    sys.stdout.flush()
    bg = EnergyInvNet.load("./avb3_KL_stage{0}_saved.pkl".format(saveconfig['stage']-1), mm_avb3)
    load_time = time.time() - s_time
    print('Weights loaded.')
    print('Loading Time: {0}'.format(load_time) + "[sec]")
    sys.stdout.flush()
    # Set up reaction-coordinate function 1 (betaA/hybrid hinge angle)
    ini_conf = sim_x[0]
    betaA_atoms, betaA_index = getDomainIndices(top, 'chainid 1 and not (resid 437 to 489 or resid 735 to 816)', atomwise=True)
    hybrid_atoms, hybrid_index = getDomainIndices(top, 'chainid 1 and (resid 437 to 489 or resid 735 to 816)', atomwise=True)
    weights_betaA = weights[betaA_atoms]
    weights_hybrid = weights[hybrid_atoms]
    hinge = HingeAngleRC(ini_conf=ini_conf,
                         ref_index=betaA_index, mob_index=hybrid_index,
                         ref_weights=weights_betaA, mob_weights=weights_hybrid)
    # Set up reaction-coordinate function 2 (LEU CB-CB distance; currently disabled)
    #_, Leu134CB = getDomainIndices(top, 'index 7799', atomwise=True)
    #_, Leu333CB = getDomainIndices(top, 'index 10863', atomwise=True)
    #LEUdist = distRC(Leu134CB, Leu333CB)
    # Merge the two reaction-coordinate functions (currently disabled)
    #MRCfunc = MergeRC(hinge, LEUdist)
# Run the KL+ML+RC training.
# ML here is not weighted training, and no angle-loss training of the M_layer is done.
# NOTE: original comment says the learning rate (=0.001) is hard-coded, but
# NOTE(review): lr=lrs is actually passed below — the comment may be stale; confirm.
print('KL Training start!!\n')
sys.stdout.flush()
train_KL(bg, sim_x, epochs_KL, high_energies, max_energies, w_KLs, lr=lrs, clipnorm=clipnorm, w_ML=w_MLs, batch_size=batch_size, stage=saveconfig['stage'],
         rc_func=hinge, rc_min=rc_min, rc_max=rc_max, multi_rc=False, w_RC=w_RCs, w_L2_angle=w_L2_angles, inter_model=inter_model)
# Save the trained model
print('KL Training completed!!\n')
sys.stdout.flush()
s_time = time.time()
bg.save('./avb3_save_after_KL.pkl')
save_time = time.time() - s_time
print('Saving completed.\n All task finished!\n')
print('Saving Time: {0}'.format(save_time) + "[sec]")
sys.stdout.flush()
| {"/avb3_KL.py": ["/avb3.py"], "/avb3_ML.py": ["/avb3.py"]} |
73,710 | versace-hub/versace1-rep | refs/heads/master | /avb3.py | import matplotlib.pyplot as plt
import numpy as np
import mdtraj
import keras
from deep_boltzmann.networks.training import MLTrainer, FlexibleTrainer
from deep_boltzmann.networks.invertible import invnet, EnergyInvNet
from deep_boltzmann.models.openmm import OpenMMEnergy
import mdtraj as md
from simtk import openmm, unit
import sys, os, shutil
import time
import tensorflow as tf
def get_indices(top, cartesian_CYS=True, notAminoacid=None):
    """ Return the Z-matrix (one row of 4 atom indices per internal-coordinate
    torsion) and a 1D array with the atom numbers of the "Cartesian" atoms,
    i.e. the atoms that are NOT converted to internal coordinates.
    -------------------------------------------------------------------------------------------------
    Args:
        top [MDtraj topology] : MDTraj topology object
        cartesian_CYS(=True) : whether to keep the CYS heavy atoms (SG, CB) as Cartesian atoms
        notAminoacid(=None) [list] : list of residue-name strings for any non-amino-acid residues
    Returns:
        cart [int_array (n_cartesian_atoms, )] : atom numbers of the Cartesian atoms
        Z [int_array (n_torsions, 4)] : 2D array of the 4-atom index tuples converted to internal coordinates
    """
    from deep_boltzmann.models.proteins import mdtraj2Z
    cartesian = ['CA', 'C', 'N']  # backbone heavy atoms (excluding O) always stay Cartesian
    cart = top.select(' '.join(["name " + s for s in cartesian]))  # 1D array of CA/C/N atom numbers
    if cartesian_CYS:
        # Keep SG/CB of CYS residues Cartesian: Z_ lists only the torsion
        # quadruplets that do not involve them; _carts holds their atom numbers.
        Z_, _carts = mdtraj2Z(top, cartesian="resname CYS and mass>2 and sidechain", notAminoacid=notAminoacid)
        Z_ = np.array(Z_)  # list of lists -> 2D array
        cart = np.sort(np.concatenate((cart, _carts)))  # merge and sort all Cartesian atom numbers
    else:
        # CYS SG/CB are treated as torsion (non-Cartesian) atoms as well.
        # BUG FIX: `notAminoacid` was previously passed to np.array() — a
        # guaranteed TypeError — instead of to mdtraj2Z(), where it belongs.
        Z_ = np.array(mdtraj2Z(top, notAminoacid=notAminoacid))
    return cart, Z_
def getDomainIndices(top, select, atomwise=True):
    """ Return atom numbers and xyz-coordinate indices for a selected domain
    (MDTraj-topology version).
    ----------------------------------------------------------------------------------
    Args:
        top [MDtraj topology] : MDTraj topology object
        select [str] : MDTraj/OpenMM selection string for the wanted domain
        atomwise(=True) [bool] : True  -> xyz index array returned with shape (-1, 3)
                                 False -> xyz index array returned flattened (1D)
    Returns:
        chosen_atoms [array] : atom numbers of the selected domain
        chosen_xyz [array] : xyz-coordinate indices of the selected domain
    """
    # Lookup table mapping atom number -> its three flat coordinate indices.
    xyz_lookup = np.arange(top.n_atoms * 3).reshape(-1, 3)
    chosen_atoms = top.select(select)
    chosen_xyz = xyz_lookup[chosen_atoms]
    return (chosen_atoms, chosen_xyz) if atomwise else (chosen_atoms, chosen_xyz.flatten())
def getDomainIndicesPrody(pdb, domain, atomwise=True):
    """ Return xyz-coordinate indices for a selected domain (ProDy-topology version).
    ----------------------------------------------------------------------------------
    Args:
        pdb [Prody AtomGroup] : ProDy PDB topology of the whole protein
        domain [prody Selection] : ProDy selection object (the domain of interest)
        atomwise(=True) [bool] : True  -> xyz index array returned with shape (-1, 3)
                                 False -> xyz index array returned flattened (1D)
    Returns:
        picked [array] : xyz-coordinate indices of the selected domain
    """
    # Lookup table mapping atom number -> its three flat coordinate indices.
    coord_lookup = np.arange(pdb.numAtoms() * 3).reshape(-1, 3)
    picked = coord_lookup[domain.getIndices()]
    if not atomwise:
        return picked.flatten()
    return picked
def calcCOM(conf, weights):
    """ Weighted center of mass of the input configurations [conf (B, natom, 3)].

    Returns an array of shape (B, 1, 3).
    """
    total_weight = np.sum(weights)
    # keepdims=True keeps the reduced atom axis, giving the (B, 1, 3) shape directly.
    return np.sum(conf * weights, axis=1, keepdims=True) / total_weight
def calcCOM_tf(conf, weights):
    """ Weighted center of mass of the input configurations [conf (B, natom, 3)]
    (TensorFlow version). Returns a tensor of shape (B, 1, 3).
    """
    total_weight = tf.reduce_sum(weights)
    # keepdims=True keeps the reduced atom axis, giving the (B, 1, 3) shape directly.
    return tf.reduce_sum(conf * weights, axis=1, keepdims=True) / total_weight
def calcTransformMatrix(mobile, target, weights):
    """ Build a batch of 4x4 transform (rotation + translation) matrices that
    superpose `mobile` onto each structure in `target` (weighted, SVD-based).
    NOTE: assumes `mobile` is a single structure being moved and `target` is a
    batch of structures to align to, so be careful.
    Returns an array of shape (B, 4, 4).
    """
    batch_size = np.shape(target)[0]
    weights_dot = np.dot(np.reshape(weights,[-1]).T, np.reshape(weights,[-1]))
    mob_com = calcCOM(mobile, weights) # shape (B, 1, 3)
    tar_com = calcCOM(target, weights) # shape (B, 1, 3)
    # Center both structure sets on their centers of mass.
    mobile = mobile - mob_com
    target = target - tar_com
    #tf.cast(aaa, tf.float32)  (TF port note: cast before matmul)
    # Weighted covariance-like matrix between mobile and target coordinates.
    matrix = np.matmul(np.transpose(mobile * weights, [0,2,1]), target * weights) / weights_dot
    # Singular value decomposition
    U, s, Vh = np.linalg.svd(matrix)
    # Build a 4x4-style correction matrix whose last diagonal entry is
    # sign(det(matrix)) — presumably the reflection fix of the Kabsch
    # algorithm (TODO confirm; standard Kabsch uses sign(det(V U^T))).
    Id = np.reshape(np.tile(np.eye(2),[batch_size,1]), [batch_size,2,2])
    Id = np.concatenate([Id, np.zeros((batch_size,2,1))], axis=2) # (TF port note: becomes tf.concat)
    det = np.reshape(np.sign(np.linalg.det(matrix)), [1,-1])
    bottom = np.transpose(np.concatenate([np.zeros([2, batch_size]), det], axis=0), [1,0])
    Id = np.concatenate([Id, np.expand_dims(bottom, axis=1)], axis=1)
    # Compute the rotation matrix and translation vector.
    rotation = np.matmul(np.transpose(Vh, [0,2,1]), np.matmul(Id, np.transpose(U, [0,2,1]))) # rotation matrix
    translation = tar_com - np.matmul(mob_com, np.transpose(rotation, [0,2,1])) # translation vector
    translation = np.reshape(translation, [batch_size,3,1])
    # Assemble into homogeneous 4x4 transform matrices.
    T = np.concatenate([rotation, translation], axis=2)
    T = np.concatenate([T, np.tile(np.array([[[0.,0.,0.,1.]]]),[batch_size,1,1])], axis=1)
    return T # shape (B, 4, 4)
def calcTransformMatrix_tf(mobile, target, weights):
    """ Build a batch of 4x4 transform (rotation + translation) matrices
    (TensorFlow version of calcTransformMatrix).
    NOTE: assumes `mobile` is a single structure being moved and `target` is a
    batch of structures to align to, so be careful.
    Returns a tensor of shape (B, 4, 4).
    """
    batch_size = tf.shape(target)[0]
    weights_dot = np.dot(np.reshape(weights,[-1]).T, np.reshape(weights,[-1]))
    mob_com = calcCOM_tf(mobile, weights) # shape (B, 1, 3)
    tar_com = calcCOM_tf(target, weights) # shape (B, 1, 3)
    # Center both structure sets on their centers of mass.
    mobile = mobile - mob_com
    target = target - tar_com
    #tf.cast(aaa, tf.float32)  (port note: cast before matmul)
    matrix = tf.matmul(tf.transpose(mobile * weights, [0,2,1]), target * weights) / weights_dot
    # Singular value decomposition
    # NOTE: TensorFlow's SVD output order differs from NumPy's!
    #       TF: s, U, Vh   NumPy: U, s, Vh
    #       Also, TF's Vh must be transposed to match NumPy's Vh.
    s, U, Vh = tf.linalg.svd(matrix, full_matrices=True)
    Vh = tf.transpose(Vh, [0,2,1])
    # Build the det-sign correction matrix (mirrors calcTransformMatrix).
    Id = tf.reshape(tf.tile(tf.eye(2),[batch_size,1]), [batch_size,2,2])
    Id = tf.concat([Id, tf.zeros((batch_size,2,1))], axis=2)
    det = tf.reshape(tf.sign(tf.linalg.det(matrix)), [1,-1])
    bottom = tf.transpose(tf.concat([tf.zeros([2, batch_size]), det], axis=0), [1,0])
    Id = tf.concat([Id, tf.expand_dims(bottom, axis=1)], axis=1)
    # Compute the rotation matrix and translation vector.
    rotation = tf.matmul(tf.transpose(Vh, [0,2,1]), tf.matmul(Id, tf.transpose(U, [0,2,1]))) # rotation matrix
    translation = tar_com - tf.matmul(mob_com, tf.transpose(rotation, [0,2,1])) # translation vector
    translation = tf.reshape(translation, [batch_size,3,1])
    # Assemble into homogeneous 4x4 transform matrices.
    T = tf.concat([rotation, translation], axis=2)
    T = tf.concat([T, tf.tile(tf.constant([[[0.,0.,0.,1.]]]),[batch_size,1,1])], axis=1)
    return T # shape (B, 4, 4)
def applyTransformMatrix(T, mobile):
    """ Apply a batch of homogeneous 4x4 transforms [T (B, 4, 4)] to the
    configurations [mobile (B, natom, 3)]. Returns shape (B, natom, 3).
    """
    rot = T[:, :3, :3]
    shift = T[:, :3, 3][:, None, :]
    # new[b, k, i] = sum_j mobile[b, k, j] * rot[b, i, j]   (i.e. mobile @ rot^T)
    return np.einsum('bkj,bij->bki', mobile, rot) + shift
def applyTransformMatrix_tf(T, mobile):
    """ Apply a batch of homogeneous 4x4 transforms [T (B, 4, 4)] to the
    configurations [mobile (B, natom, 3)] (TensorFlow version).
    Returns shape (B, natom, 3).
    """
    rot = T[:, :3, :3]
    # Keep the slice 2-D with `3:` so transposing yields the (B, 1, 3) shift row.
    shift = tf.transpose(T[:, :3, 3:], [0, 2, 1])
    # transpose_b=True computes mobile @ rot^T without an explicit transpose op.
    return tf.matmul(mobile, rot, transpose_b=True) + shift
class HingeAngleRC(object):
""" 指定した2つのドメイン間のヒンジ角を計算するrcfunc
"""
def __init__(self, ini_conf, ref_index, mob_index, ref_weights, mob_weights):
""" Args:
ini_conf [array (ndim, )] : ヒンジ角を計算するための参照初期配位
ref_index [array (refatoms, 3)] : ヒンジ角を計算する際に、重ね合わせて基準とするドメインの原子座標index
mob_index [array (mobatoms, 3)] : ヒンジ角を計算する際に、動的に動くドメインの原子座標index
ref_weights [array (refatoms, )] : 重ね合わせるドメインの原子の質量配列
mob_weights [array (refatoms, )] : 動的なドメインの原子の質量配列
"""
self.ini_conf = np.reshape(ini_conf, [1,-1,3]).astype(np.float32)
self.ref_index = ref_index
self.mob_index = mob_index
self.ref_weights = np.expand_dims(ref_weights[:,None], axis=0).astype(np.float32)
self.mob_weights = np.expand_dims(mob_weights[:,None], axis=0).astype(np.float32)
self.ini_ref_conf = np.expand_dims(ini_conf, axis=0)[:, self.ref_index].astype(np.float32)
def __call__(self, x):
x = tf.cast(x, tf.float32)
batch_size = tf.shape(x)[0]
ref_conf = tf.gather(x, self.ref_index, axis=1)
mob_conf = tf.gather(x, self.mob_index, axis=1)
ref_T = calcTransformMatrix_tf(self.ini_ref_conf, ref_conf, self.ref_weights) # shape (B, 4, 4)
ini_mob_conf = tf.gather(tf.reshape(applyTransformMatrix_tf(ref_T, self.ini_conf), [batch_size,-1]), self.mob_index, axis=1)
com1 = calcCOM_tf(ini_mob_conf, self.mob_weights) # shape (B, 1, 3)
com2 = calcCOM_tf(mob_conf, self.mob_weights) # shape (B, 1, 3)
pl = (com2 - com1) / tf.linalg.norm(com2 - com1, axis=2, keepdims=True) # shape (B, 1, 3)
mob_T = calcTransformMatrix_tf(ini_mob_conf, mob_conf, self.mob_weights) # shape (B, 4, 4)
t21 = tf.reshape(tf.tile(tf.eye(3),[batch_size,1]), [batch_size,3,3])
t21 = tf.concat([t21, tf.reshape(com1 - com2, [batch_size,3,1])], axis=2)
t21 = tf.concat([t21, tf.tile(tf.constant([[[0.,0.,0.,1.]]]),[batch_size,1,1])], axis=1)
rot2 = tf.matmul(mob_T, t21)
p1 = applyTransformMatrix_tf(rot2, com1) # shape (B, 1, 3)
p2 = applyTransformMatrix_tf(rot2, p1) # shape (B, 1, 3)
rideal = tf.cross((com1-p2), (com1-p1))
rideal = rideal / tf.linalg.norm(rideal, axis=2, keepdims=True) # shape (B, 1, 3)
new = com2 - tf.matmul(rideal, tf.transpose(com2-com1, [0,2,1])) * rideal # shape (B, 1, 3)
cosine = tf.matmul((new-com1)/tf.linalg.norm(new-com1, axis=2, keepdims=True),
tf.transpose((new-p1)/tf.linalg.norm(new-p1, axis=2, keepdims=True), [0,2,1]))
angl = tf.acos(cosine)
perp = tf.matmul(rideal, tf.transpose(pl, [0,2,1])) # shape (B, 1, 1)
angp = tf.abs(tf.asin(perp)) # shape (B, 1, 1)
pro = rideal - perp * pl # shape (B, 1, 3)
tang = tf.cos(angp) * tf.tan(0.5 * angl) # shape (B, 1, 1)
angle = tf.reshape(2.0 * tf.atan(tang), [-1]) * 180.0 / np.pi # 度数に変換している
return angle
    def calcRC(self, x):
        """Compute the hinge angle (in degrees) for a batch of trajectory
        data (NumPy version; keeps in lockstep with __call__ above).

        x: flattened xyz coordinates, assumed shape (batch, 3*natom) — TODO confirm.
        Returns a 1-D array of hinge angles, one per batch element.
        """
        x = x.astype(np.float32)
        batch_size = np.shape(x)[0]
        ref_conf = x[:, self.ref_index]   # TF version uses tf.gather
        mob_conf = x[:, self.mob_index]   # TF version uses tf.gather
        ref_T = calcTransformMatrix(self.ini_ref_conf, ref_conf, self.ref_weights)    # shape (B, 4, 4)
        ini_mob_conf = np.reshape(applyTransformMatrix(ref_T, self.ini_conf), [batch_size,-1])[:, self.mob_index]
        #ini_mob_conf = np.reshape(ini_mob_conf, [batch_size,-1,3])
        com1 = calcCOM(ini_mob_conf, self.mob_weights)  # shape (B, 1, 3)
        com2 = calcCOM(mob_conf, self.mob_weights)      # shape (B, 1, 3)
        # Unit vector from the initial to the current mobile-domain COM.
        pl = (com2 - com1) / np.linalg.norm(com2 - com1, axis=2, keepdims=True)  # shape (B, 1, 3)
        mob_T = calcTransformMatrix(ini_mob_conf, mob_conf, self.mob_weights)  # shape (B, 4, 4)
        # Build a (B, 4, 4) translation matrix taking com2 back to com1.
        t21 = np.reshape(np.tile(np.eye(3),[batch_size,1]), [batch_size,3,3])
        t21 = np.concatenate([t21, np.reshape(com1 - com2, [batch_size,3,1])], axis=2)
        t21 = np.concatenate([t21, np.tile(np.array([[[0.,0.,0.,1.]]]),[batch_size,1,1])], axis=1)
        rot2 = np.matmul(mob_T, t21)
        p1 = applyTransformMatrix(rot2, com1)  # shape (B, 1, 3)
        p2 = applyTransformMatrix(rot2, p1)    # shape (B, 1, 3)
        rideal = np.cross((com1-p2), (com1-p1))
        rideal = rideal / np.linalg.norm(rideal, axis=2, keepdims=True)  # shape (B, 1, 3)
        # Project com2 onto the plane orthogonal to the ideal rotation axis.
        new = com2 - np.matmul(rideal, np.transpose(com2-com1, [0,2,1])) * rideal    # shape (B, 1, 3)
        cosine = np.matmul((new-com1)/np.linalg.norm(new-com1, axis=2, keepdims=True),
                           np.transpose((new-p1)/np.linalg.norm(new-p1, axis=2, keepdims=True), [0,2,1]))
        angl = np.arccos(cosine)  # TF version uses tf.acos()
        perp = np.matmul(rideal, np.transpose(pl, [0,2,1]))  # shape (B, 1, 1)
        angp = np.abs(np.arcsin(perp))  # shape (B, 1, 1); TF version uses tf.asin()
        pro = rideal - perp * pl        # shape (B, 1, 3)
        tang = np.cos(angp) * np.tan(0.5 * angl)  # shape (B, 1, 1)
        angle = np.reshape(2.0*np.arctan(tang), [-1]) * 180.0 / np.pi  # convert to degrees
        return angle
class distRC(object):
    """Compute the mean distance between two domains (centre-of-mass
    distance when masses are supplied).
    ----------------------------------------------------------------------------------------------------
    Attributes:
        indices1 indices2 [int_array (natom, 3)] : xyz coordinate indices of each domain
        atomindices1 atomindices2 (=None) [int_array (natom, )] : atom indices of each domain
        weights [float_array (natom, )] : masses of the whole protein
    Note:
        To compute an inter-domain distance, pass atomindices1 and atomindices2.
        Without weights the result is the distance between mean coordinates;
        with weights it is the centre-of-mass distance.
        For a single inter-atomic distance neither atomindices1, atomindices2
        nor weights need to be passed.
    """
    def __init__(self, indices1, indices2, atomindices1=None, atomindices2=None, weights=None):
        self.indices1 = indices1
        self.indices2 = indices2
        self.atomindices1 = atomindices1
        self.atomindices2 = atomindices2
        self.weights = weights
        # Per-domain mass columns (shape (1, natom, 1)); only built when both
        # atom index sets and the masses are supplied.
        if self.atomindices1 is not None and self.atomindices2 is not None and weights is not None:
            self.weights1 = np.expand_dims(weights[self.atomindices1][:,None], axis=0).astype(np.float32)
            self.weights2 = np.expand_dims(weights[self.atomindices2][:,None], axis=0).astype(np.float32)
    def __call__(self, x):
        """TensorFlow version. x: flattened xyz coordinates (batch, ndim)."""
        # NOTE(review): tf.shape() yields a tensor; this Python-level `if`
        # only works in eager mode / when indices are concrete arrays — confirm.
        if tf.shape(self.indices1)[0] != 1 and tf.shape(self.indices2)[0] != 1:
            if self.weights is not None:
                # Centre-of-mass distance.
                x = tf.cast(x, tf.float32)
                com1 = calcCOM_tf(tf.gather(x, self.indices1, axis=1), self.weights1)
                com2 = calcCOM_tf(tf.gather(x, self.indices2, axis=1) , self.weights2)
                return tf.reshape(tf.linalg.norm(com2 - com1, axis=2, keepdims=True), [-1])
            else:
                # Distance between unweighted mean coordinates.
                mean1 = tf.reduce_mean(tf.gather(x, self.indices1, axis=1) , axis=1)
                mean2 = tf.reduce_mean(tf.gather(x, self.indices2, axis=1) , axis=1)
                return tf.linalg.norm(mean2 - mean1, axis=1)
        else:
            # Single atom pair: plain inter-atomic distance.
            return tf.reshape(tf.linalg.norm(tf.gather(x, self.indices2, axis=1) - tf.gather(x, self.indices1, axis=1), axis=2), [-1])
    def calcRC(self, x):
        """NumPy version; mirrors __call__ above branch for branch."""
        if np.shape(self.indices1)[0] != 1 and np.shape(self.indices2)[0] != 1:
            if self.weights is not None:
                # Centre-of-mass distance.
                x = x.astype(np.float32)
                com1 = calcCOM(x[:, self.indices1], self.weights1)
                com2 = calcCOM(x[:, self.indices2], self.weights2)
                return np.reshape(np.linalg.norm(com2 - com1, axis=2, keepdims=True), [-1])
            else:
                # Distance between unweighted mean coordinates.
                mean1 = np.mean(x[:, self.indices1], axis=1)
                mean2 = np.mean(x[:, self.indices2], axis=1)
                return np.linalg.norm(mean2 - mean1, axis=1)
        else:
            # Single atom pair: plain inter-atomic distance.
            return np.reshape(np.linalg.norm(x[:,self.indices2] - x[:,self.indices1], axis=2), [-1])
class MergeRC(object):
    """Merge the outputs of two 1-output rc functions into one 2-output
    rc function.
    ----------------------------------------------------------------------------
    Attributes:
        rcfunc1, rcfunc2 [rcfunction] : 1-output rcfunc objects returning 1D arrays
        outputdim(=2) [int] : set to 1 to flatten the result to a 1D array
    """
    def __init__(self, rcfunc1, rcfunc2, outputdim=2):
        self.rcfunc1 = rcfunc1
        self.rcfunc2 = rcfunc2
        self.outputdim = outputdim
    def __call__(self, x):
        """TensorFlow path: evaluate both rc functions and stack the columns."""
        columns = [tf.expand_dims(fn(x), axis=1) for fn in (self.rcfunc1, self.rcfunc2)]
        merged = tf.concat(columns, axis=1)
        if self.outputdim == 1:
            return tf.reshape(merged, [-1])  # shape (2B, )
        return merged  # shape (B, 2)
    def calcRC(self, x):
        """NumPy path: evaluate both rc functions and stack the columns."""
        columns = [np.expand_dims(fn.calcRC(x), axis=1) for fn in (self.rcfunc1, self.rcfunc2)]
        merged = np.concatenate(columns, axis=1)
        if self.outputdim == 1:
            return np.reshape(merged, [-1])  # shape (2B, )
        return merged  # shape (B, 2)
def train_ML(bg, xtrain, epochs, batch_sizes, lr=0.001, clipnorm=None, counter=0, log_file='train_ML.log', file_path='./', log_stride=1):
    """Run a batch-size-scheduled ML training loop on the given BG model.
    -------------------------------------------------------------------------------
    Args:
        bg [model] : Boltzmann Generator to train
        xtrain [float_array (nsample, ndim)] : trajectory data used for ML training
        epochs [int] : number of epochs per schedule stage
        batch_sizes [int_list] : batch size for each schedule stage
    Note:
        The total number of training iterations is
        len(batch_sizes) * epochs. After every stage the intermediate
        model is pickled under file_path.
    """
    trainer = MLTrainer(bg, lr=lr, clipnorm=clipnorm)
    with open(log_file, "w") as log:
        for batch_size in batch_sizes:
            stage_started = time.time()
            trainer.train(xtrain, epochs=epochs, batch_size=batch_size, log_file=log, log_stride=log_stride)
            elapsed = time.time() - stage_started
            print('Time spent at Stage{0}:{1}'.format(counter, elapsed) + "[sec]", file=log)
            # Checkpoint the intermediate model and time how long that takes.
            saving_started = time.time()
            print('Intermediate model is now saving...', file=log)
            bg.save(file_path + 'avb3_ML_stage{0}_saved.pkl'.format(counter))
            print('Intermediate result saved', file=log)
            saving_elapsed = time.time() - saving_started
            print('Model Saving Time at Stage{0}:{1}'.format(counter, saving_elapsed) + "[sec]", file=log)
            sys.stdout.flush()
            counter += 1
def train_KL(bg, xtrain, epochs, high_energies, max_energies, w_KLs, lr=0.0001, clipnorm=1.0, batch_size=128, w_ML=1., weigh_ML=False, stage=0, rc_func=None, rc_min=-1,
             rc_max=6, multi_rc=False, w_RC=1., w_L2_angle=0., file_path='./', log_file=None, log_stride=1, inter_model=True):
    """Run a scheduled KL+ML+RC training loop on the given BG model, varying
    epochs, cut-off energies and KL weights per stage. After each stage the
    trained weights can be checkpointed to a pickle file.
    ----------------------------------------------------------------------------------------------------------------------------
    Args:
        bg [model] : Boltzmann Generator to train
        xtrain [float_array (nsample, ndim)] : trajectory data used for ML training
        epochs [int_list] : epochs for each schedule stage
        high_energies [float_list] : cut-off energy for each schedule stage
        w_KLs [float_list] : KL-loss weight for each schedule stage
        batch_size(=128) [int] : batch size for every stage
                                 NOTE: if you run out of memory, lower batch_size
                                 and add more stages
        w_ML(=1.) : ML-loss weight
                    NOTE(review): lr, w_ML, w_RC, w_L2_angle and max_energies
                    are all indexed per stage below, so despite the scalar
                    defaults they must be passed as per-stage sequences — confirm.
        weigh_ML(=False) [bool] : whether to use weighted ML training
        stage(=0) [int] : schedule stage to start from
                          NOTE: if training was interrupted, pass the stage
                          number where it stopped to restart from there
        rc_func(=None) [function] : reaction-coordinate function written in TensorFlow
        rc_min(=-1), rc_max(=6) [float or list] : min/max of the RC search range;
                          for a multi-output rc_func pass lists
                          [rc1_min, ..., rcN_min] / [rc1_max, ..., rcN_max]
        multi_rc(=False) [bool] : set True when rc_func has multiple outputs
        w_RC(=1.) : RC-loss weight (per stage, see above)
        w_L2_angle(=0.) : weight of the M_layer angle loss (per stage, see above)
        file_path(='./') [str] : directory for intermediate weight checkpoints
        log_file(=None) [str] : path of the loss log file; if None a separate
                                log is written per stage
        log_stride(=1) [int] : epoch interval between log lines
        inter_model(=True) : whether to save the model after each stage
                             (set False if saving is slow)
    Note:
        After every stage, 10,000 configurations are generated with the
        current model and the number of samples exceeding each stage's
        cut-off energy is reported, together with per-stage wall-clock times.
    """
    trainers_KL_state3 = []  # kept for parity with the original; never returned
    counter = 0
    for current_stage in range(stage, len(epochs)):
        # BUGFIX: the original assigned `log_file = f'train_KL_stage...'` on the
        # first stage and never reset it, so with log_file=None every later
        # stage reopened (mode "w") and overwrote the *first* stage's log
        # instead of writing one log per stage as documented. Use a local,
        # per-iteration name instead.
        if log_file is None:
            stage_log_file = f'train_KL_stage{current_stage}.log'
        else:
            stage_log_file = log_file
        with open(stage_log_file, "w") as f:
            # One training iteration per schedule stage.
            if counter == 0:  # remember wall-clock start and stage of this run (restart-aware)
                root_time = time.time()
                root_stage = current_stage
            # Start time of the current stage.
            start_time = time.time()
            # Report the current stage's parameters.
            print('-----------------------', file=f)
            print('Stage:{0}\n high_energy={1} w_KL={2}'.format(current_stage, high_energies[current_stage], w_KLs[current_stage]), file=f)
            sys.stdout.flush()
            # Build the trainer for this stage's schedule and train.
            flextrainer = FlexibleTrainer(bg, lr=lr[current_stage], clipnorm=clipnorm, batch_size=batch_size, high_energy=high_energies[current_stage], max_energy=max_energies[current_stage],
                                          w_KL=w_KLs[current_stage], w_ML=w_ML[current_stage], weigh_ML=weigh_ML, w_RC=w_RC[current_stage],
                                          rc_func=rc_func, rc_min=np.array(rc_min), rc_max=np.array(rc_max), multi_rc=multi_rc,
                                          w_L2_angle=w_L2_angle[current_stage])
            flextrainer.train(xtrain, epochs=epochs[current_stage], log_file=f, log_stride=log_stride)
            trainers_KL_state3.append(flextrainer)
            # Generate configurations with the intermediate model and compute their energies.
            samples_z = np.random.randn(10000, bg.dim)
            samples_x = bg.Tzx.predict(samples_z)
            samples_e = bg.energy_model.energy(samples_x)
            # Count, per stage, how many generated samples exceed each cut-off energy.
            energy_violations = [np.count_nonzero(samples_e > E) for E in high_energies]
            # Report the energy statistics of the generated configurations.
            print('Energy violations: Total number of generated samples with energies higher than high_energies', file=f)
            for i, (E, V) in enumerate(zip(high_energies, energy_violations)):
                print('NUM of samples:', V, '\t>\t', 'high_energy at Stage{0}:'.format(i), E, file=f)
            sys.stdout.flush()
            # Report wall-clock times.
            stage_time = time.time() - start_time
            total_time = time.time() - root_time
            print('Time spent at Stage{0}:{1}'.format(current_stage, stage_time) + "[sec]", file=f)
            print('Total time from Stage{0}:{1}'.format(root_stage, total_time) + "[sec]", file=f)
            sys.stdout.flush()
            # Checkpoint the intermediate model (pickling can be slow).
            if inter_model:
                save_start = time.time()
                print('Intermediate model is now saving...', file=f)
                bg.save(file_path + 'avb3_intermediate_model_stage{0}_saved.pkl'.format(current_stage))
                print('Intermediate result saved', file=f)
                save_time = time.time() - save_start
                print('Model Saving Time at Stage{0}:{1}'.format(current_stage, save_time) + "[sec]", file=f)
                sys.stdout.flush()
            counter += 1
| {"/avb3_KL.py": ["/avb3.py"], "/avb3_ML.py": ["/avb3.py"]} |
73,711 | versace-hub/versace1-rep | refs/heads/master | /avb3_ML.py | import matplotlib.pyplot as plt
import numpy as np
import mdtraj
from deep_boltzmann.networks.training import MLTrainer, FlexibleTrainer
from deep_boltzmann.networks.invertible import invnet, EnergyInvNet
from deep_boltzmann.models.openmm import OpenMMEnergy
import mdtraj as md
from simtk import openmm, unit
import sys, os, shutil
import time
import tensorflow as tf
from avb3 import *
# Hyperparameters
layer_types = 'R'*12  # types of layers to stack (12 RealNVP layers)
nl_layers = 4  # total number of hidden layers + 1
nl_hidden=[512, 254, 512]  # node counts of the hidden layers
epochs_ML = 1000  # number of epochs
batch_sizes_ML = [512]  # batch sizes (one schedule stage per entry)
current_stage = 5  # current training stage
restart = True  # whether to restart from a saved model
weight_file_name = "avb3_ML_stage4_saved.pkl"  # when restart=True, name of the saved weight file
lr = 0.0005  # learning rate
clipnorm = 0.6  # gradient clipping norm
# Data paths
# put 'avb3_head.pdb' and 'sim_x.npy' into the data directory
pdb_dir = '../data/'
sim_x = np.load(pdb_dir+'sim_x.npy')
# setup ITGAVB3 energy model
def setup_AVB3(multi_gpu=False):
    """ Set up the Integrin-avb3-head energy model.
    Returns
    -------
    top [MDTraj Topology object] : Topology object for AVB3
    system [OpenMM System object] : System object for AVB3
    avb3_omm_energy [Energy model] : Energy model for AVB3
    """
    INTEGRATOR_ARGS = (300*unit.kelvin, 1.0/unit.picoseconds, 2.0*unit.femtoseconds)
    from simtk.openmm import app
    # Load the pdb structure and create a pdb object.
    pdb = app.PDBFile(pdb_dir + 'avb3_head.pdb')
    # Load OpenMM's built-in force-field files (implicit solvent [GB-obc] model).
    forcefield = openmm.app.ForceField('amber99sbildn.xml', 'amber99_obc.xml')
    # Uncomment the following line to use the CHARMM force field instead.
    # forcefield = openmm.app.ForceField('charmm36.xml')
    # Combine the pdb and ForceField objects into a System with the simulation conditions.
    system = forcefield.createSystem(pdb.topology, removeCMMotion=False,
                                     nonbondedMethod=app.CutoffNonPeriodic, nonbondedCutoff=1.0*unit.nanometers,
                                     constraints=None, rigidWater=True)
    # Define the integrator for the equations of motion.
    integrator = openmm.LangevinIntegrator(300*unit.kelvin, 1.0/unit.picoseconds, 2.0*unit.femtoseconds)
    # Bundle the pdb topology, system and integrator into a Simulation object.
    simulation = openmm.app.Simulation(pdb.topology, system, integrator)
    # Build the energy-evaluation model through the openMM API.
    avb3_omm_energy = OpenMMEnergy(openmm_system=system,
                                   openmm_integrator=openmm.LangevinIntegrator,
                                   length_scale=unit.nanometers,
                                   n_atoms=md.Topology().from_openmm(simulation.topology).n_atoms,
                                   openmm_integrator_args=INTEGRATOR_ARGS,
                                   multi_gpu=multi_gpu)
    # Convert the openMM topology to an MDTraj topology via MDTraj's openMM API.
    mdtraj_topology = md.Topology().from_openmm(pdb.topology)
    return mdtraj_topology, system, avb3_omm_energy
# Define the ITGAVB3 model.
print('Integrin AVB3 set up\n')
sys.stdout.flush()
top, system, mm_avb3 = setup_AVB3()
print('Data loaded\n')
sys.stdout.flush()
# Coordinate array of the MD trajectory (first frame = reference conformation).
ini_conf = sim_x[0]
#np.save('ini_conf', ini_conf)
# Number of frames in the trajectory data.
nframes = sim_x.shape[0]
# Dimensionality of each frame (3 x number of atoms).
dim = sim_x.shape[1]
# Shuffle the frames.
#np.random.shuffle(sim_x)
# Save the training samples.
#np.save('sim_x', sim_x)
print('Data loaded\n')
sys.stdout.flush()
# Per-atom mass array.
weights = np.array([system.getParticleMass(i).value_in_unit(unit.dalton) for i in range(system.getNumParticles())])
# Backbone atom indices and the side-chain Z matrix.
CartIndices, ZIndices = get_indices(top, cartesian_CYS=True)
# Define the BG:
# 12 RealNVP layers are used; the fully connected net inside each RealNVP
# has 3 hidden layers and 1 output layer.
# Note that nl_hidden is passed as a list to set each hidden layer's node
# count individually.
print('BG now set up...\n')
start_time = time.time()
if not restart:
    bg = invnet(dim, layer_types, energy_model=mm_avb3,
                ic=ZIndices, ic_cart=CartIndices, ic_norm=sim_x,
                nl_layers=nl_layers, nl_hidden=nl_hidden, nl_activation='relu', nl_activation_scale='tanh')
else:
    bg = EnergyInvNet.load("./"+ weight_file_name, mm_avb3)
load_time = time.time() - start_time
print('BG constructed\n')
print('Time spent at loading:{0}'.format(load_time) + "[sec]")
sys.stdout.flush()
# Run the scheduled ML training (batch size schedule defined above).
print('ML Training Start !!\n')
sys.stdout.flush()
train_ML(bg, sim_x, epochs_ML, batch_sizes_ML, lr=lr, clipnorm=clipnorm, counter=current_stage)
print('ML Training Finished !!\n')
sys.stdout.flush()
# Save the trained model.
#print('ML Training completed!!\n Trained model is now saving...\n')
#sys.stdout.flush()
#s_time = time.time()
#bg.save('./avb3_save_after_ML.pkl')
#save_time = time.time() - s_time
#print('Saving completed.\n All task finished!\n')
#print('Saving Time: {0}'.format(save_time) + "[sec]")
#sys.stdout.flush()
# Generate 10,000 configurations from the prior and evaluate their energies.
samples_z = np.random.randn(10000, bg.dim)
samples_x = bg.Tzx.predict(samples_z)
samples_e = bg.energy_model.energy(samples_x)
high_energies = [1e26, 1e25, 1e24, 1e23, 1e22, 1e21, 1e20, 1e19, 1e18, 1e17, 1e16, 1e15, 1e14, 1e13, 1e12, 1e11,
                 1e10, 1e9, 1e8, 1e7, 1e6, 1e5, 1e4, 1e3]
# Count, per cut-off, how many generated samples exceed each energy threshold.
energy_violations = [np.count_nonzero(samples_e > E) for E in high_energies]
# Report the energy statistics of the generated configurations.
print('Energy violations: Total number of generated samples with energies higher than high_energies')
for i, (E, V) in enumerate(zip(high_energies, energy_violations)):
    print('NUM of samples:', V, '\t>\t', 'high_energy at Stage{0}:'.format(i), E)
sys.stdout.flush()
| {"/avb3_KL.py": ["/avb3.py"], "/avb3_ML.py": ["/avb3.py"]} |
73,712 | SB48/Argupedia | refs/heads/master | /argupedia/argupedia/argupedia/database.py |
from django.http import HttpResponse
from django.conf import settings
from django.shortcuts import render
from django.contrib import auth
import pyrebase
import math
import random
import numpy as np
import pandas as pd
# Firebase project configuration.
# NOTE(review): credentials are hard-coded in source. Firebase web API keys
# are generally client-visible, but confirm this is intended and that the
# Realtime Database security rules restrict access; consider loading this
# from Django settings or environment variables instead.
config = {
    'apiKey': "AIzaSyDTNvuZOGHUuuG1PTkBZOW64xdb9Ry5TWE",
    'authDomain': "argupedia-d2e12.firebaseapp.com",
    'databaseURL': "https://argupedia-d2e12.firebaseio.com",
    'projectId': "argupedia-d2e12",
    'storageBucket': "argupedia-d2e12.appspot.com",
    'messagingSenderId': "356627337302",
    'appId': "1:356627337302:web:db1808484ddac94be2ebf7",
    'measurementId': "G-8JS80GCSHV"
}
firebase = pyrebase.initialize_app(config)
# Module-level handles shared by the Database class below.
authF = firebase.auth()
db = firebase.database()
storage = firebase.storage()
class Database:
    def __init__(self):
        """Start every Database wrapper in the logged-out state."""
        self.registerInitialState()
#when the file is created it is initiated with an empty userID
def registerInitialState(self):
self.userID = None
self.idToken = None
#when a user logs in or registers the userID is set
def set_uid(self, currentUid, userIDToken):
self.idToken = userIDToken
self.userID = currentUid
#returns the stored userID to the views to check if the user is logged in
    def return_uid(self):
        """Return the stored uid (None when no user is logged in)."""
        return self.userID
def log_out(self):
self.idToken = None
self.userID = None
    def return_firebaseID(self):
        """Return the stored Firebase ID token (None when logged out)."""
        return self.idToken
def check_logged_in(self):
if (self.userID is None):
return False
else:
return True
    def delete_user(self):
        """Placeholder: account deletion is not implemented yet."""
        pass
    #function to delete to be implemented
#This returns the username of the current user that is logged in
def return_username(self):
#return self.userID
if self.check_logged_in():
#reference https://argupedia-d2e12.firebaseio.com/users/n3mWvrmhtehe7oC2FEQbh7q9dlpg2
try:
username = db.child('users').child(self.userID).child('username').get().val()
return (username)
except:
print("Error fetching username")
return None
else:
return None
# This function returns all arguments in the topic section of the database that have an image listed.
# These arguments are sent to the home page along with their related image so they can be
# displayed and link to the relevant argument
    def return_images(self):
        """Return a {argument_key: image_url} mapping for every topic that
        has an image, so the home page can show images linking to arguments.
        """
        allTopics = db.child("topics").get()
        # Topics arrive as nested dicts; round-tripping through a DataFrame
        # fills missing fields (e.g. 'image') with NaN.
        topicsDF = pd.DataFrame(allTopics.val())
        topics = topicsDF.to_dict()
        toReturn = {}
        for topic, topicInformation in topics.items():
            # NOTE(review): the string comparison against "nan" is a fragile
            # missing-value check (pd.isna(...) would be explicit) — left as-is.
            if (str(topicInformation['image']) != "nan") :
                if type(topicInformation['arguments']) is dict :
                    argumentsDict = topicInformation['arguments']
                    # This ensures that an image only links to one argument. Even if the topic has many arguments.
                    # Currently the choice of argument to show is random but this could be changed by altering this line
                    argument = random.choice(list(argumentsDict.keys()))
                    toReturn[argument] = topicInformation['image']
        return toReturn
# loop through argument schema under original key, altering labellings via which ones are being attacked
# originalKey is the key to the schema
# key is the new argument that has just been created
# attackingKeys are all the arguments that the new argument, that has just been created, attacks (could be one or multiple)
# alternate and selfAttacking are booleans
    def change_labellings(self, originalKey, key, attackingKeys, alternate, selfAttacking):
        """Propagate in/out/undec labellings through an argument schema after
        a new attacking argument has been added.

        originalKey : key of the schema's root argument
        key : key of the newly created argument
        attackingKeys : key(s) of the argument(s) the new argument attacks
        alternate, selfAttacking : booleans describing the new argument
        """
        # in order to understand the code in this function the following names / keys will be used
        # argument being attacked = B - this could be the original argument for the schema but not necessarily
        # new argument attacking = A - this is the argument that has just been added by the user
        # arguments being attacked by B = C
        # A --> B --> C or A <--> B --> C
        #returns the argument(s) that the current argument is attacking
        for attacking in attackingKeys:
            # information to updates argument B with to say it is now being attacked by A
            updateAttackedBy = {key : "attackedBy"}
            #if the first argument / originalKey is the one being attacked - original argument is argument B
            if originalKey == attacking:
                # updates originalKey to say it is being attacked by the new argument
                db.child('arguments').child(originalKey).child('attackedBy').update(updateAttackedBy)
                #if the new attacking argument is not an "alternate"
                if alternate == False:
                    #if the new attacking argument is attacking itself then the argument it attacks does not need to change
                    if selfAttacking == False:
                        # as long as the new argument A is not self attacking or alternate then the arguments B which A attacks will always be out
                        # as they now have an attacker which is in
                        labellings = {'in': False, 'out': True, 'undec': False}
                        db.child('arguments').child(originalKey).child('labellings').set(labellings)
                #if A is an alternate to B
                else:
                    #argument B is still the originalKey
                    updateAttackedBy = {attacking : "attackedBy"}
                    #this sets the new argument as attacked by original
                    db.child('arguments').child(originalKey).child('argumentSchema').child(key).child('attackedBy').update(updateAttackedBy)
                    #this sets the original argument as attacking the new argument
                    updateAttacking = {key : "attacking"}
                    db.child('arguments').child(originalKey).child('attacking').update(updateAttacking)
                    #now change the labellings for argument B as undecided as long as the new argument does not attack itself
                    if selfAttacking == False:
                        labellings = {'in': False, 'out': False, 'undec': True}
                        db.child('arguments').child(originalKey).child('labellings').set(labellings)
                #returns the next arguments whose labels need to be edited
                argument = db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).get()
            else: #if argument being attacked is not the original
                # updates argument B to say it is now being attacked by A
                db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).child('attackedBy').update(updateAttackedBy)
                if alternate == False:
                    #if the new attacking argument is attacking itself then the argument it attacks does not need to change
                    if selfAttacking == False:
                        # as long as the new argument A is not self attacking or alternate then the arguments B which A attacks will always be out
                        # as they now have an attacker which is in
                        labellings = {'in': False, 'out': True, 'undec': False}
                        db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).child('labellings').set(labellings)
                #originalKey is not being attacked and alternate is true
                else:
                    #this sets the new argument A as also attacked by B which it attacks
                    updateAttackedBy = {attacking : "attackedBy"}
                    db.child('arguments').child(originalKey).child('argumentSchema').child(key).child('attackedBy').update(updateAttackedBy)
                    #this sets the argument B as also attacking the new argument A
                    updateAttacking = {key : "attacking"}
                    db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).child('attacking').update(updateAttacking)
                    #updates argument B as undecided
                    if selfAttacking == False:
                        labellings = {'in': False, 'out': False, 'undec': True}
                        db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).child('labellings').set(labellings)
            try: #this could be empty if the argument (B) which is now being attacked is itself not attacking any arguments
                #this should be a data frame of the information about argument B
                #returns the next arguments whose labels need to be edited
                if originalKey == attacking:
                    argument = db.child('arguments').child(originalKey).child('attacking').get().val()
                else:
                    argument = db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).child('attacking').get().val()
                try:
                    newAttacking = argument.keys()
                except:
                    multipleKeys = pd.DataFrame(argument).to_dict()
                    newAttacking = multipleKeys.keys()
                if newAttacking is not None:
                    #send through recursively running through the rest of the schema checking which arguments are now in or out
                    self.change_labellings_recursive(originalKey, list(newAttacking), {})
            except:
                # NOTE(review): bare except hides all errors here; narrowing to
                # `except Exception` would avoid swallowing interrupts.
                print("Error returning list of new arguments being attacked")
                pass
# This function will check the rest of the arguments in the schema with the new information and
# recursively check if they have an attacker that is in
# so if there are two arguments which are being attacked by new argument A then this will run twice for both of them
    def change_labellings_recursive(self, originalKey, attackingKeys, alternate):
        """Recursively re-evaluate the in/out/undec labellings of the
        arguments in `attackingKeys` (and everything they attack) after the
        schema's attack graph has changed.

        alternate is a dict used to cap how many times alternate arguments
        are revisited, preventing infinite recursion.
        """
        #dictionary of the next arguments to check
        nextRecursionKeys = {}
        #ensure that looking at alternate arguemnts does not lead to infinite recursion
        alternateCheck = False
        #key is the argument that has just newly been attacked and been made either undecided or out
        #new attacking - this will be named C - are the arguments attacked by key
        for attacking in attackingKeys:
            # the next step when check C's labellings is to look at its attackers
            if attacking == originalKey:
                #below is potential improvement for code - currently had bugs but could be implemented to impove code
                #path = "db.child('arguments')"
                #These are the arguments that need to be checked to determine C's labellings - they are C's attackers
                attackedKeys = db.child('arguments').child(originalKey).child('attackedBy').get().val()
                #These are the arguments that C attacks which will be checked in the next iteration of this function
                nextRecursion = db.child('arguments').child(originalKey).child('attacking').get().val()
                #in order to prevent infinite recursion, there must be a check to ensure the alternates are not repeated
                try:
                    alternateCheck = db.child('arguments').child(originalKey).child('alternate').get().val()
                except:
                    pass
            else:
                #path = "db.child('arguments').child(originalKey).child('argumentSchema')"
                #These are the arguments that need to be checked to determine C's labellings - they are C's attackers
                attackedKeys = db.child('arguments').child(originalKey).child("argumentSchema").child(attacking).child('attackedBy').get().val()
                #These are the arguments that C attacks which will be checked in the next iteration of this function
                nextRecursion = db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).child('attacking').get().val()
                #in order to prevent infinite recursion, there must be a check to ensure the alternates are not repeated
                try:
                    alternateCheck = db.child('arguments').child(originalKey).child("argumentSchema").child(attacking).child('alternate').get().val()
                except:
                    pass
            #add to list of new arguments to check the labellings of
            try:
                for item, value in nextRecursion.items():
                    nextRecursionKeys[item] = 'checkLabel'
            except:
                pass
            newAttacked = list(attackedKeys.keys())
            in_argument_found = False
            labellings = {'in': True, 'out': False, 'undec': False}
            count = 0
            count_out_arguments = 0
            count_undec_arguments = 0
            selfAttack = False
            for element in newAttacked:
                count += 1
                #run through all arguments attacking the arguments in attacking Keys (B) and check if they are in or out
                #finding an in argument is the deciding factor for when the algorithm should stop running
                #even if an undecided attacker is found
                if in_argument_found == False and selfAttack == False:
                    if element == originalKey:
                        check = db.child('arguments').child(originalKey).child('labellings').get().val()
                    else:
                        check = db.child('arguments').child(originalKey).child('argumentSchema').child(element).child('labellings').get().val()
                    if check['in'] == True:
                        labellings = {'in': False, 'out': True, 'undec': False}
                        in_argument_found = True
                    if check['out'] == True:
                        count_out_arguments += 1
                    if check['undec'] == True:
                        count_undec_arguments += 1
            #if the argument is self attacking then it should not have its labellings changed as it will always be out
            try:
                for key, value in nextRecursionKeys.items():
                    if key == originalKey:
                        selfAttack = db.child('arguments').child(originalKey).child('selfAttack').get().val()
                    else:
                        selfAttack = db.child('arguments').child(originalKey).child('argumentSchema').child(key).child('selfAttack').get().val()
                    if selfAttack == True:
                        nextRecursionKeys.pop(key)
            except:
                print('Error in checking if the argument is self attacking in the recursive change labellings algorithm')
                pass
            #if all arguments that attack this one are out or there is no in argument found then change this argument to 'in'
            #this allows the possibility of it remaining as 'undecided'
            if in_argument_found == False and selfAttack == False:
                #all arguments attacking this argument (C) are out therefore this one is in
                if count_out_arguments == count:
                    labellings = {'in': True, 'out': False, 'undec': False}
                #if every argument attacking this one (C) is undecided then this argument is undecided
                elif count_undec_arguments == count:
                    labellings = {'in': False, 'out': False, 'undec': True}
            elif in_argument_found == True: #if an in attacker is found then this argument is out
                labellings = {'in': False, 'out': True, 'undec': False}
            if attacking == originalKey:
                db.child('arguments').child(originalKey).child('labellings').set(labellings)
            else:
                db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).child('labellings').set(labellings)
            try:
                for key, value in alternate.items():
                    # needs to be checked more than once as otherwise the labellings
                    # will be dependent on which alternate argument is found first
                    if value < 2:
                        value +=1
                    else:
                        nextRecursionKeys.pop(element)
            except:
                pass
        if bool(nextRecursionKeys):
            recursiveNewAttacking = list(nextRecursionKeys.keys())
            try:
                if alternateCheck == True:
                    #add to list of alternate arguments to limit checking
                    alternate[attacking] = 0
            except:
                self.change_labellings_recursive(originalKey, recursiveNewAttacking, alternate)
# When a user adds an attacking argument, the algorithm begins here.
    def add_attack(self, data, originalKey, fileRef, image, attacking):
        """Add a new attacking argument under the schema rooted at
        originalKey, record it against the current user, and propagate
        labellings. Returns the argument title on success, None on failure.
        """
        try:
            # NOTE(review): str.title() returns a new string and the result is
            # discarded, so the stored title is unmodified. Probably intended:
            # data["title"] = data["title"].title(). TODO confirm.
            data["title"].title()
            data["uidAuthor"] = self.userID
            if data["alternate"] is False:
                #the current argument just added is the starting point for the algorithm which is now "in"
                labellings = {"in": True, "out": False, "undec": False}
            else:
                #if the argument is an alternate then it is undecided, not in
                labellings = {"in": False, "out": False, "undec": True}
            data["labellings"] = labellings
            #push the new argument's data into the database
            argumentKey = db.child("arguments").child(originalKey).child("argumentSchema").push(data)
            #store the id of this argument
            key = argumentKey['name']
            #updates the user table with the argument they now have created
            userData = {key : originalKey}
            db.child("users").child(self.userID).child("attacks").update(userData)
            alternate = data["alternate"]
            selfAttacking = data["selfAttack"]
            if selfAttacking == True:
                self.add_self_attack(originalKey, False, key)
            elif alternate == True: #selfAttacking is false but alternate is true
                self.alternate_update(originalKey, key, attacking)
            self.change_labellings(originalKey, key, attacking, alternate, selfAttacking)
            return (data["title"])
        except:
            # NOTE(review): bare except silently swallows every failure and
            # returns None; narrowing to `except Exception` and logging would
            # make failures diagnosable.
            pass
# In the case of adding an attacking argument which is an alternate to the argument it attacks, this function is called
# This stores information in each argument a link to its alternate.
# This will assist in the comparing of the votes
    def alternate_update(self, originalKey, key, attackingKeys):
        """Link a newly added 'alternate' attacking argument (key) with the
        argument(s) it is an alternate to, storing a back-reference in each
        so that their votes can later be compared.
        """
        alternateCheck = {'alternate': True}
        for attacking in attackingKeys:
            if originalKey == attacking:
                try:
                    alternateArgument = {'alternateArgument' : originalKey}
                    alternateArgumentNew = {'alternateArgument' : key}
                    db.child('arguments').child(originalKey).update(alternateArgumentNew)
                    db.child('arguments').child(originalKey).update(alternateCheck)
                except:
                    print('Error alternate original argument')
                    pass
            else:
                try:
                    alternateArgument = {'alternateArgument' : attacking}
                    alternateArgumentNew = {'alternateArgument' : key}
                    db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).update(alternateArgumentNew)
                    db.child('arguments').child(originalKey).child('argumentSchema').child(attacking).update(alternateArgumentNew)
                except:
                    print('Error alternate secondary argument')
            # Store on the new argument which argument it is an alternate of.
            # NOTE(review): alternateArgument is only bound inside the try
            # blocks above; if those raise before assignment this line raises
            # NameError — confirm intended.
            db.child('arguments').child(originalKey).child('argumentSchema').child(key).update(alternateArgument)
            db.child('arguments').child(originalKey).child('argumentSchema').child(key).update(alternateCheck)
#argument attacks itself and is attacked by itself
#original key relates to the original argument premise, original is a boolean to see if the
#argument which is being updated is itself an original argument (not an attacker)
    def add_self_attack(self, originalKey, original, key):
        """Mark an argument as attacking (and attacked by) itself, which
        forces its labelling to 'out'.

        originalKey : key of the schema's root argument
        original : True when the self-attacking argument IS the root argument
                   (not an attacker within the schema)
        key : key of the self-attacking argument
        """
        attacking = {key : "attacking"}
        attacked = {key : "attackedBy"}
        labellings = {"in": False, "out": True, "undec": False}
        #if the new argument is not an attack
        if original:
            db.child('arguments').child(originalKey).child('attacking').update(attacking)
            db.child('arguments').child(originalKey).child('attackedBy').update(attacked)
            db.child('arguments').child(originalKey).child('labellings').set(labellings)
        #if the new argument is an attack on another
        else:
            db.child('arguments').child(originalKey).child('argumentSchema').child(key).child('attacking').update(attacking)
            db.child('arguments').child(originalKey).child('argumentSchema').child(key).child('attackedBy').update(attacked)
            db.child('arguments').child(originalKey).child('argumentSchema').child(key).child('labellings').set(labellings)
# This function adds an argument to the database - this is different to when a user adds an attacking argument
def add_argument(self, data, fileRef, image):
if self.check_logged_in():
try:
data["title"].title()
data["topic"].title()
data["uidAuthor"] = self.userID
labellings = {"in": True, "out": False, "undec": False}
data["labellings"] = labellings
# Adds the argument to the 'arguments' section of the database
argumentKey = db.child("arguments").push(data)
if data['selfAttack'] == True:
self.add_self_attack(argumentKey, True, argumentKey)
key = argumentKey['name']
# Adds the argument to the 'users' section of the database
userData = {key : "author"}
db.child("users").child(self.userID).child("arguments").update(userData)
# Adds a new topic to the database making the arguments searchable
topic = data["topic"]
authorInfo = {key : self.userID}
topicKey = db.child("topics").child(topic).child("arguments").update(authorInfo)
return (data["title"])
except:
pass
else:
return None
#returns the list of critical questions that can be used to attack an argument
def return_criticalQuestions(self, argumentKey, originalKey):
try:
if originalKey == argumentKey:
argumentType = db.child("arguments").child(argumentKey).child("argumentType").get().val()
else:
argumentType = db.child("arguments").child(originalKey).child('argumentSchema').child(argumentKey).child("argumentType").get().val()
criticalQuestions = db.child("argumentType").child(argumentType).child("criticalQuestions").get().val()
criticalQuestions.pop(0)
return criticalQuestions
except:
return None
#return a list of the argument types the user can choose from
    def return_argumentTypes(self):
        """Return {argument_type_name: description} for every argument type.

        NOTE(review): assumes each type's 'description' field is a list
        whose first element is a placeholder and whose second element is
        the human-readable description -- confirm against the database
        layout.
        """
        argumentTypeTitle = db.child("argumentType").get().val()
        # Round-trip through pandas to normalise the snapshot into a
        # plain dict-of-dicts keyed by type name.
        data = pd.DataFrame(argumentTypeTitle).to_dict()
        listTypes = {}
        for argumentType in argumentTypeTitle:
            description = data[argumentType]["description"]
            # Drop the placeholder so description[0] is the real text.
            description.pop(0)
            listTypes[argumentType] = description[0]
        return listTypes
#returns the formatting of the argument that can be used to attack an argument
def return_argumentFormats(self, argumentType):
try:
argumentFormat = db.child("argumentType").child(argumentType).child("format").get().val()
argumentFormat.pop(0)
return argumentFormat
except:
return None
#returns the current user's arguments
def return_arguments(self):
if self.check_logged_in():
argumentKey = db.child('users').child(self.userID).child('arguments').get().val()
toReturn = {}
if argumentKey != None:
for key, value in argumentKey.items():
toReturn[key] = db.child('arguments').child(key).get().val()
toReturn[key]["originalKey"] = key
argumentKey = db.child('users').child(self.userID).child('attacks').get().val()
if argumentKey != None:
for key, value in argumentKey.items():
#key is the key of the attacking argument
#value is the key of the original argument
toReturn[key] = db.child('arguments').child(value).child('argumentSchema').child(key).get().val()
toReturn[key]["originalKey"] = value
return toReturn
else:
return None
# Return the entire schema of an argument i.e. all attacking arguments within one debate
    def return_schema(self, originalKey):
        """Return the whole debate: the root argument plus every attacker.

        Result is a dict keyed by argument key; the root argument sits
        under its own key with its nested 'argumentSchema' removed (the
        attackers are flattened alongside it instead).
        """
        originalArgument = db.child('arguments').child(originalKey).get().val()
        try:
            # Drop the nested subtree so the root entry holds only its
            # own fields; the attackers are merged in below.
            originalArgument.pop('argumentSchema', None)
        except:
            # originalArgument may be None when the key does not exist.
            print("Error in returning the schema")
            pass
        argument = db.child('arguments').child(originalKey).child('argumentSchema').get()
        # Round-trip through pandas to normalise the snapshot into a
        # plain dict-of-dicts keyed by attacker key.
        data = pd.DataFrame(argument.val())
        inclOriginal = data.to_dict()
        inclOriginal[originalKey] = originalArgument
        return inclOriginal
# Search through the arguments in the database by a kew word / topic
def search_arguments(self, topic):
topic = topic.title()
data = db.child("topics").child(topic).child("arguments").get().val()
if data is not None:
try:
dataInDict = {}
for key, value in data.items():
argumentInfo = db.child("arguments").child(key).get().val()
dataInDict[key] = argumentInfo
return dataInDict
except:
return None
else:
return None
# Return the number of votes for an undecided argument
def returnVotes(self, originalKey, argumentKey):
try:
if originalKey == argumentKey:
# Check that the argument is not self attacking
selfAttackCheck = db.child('arguments').child(originalKey).child('selfAttack').get().val()
if selfAttackCheck == None:
selfAttackCheck = False
try:
votes = db.child('arguments').child(originalKey).child('votes').get().val()
return [selfAttackCheck, votes]
except:
return[False, 0]
else:
selfAttackCheck = db.child('arguments').child(originalKey).child('argumentSchema').child(argumentKey).child('selfAttack').get().val()
if selfAttackCheck == None:
selfAttackCheck = False
try:
votes = db.child('arguments').child(originalKey).child('argumentSchema').child(argumentKey).child('votes').get().val()
return [selfAttackCheck, votes]
except:
return[selfAttackCheck, 0]
except:
return [True, 0]
# Allow the user to vote on an argument
def vote(self, originalKey, argumentKey):
votes = self.returnVotes(originalKey, argumentKey)
if votes[0] == False:
try:
votes[1] += 1
except:
pass
vote = votes[1]
increaseVotes = {"votes": vote}
if originalKey == argumentKey:
votes = db.child('arguments').child(originalKey).update(increaseVotes)
else:
votes = db.child('arguments').child(originalKey).child('argumentSchema').child(argumentKey).update(increaseVotes)
return ("Your vote has been logged")
else:
return('Sorry you cannot vote on a self attacking argument')
# Return the directed graph of attacks of an argument schema
    def return_graph_data(self, originalKey):
        """Build node/edge data for the attack graph of one debate.

        Returns ``[nodes, edges]`` where nodes maps argument key ->
        {"title", "number", "labelling"} and edges is a list of
        (attacker_number, target_number) tuples.  Numbers are small
        sequential ids assigned lazily to arguments that take part in
        at least one attack.
        """
        schema = self.return_schema(originalKey)
        nodes = {}
        edges = []
        edgesNames = {}  # argument key -> sequential node number
        name = 1  # next free node number
        try:
            for key, value in schema.items():
                # Collapse the three boolean labelling flags into one word.
                if value["labellings"]["in"] == True:
                    labelling = "in"
                elif value["labellings"]["out"] == True:
                    labelling = "out"
                else:
                    labelling = "undecided"
                nodes[key] = [value["title"], labelling]
                try:
                    attacking = value["attacking"]
                    for aKey, aValue in attacking.items():
                        # Assign node numbers lazily, only to arguments
                        # that appear in an attack relation.
                        if key not in edgesNames and aKey not in edgesNames:
                            edgesNames[key] = name
                            name += 1
                            edgesNames[aKey] = name
                            name += 1
                        elif key not in edgesNames:
                            edgesNames[key] = name
                            name += 1
                        elif aKey not in edgesNames:
                            edgesNames[aKey] = name
                            name += 1
                        edges.append((edgesNames[key], edgesNames[aKey]))
                except:
                    # Root arguments have no 'attacking' entry; skip them.
                    pass
            for key, value in nodes.items():
                # NOTE(review): an argument involved in no attack never
                # receives a number, so edgesNames[key] raises KeyError
                # here and the outer except abandons this relabelling
                # loop part-way through -- confirm whether that is ever
                # hit in practice.
                nodes[key] = {"title": value[0], "number": edgesNames[key], "labelling": value[1]}
        except:
            print("Error returning graph schema data")
            pass
        return [nodes, edges]
| {"/argupedia/argupedia/argupedia/views.py": ["/argupedia/argupedia/argupedia/database.py"], "/argupedia/argupedia/argupedia/urls.py": ["/argupedia/argupedia/argupedia/views.py"]} |
73,713 | SB48/Argupedia | refs/heads/master | /argupedia/argupedia/argupedia/views.py |
from django.http import HttpResponse
from django.conf import settings
from django.shortcuts import render
from django.contrib import auth
from .database import Database
import pyrebase
# Firebase project configuration.
# SECURITY NOTE(review): these credentials are committed in source.  A
# Firebase web apiKey is not strictly secret, but consider loading this
# block from the environment / Django settings instead of hard-coding it.
config = {
    'apiKey': "AIzaSyDTNvuZOGHUuuG1PTkBZOW64xdb9Ry5TWE",
    'authDomain': "argupedia-d2e12.firebaseapp.com",
    'databaseURL': "https://argupedia-d2e12.firebaseio.com",
    'projectId': "argupedia-d2e12",
    'storageBucket': "argupedia-d2e12.appspot.com",
    'messagingSenderId': "356627337302",
    'appId': "1:356627337302:web:db1808484ddac94be2ebf7",
    'measurementId': "G-8JS80GCSHV"
}
# Module-level handles shared by every view in this file.
firebase = pyrebase.initialize_app(config)
authF = firebase.auth()      # Firebase e-mail/password authentication
dbF = firebase.database()    # raw realtime-database handle
db = Database()              # project wrapper around the database
#Load Home Page:
def home_page(request):
    """Render the landing page with the image carousel.

    Bug fix: the context dict previously listed the "uid" key twice;
    the duplicate (identical) entry has been removed.
    """
    images = db.return_images()
    uid = db.return_uid()
    context = {"title": "Home", "uid": uid, "schema": True, "images": images}
    template_name = 'argupedia/index.html'
    return render(request, template_name, context)
#check if user is logged in
def logged_in(request):
    """Best-effort check for a logged-in Firebase user.

    NOTE(review): pyrebase's auth object has no ``currentUser()`` /
    ``getInstance()`` methods (those are Android SDK names), so the try
    body always raises and this function effectively always returns
    False.  Session state is actually tracked via ``db`` and
    ``request.session`` elsewhere in this module.
    """
    try:
        uID = firebase.auth().currentUser().getIdToken()
        return authF.getInstance().getCurrentUser()
    except:
        print("Error logging user in")
        return False
#Load register user page
def register_page(request):
    """Render the account-registration form."""
    return render(
        request,
        'argupedia/register.html',
        {"title": "Register", "uid": db.return_uid()},
    )
#Register user's details
def post_register(request):
    """Create a Firebase account from the registration form and log in.

    On failure re-renders the registration page with an error message;
    on success renders the login-success page.
    """
    data = {
        "fName": request.POST.get('fName'),
        "lName" : request.POST.get('lName'),
        "username": request.POST.get('username'),
        "email": request.POST.get('email')
    }
    email = request.POST.get('email')
    password = request.POST.get('password')
    try:
        user = authF.create_user_with_email_and_password(email, password)
        userID = user['localId']
        # Store the profile details under the new user's id.
        dbF.child("users").child(userID).set(data)
        # Establish the session immediately after registration.
        # NOTE(review): the HttpResponse returned by post_login is
        # discarded on purpose; only its session side effects are wanted.
        post_login(request)
    except:
        context = {"message": "There was an error creating your account", "uid": None}
        template_name = 'argupedia/register.html'
        return render(request, template_name, context)
    uid = db.return_uid()
    context = {"e": "Welcome {}".format(email), "uid": uid}
    template_name = 'argupedia/login_success.html'
    return render(request, template_name, context)
def login_page(request):
    """Render the login form."""
    return render(
        request,
        'argupedia/login.html',
        {"title": "Log In", "uid": db.return_uid()},
    )
def post_login(request):
    """Authenticate the posted credentials against Firebase.

    On success stores the Firebase localId in the Django session and in
    the shared Database wrapper, then renders the success page; on
    failure re-renders the login form with an error message.
    """
    email = request.POST.get('email')
    password = request.POST.get('password')
    try:
        user = authF.sign_in_with_email_and_password(email, password)
        session_id = user['localId']
        request.session['uid'] = str(session_id)
        # Keep the shared wrapper in sync so other views can query the
        # current user without touching the Django session.
        db.set_uid(session_id, user['idToken'])
    except:
        context = {"message":"invalid credentials", "uid": None}
        template_name = 'argupedia/login.html'
        return render(request, template_name, context )
    uid = db.return_uid()
    context = {"e": "Welcome {}".format(email), "uid": uid}
    template_name = 'argupedia/login_success.html'
    return render(request, template_name, context)
def log_out(request):
    """Log the user out of both Django and the Firebase wrapper.

    Renders the login-success template with either a success or an
    error message.
    """
    try:
        auth.logout(request)
        db.log_out()
        context = {"message": "You have successfully logged out", "uid": None}
    except:
        print("Error Logging the user out")
        # Logout failed: keep reporting the still-active uid.
        uid = db.return_uid()
        context = {"message": "There was an error logging you out", "uid": uid}
    template_name = 'argupedia/login_success.html'
    return render(request, template_name, context)
#function does not currently work - could be implemented in the future
def delete_user(request):
    """Attempt to delete the current account (known-broken placeholder).

    NOTE(review): django.contrib.auth exposes no ``delete_user``
    function, so the try block always fails and the error branch
    renders -- matching the "does not currently work" comment above.
    """
    uid = db.return_uid()
    token = db.return_firebaseID()
    try:
        auth.delete_user(uid)
        db.delete_user()
        context = {"message": "You have successfully deleted your account", "uid": None}
    except:
        uid = db.return_uid()
        context = {"message": "There was an error deleting your account", "uid": uid}
    template_name = 'argupedia/login_success.html'
    return render(request, template_name, context)
def about_page(request):
    """Render the static About page."""
    return render(
        request,
        'argupedia/about.html',
        {"title": "About", "uid": db.return_uid()},
    )
#add an argument
def add_argument(request):
    """Handle submission of a brand-new root argument.

    Collates the four content fields into one newline-separated string,
    builds the argument record and delegates persistence to the
    Database wrapper.  Renders a success page, or the creation form
    with an error message.

    Bug fix: "argumentType" was previously listed twice in the data
    dict literal; the duplicate entry has been removed.
    """
    # The checkbox is absent from POST data when unticked.
    selfAttack = request.POST.get('selfAttack') is not None
    contentString = str(request.POST.get('content-1')) + " \n " + str(request.POST.get('content-2')) + " \n " + str(request.POST.get('content-3')) + " \n " + str(request.POST.get('content-4'))
    data = {
        "argumentType": request.POST.get('argumentType'),
        "topic": request.POST.get('topic'),
        "title" : request.POST.get('title'),
        "urlReference": request.POST.get('urlReference'),
        "fileReference": request.POST.get('fileReference'),
        "image": request.POST.get('image'),
        "selfAttack" : selfAttack,
        "content": contentString,
        'votes': 0,
    }
    fileRef = request.POST.get('fileReference')
    image = request.POST.get('image')
    passCheck = db.add_argument(data, fileRef, image)
    uid = db.return_uid()
    if passCheck is None:
        context = {"message": "Error", "uid": uid}
        template_name = 'argupedia/create_argument.html'
    else:
        context = {"e": "Your argument {} has been successfully submitted".format(passCheck), "uid": uid}
        template_name = 'argupedia/login_success.html'
    return render(request, template_name, context)
def search_argument_page(request):
    """Render the 'start a debate' search page.

    NOTE(review): unlike every other view this passes
    ``db.return_username()`` (not ``db.return_uid()``) under the "uid"
    context key -- confirm whether the template really wants the
    username here or whether this is an inconsistency.
    """
    argumentTypes = db.return_argumentTypes()
    template_name = 'argupedia/search_argument.html'
    uid = db.return_username()
    return render(request, template_name, {"uid" : uid, "types": argumentTypes})
def create_argument_page(request):
    """Render the argument-writing form for the chosen argument type."""
    chosen_type = request.POST.get('argumentType')
    context = {
        "argumentType": chosen_type,
        "format": db.return_argumentFormats(chosen_type),
        "uid": db.return_uid(),
    }
    return render(request, 'argupedia/create_argument.html', context)
def turn_content_to_list(contentString):
    """Parse a stringified Python list (e.g. "['a', 'b']") back into a
    list of strings.

    Strips the surrounding brackets, splits on commas and removes
    single quotes.  Whitespace around items is preserved, matching the
    original repr's spacing.
    """
    stripped = contentString.strip('][')
    return [piece.replace("'", "") for piece in stripped.split(',')]
def view_argument_info_page(request):
    """Render the detail page for a single argument, including its vote
    count and self-attack status."""
    current_uid = db.return_uid()
    root_key = request.POST.get('originalKey')
    arg_key = request.POST.get('argumentKey')
    self_attack, vote_count = db.returnVotes(root_key, arg_key)
    details = {
        "selfAttack": self_attack,
        "votes": vote_count,
        "originalKey": root_key,
        "argumentKey": arg_key,
        "content": request.POST.get('content'),
        "title": request.POST.get('title'),
        "urlReference": request.POST.get('urlReference'),
        "fileReference": request.POST.get('fileReference'),
        "labellings": request.POST.get('labellings'),
    }
    return render(request, 'argupedia/view_argument_info.html',
                  {"value": details, "uid": current_uid})
def view_arguments_page(request):
    """Show every argument/attack contributed by the current user.

    Bug fix: when the user had contributions the context was rebuilt
    without the "types" entry (the first assignment was dead code); the
    argument-type list is now always passed to the template for
    logged-in users.
    """
    uid = db.return_uid()
    template_name = 'argupedia/view_arguments.html'
    if uid is None:
        # Not logged in: nothing to show.
        return render(request, template_name, {"uid": uid, "arguments": None, "types": None})
    argumentTypes = db.return_argumentTypes()
    argument = db.return_arguments()
    if argument:
        return render(request, template_name, {"uid": uid, "arguments": argument, "types": argumentTypes})
    return render(request, template_name, {"uid": uid, "arguments": None, "types": argumentTypes})
#reference : https://stackoverflow.com/questions/298772/django-template-variables-and-javascript Daniel Munoz
# this function recieves the necessary information from the database in the appropriate form
# this information then has to be converted to javascript before being run in the views
def view_argument_schema_page(request):
    """Render a whole debate (root argument plus attackers) with an
    optional vis.js DOT graph of the attack relation.

    The DOT string is emitted as a ready-made JavaScript statement
    because Django template variables cannot easily be consumed by the
    page's JS (see the referenced Stack Overflow answer above).
    """
    uid = db.return_uid()
    originalKey = request.POST.get('originalKey')
    arguments = db.return_schema(originalKey)
    argumentTypes = db.return_argumentTypes()
    template_name = 'argupedia/view_schema.html'
    if arguments is not None:
        graphFile = db.return_graph_data(originalKey)
        # Only build the DOT string when there is at least one attack edge.
        if len(graphFile[1]) >= 1:
            # Assemble e.g.: var dot = "dinetwork {node[shape=circle]; 1 -> 2 }";
            dot = "var dot = \"dinetwork {node[shape=circle];"
            for tuple in graphFile[1]:
                toAdd = " " + str(tuple[0]) + " -> " + str(tuple[1]) + ";"
                dot = dot + "" + toAdd + ""
            # Drop the trailing semicolon before closing the declaration.
            dot = dot[:-1]
            dot = dot + " }\";"
            context = {"originalKey": originalKey, "arguments": arguments, "schema" : True, "dot": dot, "names" : graphFile[0], "uid": uid, "types": argumentTypes}
        else:
            context = {"originalKey": originalKey, "arguments": arguments, "schema" : True, "dot": None, "names" : None, "uid": uid, "types" : argumentTypes}
    else:
        context = {"originalKey": originalKey, "arguments": None, "schema" : True, "uid": uid}
    return render(request, template_name, context)
def view_graph_page(request):
    """Render the standalone attack-graph page for one debate.

    Bug fix: the first assignment to ``originalKey`` (from the
    "originalKey" POST field) was dead code -- it was immediately
    overwritten by the "key" field, which is what is actually used.
    """
    uid = db.return_uid()
    originalKey = request.POST.get("key")
    graphFile = db.return_graph_data(originalKey)
    template_name = 'argupedia/view_graph.html'
    context = {"graph": graphFile, "uid": uid}
    return render(request, template_name, context)
def search_argument_nav_page(request):
    """Render search results for the navbar search box."""
    current_uid = db.return_uid()
    term = request.POST.get('searchTerm')
    context = {
        "arguments": db.search_arguments(term),
        "uid": current_uid,
        "types": db.return_argumentTypes(),
    }
    return render(request, 'argupedia/search_results.html', context)
def critical_questions_page(request):
    """Show the critical questions a user may pose against an argument.

    Minor fix: ``db.return_uid()`` was fetched twice; the redundant
    second call has been removed.
    """
    uid = db.return_uid()
    topic = request.POST.get('topic')
    argumentType = request.POST.get('argumentType')
    argumentFormat = db.return_argumentFormats(argumentType)
    key = request.POST.get('key')
    originalKey = request.POST.get('originalKey')
    criticalQuestions = db.return_criticalQuestions(key, originalKey)
    context = {"uid": uid, "topic": topic, "key": key, "originalKey": originalKey, "criticalQuestions": criticalQuestions, "argumentType": argumentType, "format" : argumentFormat}
    template_name = 'argupedia/critical_questions.html'
    return render(request, template_name, context)
#adds an attacking argument
def add_attack(request):
    """Handle submission of an attack on an existing argument.

    Builds the attacking-argument record (including alternate /
    self-attack flags from the optional checkboxes) and delegates
    persistence to the Database wrapper.

    Minor fix: removed the duplicated ``contentString = contentString =``
    assignment.
    """
    uid = db.return_uid()
    originalKey = request.POST.get('originalKey')
    # Checkboxes are absent from POST data when unticked.
    alternate = request.POST.get('alternate') is not None
    selfAttack = request.POST.get('selfAttack') is not None
    contentString = str(request.POST.get('content-1')) + " \n " + str(request.POST.get('content-2')) + " \n " + str(request.POST.get('content-3')) + " \n " + str(request.POST.get('content-4'))
    data = {
        "title" : request.POST.get('title'),
        "content": contentString,
        "urlReference": request.POST.get('urlReference'),
        "fileReference": request.POST.get('fileReference'),
        "image": request.POST.get('image'),
        "argumentType" : request.POST.get('argumentType'),
        "criticalQuestion": request.POST.get('criticalQuestion'),
        "attacking" : {request.POST.get('attackingKey') : "attacking"},
        "originalKey": originalKey,
        "attackedBy": "",
        "alternate" : alternate,
        "selfAttack": selfAttack,
        "votes": 0,
    }
    fileRef = request.POST.get('fileReference')
    image = request.POST.get('image')
    passCheck = db.add_attack(data, originalKey, fileRef, image, [request.POST.get('attackingKey')])
    if passCheck is None:
        context = {"message": "Error", "uid": uid}
        template_name = 'argupedia/create_argument.html'
    else:
        context = {"e": "Your argument {} has been successfully submitted".format(passCheck), "uid": uid}
        template_name = 'argupedia/login_success.html'
    return render(request, template_name, context)
def vote_argument(request):
    """Apply the user's vote to an argument and show the outcome message."""
    current_uid = db.return_uid()
    outcome = db.vote(request.POST.get('originalKey'),
                      request.POST.get('argumentKey'))
    return render(request, 'argupedia/login_success.html',
                  {"e": outcome, "uid": current_uid})
| {"/argupedia/argupedia/argupedia/views.py": ["/argupedia/argupedia/argupedia/database.py"], "/argupedia/argupedia/argupedia/urls.py": ["/argupedia/argupedia/argupedia/views.py"]} |
73,714 | SB48/Argupedia | refs/heads/master | /argupedia/argupedia/argupedia/urls.py | """argupedia URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf import settings
from django.urls import path
from .views import home_page, register_page, login_page, post_login, log_out, post_register, log_out, search_argument_page, add_argument, create_argument_page, view_arguments_page, search_argument_nav_page, critical_questions_page, add_attack, view_argument_schema_page, about_page, view_graph_page, view_argument_info_page, delete_user, vote_argument
# Route table mapping each URL to its view (see views.py).
urlpatterns = [
    # Landing page
    path('', home_page, name='home_page'),
    # Account management
    path('register/', register_page, name='register_page'),
    path('login/', login_page, name= 'login_page'),
    path('post_login/', post_login, name = 'post_login'),
    path('post_register/', post_register, name = 'post_register'),
    path('delete_account/', delete_user, name= 'delete_user'),
    # Argument creation and voting
    path('add_argument/', add_argument, name = 'add_argument'),
    path('add_attack/', add_attack, name = 'add_attack'),
    path('vote_argument/', vote_argument, name = 'vote_argument'),
    # Argument display, search and browsing
    path('view_argument/', view_argument_info_page, name = 'view_argument_info_page'),
    path('about/', about_page, name = 'about_page'),
    path('graph/', view_graph_page, name = 'view_graph_page'),
    path('start_a_debate/', search_argument_page, name = 'search_argument_page'),
    path('get_writing/', create_argument_page, name = 'create_argument_page'),
    path('your_contributions/', view_arguments_page, name = 'view_arguments_page'),
    path('search_results/', search_argument_nav_page, name = 'search_argument_nav_page'),
    path('schema_page/', view_argument_schema_page, name = 'view_argument_schema_page'),
    path('critical_questions/', critical_questions_page, name = 'critical_questions_page'),
    # Session teardown and Django admin
    path('log_out/', log_out, name = 'log_out'),
    path('admin/', admin.site.urls),
]
| {"/argupedia/argupedia/argupedia/views.py": ["/argupedia/argupedia/argupedia/database.py"], "/argupedia/argupedia/argupedia/urls.py": ["/argupedia/argupedia/argupedia/views.py"]} |
73,716 | Prashambhuta/my_projects | refs/heads/master | /web_apps/books_review/import.py | import csv
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
# create engine, set up db
# Connect to the database named by $DATABASE_URL and create a
# thread-local session factory shared by main().
engine = create_engine(os.getenv("DATABASE_URL"))
db = scoped_session(sessionmaker(bind=engine))
def main():
    """Bulk-load books.csv into the ``books`` table.

    Improvement: the CSV file is now opened in a ``with`` block so it is
    always closed, even if an insert fails.

    NOTE(review): a header row in books.csv would be inserted like any
    other record (as before) -- confirm the file's first line, and skip
    it with ``next(csv_reader)`` if needed.
    """
    books_done = 0
    with open("books.csv") as csv_file:
        for isbn, title, author, year in csv.reader(csv_file):
            db.execute(
                "INSERT INTO books (isbn, title, author, year) "
                "VALUES (:isbn, :title, :author, :year)",
                {"isbn": isbn, "title": title, "author": author, "year": year},
            )
            books_done += 1
            # Progress indicator for long imports.
            if books_done % 50 == 0:
                print("Books done: %d" % books_done)
    db.commit()
# Run the import only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
73,717 | Prashambhuta/my_projects | refs/heads/master | /data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/spiders/flipkart_men_tshirt.py | # -*- coding: utf-8 -*-
import scrapy
from ..items import FlipkartScrapperItem
class FlipkartMenTshirtSpider(scrapy.Spider):
    """Scrapy spider crawling Flipkart's men's t-shirt category.

    Yields one FlipkartScrapperItem per parsed results page; each item
    field holds the *list* of values collected from that page.
    """
    name = 'flipkart_men_tshirt'
    # Total result-page count, discovered from page 1 on the first
    # parse() call (class attribute so it is shared across callbacks).
    page_number = 2  # initiating page number variable
    # Guard so the page count is read only once.
    iters = 0  # iteration to get page number once
    start_urls = [
        'https://www.flipkart.com/men/tshirts/pr?sid=2oq%2Cs9b'
        '%2Cj9y&otracker=nmenu_sub_Men_0_T-Shirts&page=1']

    def parse(self, response):
        """Extract listing fields from one results page and queue more pages."""
        # NOTE(review): a single item instance is mutated and re-yielded
        # for every listing selector -- a known Scrapy pitfall; confirm
        # downstream pipelines see the values they expect.
        scraped_items = FlipkartScrapperItem()
        # Extracting total number of pages to be scraped, ONCE!!!
        # if iters = 0, get page numner
        if FlipkartMenTshirtSpider.iters == 0:
            # Presumably the banner reads "Page 1 of N"; token 3 is N
            # with thousands separators -- confirm against the live page.
            page_number_str = \
                response.css('._2zg3yZ span::text').get().split(' ')[3]
            # assign page_number to the nonlocal variable
            FlipkartMenTshirtSpider.page_number = \
                int((page_number_str.replace(',', '')))
            FlipkartMenTshirtSpider.iters += 1
        for listings in response.css('.IIdQZO._1SSAGr'):
            # Extracting brand_name
            brand_name = listings.css('._2B_pmu::text').getall()
            # Extracting product name & product url
            product_name = listings.css('._2mylT6::text').getall()
            product_url_full = listings.css('._2mylT6::attr(href)').getall()
            # Keep only the path before the query string, made absolute.
            product_url = [('http://flipkart.com' + s.split('?')[0])
                           for s in product_url_full]
            # Extracting product_id using split & splice
            # (first query parameter, value after the 4-char "pid=" prefix)
            raw_string = [s.split('?')[1] for s in product_url_full]
            product_id = [s.split('&')[0][4:] for s in raw_string]
            # Extracting listing_id using split & splice
            listing_id = [s.split('&')[1][4:] for s in raw_string]
            # Extracting image url - not possible (Ajax request)
            # thumb_image_url = listings.css('img._3togXc').extract()
            # thumb_image_url = listings.css('img').xpath('@src').getall()
            # Extracting offer price, with removing 'rupee' symbol
            offer_price = listings.css('._1vC4OE::text').extract()
            offer_price = [s.replace('\u20B9', '') for s in offer_price]
            # Extracting original price, with removing 'rupee' symbol
            original_price_raw = listings.css('._3auQ3N::text').getall()
            # Presumably odd indices hold the price text and even indices
            # the currency symbol -- confirm against the markup.
            original_price = [original_price_raw[i] for i in
                              range(1, len(original_price_raw), 2)]
            scraped_items['brand_name'] = brand_name
            scraped_items['product_name'] = product_name
            scraped_items['product_link'] = product_url
            scraped_items['offer_price'] = offer_price
            scraped_items['original_price'] = original_price
            # scraped_items['mainimage_url'] = thumb_image_url
            scraped_items['product_id'] = product_id
            scraped_items['listing_id'] = listing_id
            yield scraped_items
        # Queue follow-up pages by rewriting the trailing page number.
        for i in range(2, 26):
            # Change 26 to FlipkartMenTshirtSpider.page_number for scraping
            # multiple pages
            next_page = FlipkartMenTshirtSpider.start_urls[0][:-1] + str(i)
            yield response.follow(next_page, callback=self.parse)
| {"/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/spiders/flipkart_men_tshirt.py": ["/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/items.py"]} |
73,718 | Prashambhuta/my_projects | refs/heads/master | /web_apps/books_review/application.py | import os
import requests
from flask import Flask, session, render_template, request, redirect, url_for
from flask_session import Session
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
app = Flask(__name__)
# Check for environment variable
# Fail fast when the Postgres connection string is missing.
if not os.getenv("DATABASE_URL"):
    raise RuntimeError("DATABASE_URL is not set")
# Configure session to use filesystem
# Server-side sessions stored on disk rather than in signed cookies.
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Set up database
# Thread-local SQLAlchemy session shared by all request handlers.
engine = create_engine(os.getenv("DATABASE_URL"), pool_size=10, max_overflow=10)
db = scoped_session(sessionmaker(bind=engine))
# Default route
@app.route("/", methods=["POST", "GET"])
def index():
# for 'get' method
if request.method == "GET":
try:
if session["user_id"]:
return redirect(url_for("search"))
except KeyError:
return render_template("index.html")
else:
# if session["user_id"]:
# return render_template("index.html", session=session["user_id"])
return render_template("index.html")
# registration route
@app.route("/register", methods=["POST", "GET"])
def register():
""" Register a new user """
if request.method == "POST":
username = request.form.get("username")
password = request.form.get("password")
if username.strip(" ") != "":
check_available_username = db.execute("SELECT * from users WHERE username=:username", {"username": username}).fetchall()
if not check_available_username:
# execute insertion
db.execute("INSERT INTO users (username, password) VALUES (:username, :password)", {"username": username, "password": password})
db.commit()
# execute selection
user_details = db.execute("SELECT * from users WHERE username=:username and password=:password", {"username": username, "password": password}).fetchone()
session["user_id"] = user_details[0]
return redirect(url_for("search"))
else:
return render_template("error.html", error_message="Username exists. Try again with different username.")
elif username.strip(" ") == "":
return render_template ("error.html", error_message ="Enter valid username & password.")
elif request.method == "GET":
render_template ("register.html")
# return render_template("error.html",error_message="Kindly provide a username and password.")
return render_template("register.html")
# Login route
@app.route("/login", methods=["GET", "POST"])
def login():
""" Logging in a user. """
if request.method == "POST":
username = request.form.get("username")
password = request.form.get("password")
# check credentials
check_login_credentials = db.execute("SELECT * FROM users where username= '%s' AND password= '%s'" % (username, password)).fetchone()
if not check_login_credentials:
return render_template("error.html", error_message="Invalid credentials.")
return redirect(url_for("login"))
else:
session["user_id"] = check_login_credentials[0]
return redirect(url_for("search"))
else:
return render_template("login.html")
# Logout route
@app.route("/logout", methods=["GET"])
def logout():
""" Logging out a user """
session.clear()
return render_template("index.html")
# Search route
@app.route("/search", methods=["POST", "GET"])
def search():
""" Defining the search method. """
# check here
if request.method == "GET":
try:
if session["user_id"]:
return render_template("search.html")
except:
return render_template("error.html", error_message="Kindly login or register")
if request.method == "POST":
isbn_search = request.form.get("isbn")
title_search = request.form.get("title")
author_search = request.form.get("author")
if isbn_search != "":
result = db.execute("SELECT * FROM books WHERE isbn ~* :isbn LIMIT 10", {"isbn": isbn_search}).fetchall()
db.commit()
return render_template("search.html", result=result)
elif title_search != "":
title_search = title_search.title()
result = db.execute("SELECT * FROM books WHERE title ~* :title LIMIT 10", {"title": title_search}).fetchall()
db.commit()
return render_template("search.html", result=result)
elif author_search != "":
author_search = author_search.title()
result = db.execute("SELECT * FROM books WHERE author ~* :author LIMIT 10", {"author": author_search}).fetchall()
db.commit()
return render_template("search.html", result=result)
else:
return render_template("error.html", error_message="Enter valid search parameters.")
# Book route
@app.route("/book/<int:book_id>", methods=["POST", "GET"])
def book(book_id):
""" List details about book, inc. goodreads number. """
# cross check if books exists
if request.method == "GET":
book = db.execute("SELECT * FROM books WHERE id = %s" % book_id).fetchone()
# getting no_of_ratings & avg_rating from goodreads api
goodreads_object = requests.get("https://goodreads.com/book/review_counts.json?", params={"key": "OmILO8KjgxHJOvvnGxDtUw", "isbns": "%s" % book.isbn})
goodreads_data_raw = goodreads_object.json()
goodreads_data = goodreads_data_raw['books']
# get average rating
avg_rating = float(goodreads_data[0]['average_rating'])
# get total no of reviews
no_of_rating = goodreads_data[0]['work_ratings_count']
# display existing user reviews
existing_reviews = db.execute("SELECT username, rating, text from reviews INNER JOIN users ON reviews.user_id = users.id INNER JOIN books ON reviews.book_id = books.id WHERE books.id = %s" % (book_id)).fetchall()
return render_template("book.html", result=book, avg_rating=avg_rating, no_of_rating=no_of_rating, existing_reviews=existing_reviews)
elif request.method == "POST":
user_rating = request.form.get("rating")
user_review = request.form.get("review")
# checking for user_id
user_id = session["user_id"]
# check if user_review is non None
if user_review == "":
return render_template("error.html", error_message="Cannot have empty fields")
# check if previous entry exists
else:
get_review = db.execute("SELECT username, rating, text from reviews INNER JOIN users ON reviews.user_id = users.id INNER JOIN books ON reviews.book_id = books.id WHERE users.id = %s AND books.id = %s" % (user_id, book_id)).fetchall()
if not get_review:
# commit the review to database table review
db.execute("INSERT INTO reviews (book_id, rating, text, user_id) VALUES (:book_id, :rating, :text, :user_id)", {"book_id": book_id, "rating": user_rating, "text": user_review, "user_id": user_id})
db.commit()
return redirect("/book/" + str(book_id))
else:
return render_template("error.html", error_message="You have already reviewed this book.")
# adding fields for user to leave review
else:
return render_template("error.html", error_message="No such book found")
# api return
@app.route("/api/<isbn>", methods=["GET"])
def api(isbn):
""" API route for users """
if isbn == "":
return render_template("error.html", error_message="ISBN not found")
if isbn:
books_object = db.execute("SELECT * FROM books WHERE isbn = '%s'" % isbn).fetchone()
if books_object:
reviews_object = db.execute("SELECT COUNT(*), AVG(rating) FROM reviews WHERE book_id = %d" % books_object.id).fetchone()
# converting to printable objects
review_count = reviews_object.count
review_average_rating = reviews_object.avg
if review_average_rating != None:
api_result = {
"title": books_object.title,
"author": books_object.author,
"year": int(books_object.year),
"isbn": str(isbn),
"review_count": review_count,
"average_rating": float('%.2f' % review_average_rating)
}
return (api_result)
if review_average_rating == None:
api_result = {
"title": books_object.title,
"author": books_object.author,
"year": int(books_object.year),
"isbn": str(isbn),
"review_count": review_count,
"average_rating": 'N/A'
}
return (api_result)
else:
error_404 = {
"Error": 404,
"Type": "ISBN NOT FOUND",
}
return render_template("error_404.html")
else:
error_404 = {
"Error": 404,
"Type": "ISBN NOT FOUND",
}
return render_template("error_404.html")
| {"/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/spiders/flipkart_men_tshirt.py": ["/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/items.py"]} |
73,719 | Prashambhuta/my_projects | refs/heads/master | /data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/items.py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
class FlipkartScrapperItem(scrapy.Item):
    # One scraped Flipkart product listing.
    # See https://docs.scrapy.org/en/latest/topics/items.html

    # Product identity and description.
    brand_name = scrapy.Field()
    product_name = scrapy.Field()
    # Pricing: current offer vs. original list price.
    offer_price = scrapy.Field()
    original_price = scrapy.Field()
    # Flipkart bookkeeping identifiers.
    product_link = scrapy.Field()
    product_id = scrapy.Field()
    listing_id = scrapy.Field()
| {"/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/spiders/flipkart_men_tshirt.py": ["/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/items.py"]} |
73,720 | Prashambhuta/my_projects | refs/heads/master | /data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/pipelines.py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo
from scrapy.exceptions import DropItem # to remove duplicates
from .settings import MONGO_URI, MONGO_DATABASE # import connection params
class FlipkartScrapperPipeline:
    """Persist scraped items to MongoDB, dropping duplicates within a run."""

    def __init__(self):
        # REMOTE CONNECTION
        # Connection params come from this project's settings module.
        self.connection = pymongo.MongoClient(MONGO_URI)
        # Remote database handle.
        db = self.connection[MONGO_DATABASE]
        # Remote collection (table, schema).
        self.collection = db['men_tshirt']
        # product_ids already processed during this crawl.
        self.ids_seen = set()

    def process_item(self, item, spider):
        """Upsert *item* keyed by product_id; raise DropItem for repeats.

        Raising DropItem makes Scrapy log the duplicate as dropped.
        """
        product_id = item['product_id']
        # BUG FIX: previously tested the literal string 'product_id' against
        # the (never-populated) set, so the duplicate check always failed and
        # every repeat was re-written to Mongo.
        if product_id in self.ids_seen:
            raise DropItem("Duplicate item found: %s" % item)
        self.ids_seen.add(product_id)
        self.collection.update({'product_id': product_id},
                               dict(item), upsert=True)
        return item
| {"/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/spiders/flipkart_men_tshirt.py": ["/data_scraping/flipkart/flipkart_scrapper/flipkart_scrapper/items.py"]} |
73,724 | kstepanovdev/egicli | refs/heads/master | /egicli/checkin.py | from __future__ import print_function
import os
import re
import click
from tabulate import tabulate
import requests
def oidc_discover(checkin_url):
    """Fetch and return the OIDC discovery document for *checkin_url*."""
    discovery_url = "%s/.well-known/openid-configuration" % checkin_url
    response = requests.get(discovery_url)
    response.raise_for_status()
    return response.json()
def token_refresh(
    checkin_client_id, checkin_client_secret, checkin_refresh_token, token_url
):
    """Exchange a Check-in refresh token at *token_url*; return the JSON reply."""
    payload = {
        "client_id": checkin_client_id,
        "grant_type": "refresh_token",
        "refresh_token": checkin_refresh_token,
        "scope": "openid email profile eduperson_entitlement",
    }
    basic_auth = None
    if checkin_client_secret:
        # Confidential client: authenticate with HTTP Basic and also
        # include the secret in the form body.
        basic_auth = (checkin_client_id, checkin_client_secret)
        payload["client_secret"] = checkin_client_secret
    response = requests.post(token_url, auth=basic_auth, data=payload)
    response.raise_for_status()
    return response.json()
def refresh_access_token(
    checkin_client_id, checkin_client_secret, checkin_refresh_token, checkin_url
):
    """Return a fresh access token from the discovered OIDC token endpoint."""
    token_endpoint = oidc_discover(checkin_url)["token_endpoint"]
    reply = token_refresh(
        checkin_client_id,
        checkin_client_secret,
        checkin_refresh_token,
        token_endpoint,
    )
    return reply["access_token"]
# Click group for token subcommands (`refresh` below attaches to it and
# cli.py mounts the group on the root CLI).  No docstring on purpose:
# click would surface it as help text.
@click.group()
def token():
    pass
# Refresh the Check-in access token and print the token response as a
# table; with --list-vos, also query userinfo and list the VOs the user
# is a member of.
@token.command()
@click.option(
    "--checkin-client-id",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_CLIENT_ID", None),
)
@click.option(
    "--checkin-client-secret",
    help="Check-in client secret",
    required=False,
    default=lambda: os.environ.get("CHECKIN_CLIENT_SECRET", None),
)
@click.option(
    "--checkin-refresh-token",
    # BUG FIX: help text was a copy-paste of the client-id help.
    help="Check-in refresh token",
    required=True,
    default=lambda: os.environ.get("CHECKIN_REFRESH_TOKEN", None),
)
@click.option(
    "--checkin-url",
    help="Check-in OIDC URL",
    required=True,
    default=lambda: os.environ.get("CHECKIN_OIDC_URL", "https://aai.egi.eu/oidc"),
)
@click.option("--list-vos/--no-list-vos", default=False, help="List user VOs")
def refresh(
    checkin_client_id,
    checkin_client_secret,
    checkin_refresh_token,
    checkin_url,
    list_vos,
):
    oidc_ep = oidc_discover(checkin_url)
    output = token_refresh(
        checkin_client_id,
        checkin_client_secret,
        checkin_refresh_token,
        oidc_ep["token_endpoint"],
    )
    access_token = output["access_token"]
    if list_vos:
        r = requests.get(
            oidc_ep["userinfo_endpoint"],
            headers={"Authorization": "Bearer %s" % access_token},
        )
        r.raise_for_status()
        vos = []
        # BUG FIX: the pattern is now a raw string with the literal dots in
        # "egi.eu" / "aai.egi.eu" escaped (they previously matched any
        # character).  "(.+)" captures the same strings as the old greedy
        # "(.*.)" (one or more characters).
        m = re.compile(r"urn:mace:egi\.eu:group:(.+):role=member#aai\.egi\.eu")
        for claim in r.json().get("eduperson_entitlement", []):
            vo = m.match(claim)
            if vo:
                vos.append(vo.groups()[0])
        output["VOs"] = "\n".join(vos)
    print(tabulate([(k, v) for k, v in output.items()], headers=["Field", "Value"]))
| {"/egicli/endpoint.py": ["/egicli/checkin.py"], "/egicli/cli.py": ["/egicli/checkin.py", "/egicli/endpoint.py"]} |
73,725 | kstepanovdev/egicli | refs/heads/master | /egicli/endpoint.py | from __future__ import print_function
import os
import time
import defusedxml.ElementTree as ET
import click
import jwt
from six.moves.urllib import parse
import requests
from tabulate import tabulate
from egicli.checkin import refresh_access_token
# Public (unauthenticated) GOCDB programmatic interface base URL; queried
# by get_sites() and find_endpoint() below.
GOCDB_PUBLICURL = "https://goc.egi.eu/gocdbpi/public/"
EC3_REFRESHTOKEN_TEMPLATE = """
description refreshtoken (
kind = 'component' and
short = 'Tool to refresh LToS access token.' and
content = 'Tool to refresh LToS access token.'
)
configure front (
@begin
- vars:
CLIENT_ID: %(client_id)s
CLIENT_SECRET: %(client_secret)s
REFRESH_TOKEN: %(refresh_token)s
tasks:
- name: Check if docker is available
command: which docker
changed_when: false
failed_when: docker_installed.rc not in [0,1]
register: docker_installed
- name: local install of egicli
block:
- name: Create dir /usr/local/ec3/
file: path=/usr/local/ec3/ state=directory
- name: install git
package:
name: git
state: present
- name: install egicli
pip:
name:
- git+http://github.com/enolfc/egicli@ec3
- cron:
name: "refresh token"
minute: "*/5"
job: "[ -f /usr/local/ec3/auth.dat ] && /usr/local/bin/egicli endpoint ec3-refresh --checkin-client-id {{ CLIENT_ID }} --checkin-client-secret {{ CLIENT_SECRET }} --checkin-refresh-token {{ REFRESH_TOKEN }} --auth-file /usr/local/ec3/auth.dat &> /var/log/refresh.log"
user: root
cron_file: refresh_token
state: present
when: docker_installed.rc not in [ 0 ]
- name: local install of egicli
block:
- cron:
name: "refresh token"
minute: "*/5"
job: "[ -f /usr/local/ec3/auth.dat ] && docker run -v /usr/local/ec3/auth.dat:/usr/local/ec3/auth.dat egifedcloud/egicli egicli endpoint ec3-refresh --checkin-client-id {{ CLIENT_ID }} --checkin-client-secret {{ CLIENT_SECRET }} --checkin-refresh-token {{ REFRESH_TOKEN }} --auth-file /usr/local/ec3/auth.dat &> /var/log/refresh.log"
user: root
cron_file: refresh_token
state: present
when: docker_installed.rc not in [ 1 ]
@end
)
"""
def get_sites():
    """Return the names of all Certified sites registered in GOCDB."""
    q = {"method": "get_site_list", "certification_status": "Certified"}
    url = "?".join([GOCDB_PUBLICURL, parse.urlencode(q)])
    r = requests.get(url)
    sites = []
    if r.status_code == 200:
        # GOCDB replies with an XML document of <SITE NAME="..."/> entries.
        root = ET.fromstring(r.text)
        for s in root:
            sites.append(s.attrib.get('NAME'))
    else:
        print("Something went wrong...")
        # BUG FIX: requests.Response has no `.status` attribute, so the
        # error path itself raised AttributeError; use `.status_code`.
        print(r.status_code)
        print(r.text)
    return sites
def find_endpoint(service_type, production=True, monitored=True, site=None):
    """Query GOCDB for service endpoints of *service_type*.

    Returns a list of [site_name, service_type, url] triples, optionally
    restricted to one site and filtered by production/monitored status.
    """
    q = {"method": "get_service_endpoint", "service_type": service_type}
    if monitored:
        q["monitored"] = "Y"
    if site:
        q["sitename"] = site
        sites = [site]
    else:
        sites = get_sites()
    url = "?".join([GOCDB_PUBLICURL, parse.urlencode(q)])
    r = requests.get(url)
    endpoints = []
    if r.status_code == 200:
        root = ET.fromstring(r.text)
        for sp in root:
            if production:
                prod = sp.find("IN_PRODUCTION").text.upper()
                if prod != "Y":
                    continue
            os_url = sp.find("URL").text
            ep_site = sp.find('SITENAME').text
            # Keep only endpoints belonging to Certified sites (or the
            # explicitly requested one).
            if ep_site not in sites:
                continue
            endpoints.append([sp.find("SITENAME").text, service_type, os_url])
    else:
        print("Something went wrong...")
        # BUG FIX: `.status` is not a requests.Response attribute; the
        # error path raised AttributeError.  Use `.status_code`.
        print(r.status_code)
        print(r.text)
    return endpoints
def get_keystone_url(os_auth_url, path):
    """Build the URL for Keystone resource *path*.

    Drops a trailing "/v2.0" or "/v3" version segment from the configured
    auth URL, keeps any remaining deployment prefix, and appends *path*.
    """
    url = parse.urlparse(os_auth_url)
    prefix = url.path.rstrip("/")
    if prefix.endswith("v2.0") or prefix.endswith("v3"):
        # Strip the API version segment; keep whatever precedes it.
        prefix = prefix.rpartition("/")[0]
    # BUG FIX: os.path.join(prefix, path) discarded `prefix` because callers
    # always pass an absolute `path`, which broke Keystones deployed under a
    # path prefix such as "/identity".  Join the two segments manually.
    full_path = prefix.rstrip("/") + "/" + path.lstrip("/")
    return parse.urlunparse((url[0], url[1], full_path, url[3], url[4], url[5]))
def get_unscoped_token(os_auth_url, access_token):
    """Get an unscoped token, trying the known federation protocol names.

    Returns (token, protocol_name); raises RuntimeError when no protocol
    endpoint accepts the access token.
    """
    protocols = ["openid", "oidc"]
    for p in protocols:
        try:
            unscoped_token = retrieve_unscoped_token(os_auth_url, access_token, p)
            return unscoped_token, p
        except RuntimeError:
            # Try the next protocol name before giving up.
            pass
    # BUG FIX: the message previously said "scoped" although this is the
    # unscoped-token path.
    raise RuntimeError("Unable to get an unscoped token")
def get_scoped_token(os_auth_url, access_token, project_id):
    """Scope a freshly obtained unscoped token to *project_id*.

    Returns (scoped_token, protocol_name).
    """
    unscoped_token, protocol = get_unscoped_token(os_auth_url, access_token)
    token_url = get_keystone_url(os_auth_url, "/v3/auth/tokens")
    payload = {
        "auth": {
            "identity": {"methods": ["token"], "token": {"id": unscoped_token}},
            "scope": {"project": {"id": project_id}},
        }
    }
    reply = requests.post(token_url, json=payload)
    if reply.status_code == requests.codes.created:
        # Keystone hands the token back in a response header.
        return reply.headers["X-Subject-Token"], protocol
    raise RuntimeError("Unable to get an scoped token")
def retrieve_unscoped_token(os_auth_url, access_token, protocol="openid"):
    """Request an unscoped token via the OS-FEDERATION auth endpoint."""
    auth_url = get_keystone_url(
        os_auth_url,
        "/v3/OS-FEDERATION/identity_providers/egi.eu/protocols/%s/auth" % protocol,
    )
    reply = requests.post(
        auth_url, headers={"Authorization": "Bearer %s" % access_token}
    )
    if reply.status_code == requests.codes.created:
        return reply.headers["X-Subject-Token"]
    raise RuntimeError("Unable to get an unscoped token")
def get_projects(os_auth_url, unscoped_token):
    """List the projects the bearer of *unscoped_token* may scope to."""
    projects_url = get_keystone_url(os_auth_url, "/v3/auth/projects")
    reply = requests.get(projects_url, headers={"X-Auth-Token": unscoped_token})
    reply.raise_for_status()
    return reply.json()["projects"]
# Click group for endpoint subcommands; the commands below attach to it and
# cli.py mounts the group on the root CLI.  No docstring on purpose: click
# would surface it as help text.
@click.group()
def endpoint():
    pass
# Print a table of the projects the user may enter on each site's
# OpenStack (all sites unless --site is given).
@endpoint.command()
@click.option(
    "--checkin-client-id",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_CLIENT_ID", None),
)
@click.option(
    "--checkin-client-secret",
    help="Check-in client secret",
    required=False,
    default=lambda: os.environ.get("CHECKIN_CLIENT_SECRET", None),
)
@click.option(
    "--checkin-refresh-token",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_REFRESH_TOKEN", None),
)
@click.option(
    "--checkin-url",
    help="Check-in OIDC URL",
    required=True,
    default=lambda: os.environ.get("CHECKIN_OIDC_URL", "https://aai.egi.eu/oidc"),
)
@click.option(
    "--site",
    help="Name of the site",
    default=lambda: os.environ.get("EGI_SITE", None),
)
def projects(
    checkin_client_id, checkin_client_secret, checkin_refresh_token, checkin_url, site
):
    # Get the right endpoint from GOCDB
    project_list = []
    access_token = refresh_access_token(
        checkin_client_id, checkin_client_secret, checkin_refresh_token, checkin_url
    )
    # One table row per project per nova endpoint: [id, name, enabled, site].
    for ep in find_endpoint("org.openstack.nova", site=site):
        os_auth_url = ep[2]
        unscoped_token, _ = get_unscoped_token(os_auth_url, access_token)
        project_list.extend(
            [
                [p["id"], p["name"], p["enabled"], ep[0]]
                for p in get_projects(os_auth_url, unscoped_token)
            ]
        )
    print(tabulate(project_list, headers=["id", "Name", "enabled", "site"]))
# Print an `export OS_TOKEN=...` line with a token scoped to --project-id,
# suitable for eval'ing in a shell.
@endpoint.command()
@click.option(
    "--checkin-client-id",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_CLIENT_ID", None),
)
@click.option(
    "--checkin-client-secret",
    help="Check-in client secret",
    required=False,
    default=lambda: os.environ.get("CHECKIN_CLIENT_SECRET", None),
)
@click.option(
    "--checkin-refresh-token",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_REFRESH_TOKEN", None),
)
@click.option(
    "--checkin-url",
    help="Check-in OIDC URL",
    required=True,
    default=lambda: os.environ.get("CHECKIN_OIDC_URL", "https://aai.egi.eu/oidc"),
)
@click.option(
    "--site", help="Name of the site", default=lambda: os.environ.get("EGI_SITE", None)
)
@click.option(
    "--project-id",
    help="Project ID",
    required=True,
    default=lambda: os.environ.get("OS_PROJECT_ID", None),
)
def token(
    checkin_client_id,
    checkin_client_secret,
    checkin_refresh_token,
    checkin_url,
    project_id,
    site,
):
    # Get the right endpoint from GOCDB
    access_token = refresh_access_token(
        checkin_client_id, checkin_client_secret, checkin_refresh_token, checkin_url
    )
    # assume first one is ok
    ep = find_endpoint("org.openstack.nova", site=site).pop()
    os_auth_url = ep[2]
    token, _ = get_scoped_token(os_auth_url, access_token, project_id)
    print('export OS_TOKEN="%s"' % token)
# Rewrite the EC3 auth file in place, replacing the `password = <token>`
# field of OpenStack entries with a freshly refreshed access token when the
# current one is within 5 minutes of expiry.  Meant to run from cron (see
# the EC3 template above).
@endpoint.command()
@click.option(
    "--checkin-client-id",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_CLIENT_ID", None),
)
@click.option(
    "--checkin-client-secret",
    help="Check-in client secret",
    required=False,
    default=lambda: os.environ.get("CHECKIN_CLIENT_SECRET", None),
)
@click.option(
    "--checkin-refresh-token",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_REFRESH_TOKEN", None),
)
@click.option(
    "--checkin-url",
    help="Check-in OIDC URL",
    required=True,
    default=lambda: os.environ.get("CHECKIN_OIDC_URL", "https://aai.egi.eu/oidc"),
)
@click.option(
    "--auth-file",
    help="Authorization file",
    required=True,
    default="auth.dat",
)
def ec3_refresh(
    checkin_client_id,
    checkin_client_secret,
    checkin_refresh_token,
    checkin_url,
    auth_file,
):
    # Get the right endpoint from GOCDB
    access_token = refresh_access_token(
        checkin_client_id, checkin_client_secret, checkin_refresh_token, checkin_url
    )
    auth_file_contents = []
    with open(auth_file, "r") as f:
        for raw_line in f.readlines():
            line = raw_line.strip()
            # Auth entries are "key = value" pairs joined by ";"; only
            # OpenStack entries carry a token in their password field.
            if 'OpenStack' in line:
                auth_tokens = []
                for token in line.split(";"):
                    if token.strip().startswith("password"):
                        access_token = token.split("=")[1].strip()
                        # Strip surrounding quotes if present.
                        if access_token[0] in ["'", '"']:
                            access_token = access_token[1:-1]
                        # FIXME(enolfc): add verification
                        # NOTE(review): verify=False matches PyJWT 1.x; the
                        # kwarg was removed in PyJWT >= 2.0 -- confirm the
                        # pinned dependency version.
                        payload = jwt.decode(access_token, verify=False)
                        now = int(time.time())
                        expires = int(payload['exp'])
                        # Refresh when fewer than 300s of validity remain.
                        if expires - now < 300:
                            access_token = refresh_access_token(
                                checkin_client_id,
                                checkin_client_secret,
                                checkin_refresh_token,
                                checkin_url
                            )
                        auth_tokens.append("password = %s" % access_token)
                    else:
                        auth_tokens.append(token.strip())
                auth_file_contents.append("; ".join(auth_tokens))
            elif line:
                auth_file_contents.append(line)
    with open(auth_file, "w+") as f:
        f.write("\n".join(auth_file_contents))
# Generate EC3 configuration: write an OpenStack entry (with a fresh access
# token) into the auth file and emit the token-refreshing RADL template into
# --template-dir.
@endpoint.command()
@click.option(
    "--checkin-client-id",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_CLIENT_ID", None),
)
@click.option(
    "--checkin-client-secret",
    help="Check-in client secret",
    required=True,
    default=lambda: os.environ.get("CHECKIN_CLIENT_SECRET", None),
)
@click.option(
    "--checkin-refresh-token",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_REFRESH_TOKEN", None),
)
@click.option(
    "--checkin-url",
    help="Check-in OIDC URL",
    required=True,
    default=lambda: os.environ.get("CHECKIN_OIDC_URL", "https://aai.egi.eu/oidc"),
)
@click.option(
    "--site", help="Name of the site", default=lambda: os.environ.get("EGI_SITE", None)
)
@click.option(
    "--project-id",
    help="Project ID",
    required=True,
    default=lambda: os.environ.get("OS_PROJECT_ID", None),
)
@click.option(
    "--auth-file",
    help="Authorization file",
    required=True,
    default="auth.dat",
)
@click.option(
    "--template-dir",
    help="EC3 templates dir",
    required=True,
    default="./templates",
)
@click.option("--force", is_flag=True, help="Force rewrite of files")
def ec3(
    checkin_client_id,
    checkin_client_secret,
    checkin_refresh_token,
    checkin_url,
    site,
    project_id,
    auth_file,
    template_dir,
    force,
):
    # Refuse to clobber an existing auth file unless explicitly forced.
    if os.path.exists(auth_file) and not force:
        print("Auth file already exists, not replacing unless --force option is included")
        raise click.Abort()
    access_token = refresh_access_token(
        checkin_client_id, checkin_client_secret, checkin_refresh_token, checkin_url
    )
    # Get the right endpoint from GOCDB
    # assume first one is ok
    ep = find_endpoint("org.openstack.nova", site=site).pop()
    os_auth_url = ep[2]
    # One EC3 auth entry: "key = value" pairs joined by ";".
    site_auth = [
        "id = %s" % site,
        "type = OpenStack",
        "username = egi.eu",
        "tenant = openid",
        "auth_version = 3.x_oidc_access_token",
        "host = %s" % os_auth_url,
        "domain = %s" % project_id,
        "password = '%s'" % access_token
    ]
    auth_file_contents = [";".join(site_auth)]
    # Preserve any non-OpenStack lines from the existing file.
    if os.path.exists(auth_file):
        with open(auth_file, "r") as f:
            for line in f.readlines():
                if 'OpenStack' in line:
                    continue
                auth_file_contents.append(line)
    with open(auth_file, "w+") as f:
        f.write("\n".join(auth_file_contents))
    if not os.path.exists(template_dir):
        os.mkdir(template_dir)
    # Interpolate the cron-based token refresher template with this client's
    # credentials (see EC3_REFRESHTOKEN_TEMPLATE above).
    with open(os.path.join(template_dir, "refresh.radl"), "w+") as f:
        v = dict(client_id=checkin_client_id,
                 client_secret=checkin_client_secret,
                 refresh_token=checkin_refresh_token)
        f.write(EC3_REFRESHTOKEN_TEMPLATE % v)
# Tabulate GOCDB endpoints for a service type.  NOTE: the function name
# shadows the builtin `list` at module level (click needs the name for the
# subcommand); harmless here but worth knowing.
@endpoint.command()
@click.option(
    "--service-type", default="org.openstack.nova", help="Service type in GOCDB"
)
@click.option("--production/--not-production", default=True, help="Production status")
@click.option("--monitored/--not-monitored", default=True, help="Monitoring status")
@click.option(
    "--site", help="Name of the site", default=lambda: os.environ.get("EGI_SITE", None)
)
def list(service_type, production, monitored, site):
    endpoints = find_endpoint(service_type, production, monitored, site)
    print(tabulate(endpoints, headers=["Site", "type", "URL"]))
# Print a block of `export OS_*=...` lines configuring an OpenStack client
# environment for --site / --project-id, suitable for eval'ing in a shell.
@endpoint.command()
@click.option(
    "--checkin-client-id",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_CLIENT_ID", None),
)
@click.option(
    "--checkin-client-secret",
    help="Check-in client secret",
    required=False,
    default=lambda: os.environ.get("CHECKIN_CLIENT_SECRET", None),
)
@click.option(
    "--checkin-refresh-token",
    help="Check-in client id",
    required=True,
    default=lambda: os.environ.get("CHECKIN_REFRESH_TOKEN", None),
)
@click.option(
    "--checkin-url",
    help="Check-in OIDC URL",
    required=True,
    default=lambda: os.environ.get("CHECKIN_OIDC_URL", "https://aai.egi.eu/oidc"),
)
@click.option(
    "--site",
    help="Name of the site",
    required=True,
    default=lambda: os.environ.get("EGI_SITE", None),
)
@click.option(
    "--project-id",
    help="Project ID",
    required=True,
    default=lambda: os.environ.get("OS_PROJECT_ID", None),
)
def env(
    checkin_client_id,
    checkin_client_secret,
    checkin_refresh_token,
    checkin_url,
    project_id,
    site,
):
    # Get the right endpoint from GOCDB
    access_token = refresh_access_token(
        checkin_client_id, checkin_client_secret, checkin_refresh_token, checkin_url
    )
    # assume first one is ok
    ep = find_endpoint("org.openstack.nova", site=site).pop()
    os_auth_url = ep[2]
    # The scoped-token call also tells us which federation protocol worked.
    token, protocol = get_scoped_token(os_auth_url, access_token, project_id)
    print("# environment for %s" % site)
    print('export OS_AUTH_URL="%s"' % os_auth_url)
    print('export OS_AUTH_TYPE="v3oidcaccesstoken"')
    print('export OS_IDENTITY_PROVIDER="egi.eu"')
    print('export OS_PROTOCOL="%s"' % protocol)
    print('export OS_ACCESS_TOKEN="%s"' % access_token)
    print('export OS_PROJECT_ID="%s"' % project_id)
| {"/egicli/endpoint.py": ["/egicli/checkin.py"], "/egicli/cli.py": ["/egicli/checkin.py", "/egicli/endpoint.py"]} |
73,726 | kstepanovdev/egicli | refs/heads/master | /egicli/cli.py | import logging
import click
from egicli.checkin import token
from egicli.endpoint import endpoint
@click.group()
@click.option("--debug/--no-debug", default=False)
def cli(debug):
    # Configure root logging once for every subcommand.
    if debug:
        level = logging.DEBUG
    else:
        level = logging.INFO
    logging.basicConfig(level=level)
# Mount the subcommand groups imported from egicli.checkin / egicli.endpoint.
cli.add_command(token)
cli.add_command(endpoint)

# Script entry point.
if __name__ == "__main__":
    cli()
| {"/egicli/endpoint.py": ["/egicli/checkin.py"], "/egicli/cli.py": ["/egicli/checkin.py", "/egicli/endpoint.py"]} |
73,737 | Cloudxtreme/terminal-coin-ticker | refs/heads/master | /terminal_coin_ticker/clients.py | #!/bin/python3
# -*- coding: UTF-8 -*-
"""
Yeah, there's only one client, so far. And it's only partially implemented,
meaning it only supports those calls needed by the ticker. Glaring omissions
include candle subscriptions and all the trading/order/account stuff.
"""
# This file is part of <https://github.com/poppyschmo/terminal-coin-ticker>
import asyncio
import json
import os
import reprlib
import signal
import sys
try:
import websockets
except ModuleNotFoundError:
pass
try:
import aiohttp
except ModuleNotFoundError:
pass
from decimal import Decimal
from functools import partial
# Prefer websockets when installed, fall back to aiohttp; the try/except
# imports above leave only the available module(s) in globals().
USE_AIOHTTP = (False if "websockets" in globals() else
               True if "aiohttp" in globals() else None)
if USE_AIOHTTP is None:
    raise SystemExit("Please install websockets or aiohttp")

# Default log verbosity; values > 6 additionally dump raw message i/o.
VERBOSITY = 6
class HitBTCWebSocketsClient:
    """Asyncio JSON-RPC client for the HitBTC v2 websocket API (partial).

    Implements the market-data calls needed by the ticker (currencies,
    symbols, ticker subscribe/unsubscribe) plus login, trading balance
    and active orders.  Runs over either ``websockets`` or ``aiohttp``,
    whichever is available (see module-level ``USE_AIOHTTP``).
    """

    exchange = "HitBTC"
    url = "wss://api.hitbtc.com/api/2/ws"
    # HitBTC error code -> (nearest HTTP status, explanation); merged into
    # error payloads by consume_response().
    errors_reference = {
        403: (401, "Action is forbidden for account"),
        429: (429, "Too many requests. Action is being rate limited "
                   "for account"),
        500: (500, "Internal Server Error"),
        503: (503, "Service Unavailable. Try it again later"),
        504: (504, "Gateway Timeout. Check the result of your request "
                   "later"),
        1001: (401, "Authorisation required"),
        1002: (401, "Authorisation failed"),
        1003: (403, "Action is forbidden for this API key. "
                    "Check permissions for API key"),
        1004: (401, "Unsupported authorisation method. Use Basic "
                    "authentication"),
        2001: (400, "Symbol not found"),
        2002: (400, "Currency not found "),
        20001: (400, "Insufficient funds. Insufficient funds for creating "
                     "order or any account operation"),
        20002: (400, "Order not found. Attempt to get active order that "
                     "not existing: filled, canceled or expired. Attempt "
                     "to cancel not existing order. Attempt to cancel "
                     "already filled or expired order."),
        20003: (400, "Limit exceeded. Withdrawal limit exceeded"),
        20004: (400, "Transaction not found. Requested transaction not "
                     "found"),
        20005: (400, "Payout not found"),
        20006: (400, "Payout already committed"),
        20007: (400, "Payout already rolled back"),
        20008: (400, "Duplicate clientOrderId"),
        10001: (400, "Validation error. Input not valid, see more in "
                     "message field")
    }

    def __init__(self, verbosity=VERBOSITY, logfile=None,
                 use_aiohttp=USE_AIOHTTP):
        """
        Neither the jsonrpc spec nor the api docs mention max size for
        ``id``. If they did, it'd probably be better to catch
        ``StopIteration`` and remake the ws connection when approaching
        this limit. Or, if the server resets its cache at that point,
        use ``itertools.cycle`` and keep on going. If the server simply
        forgets the ids of fulfilled requests and/or overwrites
        duplicates, then this is pointless.

        .. _: http://www.jsonrpc.org/specification
        """
        self.verbose = verbosity
        self.log = logfile if logfile else sys.stderr
        # Fall back to whichever websocket library is actually importable.
        if use_aiohttp and "aiohttp" not in globals():
            self.echo("Can't find the aiohttp module. Trying websockets.", 3)
            use_aiohttp = False
        elif not use_aiohttp and "websockets" not in globals():
            use_aiohttp = True
        self.aio = use_aiohttp
        # Monotonically increasing JSON-RPC request ids.
        self.rqids = iter(range(1, sys.maxsize))
        # rqid -> result/error, filled by consume_response(), drained by
        # check_replies().
        self.replies = {}
        # Next two are kept separate so ticker data can be preserved and
        # updating is less complicated
        self.ticker = {}
        self.symbols = None
        self.markets = None
        self.ticker_subscriptions = set()
        # Consumer coroutine -> weight; recv_handler() dispatches messages
        # to consumers in ascending weight order.
        self.consumers = {self.consume_response: 0}
        # These are only for logging send/recv raw message i/o
        try:
            reprlib.aRepr.maxstring = os.get_terminal_size().columns - 2
        except AttributeError:
            # Platform without os.get_terminal_size(); keep reprlib default.
            pass
        self.lrepr = reprlib.aRepr.repr

    def echo(self, msg, level=6):
        """Print a timestamped, caller-tagged log line when *level* is
        within the configured verbosity; colorized when stdout is a tty."""
        if (level > self.verbose):
            return
        from datetime import datetime
        from inspect import stack
        # Identify the calling function (and class, for methods).
        fr = stack()[1]
        fparts = []
        if "self" in fr.frame.f_locals:
            fparts = [(self.__class__.__name__), "."]
        fparts += [stack()[1].function, "()"]
        funcname = "".join(fparts)
        if hasattr(os, "isatty") and os.isatty(sys.stdout.fileno()):
            fmtstr = "[\x1b[38;5;244m{}\x1b[m] \x1b[38;5;249m{}\x1b[m: {}"
        else:
            fmtstr = "[{}] {}: {}"
        print(fmtstr.format(datetime.now(), funcname, msg), file=self.log)

    def prep_request(self, method, payload, rqid=None):
        """Build a JSON-RPC request; return ``(rqid, serialized_json)``."""
        # Can also use channel variant, e.g.:
        # {channel: 'ticker', event: 'unsub', params:{symbol: pair}}
        if rqid is None:
            rqid = next(self.rqids)
        outdict = dict(method=method, params=payload, id=rqid)
        # No need for bytes
        return rqid, json.dumps(outdict)

    async def consume_response(self, message):
        """Default consumer (weight 0): file RPC replies by request id.

        Returns the (truthy) error dict on errors, which stops
        recv_handler() from passing the message to further consumers.
        """
        if "error" in message:
            self.echo(message["error"], level=3)
            # To prevent other consumers from running, should return an actual
            # Exception
            code = message["error"].get("code")
            if code in self.errors_reference:
                # Enrich with the status/docs pair from errors_reference.
                message["error"].update(zip("status docs".split(),
                                            self.errors_reference[code]))
            return message["error"]
        rqid = message.get("id")
        if rqid is None:
            # Not an RPC reply (e.g. a subscription notification).
            return
        rqid = int(rqid)
        result = message.get("result", message.get("error"))
        self.replies.update({rqid: result})

    async def consume_ticker_notes(self, message):
        """Consumer (weight 5) for ticker notifications; updates self.ticker.

        Handles both the "method"/"params" and "channel"/"data" message
        shapes; removes itself once no subscriptions remain.
        """
        if not self.ticker_subscriptions:
            del self.consumers[self.consume_ticker_notes]
            return None
        if message.get("method", message.get("channel")) != "ticker":
            return None
        params = message.get("params", message.get("data"))
        symbol = params.get("symbol")
        self.ticker.update({symbol: params})
        return params

    async def __aenter__(self):
        # Open the websocket with whichever library was selected, then start
        # the background receive loop.
        if self.aio:
            self._conn = aiohttp.ClientSession()
            self.websocket = await self._conn.ws_connect(self.url).__aenter__()
        else:
            self._conn = websockets.connect(self.url)
            self.websocket = await self._conn.__aenter__()
        # Start reading messages
        self.active_recv_Task = asyncio.ensure_future(self.recv_handler())
        return self

    async def __aexit__(self, *args, **kwargs):
        """Should probably close receive task as well"""
        await self._conn.__aexit__(*args, **kwargs)

    async def do_send(self, message):
        """Send an already-serialized message, logging it at verbosity > 6."""
        if self.verbose > 6:
            print("> {}".format(self.lrepr(message)), file=self.log)
        if self.aio:
            await self.websocket.send_str(message)
        else:
            await self.websocket.send(message)

    async def check_replies(self, rqid):
        """Poll until the reply for *rqid* arrives; pop and return it."""
        while rqid not in self.replies:
            await asyncio.sleep(0.1)
        result = self.replies[rqid]
        del self.replies[rqid]
        return result

    async def recv_handler(self):
        """Background loop: decode each incoming message and offer it to the
        registered consumers in ascending weight order, stopping at the
        first consumer that returns non-None."""
        if self.verbose:
            self.echo("Starting receive handler")
            if self.aio:
                self.echo("Using aiohttp instead of websockets")
        try:
            async for raw_message in self.websocket:
                if self.aio:
                    # aiohttp yields WSMessage objects; unwrap the payload.
                    raw_message = raw_message.data
                if self.verbose > 6:
                    print("< {}".format(self.lrepr(raw_message)),
                          file=self.log)
                message = json.loads(raw_message)
                for __, consumer in sorted((weight, func) for func, weight
                                           in self.consumers.items()):
                    if await consumer(message) is not None:
                        break
        except asyncio.CancelledError:
            # pass
            return "recv_handler exited"

    async def get_currency(self, currency=None):
        """Fetch one currency (or all, when *currency* is None)."""
        payload = {}
        if currency is not None:
            payload.update(currency=currency)
        rqid, message = self.prep_request("getCurrency", payload)
        await self.do_send(message)
        return await self.check_replies(rqid)

    async def get_symbols(self, symbol=None, cache_result=True):
        """Fetch (and memoize) symbol metadata; also derives self.markets.

        NOTE(review): ``cache_result`` is currently unused -- results are
        always cached on first call.
        """
        if self.symbols is None:
            rqid, message = self.prep_request("getSymbols", {})
            await self.do_send(message)
            result = await self.check_replies(rqid)
            self.symbols = {s["id"]: s for s in result}
            self.markets = {v["quoteCurrency"] for v in self.symbols.values()}
        if symbol is None:
            return list(self.symbols.values())
        else:
            return self.symbols[symbol]

    async def subscribe_ticker(self, symbol):
        """Subscribe to ticker updates for *symbol*; no-op when already
        subscribed.  Registers consume_ticker_notes as a consumer."""
        if symbol in self.ticker_subscriptions:
            self.echo("Already subscribed to %r" % symbol, level=4)
            return None
        payload = {"symbol": symbol}
        rqid, message = self.prep_request("subscribeTicker", payload)
        if self.verbose:
            self.echo("adding %s to ticker_sub...s for id %d" %
                      (symbol, rqid))
        self.ticker_subscriptions.add(symbol)
        self.consumers.setdefault(self.consume_ticker_notes, 5)
        await self.do_send(message)
        result = await self.check_replies(rqid)
        return ("subscribe_ticker(%r) exited" % symbol, result)

    async def unsubscribe_ticker(self, symbol):
        """Cancel the ticker subscription for *symbol*; no-op when absent."""
        if symbol not in self.ticker_subscriptions:
            self.echo("Already unsubscribed from %r" % symbol, level=4)
            return None
        payload = {"symbol": symbol}
        rqid, message = self.prep_request("unsubscribeTicker", payload)
        await self.do_send(message)
        result = await self.check_replies(rqid)
        self.ticker_subscriptions.discard(symbol)
        return ("unsubscribe_ticker(%r) exited" % symbol, result)

    async def authenticate(self, public_key, secret_key):
        """
        This is confusing. HS256 is symmetric, so what's with the key pair?
        """
        import hmac
        import secrets
        # Sign a random nonce with the secret key per the HS256 login flow.
        nonce = secrets.token_urlsafe()
        sig = hmac.new(secret_key.encode(),
                       nonce.encode(), "SHA256").digest().hex()
        payload = dict(algo="HS256", pKey=public_key, nonce=nonce,
                       signature=sig)
        rqid, message = self.prep_request("login", payload)
        await self.do_send(message)
        result = await self.check_replies(rqid)
        return result

    async def get_trading_balance(self):
        """Fetch the account's trading balance (requires prior login)."""
        rqid, message = self.prep_request("getTradingBalance", {})
        await self.do_send(message)
        return await self.check_replies(rqid)

    async def get_active_orders(self):
        """Fetch the account's active orders (requires prior login)."""
        rqid, message = self.prep_request("getOrders", {})
        await self.do_send(message)
        return await self.check_replies(rqid)

    async def canonicalize_pair(self, pair, as_tuple=False):
        """Normalize *pair* (any case, optional separator) to a known symbol
        id, or to a (base, quote) tuple when *as_tuple* is true.  Raises
        ValueError for unknown pairs."""
        # Unfortunately, base/quote currency ids are not always the same as the
        # concatenated pair, e.g., "BXTUSDT" != "BXT" + "USD". So probably best
        # to only return one kind per invocation. Exchange's "currency" market
        # data market seems to use Tether and USD interchangeably.
        if self.symbols is None:
            await self.get_symbols()
        assert self.symbols is not None
        if as_tuple is False and pair in self.symbols:
            return pair
        # Strip a single non-alphanumeric separator, e.g. "eth/btc".
        if not pair.isalnum():
            sep = [c for c in pair if not c.isalnum()].pop()
            pair = pair.replace(sep, "")
        pair = pair.upper()
        if pair not in self.symbols:
            # Accept "...USD" as shorthand for the Tether-quoted symbol.
            if pair.endswith("USD") and pair + "T" in self.symbols:
                pair += "T"
            else:
                raise ValueError("%r not found in client.symbols" % pair)
        if as_tuple:
            base = self.symbols[pair]["baseCurrency"]
            quote = self.symbols[pair]["quoteCurrency"]
            return base, quote
        return pair
def remove_async_sig_handlers(sigs, loop):
    """Detach any handlers registered on *loop* for the named signals.

    *sigs* is an iterable of signal names ("SIGINT", ...).  Returns the
    removed handler objects so they can be restored later.  Relies on the
    loop's private ``_signal_handlers`` mapping.
    """
    removed = []
    for signame in sigs:
        signum = getattr(signal, signame)
        handle = loop._signal_handlers.get(signum)
        if handle:
            removed.append(handle)
            loop.remove_signal_handler(signum)
            assert signum not in loop._signal_handlers
    return removed
def add_async_sig_handlers(sigs, loop, callback=None):
    """Register a handler on *loop* for each named signal in *sigs*.

    When *callback* is None a default handler is installed that logs the
    signal name and stops the loop; otherwise *callback* itself is
    registered for every signal.
    """
    user_callback = callback

    def stop_loop(signame):
        print("Got a signal: %r" % signame, file=sys.stderr)
        loop.stop()

    for signame in sigs:
        if user_callback is None:
            # Freeze the current signal name as a keyword default here;
            # ``loop`` closes over safely without help.
            handler = partial(stop_loop, signame=signame)
        else:
            handler = user_callback
        loop.add_signal_handler(getattr(signal, signame), handler)
def decimate(inobj):
    """
    Convert numbers and numeric strings in native JSON-like objects to
    Decimal instances.

    Only *mapping values* are converted in place; sequences are recursed
    into (so nested dicts are handled) but scalar items inside a list pass
    through unchanged, and a bare scalar argument is returned as-is.
    """
    from collections.abc import Mapping, MutableSequence

    def _numeric_str(s):
        # Optional leading "-", at most one ".", digits everywhere else
        return s.lstrip("-").replace(".", "", 1).isdigit()

    if isinstance(inobj, Mapping):
        converted = dict(inobj)
        for key, val in inobj.items():
            if isinstance(val, str) and _numeric_str(val):
                converted[key] = Decimal(val)
            elif isinstance(val, (int, float)):
                converted[key] = Decimal(repr(val))  # covers inf, nan
            elif isinstance(val, (Mapping, MutableSequence)):
                converted[key] = decimate(val)
        return converted
    if isinstance(inobj, MutableSequence):
        return [decimate(item) for item in inobj]
    return inobj
def ppj(obj, *args, **kwargs):
    """
    Pretty-print ``obj`` as indented JSON; extra args go to ``print``.

    Falls back to ``pprint`` on stderr for objects json can't serialize
    (e.g. collections containing the results of futures).
    """
    try:
        rendered = json.dumps(obj, indent=2)
        print(rendered, *args, **kwargs)
    except TypeError:
        import pprint
        pprint.pprint(obj, indent=2, stream=sys.stderr)
async def apply_many(func, args: list):
    """Fan ``func`` out over ``args`` concurrently; gather results in order."""
    pending = [asyncio.ensure_future(func(a)) for a in args]
    return await asyncio.gather(*pending)
async def main(**kwargs):
    """Demo driver: fetch currency info for a few coins, then run a short
    ticker subscribe/unsubscribe cycle and dump the collected data.

    Uses the module-level ``VERBOSITY`` and ``USE_AIOHTTP`` settings.
    Returns a dict holding the gathered subscription results.
    """
    Client = HitBTCWebSocketsClient
    async with Client(VERBOSITY, use_aiohttp=USE_AIOHTTP) as client:
        my_currencies = "ETH BCH BTC".split()
        my_symbols = "ETHBTC BCHBTC".split()
        ppj(await apply_many(client.get_currency, my_currencies))
        #
        # public = "baadac1dbaadac1dbaadac1dbaadac1d"
        # secret = "feedbeefdeadbabefeedbabedeadbeef"
        # ppj(await client.authenticate(public, secret))
        # ppj(await client.get_active_orders())
        #
        client.echo("Starting subscription cycle demo")
        futs = await apply_many(client.subscribe_ticker, my_symbols)
        futs.append(await asyncio.sleep(5, result="Did stuff"))  # ← do stuff
        futs += await apply_many(client.unsubscribe_ticker, my_symbols)
        # Tear down the receive loop and wait for it to finish unwinding
        client.active_recv_Task.cancel()
        futs.append(await asyncio.wait_for(client.active_recv_Task,
                                           timeout=None))
        client.echo("All done...")
        ppj(client.ticker)
        return dict(futs=futs)
if __name__ == "__main__":
    # Allow environment overrides of the module-level defaults
    VERBOSITY = int(os.getenv("VERBOSITY", VERBOSITY))
    USE_AIOHTTP = any(s == os.getenv("USE_AIOHTTP", str(USE_AIOHTTP)).lower()
                      for s in "1 yes true".split())
    #
    loop = asyncio.get_event_loop()
    sigs = "sigint sigterm".upper().split()
    teardown_cbs = {}  # NOTE(review): unused here -- confirm it can go
    add_async_sig_handlers(sigs, loop)
    try:
        ppj(loop.run_until_complete(main()))
    except RuntimeError as e:
        # ``loop.stop()`` from a signal handler surfaces as this RuntimeError
        if "loop stopped before Future completed" not in str(e):
            raise
| {"/terminal_coin_ticker/ticker.py": ["/terminal_coin_ticker/clients.py"]} |
73,738 | Cloudxtreme/terminal-coin-ticker | refs/heads/master | /terminal_coin_ticker/ticker.py | #!/bin/python3
# -*- coding: UTF-8 -*-
"""
Usage::
tc-ticker [NUM] [PAIR ...]
Show NUM leading products, by volume, and/or PAIRs taking one of the
following (case-insensitive) forms:
basequote base_quote base/quote "base quote"
Env-var-based options are listed atop the main script.
Warning
-------
Whether due to my own failings or those of the exchange, the market data
displayed is often inaccurate and should be considered untrustworthy.
"""
# Author: Jane Soko
# License: Apache 2.0
import asyncio
import os
import sys
from decimal import Decimal as Dec
if __name__ == "__main__":
sys.path.insert(0, os.path.dirname(
os.path.dirname(os.path.abspath(__file__))))
from terminal_coin_ticker.clients import ( # noqa
HitBTCWebSocketsClient, apply_many, add_async_sig_handlers, decimate,
remove_async_sig_handlers, ppj
)
SHOW_FIRST = 24  # Default number of top-volume pairs when none are named
# Env vars (each may be overridden via the environment in ``main_entry``)
VERBOSITY = 6  # Ignored unless LOGFILE is also exported
USE_AIOHTTP = False  # Ignored unless websockets is also installed
VOL_SORTED = True  # Sort all pairs by volume, auto-selected or named
VOL_UNIT = "USD"  # BTC, ETH, or None for per-pair base currencies
HAS_24 = False  # For apps and utils that outlaw COLORTERM
STRICT_TIME = True  # Die when service notifications aren't updating
PULSE = "normal"  # Flash style of "normal," "fast," or null
PULSE_OVER = 0.125  # Flash threshold as percent change since last update
HEADING = "normal"  # Also "hr_over," "hr_under," "full," and "slim"
# TTL vars (consumed by ``_check_timestamps``)
MAX_STALE = 0.5  # Tolerance threshold ratio of stale/all pairs
STALE_SECS = 15  # Max seconds pair data is considered valid
POLL_INTERVAL = 10  # Seconds to wait between checks
def _rank_by_volume():
    """
    Query the server via HTTP GET for the latest ticker data on all traded
    pairs and yield symbols in descending order of their 24h USD volume.

    Raises
    ------
    ConnectionError
        If the request fails or the API returns an error payload.

    Note
    ----
    Even though this is hardwired to HitBTC, should still probably
    obtain a list of supported markets and loop through them. Although
    an actual upstream change in available markets would break plenty of
    stuff elsewhere.
    """
    import json
    import urllib.request
    from urllib.error import HTTPError
    from operator import itemgetter
    url = "https://api.hitbtc.com/api/2/public/ticker"
    try:
        with urllib.request.urlopen(url) as f:
            data = json.load(f)
    except HTTPError as e:
        # Chain the original error so the HTTP status isn't lost
        raise ConnectionError(
            "Problem connecting to server, try again later") from e
    if "error" in data:
        raise ConnectionError(data["error"])
    # Conversion rates for pairs not quoted in USD
    eth_usd = Dec([s["last"] for s in data if s["symbol"] == "ETHUSD"].pop())
    btc_usd = Dec([s["last"] for s in data if s["symbol"] == "BTCUSD"].pop())

    def _helper(d):
        # Return (symbol, 24h quote volume converted to USD)
        if d["symbol"].endswith(("USD", "USDT")):
            in_usd = Dec(d["volumeQuote"])
        elif d["symbol"].endswith("ETH"):
            in_usd = Dec(d["volumeQuote"]) * eth_usd
        elif d["symbol"].endswith("BTC"):
            in_usd = Dec(d["volumeQuote"]) * btc_usd
        else:
            raise ValueError("Could not convert %s" % d["symbol"])
        return d["symbol"], in_usd
    for sym, _vol in sorted(map(_helper, data), key=itemgetter(1),
                            reverse=True):
        yield sym
def _get_top_markets(num):
    """
    Return the top ``num`` trading pairs, ranked by USD volume.
    """
    ranked = _rank_by_volume()
    return [next(ranked) for _ in range(num)]
def _make_date(timestamp):
fmt = "%Y-%m-%dT%H:%M:%S.%fZ"
from datetime import datetime
return datetime.strptime(timestamp, fmt)
def _hex_to_rgb(hstr):
"""
>>> _hex_to_rgb("#fafafa")
(250, 250, 250)
"""
return tuple(int(c) for c in bytes.fromhex(hstr.lstrip("#")))
def _convert_volume(sym, base, quote, tickdict, ticker):
    """
    Return ``sym``'s quote volume expressed in ``VOL_UNIT`` units.

    Assumptions:

    1. ``VOL_UNIT`` is one of BTC, ETH, USD
    2. ``sym`` is canonical (correct format and confirmed available)
    3. ``tickdict`` has been decimated (digit strings to Decimal instances)
    4. ``quote`` is never "USDT"

    Update: API recently added ``volumeQuote``, so it's probably better
    to just combine this with ``_rank_by_volume.<locals>._helper``
    """
    unit = VOL_UNIT
    # Already denominated in the target unit (USDT counts as USD)
    if sym.endswith(unit) or (unit == "USD" and sym.endswith("USDT")):
        return tickdict["volumeQuote"]
    if unit == "USD" or quote == "ETH":
        rate = Dec(ticker[quote + unit]["last"])
    else:
        rate = 1 / Dec(ticker[unit + quote]["last"])
    return Dec(ticker[sym]["volumeQuote"]) * rate
def _print_heading(client, colors, widths, numrows, volstr):
    """Paint the heading block and reserve blank "board" rows beneath it.

    Layout variant is chosen by the module-level ``HEADING`` ("normal",
    "hr_over", "hr_under", "full", "slim"); ``HAS_24`` selects truecolor
    vs. 256-color escape sequences.

    Parameters
    ----------
    client : exchange client; only ``client.exchange`` (its name) is used
    colors : (background, foreground) namedtuples of escape sequences
    widths : per-column widths, including left/right padding entries
    numrows : number of blank board rows to print below the heading
    volstr : title text for the volume column
    """
    from subprocess import check_output
    try:
        # tput sitm/ritm: enter/exit italics (missing on some terminals)
        sitm = check_output(["tput", "sitm"]).decode()
        ritm = check_output(["tput", "ritm"]).decode()
    except FileNotFoundError:
        sitm = ritm = ""
    else:
        if not ritm:
            sitm = ""
    #
    bg, fg = colors
    #
    # Column alignment: volume is right-aligned only when converted
    align_chars = ("<", ">" if VOL_UNIT else "<", "<", "<", ">", "")
    if HEADING not in ("normal", "slim"):
        align_chars = ("", "<") + align_chars
    #
    _w = widths[2:] if HEADING in ("normal", "slim") else widths
    head_fmt = "".join("{:%s%d}" % (a, w) for a, w in zip(align_chars, _w))
    #
    # heading background
    head_bg = bg.dark
    # heading foreground
    head_fg = fg.head_alt if HAS_24 else fg.dim
    # board
    board = (
        # background
        bg.dark if HAS_24 else bg.tint,
        # rows
        (" " * sum(widths) + "\n") * (numrows - 1),
        " " * sum(widths), "\x1b[m\x1b[K"
    )
    if HEADING == "normal":
        print(head_bg, " " * widths[0],  # heading background, left margin
              # exchange
              fg.dark, sitm,
              "{:<{w}}".format(client.exchange, w=widths[1]), ritm,
              # heading
              head_fg, head_fmt.format("Price", volstr, "Bid", "Ask",
                                       "Δ (24h)", ""), "\n",
              # hr
              fg.dark, "\x1b[4m", "─" * sum(widths), "\x1b[m", "\n",
              # board
              *board, sep="", end="")
    elif "hr_" in HEADING:
        ex_hr = (sitm, fg.faint_shade if HAS_24 else fg.dark,
                 "─" * widths[0], client.exchange,
                 "─" * (sum(widths) - len(client.exchange) - widths[0]), "\n")
        heading = (head_fg, head_fmt.format("", "", "Price", volstr, "Bid",
                                            "Ask", "Δ (24h)", ""), "\n")
        if HEADING == "hr_over":
            print(head_bg, *ex_hr, *heading, *board, sep="", end="")
        else:
            print(head_bg, *heading, *ex_hr, *board, sep="", end="")
    elif HEADING == "full":
        print(head_bg,  # heading background
              # exchange
              sitm, fg.faint_shade if HAS_24 else fg.dark,
              "─" * (sum(widths) - len(client.exchange) - widths[-1]),
              client.exchange, "─" * widths[-1], ritm, "\n",
              # heading
              head_fg, head_fmt.format("", "Pair", "Price", volstr, "Bid",
                                       "Ask", "Δ (24h)", ""), "\n",
              # hr
              fg.faint_shade if HAS_24 else fg.dark,
              "\x1b[4m", "─" * sum(widths), "\x1b[m", "\n",
              # board
              *board, sep="", end="")
    elif HEADING == "slim":
        print(head_bg, " " * widths[0],  # heading background, left margin
              # exchange
              fg.dark, sitm, "{:<{w}}".format(client.exchange, w=widths[1]),
              ritm if HAS_24 else "",
              # heading
              head_fg, head_fmt.format("Price", volstr, "Bid", "Ask",
                                       "Δ (24h)", ""), "\n",
              # board
              *board, sep="", end="")
async def _check_timestamps(all_subs, client, kill_handler, strict=True,
                            max_stale=MAX_STALE, stale_secs=STALE_SECS,
                            poll_interval=POLL_INTERVAL):
    """
    Iterate over latest ticker entries and check timestamps against ttl
    threshold. Like ``_paint_ticker_line()``, it doesn't make sense to
    return a value for this function because it can only die if its
    outer future is cancelled.

    Sending ``SIGINT`` to pid 0 raises ``BlockingIOError`` (errno 11).
    Raising a ``KeyboardInterrupt`` works, but the teardown handlers
    registered for ``SIGINT`` won't run.

    Note: The API docs imply that ``channel`` notifications are only
    pushed when a change in price has occurred. Simply juxtaposing the
    website's ticker with this one pretty much debunks this. Never mind
    that for many notification updates, only the timestamps have
    changed. For now, assume push consistency is governed by network
    load and other operating factors. Other APIs present
    heartbeat-related options that probably require an actual
    understanding of websockets standards/conventions.
    """
    from itertools import cycle
    from datetime import datetime
    # Hold off until at least one subscription is live
    while not client.ticker_subscriptions:
        await asyncio.sleep(poll_interval)
    stale_subs = set()
    for sym in cycle(all_subs):
        ts_str = client.ticker[sym]["timestamp"]
        if ts_str is None:
            continue
        ts = _make_date(ts_str)
        # NOTE(review): ``.seconds`` (not ``.total_seconds()``) wraps at one
        # day and misbehaves for future-dated stamps -- confirm intent
        diff = (datetime.utcnow() - ts).seconds
        if diff > stale_secs:
            # Fixed: report actual minutes/seconds; previously divided by
            # ``stale_secs`` while the message claimed "min ... secs"
            m, s = divmod(diff, 60)
            client.echo("Stale timestamp for %r. "
                        "Off by %d min %d secs" % (sym, m, s), 5)
            stale_subs.add(sym)
            if strict and len(stale_subs) / len(all_subs) > max_stale:
                kill_handler(error="The number of pairs awaiting updates has "
                             "exceeded the maximum allowed")
            else:
                client.ticker[sym]["timestamp"] = None  # <- mark as stale
        else:
            stale_subs.discard(sym)
        await asyncio.sleep(poll_interval)
    client.echo("Exiting", 6)
async def _paint_ticker_line(lnum, sym, semaphore, snapshots, ticker, fmt,
                             colors, bq_pair, wait=1.0, pulse_over=PULSE_OVER):
    """
    Repaint one ticker row (``lnum`` lines above the cursor home) whenever
    the entry for ``sym`` in ``ticker`` changes; runs forever until its
    enclosing future is cancelled.

    The kwargs are tweakable and should perhaps be presented as global
    options. ``wait`` is the update period. ``pulse_over`` is the
    red/green flash threshold (a percentage).
    """
    base, quote = bq_pair
    # Force 2-decimal display for USD-quoted pairs trading at >= 10
    if quote == "USD" and Dec(ticker.get(sym, {}).get("last", 0)) >= 10:
        for _s in ("_vol", "ask", "_chg"):
            fmt = fmt.replace("f}{%s" % _s, ".2f}{%s" % _s)
    #
    cbg, cfg = colors
    sep = "/"
    # Alternate row shading by line parity
    bg = cbg.shade if lnum % 2 else cbg.tint
    up = "\x1b[A" * lnum + "\r"
    down = "\x1b[B" * lnum
    last_seen = {}
    #
    # Delay pulsing while staggering initial update
    pulse_over, pulse_delay, pulse = Dec(pulse_over), 5, None
    _pulse_over = Dec(pulse_over + pulse_delay)
    from random import random
    #
    while True:
        # Without this, pulses get backlogged/front-loaded and fire in a
        # fusillade on init, sometimes after a short hang. Not sure why.
        if _pulse_over > pulse_over:
            _pulse_over -= Dec(1)
            _wait = random()
            await asyncio.sleep(_wait)
        _wait = wait
        if pulse:
            latest = last_seen
        else:
            latest = decimate(dict(ticker.get(sym)))
        # Skip the repaint entirely when nothing has changed
        if snapshots.get(sym) and snapshots[sym] == latest:
            continue
        last_seen = snapshots.setdefault(sym, latest)
        # Better to save as decimal quotient and only display as percent
        change = ((latest["last"] - latest["open"]) / latest["open"])
        latest["change"] = change
        # Use explicit value for ``normal`` instead of ``\e[39m`` to reset
        clrs = dict(_beg=bg, _sym=cfg.dim, _sepl=cfg.normal,
                    _sepr=cfg.dim, _prc=cfg.normal, _vol=cfg.dim,
                    _chg="", _end="\x1b[m\x1b[K")
        clrs["_chg"] = (cfg.red if change < 0 else
                        cfg.green if change > 0 else clrs["_vol"])
        #
        volconv = None
        if VOL_UNIT:
            volconv = _convert_volume(sym, base, quote, latest, ticker)
        #
        with await semaphore:
            print(up, end="")
            if pulse:
                # Second phase of a flash: paint highlight colors briefly
                if HAS_24:
                    clrs["_beg"] = (cbg.mix_green if
                                    pulse == "+" else cbg.mix_red)
                else:
                    clrs["_beg"] = bg
                clrs["_prc"] = clrs["_chg"] = \
                    cfg.bright_green if pulse == "+" else cfg.bright_red
                clrs["_vol"] = cfg.green if pulse == "+" else cfg.red
                _wait = 0.124 if PULSE == "fast" else 0.0764
                pulse = None
            elif latest["timestamp"] is None:
                # Entry was marked stale by the watchdog: dim the row
                clrs.update(dict(_sym=cfg.dark, _sepl="", _sepr="",
                                 _prc=(cfg.faint_shade if lnum % 2 else
                                       cfg.faint_tint), _vol="", _chg=""))
                change, pulse = 0, None
            # Must divide by 100 because ``_pulse_over`` is a %
            elif (abs(abs(latest["last"]) - abs(last_seen["last"])) >
                    abs(_pulse_over / 100 * last_seen["last"])):
                pulse = None
                _wait = 0.0764 if PULSE == "fast" else 0.124
                if change - last_seen["change"] > 0:
                    pulse = "+"
                    clrs["_beg"] = cbg.green
                    if not HAS_24:
                        clrs.update(dict(_sym=cfg.green, _sepl="", _sepr="",
                                         _vol="", _prc="", _chg=""))
                else:
                    pulse = "-"
                    clrs["_beg"] = cbg.red
                    if not HAS_24:
                        clrs.update(dict(_sym=cfg.red, _sepl="", _sepr="",
                                         _vol="", _prc="", _chg=""))
            print(fmt.format("", "",
                             base=base.lower(), sep=sep, quote=quote.lower(),
                             **clrs, **latest, volconv=volconv),
                  down, sep="", end="", flush=True)
        last_seen.update(latest)
async def do_run_ticker(syms, client, loop, manage_subs=True,
                        manage_sigs=True):
    """
    Set up and run the full-screen ticker for the pairs in ``syms``.

    Only works with ansi/vt terminals. Keys returned by api call::

        "ask", "bid", "last", "open", "low", "high", "volume",
        "volumeQuote", "timestamp", "symbol"

    The value of ``open`` is that of ``last`` from 24 hours ago and is
    continuous/"moving". This can't be gotten with the various ``*Candle``
    calls because the limit for ``period="M1"`` is 1000, but we'd need 1440.

    Parameters
    ----------
    syms : requested pair names (any form ``canonicalize_pair`` accepts)
    client : connected exchange client
    loop : the running event loop (for signal-handler management)
    manage_subs : subscribe/unsubscribe to the pairs here (required for
        the ``VOL_UNIT`` conversion feature)
    manage_sigs : temporarily take over SIGINT while the ticker runs

    Returns a dict of collected results; an "error" key signals failure.
    """
    if manage_sigs:
        # If this works like signal.signal, shouldn't have to remove existing
        old_sigint_cb = remove_async_sig_handlers(["SIGINT"], loop).pop()

        def rt_sig_cb(**kwargs):
            # Resolve ``gathered`` (defined near the bottom) instead of
            # cancelling it, then restore the previous SIGINT handler
            if not gathered.cancelled():
                kwargs.setdefault("msg", "Cancelling gathered")
                try:
                    gathered.set_result(kwargs)
                except asyncio.futures.InvalidStateError:
                    # Not sure if the repr displays exception if set
                    client.echo("Already done: %r" % gathered)
                else:
                    client.echo("gathered: %r" % gathered)
                finally:
                    add_sig_hands(["SIGINT"], callback=old_sigint_cb)
                # XXX Is this obsolete? See related note for last try/except below
            else:
                client.echo("Already cancelled: %r" % gathered)
        # No need to partialize since ``gathered``, which ``rt_sig_cb``
        # should have closure over once initialized below, will be the same
        # object when the trap is sprung
        add_sig_hands(["SIGINT"], callback=rt_sig_cb)
    #
    from collections import namedtuple
    c_bg_nt = namedtuple("background_colors",
                         "shade tint dark red mix_red green mix_green")
    c_fg_nt = namedtuple("foreground_colors",
                         "normal dim dark faint_shade faint_tint "
                         "red bright_red green bright_green head_alt")
    tc_bg_tmpl, tc_fg_tmpl = "\x1b[48;2;{};{};{}m", "\x1b[38;2;{};{};{}m"
    # Pulse blends (shade, tint): red(#293a49, #2a3d4d), gr(#12464f, #134953)
    # These tones are too similar to justify defining separately
    if HAS_24:
        c_bg = c_bg_nt(*(tc_bg_tmpl.format(*_hex_to_rgb(x)) for x in
                         "#14374A #163E53 #153043 "
                         "#3E3D48 #293a49 #105554 #12464f".split()))
        c_fg = c_fg_nt(*(tc_fg_tmpl.format(*_hex_to_rgb(x)) for x in
                         "#d3d7cf #a1b5c1 #325a6a #224a5a #153043 "
                         "#BF4232 #E55541 #01A868 #0ACD8A #507691".split()))
    else:
        c_bg = c_bg_nt(*("\x1b[48;5;23%sm" % n for n in "6785555"))
        c_fg = c_fg_nt(*("\x1b[38;5;%sm" % n for n in
                         "253 250 243 237 236 95 167;1 65 83;1 228".split()))
    #
    ranked = []
    # Need to preserve order, so can't use set union here
    for sym in reversed(syms):
        try:
            symbol = await client.canonicalize_pair(sym)
        except ValueError as e:
            # Could use ``warnings.warn`` for stuff like this
            print(e, "Removing...", file=sys.stderr)
        else:
            if symbol not in ranked:
                ranked.append(symbol)
    #
    # TODO need auto-culling option crap shoot
    if len(ranked) > MAX_HEIGHT:
        msg = ("Too many pairs requested for current terminal height. "
               "Over by %d." % (len(ranked) - MAX_HEIGHT))
        return {"error": msg}
    #
    all_subs = set(ranked)
    # Ensure conversion pairs available for all volume units
    if VOL_UNIT:
        if manage_subs:
            all_subs |= {"BTCUSD", "ETHUSD"}
        else:
            client.echo("The ``VOL_UNIT`` option requires ``manage_subs``", 3)
            globals()["VOL_UNIT"] = None
    #
    # Results to return
    out_futs = {}
    #
    # Abbreviations
    _cv, cls, clt = _convert_volume, client.symbols, client.ticker
    bC, qC = "baseCurrency", "quoteCurrency"
    #
    if manage_subs:
        await apply_many(client.subscribe_ticker, all_subs)
        # Wait (up to ~3s) for initial ticker/symbol data to arrive
        max_tries = 3
        while max_tries:
            if all(s in clt and s in cls for s in ranked):
                break
            await asyncio.sleep(1)
            max_tries -= 1
        else:
            out_futs["subs"] = await apply_many(client.unsubscribe_ticker,
                                                all_subs)
            out_futs["error"] = "Problem subscribing to remote service"
            return out_futs
    #
    if VOL_UNIT and VOL_SORTED:
        vr = sorted((_cv(s, cls[s][bC], cls[s][qC], decimate(clt[s]), clt), s)
                    for s in ranked)
        ranked = [s for v, s in vr]
    #
    # Arbitrarily assume biggest volume and/or change could grow 10x between
    # open/close, so +1 for those.
    #
    # TODO move all this widths figuring to a separate coro that updates some
    # shared location at regular intervals. If max column width is exceeded,
    # just lower precision for the offending item. So, if some "change" value
    # were to grow from 99.99 to 100.00, make it 100.0 instead.
    sep = "/"
    volstr = "Vol (%s)" % (VOL_UNIT or "base") + (" " if VOL_UNIT else "")
    if VOL_UNIT:
        vprec = "USD ETH BTC".split().index(VOL_UNIT)
    # Market (symbol) pairs will be "concatenated" (no intervening padding)
    sym_widths = (
        # Base
        max(len(cls[s][bC]) for s in ranked),
        # Sep
        len(sep),
        # Quote (corner case: left-justifying, so need padding)
        max(len(cls[s][qC]) for s in ranked)
    )
    # Can't decide among exchange name, "" (blank), "Pair," and "Product"
    widths = (
        # 1: Exchange name
        max(sum(sym_widths), len(client.exchange)),
        # 2: Price
        max(len("{:.2f}".format(Dec(clt[s]["last"])) if
                "USD" in s else clt[s]["last"]) for s in ranked),
        # 3: Volume
        max(*(len("{:,.{pc}f}".format(_cv(s, cls[s][bC], cls[s][qC],
                                          decimate(clt[s]), clt), pc=vprec) if
              VOL_UNIT else clt[s]["volume"]) for s in ranked),
            len(volstr)),
        # 4: Bid
        max(len("{:.2f}".format(Dec(clt[s]["bid"])) if
                "USD" in s else clt[s]["bid"]) for s in ranked),
        # 5: Ask
        max(len("{:.2f}".format(Dec(clt[s]["ask"])) if
                "USD" in s else clt[s]["ask"]) for s in ranked),
        # 6: Change (should maybe do max++ for breathing room)
        max(len("{:+.3f}%".format(
            (Dec(clt[s]["last"]) - Dec(clt[s]["open"])) / Dec(clt[s]["open"])
        )) for s in ranked),
    )
    pad = 2
    widths = (pad,  # <- 0: Left padding
              *(l + pad for l in widths),
              pad)  # <- 7: Right padding
    del _cv, cls, clt, bC, qC
    #
    # Die nicely when needed width exceeds what's available
    if sum(widths) > os.get_terminal_size().columns:
        msg = ("Insufficient terminal width. Need %d more column(s)."
               % (sum(widths) - os.get_terminal_size().columns))
        out_futs["error"] = msg
        if manage_subs:
            out_futs["subs"] = await apply_many(client.unsubscribe_ticker,
                                                all_subs)
        return out_futs
    # Format string for actual line items
    fmt = "".join(("{_beg}{:%d}" % widths[0],
                   "{_sym}{base}{_sepl}{sep}{_sepr}{quote:<{quote_w}}",
                   "{_prc}{last:<%df}" % widths[2],
                   "{_vol}",
                   ("{volconv:>%d,.%df}%s" % (widths[3] - pad, vprec,
                                              " " * pad) if
                    VOL_UNIT else "{volume:<%df}" % widths[3]),
                   "{bid:<%df}" % widths[4],
                   "{ask:<%df}" % widths[5],
                   "{_chg}{change:>+%d.3%%}" % widths[6],
                   "{:%d}{_end}" % widths[7]))
    #
    _print_heading(client, (c_bg, c_fg), widths, len(ranked), volstr)
    #
    # One painter coroutine per row, serialized by a shared semaphore
    semaphore = asyncio.Semaphore(1)
    snapshots = {}
    coros = []
    for lnum, sym in enumerate(ranked):
        base = client.symbols[sym]["baseCurrency"]
        quote = client.symbols[sym]["quoteCurrency"]
        fmt_nudge = fmt.replace("{quote_w}", "%d" %
                                (len(quote) + sym_widths[0] - len(base) + pad))
        coros.append(_paint_ticker_line(
            lnum, sym, semaphore, snapshots, client.ticker, fmt_nudge,
            (c_bg, c_fg), (base, quote), wait=(0.1 * len(ranked)),
            pulse_over=(PULSE_OVER if PULSE else 100.0)
        ))
    # Should conversion pairs (all_subs) be included here if not displayed?
    ts_chk = _check_timestamps(all_subs, client, rt_sig_cb, STRICT_TIME)
    #
    gathered = asyncio.gather(*coros, ts_chk)
    #
    try:
        out_futs["gathered"] = await gathered
    # XXX this means ``gathered`` has been cancelled, but how would this
    # ever run? None of the signal handlers calls ``cancel()``. Seems a
    # holdover from early experimenting. Same for the check in ``rt_sig_cb``.
    except asyncio.CancelledError as e:
        out_futs["error"] = e
    finally:
        if manage_subs:
            client.echo("Unsubscribing", 6)
            out_futs["subs"] = await apply_many(client.unsubscribe_ticker,
                                                all_subs)
        if manage_sigs:
            add_sig_hands(["SIGINT"], callback=old_sigint_cb)
    return out_futs
async def main(loop, syms):
    """Open the exchange client and run the ticker over ``syms``.

    Uses the module-level ``VERBOSITY``, ``LOGFILE``, and ``USE_AIOHTTP``
    settings; returns ``do_run_ticker``'s result dict.
    """
    Client = HitBTCWebSocketsClient
    async with Client(VERBOSITY, LOGFILE, USE_AIOHTTP) as client:
        #
        rt_fut = do_run_ticker(syms, client, loop)
        # ``asyncio.CancelledError`` is not raised when interrupting
        # ``run_ticker`` with ^C. Seems like it's only raised by calling
        # ``Future.result()`` or "Future.cancel()" or ``Task.cancel()``
        return await rt_fut
def main_entry():
    """Console entry point: parse env vars and argv, then run the ticker.

    Mutates the module-level option globals from the environment, resolves
    the pair list (named args and/or top-``num`` by volume), manages cursor
    visibility via ``tput``, and drives the event loop until done.
    """
    global HAS_24, LOGFILE, PULSE, PULSE_OVER, HEADING, MAX_HEIGHT, \
        STRICT_TIME, VERBOSITY, VOL_SORTED, VOL_UNIT, USE_AIOHTTP
    #
    if sys.platform != 'linux':
        raise SystemExit("Sorry, but this probably only works on Linux")
    if sys.version_info < (3, 6):
        raise SystemExit("Sorry, but this thing needs Python 3.6+")
    #
    from enum import Enum

    class Headings(Enum):
        # Value doubles as the number of screen rows the heading consumes
        slim = 1
        normal = hr_over = hr_under = 2
        full = 3
    #
    VERBOSITY = int(os.getenv("VERBOSITY", VERBOSITY))
    USE_AIOHTTP = any(s == os.getenv("USE_AIOHTTP", str(USE_AIOHTTP)).lower()
                      for s in "yes true 1".split())
    HAS_24 = (
        any(s == os.getenv("COLORTERM", "") for s in ("24bit", "truecolor")) or
        any(s == os.getenv("HAS_24", str(HAS_24)).lower() for
            s in "24bit truecolor yes on true 1".split())
    )
    STRICT_TIME = any(s == os.getenv("STRICT_TIME", str(STRICT_TIME)).lower()
                      for s in "yes on true 1".split())
    PULSE = os.getenv("PULSE", PULSE)
    if PULSE.lower() in "0 off false no null none".split():
        PULSE = None
    PULSE_OVER = float(os.getenv("PULSE_OVER", PULSE_OVER))
    _heading = os.getenv("HEADING", HEADING)
    HEADING = (_heading if _heading in Headings.__members__ else HEADING)
    VOL_SORTED = any(s == os.getenv("VOL_SORTED", str(VOL_SORTED)).lower()
                     for s in "yes on true 1".split())
    VOL_UNIT = os.getenv("VOL_UNIT", VOL_UNIT)
    if VOL_UNIT.lower() in ("", "null", "none"):
        VOL_UNIT = None
    #
    num, syms = None, []
    # TODO combine this stuff with the max-rows check in do_run()
    MAX_HEIGHT = os.get_terminal_size().lines - Headings[HEADING].value
    if len(sys.argv) == 1:
        num = min(MAX_HEIGHT, SHOW_FIRST)
    elif sys.argv[1] in ("--help", "-h"):
        print(__doc__.partition("\nWarn")[0].partition("::\n")[-1])
        sys.exit()
    elif sys.argv[1].isdigit():
        num = int(sys.argv[1])
        syms = sys.argv[2:]
    else:
        syms = sys.argv[1:]
    if num:
        syms += _get_top_markets(num)
    if not syms:
        raise ValueError("Could not determine trading pairs to display")
    #
    loop = asyncio.get_event_loop()
    global add_sig_hands
    from functools import partial
    add_sig_hands = partial(add_async_sig_handlers, loop=loop)
    add_sig_hands("SIGINT SIGTERM".split())
    #
    # NOTE(review): logging only kicks in if the LOGFILE path already
    # exists (checked below) -- confirm that's intended
    LOGFILE = os.getenv("LOGFILE", None)
    #
    # Since this doesn't use curses, shell out to get cursor vis
    # escape sequences, if supported (absent in ansi and vt100).
    civis = cnorm = ""
    from subprocess import check_output
    try:
        civis = check_output(["tput", "civis"]).decode()
        cnorm = check_output(["tput", "cnorm"]).decode()
    except FileNotFoundError:
        pass
    else:
        print(civis, end="", flush=True)
    #
    if LOGFILE and os.path.exists(LOGFILE):
        from contextlib import redirect_stderr
        # Multi-context comma syntax doesn't scope left to right, so must nest:
        with open(os.getenv("LOGFILE"), "w") as LOGFILE:
            with redirect_stderr(LOGFILE):
                try:
                    ppj(loop.run_until_complete(main(loop, syms)),
                        file=LOGFILE)
                except RuntimeError as e:
                    if "loop stopped before Future completed" not in str(e):
                        raise
                finally:
                    # Restore cursor visibility and clear the line
                    print(cnorm, "\x1b[K")
    else:
        VERBOSITY = 3
        try:
            results = loop.run_until_complete(main(loop, syms))
        except RuntimeError as e:
            if "loop stopped before Future completed" not in str(e):
                raise
        else:
            if "error" in results:
                print(results["error"], file=sys.stderr)
            if "error" in results.get("gathered", {}):
                print(results["gathered"]["error"], file=sys.stderr)
        finally:
            # Restore cursor visibility and clear the line
            print(cnorm, "\x1b[K")
# Script entry point: propagate main_entry()'s return value as exit status
if __name__ == "__main__":
    sys.exit(main_entry())
# Copyright 2017 Jane Soko <boynamedjane@misled.ml>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| {"/terminal_coin_ticker/ticker.py": ["/terminal_coin_ticker/clients.py"]} |
73,740 | huylee13t2/beautiXiao | refs/heads/master | /env/bin/django-admin.py | #!/home/huylee/HuyLee/freelancer/beautiXiao/env/bin/python3
from django.core import management
# Delegate to Django's CLI dispatcher (same behavior as a project manage.py)
if __name__ == "__main__":
    management.execute_from_command_line()
| {"/app/forms.py": ["/app/models.py"], "/app/admin.py": ["/app/models.py"], "/app/views.py": ["/app/forms.py", "/app/models.py"]} |
73,741 | huylee13t2/beautiXiao | refs/heads/master | /app/forms.py | from django.forms import ModelForm
from app.models import Organisation, Person, Demo
class PostFormOrganisation(ModelForm):
    """ModelForm exposing the address/contact fields of ``Organisation``."""
    class Meta:
        model = Organisation
        fields = ['address_street', 'address_line1', 'address_line2', 'address_line3', 'city', 'region', 'zip_code', 'country', 'mobile_phone', 'mobile_work_phone', 'work_phone', 'email', 'alt_email', 'skype', 'telegram', 'viber', 'whatsapp']
class PostFormPerson(ModelForm):
    """ModelForm exposing name, organisation, and contact fields of ``Person``."""
    class Meta:
        model = Person
        fields = ['accountable_user', 'first_name', 'mid_name', 'last_name', 'organisation', 'address_street', 'address_line1', 'address_line2', 'address_line3', 'city', 'region', 'zip_code', 'country', 'mobile_phone', 'mobile_work_phone', 'work_phone', 'email', 'alt_email', 'skype', 'telegram', 'viber', 'whatsapp']
# class Meta:
# model = Person
# fields = "__all__"
# # fields = ('address_street', 'address_line1', 'address_line2', 'address_line3', 'city', 'region', 'zip_code', 'country', 'mobile_phone', 'mobile_work_phone', 'work_phone', 'email', 'alt_email', 'skype', 'telegram', 'viber', 'whatsapp',)
# # class PostFormOrganisation(forms.ModelForm):
# # class Meta:
# # model = Organisation
# # fields = ('accountable_user', 'update_author', 'first_name', 'mid_name', 'last_name', 'organisation', 'address_street', 'address_line1',
# # 'address_line2', 'address_line3', 'city', 'region', 'zip_code', 'country', 'mobile_phone', 'mobile_work_phone', 'work_phone', 'email',
# # 'alt_email', 'skype', 'telegram', 'viber', 'whatsapp',)
| {"/app/forms.py": ["/app/models.py"], "/app/admin.py": ["/app/models.py"], "/app/views.py": ["/app/forms.py", "/app/models.py"]} |
73,742 | huylee13t2/beautiXiao | refs/heads/master | /app/models.py | from django.db import models
from django.contrib.auth.models import User
class Organisation(models.Model):
    """Address/contact record for an organisation.

    NOTE(review): text fields use ``null=True`` without ``blank=True``;
    Django convention prefers ``blank=True`` (empty string) for string-based
    fields. ``email``/``alt_email`` are plain CharFields (no validation),
    and ``date_created`` is nullable rather than ``auto_now_add`` -- confirm
    whether callers are expected to set it explicitly.
    """
    # accountable_user = models.ForeignKey(User)
    date_created = models.DateTimeField(u'Date created', null=True)
    date_updated = models.DateTimeField(u'Date updated', auto_now=True, null=True)
    # update_author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True,)
    # first_name = models.CharField(u'User name', max_length=30)
    # mid_name = models.CharField(u'User name', max_length=30)
    # last_name = models.CharField(u'Last name', max_length=30)
    # organisation = models.ForeignKey(Organisations)
    address_street = models.CharField(u'Street', max_length=255, null=True)
    address_line1 = models.CharField(u'Address line 1', max_length=255, null=True)
    address_line2 = models.CharField(u'Address line 2', max_length=255, null=True)
    address_line3 = models.CharField(u'Address line 3', max_length=255, null=True)
    city = models.CharField(u'City', max_length=50, null=True)
    region = models.CharField(u'Region', max_length=50, null=True)
    zip_code = models.CharField(u'Zip code', max_length=50, null=True)
    country = models.CharField(max_length=255, blank=True, null=True)
    mobile_phone = models.CharField(u'Mobile phone', max_length=30, null=True)
    mobile_work_phone = models.CharField(u'Mobile work phone', max_length=30, null=True)
    work_phone = models.CharField(u'Work phone', max_length=30, null=True)
    email = models.CharField(u'E-mail', max_length=50, null=True)
    alt_email = models.CharField(u'Alternative E-mail', max_length=50, null=True)
    skype = models.CharField(u'Skype', max_length=50, null=True)
    telegram = models.CharField(u'Telegram', max_length=50, null=True)
    viber = models.CharField(u'Viber', max_length=50, null=True)
    whatsapp = models.CharField(u'WhatsApp', max_length=50, null=True)
    def __str__(self):
        # Identified by city -- the model defines no name field
        return u'%s' % self.city
class Person(models.Model):
    """Contact record for a person, optionally linked to an Organisation.

    NOTE(review): the ``ForeignKey`` fields without an explicit
    ``on_delete`` rely on the pre-Django-2.0 default (CASCADE); text fields
    use ``null=True`` without ``blank=True`` (Django convention prefers
    ``blank=True`` for string-based fields) -- confirm before migrating.
    """
    accountable_user = models.ForeignKey(User, related_name="person_user", null=True)
    date_created = models.DateTimeField(u'Date created', null=True)
    date_updated = models.DateTimeField(u'Date updated', auto_now=True, null=True)
    update_author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True,)
    first_name = models.CharField(u'First name', max_length=30, null=True)
    mid_name = models.CharField(u'Middle name', max_length=30, null=True)
    last_name = models.CharField(u'Last name', max_length=30, null=True)
    organisation = models.ForeignKey(Organisation, null=True)
    address_street = models.CharField(u'Street', max_length=255, null=True)
    address_line1 = models.CharField(u'Address line 1', max_length=255, null=True)
    address_line2 = models.CharField(u'Address line 2', max_length=255, null=True)
    address_line3 = models.CharField(u'Address line 3', max_length=255, null=True)
    city = models.CharField(u'City', max_length=50, null=True)
    region = models.CharField(u'Region', max_length=50, null=True)
    zip_code = models.CharField(u'Zip code', max_length=50, null=True)
    country = models.CharField(max_length=255, null=True, blank=True)
    mobile_phone = models.CharField(u'Mobile phone', max_length=30, null=True)
    mobile_work_phone = models.CharField(u'Mobile work phone', max_length=30, null=True)
    work_phone = models.CharField(u'Work phone', max_length=30, null=True)
    email = models.CharField(u'E-mail', max_length=50, null=True)
    alt_email = models.CharField(u'Alternative E-mail', max_length=50, null=True)
    skype = models.CharField(u'Skype', max_length=50, null=True)
    telegram = models.CharField(u'Telegram', max_length=50, null=True)
    viber = models.CharField(u'Viber', max_length=50, null=True)
    whatsapp = models.CharField(u'WhatsApp', max_length=50, null=True)
    def __str__(self):
        # Identified by first name only
        return u'%s' % self.first_name
class Demo(models.Model):
    """Minimal demo model with a single optional title field."""
    title = models.CharField(max_length=255, null=True)
73,743 | huylee13t2/beautiXiao | refs/heads/master | /app/admin.py | from django.contrib import admin
from app.models import Person, Organisation
class PersonAdmin(admin.ModelAdmin):
    """Changelist config for Person: show only the first name."""
    list_display = ['first_name',]
class OrganisationAdmin(admin.ModelAdmin):
    """Changelist config for Organisation: show only the city."""
    list_display = ['city',]
# Expose both models in the Django admin site
admin.site.register(Person, PersonAdmin)
admin.site.register(Organisation, OrganisationAdmin)
| {"/app/forms.py": ["/app/models.py"], "/app/admin.py": ["/app/models.py"], "/app/views.py": ["/app/forms.py", "/app/models.py"]} |
73,744 | huylee13t2/beautiXiao | refs/heads/master | /app/views.py | from django.shortcuts import render, redirect
from app.forms import PostFormPerson, PostFormOrganisation
from app.models import Organisation, Person
import datetime
from django.utils import timezone
def index(request):
    """Render the landing page with every Person and Organisation."""
    context = {
        'person': Person.objects.all(),
        'orgs': Organisation.objects.all(),
    }
    return render(request, 'index.html', context)
def form_person(request):
    """Show the Person creation form.

    On a valid POST the record is stamped with the current time and saved,
    then the user is redirected to the index. Otherwise (GET, or invalid
    POST) the form is rendered -- bound with errors after an invalid POST.
    """
    if request.method != 'POST':
        # GET: unbound form.
        return render(request, 'form.html', {'form': PostFormPerson()})
    form = PostFormPerson(request.POST)
    if form.is_valid():
        post = form.save(commit=False)
        post.published_date = timezone.now()
        post.save()
        return redirect('index')
    # Invalid POST: re-render with validation errors.
    return render(request, 'form.html', {'form': form})
def form_org(request):
    """Show the Organisation creation form.

    Mirrors form_person: valid POST saves (with a publish timestamp) and
    redirects to the index; otherwise the form is rendered.
    """
    if request.method != 'POST':
        # GET: unbound form.
        return render(request, 'form.html', {'form': PostFormOrganisation()})
    form = PostFormOrganisation(request.POST)
    if form.is_valid():
        post = form.save(commit=False)
        post.published_date = timezone.now()
        post.save()
        return redirect('index')
    # Invalid POST: re-render with validation errors.
    return render(request, 'form.html', {'form': form})
73,745 | huylee13t2/beautiXiao | refs/heads/master | /app/migrations/0002_auto_20170804_1604.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-04 16:04
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration.

    Renames ``name`` -> ``country`` on both models and adds the
    address/contact/audit columns plus the Person foreign keys.

    NOTE(review): generated by Django 1.11 -- do not hand-edit operations.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('app', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='organisation',
            old_name='name',
            new_name='country',
        ),
        migrations.RenameField(
            model_name='person',
            old_name='name',
            new_name='country',
        ),
        migrations.AddField(
            model_name='organisation',
            name='address_line1',
            field=models.CharField(max_length=255, null=True, verbose_name='Address line 1'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='address_line2',
            field=models.CharField(max_length=255, null=True, verbose_name='Address line 2'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='address_line3',
            field=models.CharField(max_length=255, null=True, verbose_name='Address line 3'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='address_street',
            field=models.CharField(max_length=255, null=True, verbose_name='Street'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='alt_email',
            field=models.CharField(max_length=50, null=True, verbose_name='Alternative E-mail'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='city',
            field=models.CharField(max_length=50, null=True, verbose_name='City'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='date_created',
            field=models.DateTimeField(null=True, verbose_name='Date created'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='date_updated',
            field=models.DateTimeField(auto_now=True, null=True, verbose_name='Date updated'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='email',
            field=models.CharField(max_length=50, null=True, verbose_name='E-mail'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='mobile_phone',
            field=models.CharField(max_length=30, null=True, verbose_name='Mobile phone'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='mobile_work_phone',
            field=models.CharField(max_length=30, null=True, verbose_name='Mobile work phone'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='region',
            field=models.CharField(max_length=50, null=True, verbose_name='Region'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='skype',
            field=models.CharField(max_length=50, null=True, verbose_name='Skype'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='telegram',
            field=models.CharField(max_length=50, null=True, verbose_name='Telegram'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='viber',
            field=models.CharField(max_length=50, null=True, verbose_name='Viber'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='whatsapp',
            field=models.CharField(max_length=50, null=True, verbose_name='WhatsApp'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='work_phone',
            field=models.CharField(max_length=30, null=True, verbose_name='Work phone'),
        ),
        migrations.AddField(
            model_name='organisation',
            name='zip_code',
            field=models.CharField(max_length=50, null=True, verbose_name='Zip code'),
        ),
        migrations.AddField(
            model_name='person',
            name='accountable_user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='person_user', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='person',
            name='address_line1',
            field=models.CharField(max_length=255, null=True, verbose_name='Address line 1'),
        ),
        migrations.AddField(
            model_name='person',
            name='address_line2',
            field=models.CharField(max_length=255, null=True, verbose_name='Address line 2'),
        ),
        migrations.AddField(
            model_name='person',
            name='address_line3',
            field=models.CharField(max_length=255, null=True, verbose_name='Address line 3'),
        ),
        migrations.AddField(
            model_name='person',
            name='address_street',
            field=models.CharField(max_length=255, null=True, verbose_name='Street'),
        ),
        migrations.AddField(
            model_name='person',
            name='alt_email',
            field=models.CharField(max_length=50, null=True, verbose_name='Alternative E-mail'),
        ),
        migrations.AddField(
            model_name='person',
            name='city',
            field=models.CharField(max_length=50, null=True, verbose_name='City'),
        ),
        migrations.AddField(
            model_name='person',
            name='date_created',
            field=models.DateTimeField(null=True, verbose_name='Date created'),
        ),
        migrations.AddField(
            model_name='person',
            name='date_updated',
            field=models.DateTimeField(auto_now=True, null=True, verbose_name='Date updated'),
        ),
        migrations.AddField(
            model_name='person',
            name='email',
            field=models.CharField(max_length=50, null=True, verbose_name='E-mail'),
        ),
        # NOTE(review): first_name/mid_name both carry verbose_name 'User name'
        # here; corrected later by migration 0004.
        migrations.AddField(
            model_name='person',
            name='first_name',
            field=models.CharField(max_length=30, null=True, verbose_name='User name'),
        ),
        migrations.AddField(
            model_name='person',
            name='last_name',
            field=models.CharField(max_length=30, null=True, verbose_name='Last name'),
        ),
        migrations.AddField(
            model_name='person',
            name='mid_name',
            field=models.CharField(max_length=30, null=True, verbose_name='User name'),
        ),
        migrations.AddField(
            model_name='person',
            name='mobile_phone',
            field=models.CharField(max_length=30, null=True, verbose_name='Mobile phone'),
        ),
        migrations.AddField(
            model_name='person',
            name='mobile_work_phone',
            field=models.CharField(max_length=30, null=True, verbose_name='Mobile work phone'),
        ),
        migrations.AddField(
            model_name='person',
            name='organisation',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='app.Organisation'),
        ),
        migrations.AddField(
            model_name='person',
            name='region',
            field=models.CharField(max_length=50, null=True, verbose_name='Region'),
        ),
        migrations.AddField(
            model_name='person',
            name='skype',
            field=models.CharField(max_length=50, null=True, verbose_name='Skype'),
        ),
        migrations.AddField(
            model_name='person',
            name='telegram',
            field=models.CharField(max_length=50, null=True, verbose_name='Telegram'),
        ),
        migrations.AddField(
            model_name='person',
            name='update_author',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='person',
            name='viber',
            field=models.CharField(max_length=50, null=True, verbose_name='Viber'),
        ),
        migrations.AddField(
            model_name='person',
            name='whatsapp',
            field=models.CharField(max_length=50, null=True, verbose_name='WhatsApp'),
        ),
        migrations.AddField(
            model_name='person',
            name='work_phone',
            field=models.CharField(max_length=30, null=True, verbose_name='Work phone'),
        ),
        migrations.AddField(
            model_name='person',
            name='zip_code',
            field=models.CharField(max_length=50, null=True, verbose_name='Zip code'),
        ),
    ]
| {"/app/forms.py": ["/app/models.py"], "/app/admin.py": ["/app/models.py"], "/app/views.py": ["/app/forms.py", "/app/models.py"]} |
73,746 | huylee13t2/beautiXiao | refs/heads/master | /app/urls.py | from django.conf.urls import url
from app import views
# URL routes: landing page plus the two model-creation forms.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^form-person$', views.form_person, name='form_person'),
    url(r'^form-org$', views.form_org, name='form_org'),
]
| {"/app/forms.py": ["/app/models.py"], "/app/admin.py": ["/app/models.py"], "/app/views.py": ["/app/forms.py", "/app/models.py"]} |
73,747 | huylee13t2/beautiXiao | refs/heads/master | /app/migrations/0004_auto_20170805_0514.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-05 05:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: fix the copy-pasted verbose_name on Person name fields
    ('User name' -> 'First name' / 'Middle name'). Do not hand-edit."""

    dependencies = [
        ('app', '0003_demo'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='first_name',
            field=models.CharField(max_length=30, null=True, verbose_name='First name'),
        ),
        migrations.AlterField(
            model_name='person',
            name='mid_name',
            field=models.CharField(max_length=30, null=True, verbose_name='Middle name'),
        ),
    ]
| {"/app/forms.py": ["/app/models.py"], "/app/admin.py": ["/app/models.py"], "/app/views.py": ["/app/forms.py", "/app/models.py"]} |
73,757 | DeRaafMedia/ProjectIRCInteractivity | refs/heads/master | /SerialPort.py | __author__ = 'DeRaaf'
# TODO Clean up comments. Fix bugs. On going project!
import serial
class SerialPort(object):
    """Thin wrapper around :class:`serial.Serial` that keeps the connection
    parameters together with an application-level identifier.

    NOTE(review): several methods below are stubs that return their own name
    instead of touching the port (the real pySerial calls are commented out).
    That behaviour is preserved -- callers currently rely on the stub strings.
    """

    def __init__(self,
                 serial_port,
                 baud_rate,
                 time_out,
                 serial_port_id):
        """
        serial_port -> device path, e.g. '/dev/ttyUSB0'
        baud_rate -> e.g. 115200
        time_out -> read/write timeout in seconds
        serial_port_id -> application-level name for this port

        Opens the device immediately; raises serial.SerialException if the
        port cannot be opened.
        """
        self.serial_port = serial_port
        self.baud_rate = baud_rate
        self.time_out = time_out
        self.serial_port_id = serial_port_id
        self.serializer = serial.Serial(port=self.serial_port,
                                        baudrate=self.baud_rate,
                                        timeout=self.time_out,
                                        writeTimeout=self.time_out)

    def __str__(self):
        # BUGFIX: the original omitted the '\n' between the 'Time Out' and
        # 'Serial Port Id' lines, mashing them onto one display line.
        return '\n\nSerial port : {0}\n' \
               'Baud Rate : {1}\n' \
               'Time Out : {2}\n' \
               'Serial Port Id : {3}' \
               '\n\n'.format(self.serial_port,
                             self.baud_rate,
                             self.time_out,
                             self.serial_port_id)

    def __getattr__(self, name):
        # BUGFIX: Python calls __getattr__(self, name); the original left out
        # the ``name`` parameter, so *every* missing-attribute lookup raised
        # TypeError instead of returning the fallback string. (Returning a
        # string here still masks AttributeError -- kept for compatibility.)
        return 'Not found'

    def connect_serial_port(self):
        """Open and return a fresh serial.Serial with this object's settings."""
        return serial.Serial(port=self.serial_port,
                             baudrate=self.baud_rate,
                             timeout=self.time_out,
                             writeTimeout=self.time_out)

    def open_serial_port(self):
        """Open the serial port (stub -- returns its name).

        Not needed in normal use: push_data/pull_data use the already-open
        ``serializer``.
        """
        return 'open_serial_port'
        #self.serializer.open()

    def close_serial_port(self):
        """Close the serial port (stub -- returns its name)."""
        return 'close_serial_port'
        #self.serializer.close()

    def push_data(self,
                  serial_tx):
        """Write ``serial_tx`` (a string) to the device.

        This is the only method that really talks to the hardware.
        """
        self.serializer.write(serial_tx)

    def pull_data(self):
        """Read a line from the device (stub -- returns its name)."""
        return 'pull_data'
        #return self.serializer.readline()

    def serial_flush(self):
        """Flush both serial buffers after a transfer (stub)."""
        return 'serial_flush'
        #self.serializer.flush()

    def serial_flush_in(self):
        """Flush the incoming (rx) buffer (stub)."""
        return 'serial_flush_in'
        #self.serializer.flushInput()

    def serial_flush_out(self):
        """Flush the outgoing (tx) buffer (stub)."""
        return 'serial_flush_out'
73,758 | DeRaafMedia/ProjectIRCInteractivity | refs/heads/master | /IRCBot.py | __author__ = 'DeRaaf'
# TODO Clean up comments. Fix bugs. On going project!
import socket
from time import sleep
from Utilities import *
# Discover the dynamic skill modules under skills/ *before* the wildcard
# import below pulls them in (load_skills_init populates the package).
load_imports = Utilities()
load_imports.load_skills_init('skills/')
from skills import *
class IRCBot(object):
    """IRC chat bot bridging an IRC channel to a serial-attached physical
    device (e.g. an Arduino), driven by CSV 'Brain' rules.

    Pipeline: listen -> think -> speak / chat_speak / act / feel, each with a
    small task queue so work is handled one item at a time.
    """

    def __init__(self,
                 irc_network,
                 irc_port,
                 irc_channel,
                 irc_bot_nick,
                 irc_bot_owner,
                 physical_device_id,
                 serial_port_id):
        """
        irc_network -> Give address of IRC chat as quoted string (i.e 'irc.freenode.net')
        irc_port -> Give the port number which IRC is using (i.e 6667)
        irc_channel -> Give the name of the IRC channel as quoted string (i.e '#FooChannel')
        irc_bot_nick -> Give a name to the IRC bot. This name needs to be same as you used for it's 'Brain'. But replace
                        underscores (_) with spaces if you use them (better not!). i.e the Brain.cvs is called
                        Robby_The_Robot.cvs the robot name has to be 'Robby the Robot'. Upper/lower cases are important!
        irc_bot_owner -> The name that you use as a IRC handle. Feature is handy when you want to invent something where
                         two bots battle in the name of it's owners or something
        # TODO buy some other physical devices (MSP or something) to see how this code can be made more universal
        physical_device_id -> The id of the physical device you connected (i.e arduino_1)
        serial_port_id -> The id of the serial device you created
        Set the IRC variables and create the IRC socket object
        Set the physical device variables
        :param irc_network:
        :param irc_port:
        :param irc_channel:
        :param irc_bot_nick:
        :param irc_bot_owner:
        :param physical_device_id:
        :param serial_port_id:
        """
        self.utility = Utilities()
        # Per-stage work queues; each *_function consumes index 0.
        self.think_tasks_array = []
        self.speak_tasks_array = []
        self.chat_speak_array = []
        self.act_tasks_array = []
        self.feel_tasks_array = []
        self.combined_tasks_array = []
        self.irc_network = irc_network
        self.irc_port = irc_port
        self.irc_channel = irc_channel
        self.irc_bot_nick = irc_bot_nick
        self.irc_bot_owner = irc_bot_owner
        self.irc_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.physical_device = physical_device_id
        # Connection parameters copied from the SerialPort object.
        self.serial_port = serial_port_id.serial_port
        self.baud_rate = serial_port_id.baud_rate
        self.time_out = serial_port_id.time_out
        # Held False until the IRC server's /NAMES header has been consumed.
        self.start_thinking = False
        # self.utility.initiate_preference()

    def __str__(self):
        # NOTE(review): self.irc_bot_voice is never assigned in __init__, and
        # the format string skips index {7} while referencing {10} with only
        # ten arguments supplied -- str(bot) will raise (IndexError, or
        # TypeError via the broken __getattr__ below). Left unchanged here.
        return '\n\nIRC Network: {0}\n' \
               'IRC Port: {1}\n' \
               'IRC Channel: {2}\n' \
               'IRC NickName: {3}\n' \
               'IRC Bot Owner: {4}\n' \
               'IRC Bot Voice : {5}' \
               'Physical Device: {6}\n' \
               'Serial Port: {8}\n' \
               'Baud Rate: {9}\n' \
               'Time Out: {10}\n' \
               '\n\n'.format(self.irc_network,
                             self.irc_port,
                             self.irc_channel,
                             self.irc_bot_nick,
                             self.irc_bot_owner,
                             self.irc_bot_voice,
                             self.physical_device,
                             self.serial_port,
                             self.baud_rate,
                             self.time_out)

    def __getattr__(self):
        # NOTE(review): Python invokes __getattr__(self, name); the missing
        # ``name`` parameter makes every missing-attribute lookup raise
        # TypeError instead of returning this fallback string.
        return '{0}'.format('Not Found')

    def irc_connect(self):
        """
        Connect to give IRC channel
        :rtype : object
        """
        self.irc_socket.connect((self.irc_network, self.irc_port))
        # NOTE(review): this tests the raw preference value for truthiness,
        # while the rest of the class compares against the string 'yes' --
        # a non-empty 'no' would also enable logging here.
        if self.utility.get_preference_value('chat_log_enabled'):
            self.utility.create_chat_log(self.irc_bot_nick)
            self.utility.write_chat_log(self.irc_socket.recv(4096).strip()+'\n')
        else:
            pass

    def get_born(self):
        """
        Connects IRCBot to IRC and let's make itself know
        """
        # Standard IRC handshake: NICK, USER, JOIN, then a greeting.
        self.irc_connect()
        self.irc_socket.send('NICK ' + self.irc_bot_nick + '\r\n')
        self.irc_socket.send('USER ' + self.irc_bot_nick + ' 0 * :' + self.irc_bot_owner + '\r\n')
        self.irc_socket.send('JOIN ' + self.irc_channel + '\r\n')
        self.irc_socket.send('PRIVMSG ' + self.irc_channel + ' :Hello World. My name is ' + self.irc_bot_nick + '\r\n')

    def survive(self,
                conversation):
        """
        conversation -> Passed from listen function
        PING response function. Because every PING deserves a appropriate PONG response
        :param conversation:
        :rtype : object
        """
        self.irc_socket.send('PONG ' + conversation.split()[1] + '\r\n')
        print('PONG') # TEST PRINT

    def listen(self,
               as_thread,
               as_daemon):
        """
        as_thread -> 'yes' if this method needs to be executed as a thread, 'no' if it doesn't
        as_daemon -> 'yes' if this method needs to be executed as a daemon, 'no' if it doesn't
        To thread or not to thread was the question. I wrote in threading functionality for future use (maybe starting
        today???). Default behaviour is to start functions as procedural (inline with main script) processes.
        :param as_thread:
        :param as_daemon:
        :rtype : object
        """
        if as_thread == 'yes':
            if as_daemon == 'yes':
                self.utility.new_thread('yes', self.listen_function, 'none')
            else:
                self.utility.new_thread('no', self.listen_function, 'none')
        else:
            self.listen_function()

    def listen_function(self):
        """
        Puts the individual IRC conversation in a task list so the think function can handle it one 'thought' at a time.
        :return:
        """
        self.get_born()
        # Blocking receive loop: answer PINGs, queue everything else.
        while True:
            conversation = self.irc_socket.recv(4096)
            if conversation.find('PING') != -1:
                self.survive(conversation)
            else:
                if conversation:
                    sleep(0.2)
                    cleaned_conversation = self.utility.parse_irc_chat(conversation)
                    # TODO Need some way's to do some fun stuff with this!
                    # print(cleaned_conversation[0])
                    # print(cleaned_conversation[1])
                    # print(' '.join(cleaned_conversation[2]))
                    # print(cleaned_conversation[3])
                    self.think('no', 'no', [cleaned_conversation[0], cleaned_conversation[3]])

    def think(self,
              as_thread,
              as_daemon,
              conversation):
        """
        as_thread -> 'yes' if this method needs to be executed as a thread, 'no' if it doesn't
        as_daemon -> 'yes' if this method needs to be executed as a daemon, 'no' if it doesn't
        To thread or not to thread was the question. I wrote in threading functionality for future use (maybe starting
        today???). Default behaviour is to start functions as procedural (inline with main script) processes.
        :param as_thread:
        :param as_daemon:
        :return:
        """
        # Put the received conversation into the thinking task array
        self.think_tasks_array.append(conversation)
        if as_thread == 'yes':
            if as_daemon == 'yes':
                self.utility.new_thread('yes', self.think_function, 'none')
            else:
                self.utility.new_thread('no', self.think_function, 'none')
        else:
            self.think_function()

    def think_function(self):
        """
        Takes the think tasks list from the listen function and checks them against
        a CSV file with keywords and their appropriate response.
        Brain.csv is to be placed in the
        'appFolder -> brains -> <irc_bot_nick> (With underscores for spaces i.e Robby_The_Robot) -> Brain.csv'
        :return:
        """
        # NOTE(review): elements are deleted from think_tasks_array while it
        # is being iterated -- this only works because at most one task is
        # queued per (synchronous) call.
        # Loop through the thinking task array
        for task in self.think_tasks_array:
            # Send the sentence received to a utility that check for the keyword to toggle chat on/off
            if self.utility.set_toggle_state(str(task[1]), self.irc_bot_nick, 0):
                self.irc_socket.send('PRIVMSG '
                                     + self.irc_channel
                                     + ' : Log chat : '
                                     + self.utility.get_preference_value('chat_log_enabled')
                                     + '\r\n')
            # Send the sentence received to a utility that check for the keyword to toggle voice on/off
            if self.utility.set_toggle_state(str(task[1]), self.irc_bot_nick, 1):
                self.irc_socket.send('PRIVMSG '
                                     + self.irc_channel
                                     + ' : Voice : '
                                     + self.utility.get_preference_value('voice_enabled')
                                     + '\r\n')
            # Send the sentence received to a utility that check for the keyword to toggle chat on/off
            if self.utility.set_toggle_state(str(task[1]), self.irc_bot_nick, 2):
                self.irc_socket.send('PRIVMSG '
                                     + self.irc_channel
                                     + ' : Chat voice : '
                                     + self.utility.get_preference_value('chat_voice_enabled')
                                     + '\r\n')
            # Send the sentence received to a utility that check for the keyword to toggle chat on/off
            if self.utility.set_toggle_state(str(task[1]), self.irc_bot_nick, 3):
                self.irc_socket.send('PRIVMSG '
                                     + self.irc_channel
                                     + ' : Nick voice : '
                                     + self.utility.get_preference_value('announcement_voice_enabled')
                                     + '\r\n')
            # If chat logging is enabled write the sentence to the file
            if self.utility.get_preference_value('chat_log_enabled') == 'yes':
                self.utility.write_chat_log(task)
            else:
                pass
            # Only start 'thinking' if al the header information from the IRC is received (otherwise headache!)
            if task[1].find('End of /NAMES list') != -1:
                if not self.start_thinking:
                    self.start_thinking = True
            # Start the 'thinking' function
            if self.start_thinking:
                self.chat_speak('no', 'no', task)
                # Little bit of a hackish solution to deal with empty strings coming from the IRC. It works for now
                execute = self.utility.check_conversation(task[1], self.irc_bot_nick)
                if execute:
                    # TODO Scaffolding code for what to do with directed messages
                    if execute[1] == 'yes':
                        pass
                    self.speak('yes', 'no', execute[2], execute[3])
                    # If a message is received that calls for an serial action
                    if execute[7] != 'no':
                        self.act('yes', 'no', execute[7], self.serial_port, self.baud_rate, self.time_out, execute[8])
                    # If a message is received that calls for to listen to a serial function
                    if execute[4] != 'no':
                        self.feel('yes', 'no', execute[4], self.serial_port, self.baud_rate, self.time_out, execute[5])
            # Clear this thinking task from the array
            del self.think_tasks_array[0]

    def speak(self,
              as_thread,
              as_daemon,
              conversation,
              voice):
        """
        as_thread -> 'yes' if this method needs to be executed as a thread, 'no' if it doesn't
        as_daemon -> 'yes' if this method needs to be executed as a daemon, 'no' if it doesn't
        voice -> A voice name used on Mac OSX system as a quoted string. Gets passed from Brain.csv (i.e 'Alex')
        Takes a string from the think function and sends it to IRC
        :param as_thread:
        :param as_daemon:
        :param voice:
        :return:
        """
        # Put the conversation received into the speak_task_array
        self.speak_tasks_array.append(conversation)
        if as_thread == 'yes':
            if as_daemon == 'yes':
                self.utility.new_thread('yes', self.speak_function, voice)
            else:
                self.utility.new_thread('no', self.speak_function, voice)
        else:
            self.speak_function(voice)

    def speak_function(self,
                       voice):
        """
        voice -> A voice name used on Mac OSX system as a quoted string. Gets passed from Brain.csv (i.e 'Alex')
        Executes the speak function (the communication coming from itself) if enabled as an parallel (threaded)
        object to main script
        :param voice:
        :return:
        """
        # TODO Implement code in Utilities
        # NOTE(review): same delete-while-iterating pattern as think_function.
        for task in self.speak_tasks_array:
            if self.utility.get_preference_value('voice_enabled') == 'yes':
                self.irc_socket.send('PRIVMSG ' + self.irc_channel + ' : ' + str(task) + '\r\n')
                if self.utility.get_preference_value('chat_log_enabled') == 'yes':
                    self.utility.write_chat_log(['RESPONSE -> ' + self.irc_bot_nick, str(task) + '\r\n'])
                else:
                    pass
                self.utility.speak(voice, task)
            else:
                self.irc_socket.send('PRIVMSG ' + self.irc_channel + ' : ' + str(task) + '\r\n')
                if self.utility.get_preference_value('chat_log_enabled') == 'yes':
                    self.utility.write_chat_log(['RESPONSE -> ' + self.irc_bot_nick, str(task) + '\r\n'])
                else:
                    pass
            # Remove task from the speak task array
            del self.speak_tasks_array[0]
        return

    def chat_speak(self,
                   as_thread,
                   as_daemon,
                   conversation):
        """
        as_thread -> 'yes' if this method needs to be executed as a thread, 'no' if it doesn't
        as_daemon -> 'yes' if this method needs to be executed as a daemon, 'no' if it doesn't
        conversation -> (i.e 'Hello world') This text is meant as a raw IRC text parser. I strips the IRC stuff from it
        Takes a string from the think function and sends it to IRC
        :param as_thread:
        :param as_daemon:
        :param conversation:
        :return:
        """
        # Put chat speak task in the speak chat array
        self.chat_speak_array.append(conversation)
        if as_thread == 'yes':
            # NOTE(review): self.thread is never defined anywhere in this
            # class (other methods use self.utility.new_thread) -- both
            # threaded branches would raise AttributeError if taken.
            if as_daemon == 'yes':
                self.thread.new_thread('yes', self.chat_speak_function)
            else:
                self.thread.new_thread('no', self.chat_speak_function)
        else:
            self.chat_speak_function()

    def chat_speak_function(self):
        """
        Executes the chat speak (the communication coming from others) if enabled as an inline object to main script
        :return:
        """
        for task in self.chat_speak_array:
            if self.utility.get_preference_value('announcement_voice_enabled') == 'yes':
                nick_speaker = task[0].split('!')
                self.utility.speak(self.utility.get_preference_value('announcement_voice'), str(nick_speaker[0]))
                sleep(0.15)
            else:
                pass
            # NOTE(review): reads the attribute chat_voice_enabled directly
            # from Utilities, unlike the get_preference_value() lookups used
            # everywhere else -- confirm both stay in sync.
            if self.utility.chat_voice_enabled == 'yes':
                speakers_sentence = task[1]
                sleep(0.6)
                # TODO make voice IRC user selectable would be a nice feature!!
                self.utility.speak(self.utility.get_preference_value('voice'), speakers_sentence)
            # Remove task form array
            del self.chat_speak_array[0]

    def act(self,
            as_thread,
            as_daemon,
            action,
            serial_port,
            baud_rate,
            time_out,
            action_parameter):
        """
        as_thread -> 'yes' if this method needs to be executed as a thread, 'no' if it doesn't
        as_daemon -> 'yes' if this method needs to be executed as a daemon, 'no' if it doesn't
        serial_port -> The serial port that it is given to be used
        baud_rate -> The baud_rate to be used
        time_out -> The serial port time out to be used
        action -> The name of the act action given in the Brain.csv file (i.e blink_pretty)
        action_parameter -> Place holder for secondary parameter None if there aren't any (i.e None)
        Takes a string from the think function converts it to an array and send it to act_function for execution.
        :param as_thread:
        :param as_daemon:
        :param serial_port:
        :param baud_rate:
        :param time_out:
        :param action:
        :param action_parameter:
        :return:
        """
        # Put task inside act tasks array
        self.act_tasks_array.append([action, serial_port, baud_rate, time_out, action_parameter])
        if as_thread == 'yes':
            # NOTE(review): new_thread is called with two arguments here but
            # with three ('none' trailing) in listen/think/speak -- confirm
            # the Utilities.new_thread signature allows both.
            if as_daemon == 'yes':
                self.utility.new_thread('yes', self.act_function)
            else:
                self.utility.new_thread('no', self.act_function)
        else:
            self.act_function()

    def act_function(self):
        """
        Takes the act action from think function and passes it through to the skills class
        :return:
        """
        # NOTE(review): builds and exec's source text from Brain.csv fields --
        # a code-injection risk if the CSV is not trusted; also Python-2-only
        # ``exec`` statement syntax.
        for task in self.act_tasks_array:
            execute = ''\
                      + str(task[0]) + '.' + str(task[0])\
                      + '("' + str(task[1])\
                      + '", ' + str(task[2])\
                      + ', ' + str(task[3])\
                      + ', ' + str(task[4])\
                      + ')'
            exec execute
            # Remove the act task from the array
            del self.act_tasks_array[0]

    def feel(self,
             as_thread,
             as_daemon,
             action,
             serial_port,
             baud_rate,
             time_out,
             action_parameter):
        """
        as_thread -> 'yes' if this method needs to be executed as a thread, 'no' if it doesn't
        as_daemon -> 'yes' if this method needs to be executed as a daemon, 'no' if it doesn't
        serial_port -> The serial port that it is given to be used
        baud_rate -> The baud_rate to be used
        time_out -> The serial port time out to be used
        action -> The name of the act action given in the Brain.csv file (i.e blink_pretty)
        action_parameter -> Place holder for secondary parameter None if there aren't any (i.e None)
        Takes a string from the think function converts it to an array and send it to feel_function for execution.
        :param as_thread:
        :param as_daemon:
        :param serial_port:
        :param baud_rate:
        :param time_out:
        :param action:
        :param action_parameter:
        :return:
        """
        self.feel_tasks_array.append([action, serial_port, baud_rate, time_out, action_parameter])
        if as_thread == 'yes':
            # NOTE(review): same two-argument new_thread call as in act().
            if as_daemon == 'yes':
                self.utility.new_thread('yes', self.feel_function)
            else:
                self.utility.new_thread('no', self.feel_function)
        else:
            self.feel_function()

    def feel_function(self):
        """
        Takes the feel action from think function and passes it through to the skills class (skills)
        :return:
        """
        # NOTE(review): same exec-of-built-string pattern (and risk) as
        # act_function.
        for task in self.feel_tasks_array:
            # TODO double code!
            execute = ''\
                      + str(task[0]) + '.' + str(task[0])\
                      + '("' + str(task[1])\
                      + '", ' + str(task[2])\
                      + ', ' + str(task[3])\
                      + ', ' + str(task[4])\
                      + ')'
            exec execute
            del self.feel_tasks_array[0]
73,759 | DeRaafMedia/ProjectIRCInteractivity | refs/heads/master | /main.py | __author__ = 'DeRaaf'
"""
EXAMPLE
"""
"""
These are the minimum on classes you need to import. If you want to hook up other devices than an Arduino take the
Arduino class (Ardiuno.py) as template to create a physical device with which this software can speak
"""
import sys
from SerialPort import *
from Arduino import *
from IRCBot import *
def main(arg_1,   # serial_port
         arg_2,   # baud_rate
         arg_3,   # time_out
         arg_4,   # serial_port_id
         arg_5,   # physical_device_id
         arg_6,   # bot_name
         arg_7,   # bot_owner
         arg_8,   # irc_network
         arg_9,   # irc_port
         arg_10): # irc_channel
    """Wire up the serial port, the physical device and the IRC bot, then
    start listening (blocking).

    :param arg_1: serial device path, e.g. '/dev/ttyUSB0'
    :param arg_2: baud rate as a string, e.g. '115200'
    :param arg_3: serial timeout in seconds as a string, e.g. '2'
    :param arg_4: name for the SerialPort instance, e.g. 'serial_port_1'
    :param arg_5: name for the physical device, e.g. 'arduino_1'
    :param arg_6: bot nick (must match the Brain.csv folder name)
    :param arg_7: IRC handle of the bot owner
    :param arg_8: IRC network, e.g. 'irc.freenode.net'
    :param arg_9: IRC port as a string, e.g. '6667'
    :param arg_10: IRC channel, e.g. '#YourChannel'

    SECURITY/BUGFIX: the original built Python source strings from these
    command-line arguments and ran them with the (Python-2-only) ``exec``
    statement -- arbitrary code injection via argv. The objects are now
    constructed directly. Behaviour is otherwise identical: the exec'd code
    interpolated baud rate, timeout and IRC port as bare numeric literals,
    hence the int() conversions below.
    """
    # serial_port / baud_rate / time_out / serial_port_id
    serial_port_obj = SerialPort(arg_1, int(arg_2), int(arg_3), arg_4)
    # Physical device wrapping the serial port (arg_5 = physical_device_id).
    physical_device = Arduino(serial_port_obj, arg_5)
    # The bot itself: network, port, channel, nick, owner, device, port.
    bot = IRCBot(arg_8, int(arg_9), arg_10, arg_6, arg_7,
                 physical_device, serial_port_obj)
    # Run the listen loop inline (not threaded, not daemonized) -- blocks.
    bot.listen('no', 'no')
if __name__ == '__main__':
    """
    This code makes this function executable from terminal with the following arguments:
    Serial port : '/dev/your_port'
    Baud Rate : '115200'
    Time Out : '2' (Sometimes needed to troubleshoot)
    A name for the Serial Port : 'serial_port_1'
    A name for the Physical Device you want to use : 'arduino_1'
    A name for the IRCBot : 'Robby_The_Robot' (needs to be the same name as the folder with the Brain.csv file!!)
    The owner of the IRCBot : YourChatNickName
    The IRC network : 'irc.freenode.net'
    The IRC port : 6667
    The IRC channel : '#YourChannel'
    Terminal command: (one line)
    $ python startIRCInteractivity.py
    'your/serial_device'
    '115200'
    '2'
    'serial_port_1'
    'arduino_1'
    'RobbyTheRobot'
    'DeRaaf'
    'irc.freenode.net'
    '6667'
    '##SandBoxDeRaaf'
    """
    # Forward the ten positional CLI arguments to main(); raises IndexError
    # if fewer than ten are supplied.
    main(sys.argv[1],
         sys.argv[2],
         sys.argv[3],
         sys.argv[4],
         sys.argv[5],
         sys.argv[6],
         sys.argv[7],
         sys.argv[8],
         sys.argv[9],
         sys.argv[10])
73,760 | DeRaafMedia/ProjectIRCInteractivity | refs/heads/master | /Arduino.py | __author__ = 'DeRaaf'
# TODO Clean up comments. Fix bugs. On going project!
from SerialPort import *
class Arduino(SerialPort):
    """Serial-connected Arduino; glue between SerialPort and Pin."""

    def __init__(self,
                 serial_port_id,
                 physical_device_id):
        """
        serial_port_id -> an existing SerialPort instance (i.e serial_port_2)
        physical_device_id -> application-level name, e.g. 'arduino_1'
        Object can be addressed but serves as glue between SerialPort and Pin
        :param serial_port_id:
        :return:
        """
        super(Arduino, self).__init__(serial_port_id.serial_port,
                                      serial_port_id.baud_rate,
                                      serial_port_id.time_out,
                                      serial_port_id.serial_port_id)
        # NOTE(review): this opens the serial device a *second* time (the
        # super() call above already created a serial.Serial) -- confirm the
        # duplicate connection is intentional before removing it.
        self.arduino_attr = SerialPort(self.serial_port,
                                       self.baud_rate,
                                       self.time_out,
                                       self.serial_port_id)
        self.physical_device_id = physical_device_id

    def __str__(self):
        return '\n\nSerial port: {0}\n' \
               'Baudrate : {1}\n' \
               'Time Out : {2}\n' \
               'Physical Device ID : {3}' \
               '\n\n'.format(self.serial_port,
                             self.baud_rate,
                             self.time_out,
                             self.physical_device_id)

    def __getattr__(self, name):
        # BUGFIX: Python calls __getattr__(self, name); the original omitted
        # the ``name`` parameter, so every missing-attribute lookup raised
        # TypeError instead of returning the fallback string.
        return '{0}'.format('Not Found')

    def soft_reset(self):
        """
        Function to perform a soft reset of the Arduino
        """
        # Protocol command '3/3/' is the sketch's soft-reset opcode.
        self.push_data('{0}/{1}/'.format(3, 3))
# TODO scaffold code needs to be implemented!!
# TODO scaffold code needs to be implemented!!
class Pin(Arduino):
    """Addressable single pin on an Arduino."""

    def __init__(self,
                 arduino_id,
                 pin_number,
                 pin_type):
        """
        arduino_id -> Give the arduino id as a variable name (i.e arduino_1)
        pin_number -> Give the number of a pin as an int (i.e 10)
        pin_type -> Give he type of pin four possible (i.e 'analog_in', 'analog_out', 'digital_in' and 'digital_out')
        Pin object. This function makes individual pins on Arduino addressable
        :param arduino_id:
        :param pin_number:
        :param pin_type:
        :return:
        """
        # BUGFIX: Arduino.__init__ requires (serial_port_id, physical_device_id);
        # the original passed only arduino_id, so constructing any Pin raised
        # TypeError. The Arduino instance itself carries all the serial-port
        # attributes Arduino.__init__ reads, so it serves as serial_port_id.
        super(Pin, self).__init__(arduino_id, arduino_id.physical_device_id)
        self.pin_number = pin_number
        self.pin_type = pin_type
        self.close_serial_port()

    def __str__(self):
        return '\n\nPin number: {0}\n' \
               'Pin type: {1}' \
               '\n\n'.format(self.pin_number,
                             self.pin_type)

    def analog_read(self):
        """
        Analog read function not yet implemented
        :return:
        """
        if self.pin_type != 'analog_in':
            print('\nError: Pin needs to be analog_in. Pin {0} on {1} is {2}'
                  '\n'.format(self.pin_number,
                              self.serial_port,
                              self.pin_type))
        else:
            # TODO read analog function.
            print('\nGood to go\n')

    def analog_write(self,
                     analog_value):
        """
        analog_value -> A value between 0 - 255 (i.e 125)
        Function handles the analog write communications for specified pin
        :param analog_value:
        :return:
        """
        if self.pin_type != 'analog_out':
            print('\nError: Pin needs to be analog_out. Pin {0} on {1} is {2}'
                  '\n'.format(self.pin_number,
                              self.serial_port,
                              self.pin_type))
        else:
            # Wire protocol: '1/1/<pin>/<value>/' = analog write command.
            self.push_data('{0}/{1}/{2}/{3}/'.format(1,
                                                     1,
                                                     self.pin_number,
                                                     analog_value))

    def digital_read(self):
        """
        Digital read function not yet implemented
        :return:
        """
        if self.pin_type != 'digital_in':
            print('\nError: Pin needs to be digital_in. Pin {0} on {1} is {2}'
                  '\n'.format(self.pin_number,
                              self.serial_port,
                              self.pin_type))
        else:
            print('\nGood to go\n')

    def digital_write(self,
                      digital_value):
        """
        digital_value -> Binary 0 is LOW (false) or 1 is HIGH (true)
        :param digital_value:
        :return:
        """
        if self.pin_type != 'digital_out':
            print('\nError: Pin needs to be digital_out. Pin {0} on {1} is {2}'
                  '\n'.format(self.pin_number,
                              self.serial_port,
                              self.pin_type))
        else:
            # Wire protocol: '1/2/<pin>/<value>/' = digital write command.
            self.push_data('{0}/{1}/{2}/{3}/'.format(1,
                                                     2,
                                                     self.pin_number,
                                                     digital_value))
73,761 | DeRaafMedia/ProjectIRCInteractivity | refs/heads/master | /skills/template.py | import sys
def main(arg_1, arg_2, arg_3, arg_4):
    """Skill entry-point template: replace this no-op with real logic."""
    pass
if __name__ == "__main__":
    # Allow running the skill directly: forward the first four CLI args.
    main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
    pass
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.